0
0
mirror of https://github.com/go-gitea/gitea.git synced 2026-04-12 16:30:43 +02:00

Merge branch 'main' into fix-rpm-routing-main-37086

This commit is contained in:
Giteabot 2026-04-03 12:11:30 +08:00 committed by GitHub
commit 2e95e8f65f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 1111 additions and 551 deletions

View File

@ -322,16 +322,16 @@ func GetRunByRepoAndID(ctx context.Context, repoID, runID int64) (*ActionRun, er
return &run, nil
}
func GetRunByIndex(ctx context.Context, repoID, index int64) (*ActionRun, error) {
func GetRunByRepoAndIndex(ctx context.Context, repoID, runIndex int64) (*ActionRun, error) {
run := &ActionRun{
RepoID: repoID,
Index: index,
Index: runIndex,
}
has, err := db.GetEngine(ctx).Get(run)
if err != nil {
return nil, err
} else if !has {
return nil, fmt.Errorf("run with index %d %d: %w", repoID, index, util.ErrNotExist)
return nil, fmt.Errorf("run with repo_id %d and index %d: %w", repoID, runIndex, util.ErrNotExist)
}
return run, nil

View File

@ -18,6 +18,11 @@ import (
"xorm.io/builder"
)
// MaxJobNumPerRun is the maximum number of jobs in a single run.
// https://docs.github.com/en/actions/reference/limits#existing-system-limits
// TODO: check this limit when creating jobs
const MaxJobNumPerRun = 256
// ActionRunJob represents a job of a run
type ActionRunJob struct {
ID int64

View File

@ -1,9 +1,29 @@
# type ActionRun struct {
# ID int64 `xorm:"pk autoincr"`
# RepoID int64 `xorm:"index"`
# Index int64
# ID int64 `xorm:"pk autoincr"`
# RepoID int64 `xorm:"index"`
# Index int64
# CommitSHA string `xorm:"commit_sha"`
# Event string
# TriggerEvent string
# EventPayload string `xorm:"LONGTEXT"`
# }
-
id: 106
repo_id: 1
id: 990
repo_id: 100
index: 7
commit_sha: merge-sha
event: pull_request
event_payload: '{"pull_request":{"head":{"sha":"sha-shared"}}}'
-
id: 991
repo_id: 100
index: 8
commit_sha: sha-shared
event: push
event_payload: '{"head_commit":{"id":"sha-shared"}}'
-
id: 1991
repo_id: 100
index: 9
commit_sha: sha-other
event: release

View File

@ -3,8 +3,14 @@
# RunID int64 `xorm:"index"`
# }
-
id: 530
run_id: 106
id: 997
run_id: 990
-
id: 531
run_id: 106
id: 998
run_id: 990
-
id: 1997
run_id: 991
-
id: 1998
run_id: 1991

View File

@ -1,29 +1,51 @@
# type CommitStatus struct {
# ID int64 `xorm:"pk autoincr"`
# RepoID int64 `xorm:"index"`
# SHA string
# TargetURL string
# }
-
id: 10
repo_id: 1
id: 10010
repo_id: 100
sha: sha-shared
target_url: /testuser/repo1/actions/runs/7/jobs/0
-
id: 11
repo_id: 1
id: 10011
repo_id: 100
sha: sha-shared
target_url: /testuser/repo1/actions/runs/7/jobs/1
-
id: 12
repo_id: 1
id: 10012
repo_id: 100
sha: sha-shared
target_url: /testuser/repo1/actions/runs/8/jobs/0
-
id: 10013
repo_id: 100
sha: sha-other
target_url: /testuser/repo1/actions/runs/9/jobs/0
-
id: 10014
repo_id: 100
sha: sha-shared
target_url: /otheruser/badrepo/actions/runs/7/jobs/0
-
id: 13
repo_id: 1
id: 10015
repo_id: 100
sha: sha-shared
target_url: /testuser/repo1/actions/runs/10/jobs/0
-
id: 14
repo_id: 1
id: 10016
repo_id: 100
sha: sha-shared
target_url: /testuser/repo1/actions/runs/7/jobs/3
-
id: 15
repo_id: 1
id: 10017
repo_id: 100
sha: sha-shared
target_url: https://ci.example.com/build/123
-
id: 10018
repo_id: 100
sha: sha-shared
target_url: /testuser/repo1/actions/runs/990/jobs/997

View File

@ -6,14 +6,14 @@
# TargetURL string
# }
-
id: 20
repo_id: 1
sha: "012345"
state: success
id: 10020
repo_id: 100
sha: sha-shared
state: pending
target_url: /testuser/repo1/actions/runs/7/jobs/0
-
id: 21
repo_id: 1
sha: "678901"
state: success
target_url: https://ci.example.com/build/123
id: 10021
repo_id: 100
sha: sha-other
state: pending
target_url: /testuser/repo1/actions/runs/9/jobs/0

View File

@ -4,6 +4,6 @@
# Name string
# }
-
id: 1
id: 100
owner_name: testuser
name: repo1

View File

@ -400,7 +400,7 @@ func prepareMigrationTasks() []*migration {
newMigration(323, "Add support for actions concurrency", v1_26.AddActionsConcurrency),
newMigration(324, "Fix closed milestone completeness for milestones with no issues", v1_26.FixClosedMilestoneCompleteness),
newMigration(325, "Fix missed repo_id when migrate attachments", v1_26.FixMissedRepoIDWhenMigrateAttachments),
newMigration(326, "Migrate commit status target URL to use run ID and job ID", v1_26.FixCommitStatusTargetURLToUseRunAndJobID),
newMigration(326, "Partially migrate commit status target URL to use run ID and job ID", v1_26.FixCommitStatusTargetURLToUseRunAndJobID),
newMigration(327, "Add disabled state to action runners", v1_26.AddDisabledToActionRunner),
newMigration(328, "Add TokenPermissions column to ActionRunJob", v1_26.AddTokenPermissionsToActionRunJob),
newMigration(329, "Add unique constraint for user badge", v1_26.AddUniqueIndexForUserBadge),

View File

@ -4,18 +4,33 @@
package v1_26
import (
"errors"
"fmt"
"net/url"
"strconv"
"strings"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
webhook_module "code.gitea.io/gitea/modules/webhook"
"xorm.io/xorm"
)
const actionsRunPath = "/actions/runs/"
const (
actionsRunPath = "/actions/runs/"
// Only commit status target URLs whose resolved run ID is smaller than this threshold are rewritten by this partial migration.
// The fixed value 1000 is a conservative cutoff chosen to cover the smaller legacy run indexes that are most likely to be confused with ID-based URLs at runtime.
// Larger legacy {run} or {job} numbers are usually easier to disambiguate. For example:
// * /actions/runs/1200/jobs/1420 is most likely an ID-based URL, because a run should not contain more than 256 jobs.
// * /actions/runs/1500/jobs/3 is most likely an index-based URL, because a job ID cannot be smaller than its run ID.
// But URLs with small numbers, such as /actions/runs/5/jobs/6, are much harder to distinguish reliably.
// This migration therefore prioritizes rewriting target URLs for runs in that lower range.
legacyURLIDThreshold int64 = 1000
)
type migrationRepository struct {
ID int64
@ -24,9 +39,13 @@ type migrationRepository struct {
}
type migrationActionRun struct {
ID int64
RepoID int64
Index int64
ID int64
RepoID int64
Index int64
CommitSHA string `xorm:"commit_sha"`
Event webhook_module.HookEventType
TriggerEvent string
EventPayload string
}
type migrationActionRunJob struct {
@ -40,93 +59,133 @@ type migrationCommitStatus struct {
TargetURL string
}
func FixCommitStatusTargetURLToUseRunAndJobID(x *xorm.Engine) error {
runByIndexCache := make(map[int64]map[int64]*migrationActionRun)
jobsByRunIDCache := make(map[int64][]int64)
repoLinkCache := make(map[int64]string)
if err := migrateCommitStatusTargetURL(x, "commit_status", runByIndexCache, jobsByRunIDCache, repoLinkCache); err != nil {
return err
}
return migrateCommitStatusTargetURL(x, "commit_status_summary", runByIndexCache, jobsByRunIDCache, repoLinkCache)
type commitSHAAndRuns struct {
commitSHA string
runs map[int64]*migrationActionRun
}
func migrateCommitStatusTargetURL(
// FixCommitStatusTargetURLToUseRunAndJobID partially migrates legacy Actions
// commit status target URLs to the new run/job ID-based form.
//
// Only rows whose resolved run ID is below legacyURLIDThreshold are rewritten.
// This is because smaller legacy run indexes are more likely to collide with run ID URLs during runtime resolution,
// so this migration prioritizes that lower range and leaves the remaining legacy target URLs to the web compatibility logic.
func FixCommitStatusTargetURLToUseRunAndJobID(x *xorm.Engine) error {
jobsByRunIDCache := make(map[int64][]int64)
repoLinkCache := make(map[int64]string)
groups, err := loadLegacyMigrationRunGroups(x)
if err != nil {
return err
}
for repoID, groupsBySHA := range groups {
for _, group := range groupsBySHA {
if err := migrateCommitStatusTargetURLForGroup(x, "commit_status", repoID, group.commitSHA, group.runs, jobsByRunIDCache, repoLinkCache); err != nil {
return err
}
if err := migrateCommitStatusTargetURLForGroup(x, "commit_status_summary", repoID, group.commitSHA, group.runs, jobsByRunIDCache, repoLinkCache); err != nil {
return err
}
}
}
return nil
}
func loadLegacyMigrationRunGroups(x *xorm.Engine) (map[int64]map[string]*commitSHAAndRuns, error) {
var runs []migrationActionRun
if err := x.Table("action_run").
Where("id < ?", legacyURLIDThreshold).
Cols("id", "repo_id", "`index`", "commit_sha", "event", "trigger_event", "event_payload").
Find(&runs); err != nil {
return nil, fmt.Errorf("query action_run: %w", err)
}
groups := make(map[int64]map[string]*commitSHAAndRuns)
for i := range runs {
run := runs[i]
commitID, err := getCommitStatusCommitID(&run)
if err != nil {
log.Warn("skip action_run id=%d when resolving commit status commit SHA: %v", run.ID, err)
continue
}
if commitID == "" {
// empty commitID means the run didn't create any commit status records, just skip
continue
}
if groups[run.RepoID] == nil {
groups[run.RepoID] = make(map[string]*commitSHAAndRuns)
}
if groups[run.RepoID][commitID] == nil {
groups[run.RepoID][commitID] = &commitSHAAndRuns{
commitSHA: commitID,
runs: make(map[int64]*migrationActionRun),
}
}
groups[run.RepoID][commitID].runs[run.Index] = &run
}
return groups, nil
}
func migrateCommitStatusTargetURLForGroup(
x *xorm.Engine,
table string,
runByIndexCache map[int64]map[int64]*migrationActionRun,
repoID int64,
sha string,
runs map[int64]*migrationActionRun,
jobsByRunIDCache map[int64][]int64,
repoLinkCache map[int64]string,
) error {
const batchSize = 500
var lastID int64
var rows []migrationCommitStatus
if err := x.Table(table).
Where("repo_id = ?", repoID).
And("sha = ?", sha).
Cols("id", "repo_id", "target_url").
Find(&rows); err != nil {
return fmt.Errorf("query %s for repo_id=%d sha=%s: %w", table, repoID, sha, err)
}
for {
var rows []migrationCommitStatus
sess := x.Table(table).
Where("target_url LIKE ?", "%"+actionsRunPath+"%").
And("id > ?", lastID).
Asc("id").
Limit(batchSize)
if err := sess.Find(&rows); err != nil {
return fmt.Errorf("query %s: %w", table, err)
}
if len(rows) == 0 {
return nil
for _, row := range rows {
repoLink, err := getRepoLinkCached(x, repoLinkCache, row.RepoID)
if err != nil || repoLink == "" {
if err != nil {
log.Warn("convert %s id=%d getRepoLinkCached: %v", table, row.ID, err)
} else {
log.Warn("convert %s id=%d: repo=%d not found", table, row.ID, row.RepoID)
}
continue
}
for _, row := range rows {
lastID = row.ID
if row.TargetURL == "" {
continue
}
runNum, jobNum, ok := parseTargetURL(row.TargetURL, repoLink)
if !ok {
continue
}
repoLink, err := getRepoLinkCached(x, repoLinkCache, row.RepoID)
if err != nil || repoLink == "" {
if err != nil {
log.Warn("convert %s id=%d getRepoLinkCached: %v", table, row.ID, err)
} else {
log.Warn("convert %s id=%d: repo=%d not found", table, row.ID, row.RepoID)
}
continue
}
run, ok := runs[runNum]
if !ok {
continue
}
runNum, jobNum, ok := parseTargetURL(row.TargetURL, repoLink)
if !ok {
continue
jobID, ok, err := getJobIDByIndexCached(x, jobsByRunIDCache, run.ID, jobNum)
if err != nil || !ok {
if err != nil {
log.Warn("convert %s id=%d getJobIDByIndexCached: %v", table, row.ID, err)
} else {
log.Warn("convert %s id=%d: job not found for run_id=%d job_index=%d", table, row.ID, run.ID, jobNum)
}
continue
}
run, err := getRunByIndexCached(x, runByIndexCache, row.RepoID, runNum)
if err != nil || run == nil {
if err != nil {
log.Warn("convert %s id=%d getRunByIndexCached: %v", table, row.ID, err)
} else {
log.Warn("convert %s id=%d: run not found for repo_id=%d run_index=%d", table, row.ID, row.RepoID, runNum)
}
continue
}
oldURL := row.TargetURL
newURL := fmt.Sprintf("%s%s%d/jobs/%d", repoLink, actionsRunPath, run.ID, jobID)
if oldURL == newURL {
continue
}
jobID, ok, err := getJobIDByIndexCached(x, jobsByRunIDCache, run.ID, jobNum)
if err != nil || !ok {
if err != nil {
log.Warn("convert %s id=%d getJobIDByIndexCached: %v", table, row.ID, err)
} else {
log.Warn("convert %s id=%d: job not found for run_id=%d job_index=%d", table, row.ID, run.ID, jobNum)
}
continue
}
oldURL := row.TargetURL
newURL := fmt.Sprintf("%s%s%d/jobs/%d", repoLink, actionsRunPath, run.ID, jobID) // expect: {repo_link}/actions/runs/{run_id}/jobs/{job_id}
if oldURL == newURL {
continue
}
if _, err := x.Table(table).ID(row.ID).Cols("target_url").Update(&migrationCommitStatus{TargetURL: newURL}); err != nil {
return fmt.Errorf("update %s id=%d target_url from %s to %s: %w", table, row.ID, oldURL, newURL, err)
}
if _, err := x.Table(table).ID(row.ID).Cols("target_url").Update(&migrationCommitStatus{TargetURL: newURL}); err != nil {
return fmt.Errorf("update %s id=%d target_url from %s to %s: %w", table, row.ID, oldURL, newURL, err)
}
}
return nil
}
func getRepoLinkCached(x *xorm.Engine, cache map[int64]string, repoID int64) (string, error) {
@ -147,35 +206,6 @@ func getRepoLinkCached(x *xorm.Engine, cache map[int64]string, repoID int64) (st
return link, nil
}
func getRunByIndexCached(x *xorm.Engine, cache map[int64]map[int64]*migrationActionRun, repoID, runIndex int64) (*migrationActionRun, error) {
if repoCache, ok := cache[repoID]; ok {
if run, ok := repoCache[runIndex]; ok {
if run == nil {
return nil, fmt.Errorf("run repo_id=%d run_index=%d not found", repoID, runIndex)
}
return run, nil
}
}
var run migrationActionRun
has, err := x.Table("action_run").Where("repo_id=? AND `index`=?", repoID, runIndex).Get(&run)
if err != nil {
return nil, err
}
if !has {
if cache[repoID] == nil {
cache[repoID] = make(map[int64]*migrationActionRun)
}
cache[repoID][runIndex] = nil
return nil, fmt.Errorf("run repo_id=%d run_index=%d not found", repoID, runIndex)
}
if cache[repoID] == nil {
cache[repoID] = make(map[int64]*migrationActionRun)
}
cache[repoID][runIndex] = &run
return &run, nil
}
func getJobIDByIndexCached(x *xorm.Engine, cache map[int64][]int64, runID, jobIndex int64) (int64, bool, error) {
jobIDs, ok := cache[runID]
if !ok {
@ -202,7 +232,7 @@ func parseTargetURL(targetURL, repoLink string) (runNum, jobNum int64, ok bool)
}
rest := targetURL[len(prefix):]
parts := strings.Split(rest, "/") // expect: {run_num}/jobs/{job_num}
parts := strings.Split(rest, "/")
if len(parts) == 3 && parts[1] == "jobs" {
runNum, err1 := strconv.ParseInt(parts[0], 10, 64)
jobNum, err2 := strconv.ParseInt(parts[2], 10, 64)
@ -214,3 +244,72 @@ func parseTargetURL(targetURL, repoLink string) (runNum, jobNum int64, ok bool)
return 0, 0, false
}
// getCommitStatusCommitID resolves the commit SHA that commit status records
// for the given run would have been attached to, based on the run's event
// type and event payload.
//
// It returns an empty string with a nil error when the event type does not
// create commit status records (the caller skips such runs), and a non-nil
// error when the payload is missing the expected fields.
func getCommitStatusCommitID(run *migrationActionRun) (string, error) {
	switch run.Event {
	case webhook_module.HookEventPush:
		payload, err := getPushEventPayload(run)
		if err != nil {
			return "", fmt.Errorf("getPushEventPayload: %w", err)
		}
		if payload.HeadCommit == nil {
			return "", errors.New("head commit is missing in event payload")
		}
		return payload.HeadCommit.ID, nil
	case webhook_module.HookEventPullRequest,
		webhook_module.HookEventPullRequestSync,
		webhook_module.HookEventPullRequestAssign,
		webhook_module.HookEventPullRequestLabel,
		webhook_module.HookEventPullRequestReviewRequest,
		webhook_module.HookEventPullRequestMilestone,
		webhook_module.HookEventPullRequestReviewApproved,
		webhook_module.HookEventPullRequestReviewRejected,
		webhook_module.HookEventPullRequestReviewComment:
		// pull-request and pull-request-review events share the same payload
		// shape, so both groups are handled by one case arm (the original had
		// two arms with identical bodies).
		payload, err := getPullRequestEventPayload(run)
		if err != nil {
			return "", fmt.Errorf("getPullRequestEventPayload: %w", err)
		}
		if payload.PullRequest == nil {
			return "", errors.New("pull request is missing in event payload")
		} else if payload.PullRequest.Head == nil {
			return "", errors.New("head of pull request is missing in event payload")
		}
		return payload.PullRequest.Head.Sha, nil
	case webhook_module.HookEventRelease:
		// release runs carry the tag's commit SHA directly on the run record
		return run.CommitSHA, nil
	default:
		// other event types do not create commit status records
		return "", nil
	}
}
// getPushEventPayload decodes the run's stored event payload as a push
// payload. It returns an error when the run is not a push event or when the
// payload JSON cannot be unmarshalled.
func getPushEventPayload(run *migrationActionRun) (*api.PushPayload, error) {
	if run.Event != webhook_module.HookEventPush {
		return nil, fmt.Errorf("event %s is not a push event", run.Event)
	}
	payload := &api.PushPayload{}
	if err := json.Unmarshal([]byte(run.EventPayload), payload); err != nil {
		return nil, err
	}
	return payload, nil
}
// getPullRequestEventPayload decodes the run's stored event payload as a pull
// request payload. It returns an error when the run is neither a pull request
// nor a pull request review event, or when the payload JSON cannot be
// unmarshalled.
func getPullRequestEventPayload(run *migrationActionRun) (*api.PullRequestPayload, error) {
	if !run.Event.IsPullRequest() && !run.Event.IsPullRequestReview() {
		return nil, fmt.Errorf("event %s is not a pull request event", run.Event)
	}
	payload := &api.PullRequestPayload{}
	if err := json.Unmarshal([]byte(run.EventPayload), payload); err != nil {
		return nil, err
	}
	return payload, nil
}

View File

@ -28,9 +28,13 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) {
}
type ActionRun struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"index"`
Index int64
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"index"`
Index int64
CommitSHA string `xorm:"commit_sha"`
Event string
TriggerEvent string
EventPayload string `xorm:"LONGTEXT"`
}
type ActionRunJob struct {
@ -41,6 +45,7 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) {
type CommitStatus struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"index"`
SHA string
TargetURL string
}
@ -61,14 +66,6 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) {
)
defer deferable()
newURL1 := "/testuser/repo1/actions/runs/106/jobs/530"
newURL2 := "/testuser/repo1/actions/runs/106/jobs/531"
invalidWrongRepo := "/otheruser/badrepo/actions/runs/7/jobs/0"
invalidNonexistentRun := "/testuser/repo1/actions/runs/10/jobs/0"
invalidNonexistentJob := "/testuser/repo1/actions/runs/7/jobs/3"
externalTargetURL := "https://ci.example.com/build/123"
require.NoError(t, FixCommitStatusTargetURLToUseRunAndJobID(x))
cases := []struct {
@ -76,14 +73,26 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) {
id int64
want string
}{
{table: "commit_status", id: 10, want: newURL1},
{table: "commit_status", id: 11, want: newURL2},
{table: "commit_status", id: 12, want: invalidWrongRepo},
{table: "commit_status", id: 13, want: invalidNonexistentRun},
{table: "commit_status", id: 14, want: invalidNonexistentJob},
{table: "commit_status", id: 15, want: externalTargetURL},
{table: "commit_status_summary", id: 20, want: newURL1},
{table: "commit_status_summary", id: 21, want: externalTargetURL},
// Legacy URLs for runs whose resolved run IDs are below the threshold should be rewritten.
{table: "commit_status", id: 10010, want: "/testuser/repo1/actions/runs/990/jobs/997"},
{table: "commit_status", id: 10011, want: "/testuser/repo1/actions/runs/990/jobs/998"},
{table: "commit_status", id: 10012, want: "/testuser/repo1/actions/runs/991/jobs/1997"},
// Runs whose resolved IDs are above the threshold are intentionally left unchanged.
{table: "commit_status", id: 10013, want: "/testuser/repo1/actions/runs/9/jobs/0"},
// URLs that do not resolve cleanly as legacy Actions URLs should remain untouched.
{table: "commit_status", id: 10014, want: "/otheruser/badrepo/actions/runs/7/jobs/0"},
{table: "commit_status", id: 10015, want: "/testuser/repo1/actions/runs/10/jobs/0"},
{table: "commit_status", id: 10016, want: "/testuser/repo1/actions/runs/7/jobs/3"},
{table: "commit_status", id: 10017, want: "https://ci.example.com/build/123"},
// Already ID-based URLs are valid inputs and should not be rewritten again.
{table: "commit_status", id: 10018, want: "/testuser/repo1/actions/runs/990/jobs/997"},
// The same rewrite rules apply to commit_status_summary rows.
{table: "commit_status_summary", id: 10020, want: "/testuser/repo1/actions/runs/990/jobs/997"},
{table: "commit_status_summary", id: 10021, want: "/testuser/repo1/actions/runs/9/jobs/0"},
}
for _, tc := range cases {

View File

@ -5,13 +5,9 @@
package highlight
import (
"bufio"
"bytes"
"fmt"
gohtml "html"
"html/template"
"io"
"strings"
"slices"
"sync"
"code.gitea.io/gitea/modules/log"
@ -23,12 +19,14 @@ import (
"github.com/alecthomas/chroma/v2/styles"
)
// don't index files larger than this many bytes for performance purposes
// don't highlight files larger than this many bytes for performance purposes
const sizeLimit = 1024 * 1024
type globalVarsType struct {
highlightMapping map[string]string
githubStyles *chroma.Style
escapeFull []template.HTML
escCtrlCharsMap []template.HTML
}
var (
@ -44,10 +42,69 @@ func globalVars() *globalVarsType {
globalVarsPtr = &globalVarsType{}
globalVarsPtr.githubStyles = styles.Get("github")
globalVarsPtr.highlightMapping = setting.GetHighlightMapping()
globalVarsPtr.escCtrlCharsMap = make([]template.HTML, 256)
// ASCII Table 0x00 - 0x1F
controlCharNames := []string{
"NUL", "SOH", "STX", "ETX", "EOT", "ENQ", "ACK", "BEL",
"BS", "HT", "LF", "VT", "FF", "CR", "SO", "SI",
"DLE", "DC1", "DC2", "DC3", "DC4", "NAK", "SYN", "ETB",
"CAN", "EM", "SUB", "ESC", "FS", "GS", "RS", "US",
}
// Uncomment this line if you'd like to debug the layout without creating a special file; Space (0x20) will then also be escaped.
// Don't worry: even if you forget to comment it out and push it to the git repo, the CI tests will catch it and fail.
// controlCharNames = append(controlCharNames, "SP")
for i, s := range controlCharNames {
globalVarsPtr.escCtrlCharsMap[i] = template.HTML(`<span class="broken-code-point" data-escaped="` + s + `"><span class="char">` + string(byte(i)) + `</span></span>`)
}
globalVarsPtr.escCtrlCharsMap[0x7f] = template.HTML(`<span class="broken-code-point" data-escaped="DEL"><span class="char">` + string(byte(0x7f)) + `</span></span>`)
globalVarsPtr.escCtrlCharsMap['\t'] = ""
globalVarsPtr.escCtrlCharsMap['\n'] = ""
globalVarsPtr.escCtrlCharsMap['\r'] = ""
globalVarsPtr.escapeFull = slices.Clone(globalVarsPtr.escCtrlCharsMap)
// exactly the same as Golang's html.EscapeString
globalVarsPtr.escapeFull['&'] = "&amp;"
globalVarsPtr.escapeFull['\''] = "&#39;"
globalVarsPtr.escapeFull['<'] = "&lt;"
globalVarsPtr.escapeFull['>'] = "&gt;"
globalVarsPtr.escapeFull['"'] = "&#34;"
}
return globalVarsPtr
}
// escapeByMap renders code as HTML, replacing every byte that has a non-empty
// entry in escapeMap with that entry. escapeMap is indexed by byte value, so
// it must cover all 256 possible bytes.
// When no byte needs escaping, the input is returned as-is without copying.
func escapeByMap(code []byte, escapeMap []template.HTML) template.HTML {
	pos := 0
	for pos < len(code) && escapeMap[code[pos]] == "" {
		pos++
	}
	if pos == len(code) {
		// fast path: nothing to escape, avoid allocating a new buffer
		return template.HTML(util.UnsafeBytesToString(code))
	}
	// copy the clean prefix, then process the remainder byte by byte
	out := make([]byte, pos, len(code)*2)
	copy(out, code[:pos])
	for ; pos < len(code); pos++ {
		b := code[pos]
		if replacement := escapeMap[b]; replacement != "" {
			out = append(out, replacement...)
		} else {
			out = append(out, b)
		}
	}
	return template.HTML(util.UnsafeBytesToString(out))
}
func escapeFullString(code string) template.HTML {
return escapeByMap(util.UnsafeStringToBytes(code), globalVars().escapeFull)
}
func escapeControlChars(code []byte) template.HTML {
return escapeByMap(code, globalVars().escCtrlCharsMap)
}
// UnsafeSplitHighlightedLines splits highlighted code into lines preserving HTML tags
// It always includes '\n', '\n' can appear at the end of each line or in the middle of HTML tags
// The '\n' is necessary for copying code from web UI to preserve original code lines
@ -90,7 +147,7 @@ func RenderCodeSlowGuess(fileName, language, code string) (output template.HTML,
}
if len(code) > sizeLimit {
return template.HTML(template.HTMLEscapeString(code)), nil, ""
return escapeFullString(code), nil, ""
}
lexer = detectChromaLexerWithAnalyze(fileName, language, util.UnsafeStringToBytes(code)) // it is also slow
@ -104,86 +161,66 @@ func RenderCodeByLexer(lexer chroma.Lexer, code string) template.HTML {
html.PreventSurroundingPre(true),
)
htmlbuf := bytes.Buffer{}
htmlw := bufio.NewWriter(&htmlbuf)
iterator, err := lexer.Tokenise(nil, code)
if err != nil {
log.Error("Can't tokenize code: %v", err)
return template.HTML(template.HTMLEscapeString(code))
}
// style not used for live site but need to pass something
err = formatter.Format(htmlw, globalVars().githubStyles, iterator)
if err != nil {
log.Error("Can't format code: %v", err)
return template.HTML(template.HTMLEscapeString(code))
return escapeFullString(code)
}
_ = htmlw.Flush()
// Chroma will add newlines for certain lexers in order to highlight them properly
// Once highlighted, strip them here, so they don't cause copy/paste trouble in HTML output
return template.HTML(strings.TrimSuffix(htmlbuf.String(), "\n"))
htmlBuf := &bytes.Buffer{}
// style not used for live site but need to pass something
err = formatter.Format(htmlBuf, globalVars().githubStyles, iterator)
if err != nil {
log.Error("Can't format code: %v", err)
return escapeFullString(code)
}
// At the moment, we do not escape control chars here (unlike RenderFullFile which escapes control chars).
// The reason is: it is a very rare case that a text file contains control chars.
// This function is usually used by highlight diff and blame, not quite sure whether there will be side effects.
// If new user feedback arrives about this, we can reconsider the various edge cases.
return template.HTML(htmlBuf.String())
}
// RenderFullFile returns a slice of chroma syntax highlighted HTML lines of code and the matched lexer name
func RenderFullFile(fileName, language string, code []byte) ([]template.HTML, string, error) {
if len(code) > sizeLimit {
return RenderPlainText(code), "", nil
func RenderFullFile(fileName, language string, code []byte) ([]template.HTML, string) {
if language == LanguagePlaintext || len(code) > sizeLimit {
return renderPlainText(code), formatLexerName(LanguagePlaintext)
}
formatter := html.New(html.WithClasses(true),
html.WithLineNumbers(false),
html.PreventSurroundingPre(true),
)
lexer := detectChromaLexerWithAnalyze(fileName, language, code)
lexerName := formatLexerName(lexer.Config().Name)
iterator, err := lexer.Tokenise(nil, string(code))
if err != nil {
return nil, "", fmt.Errorf("can't tokenize code: %w", err)
rendered := RenderCodeByLexer(lexer, util.UnsafeBytesToString(code))
unsafeLines := UnsafeSplitHighlightedLines(rendered)
lines := make([]template.HTML, 0, len(unsafeLines))
for _, lineBytes := range unsafeLines {
line := escapeControlChars(lineBytes)
lines = append(lines, line)
}
tokensLines := chroma.SplitTokensIntoLines(iterator.Tokens())
htmlBuf := &bytes.Buffer{}
lines := make([]template.HTML, 0, len(tokensLines))
for _, tokens := range tokensLines {
iterator = chroma.Literator(tokens...)
err = formatter.Format(htmlBuf, globalVars().githubStyles, iterator)
if err != nil {
return nil, "", fmt.Errorf("can't format code: %w", err)
}
lines = append(lines, template.HTML(htmlBuf.String()))
htmlBuf.Reset()
}
return lines, lexerName, nil
return lines, lexerName
}
// RenderPlainText returns non-highlighted HTML for code
func RenderPlainText(code []byte) []template.HTML {
r := bufio.NewReader(bytes.NewReader(code))
m := make([]template.HTML, 0, bytes.Count(code, []byte{'\n'})+1)
for {
content, err := r.ReadString('\n')
if err != nil && err != io.EOF {
log.Error("failed to read string from buffer: %v", err)
break
// renderPlainText returns non-highlighted HTML for code
func renderPlainText(code []byte) []template.HTML {
lines := make([]template.HTML, 0, bytes.Count(code, []byte{'\n'})+1)
pos := 0
for pos < len(code) {
var content []byte
nextPos := bytes.IndexByte(code[pos:], '\n')
if nextPos == -1 {
content = code[pos:]
pos = len(code)
} else {
content = code[pos : pos+nextPos+1]
pos += nextPos + 1
}
if content == "" && err == io.EOF {
break
}
s := template.HTML(gohtml.EscapeString(content))
m = append(m, s)
lines = append(lines, escapeFullString(util.UnsafeBytesToString(content)))
}
return m
return lines
}
func formatLexerName(name string) string {
if name == "fallback" {
if name == LanguagePlaintext || name == chromaLexerFallback {
return "Plaintext"
}
return util.ToTitleCaseNoLower(name)
}

View File

@ -118,8 +118,7 @@ c=2
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
out, lexerName, err := RenderFullFile(tt.name, "", []byte(tt.code))
assert.NoError(t, err)
out, lexerName := RenderFullFile(tt.name, "", []byte(tt.code))
assert.Equal(t, tt.want, out)
assert.Equal(t, tt.lexerName, lexerName)
})
@ -182,7 +181,7 @@ c=2`),
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
out := RenderPlainText([]byte(tt.code))
out := renderPlainText([]byte(tt.code))
assert.Equal(t, tt.want, out)
})
}
@ -205,3 +204,14 @@ func TestUnsafeSplitHighlightedLines(t *testing.T) {
assert.Equal(t, "<span>a</span>\n", string(ret[0]))
assert.Equal(t, "<span>b\n</span>", string(ret[1]))
}
// TestEscape verifies the two escaping maps and their use by the renderers.
func TestEscape(t *testing.T) {
	// escapeControlChars wraps control chars (NUL, US) in marker spans while
	// leaving tab/CR/LF and HTML-special chars untouched.
	assert.Equal(t, template.HTML("\t\r\n<span class=\"broken-code-point\" data-escaped=\"NUL\"><span class=\"char\">\x00</span></span><span class=\"broken-code-point\" data-escaped=\"US\"><span class=\"char\">\x1f</span></span>&'\"<>"), escapeControlChars([]byte("\t\r\n\x00\x1f&'\"<>")))
	// escapeFullString additionally HTML-escapes & ' " < > (html.EscapeString entities).
	assert.Equal(t, template.HTML("<span class=\"broken-code-point\" data-escaped=\"NUL\"><span class=\"char\">\x00</span></span><span class=\"broken-code-point\" data-escaped=\"US\"><span class=\"char\">\x1f</span></span>&amp;&#39;&#34;&lt;&gt;\t\r\n"), escapeFullString("\x00\x1f&'\"<>\t\r\n"))
	// RenderFullFile escapes DEL (0x7f) inside the highlighted comment span.
	out, _ := RenderFullFile("a.py", "", []byte("# \x7f<>"))
	assert.Equal(t, template.HTML(`<span class="c1"># <span class="broken-code-point" data-escaped="DEL"><span class="char">`+string(byte(0x7f))+`</span></span>&lt;&gt;</span>`), out[0])
	// renderPlainText applies the same full escaping without highlighting markup.
	out = renderPlainText([]byte("# \x7f<>"))
	assert.Equal(t, template.HTML(`# <span class="broken-code-point" data-escaped="DEL"><span class="char">`+string(byte(0x7f))+`</span></span>&lt;&gt;`), out[0])
}

View File

@ -16,7 +16,11 @@ import (
"github.com/go-enry/go-enry/v2"
)
const mapKeyLowerPrefix = "lower/"
const (
mapKeyLowerPrefix = "lower/"
LanguagePlaintext = "plaintext"
chromaLexerFallback = "fallback"
)
// chromaLexers is fully managed by us to do fast lookup for chroma lexers by file name or language name
// Don't use lexers.Get because it is very slow in many cases (iterate all rules, filepath glob match, etc.)

View File

@ -1531,6 +1531,7 @@
"repo.issues.context.edit": "Edit",
"repo.issues.context.delete": "Delete",
"repo.issues.no_content": "No description provided.",
"repo.issues.comment_no_content": "No comment provided.",
"repo.issues.close": "Close Issue",
"repo.issues.comment_pull_merged_at": "merged commit %[1]s into %[2]s %[3]s",
"repo.issues.comment_manually_pull_merged_at": "manually merged commit %[1]s into %[2]s %[3]s",

View File

@ -213,6 +213,9 @@
"editor.buttons.switch_to_legacy.tooltip": "Utiliser lancien éditeur à la place",
"editor.buttons.enable_monospace_font": "Activer la police à chasse fixe",
"editor.buttons.disable_monospace_font": "Désactiver la police à chasse fixe",
"editor.code_editor.command_palette": "Palette de commandes",
"editor.code_editor.find": "Rechercher",
"editor.code_editor.placeholder": "Saisissez ici le contenu du fichier",
"filter.string.asc": "AZ",
"filter.string.desc": "ZA",
"error.occurred": "Une erreur sest produite",
@ -2250,13 +2253,14 @@
"repo.settings.webhook.delivery.success": "Un événement a été ajouté à la file d'attente. Cela peut prendre quelques secondes avant qu'il n'apparaisse dans l'historique de livraison.",
"repo.settings.githooks_desc": "Les déclencheurs Git sont lancés par Git lui-même. Ils sont modifiables dans la liste ci-dessous afin de configurer des opérations personnalisées.",
"repo.settings.githook_edit_desc": "Si un Hook est inactif, un exemple de contenu vous sera proposé. Un contenu laissé vide signifie un Hook inactif.",
"repo.settings.githook_name": "Nom du Hook",
"repo.settings.githook_content": "Contenu du Hook",
"repo.settings.update_githook": "Mettre le Hook à jour",
"repo.settings.add_webhook_desc": "Gitea enverra à l'URL cible des requêtes <code>POST</code> avec un type de contenu spécifié. Lire la suite dans le <a target=\"_blank\" rel=\"noopener noreferrer\" href=\"%s\">guide des webhooks</a>.",
"repo.settings.payload_url": "URL cible",
"repo.settings.http_method": "Méthode HTTP",
"repo.settings.content_type": "Type de contenu POST",
"repo.settings.webhook.name": "Nom du déclencheur web",
"repo.settings.webhook.name_helper": "Optionnellement donner un nom convivial à ce déclencheur web",
"repo.settings.webhook.name_empty": "Déclencheur web sans nom",
"repo.settings.secret": "Secret",
"repo.settings.webhook_secret_desc": "Si le serveur webhook supporte lusage de secrets, vous pouvez indiquer un secret ici en vous basant sur leur documentation.",
"repo.settings.slack_username": "Nom d'utilisateur",
@ -2778,9 +2782,9 @@
"org.settings.labels_desc": "Ajoute des labels qui peuvent être utilisés sur les tickets pour <strong>tous les dépôts</strong> de cette organisation.",
"org.members.membership_visibility": "Visibilité des membres:",
"org.members.public": "Public",
"org.members.public_helper": "rendre caché",
"org.members.public_helper": "Cacher",
"org.members.private": "Caché",
"org.members.private_helper": "rendre visible",
"org.members.private_helper": "Révéler",
"org.members.member_role": "Rôle du membre :",
"org.members.owner": "Propriétaire",
"org.members.member": "Membre",
@ -2808,7 +2812,10 @@
"org.teams.no_desc": "Aucune description",
"org.teams.settings": "Paramètres",
"org.teams.owners_permission_desc": "Les propriétaires ont un accès complet à <strong>tous les dépôts</strong> et disposent <strong> d'un accès administrateur</strong> de l'organisation.",
"org.teams.owners_permission_suggestion": "Vous pouvez créer de nouvelles équipes pour les membres afin davoir un contrôle précis sur les droits daccès.",
"org.teams.members": "Membres de L'Équipe",
"org.teams.manage_team_member": "Gérer les équipes et les membres",
"org.teams.manage_team_member_prompt": "Les membres sont gérés par des équipes. Ajoutez des utilisateurs à une équipe pour les inviter dans cette organisation.",
"org.teams.update_settings": "Appliquer les paramètres",
"org.teams.delete_team": "Supprimer l'équipe",
"org.teams.add_team_member": "Ajouter un Membre",
@ -3715,6 +3722,8 @@
"actions.runs.workflow_run_count_1": "%d exécution du workflow",
"actions.runs.workflow_run_count_n": "%d exécutions du workflow",
"actions.runs.commit": "Révision",
"actions.runs.run_details": "Détails de lexécution",
"actions.runs.workflow_file": "Fichier de flux de travail",
"actions.runs.scheduled": "Planifié",
"actions.runs.pushed_by": "soumis par",
"actions.runs.invalid_workflow_helper": "La configuration du flux de travail est invalide. Veuillez vérifier votre fichier %s.",

View File

@ -213,6 +213,9 @@
"editor.buttons.switch_to_legacy.tooltip": "Úsáid an eagarthóir oidhreachta ina ionad",
"editor.buttons.enable_monospace_font": "Cumasaigh cló monospace",
"editor.buttons.disable_monospace_font": "Díchumasaigh cló monospace",
"editor.code_editor.command_palette": "Pailéad Ordú",
"editor.code_editor.find": "Aimsigh",
"editor.code_editor.placeholder": "Cuir isteach ábhar an chomhaid anseo",
"filter.string.asc": "A - Z",
"filter.string.desc": "Z - A",
"error.occurred": "Tharla earráid",
@ -2250,13 +2253,14 @@
"repo.settings.webhook.delivery.success": "Cuireadh imeacht leis an scuaine seachadta. D'fhéadfadh sé cúpla soicind a thógáil sula dtaispeántar sé sa stair seachadta.",
"repo.settings.githooks_desc": "Tá Git Crúcaí faoi thiomáint ag Git féin. Is féidir leat comhaid crúca a chur in eagar thíos chun oibríochtaí saincheaptha a shocrú.",
"repo.settings.githook_edit_desc": "Mura bhfuil an hook neamhghníomhach, cuirfear ábhar samplach i láthair. Má fhágann tú ábhar go luach folamh díchumasófar an crúca seo.",
"repo.settings.githook_name": "Ainm Crúca",
"repo.settings.githook_content": "Ábhar Crúca",
"repo.settings.update_githook": "Nuashonraigh Crúca",
"repo.settings.add_webhook_desc": "Seolfaidh Gitea iarratais <code>POST</code> le cineál ábhar sonraithe chuig an spriocURL. Léigh tuilleadh sa <a target=\"_blank\" rel=\"noopener noreferrer\" href=\"%s\">treoir Crúcaí Gréasán</a>.",
"repo.settings.payload_url": "URL spriocdhírithe",
"repo.settings.http_method": "Modh HTTP",
"repo.settings.content_type": "Cineál Ábhar POST",
"repo.settings.webhook.name": "Ainm an Crúca Gréasáin",
"repo.settings.webhook.name_helper": "Tabhair ainm cairdiúil don crúca gréasáin seo más mian leat",
"repo.settings.webhook.name_empty": "Crúca Gréasáin Gan Ainm",
"repo.settings.secret": "Rúnda",
"repo.settings.webhook_secret_desc": "Más féidir le freastalaí an webhook rún a úsáid, is féidir leat lámhleabhar an webhook a leanúint agus rún a líonadh isteach anseo.",
"repo.settings.slack_username": "Ainm úsáideora",
@ -2778,9 +2782,9 @@
"org.settings.labels_desc": "Cuir lipéid leis ar féidir iad a úsáid ar shaincheisteanna do <strong>gach stóras</strong> faoin eagraíocht seo.",
"org.members.membership_visibility": "Infheictheacht Ballraíochta:",
"org.members.public": "Infheicthe",
"org.members.public_helper": "dhéanamh i bhfolach",
"org.members.public_helper": "Déan i bhfolach",
"org.members.private": "I bhfolach",
"org.members.private_helper": "a dhéanamh le feiceáil",
"org.members.private_helper": "Déan infheicthe",
"org.members.member_role": "Ról Comhalta:",
"org.members.owner": "Úinéir",
"org.members.member": "Comhalta",
@ -2808,7 +2812,10 @@
"org.teams.no_desc": "Níl aon tuairisc ag an bhfoireann seo",
"org.teams.settings": "Socruithe",
"org.teams.owners_permission_desc": "Tá rochtain iomlán ag úinéirí ar <strong>gach stórais</strong> agus tá <strong>rochtain ag an riarthóir</strong> ar an eagraíocht.",
"org.teams.owners_permission_suggestion": "Is féidir leat foirne nua a chruthú do bhaill chun rialú rochtana mionsonraithe a fháil.",
"org.teams.members": "Baill Foirne",
"org.teams.manage_team_member": "Bainistigh foirne agus baill",
"org.teams.manage_team_member_prompt": "Déantar baill a bhainistiú trí fhoirne. Cuir úsáideoirí le foireann chun cuireadh a thabhairt dóibh chuig an eagraíocht seo.",
"org.teams.update_settings": "Nuashonrú Socruithe",
"org.teams.delete_team": "Scrios Foireann",
"org.teams.add_team_member": "Cuir Comhalta Foirne leis",
@ -3715,6 +3722,8 @@
"actions.runs.workflow_run_count_1": "%d rith sreabha oibre",
"actions.runs.workflow_run_count_n": "%d rith sreabha oibre",
"actions.runs.commit": "Tiomantas",
"actions.runs.run_details": "Sonraí Rith",
"actions.runs.workflow_file": "Comhad sreabhadh oibre",
"actions.runs.scheduled": "Sceidealaithe",
"actions.runs.pushed_by": "bhrú ag",
"actions.runs.invalid_workflow_helper": "Tá comhad cumraíochta sreabhadh oibre nebhailí. Seiceáil do chomhad cumraithe le do thoil: %s",

View File

@ -6,7 +6,6 @@ package repo
import (
"errors"
"fmt"
"net/http"
"code.gitea.io/gitea/models/organization"
@ -14,6 +13,7 @@ import (
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/optional"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
@ -83,6 +83,35 @@ func ListForks(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, apiForks)
}
func prepareDoerCreateRepoInOrg(ctx *context.APIContext, orgName string) *organization.Organization {
org, err := organization.GetOrgByName(ctx, orgName)
if errors.Is(err, util.ErrNotExist) {
ctx.APIErrorNotFound()
return nil
} else if err != nil {
ctx.APIErrorInternal(err)
return nil
}
if !organization.HasOrgOrUserVisible(ctx, org.AsUser(), ctx.Doer) {
ctx.APIErrorNotFound()
return nil
}
if !ctx.Doer.IsAdmin {
canCreate, err := org.CanCreateOrgRepo(ctx, ctx.Doer.ID)
if err != nil {
ctx.APIErrorInternal(err)
return nil
}
if !canCreate {
ctx.APIError(http.StatusForbidden, "User is not allowed to create repositories in this organization.")
return nil
}
}
return org
}
// CreateFork create a fork of a repo
func CreateFork(ctx *context.APIContext) {
// swagger:operation POST /repos/{owner}/{repo}/forks repository createFork
@ -118,41 +147,18 @@ func CreateFork(ctx *context.APIContext) {
// "$ref": "#/responses/validationError"
form := web.GetForm(ctx).(*api.CreateForkOption)
repo := ctx.Repo.Repository
var forker *user_model.User // user/org that will own the fork
if form.Organization == nil {
forker = ctx.Doer
} else {
org, err := organization.GetOrgByName(ctx, *form.Organization)
if err != nil {
if organization.IsErrOrgNotExist(err) {
ctx.APIError(http.StatusUnprocessableEntity, err)
} else {
ctx.APIErrorInternal(err)
}
forkOwner := ctx.Doer // user/org that will own the fork
if form.Organization != nil {
org := prepareDoerCreateRepoInOrg(ctx, *form.Organization)
if ctx.Written() {
return
}
if !ctx.Doer.IsAdmin {
isMember, err := org.IsOrgMember(ctx, ctx.Doer.ID)
if err != nil {
ctx.APIErrorInternal(err)
return
} else if !isMember {
ctx.APIError(http.StatusForbidden, fmt.Sprintf("User is no Member of Organisation '%s'", org.Name))
return
}
}
forker = org.AsUser()
forkOwner = org.AsUser()
}
var name string
if form.Name == nil {
name = repo.Name
} else {
name = *form.Name
}
fork, err := repo_service.ForkRepository(ctx, ctx.Doer, forker, repo_service.ForkRepoOptions{
repo := ctx.Repo.Repository
name := optional.FromPtr(form.Name).ValueOrDefault(repo.Name)
fork, err := repo_service.ForkRepository(ctx, ctx.Doer, forkOwner, repo_service.ForkRepoOptions{
BaseRepo: repo,
Name: name,
Description: repo.Description,

View File

@ -498,31 +498,11 @@ func CreateOrgRepo(ctx *context.APIContext) {
// "403":
// "$ref": "#/responses/forbidden"
opt := web.GetForm(ctx).(*api.CreateRepoOption)
org, err := organization.GetOrgByName(ctx, ctx.PathParam("org"))
if err != nil {
if organization.IsErrOrgNotExist(err) {
ctx.APIError(http.StatusUnprocessableEntity, err)
} else {
ctx.APIErrorInternal(err)
}
orgName := ctx.PathParam("org")
org := prepareDoerCreateRepoInOrg(ctx, orgName)
if ctx.Written() {
return
}
if !organization.HasOrgOrUserVisible(ctx, org.AsUser(), ctx.Doer) {
ctx.APIErrorNotFound("HasOrgOrUserVisible", nil)
return
}
if !ctx.Doer.IsAdmin {
canCreate, err := org.CanCreateOrgRepo(ctx, ctx.Doer.ID)
if err != nil {
ctx.APIErrorInternal(err)
return
} else if !canCreate {
ctx.APIError(http.StatusForbidden, "Given user is not allowed to create repository in organization.")
return
}
}
CreateUserRepo(ctx, org.AsUser(), *opt)
}

View File

@ -68,9 +68,138 @@ func getCurrentRunByPathParam(ctx *context_module.Context) (run *actions_model.A
return run
}
// resolveCurrentRunForView resolves GET Actions page URLs and supports both ID-based and legacy index-based forms.
//
// By default, run summary pages (/actions/runs/{run}) use a best-effort ID-first fallback,
// and job pages (/actions/runs/{run}/jobs/{job}) try to confirm an ID-based URL first and prefer the ID-based interpretation when both are valid.
//
// `by_id=1` param explicitly forces the ID-based path, and `by_index=1` explicitly forces the legacy index-based path.
// If both are present, `by_id` takes precedence.
func resolveCurrentRunForView(ctx *context_module.Context) *actions_model.ActionRun {
// `by_id` explicitly requests ID-based resolution, so the request skips the legacy index-based disambiguation logic and resolves the run by ID directly.
// It takes precedence over `by_index` when both query parameters are present.
if ctx.PathParam("run") == "latest" || ctx.FormBool("by_id") {
return getCurrentRunByPathParam(ctx)
}
runNum := ctx.PathParamInt64("run")
if runNum <= 0 {
ctx.NotFound(nil)
return nil
}
byIndex := ctx.FormBool("by_index")
if ctx.PathParam("job") == "" {
// The URL does not contain a {job} path parameter, so it cannot use the
// job-specific rules to disambiguate ID-based URLs from legacy index-based URLs.
// Because of that, this path is handled with a best-effort ID-first fallback by default.
//
// When the same repository contains:
// - a run whose ID matches runNum, and
// - a different run whose repo-scope index also matches runNum
// this path prefers the ID match and may show a different run than the old legacy URL originally intended,
// unless `by_index=1` explicitly forces the legacy index-based interpretation.
if !byIndex {
runByID, err := actions_model.GetRunByRepoAndID(ctx, ctx.Repo.Repository.ID, runNum)
if err == nil {
return runByID
}
if !errors.Is(err, util.ErrNotExist) {
ctx.ServerError("GetRun:"+ctx.PathParam("run"), err)
return nil
}
}
runByIndex, err := actions_model.GetRunByRepoAndIndex(ctx, ctx.Repo.Repository.ID, runNum)
if err == nil {
ctx.Redirect(fmt.Sprintf("%s/actions/runs/%d", ctx.Repo.RepoLink, runByIndex.ID), http.StatusFound)
return nil
}
if !errors.Is(err, util.ErrNotExist) {
ctx.ServerError("GetRunByRepoAndIndex", err)
return nil
}
ctx.NotFound(nil)
return nil
}
jobNum := ctx.PathParamInt64("job")
if jobNum < 0 {
ctx.NotFound(nil)
return nil
}
// A job index should not be larger than MaxJobNumPerRun, so larger values can skip the legacy index-based path and be treated as job IDs directly.
if !byIndex && jobNum >= actions_model.MaxJobNumPerRun {
return getCurrentRunByPathParam(ctx)
}
var runByID, runByIndex *actions_model.ActionRun
var targetJobByIndex *actions_model.ActionRunJob
// Each run must have at least one job, so a valid job ID in the same run cannot be smaller than the run ID.
if !byIndex && jobNum >= runNum {
// Probe the repo-scoped job ID first and only accept it when the job exists and belongs to the same runNum.
job, err := actions_model.GetRunJobByRepoAndID(ctx, ctx.Repo.Repository.ID, jobNum)
if err != nil && !errors.Is(err, util.ErrNotExist) {
ctx.ServerError("GetRunJobByRepoAndID", err)
return nil
}
if job != nil {
if err := job.LoadRun(ctx); err != nil {
ctx.ServerError("LoadRun", err)
return nil
}
if job.Run.ID == runNum {
runByID = job.Run
}
}
}
// Try to resolve the request as a legacy run-index/job-index URL.
{
run, err := actions_model.GetRunByRepoAndIndex(ctx, ctx.Repo.Repository.ID, runNum)
if err != nil && !errors.Is(err, util.ErrNotExist) {
ctx.ServerError("GetRunByRepoAndIndex", err)
return nil
}
if run != nil {
jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID)
if err != nil {
ctx.ServerError("GetRunJobsByRunID", err)
return nil
}
if jobNum < int64(len(jobs)) {
runByIndex = run
targetJobByIndex = jobs[jobNum]
}
}
}
if runByID == nil && runByIndex == nil {
ctx.NotFound(nil)
return nil
}
if runByID != nil && runByIndex == nil {
return runByID
}
if runByID == nil && runByIndex != nil {
ctx.Redirect(fmt.Sprintf("%s/actions/runs/%d/jobs/%d", ctx.Repo.RepoLink, runByIndex.ID, targetJobByIndex.ID), http.StatusFound)
return nil
}
// Reaching this point means both ID-based and legacy index-based interpretations are valid. Prefer the ID-based interpretation by default.
// Use `by_index=1` query parameter to access the legacy index-based interpretation when necessary.
return runByID
}
func View(ctx *context_module.Context) {
ctx.Data["PageIsActions"] = true
run := getCurrentRunByPathParam(ctx)
run := resolveCurrentRunForView(ctx)
if ctx.Written() {
return
}

View File

@ -21,6 +21,7 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
@ -31,31 +32,22 @@ import (
// NewComment create a comment for issue
func NewComment(ctx *context.Context) {
form := web.GetForm(ctx).(*forms.CreateCommentForm)
issue := GetActionIssue(ctx)
if ctx.Written() {
if issue == nil {
return
}
if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
if log.IsTrace() {
if ctx.IsSigned {
issueType := "issues"
if issue.IsPull {
issueType = "pulls"
}
log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
"User in Repo has Permissions: %-+v",
ctx.Doer,
issue.PosterID,
issueType,
ctx.Repo.Repository,
ctx.Repo.Permission)
} else {
log.Trace("Permission Denied: Not logged in")
}
}
if ctx.HasError() {
ctx.JSONError(ctx.GetErrMsg())
return
}
form := web.GetForm(ctx).(*forms.CreateCommentForm)
issueType := util.Iif(issue.IsPull, "pulls", "issues")
if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
"User in Repo has Permissions: %-+v", ctx.Doer, issue.PosterID, issueType, ctx.Repo.Repository, ctx.Repo.Permission)
ctx.HTTPError(http.StatusForbidden)
return
}
@ -65,137 +57,12 @@ func NewComment(ctx *context.Context) {
return
}
var attachments []string
if setting.Attachment.Enabled {
attachments = form.Files
}
attachments := util.Iif(setting.Attachment.Enabled, form.Files, nil)
if ctx.HasError() {
ctx.JSONError(ctx.GetErrMsg())
return
}
var comment *issues_model.Comment
defer func() {
// Check if issue admin/poster changes the status of issue.
if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) &&
(form.Status == "reopen" || form.Status == "close") &&
!(issue.IsPull && issue.PullRequest.HasMerged) {
// Duplication and conflict check should apply to reopen pull request.
var pr *issues_model.PullRequest
if form.Status == "reopen" && issue.IsPull {
pull := issue.PullRequest
var err error
pr, err = issues_model.GetUnmergedPullRequest(ctx, pull.HeadRepoID, pull.BaseRepoID, pull.HeadBranch, pull.BaseBranch, pull.Flow)
if err != nil {
if !issues_model.IsErrPullRequestNotExist(err) {
ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
return
}
}
// Regenerate patch and test conflict.
if pr == nil {
issue.PullRequest.HeadCommitID = ""
pull_service.StartPullRequestCheckImmediately(ctx, issue.PullRequest)
}
// check whether the ref of PR <refs/pulls/pr_index/head> in base repo is consistent with the head commit of head branch in the head repo
// get head commit of PR
if pull.Flow == issues_model.PullRequestFlowGithub {
prHeadRef := pull.GetGitHeadRefName()
if err := pull.LoadBaseRepo(ctx); err != nil {
ctx.ServerError("Unable to load base repo", err)
return
}
prHeadCommitID, err := gitrepo.GetFullCommitID(ctx, pull.BaseRepo, prHeadRef)
if err != nil {
ctx.ServerError("Get head commit Id of pr fail", err)
return
}
// get head commit of branch in the head repo
if err := pull.LoadHeadRepo(ctx); err != nil {
ctx.ServerError("Unable to load head repo", err)
return
}
if exist, _ := git_model.IsBranchExist(ctx, pull.HeadRepo.ID, pull.BaseBranch); !exist {
// todo localize
ctx.JSONError("The origin branch is delete, cannot reopen.")
return
}
headBranchRef := git.RefNameFromBranch(pull.HeadBranch)
headBranchCommitID, err := gitrepo.GetFullCommitID(ctx, pull.HeadRepo, headBranchRef.String())
if err != nil {
ctx.ServerError("Get head commit Id of head branch fail", err)
return
}
err = pull.LoadIssue(ctx)
if err != nil {
ctx.ServerError("load the issue of pull request error", err)
return
}
if prHeadCommitID != headBranchCommitID {
// force push to base repo
err := gitrepo.Push(ctx, pull.HeadRepo, pull.BaseRepo, git.PushOptions{
Branch: pull.HeadBranch + ":" + prHeadRef,
Force: true,
Env: repo_module.InternalPushingEnvironment(pull.Issue.Poster, pull.BaseRepo),
})
if err != nil {
ctx.ServerError("force push error", err)
return
}
}
}
}
if pr != nil {
ctx.Flash.Info(ctx.Tr("repo.pulls.open_unmerged_pull_exists", pr.Index))
} else {
if form.Status == "close" && !issue.IsClosed {
if err := issue_service.CloseIssue(ctx, issue, ctx.Doer, ""); err != nil {
log.Error("CloseIssue: %v", err)
if issues_model.IsErrDependenciesLeft(err) {
if issue.IsPull {
ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
} else {
ctx.JSONError(ctx.Tr("repo.issues.dependency.issue_close_blocked"))
}
return
}
} else {
if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil {
ctx.ServerError("stopTimerIfAvailable", err)
return
}
log.Trace("Issue [%d] status changed to closed: %v", issue.ID, issue.IsClosed)
}
} else if form.Status == "reopen" && issue.IsClosed {
if err := issue_service.ReopenIssue(ctx, issue, ctx.Doer, ""); err != nil {
log.Error("ReopenIssue: %v", err)
}
}
}
}
// Redirect to comment hashtag if there is any actual content.
typeName := "issues"
if issue.IsPull {
typeName = "pulls"
}
if comment != nil {
ctx.JSONRedirect(fmt.Sprintf("%s/%s/%d#%s", ctx.Repo.RepoLink, typeName, issue.Index, comment.HashTag()))
} else {
ctx.JSONRedirect(fmt.Sprintf("%s/%s/%d", ctx.Repo.RepoLink, typeName, issue.Index))
}
}()
// Fix #321: Allow empty comments, as long as we have attachments.
if len(form.Content) == 0 && len(attachments) == 0 {
// Can allow empty comments if there are attachments or a status change (close, reopen, approve, reject)
// So, only stop if there is no content, no attachments, and no status change.
if form.Content == "" && len(attachments) == 0 && form.Status == "" {
ctx.JSONError(ctx.Tr("repo.issues.comment_no_content"))
return
}
@ -209,7 +76,115 @@ func NewComment(ctx *context.Context) {
return
}
log.Trace("Comment created: %d/%d/%d", ctx.Repo.Repository.ID, issue.ID, comment.ID)
// ATTENTION: From now on, do not use ctx.JSONError, don't return on user error, because the comment has been created.
// Always use ctx.Flash.Xxx and then redirect, then the message will be displayed
// TODO: need further refactoring to the code below
// Check if doer can change the status of issue (close, reopen).
if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) &&
(form.Status == "reopen" || form.Status == "close") &&
!(issue.IsPull && issue.PullRequest.HasMerged) {
// Duplication and conflict check should apply to reopen pull request.
var branchOtherUnmergedPR *issues_model.PullRequest
if form.Status == "reopen" && issue.IsPull {
pull := issue.PullRequest
branchOtherUnmergedPR, err = issues_model.GetUnmergedPullRequest(ctx, pull.HeadRepoID, pull.BaseRepoID, pull.HeadBranch, pull.BaseBranch, pull.Flow)
if err != nil {
if !issues_model.IsErrPullRequestNotExist(err) {
ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
}
}
if branchOtherUnmergedPR != nil {
ctx.Flash.Error(ctx.Tr("repo.pulls.open_unmerged_pull_exists", branchOtherUnmergedPR.Index))
} else {
// Regenerate patch and test conflict.
issue.PullRequest.HeadCommitID = ""
pull_service.StartPullRequestCheckImmediately(ctx, issue.PullRequest)
}
// check whether the ref of PR <refs/pulls/pr_index/head> in base repo is consistent with the head commit of head branch in the head repo
// get head commit of PR
if branchOtherUnmergedPR != nil && pull.Flow == issues_model.PullRequestFlowGithub {
prHeadRef := pull.GetGitHeadRefName()
if err := pull.LoadBaseRepo(ctx); err != nil {
ctx.ServerError("Unable to load base repo", err)
return
}
prHeadCommitID, err := gitrepo.GetFullCommitID(ctx, pull.BaseRepo, prHeadRef)
if err != nil {
ctx.ServerError("Get head commit Id of pr fail", err)
return
}
// get head commit of branch in the head repo
if err := pull.LoadHeadRepo(ctx); err != nil {
ctx.ServerError("Unable to load head repo", err)
return
}
if exist, _ := git_model.IsBranchExist(ctx, pull.HeadRepo.ID, pull.BaseBranch); !exist {
ctx.Flash.Error("The origin branch is delete, cannot reopen.")
return
}
headBranchRef := git.RefNameFromBranch(pull.HeadBranch)
headBranchCommitID, err := gitrepo.GetFullCommitID(ctx, pull.HeadRepo, headBranchRef.String())
if err != nil {
ctx.ServerError("Get head commit Id of head branch fail", err)
return
}
err = pull.LoadIssue(ctx)
if err != nil {
ctx.ServerError("load the issue of pull request error", err)
return
}
if prHeadCommitID != headBranchCommitID {
// force push to base repo
err := gitrepo.Push(ctx, pull.HeadRepo, pull.BaseRepo, git.PushOptions{
Branch: pull.HeadBranch + ":" + prHeadRef,
Force: true,
Env: repo_module.InternalPushingEnvironment(pull.Issue.Poster, pull.BaseRepo),
})
if err != nil {
ctx.ServerError("force push error", err)
return
}
}
}
}
if form.Status == "close" && !issue.IsClosed {
if err := issue_service.CloseIssue(ctx, issue, ctx.Doer, ""); err != nil {
log.Error("CloseIssue: %v", err)
if issues_model.IsErrDependenciesLeft(err) {
if issue.IsPull {
ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
} else {
ctx.Flash.Error(ctx.Tr("repo.issues.dependency.issue_close_blocked"))
}
}
} else {
if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil {
ctx.ServerError("stopTimerIfAvailable", err)
return
}
log.Trace("Issue [%d] status changed to closed: %v", issue.ID, issue.IsClosed)
}
} else if form.Status == "reopen" && issue.IsClosed && branchOtherUnmergedPR == nil {
if err := issue_service.ReopenIssue(ctx, issue, ctx.Doer, ""); err != nil {
log.Error("ReopenIssue: %v", err)
ctx.Flash.Error("Unable to reopen.")
}
}
} // end if: handle close or reopen
// Redirect to the comment, add hashtag if it exists
redirect := fmt.Sprintf("%s/%s/%d", ctx.Repo.RepoLink, issueType, issue.Index)
if comment != nil {
redirect += "#" + comment.HashTag()
}
ctx.JSONRedirect(redirect)
}
// UpdateCommentContent change comment of issue's content

View File

@ -119,12 +119,8 @@ func handleFileViewRenderSource(ctx *context.Context, attrs *attribute.Attribute
}
language := attrs.GetLanguage().Value()
fileContent, lexerName, err := highlight.RenderFullFile(filename, language, buf)
fileContent, lexerName := highlight.RenderFullFile(filename, language, buf)
ctx.Data["LexerName"] = lexerName
if err != nil {
log.Error("highlight.RenderFullFile failed, fallback to plain text: %v", err)
fileContent = highlight.RenderPlainText(buf)
}
status := &charset.EscapeStatus{}
statuses := make([]*charset.EscapeStatus, len(fileContent))
for i, line := range fileContent {

View File

@ -132,10 +132,12 @@ func OrgAssignment(orgAssignmentOpts OrgAssignmentOptions) func(ctx *Context) {
ctx.ServerError("IsOrgMember", err)
return
}
ctx.Org.CanCreateOrgRepo, err = org.CanCreateOrgRepo(ctx, ctx.Doer.ID)
if err != nil {
ctx.ServerError("CanCreateOrgRepo", err)
return
if ctx.Org.IsMember {
ctx.Org.CanCreateOrgRepo, err = org.CanCreateOrgRepo(ctx, ctx.Doer.ID)
if err != nil {
ctx.ServerError("CanCreateOrgRepo", err)
return
}
}
}
} else {

View File

@ -1140,7 +1140,7 @@ func TestHighlightCodeLines(t *testing.T) {
ret := highlightCodeLinesForDiffFile(diffFile, true, []byte("a\nb\n"))
assert.Equal(t, map[int]template.HTML{
0: `<span class="n">a</span>` + nl,
1: `<span class="n">b</span>`,
1: `<span class="n">b</span>` + nl,
}, ret)
})
}

View File

@ -221,6 +221,7 @@
{{end}}
<div class="divider"></div>
<script type="module">
(() => {
const defaultMergeTitle = {{.DefaultMergeMessage}};
const defaultSquashMergeTitle = {{.DefaultSquashMergeMessage}};
const defaultMergeMessage = {{.DefaultMergeBody}};
@ -299,6 +300,7 @@
}
];
window.config.pageData.pullRequestMergeForm = mergeForm;
})();
</script>
{{$showGeneralMergeForm = true}}

View File

@ -110,7 +110,7 @@
<a href="{{.GetCommentLink ctx}}" class="tw-inline-block tw-truncate issue title">{{(.GetIssueTitle ctx) | ctx.RenderUtils.RenderIssueSimpleTitle}}</a>
{{$comment := index .GetIssueInfos 1}}
{{if $comment}}
<div class="render-content markup tw-text-14">{{ctx.RenderUtils.MarkdownToHtml $comment}}</div>
<div class="render-content markup truncated-markup">{{ctx.RenderUtils.MarkdownToHtml $comment}}</div>
{{end}}
{{else if .GetOpType.InActions "merge_pull_request"}}
<div class="flex-item-body tw-text-text">{{index .GetIssueInfos 1 | ctx.RenderUtils.RenderIssueSimpleTitle}}</div>

View File

@ -9,30 +9,40 @@ import (
"net/url"
"testing"
actions_model "code.gitea.io/gitea/models/actions"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/setting"
actions_web "code.gitea.io/gitea/routers/web/repo/actions"
runnerv1 "code.gitea.io/actions-proto-go/runner/v1"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestActionsRoute(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
user2Session := loginUser(t, user2.Name)
user2Token := getTokenForLoggedInUser(t, user2Session, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
t.Run("testActionsRouteForIDBasedURL", testActionsRouteForIDBasedURL)
t.Run("testActionsRouteForLegacyIndexBasedURL", testActionsRouteForLegacyIndexBasedURL)
})
}
repo1 := createActionsTestRepo(t, user2Token, "actions-route-test-1", false)
runner1 := newMockRunner()
runner1.registerAsRepoRunner(t, user2.Name, repo1.Name, "mock-runner", []string{"ubuntu-latest"}, false)
repo2 := createActionsTestRepo(t, user2Token, "actions-route-test-2", false)
runner2 := newMockRunner()
runner2.registerAsRepoRunner(t, user2.Name, repo2.Name, "mock-runner", []string{"ubuntu-latest"}, false)
func testActionsRouteForIDBasedURL(t *testing.T) {
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
user2Session := loginUser(t, user2.Name)
user2Token := getTokenForLoggedInUser(t, user2Session, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
workflowTreePath := ".gitea/workflows/test.yml"
workflowContent := `name: test
repo1 := createActionsTestRepo(t, user2Token, "actions-route-id-url-1", false)
runner1 := newMockRunner()
runner1.registerAsRepoRunner(t, user2.Name, repo1.Name, "mock-runner", []string{"ubuntu-latest"}, false)
repo2 := createActionsTestRepo(t, user2Token, "actions-route-id-url-2", false)
runner2 := newMockRunner()
runner2.registerAsRepoRunner(t, user2.Name, repo2.Name, "mock-runner", []string{"ubuntu-latest"}, false)
workflowTreePath := ".gitea/workflows/test.yml"
workflowContent := `name: test
on:
push:
paths:
@ -44,57 +54,239 @@ jobs:
- run: echo job1
`
opts := getWorkflowCreateFileOptions(user2, repo1.DefaultBranch, "create "+workflowTreePath, workflowContent)
createWorkflowFile(t, user2Token, user2.Name, repo1.Name, workflowTreePath, opts)
createWorkflowFile(t, user2Token, user2.Name, repo2.Name, workflowTreePath, opts)
opts := getWorkflowCreateFileOptions(user2, repo1.DefaultBranch, "create "+workflowTreePath, workflowContent)
createWorkflowFile(t, user2Token, user2.Name, repo1.Name, workflowTreePath, opts)
createWorkflowFile(t, user2Token, user2.Name, repo2.Name, workflowTreePath, opts)
task1 := runner1.fetchTask(t)
_, job1, run1 := getTaskAndJobAndRunByTaskID(t, task1.Id)
task2 := runner2.fetchTask(t)
_, job2, run2 := getTaskAndJobAndRunByTaskID(t, task2.Id)
task1 := runner1.fetchTask(t)
_, job1, run1 := getTaskAndJobAndRunByTaskID(t, task1.Id)
task2 := runner2.fetchTask(t)
_, job2, run2 := getTaskAndJobAndRunByTaskID(t, task2.Id)
// run1 and job1 belong to repo1, success
req := NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo1.Name, run1.ID, job1.ID))
resp := user2Session.MakeRequest(t, req, http.StatusOK)
var viewResp actions_web.ViewResponse
DecodeJSON(t, resp, &viewResp)
assert.Len(t, viewResp.State.Run.Jobs, 1)
assert.Equal(t, job1.ID, viewResp.State.Run.Jobs[0].ID)
req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo1.Name, run1.ID))
user2Session.MakeRequest(t, req, http.StatusOK)
// run2 and job2 do not belong to repo1, failure
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo1.Name, run2.ID, job2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo1.Name, run1.ID, job2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo1.Name, run2.ID, job1.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/workflow", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/approve", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/cancel", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/delete", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/artifacts/test.txt", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "DELETE", fmt.Sprintf("/%s/%s/actions/runs/%d/artifacts/test.txt", user2.Name, repo1.Name, run2.ID))
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo1.Name, 999999))
user2Session.MakeRequest(t, req, http.StatusNotFound)
// run1 and job1 belong to repo1, success
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo1.Name, run1.ID, job1.ID))
resp := user2Session.MakeRequest(t, req, http.StatusOK)
var viewResp actions_web.ViewResponse
DecodeJSON(t, resp, &viewResp)
assert.Len(t, viewResp.State.Run.Jobs, 1)
assert.Equal(t, job1.ID, viewResp.State.Run.Jobs[0].ID)
// run2 and job2 do not belong to repo1, failure
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo1.Name, run2.ID, job2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo1.Name, run1.ID, job2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo1.Name, run2.ID, job1.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/workflow", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/approve", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/cancel", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/delete", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/artifacts/test.txt", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "DELETE", fmt.Sprintf("/%s/%s/actions/runs/%d/artifacts/test.txt", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
// make the tasks complete, then test rerun
runner1.execTask(t, task1, &mockTaskOutcome{
result: runnerv1.Result_RESULT_SUCCESS,
})
runner2.execTask(t, task2, &mockTaskOutcome{
result: runnerv1.Result_RESULT_SUCCESS,
})
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/rerun", user2.Name, repo1.Name, run2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d/rerun", user2.Name, repo1.Name, run2.ID, job2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d/rerun", user2.Name, repo1.Name, run1.ID, job2.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
req = NewRequest(t, "POST", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d/rerun", user2.Name, repo1.Name, run2.ID, job1.ID))
user2Session.MakeRequest(t, req, http.StatusNotFound)
}
// testActionsRouteForLegacyIndexBasedURL verifies how the actions web routes resolve
// "/runs/{num}" and "/runs/{num}/jobs/{num}" URLs:
//   - ID-based URLs (run ID / job ID) resolve directly,
//   - legacy index-based URLs (run index / job index) redirect to the ID-based URL,
//   - when a number is valid under both interpretations, the ID-based one wins by default,
//   - the "by_index=1" query parameter forces the legacy index interpretation.
func testActionsRouteForLegacyIndexBasedURL(t *testing.T) {
	user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
	user2Session := loginUser(t, user2.Name)
	user2Token := getTokenForLoggedInUser(t, user2Session, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
	repo := createActionsTestRepo(t, user2Token, "actions-route-legacy-url", false)
	// mkRun builds an ActionRun fixture with an explicit ID and run index.
	mkRun := func(id, index int64, title, sha string) *actions_model.ActionRun {
		return &actions_model.ActionRun{
			ID:            id,
			Index:         index,
			RepoID:        repo.ID,
			OwnerID:       user2.ID,
			Title:         title,
			WorkflowID:    "legacy-route.yml",
			TriggerUserID: user2.ID,
			Ref:           "refs/heads/master",
			CommitSHA:     sha,
			Status:        actions_model.StatusWaiting,
		}
	}
	// mkJob builds an ActionRunJob fixture with an explicit ID, attached to the given run.
	mkJob := func(id, runID int64, name, sha string) *actions_model.ActionRunJob {
		return &actions_model.ActionRunJob{
			ID:        id,
			RunID:     runID,
			RepoID:    repo.ID,
			OwnerID:   user2.ID,
			CommitSHA: sha,
			Name:      name,
			Status:    actions_model.StatusWaiting,
		}
	}
	// A small ID-based run/job pair that should always resolve directly.
	smallIDRun := mkRun(80, 20, "legacy route small id", "aaa001")
	smallIDJob := mkJob(170, smallIDRun.ID, "legacy-small-job", smallIDRun.CommitSHA)
	// Another small run used to provide a job ID that belongs to a different run.
	otherSmallRun := mkRun(90, 30, "legacy route other small", "aaa002")
	otherSmallJob := mkJob(180, otherSmallRun.ID, "legacy-other-small-job", otherSmallRun.CommitSHA)
	// A large-ID run whose legacy run index should redirect to its ID-based URL.
	normalRun := mkRun(1500, 900, "legacy route normal", "aaa003")
	normalRunJob := mkJob(1600, normalRun.ID, "legacy-normal-job", normalRun.CommitSHA)
	// A run whose index collides with normalRun.ID to exercise summary-page ID-first behavior.
	collisionRun := mkRun(2400, 1500, "legacy route collision", "aaa004")
	collisionJobIdx0 := mkJob(2600, collisionRun.ID, "legacy-collision-job-1", collisionRun.CommitSHA)
	collisionJobIdx1 := mkJob(2601, collisionRun.ID, "legacy-collision-job-2", collisionRun.CommitSHA)
	// A small ID-based run/job pair that collides with a different legacy run/job index pair.
	ambiguousIDRun := mkRun(3, 1, "legacy route ambiguous id", "aaa005")
	ambiguousIDJob := mkJob(4, ambiguousIDRun.ID, "legacy-ambiguous-id-job", ambiguousIDRun.CommitSHA)
	// The legacy run/job target for the ambiguous /runs/3/jobs/4 URL.
	ambiguousLegacyRun := mkRun(1501, ambiguousIDRun.ID, "legacy route ambiguous legacy", "aaa006")
	ambiguousLegacyJobIdx0 := mkJob(1601, ambiguousLegacyRun.ID, "legacy-ambiguous-legacy-job-0", ambiguousLegacyRun.CommitSHA)
	ambiguousLegacyJobIdx1 := mkJob(1602, ambiguousLegacyRun.ID, "legacy-ambiguous-legacy-job-1", ambiguousLegacyRun.CommitSHA)
	ambiguousLegacyJobIdx2 := mkJob(1603, ambiguousLegacyRun.ID, "legacy-ambiguous-legacy-job-2", ambiguousLegacyRun.CommitSHA)
	ambiguousLegacyJobIdx3 := mkJob(1604, ambiguousLegacyRun.ID, "legacy-ambiguous-legacy-job-3", ambiguousLegacyRun.CommitSHA)
	ambiguousLegacyJobIdx4 := mkJob(1605, ambiguousLegacyRun.ID, "legacy-ambiguous-legacy-job-4", ambiguousLegacyRun.CommitSHA) // job_index=4
	ambiguousLegacyJobIdx5 := mkJob(1606, ambiguousLegacyRun.ID, "legacy-ambiguous-legacy-job-5", ambiguousLegacyRun.CommitSHA)
	ambiguousLegacyJobs := []*actions_model.ActionRunJob{
		ambiguousLegacyJobIdx0,
		ambiguousLegacyJobIdx1,
		ambiguousLegacyJobIdx2,
		ambiguousLegacyJobIdx3,
		ambiguousLegacyJobIdx4,
		ambiguousLegacyJobIdx5,
	}
	// ambiguousIDJob.ID (4) doubles as the legacy job index into ambiguousLegacyRun's jobs.
	targetAmbiguousLegacyJob := ambiguousLegacyJobs[int(ambiguousIDJob.ID)]
	insertBeansWithExplicitIDs(t, "action_run",
		smallIDRun, otherSmallRun, normalRun, ambiguousIDRun, ambiguousLegacyRun, collisionRun,
	)
	insertBeansWithExplicitIDs(t, "action_run_job",
		smallIDJob, otherSmallJob, normalRunJob, ambiguousIDJob, collisionJobIdx0, collisionJobIdx1,
		ambiguousLegacyJobIdx0, ambiguousLegacyJobIdx1, ambiguousLegacyJobIdx2, ambiguousLegacyJobIdx3, ambiguousLegacyJobIdx4, ambiguousLegacyJobIdx5,
	)
	t.Run("OnlyRunID", func(t *testing.T) {
		// ID-based URLs must be valid
		req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo.Name, smallIDRun.ID))
		user2Session.MakeRequest(t, req, http.StatusOK)
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo.Name, normalRun.ID))
		user2Session.MakeRequest(t, req, http.StatusOK)
	})
	t.Run("OnlyRunIndex", func(t *testing.T) {
		// legacy run index should redirect to the ID-based URL
		req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo.Name, normalRun.Index))
		resp := user2Session.MakeRequest(t, req, http.StatusFound)
		assert.Equal(t, fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo.Name, normalRun.ID), resp.Header().Get("Location"))
		// Best-effort compatibility prefers the run ID when the same number also exists as a legacy run index.
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo.Name, collisionRun.Index))
		resp = user2Session.MakeRequest(t, req, http.StatusOK)
		assert.Contains(t, resp.Body.String(), fmt.Sprintf(`data-run-id="%d"`, normalRun.ID)) // because collisionRun.Index == normalRun.ID
		// by_index=1 should force the summary page to use the legacy run index interpretation.
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d?by_index=1", user2.Name, repo.Name, collisionRun.Index))
		resp = user2Session.MakeRequest(t, req, http.StatusFound)
		assert.Equal(t, fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo.Name, collisionRun.ID), resp.Header().Get("Location"))
	})
	t.Run("RunIDAndJobID", func(t *testing.T) {
		// ID-based URLs must be valid
		req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo.Name, smallIDRun.ID, smallIDJob.ID))
		user2Session.MakeRequest(t, req, http.StatusOK)
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo.Name, normalRun.ID, normalRunJob.ID))
		user2Session.MakeRequest(t, req, http.StatusOK)
	})
	t.Run("RunIndexAndJobIndex", func(t *testing.T) {
		// /user2/repo2/actions/runs/3/jobs/4 is ambiguous:
		// - it may resolve as the ID-based URL for run_id=3/job_id=4,
		// - or as the legacy index-based URL for run_index=3/job_index=4 which should redirect to run_id=1501/job_id=1605.
		idBasedURL := fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo.Name, ambiguousIDRun.ID, ambiguousIDJob.ID)
		indexBasedURL := fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo.Name, ambiguousLegacyRun.Index, 4) // for ambiguousLegacyJobIdx4
		assert.Equal(t, idBasedURL, indexBasedURL)
		// When both interpretations are valid, prefer the ID-based target by default.
		req := NewRequest(t, "GET", indexBasedURL)
		user2Session.MakeRequest(t, req, http.StatusOK)
		redirectURL := fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo.Name, ambiguousLegacyRun.ID, targetAmbiguousLegacyJob.ID)
		// by_index=1 should explicitly force the legacy run/job index interpretation.
		req = NewRequest(t, "GET", indexBasedURL+"?by_index=1")
		resp := user2Session.MakeRequest(t, req, http.StatusFound)
		assert.Equal(t, redirectURL, resp.Header().Get("Location"))
		// legacy job index 0 should redirect to the first job's ID
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/0", user2.Name, repo.Name, collisionRun.Index))
		resp = user2Session.MakeRequest(t, req, http.StatusFound)
		assert.Equal(t, fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo.Name, collisionRun.ID, collisionJobIdx0.ID), resp.Header().Get("Location"))
		// legacy job index 1 should redirect to the second job's ID
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/1", user2.Name, repo.Name, collisionRun.Index))
		resp = user2Session.MakeRequest(t, req, http.StatusFound)
		assert.Equal(t, fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo.Name, collisionRun.ID, collisionJobIdx1.ID), resp.Header().Get("Location"))
	})
	t.Run("InvalidURLs", func(t *testing.T) {
		// the job ID from a different run should not match
		req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/%d", user2.Name, repo.Name, smallIDRun.ID, otherSmallJob.ID))
		user2Session.MakeRequest(t, req, http.StatusNotFound)
		// resolve the run by index first and then return not found because the job index is out-of-range
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/2", user2.Name, repo.Name, normalRun.ID))
		user2Session.MakeRequest(t, req, http.StatusNotFound)
		// an out-of-range job index should return not found
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/2", user2.Name, repo.Name, collisionRun.Index))
		user2Session.MakeRequest(t, req, http.StatusNotFound)
		// a missing run number should return not found
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d", user2.Name, repo.Name, 999999))
		user2Session.MakeRequest(t, req, http.StatusNotFound)
		// a missing legacy run index should return not found
		req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/actions/runs/%d/jobs/0", user2.Name, repo.Name, 999999))
		user2Session.MakeRequest(t, req, http.StatusNotFound)
	})
}
// insertBeansWithExplicitIDs inserts fixture beans in one transaction while
// keeping their explicitly assigned primary-key IDs. MSSQL rejects explicit
// values for identity columns unless IDENTITY_INSERT is enabled for the table,
// so it is toggled on for the duration of the insert on that backend.
func insertBeansWithExplicitIDs(t *testing.T, table string, beans ...any) {
	t.Helper()

	txCtx, committer, err := db.TxContext(t.Context())
	require.NoError(t, err)
	defer committer.Close() // no-op once the transaction has been committed

	if setting.Database.Type.IsMSSQL() {
		_, err = db.Exec(txCtx, fmt.Sprintf("SET IDENTITY_INSERT [%s] ON", table))
		require.NoError(t, err)
		// restore the default before the connection is released
		defer func() {
			_, errOff := db.Exec(txCtx, fmt.Sprintf("SET IDENTITY_INSERT [%s] OFF", table))
			require.NoError(t, errOff)
		}()
	}

	require.NoError(t, db.Insert(txCtx, beans...))
	require.NoError(t, committer.Commit())
}

View File

@ -19,15 +19,35 @@ import (
"github.com/stretchr/testify/assert"
)
func TestCreateForkNoLogin(t *testing.T) {
func TestAPIFork(t *testing.T) {
defer tests.PrepareTestEnv(t)()
t.Run("CreateForkNoLogin", testCreateForkNoLogin)
t.Run("CreateForkOrgNoCreatePermission", testCreateForkOrgNoCreatePermission)
t.Run("APIForkListLimitedAndPrivateRepos", testAPIForkListLimitedAndPrivateRepos)
t.Run("GetPrivateReposForks", testGetPrivateReposForks)
}
func testCreateForkNoLogin(t *testing.T) {
req := NewRequestWithJSON(t, "POST", "/api/v1/repos/user2/repo1/forks", &api.CreateForkOption{})
MakeRequest(t, req, http.StatusUnauthorized)
}
func TestAPIForkListLimitedAndPrivateRepos(t *testing.T) {
defer tests.PrepareTestEnv(t)()
func testCreateForkOrgNoCreatePermission(t *testing.T) {
user4Sess := loginUser(t, "user4")
org := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
canCreate, err := org_model.OrgFromUser(org).CanCreateOrgRepo(t.Context(), 4)
assert.NoError(t, err)
assert.False(t, canCreate)
user4Token := getTokenForLoggedInUser(t, user4Sess, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteOrganization)
req := NewRequestWithJSON(t, "POST", "/api/v1/repos/user2/repo1/forks", &api.CreateForkOption{
Organization: &org.Name,
}).AddTokenAuth(user4Token)
MakeRequest(t, req, http.StatusForbidden)
}
func testAPIForkListLimitedAndPrivateRepos(t *testing.T) {
user1Sess := loginUser(t, "user1")
user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: "user1"})
@ -64,10 +84,7 @@ func TestAPIForkListLimitedAndPrivateRepos(t *testing.T) {
req := NewRequest(t, "GET", "/api/v1/repos/user2/repo1/forks")
resp := MakeRequest(t, req, http.StatusOK)
var forks []*api.Repository
DecodeJSON(t, resp, &forks)
forks := DecodeJSON(t, resp, []*api.Repository{})
assert.Empty(t, forks)
assert.Equal(t, "0", resp.Header().Get("X-Total-Count"))
})
@ -78,9 +95,7 @@ func TestAPIForkListLimitedAndPrivateRepos(t *testing.T) {
req := NewRequest(t, "GET", "/api/v1/repos/user2/repo1/forks").AddTokenAuth(user1Token)
resp := MakeRequest(t, req, http.StatusOK)
var forks []*api.Repository
DecodeJSON(t, resp, &forks)
forks := DecodeJSON(t, resp, []*api.Repository{})
assert.Len(t, forks, 2)
assert.Equal(t, "2", resp.Header().Get("X-Total-Count"))
@ -88,28 +103,22 @@ func TestAPIForkListLimitedAndPrivateRepos(t *testing.T) {
req = NewRequest(t, "GET", "/api/v1/repos/user2/repo1/forks").AddTokenAuth(user1Token)
resp = MakeRequest(t, req, http.StatusOK)
forks = []*api.Repository{}
DecodeJSON(t, resp, &forks)
forks = DecodeJSON(t, resp, []*api.Repository{})
assert.Len(t, forks, 2)
assert.Equal(t, "2", resp.Header().Get("X-Total-Count"))
})
}
func TestGetPrivateReposForks(t *testing.T) {
defer tests.PrepareTestEnv(t)()
func testGetPrivateReposForks(t *testing.T) {
user1Sess := loginUser(t, "user1")
repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}) // private repository
privateOrg := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 23})
user1Token := getTokenForLoggedInUser(t, user1Sess, auth_model.AccessTokenScopeWriteRepository)
forkedRepoName := "forked-repo"
// create fork from a private repository
req := NewRequestWithJSON(t, "POST", "/api/v1/repos/"+repo2.FullName()+"/forks", &api.CreateForkOption{
Organization: &privateOrg.Name,
Name: &forkedRepoName,
Name: new("forked-repo"),
}).AddTokenAuth(user1Token)
MakeRequest(t, req, http.StatusAccepted)
@ -117,8 +126,7 @@ func TestGetPrivateReposForks(t *testing.T) {
req = NewRequest(t, "GET", "/api/v1/repos/"+repo2.FullName()+"/forks").AddTokenAuth(user1Token)
resp := MakeRequest(t, req, http.StatusOK)
forks := []*api.Repository{}
DecodeJSON(t, resp, &forks)
forks := DecodeJSON(t, resp, []*api.Repository{})
assert.Len(t, forks, 1)
assert.Equal(t, "1", resp.Header().Get("X-Total-Count"))
assert.Equal(t, "forked-repo", forks[0].Name)

View File

@ -415,7 +415,7 @@ func DecodeJSON[T any](t testing.TB, resp *httptest.ResponseRecorder, v T) (ret
// FIXME: JSON-KEY-CASE: for testing purpose only, because many structs don't provide `json` tags, they just use capitalized field names
decoder := json.NewDecoderCaseInsensitive(resp.Body)
require.NoError(t, decoder.Decode(v))
require.NoError(t, decoder.Decode(&v))
return v
}

View File

@ -33,6 +33,7 @@
@import "./modules/flexcontainer.css";
@import "./modules/codeeditor.css";
@import "./modules/chroma.css";
@import "./modules/charescape.css";
@import "./shared/flex-list.css";
@import "./shared/milestone.css";

View File

@ -0,0 +1,48 @@
/*
Styling for escaped/broken code points rendered by the backend.

Show the escaped label and hide the real char:
<span class="broken-code-point" data-escaped="DEL"><span class="char">{real-char}</span></span>
Only show the real-char:
<span class="broken-code-point">{real-char}</span>
*/
.broken-code-point:not([data-escaped]),
.broken-code-point[data-escaped]::before {
  border-radius: 4px;
  padding: 0 2px;
  color: var(--color-body);
  background: var(--color-text-light-1);
}

.broken-code-point[data-escaped]::before {
  visibility: visible;
  content: attr(data-escaped);
}

.broken-code-point[data-escaped] .char {
  /* make it copyable by selecting the text (AI suggestion, no other solution) */
  position: absolute;
  opacity: 0;
  pointer-events: none;
}

/*
When the container has the "unicode-escaped" class, show the escaped label
(e.g. "U+1F600") and hide the real-char:
<span class="unicode-escaped">
  <span class="escaped-code-point" data-escaped="U+1F600"><span class="char">{real-char}</span></span>
</span>
Without that container class, hide the escaped label and show the real-char:
<span>
  <span class="escaped-code-point" data-escaped="U+1F600"><span class="char">{real-char}</span></span>
</span>
*/
.unicode-escaped .escaped-code-point[data-escaped]::before {
  visibility: visible;
  content: attr(data-escaped);
  color: var(--color-red);
}

.unicode-escaped .escaped-code-point .char {
  display: none;
}

.unicode-escaped .ambiguous-code-point {
  border: 1px var(--color-yellow) solid;
}

View File

@ -8,26 +8,6 @@
min-width: 40% !important;
}
.repository .unicode-escaped .escaped-code-point[data-escaped]::before {
visibility: visible;
content: attr(data-escaped);
font-family: var(--fonts-monospace);
color: var(--color-red);
}
.repository .unicode-escaped .escaped-code-point .char {
display: none;
}
.repository .broken-code-point {
font-family: var(--fonts-monospace);
color: var(--color-blue);
}
.repository .unicode-escaped .ambiguous-code-point {
border: 1px var(--color-yellow) solid;
}
.issue-content {
display: flex;
align-items: flex-start;

View File

@ -6,10 +6,20 @@ import {initMarkupTasklist} from './tasklist.ts';
import {registerGlobalSelectorFunc} from '../modules/observer.ts';
import {initMarkupRenderIframe} from './render-iframe.ts';
import {initMarkupRefIssue} from './refissue.ts';
import {toggleElemClass} from '../utils/dom.ts';
// code that runs for all markup content
export function initMarkupContent(): void {
registerGlobalSelectorFunc('.markup', (el: HTMLElement) => {
if (el.matches('.truncated-markup')) {
// when the rendered markup is truncated (e.g.: user's home activity feed)
// we should not initialize any of the features (e.g.: code copy button), due to:
// * truncated markup already means that the container doesn't want to show complex contents
// * truncated markup may contain incomplete HTML/mermaid elements
// so the only thing we need to do is to remove the "is-loading" class added by the backend render.
toggleElemClass(el.querySelectorAll('.is-loading'), 'is-loading', false);
return;
}
initMarkupCodeCopy(el);
initMarkupTasklist(el);
initMarkupCodeMermaid(el);