0
0
mirror of https://github.com/go-gitea/gitea.git synced 2026-05-09 05:37:47 +02:00

Merge branch 'main' into fix/project-board-api-review-feedback

This commit is contained in:
silverwind 2026-03-10 21:37:37 +01:00 committed by GitHub
commit b8a654a29f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
229 changed files with 7787 additions and 2702 deletions

View File

@ -3,14 +3,14 @@
FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.26-alpine3.23 AS frontend-build
RUN apk --no-cache add build-base git nodejs pnpm
WORKDIR /src
COPY package.json pnpm-lock.yaml .npmrc ./
RUN --mount=type=cache,target=/root/.local/share/pnpm/store pnpm install --frozen-lockfile
COPY --exclude=.git/ . .
RUN --mount=type=cache,target=/root/.local/share/pnpm/store make frontend
RUN make frontend
# Build backend for each target platform
FROM docker.io/library/golang:1.26-alpine3.23 AS build-env
ARG GOPROXY=direct
ARG GITEA_VERSION
ARG TAGS="sqlite sqlite_unlock_notify"
ENV TAGS="bindata timetzdata $TAGS"
@ -22,14 +22,15 @@ RUN apk --no-cache add \
git
WORKDIR ${GOPATH}/src/code.gitea.io/gitea
COPY go.mod go.sum ./
RUN go mod download
# Use COPY instead of bind mount as read-only one breaks makefile state tracking and read-write one needs binary to be moved as it's discarded.
# ".git" directory is mounted separately later only for version data extraction.
COPY --exclude=.git/ . .
COPY --from=frontend-build /src/public/assets public/assets
# Build gitea, .git mount is required for version data
RUN --mount=type=cache,target=/go/pkg/mod \
--mount=type=cache,target="/root/.cache/go-build" \
RUN --mount=type=cache,target="/root/.cache/go-build" \
--mount=type=bind,source=".git/",target=".git/" \
make backend

View File

@ -3,14 +3,14 @@
FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.26-alpine3.23 AS frontend-build
RUN apk --no-cache add build-base git nodejs pnpm
WORKDIR /src
COPY package.json pnpm-lock.yaml .npmrc ./
RUN --mount=type=cache,target=/root/.local/share/pnpm/store pnpm install --frozen-lockfile
COPY --exclude=.git/ . .
RUN --mount=type=cache,target=/root/.local/share/pnpm/store make frontend
RUN make frontend
# Build backend for each target platform
FROM docker.io/library/golang:1.26-alpine3.23 AS build-env
ARG GOPROXY=direct
ARG GITEA_VERSION
ARG TAGS="sqlite sqlite_unlock_notify"
ENV TAGS="bindata timetzdata $TAGS"
@ -22,13 +22,14 @@ RUN apk --no-cache add \
git
WORKDIR ${GOPATH}/src/code.gitea.io/gitea
COPY go.mod go.sum ./
RUN go mod download
# See the comments in Dockerfile
COPY --exclude=.git/ . .
COPY --from=frontend-build /src/public/assets public/assets
# Build gitea, .git mount is required for version data
RUN --mount=type=cache,target=/go/pkg/mod \
--mount=type=cache,target="/root/.cache/go-build" \
RUN --mount=type=cache,target="/root/.cache/go-build" \
--mount=type=bind,source=".git/",target=".git/" \
make backend

View File

@ -15,7 +15,7 @@ XGO_VERSION := go-1.25.x
AIR_PACKAGE ?= github.com/air-verse/air@v1
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.9.2
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.10.1
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.11.2
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.15
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.8.0
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.33.1
@ -155,6 +155,7 @@ GO_SOURCES := $(wildcard *.go)
GO_SOURCES += $(shell find $(GO_DIRS) -type f -name "*.go")
GO_SOURCES += $(GENERATED_GO_DEST)
ESLINT_CONCURRENCY ?= 2
SWAGGER_SPEC := templates/swagger/v1_json.tmpl
SWAGGER_SPEC_INPUT := templates/swagger/v1_input.json
@ -292,12 +293,12 @@ lint-backend-fix: lint-go-fix lint-go-gitea-vet lint-editorconfig ## lint backen
.PHONY: lint-js
lint-js: node_modules ## lint js and ts files
$(NODE_VARS) pnpm exec eslint --color --max-warnings=0 $(ESLINT_FILES)
$(NODE_VARS) pnpm exec eslint --color --max-warnings=0 --concurrency $(ESLINT_CONCURRENCY) $(ESLINT_FILES)
$(NODE_VARS) pnpm exec vue-tsc
.PHONY: lint-js-fix
lint-js-fix: node_modules ## lint js and ts files and fix issues
$(NODE_VARS) pnpm exec eslint --color --max-warnings=0 $(ESLINT_FILES) --fix
$(NODE_VARS) pnpm exec eslint --color --max-warnings=0 --concurrency $(ESLINT_CONCURRENCY) $(ESLINT_FILES) --fix
$(NODE_VARS) pnpm exec vue-tsc
.PHONY: lint-css
@ -368,11 +369,11 @@ lint-yaml: .venv ## lint yaml files
.PHONY: lint-json
lint-json: node_modules ## lint json files
$(NODE_VARS) pnpm exec eslint -c eslint.json.config.ts --color --max-warnings=0
$(NODE_VARS) pnpm exec eslint -c eslint.json.config.ts --color --max-warnings=0 --concurrency $(ESLINT_CONCURRENCY)
.PHONY: lint-json-fix
lint-json-fix: node_modules ## lint and fix json files
$(NODE_VARS) pnpm exec eslint -c eslint.json.config.ts --color --max-warnings=0 --fix
$(NODE_VARS) pnpm exec eslint -c eslint.json.config.ts --color --max-warnings=0 --concurrency $(ESLINT_CONCURRENCY) --fix
.PHONY: watch
watch: ## watch everything and continuously rebuild

View File

@ -926,7 +926,7 @@ export default defineConfig([
},
extends: [
vue.configs['flat/recommended'],
vueScopedCss.configs['flat/recommended'] as any,
vueScopedCss.configs.recommended as any,
],
rules: {
'vue/attributes-order': [0],

6
flake.lock generated
View File

@ -2,11 +2,11 @@
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1772198003,
"narHash": "sha256-I45esRSssFtJ8p/gLHUZ1OUaaTaVLluNkABkk6arQwE=",
"lastModified": 1772773019,
"narHash": "sha256-E1bxHxNKfDoQUuvriG71+f+s/NT0qWkImXsYZNFFfCs=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "dd9b079222d43e1943b6ebd802f04fd959dc8e61",
"rev": "aca4d95fce4914b3892661bcb80b8087293536c6",
"type": "github"
},
"original": {

4
go.mod
View File

@ -1,6 +1,6 @@
module code.gitea.io/gitea
go 1.26.0
go 1.26.1
// rfc5280 said: "The serial number is an integer assigned by the CA to each certificate."
// But some CAs use negative serial number, just relax the check. related:
@ -174,7 +174,7 @@ require (
github.com/caddyserver/zerossl v0.1.4 // indirect
github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/cloudflare/circl v1.6.3 // indirect
github.com/couchbase/go-couchbase v0.1.1 // indirect
github.com/couchbase/gomemcached v0.3.3 // indirect
github.com/couchbase/goutils v0.1.2 // indirect

4
go.sum
View File

@ -227,8 +227,8 @@ github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObk
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8=
github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=

View File

@ -14,6 +14,7 @@ import (
"net/url"
"slices"
"strings"
"time"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/container"
@ -27,6 +28,11 @@ import (
"xorm.io/xorm"
)
// Authorization codes should expire within 10 minutes per https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.2
const oauth2AuthorizationCodeValidity = 10 * time.Minute
var ErrOAuth2AuthorizationCodeInvalidated = errors.New("oauth2 authorization code already invalidated")
// OAuth2Application represents an OAuth2 client (RFC 6749)
type OAuth2Application struct {
ID int64 `xorm:"pk autoincr"`
@ -386,6 +392,14 @@ func (code *OAuth2AuthorizationCode) TableName() string {
return "oauth2_authorization_code"
}
// IsExpired reports whether the authorization code is expired.
func (code *OAuth2AuthorizationCode) IsExpired() bool {
if code.ValidUntil.IsZero() {
return true
}
return code.ValidUntil <= timeutil.TimeStampNow()
}
// GenerateRedirectURI generates a redirect URI for a successful authorization request. State will be used if not empty.
func (code *OAuth2AuthorizationCode) GenerateRedirectURI(state string) (*url.URL, error) {
redirect, err := url.Parse(code.RedirectURI)
@ -403,8 +417,14 @@ func (code *OAuth2AuthorizationCode) GenerateRedirectURI(state string) (*url.URL
// Invalidate deletes the auth code from the database to invalidate this code
func (code *OAuth2AuthorizationCode) Invalidate(ctx context.Context) error {
_, err := db.GetEngine(ctx).ID(code.ID).NoAutoCondition().Delete(code)
return err
affected, err := db.GetEngine(ctx).ID(code.ID).NoAutoCondition().Delete(code)
if err != nil {
return err
}
if affected == 0 {
return ErrOAuth2AuthorizationCodeInvalidated
}
return nil
}
// ValidateCodeChallenge validates the given verifier against the saved code challenge. This is part of the PKCE implementation.
@ -472,6 +492,7 @@ func (grant *OAuth2Grant) GenerateNewAuthorizationCode(ctx context.Context, redi
// for code scanners to grab sensitive tokens.
codeSecret := "gta_" + base32Lower.EncodeToString(rBytes)
validUntil := time.Now().Add(oauth2AuthorizationCodeValidity)
code = &OAuth2AuthorizationCode{
Grant: grant,
GrantID: grant.ID,
@ -479,6 +500,7 @@ func (grant *OAuth2Grant) GenerateNewAuthorizationCode(ctx context.Context, redi
Code: codeSecret,
CodeChallenge: codeChallenge,
CodeChallengeMethod: codeChallengeMethod,
ValidUntil: timeutil.TimeStamp(validUntil.Unix()),
}
if err := db.Insert(ctx, code); err != nil {
return nil, err

View File

@ -5,13 +5,45 @@ package auth_test
import (
"testing"
"time"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/assert"
)
func TestOAuth2AuthorizationCodeValidity(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
t.Run("GenerateSetsValidUntil", func(t *testing.T) {
grant := unittest.AssertExistsAndLoadBean(t, &auth_model.OAuth2Grant{ID: 1})
expectedValidUntil := timeutil.TimeStamp(time.Now().Unix() + 600)
code, err := grant.GenerateNewAuthorizationCode(t.Context(), "http://127.0.0.1/", "", "")
assert.NoError(t, err)
assert.Equal(t, expectedValidUntil, code.ValidUntil)
assert.False(t, code.IsExpired())
assert.NoError(t, code.Invalidate(t.Context()))
})
t.Run("Expired", func(t *testing.T) {
defer timeutil.MockSet(time.Unix(2, 0).UTC())()
code := &auth_model.OAuth2AuthorizationCode{ValidUntil: timeutil.TimeStamp(1)}
assert.True(t, code.IsExpired())
})
t.Run("InvalidateTwice", func(t *testing.T) {
code, err := auth_model.GetOAuth2AuthorizationByCode(t.Context(), "authcode")
assert.NoError(t, err)
if assert.NotNil(t, code) {
assert.NoError(t, code.Invalidate(t.Context()))
assert.ErrorIs(t, code.Invalidate(t.Context()), auth_model.ErrOAuth2AuthorizationCodeInvalidated)
}
})
}
func TestOAuth2Application_GenerateClientSecret(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
app := unittest.AssertExistsAndLoadBean(t, &auth_model.OAuth2Application{ID: 1})

View File

@ -75,7 +75,7 @@ func (f *file) readAt(fileMeta *dbfsMeta, offset int64, p []byte) (n int, err er
}
func (f *file) Read(p []byte) (n int, err error) {
if f.metaID == 0 || !f.allowRead {
if !f.allowRead {
return 0, os.ErrInvalid
}
@ -89,7 +89,7 @@ func (f *file) Read(p []byte) (n int, err error) {
}
func (f *file) Write(p []byte) (n int, err error) {
if f.metaID == 0 || !f.allowWrite {
if !f.allowWrite {
return 0, os.ErrInvalid
}
@ -184,10 +184,6 @@ func (f *file) Close() error {
}
func (f *file) Stat() (os.FileInfo, error) {
if f.metaID == 0 {
return nil, os.ErrInvalid
}
fileMeta, err := findFileMetaByID(f.ctx, f.metaID)
if err != nil {
return nil, err
@ -232,15 +228,17 @@ func (f *file) open(flag int) (err error) {
if f.metaID != 0 {
return os.ErrExist
}
} else {
// create a new file if none exists.
if f.metaID == 0 {
if err = f.createEmpty(); err != nil {
return err
}
}
// create a new file if not exists.
if f.metaID == 0 {
if err = f.createEmpty(); err != nil {
return err
}
}
}
if f.metaID == 0 {
return os.ErrNotExist
}
if flag&os.O_TRUNC != 0 {
if err = f.truncate(); err != nil {
return err
@ -252,7 +250,7 @@ func (f *file) open(flag int) (err error) {
}
}
return nil
}
} // end if: allowWrite
// read only mode
if f.metaID == 0 {
@ -322,9 +320,6 @@ func (f *file) delete() error {
}
func (f *file) size() (int64, error) {
if f.metaID == 0 {
return 0, os.ErrNotExist
}
fileMeta, err := findFileMetaByID(f.ctx, f.metaID)
if err != nil {
return 0, err
@ -339,7 +334,7 @@ func findFileMetaByID(ctx context.Context, metaID int64) (*dbfsMeta, error) {
} else if ok {
return &fileMeta, nil
}
return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
return nil, os.ErrNotExist
}
func buildPath(path string) string {

View File

@ -40,6 +40,9 @@ The DBFS solution:
* In the future, when Gitea action needs to limit the log size (other CI/CD services also do so), it's easier to calculate the log file size.
* Even sometimes the UI needs to render the tailing lines, the tailing lines can be found be counting the "\n" from the end of the file by seek.
The seeking and finding is not the fastest way, but it's still acceptable and won't affect the performance too much.
Limitations of the DBFS solution:
* Not fully POSIX-compliant, some behaviors may be different from the real filesystem, especially for concurrent read/write
*/
type dbfsMeta struct {

View File

@ -9,19 +9,14 @@ import (
"os"
"testing"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func changeDefaultFileBlockSize(n int64) (restore func()) {
old := defaultFileBlockSize
defaultFileBlockSize = n
return func() {
defaultFileBlockSize = old
}
}
func TestDbfsBasic(t *testing.T) {
defer changeDefaultFileBlockSize(4)()
defer test.MockVariableValue(&defaultFileBlockSize, 4)()
// test basic write/read
f, err := OpenFile(t.Context(), "test.txt", os.O_RDWR|os.O_CREATE)
@ -122,10 +117,55 @@ func TestDbfsBasic(t *testing.T) {
stat, err = f.Stat()
assert.NoError(t, err)
assert.EqualValues(t, 10, stat.Size())
t.Run("NonExisting", func(t *testing.T) {
f, err := OpenFile(t.Context(), "non-existing.txt", os.O_RDONLY)
assert.ErrorIs(t, err, os.ErrNotExist)
assert.Nil(t, f)
f, err = OpenFile(t.Context(), "non-existing.txt", os.O_WRONLY)
assert.ErrorIs(t, err, os.ErrNotExist)
assert.Nil(t, f)
f, err = OpenFile(t.Context(), "non-existing.txt", os.O_WRONLY|os.O_APPEND|os.O_TRUNC)
assert.ErrorIs(t, err, os.ErrNotExist)
assert.Nil(t, f)
})
t.Run("Existing", func(t *testing.T) {
assertFileContent := func(f File, expected string) {
_, err := f.Seek(0, io.SeekStart)
require.NoError(t, err)
buf, err := io.ReadAll(f)
require.NoError(t, err)
assert.Equal(t, expected, string(buf))
}
f, err := OpenFile(t.Context(), "existing.txt", os.O_RDWR|os.O_CREATE)
require.NoError(t, err)
_, _ = f.Write([]byte("test"))
assertFileContent(f, "test")
assert.NoError(t, f.Close())
f, err = OpenFile(t.Context(), "existing.txt", os.O_RDWR|os.O_CREATE|os.O_APPEND)
require.NoError(t, err)
_, _ = f.Write([]byte("\nnew"))
assertFileContent(f, "test\nnew")
assert.NoError(t, f.Close())
f, err = OpenFile(t.Context(), "existing.txt", os.O_RDWR|os.O_TRUNC)
require.NoError(t, err)
assertFileContent(f, "")
assert.NoError(t, f.Close())
f, err = OpenFile(t.Context(), "existing.txt", os.O_RDWR|os.O_CREATE|os.O_EXCL)
assert.ErrorIs(t, err, os.ErrExist)
assert.Nil(t, f)
})
}
func TestDbfsReadWrite(t *testing.T) {
defer changeDefaultFileBlockSize(4)()
defer test.MockVariableValue(&defaultFileBlockSize, 4)()
f1, err := OpenFile(t.Context(), "test.log", os.O_RDWR|os.O_CREATE)
assert.NoError(t, err)
@ -157,30 +197,32 @@ func TestDbfsReadWrite(t *testing.T) {
}
func TestDbfsSeekWrite(t *testing.T) {
defer changeDefaultFileBlockSize(4)()
defer test.MockVariableValue(&defaultFileBlockSize, 4)()
f, err := OpenFile(t.Context(), "test2.log", os.O_RDWR|os.O_CREATE)
assert.NoError(t, err)
defer f.Close()
// write something
fw, err := OpenFile(t.Context(), "test2.log", os.O_RDWR|os.O_CREATE)
require.NoError(t, err)
defer fw.Close()
n, err := f.Write([]byte("111"))
n, err := fw.Write([]byte("111"))
assert.NoError(t, err)
_, err = f.Seek(int64(n), io.SeekStart)
_, err = fw.Seek(int64(n), io.SeekStart)
assert.NoError(t, err)
_, err = f.Write([]byte("222"))
_, err = fw.Write([]byte("222"))
assert.NoError(t, err)
_, err = f.Seek(int64(n), io.SeekStart)
_, err = fw.Seek(int64(n), io.SeekStart)
assert.NoError(t, err)
_, err = f.Write([]byte("333"))
_, err = fw.Write([]byte("333"))
assert.NoError(t, err)
// then read it
fr, err := OpenFile(t.Context(), "test2.log", os.O_RDONLY)
assert.NoError(t, err)
defer f.Close()
require.NoError(t, err)
defer fr.Close()
buf, err := io.ReadAll(fr)
assert.NoError(t, err)

View File

@ -147,19 +147,21 @@ func GetRepoAssignees(ctx context.Context, repo *Repository) (_ []*user_model.Us
}
// GetIssuePostersWithSearch returns users with limit of 30 whose username started with prefix that have authored an issue/pull request for the given repository
// If isShowFullName is set to true, also include full name prefix search
func GetIssuePostersWithSearch(ctx context.Context, repo *Repository, isPull bool, search string, isShowFullName bool) ([]*user_model.User, error) {
// It searches with the "user.name" and "user.full_name" fields case-insensitively.
func GetIssuePostersWithSearch(ctx context.Context, repo *Repository, isPull bool, search string) ([]*user_model.User, error) {
users := make([]*user_model.User, 0, 30)
var prefixCond builder.Cond = builder.Like{"lower_name", strings.ToLower(search) + "%"}
if search != "" && isShowFullName {
prefixCond = prefixCond.Or(db.BuildCaseInsensitiveLike("full_name", "%"+search+"%"))
}
cond := builder.In("`user`.id",
builder.Select("poster_id").From("issue").Where(
builder.Eq{"repo_id": repo.ID}.
And(builder.Eq{"is_pull": isPull}),
).GroupBy("poster_id")).And(prefixCond)
).GroupBy("poster_id"))
if search != "" {
var prefixCond builder.Cond = builder.Like{"lower_name", strings.ToLower(search) + "%"}
prefixCond = prefixCond.Or(db.BuildCaseInsensitiveLike("full_name", "%"+search+"%"))
cond = cond.And(prefixCond)
}
return users, db.GetEngine(ctx).
Where(cond).

View File

@ -44,12 +44,12 @@ func TestGetIssuePostersWithSearch(t *testing.T) {
repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
users, err := repo_model.GetIssuePostersWithSearch(t.Context(), repo2, false, "USER", false /* full name */)
users, err := repo_model.GetIssuePostersWithSearch(t.Context(), repo2, false, "USER")
require.NoError(t, err)
require.Len(t, users, 1)
assert.Equal(t, "user2", users[0].Name)
users, err = repo_model.GetIssuePostersWithSearch(t.Context(), repo2, false, "TW%O", true /* full name */)
users, err = repo_model.GetIssuePostersWithSearch(t.Context(), repo2, false, "TW%O")
require.NoError(t, err)
require.Len(t, users, 1)
assert.Equal(t, "user2", users[0].Name)

View File

@ -8,6 +8,7 @@ import (
"context"
"encoding/hex"
"fmt"
"html/template"
"mime"
"net/mail"
"net/url"
@ -28,6 +29,7 @@ import (
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/htmlutil"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
@ -417,16 +419,6 @@ func (u *User) IsTokenAccessAllowed() bool {
return u.Type == UserTypeIndividual || u.Type == UserTypeBot
}
// DisplayName returns full name if it's not empty,
// returns username otherwise.
func (u *User) DisplayName() string {
trimmed := strings.TrimSpace(u.FullName)
if len(trimmed) > 0 {
return trimmed
}
return u.Name
}
// EmailTo returns a string suitable to be put into a e-mail `To:` header.
func (u *User) EmailTo() string {
sanitizedDisplayName := globalVars().emailToReplacer.Replace(u.DisplayName())
@ -445,27 +437,45 @@ func (u *User) EmailTo() string {
return fmt.Sprintf("%s <%s>", mime.QEncoding.Encode("utf-8", add.Name), add.Address)
}
// GetDisplayName returns full name if it's not empty and DEFAULT_SHOW_FULL_NAME is set,
// returns username otherwise.
// TODO: DefaultShowFullName causes messy logic, there are already too many methods to display a user's "display name", need to refactor them
// * user.Name / user.FullName: directly used in templates
// * user.DisplayName(): always show FullName if it's not empty, otherwise show Name
// * user.GetDisplayName(): show FullName if it's not empty and DefaultShowFullName is set, otherwise show Name
// * user.ShortName(): used a lot in templates, but it should be removed and let frontend use "ellipsis" styles
// * activity action.ShortActUserName/GetActDisplayName/GetActDisplayNameTitle, etc: duplicate and messy
// DisplayName returns full name if it's not empty, returns username otherwise.
func (u *User) DisplayName() string {
fullName := strings.TrimSpace(u.FullName)
if fullName != "" {
return fullName
}
return u.Name
}
// GetDisplayName returns full name if it's not empty and DEFAULT_SHOW_FULL_NAME is set, otherwise, username.
func (u *User) GetDisplayName() string {
if setting.UI.DefaultShowFullName {
trimmed := strings.TrimSpace(u.FullName)
if len(trimmed) > 0 {
return trimmed
fullName := strings.TrimSpace(u.FullName)
if fullName != "" {
return fullName
}
}
return u.Name
}
// GetCompleteName returns the full name and username in the form of
// "Full Name (username)" if full name is not empty, otherwise it returns
// "username".
func (u *User) GetCompleteName() string {
trimmedFullName := strings.TrimSpace(u.FullName)
if len(trimmedFullName) > 0 {
return fmt.Sprintf("%s (%s)", trimmedFullName, u.Name)
// ShortName ellipses username to length (still used by many templates), it calls GetDisplayName and respects DEFAULT_SHOW_FULL_NAME
func (u *User) ShortName(length int) string {
return util.EllipsisDisplayString(u.GetDisplayName(), length)
}
func (u *User) GetShortDisplayNameLinkHTML() template.HTML {
fullName := strings.TrimSpace(u.FullName)
displayName, displayTooltip := u.Name, fullName
if setting.UI.DefaultShowFullName && fullName != "" {
displayName, displayTooltip = fullName, u.Name
}
return u.Name
return htmlutil.HTMLFormat(`<a class="muted" href="%s" data-tooltip-content="%s">%s</a>`, u.HomeLink(), displayTooltip, displayName)
}
func gitSafeName(name string) string {
@ -488,14 +498,6 @@ func (u *User) GitName() string {
return fmt.Sprintf("user-%d", u.ID)
}
// ShortName ellipses username to length
func (u *User) ShortName(length int) string {
if setting.UI.DefaultShowFullName && len(u.FullName) > 0 {
return util.EllipsisDisplayString(u.FullName, length)
}
return util.EllipsisDisplayString(u.Name, length)
}
// IsMailable checks if a user is eligible to receive emails.
// System users like Ghost and Gitea Actions are excluded.
func (u *User) IsMailable() bool {

View File

@ -33,21 +33,22 @@ const (
// It doesn't respect the file format in the filename like ".zst", since it's difficult to reopen a closed compressed file and append new content.
// Why doesn't it store logs in object storage directly? Because it's not efficient to append content to object storage.
func WriteLogs(ctx context.Context, filename string, offset int64, rows []*runnerv1.LogRow) ([]int, error) {
flag := os.O_WRONLY
flag, openFileFor := os.O_WRONLY, "write-only"
if offset == 0 {
// Create file only if offset is 0, or it could result in content holes if the file doesn't exist.
flag |= os.O_CREATE
// Only allow to create file if offset is 0 (the first write), see #25560.
// Otherwise, it might result in content holes if the file has been deleted after transferred (actions.TransferLogs).
flag, openFileFor = os.O_WRONLY|os.O_CREATE, "write-create"
}
name := DBFSPrefix + filename
f, err := dbfs.OpenFile(ctx, name, flag)
if err != nil {
return nil, fmt.Errorf("dbfs OpenFile %q: %w", name, err)
return nil, fmt.Errorf("dbfs.OpenFile %q for %s: %w", name, openFileFor, err)
}
defer f.Close()
stat, err := f.Stat()
if err != nil {
return nil, fmt.Errorf("dbfs Stat %q: %w", name, err)
return nil, fmt.Errorf("dbfs.Stat %q: %w", name, err)
}
if stat.Size() < offset {
// If the size is less than offset, refuse to write, or it could result in content holes.
@ -56,7 +57,7 @@ func WriteLogs(ctx context.Context, filename string, offset int64, rows []*runne
}
if _, err := f.Seek(offset, io.SeekStart); err != nil {
return nil, fmt.Errorf("dbfs Seek %q: %w", name, err)
return nil, fmt.Errorf("dbfs.Seek %q: %w", name, err)
}
writer := bufio.NewWriterSize(f, defaultBufSize)
@ -121,16 +122,17 @@ const (
// TransferLogs transfers logs from DBFS to object storage.
// It happens when the file is complete and no more logs will be appended.
// It respects the file format in the filename like ".zst", and compresses the content if needed.
// The task log file must be marked as "log_in_storage=true" after the transfer.
func TransferLogs(ctx context.Context, filename string) (func(), error) {
name := DBFSPrefix + filename
remove := func() {
if err := dbfs.Remove(ctx, name); err != nil {
log.Warn("dbfs remove %q: %v", name, err)
log.Warn("dbfs.Remove %q: %v", name, err)
}
}
f, err := dbfs.Open(ctx, name)
if err != nil {
return nil, fmt.Errorf("dbfs open %q: %w", name, err)
return nil, fmt.Errorf("dbfs.Open %q: %w", name, err)
}
defer f.Close()
@ -164,7 +166,7 @@ func RemoveLogs(ctx context.Context, inStorage bool, filename string) error {
name := DBFSPrefix + filename
err := dbfs.Remove(ctx, name)
if err != nil {
return fmt.Errorf("dbfs remove %q: %w", name, err)
return fmt.Errorf("dbfs.Remove %q: %w", name, err)
}
return nil
}
@ -180,7 +182,7 @@ func OpenLogs(ctx context.Context, inStorage bool, filename string) (io.ReadSeek
name := DBFSPrefix + filename
f, err := dbfs.Open(ctx, name)
if err != nil {
return nil, fmt.Errorf("dbfs open %q: %w", name, err)
return nil, fmt.Errorf("dbfs.Open %q: %w", name, err)
}
return f, nil
}

View File

@ -61,12 +61,14 @@ func (e *escapeStreamer) Text(data string) error {
until = len(data)
next = until
} else {
until, next = nextIdxs[0]+pos, nextIdxs[1]+pos
until = min(nextIdxs[0]+pos, len(data))
next = min(nextIdxs[1]+pos, len(data))
}
// from pos until we know that the runes are not \r\t\n or even ' '
runes := make([]rune, 0, next-until)
positions := make([]int, 0, next-until+1)
n := next - until
runes := make([]rune, 0, n)
positions := make([]int, 0, n+1)
for pos < until {
r, sz := utf8.DecodeRune(dataBytes[pos:])

View File

@ -11,6 +11,7 @@ import (
"slices"
"strconv"
"strings"
"time"
"code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/util"
@ -39,6 +40,10 @@ type GrepOptions struct {
PathspecList []string
}
// grepSearchTimeout is the timeout for git grep search, it should be long enough to get results
// but not too long to cause performance issues
const grepSearchTimeout = 30 * time.Second
func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepOptions) ([]*GrepResult, error) {
/*
The output is like this ( "^@" means \x00):
@ -76,6 +81,7 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO
stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
defer stdoutReaderClose()
err := cmd.WithDir(repo.Path).
WithTimeout(grepSearchTimeout).
WithPipelineFunc(func(ctx gitcmd.Context) error {
isInBlock := false
rd := bufio.NewReaderSize(stdoutReader, util.IfZero(opts.MaxLineLength, 16*1024))

View File

@ -46,14 +46,14 @@ func IsRelativeURL(s string) bool {
func getRequestScheme(req *http.Request) string {
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Forwarded-Proto
if s := req.Header.Get("X-Forwarded-Proto"); s != "" {
return s
if proto, ok := parseForwardedProtoValue(req.Header.Get("X-Forwarded-Proto")); ok {
return proto
}
if s := req.Header.Get("X-Forwarded-Protocol"); s != "" {
return s
if proto, ok := parseForwardedProtoValue(req.Header.Get("X-Forwarded-Protocol")); ok {
return proto
}
if s := req.Header.Get("X-Url-Scheme"); s != "" {
return s
if proto, ok := parseForwardedProtoValue(req.Header.Get("X-Url-Scheme")); ok {
return proto
}
if s := req.Header.Get("Front-End-Https"); s != "" {
return util.Iif(s == "on", "https", "http")
@ -64,6 +64,13 @@ func getRequestScheme(req *http.Request) string {
return ""
}
func parseForwardedProtoValue(val string) (string, bool) {
if val == "http" || val == "https" {
return val, true
}
return "", false
}
// GuessCurrentAppURL tries to guess the current full public URL (with sub-path) by http headers. It always has a '/' suffix, exactly the same as setting.AppURL
// TODO: should rename it to GuessCurrentPublicURL in the future
func GuessCurrentAppURL(ctx context.Context) string {

View File

@ -47,6 +47,7 @@ func TestGuessCurrentHostURL(t *testing.T) {
defer test.MockVariableValue(&setting.AppURL, "http://cfg-host/sub/")()
defer test.MockVariableValue(&setting.AppSubURL, "/sub")()
headersWithProto := http.Header{"X-Forwarded-Proto": {"https"}}
maliciousProtoHeaders := http.Header{"X-Forwarded-Proto": {"http://attacker.host/?trash="}}
t.Run("Legacy", func(t *testing.T) {
defer test.MockVariableValue(&setting.PublicURLDetection, setting.PublicURLLegacy)()
@ -60,6 +61,9 @@ func TestGuessCurrentHostURL(t *testing.T) {
// if "X-Forwarded-Proto" exists, then use it and "Host" header
ctx = context.WithValue(t.Context(), RequestContextKey, &http.Request{Host: "req-host:3000", Header: headersWithProto})
assert.Equal(t, "https://req-host:3000", GuessCurrentHostURL(ctx))
ctx = context.WithValue(t.Context(), RequestContextKey, &http.Request{Host: "req-host:3000", Header: maliciousProtoHeaders})
assert.Equal(t, "http://cfg-host", GuessCurrentHostURL(ctx))
})
t.Run("Auto", func(t *testing.T) {
@ -76,6 +80,9 @@ func TestGuessCurrentHostURL(t *testing.T) {
ctx = context.WithValue(t.Context(), RequestContextKey, &http.Request{Host: "req-host:3000", Header: headersWithProto})
assert.Equal(t, "https://req-host:3000", GuessCurrentHostURL(ctx))
ctx = context.WithValue(t.Context(), RequestContextKey, &http.Request{Host: "req-host:3000", Header: maliciousProtoHeaders})
assert.Equal(t, "http://req-host:3000", GuessCurrentHostURL(ctx))
})
t.Run("Never", func(t *testing.T) {

View File

@ -24,7 +24,7 @@ func indexSettingToGitGrepPathspecList() (list []string) {
return list
}
func PerformSearch(ctx context.Context, page int, repoID int64, gitRepo *git.Repository, ref git.RefName, keyword string, searchMode indexer.SearchModeType) (searchResults []*code_indexer.Result, total int, err error) {
func PerformSearch(ctx context.Context, page int, repoID int64, gitRepo *git.Repository, ref git.RefName, keyword string, searchMode indexer.SearchModeType) (searchResults []*code_indexer.Result, total int64, err error) {
grepMode := git.GrepModeWords
switch searchMode {
case indexer.SearchModeExact:
@ -47,7 +47,7 @@ func PerformSearch(ctx context.Context, page int, repoID int64, gitRepo *git.Rep
return nil, 0, fmt.Errorf("gitRepo.GetRefCommitID: %w", err)
}
total = len(res)
total = int64(len(res))
pageStart := min((page-1)*setting.UI.RepoSearchPagingNum, len(res))
pageEnd := min(page*setting.UI.RepoSearchPagingNum, len(res))
res = res[pageStart:pageEnd]

View File

@ -130,7 +130,7 @@ func searchResult(result *internal.SearchResult, startIndex, endIndex int) (*Res
}
// PerformSearch perform a search on a repository
func PerformSearch(ctx context.Context, opts *SearchOptions) (int, []*Result, []*SearchResultLanguages, error) {
func PerformSearch(ctx context.Context, opts *SearchOptions) (int64, []*Result, []*SearchResultLanguages, error) {
if opts == nil || len(opts.Keyword) == 0 {
return 0, nil, nil, nil
}
@ -149,5 +149,5 @@ func PerformSearch(ctx context.Context, opts *SearchOptions) (int, []*Result, []
return 0, nil, nil, err
}
}
return int(total), displayResults, resultLanguages, nil
return total, displayResults, resultLanguages, nil
}

View File

@ -11,7 +11,6 @@ import (
"strings"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// CamoEncode encodes a lnk to fit with the go-camo and camo proxy links. The purposes of camo-proxy are:
@ -27,7 +26,7 @@ func CamoEncode(link string) string {
macSum := b64encode(mac.Sum(nil))
encodedURL := b64encode([]byte(link))
return util.URLJoin(setting.Camo.ServerURL, macSum, encodedURL)
return strings.TrimSuffix(setting.Camo.ServerURL, "/") + "/" + macSum + "/" + encodedURL
}
func b64encode(data []byte) string {

View File

@ -4,13 +4,13 @@
package markup
import (
"fmt"
"slices"
"strings"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/references"
"code.gitea.io/gitea/modules/util"
"golang.org/x/net/html"
"golang.org/x/net/html/atom"
@ -219,7 +219,7 @@ func hashCurrentPatternProcessor(ctx *RenderContext, node *html.Node) {
continue
}
link := "/:root/" + util.URLJoin(ctx.RenderOptions.Metas["user"], ctx.RenderOptions.Metas["repo"], "commit", hash)
link := fmt.Sprintf("/:root/%s/%s/commit/%s", ctx.RenderOptions.Metas["user"], ctx.RenderOptions.Metas["repo"], hash)
replaceContent(node, m[2], m[3], createCodeLink(link, base.ShortSha(hash), "commit"))
start = 0
node = node.NextSibling.NextSibling
@ -236,7 +236,7 @@ func commitCrossReferencePatternProcessor(ctx *RenderContext, node *html.Node) {
}
refText := ref.Owner + "/" + ref.Name + "@" + base.ShortSha(ref.CommitSha)
linkHref := "/:root/" + util.URLJoin(ref.Owner, ref.Name, "commit", ref.CommitSha)
linkHref := fmt.Sprintf("/:root/%s/%s/commit/%s", ref.Owner, ref.Name, ref.CommitSha)
link := createLink(ctx, linkHref, refText, "commit")
replaceContent(node, ref.RefLocation.Start, ref.RefLocation.End, link)

View File

@ -23,12 +23,12 @@ const (
// externalIssueLink an HTML link to an alphanumeric-style issue
func externalIssueLink(baseURL, class, name string) string {
return link(util.URLJoin(baseURL, name), class, name)
return link(strings.TrimSuffix(baseURL, "/")+"/"+name, class, name)
}
// numericLink an HTML to a numeric-style issue
func numericIssueLink(baseURL, class string, index int, marker string) string {
return link(util.URLJoin(baseURL, strconv.Itoa(index)), class, fmt.Sprintf("%s%d", marker, index))
return link(strings.TrimSuffix(baseURL, "/")+"/"+strconv.Itoa(index), class, fmt.Sprintf("%s%d", marker, index))
}
// link an HTML link
@ -116,7 +116,7 @@ func TestRender_IssueIndexPattern2(t *testing.T) {
links := make([]any, len(indices))
for i, index := range indices {
links[i] = numericIssueLink(util.URLJoin("/test-owner/test-repo", path), "ref-issue", index, marker)
links[i] = numericIssueLink("/test-owner/test-repo/"+path, "ref-issue", index, marker)
}
expectedNil := fmt.Sprintf(expectedFmt, links...)
testRenderIssueIndexPattern(t, s, expectedNil, NewTestRenderContext(TestAppURL, localMetas))
@ -210,7 +210,7 @@ func TestRender_IssueIndexPattern5(t *testing.T) {
metas["regexp"] = pattern
links := make([]any, len(ids))
for i, id := range ids {
links[i] = link(util.URLJoin("https://someurl.com/someUser/someRepo/", id), "ref-issue ref-external-issue", names[i])
links[i] = link("https://someurl.com/someUser/someRepo/"+id, "ref-issue ref-external-issue", names[i])
}
expected := fmt.Sprintf(expectedFmt, links...)
@ -288,11 +288,11 @@ func TestRender_AutoLink(t *testing.T) {
}
// render valid issue URLs
test(util.URLJoin(TestRepoURL, "issues", "3333"),
numericIssueLink(util.URLJoin(TestRepoURL, "issues"), "ref-issue", 3333, "#"))
test(TestRepoURL+"issues/3333",
numericIssueLink(TestRepoURL+"issues", "ref-issue", 3333, "#"))
// render valid commit URLs
tmp := util.URLJoin(TestRepoURL, "commit", "d8a994ef243349f321568f9e36d5c3f444b99cae")
tmp := TestRepoURL + "commit/d8a994ef243349f321568f9e36d5c3f444b99cae"
test(tmp, "<a href=\""+tmp+"\" class=\"commit\"><code>d8a994ef24</code></a>")
tmp += "#diff-2"
test(tmp, "<a href=\""+tmp+"\" class=\"commit\"><code>d8a994ef24 (diff-2)</code></a>")

View File

@ -4,6 +4,7 @@
package markup
import (
"fmt"
"strconv"
"strings"
@ -162,7 +163,7 @@ func issueIndexPatternProcessor(ctx *RenderContext, node *html.Node) {
issueOwner := util.Iif(ref.Owner == "", ctx.RenderOptions.Metas["user"], ref.Owner)
issueRepo := util.Iif(ref.Owner == "", ctx.RenderOptions.Metas["repo"], ref.Name)
issuePath := util.Iif(ref.IsPull, "pulls", "issues")
linkHref := "/:root/" + util.URLJoin(issueOwner, issueRepo, issuePath, ref.Issue)
linkHref := fmt.Sprintf("/:root/%s/%s/%s/%s", issueOwner, issueRepo, issuePath, ref.Issue)
// at the moment, only render the issue index in a full line (or simple line) as icon+title
// otherwise it would be too noisy for "take #1 as an example" in a sentence

View File

@ -113,16 +113,17 @@ func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
}
childNode.Parent = linkNode
absoluteLink := IsFullURLString(link)
if !absoluteLink {
// FIXME: it should be fully refactored in the future, it uses various hacky approaches to guess how to encode a path for wiki
// When a link contains "/", then we assume that the user has provided a well-encoded link.
if !absoluteLink && !strings.Contains(link, "/") {
// So only guess for links without "/".
if image {
link = strings.ReplaceAll(link, " ", "+")
} else {
// the hacky wiki name encoding: space to "-"
link = strings.ReplaceAll(link, " ", "-") // FIXME: it should support dashes in the link, eg: "the-dash-support.-"
}
if !strings.Contains(link, "/") {
link = url.PathEscape(link) // FIXME: it doesn't seem right and it might cause double-escaping
}
link = url.PathEscape(link)
}
if image {
title := props["title"]

View File

@ -4,6 +4,7 @@
package markup
import (
"fmt"
"strings"
"code.gitea.io/gitea/modules/references"
@ -26,14 +27,11 @@ func mentionProcessor(ctx *RenderContext, node *html.Node) {
loc.End += start
mention := node.Data[loc.Start:loc.End]
teams, ok := ctx.RenderOptions.Metas["teams"]
// FIXME: util.URLJoin may not be necessary here:
// - setting.AppURL is defined to have a terminal '/' so unless mention[1:]
// is an AppSubURL link we can probably fallback to concatenation.
// team mention should follow @orgName/teamName style
if ok && strings.Contains(mention, "/") {
mentionOrgAndTeam := strings.Split(mention, "/")
if mentionOrgAndTeam[0][1:] == ctx.RenderOptions.Metas["org"] && strings.Contains(teams, ","+strings.ToLower(mentionOrgAndTeam[1])+",") {
link := "/:root/" + util.URLJoin("org", ctx.RenderOptions.Metas["org"], "teams", mentionOrgAndTeam[1])
link := fmt.Sprintf("/:root/org/%s/teams/%s", ctx.RenderOptions.Metas["org"], mentionOrgAndTeam[1])
replaceContent(node, loc.Start, loc.End, createLink(ctx, link, mention, "" /*mention*/))
node = node.NextSibling.NextSibling
start = 0

View File

@ -34,15 +34,15 @@ func TestRender_Commits(t *testing.T) {
sha := "65f1bf27bc3bf70f64657658635e66094edbcb4d"
repo := markup.TestAppURL + testRepoOwnerName + "/" + testRepoName + "/"
commit := util.URLJoin(repo, "commit", sha)
commit := repo + "commit/" + sha
commitPath := "/user13/repo11/commit/" + sha
tree := util.URLJoin(repo, "tree", sha, "src")
tree := repo + "tree/" + sha + "/src"
file := util.URLJoin(repo, "commit", sha, "example.txt")
file := repo + "commit/" + sha + "/example.txt"
fileWithExtra := file + ":"
fileWithHash := file + "#L2"
fileWithHasExtra := file + "#L2:"
commitCompare := util.URLJoin(repo, "compare", sha+"..."+sha)
commitCompare := repo + "compare/" + sha + "..." + sha
commitCompareWithHash := commitCompare + "#L2"
test(sha, `<p><a href="`+commitPath+`" rel="nofollow"><code>65f1bf27bc</code></a></p>`)
@ -90,14 +90,14 @@ func TestRender_CrossReferences(t *testing.T) {
"/home/gitea/go-gitea/gitea#12345",
`<p>/home/gitea/go-gitea/gitea#12345</p>`)
test(
util.URLJoin(markup.TestAppURL, "gogitea", "gitea", "issues", "12345"),
`<p><a href="`+util.URLJoin(markup.TestAppURL, "gogitea", "gitea", "issues", "12345")+`" class="ref-issue" rel="nofollow">gogitea/gitea#12345</a></p>`)
markup.TestAppURL+"gogitea/gitea/issues/12345",
`<p><a href="`+markup.TestAppURL+`gogitea/gitea/issues/12345" class="ref-issue" rel="nofollow">gogitea/gitea#12345</a></p>`)
test(
util.URLJoin(markup.TestAppURL, "go-gitea", "gitea", "issues", "12345"),
`<p><a href="`+util.URLJoin(markup.TestAppURL, "go-gitea", "gitea", "issues", "12345")+`" class="ref-issue" rel="nofollow">go-gitea/gitea#12345</a></p>`)
markup.TestAppURL+"go-gitea/gitea/issues/12345",
`<p><a href="`+markup.TestAppURL+`go-gitea/gitea/issues/12345" class="ref-issue" rel="nofollow">go-gitea/gitea#12345</a></p>`)
test(
util.URLJoin(markup.TestAppURL, "gogitea", "some-repo-name", "issues", "12345"),
`<p><a href="`+util.URLJoin(markup.TestAppURL, "gogitea", "some-repo-name", "issues", "12345")+`" class="ref-issue" rel="nofollow">gogitea/some-repo-name#12345</a></p>`)
markup.TestAppURL+"gogitea/some-repo-name/issues/12345",
`<p><a href="`+markup.TestAppURL+`gogitea/some-repo-name/issues/12345" class="ref-issue" rel="nofollow">gogitea/some-repo-name#12345</a></p>`)
inputURL := setting.AppURL + "a/b/commit/0123456789012345678901234567890123456789/foo.txt?a=b#L2-L3"
test(
@ -375,7 +375,7 @@ func TestRender_emoji(t *testing.T) {
func TestRender_ShortLinks(t *testing.T) {
setting.AppURL = markup.TestAppURL
tree := util.URLJoin(markup.TestRepoURL, "src", "master")
tree := markup.TestRepoURL + "src/master"
test := func(input, expected string) {
buffer, err := markdown.RenderString(markup.NewTestRenderContext(tree), input)
@ -383,15 +383,15 @@ func TestRender_ShortLinks(t *testing.T) {
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(buffer)))
}
url := util.URLJoin(tree, "Link")
otherURL := util.URLJoin(tree, "Other-Link")
encodedURL := util.URLJoin(tree, "Link%3F")
imgurl := util.URLJoin(tree, "Link.jpg")
otherImgurl := util.URLJoin(tree, "Link+Other.jpg")
encodedImgurl := util.URLJoin(tree, "Link+%23.jpg")
notencodedImgurl := util.URLJoin(tree, "some", "path", "Link+#.jpg")
renderableFileURL := util.URLJoin(tree, "markdown_file.md")
unrenderableFileURL := util.URLJoin(tree, "file.zip")
url := tree + "/Link"
otherURL := tree + "/Other-Link"
encodedURL := tree + "/Link%3F"
imgurl := tree + "/Link.jpg"
otherImgurl := tree + "/Link+Other.jpg"
encodedImgurl := tree + "/Link+%23.jpg"
notencodedImgurl := tree + "/some/path/Link%20#.jpg"
renderableFileURL := tree + "/markdown_file.md"
unrenderableFileURL := tree + "/file.zip"
favicon := "http://google.com/favicon.ico"
test(
@ -466,6 +466,8 @@ func TestRender_ShortLinks(t *testing.T) {
"[[Name|Link #.jpg|alt=\"AltName\"|title='Title']]",
`<p><a href="`+encodedImgurl+`" rel="nofollow"><img src="`+encodedImgurl+`" title="Title" alt="AltName"/></a></p>`,
)
// FIXME: it's unable to resolve: [[link?k=v]]
// FIXME: it is a wrong test case, it is not an image, but a link with anchor "#.jpg"
test(
"[[some/path/Link #.jpg]]",
`<p><a href="`+notencodedImgurl+`" rel="nofollow"><img src="`+notencodedImgurl+`" title="Link #.jpg" alt="some/path/Link #.jpg"/></a></p>`,

View File

@ -14,7 +14,6 @@ import (
"code.gitea.io/gitea/modules/markup/markdown"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/modules/util"
"github.com/stretchr/testify/assert"
)
@ -23,7 +22,6 @@ const (
AppURL = "http://localhost:3000/"
testRepoOwnerName = "user13"
testRepoName = "repo11"
FullURL = AppURL + testRepoOwnerName + "/" + testRepoName + "/"
)
// these values should match the const above
@ -47,8 +45,9 @@ func TestRender_StandardLinks(t *testing.T) {
func TestRender_Images(t *testing.T) {
setting.AppURL = AppURL
const baseLink = "http://localhost:3000/user13/repo11"
render := func(input, expected string) {
buffer, err := markdown.RenderString(markup.NewTestRenderContext(FullURL), input)
buffer, err := markdown.RenderString(markup.NewTestRenderContext(baseLink), input)
assert.NoError(t, err)
assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(buffer)))
}
@ -56,7 +55,7 @@ func TestRender_Images(t *testing.T) {
url := "../../.images/src/02/train.jpg"
title := "Train"
href := "https://gitea.io"
result := util.URLJoin(FullURL, url)
result := baseLink + "/.images/src/02/train.jpg" // resolved link should not go out of the base link
// hint: With Markdown v2.5.2, there is a new syntax: [link](URL){:target="_blank"} , but we do not support it now
render(
@ -88,6 +87,7 @@ func TestRender_Images(t *testing.T) {
}
func TestTotal_RenderString(t *testing.T) {
const FullURL = AppURL + testRepoOwnerName + "/" + testRepoName + "/"
setting.AppURL = AppURL
defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableAdditionalAttributes, true)()

View File

@ -5,28 +5,47 @@ package markup
import (
"context"
"net/url"
"path"
"strings"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// resolveLinkRelative tries to resolve the link relative to the "{base}/{cur}", and returns the final link.
// It only resolves the link, doesn't do any sanitization or validation, invalid links will be returned as is.
func resolveLinkRelative(ctx context.Context, base, cur, link string, absolute bool) (finalLink string) {
if IsFullURLString(link) {
return link
linkURL, err := url.Parse(link)
if err != nil {
return link // invalid URL, return as is
}
if linkURL.Scheme != "" || linkURL.Host != "" {
return link // absolute URL, return as is
}
if strings.HasPrefix(link, "/") {
if strings.HasPrefix(link, base) && strings.Count(base, "/") >= 4 {
// a trick to tolerate that some users were using absolute paths (the old gitea's behavior)
// a trick to tolerate that some users were using absolute paths (the old Gitea's behavior)
// if the link is likely "{base}/src/main" while "{base}" is something like "/owner/repo"
finalLink = link
} else {
finalLink = util.URLJoin(base, "./", link)
// need to resolve the link relative to "{base}"
cur = ""
}
} // else: link is relative to "{base}/{cur}"
if finalLink == "" {
finalLink = strings.TrimSuffix(base, "/") + path.Join("/"+cur, "/"+linkURL.EscapedPath())
finalLink = strings.TrimSuffix(finalLink, "/")
if linkURL.RawQuery != "" {
finalLink += "?" + linkURL.RawQuery
}
if linkURL.Fragment != "" {
finalLink += "#" + linkURL.Fragment
}
} else {
finalLink = util.URLJoin(base, "./", cur, link)
}
finalLink = strings.TrimSuffix(finalLink, "/")
if absolute {
finalLink = httplib.MakeAbsoluteURL(ctx, finalLink)
}

View File

@ -18,8 +18,16 @@ func TestResolveLinkRelative(t *testing.T) {
assert.Equal(t, "/a/b", resolveLinkRelative(ctx, "/a", "b", "", false))
assert.Equal(t, "/a/b/c", resolveLinkRelative(ctx, "/a", "b", "c", false))
assert.Equal(t, "/a/c", resolveLinkRelative(ctx, "/a", "b", "/c", false))
assert.Equal(t, "/a/c#id", resolveLinkRelative(ctx, "/a", "b", "/c#id", false))
assert.Equal(t, "/a/%2f?k=/", resolveLinkRelative(ctx, "/a", "b", "/%2f/?k=/", false))
assert.Equal(t, "/a/b/c?k=v#id", resolveLinkRelative(ctx, "/a", "b", "c/?k=v#id", false))
assert.Equal(t, "%invalid", resolveLinkRelative(ctx, "/a", "b", "%invalid", false))
assert.Equal(t, "http://localhost:3000/a", resolveLinkRelative(ctx, "/a", "", "", true))
// absolute link is returned as is
assert.Equal(t, "mailto:user@domain.com", resolveLinkRelative(ctx, "/a", "", "mailto:user@domain.com", false))
assert.Equal(t, "http://other/path/", resolveLinkRelative(ctx, "/a", "", "http://other/path/", false))
// some users might have used absolute paths a lot, so if the prefix overlaps and has enough slashes, we should tolerate it
assert.Equal(t, "/owner/repo/foo/owner/repo/foo/bar/xxx", resolveLinkRelative(ctx, "/owner/repo/foo", "", "/owner/repo/foo/bar/xxx", false))
assert.Equal(t, "/owner/repo/foo/bar/xxx", resolveLinkRelative(ctx, "/owner/repo/foo/bar", "", "/owner/repo/foo/bar/xxx", false))

View File

@ -13,7 +13,6 @@ import (
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// Response is the structure of JSON returned from API
@ -24,17 +23,16 @@ type Response struct {
ErrorCodes []ErrorCode `json:"error-codes"`
}
const apiURL = "api/siteverify"
// Verify calls Google Recaptcha API to verify token
func Verify(ctx context.Context, response string) (bool, error) {
post := url.Values{
"secret": {setting.Service.RecaptchaSecret},
"response": {response},
}
reqURL := strings.TrimSuffix(setting.Service.RecaptchaURL, "/") + "/api/siteverify"
// Basically a copy of http.PostForm, but with a context
req, err := http.NewRequestWithContext(ctx, http.MethodPost,
util.URLJoin(setting.Service.RecaptchaURL, apiURL), strings.NewReader(post.Encode()))
req, err := http.NewRequestWithContext(ctx, http.MethodPost, reqURL, strings.NewReader(post.Encode()))
if err != nil {
return false, fmt.Errorf("Failed to create CAPTCHA request: %w", err)
}

View File

@ -15,7 +15,6 @@ import (
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
)
// Scheme describes protocol types
@ -163,7 +162,7 @@ func MakeManifestData(appName, appURL, absoluteAssetURL string) []byte {
}
// MakeAbsoluteAssetURL returns the absolute asset url prefix without a trailing slash
func MakeAbsoluteAssetURL(appURL, staticURLPrefix string) string {
func MakeAbsoluteAssetURL(appURL *url.URL, staticURLPrefix string) string {
parsedPrefix, err := url.Parse(strings.TrimSuffix(staticURLPrefix, "/"))
if err != nil {
log.Fatal("Unable to parse STATIC_URL_PREFIX: %v", err)
@ -171,11 +170,12 @@ func MakeAbsoluteAssetURL(appURL, staticURLPrefix string) string {
if err == nil && parsedPrefix.Hostname() == "" {
if staticURLPrefix == "" {
return strings.TrimSuffix(appURL, "/")
return strings.TrimSuffix(appURL.String(), "/")
}
// StaticURLPrefix is just a path
return util.URLJoin(appURL, strings.TrimSuffix(staticURLPrefix, "/"))
appHostURL := &url.URL{Scheme: appURL.Scheme, Host: appURL.Host}
return appHostURL.String() + "/" + strings.Trim(staticURLPrefix, "/")
}
return strings.TrimSuffix(staticURLPrefix, "/")
@ -316,7 +316,7 @@ func loadServerFrom(rootCfg ConfigProvider) {
Domain = urlHostname
}
AbsoluteAssetURL = MakeAbsoluteAssetURL(AppURL, StaticURLPrefix)
AbsoluteAssetURL = MakeAbsoluteAssetURL(appURL, StaticURLPrefix)
AssetVersion = strings.ReplaceAll(AppVer, "+", "~") // make sure the version string is clear (no real escaping is needed)
manifestBytes := MakeManifestData(AppName, AppURL, AbsoluteAssetURL)

View File

@ -4,6 +4,7 @@
package setting
import (
"net/url"
"testing"
"code.gitea.io/gitea/modules/json"
@ -12,18 +13,26 @@ import (
)
func TestMakeAbsoluteAssetURL(t *testing.T) {
assert.Equal(t, "https://localhost:2345", MakeAbsoluteAssetURL("https://localhost:1234", "https://localhost:2345"))
assert.Equal(t, "https://localhost:2345", MakeAbsoluteAssetURL("https://localhost:1234/", "https://localhost:2345"))
assert.Equal(t, "https://localhost:2345", MakeAbsoluteAssetURL("https://localhost:1234/", "https://localhost:2345/"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234", "/foo"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/", "/foo"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/", "/foo/"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/foo", "/foo"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/foo/", "/foo"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/foo/", "/foo/"))
assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL("https://localhost:1234/foo", "/bar"))
assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL("https://localhost:1234/foo/", "/bar"))
assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL("https://localhost:1234/foo/", "/bar/"))
appURL1, _ := url.Parse("https://localhost:1234")
appURL2, _ := url.Parse("https://localhost:1234/")
appURLSub1, _ := url.Parse("https://localhost:1234/foo")
appURLSub2, _ := url.Parse("https://localhost:1234/foo/")
// static URL is an absolute URL, so should be used
assert.Equal(t, "https://localhost:2345", MakeAbsoluteAssetURL(appURL1, "https://localhost:2345"))
assert.Equal(t, "https://localhost:2345", MakeAbsoluteAssetURL(appURL1, "https://localhost:2345/"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL(appURL1, "/foo"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL(appURL2, "/foo"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL(appURL1, "/foo/"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL(appURLSub1, "/foo"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL(appURLSub2, "/foo"))
assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL(appURLSub1, "/foo/"))
assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL(appURLSub1, "/bar"))
assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL(appURLSub2, "/bar"))
assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL(appURLSub1, "/bar/"))
}
func TestMakeManifestData(t *testing.T) {

View File

@ -25,7 +25,6 @@ var UI = struct {
ReactionMaxUserNum int
MaxDisplayFileSize int64
ShowUserEmail bool
DefaultShowFullName bool
DefaultTheme string
Themes []string
FileIconTheme string
@ -43,6 +42,15 @@ var UI = struct {
AmbiguousUnicodeDetection bool
// TODO: DefaultShowFullName is introduced by https://github.com/go-gitea/gitea/pull/6710
// But there are still many edge cases:
// * Many places still use "username", not respecting this setting
// * Many places use "Full Name" if it is not empty, cause inconsistent UI for users who have set their full name but some others don't
// * Even if DefaultShowFullName=false, many places still need to show the full name
// For most cases, either "username" or "username (Full Name)" should be used and are good enough.
// Only in very few cases (e.g.: unimportant lists, narrow layout), "username" or "Full Name" can be used.
DefaultShowFullName bool
Notification struct {
MinTimeout time.Duration
TimeoutStep time.Duration

View File

@ -5,6 +5,7 @@ package storage
import (
"context"
"errors"
"fmt"
"io"
"net/url"
@ -27,25 +28,32 @@ type LocalStorage struct {
// NewLocalStorage returns a local files
func NewLocalStorage(ctx context.Context, config *setting.Storage) (ObjectStorage, error) {
// prepare storage root path
if !filepath.IsAbs(config.Path) {
return nil, fmt.Errorf("LocalStorageConfig.Path should have been prepared by setting/storage.go and should be an absolute path, but not: %q", config.Path)
}
log.Info("Creating new Local Storage at %s", config.Path)
if err := os.MkdirAll(config.Path, os.ModePerm); err != nil {
return nil, err
return nil, fmt.Errorf("LocalStorage config.Path should have been prepared by setting/storage.go and should be an absolute path, but not: %q", config.Path)
}
storageRoot := util.FilePathJoinAbs(config.Path)
if config.TemporaryPath == "" {
config.TemporaryPath = filepath.Join(config.Path, "tmp")
// prepare storage temporary path
storageTmp := config.TemporaryPath
if storageTmp == "" {
storageTmp = filepath.Join(storageRoot, "tmp")
}
if !filepath.IsAbs(config.TemporaryPath) {
return nil, fmt.Errorf("LocalStorageConfig.TemporaryPath should be an absolute path, but not: %q", config.TemporaryPath)
if !filepath.IsAbs(storageTmp) {
return nil, fmt.Errorf("LocalStorage config.TemporaryPath should be an absolute path, but not: %q", config.TemporaryPath)
}
storageTmp = util.FilePathJoinAbs(storageTmp)
// create the storage root if not exist
log.Info("Creating new Local Storage at %s", storageRoot)
if err := os.MkdirAll(storageRoot, os.ModePerm); err != nil {
return nil, err
}
return &LocalStorage{
ctx: ctx,
dir: config.Path,
tmpdir: config.TemporaryPath,
dir: storageRoot,
tmpdir: storageTmp,
}, nil
}
@ -108,9 +116,21 @@ func (l *LocalStorage) Stat(path string) (os.FileInfo, error) {
return os.Stat(l.buildLocalPath(path))
}
// Delete delete a file
// deleteEmptyParentDirs walks upward from the deleted file's directory and
// removes every directory that has become empty, stopping before the storage
// root (l.dir) so the root itself is never removed.
func (l *LocalStorage) deleteEmptyParentDirs(localFullPath string) {
	parent := filepath.Dir(localFullPath)
	for len(parent) > len(l.dir) {
		// os.Remove only succeeds on empty directories; the first failure
		// (non-empty directory or any other error) ends the cleanup.
		// Since the target file has already been deleted, such an error is
		// unrelated to the file deletion itself and is intentionally ignored.
		if err := os.Remove(parent); err != nil {
			break
		}
		parent = filepath.Dir(parent)
	}
}
// Delete deletes the file in storage and removes the empty parent directories (if possible)
func (l *LocalStorage) Delete(path string) error {
return util.Remove(l.buildLocalPath(path))
localFullPath := l.buildLocalPath(path)
err := util.Remove(localFullPath)
l.deleteEmptyParentDirs(localFullPath)
return err
}
// URL gets the redirect URL to a file
@ -118,34 +138,38 @@ func (l *LocalStorage) URL(path, name, _ string, reqParams url.Values) (*url.URL
return nil, ErrURLNotSupported
}
// normalizeWalkError filters out "not exist" errors produced during a
// directory walk: an entry may be deleted concurrently while the walk is in
// progress, which is expected and must not abort the iteration.
func (l *LocalStorage) normalizeWalkError(err error) error {
	if err == nil || errors.Is(err, os.ErrNotExist) {
		// nothing to report: either no error, or the file vanished mid-walk
		return nil
	}
	return err
}
// IterateObjects iterates across the objects in the local storage
func (l *LocalStorage) IterateObjects(dirName string, fn func(path string, obj Object) error) error {
dir := l.buildLocalPath(dirName)
return filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return filepath.WalkDir(dir, func(path string, d os.DirEntry, errWalk error) error {
if err := l.ctx.Err(); err != nil {
return err
}
select {
case <-l.ctx.Done():
return l.ctx.Err()
default:
if errWalk != nil {
return l.normalizeWalkError(errWalk)
}
if path == l.dir {
return nil
}
if d.IsDir() {
if path == l.dir || d.IsDir() {
return nil
}
relPath, err := filepath.Rel(l.dir, path)
if err != nil {
return err
return l.normalizeWalkError(err)
}
obj, err := os.Open(path)
if err != nil {
return err
return l.normalizeWalkError(err)
}
defer obj.Close()
return fn(relPath, obj)
return fn(filepath.ToSlash(relPath), obj)
})
}

View File

@ -4,11 +4,14 @@
package storage
import (
"os"
"strings"
"testing"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestBuildLocalPath(t *testing.T) {
@ -53,6 +56,49 @@ func TestBuildLocalPath(t *testing.T) {
}
}
// TestLocalStorageDelete verifies that Delete removes the target file and then
// prunes parent directories that have become empty, while the storage root
// itself is never removed.
func TestLocalStorageDelete(t *testing.T) {
	rootDir := t.TempDir()
	st, err := NewLocalStorage(t.Context(), &setting.Storage{Path: rootDir})
	require.NoError(t, err)

	// assertExists checks the presence (or absence) of a path relative to the
	// storage root directly on the filesystem.
	assertExists := func(t *testing.T, path string, exists bool) {
		_, err = os.Stat(rootDir + "/" + path)
		if exists {
			require.NoError(t, err)
		} else {
			require.ErrorIs(t, err, os.ErrNotExist)
		}
	}

	// create three files across two sibling subdirectories
	_, err = st.Save("dir/sub1/1-a.txt", strings.NewReader(""), -1)
	require.NoError(t, err)
	_, err = st.Save("dir/sub1/1-b.txt", strings.NewReader(""), -1)
	require.NoError(t, err)
	_, err = st.Save("dir/sub2/2-a.txt", strings.NewReader(""), -1)
	require.NoError(t, err)
	assertExists(t, "dir/sub1/1-a.txt", true)
	assertExists(t, "dir/sub1/1-b.txt", true)
	assertExists(t, "dir/sub2/2-a.txt", true)

	// deleting one file keeps its parent directory, which still holds a sibling
	require.NoError(t, st.Delete("dir/sub1/1-a.txt"))
	assertExists(t, "dir/sub1", true)
	assertExists(t, "dir/sub1/1-a.txt", false)
	assertExists(t, "dir/sub1/1-b.txt", true)
	assertExists(t, "dir/sub2/2-a.txt", true)

	// deleting the last file of "dir/sub1" removes the now-empty directory,
	// but "dir" survives because "dir/sub2" is still non-empty
	require.NoError(t, st.Delete("dir/sub1/1-b.txt"))
	assertExists(t, ".", true)
	assertExists(t, "dir/sub1", false)
	assertExists(t, "dir/sub1/1-a.txt", false)
	assertExists(t, "dir/sub1/1-b.txt", false)
	assertExists(t, "dir/sub2/2-a.txt", true)

	// deleting the final file prunes the whole empty "dir" tree; the storage
	// root (".") is kept
	require.NoError(t, st.Delete("dir/sub2/2-a.txt"))
	assertExists(t, ".", true)
	assertExists(t, "dir", false)
}
// TestLocalStorageIterator runs the shared storage-iterator test suite against
// a local storage rooted in a fresh temporary directory.
func TestLocalStorageIterator(t *testing.T) {
	testStorageIterator(t, setting.LocalStorageType, &setting.Storage{Path: t.TempDir()})
}

View File

@ -68,7 +68,12 @@ type ObjectStorage interface {
Stat(path string) (os.FileInfo, error)
Delete(path string) error
URL(path, name, method string, reqParams url.Values) (*url.URL, error)
IterateObjects(path string, iterator func(path string, obj Object) error) error
// IterateObjects calls the iterator function for each object in the storage with the given path as prefix
// The "fullPath" argument in callback is the full path in this storage.
// * IterateObjects("", ...): iterate all objects in this storage
// * IterateObjects("sub-path", ...): iterate all objects with "sub-path" as prefix in this storage, the "fullPath" will be like "sub-path/xxx"
IterateObjects(basePath string, iterator func(fullPath string, obj Object) error) error
}
// Copy copies a file from source ObjectStorage to dest ObjectStorage

View File

@ -8,13 +8,32 @@ import (
"html/template"
"path"
"strings"
"sync"
gitea_html "code.gitea.io/gitea/modules/htmlutil"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/public"
)
var svgIcons map[string]string
type svgIconItem struct {
html string
mocking bool
}
type svgCacheKey struct {
icon string
size int
class string
}
var (
svgIcons map[string]svgIconItem
svgCacheMu sync.Mutex
svgCache sync.Map
svgCacheCount int
svgCacheLimit = 10000
)
const defaultSize = 16
@ -26,7 +45,7 @@ func Init() error {
return err
}
svgIcons = make(map[string]string, len(files))
svgIcons = make(map[string]svgIconItem, len(files))
for _, file := range files {
if path.Ext(file) != ".svg" {
continue
@ -35,7 +54,7 @@ func Init() error {
if err != nil {
log.Error("Failed to read SVG file %s: %v", file, err)
} else {
svgIcons[file[:len(file)-4]] = string(Normalize(bs, defaultSize))
svgIcons[file[:len(file)-4]] = svgIconItem{html: string(Normalize(bs, defaultSize))}
}
}
return nil
@ -43,10 +62,13 @@ func Init() error {
func MockIcon(icon string) func() {
if svgIcons == nil {
svgIcons = make(map[string]string)
svgIcons = make(map[string]svgIconItem)
}
orig, exist := svgIcons[icon]
svgIcons[icon] = fmt.Sprintf(`<svg class="svg %s" width="%d" height="%d"></svg>`, icon, defaultSize, defaultSize)
svgIcons[icon] = svgIconItem{
html: fmt.Sprintf(`<svg class="svg %s" width="%d" height="%d"></svg>`, icon, defaultSize, defaultSize),
mocking: true,
}
return func() {
if exist {
svgIcons[icon] = orig
@ -58,11 +80,28 @@ func MockIcon(icon string) func() {
// RenderHTML renders icons - arguments icon name (string), size (int), class (string)
func RenderHTML(icon string, others ...any) template.HTML {
result, _ := renderHTML(icon, others...)
return result
}
func renderHTML(icon string, others ...any) (_ template.HTML, usingCache bool) {
if icon == "" {
return ""
return "", false
}
size, class := gitea_html.ParseSizeAndClass(defaultSize, "", others...)
if svgStr, ok := svgIcons[icon]; ok {
if svgItem, ok := svgIcons[icon]; ok {
svgStr := svgItem.html
// fast path for default size and no classes
if size == defaultSize && class == "" {
return template.HTML(svgStr), false
}
cacheKey := svgCacheKey{icon, size, class}
cachedHTML, cached := svgCache.Load(cacheKey)
if cached && !svgItem.mocking {
return cachedHTML.(template.HTML), true
}
// the code is somewhat hacky, but it just works, because the SVG contents are all normalized
if size != defaultSize {
svgStr = strings.Replace(svgStr, fmt.Sprintf(`width="%d"`, defaultSize), fmt.Sprintf(`width="%d"`, size), 1)
@ -71,8 +110,24 @@ func RenderHTML(icon string, others ...any) template.HTML {
if class != "" {
svgStr = strings.Replace(svgStr, `class="`, fmt.Sprintf(`class="%s `, class), 1)
}
return template.HTML(svgStr)
result := template.HTML(svgStr)
if !svgItem.mocking {
// no need to double-check, the rendering is fast enough and the cache is just an optimization
svgCacheMu.Lock()
if svgCacheCount >= svgCacheLimit {
svgCache.Clear()
svgCacheCount = 0
}
svgCacheCount++
svgCache.Store(cacheKey, result)
svgCacheMu.Unlock()
}
return result, false
}
// during test (or something wrong happens), there is no SVG loaded, so use a dummy span to tell that the icon is missing
return template.HTML(fmt.Sprintf("<span>%s(%d/%s)</span>", template.HTMLEscapeString(icon), size, template.HTMLEscapeString(class)))
dummy := template.HTML(fmt.Sprintf("<span>%s(%d/%s)</span>", template.HTMLEscapeString(icon), size, template.HTMLEscapeString(class)))
return dummy, false
}

54
modules/svg/svg_test.go Normal file
View File

@ -0,0 +1,54 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package svg
import (
"testing"
"code.gitea.io/gitea/modules/test"
"github.com/stretchr/testify/assert"
)
func TestRenderHTMLCache(t *testing.T) {
	const svgRealContent = "RealContent"
	svgIcons = map[string]svgIconItem{
		"test": {html: `<svg class="svg test" width="16" height="16">` + svgRealContent + `</svg>`},
	}

	// rendering with default parameters never goes through the cache
	_, cached := renderHTML("test")
	assert.False(t, cached)
	_, cached = renderHTML("test")
	assert.False(t, cached)

	// a non-default size is cached after the first rendering
	_, cached = renderHTML("test", 24)
	assert.False(t, cached)
	_, cached = renderHTML("test", 24)
	assert.True(t, cached)

	// a mocked icon must bypass the cache entirely
	restore := MockIcon("test")
	mockedHTML, cached := renderHTML("test", 24)
	assert.False(t, cached)
	assert.NotContains(t, mockedHTML, svgRealContent)
	restore()

	realHTML, cached := renderHTML("test", 24)
	assert.True(t, cached)
	assert.Contains(t, realHTML, svgRealContent)

	t.Run("CacheWithLimit", func(t *testing.T) {
		assert.NotZero(t, svgCacheCount)
		const testLimit = 3
		defer test.MockVariableValue(&svgCacheLimit, testLimit)()
		for i := range 10 {
			_, cached = renderHTML("test", 100+i)
			assert.False(t, cached)
			_, cached = renderHTML("test", 100+i)
			assert.True(t, cached)
			assert.LessOrEqual(t, svgCacheCount, testLimit)
		}
	})
}

View File

@ -37,7 +37,6 @@ func NewFuncMap() template.FuncMap {
"QueryEscape": queryEscape,
"QueryBuild": QueryBuild,
"SanitizeHTML": SanitizeHTML,
"URLJoin": util.URLJoin,
"DotEscape": dotEscape,
"PathEscape": url.PathEscape,
@ -96,9 +95,6 @@ func NewFuncMap() template.FuncMap {
"AssetVersion": func() string {
return setting.AssetVersion
},
"DefaultShowFullName": func() bool {
return setting.UI.DefaultShowFullName
},
"ShowFooterTemplateLoadTime": func() bool {
return setting.Other.ShowFooterTemplateLoadTime
},
@ -144,7 +140,7 @@ func NewFuncMap() template.FuncMap {
"ReactionToEmoji": reactionToEmoji,
// -----------------------------------------------------------------
// misc
// misc (TODO: move them to MiscUtils to avoid bloating the main func map)
"ShortSha": base.ShortSha,
"ActionContent2Commits": ActionContent2Commits,
"IsMultilineCommitMessage": isMultilineCommitMessage,

View File

@ -89,7 +89,7 @@ func (p *templateErrorPrettier) handleGenericTemplateError(err error) string {
return ""
}
tmplName, lineStr, message := groups[1], groups[2], groups[3]
return p.makeDetailedError(message, tmplName, lineStr, -1, "")
return p.makeDetailedError(message, tmplName, lineStr, "", "")
}
var reFuncNotDefinedError = regexp.MustCompile(`^template: (.*):([0-9]+): (function "(.*)" not defined)`)
@ -101,7 +101,7 @@ func (p *templateErrorPrettier) handleFuncNotDefinedError(err error) string {
}
tmplName, lineStr, message, funcName := groups[1], groups[2], groups[3], groups[4]
funcName, _ = strconv.Unquote(`"` + funcName + `"`)
return p.makeDetailedError(message, tmplName, lineStr, -1, funcName)
return p.makeDetailedError(message, tmplName, lineStr, "", funcName)
}
var reUnexpectedOperandError = regexp.MustCompile(`^template: (.*):([0-9]+): (unexpected "(.*)" in operand)`)
@ -113,7 +113,7 @@ func (p *templateErrorPrettier) handleUnexpectedOperandError(err error) string {
}
tmplName, lineStr, message, unexpected := groups[1], groups[2], groups[3], groups[4]
unexpected, _ = strconv.Unquote(`"` + unexpected + `"`)
return p.makeDetailedError(message, tmplName, lineStr, -1, unexpected)
return p.makeDetailedError(message, tmplName, lineStr, "", unexpected)
}
var reExpectedEndError = regexp.MustCompile(`^template: (.*):([0-9]+): (expected end; found (.*))`)
@ -124,7 +124,7 @@ func (p *templateErrorPrettier) handleExpectedEndError(err error) string {
return ""
}
tmplName, lineStr, message, unexpected := groups[1], groups[2], groups[3], groups[4]
return p.makeDetailedError(message, tmplName, lineStr, -1, unexpected)
return p.makeDetailedError(message, tmplName, lineStr, "", unexpected)
}
var (
@ -154,20 +154,20 @@ func HandleTemplateRenderingError(err error) string {
const dashSeparator = "----------------------------------------------------------------------"
func (p *templateErrorPrettier) makeDetailedError(errMsg, tmplName string, lineNum, posNum any, target string) string {
func (p *templateErrorPrettier) makeDetailedError(errMsg, tmplName, lineNumStr, posNumStr, target string) string {
code, layer, err := p.assets.ReadLayeredFile(tmplName + ".tmpl")
if err != nil {
return fmt.Sprintf("template error: %s, and unable to find template file %q", errMsg, tmplName)
}
line, err := util.ToInt64(lineNum)
line, err := strconv.Atoi(lineNumStr)
if err != nil {
return fmt.Sprintf("template error: %s, unable to parse template %q line number %q", errMsg, tmplName, lineNum)
return fmt.Sprintf("template error: %s, unable to parse template %q line number %s", errMsg, tmplName, lineNumStr)
}
pos, err := util.ToInt64(posNum)
pos, err := strconv.Atoi(util.IfZero(posNumStr, "-1"))
if err != nil {
return fmt.Sprintf("template error: %s, unable to parse template %q pos number %q", errMsg, tmplName, posNum)
return fmt.Sprintf("template error: %s, unable to parse template %q pos number %s", errMsg, tmplName, posNumStr)
}
detail := extractErrorLine(code, int(line), int(pos), target)
detail := extractErrorLine(code, line, pos, target)
var msg string
if pos >= 0 {

View File

@ -16,6 +16,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
gitea_html "code.gitea.io/gitea/modules/htmlutil"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
type AvatarUtils struct {
@ -29,13 +30,9 @@ func NewAvatarUtils(ctx context.Context) *AvatarUtils {
// AvatarHTML creates the HTML for an avatar
func AvatarHTML(src string, size int, class, name string) template.HTML {
sizeStr := strconv.Itoa(size)
if name == "" {
name = "avatar"
}
name = util.IfZero(name, "avatar")
// use empty alt, otherwise if the image fails to load, the width will follow the "alt" text's width
return template.HTML(`<img loading="lazy" alt class="` + class + `" src="` + src + `" title="` + html.EscapeString(name) + `" width="` + sizeStr + `" height="` + sizeStr + `"/>`)
return template.HTML(`<img loading="lazy" alt class="` + html.EscapeString(class) + `" src="` + html.EscapeString(src) + `" title="` + html.EscapeString(name) + `" width="` + sizeStr + `" height="` + sizeStr + `">`)
}
// Avatar renders user avatars. args: user, size (int), class (string)

View File

@ -14,10 +14,12 @@ import (
activities_model "code.gitea.io/gitea/models/activities"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/svg"
"github.com/editorconfig/editorconfig-core-go/v2"
@ -185,3 +187,49 @@ func tabSizeClass(ec *editorconfig.Editorconfig, filename string) string {
}
return "tab-size-4"
}
// MiscUtils provides miscellaneous template helper functions bound to a request context.
type MiscUtils struct {
	ctx context.Context
}

// NewMiscUtils creates a MiscUtils bound to the given request context.
func NewMiscUtils(ctx context.Context) *MiscUtils {
	return &MiscUtils{ctx: ctx}
}

// MarkdownEditorContext carries the endpoints and mode used by the markdown editor component.
type MarkdownEditorContext struct {
	PreviewMode string // "comment", "wiki", or empty for general
	PreviewContext string // the path for resolving the links in the preview (repo preview already has default correct value)
	PreviewLink string // endpoint that renders the markdown preview (e.g. ".../markup")
	MentionsLink string // endpoint for "@" mention lookup (".../-/mentions-in-repo" or ".../-/mentions-in-owner")
}
// MarkdownEditorComment returns the markdown editor context for commenting in a repository.
// It returns nil when no repository is given.
func (m *MiscUtils) MarkdownEditorComment(repo *repo_model.Repository) *MarkdownEditorContext {
	if repo == nil {
		return nil
	}
	repoLink := repo.Link()
	return &MarkdownEditorContext{
		PreviewMode:  "comment",
		PreviewLink:  repoLink + "/markup",
		MentionsLink: repoLink + "/-/mentions-in-repo",
	}
}
// MarkdownEditorWiki returns the markdown editor context for editing a repository wiki.
// It returns nil when no repository is given.
func (m *MiscUtils) MarkdownEditorWiki(repo *repo_model.Repository) *MarkdownEditorContext {
	if repo == nil {
		return nil
	}
	repoLink := repo.Link()
	return &MarkdownEditorContext{
		PreviewMode:  "wiki",
		PreviewLink:  repoLink + "/markup",
		MentionsLink: repoLink + "/-/mentions-in-repo",
	}
}
// MarkdownEditorGeneral returns the markdown editor context for general (non-repo) usage.
// When an owner is given, link resolution and mention lookup are scoped to that owner.
func (m *MiscUtils) MarkdownEditorGeneral(owner *user_model.User) *MarkdownEditorContext {
	editorCtx := &MarkdownEditorContext{PreviewLink: setting.AppSubURL + "/-/markup"}
	if owner == nil {
		return editorCtx
	}
	home := owner.HomeLink()
	editorCtx.PreviewContext = home
	editorCtx.MentionsLink = home + "/-/mentions-in-owner"
	return editorCtx
}

View File

@ -75,19 +75,21 @@ const filepathSeparator = string(os.PathSeparator)
// {`/foo`, ``, `bar`} => `/foo/bar`
// {`/foo`, `..`, `bar`} => `/foo/bar`
func FilePathJoinAbs(base string, sub ...string) string {
elems := make([]string, 1, len(sub)+1)
// POSIX filesystem can have `\` in file names. Windows: `\` and `/` are both used for path separators
// to keep the behavior consistent, we do not allow `\` in file names, replace all `\` with `/`
if isOSWindows() {
elems[0] = filepath.Clean(base)
} else {
elems[0] = filepath.Clean(strings.ReplaceAll(base, "\\", filepathSeparator))
if !isOSWindows() {
base = strings.ReplaceAll(base, "\\", filepathSeparator)
}
if !filepath.IsAbs(elems[0]) {
// This shouldn't happen. If there is really necessary to pass in relative path, return the full path with filepath.Abs() instead
panic(fmt.Sprintf("FilePathJoinAbs: %q (for path %v) is not absolute, do not guess a relative path based on current working directory", elems[0], elems))
if !filepath.IsAbs(base) {
// This shouldn't happen. If it is really necessary to handle relative paths, use filepath.Abs() to get absolute paths first
panic(fmt.Sprintf("FilePathJoinAbs: %q (for path %v) is not absolute, do not guess a relative path based on current working directory", base, sub))
}
if len(sub) == 0 {
return filepath.Clean(base)
}
elems := make([]string, 1, len(sub)+1)
elems[0] = base
for _, s := range sub {
if s == "" {
continue
@ -98,7 +100,7 @@ func FilePathJoinAbs(base string, sub ...string) string {
elems = append(elems, filepath.Clean(filepathSeparator+strings.ReplaceAll(s, "\\", filepathSeparator)))
}
}
// the elems[0] must be an absolute path, just join them together
// the elems[0] must be an absolute path, just join them together, and Join will also do Clean
return filepath.Join(elems...)
}

View File

@ -5,7 +5,6 @@ package util
import (
"net/url"
"path"
"strings"
)
@ -19,27 +18,6 @@ func PathEscapeSegments(path string) string {
return escapedPath
}
// URLJoin joins url components, like path.Join, but preserving contents
func URLJoin(base string, elems ...string) string {
if !strings.HasSuffix(base, "/") {
base += "/"
}
baseURL, err := url.Parse(base)
if err != nil {
return ""
}
joinedPath := path.Join(elems...)
argURL, err := url.Parse(joinedPath)
if err != nil {
return ""
}
joinedURL := baseURL.ResolveReference(argURL).String()
if !baseURL.IsAbs() && !strings.HasPrefix(base, "/") {
return joinedURL[1:] // Removing leading '/' if needed
}
return joinedURL
}
func SanitizeURL(s string) (string, error) {
u, err := url.Parse(s)
if err != nil {

View File

@ -11,39 +11,6 @@ import (
"github.com/stretchr/testify/assert"
)
func TestURLJoin(t *testing.T) {
type test struct {
Expected string
Base string
Elements []string
}
newTest := func(expected, base string, elements ...string) test {
return test{Expected: expected, Base: base, Elements: elements}
}
for _, test := range []test{
newTest("https://try.gitea.io/a/b/c",
"https://try.gitea.io", "a/b", "c"),
newTest("https://try.gitea.io/a/b/c",
"https://try.gitea.io/", "/a/b/", "/c/"),
newTest("https://try.gitea.io/a/c",
"https://try.gitea.io/", "/a/./b/", "../c/"),
newTest("a/b/c",
"a", "b/c/"),
newTest("a/b/d",
"a/", "b/c/", "/../d/"),
newTest("https://try.gitea.io/a/b/c#d",
"https://try.gitea.io", "a/b", "c#d"),
newTest("/a/b/d",
"/a/", "b/c/", "/../d/"),
newTest("/a/b/c",
"/a", "b/c/"),
newTest("/a/b/c#hash",
"/a", "b/c#hash"),
} {
assert.Equal(t, test.Expected, URLJoin(test.Base, test.Elements...))
}
}
func TestIsEmptyString(t *testing.T) {
cases := []struct {
s string

View File

@ -70,7 +70,8 @@ func preCheckHandler(fn reflect.Value, argsIn []reflect.Value) {
func prepareHandleArgsIn(resp http.ResponseWriter, req *http.Request, fn reflect.Value, fnInfo *routing.FuncInfo) []reflect.Value {
defer func() {
if err := recover(); err != nil {
if recovered := recover(); recovered != nil {
err := fmt.Errorf("%v\n%s", recovered, log.Stack(2))
log.Error("unable to prepare handler arguments for %s: %v", fnInfo.String(), err)
panic(err)
}
@ -117,7 +118,17 @@ func hasResponseBeenWritten(argsIn []reflect.Value) bool {
return false
}
func wrapHandlerProvider[T http.Handler](hp func(next http.Handler) T, funcInfo *routing.FuncInfo) func(next http.Handler) http.Handler {
type middlewareProvider = func(next http.Handler) http.Handler
func executeMiddlewaresHandler(w http.ResponseWriter, r *http.Request, middlewares []middlewareProvider, endpoint http.HandlerFunc) {
handler := endpoint
for i := len(middlewares) - 1; i >= 0; i-- {
handler = middlewares[i](handler).ServeHTTP
}
handler(w, r)
}
func wrapHandlerProvider[T http.Handler](hp func(next http.Handler) T, funcInfo *routing.FuncInfo) middlewareProvider {
return func(next http.Handler) http.Handler {
h := hp(next) // this handle could be dynamically generated, so we can't use it for debug info
return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
@ -129,14 +140,14 @@ func wrapHandlerProvider[T http.Handler](hp func(next http.Handler) T, funcInfo
// toHandlerProvider converts a handler to a handler provider
// A handler provider is a function that takes a "next" http.Handler, it can be used as a middleware
func toHandlerProvider(handler any) func(next http.Handler) http.Handler {
func toHandlerProvider(handler any) middlewareProvider {
funcInfo := routing.GetFuncInfo(handler)
fn := reflect.ValueOf(handler)
if fn.Type().Kind() != reflect.Func {
panic(fmt.Sprintf("handler must be a function, but got %s", fn.Type()))
}
if hp, ok := handler.(func(next http.Handler) http.Handler); ok {
if hp, ok := handler.(middlewareProvider); ok {
return wrapHandlerProvider(hp, funcInfo)
} else if hp, ok := handler.(func(http.Handler) http.HandlerFunc); ok {
return wrapHandlerProvider(hp, funcInfo)

View File

@ -18,6 +18,13 @@ import (
"github.com/go-chi/chi/v5"
)
// PreMiddlewareProvider is a special middleware provider which will be executed
// before other middlewares on the same "routing" level (AfterRouting/Group/Methods/Any, but not BeforeRouting).
// A route can do something (e.g.: set middleware options) at the place where it is declared,
// and the code will be executed before other middlewares which are added before the declaration.
// Use cases: mark a route with some meta info, set some options for middlewares, etc.
type PreMiddlewareProvider func(next http.Handler) http.Handler
// Bind binding an obj to a handler's context data
func Bind[T any](_ T) http.HandlerFunc {
return func(resp http.ResponseWriter, req *http.Request) {
@ -41,7 +48,10 @@ func GetForm(dataStore reqctx.RequestDataStore) any {
// Router defines a route based on chi's router
type Router struct {
chiRouter *chi.Mux
chiRouter *chi.Mux
afterRouting []any
curGroupPrefix string
curMiddlewares []any
}
@ -52,8 +62,9 @@ func NewRouter() *Router {
return &Router{chiRouter: r}
}
// Use supports two middlewares
func (r *Router) Use(middlewares ...any) {
// BeforeRouting adds middlewares which will be executed before the request path gets routed
// It should only be used for framework-level global middlewares when it needs to change request method & path.
func (r *Router) BeforeRouting(middlewares ...any) {
for _, m := range middlewares {
if !isNilOrFuncNil(m) {
r.chiRouter.Use(toHandlerProvider(m))
@ -61,7 +72,13 @@ func (r *Router) Use(middlewares ...any) {
}
}
// Group mounts a sub-Router along a `pattern` string.
// AfterRouting adds middlewares which will be executed after the request path gets routed.
// Unlike BeforeRouting middlewares, they can see the routed path and the resolved path parameters.
func (r *Router) AfterRouting(middlewares ...any) {
	r.afterRouting = append(r.afterRouting, middlewares...)
}
// Group mounts a sub-router along a "pattern" string.
func (r *Router) Group(pattern string, fn func(), middlewares ...any) {
previousGroupPrefix := r.curGroupPrefix
previousMiddlewares := r.curMiddlewares
@ -93,36 +110,54 @@ func isNilOrFuncNil(v any) bool {
return r.Kind() == reflect.Func && r.IsNil()
}
func wrapMiddlewareAndHandler(curMiddlewares, h []any) ([]func(http.Handler) http.Handler, http.HandlerFunc) {
handlerProviders := make([]func(http.Handler) http.Handler, 0, len(curMiddlewares)+len(h)+1)
for _, m := range curMiddlewares {
if !isNilOrFuncNil(m) {
handlerProviders = append(handlerProviders, toHandlerProvider(m))
// wrapMiddlewareAppendPre appends only the non-nil PreMiddlewareProvider entries
// from middlewares to all, wrapped as handler providers.
func wrapMiddlewareAppendPre(all []middlewareProvider, middlewares []any) []middlewareProvider {
	for _, item := range middlewares {
		pre, isPre := item.(PreMiddlewareProvider)
		if isPre && pre != nil {
			all = append(all, toHandlerProvider(middlewareProvider(pre)))
		}
	}
	return all
}

// wrapMiddlewareAppendNormal appends the non-nil, non-pre middleware entries
// from middlewares to all, wrapped as handler providers.
func wrapMiddlewareAppendNormal(all []middlewareProvider, middlewares []any) []middlewareProvider {
	for _, item := range middlewares {
		_, isPre := item.(PreMiddlewareProvider)
		if !isPre && !isNilOrFuncNil(item) {
			all = append(all, toHandlerProvider(item))
		}
	}
	return all
}
func wrapMiddlewareAndHandler(useMiddlewares, curMiddlewares, h []any) (_ []middlewareProvider, _ http.HandlerFunc, hasPreMiddlewares bool) {
if len(h) == 0 {
panic("no endpoint handler provided")
}
for i, m := range h {
if !isNilOrFuncNil(m) {
handlerProviders = append(handlerProviders, toHandlerProvider(m))
} else if i == len(h)-1 {
panic("endpoint handler can't be nil")
}
if isNilOrFuncNil(h[len(h)-1]) {
panic("endpoint handler can't be nil")
}
handlerProviders := make([]middlewareProvider, 0, len(useMiddlewares)+len(curMiddlewares)+len(h)+1)
handlerProviders = wrapMiddlewareAppendPre(handlerProviders, useMiddlewares)
handlerProviders = wrapMiddlewareAppendPre(handlerProviders, curMiddlewares)
handlerProviders = wrapMiddlewareAppendPre(handlerProviders, h)
hasPreMiddlewares = len(handlerProviders) > 0
handlerProviders = wrapMiddlewareAppendNormal(handlerProviders, useMiddlewares)
handlerProviders = wrapMiddlewareAppendNormal(handlerProviders, curMiddlewares)
handlerProviders = wrapMiddlewareAppendNormal(handlerProviders, h)
middlewares := handlerProviders[:len(handlerProviders)-1]
handlerFunc := handlerProviders[len(handlerProviders)-1](nil).ServeHTTP
mockPoint := RouterMockPoint(MockAfterMiddlewares)
if mockPoint != nil {
middlewares = append(middlewares, mockPoint)
}
return middlewares, handlerFunc
return middlewares, handlerFunc, hasPreMiddlewares
}
// Methods adds the same handlers for multiple http "methods" (separated by ",").
// If any method is invalid, the lower level router will panic.
func (r *Router) Methods(methods, pattern string, h ...any) {
middlewares, handlerFunc := wrapMiddlewareAndHandler(r.curMiddlewares, h)
middlewares, handlerFunc, _ := wrapMiddlewareAndHandler(r.afterRouting, r.curMiddlewares, h)
fullPattern := r.getPattern(pattern)
if strings.Contains(methods, ",") {
methods := strings.SplitSeq(methods, ",")
@ -134,15 +169,19 @@ func (r *Router) Methods(methods, pattern string, h ...any) {
}
}
// Mount attaches another Router along ./pattern/*
// Mount attaches another Router along "/pattern/*"
func (r *Router) Mount(pattern string, subRouter *Router) {
subRouter.Use(r.curMiddlewares...)
r.chiRouter.Mount(r.getPattern(pattern), subRouter.chiRouter)
handlerProviders := make([]middlewareProvider, 0, len(r.afterRouting)+len(r.curMiddlewares))
handlerProviders = wrapMiddlewareAppendPre(handlerProviders, r.afterRouting)
handlerProviders = wrapMiddlewareAppendPre(handlerProviders, r.curMiddlewares)
handlerProviders = wrapMiddlewareAppendNormal(handlerProviders, r.afterRouting)
handlerProviders = wrapMiddlewareAppendNormal(handlerProviders, r.curMiddlewares)
r.chiRouter.With(handlerProviders...).Mount(r.getPattern(pattern), subRouter.chiRouter)
}
// Any delegate requests for all methods
func (r *Router) Any(pattern string, h ...any) {
middlewares, handlerFunc := wrapMiddlewareAndHandler(r.curMiddlewares, h)
middlewares, handlerFunc, _ := wrapMiddlewareAndHandler(r.afterRouting, r.curMiddlewares, h)
r.chiRouter.With(middlewares...).HandleFunc(r.getPattern(pattern), handlerFunc)
}
@ -178,12 +217,16 @@ func (r *Router) Patch(pattern string, h ...any) {
// ServeHTTP implements http.Handler
func (r *Router) ServeHTTP(w http.ResponseWriter, req *http.Request) {
// TODO: need to move it to the top-level common middleware, otherwise each "Mount" will cause it to be executed multiple times, which is inefficient.
r.normalizeRequestPath(w, req, r.chiRouter)
}
// NotFound defines a handler to respond whenever a route could not be found.
func (r *Router) NotFound(h http.HandlerFunc) {
r.chiRouter.NotFound(h)
middlewares, handlerFunc, _ := wrapMiddlewareAndHandler(r.afterRouting, r.curMiddlewares, []any{h})
r.chiRouter.NotFound(func(w http.ResponseWriter, r *http.Request) {
executeMiddlewaresHandler(w, r, middlewares, handlerFunc)
})
}
func (r *Router) normalizeRequestPath(resp http.ResponseWriter, req *http.Request, next http.Handler) {

View File

@ -27,11 +27,7 @@ func (g *RouterPathGroup) ServeHTTP(resp http.ResponseWriter, req *http.Request)
for _, m := range g.matchers {
if m.matchPath(chiCtx, path) {
chiCtx.RoutePatterns = append(chiCtx.RoutePatterns, m.pattern)
handler := m.handlerFunc
for i := len(m.middlewares) - 1; i >= 0; i-- {
handler = m.middlewares[i](handler).ServeHTTP
}
handler(resp, req)
executeMiddlewaresHandler(resp, req, m.middlewares, m.handlerFunc)
return
}
}
@ -67,7 +63,7 @@ type routerPathMatcher struct {
pattern string
re *regexp.Regexp
params []routerPathParam
middlewares []func(http.Handler) http.Handler
middlewares []middlewareProvider
handlerFunc http.HandlerFunc
}
@ -111,7 +107,10 @@ func isValidMethod(name string) bool {
}
func newRouterPathMatcher(methods string, patternRegexp *RouterPathGroupPattern, h ...any) *routerPathMatcher {
middlewares, handlerFunc := wrapMiddlewareAndHandler(patternRegexp.middlewares, h)
middlewares, handlerFunc, hasPreMiddlewares := wrapMiddlewareAndHandler(nil, patternRegexp.middlewares, h)
if hasPreMiddlewares {
panic("pre-middlewares are not supported in router path matcher")
}
p := &routerPathMatcher{methods: make(container.Set[string]), middlewares: middlewares, handlerFunc: handlerFunc}
for method := range strings.SplitSeq(methods, ",") {
method = strings.TrimSpace(method)

View File

@ -30,6 +30,71 @@ func chiURLParamsToMap(chiCtx *chi.Context) map[string]string {
return util.Iif(len(m) == 0, nil, m)
}
// testResult records what a handled request observed: the HTTP method, the
// resolved path parameters, the marks of all executed handlers (in execution
// order), and the chi route pattern that matched (nil when not checked).
type testResult struct {
	method string
	pathParams map[string]string
	handlerMarks []string
	chiRoutePattern *string
}

// testRecorder collects a testResult while serving requests through a Router.
type testRecorder struct {
	res testResult
}

// reset clears the recorded result before serving the next request.
func (r *testRecorder) reset() {
	r.res = testResult{}
}
// handle returns a handler that records the request's method, path parameters
// and matched route pattern, appending the optional mark to the executed list.
func (r *testRecorder) handle(optMark ...string) func(resp http.ResponseWriter, req *http.Request) {
	mark := util.OptionalArg(optMark, "")
	return func(w http.ResponseWriter, req *http.Request) {
		routeCtx := chi.RouteContext(req.Context())
		pattern := routeCtx.RoutePattern()
		r.res.method = req.Method
		r.res.pathParams = chiURLParamsToMap(routeCtx)
		r.res.chiRoutePattern = &pattern
		if mark == "" {
			return
		}
		r.res.handlerMarks = append(r.res.handlerMarks, mark)
	}
}
// provider wraps handle as a middleware: it records the request (and mark),
// then passes control to the next handler in the chain.
func (r *testRecorder) provider(optMark ...string) func(next http.Handler) http.Handler {
	record := r.handle(optMark...)
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
			record(w, req)
			next.ServeHTTP(w, req)
		})
	}
}
// stop returns a handler that terminates the chain with a 200 response when the
// request's "stop" form value matches this handler's mark (or when the handler
// is unmarked); otherwise it only records its mark and lets the chain continue.
func (r *testRecorder) stop(optMark ...string) func(resp http.ResponseWriter, req *http.Request) {
	mark := util.OptionalArg(optMark, "")
	return func(w http.ResponseWriter, req *http.Request) {
		stopAt := req.FormValue("stop")
		shouldStop := stopAt != "" && (mark == "" || mark == stopAt)
		switch {
		case shouldStop:
			r.handle(stopAt)(w, req)
			w.WriteHeader(http.StatusOK)
		case mark != "":
			r.res.handlerMarks = append(r.res.handlerMarks, mark)
		}
	}
}
// test serves a "METHOD /path" request through rt and asserts the recorded
// result equals expected; the chi route pattern is ignored unless expected
// sets it explicitly.
func (r *testRecorder) test(t *testing.T, rt *Router, methodPath string, expected testResult) {
	r.reset()
	fields := strings.Fields(methodPath)
	method, target := fields[0], fields[1]
	req, err := http.NewRequest(method, target, nil)
	assert.NoError(t, err)
	rec := httptest.NewRecorder()
	rec.Body = &bytes.Buffer{}
	rt.ServeHTTP(rec, req)
	if expected.chiRoutePattern == nil {
		r.res.chiRoutePattern = nil
	}
	assert.Equal(t, expected, r.res)
}
func TestPathProcessor(t *testing.T) {
testProcess := func(pattern, uri string, expectedPathParams map[string]string) {
chiCtx := chi.NewRouteContext()
@ -51,42 +116,10 @@ func TestPathProcessor(t *testing.T) {
}
func TestRouter(t *testing.T) {
buff := &bytes.Buffer{}
recorder := httptest.NewRecorder()
recorder.Body = buff
type resultStruct struct {
method string
pathParams map[string]string
handlerMarks []string
chiRoutePattern *string
}
var res resultStruct
h := func(optMark ...string) func(resp http.ResponseWriter, req *http.Request) {
mark := util.OptionalArg(optMark, "")
return func(resp http.ResponseWriter, req *http.Request) {
chiCtx := chi.RouteContext(req.Context())
res.method = req.Method
res.pathParams = chiURLParamsToMap(chiCtx)
res.chiRoutePattern = new(chiCtx.RoutePattern())
if mark != "" {
res.handlerMarks = append(res.handlerMarks, mark)
}
}
}
stopMark := func(optMark ...string) func(resp http.ResponseWriter, req *http.Request) {
mark := util.OptionalArg(optMark, "")
return func(resp http.ResponseWriter, req *http.Request) {
if stop := req.FormValue("stop"); stop != "" && (mark == "" || mark == stop) {
h(stop)(resp, req)
resp.WriteHeader(http.StatusOK)
} else if mark != "" {
res.handlerMarks = append(res.handlerMarks, mark)
}
}
}
type resultStruct = testResult
resRecorder := &testRecorder{}
h := resRecorder.handle
stopMark := resRecorder.stop
r := NewRouter()
r.NotFound(h("not-found:/"))
@ -123,15 +156,7 @@ func TestRouter(t *testing.T) {
testRoute := func(t *testing.T, methodPath string, expected resultStruct) {
t.Run(methodPath, func(t *testing.T) {
res = resultStruct{}
methodPathFields := strings.Fields(methodPath)
req, err := http.NewRequest(methodPathFields[0], methodPathFields[1], nil)
assert.NoError(t, err)
r.ServeHTTP(recorder, req)
if expected.chiRoutePattern == nil {
res.chiRoutePattern = nil
}
assert.Equal(t, expected, res)
resRecorder.test(t, r, methodPath, expected)
})
}
@ -273,3 +298,39 @@ func TestRouteNormalizePath(t *testing.T) {
testPath("/v2/", paths{EscapedPath: "/v2", RawPath: "/v2", Path: "/v2"})
testPath("/v2/%2f", paths{EscapedPath: "/v2/%2f", RawPath: "/v2/%2f", Path: "/v2//"})
}
// TestPreMiddlewareProvider checks the execution order around PreMiddlewareProvider:
// a pre-middleware declared at a route runs before the AfterRouting middlewares of
// its own router level, while BeforeRouting middlewares always run first of all.
func TestPreMiddlewareProvider(t *testing.T) {
	resRecorder := &testRecorder{}
	h := resRecorder.handle
	p := resRecorder.provider

	root := NewRouter()
	root.BeforeRouting(h("before-root"))
	root.AfterRouting(h("root"))
	root.Get("/a/1", h("mid"), PreMiddlewareProvider(p("pre-root")), h("end1"))

	sub := NewRouter()
	sub.BeforeRouting(h("before-sub"))
	sub.AfterRouting(h("sub"))
	sub.Get("/2", h("mid"), PreMiddlewareProvider(p("pre-sub")), h("end2"))
	sub.NotFound(h("not-found"))
	root.Mount("/a", sub)

	// route declared on root: "pre-root" runs before root's AfterRouting middleware "root"
	resRecorder.test(t, root, "GET /a/1", testResult{
		method: "GET",
		handlerMarks: []string{"before-root", "pre-root", "root", "mid", "end1"},
	})
	// route on the mounted sub-router: "pre-sub" runs before sub's AfterRouting middleware "sub"
	resRecorder.test(t, root, "GET /a/2", testResult{
		method: "GET",
		handlerMarks: []string{"before-root", "root", "before-sub", "pre-sub", "sub", "mid", "end2"},
	})
	// unrouted path on root: only the BeforeRouting middleware executes
	resRecorder.test(t, root, "GET /no-such", testResult{
		method: "GET",
		handlerMarks: []string{"before-root"},
	})
	// unrouted path under the mount: sub's NotFound handler runs after its middlewares
	resRecorder.test(t, root, "GET /a/no-such", testResult{
		method: "GET",
		handlerMarks: []string{"before-root", "root", "before-sub", "sub", "not-found"},
	})
}

View File

@ -44,7 +44,7 @@ func MarkLongPolling(resp http.ResponseWriter, req *http.Request) {
}
// UpdatePanicError updates a context's error info, a panic may be recovered by other middlewares, but we still need to know that.
func UpdatePanicError(ctx context.Context, err any) {
func UpdatePanicError(ctx context.Context, err error) {
record, ok := ctx.Value(contextKey).(*requestRecord)
if !ok {
return

View File

@ -5,11 +5,13 @@ package routing
import (
"context"
"fmt"
"net/http"
"sync"
"time"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
)
@ -99,7 +101,7 @@ func (manager *requestRecordsManager) handler(next http.Handler) http.Handler {
localPanicErr := recover()
if localPanicErr != nil {
record.lock.Lock()
record.panicError = localPanicErr
record.panicError = fmt.Errorf("%v\n%s", localPanicErr, log.Stack(2))
record.lock.Unlock()
}

View File

@ -24,5 +24,5 @@ type requestRecord struct {
// mutable fields
isLongPolling bool
funcInfo *FuncInfo
panicError any
panicError error
}

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@ -84,6 +84,7 @@
"save": "Enregistrer",
"add": "Ajouter",
"add_all": "Tout Ajouter",
"dismiss": "Fermer",
"remove": "Retirer",
"remove_all": "Tout Retirer",
"remove_label_str": "Supprimer lélément « %s »",
@ -284,12 +285,6 @@
"install.register_confirm": "Exiger la confirmation du courriel lors de linscription",
"install.mail_notify": "Activer les notifications par courriel",
"install.server_service_title": "Paramètres Serveur et Tierce Parties",
"install.offline_mode": "Activer le mode hors-ligne",
"install.offline_mode_popup": "Désactiver l'utilisation de CDNs, et servir toutes les ressources localement.",
"install.disable_gravatar": "Désactiver Gravatar",
"install.disable_gravatar_popup": "Désactiver Gravatar et les autres sources d'avatars tierces. Un avatar par défaut sera utilisé pour les utilisateurs n'ayant pas téléversé un avatar personnalisé.",
"install.federated_avatar_lookup": "Activer les avatars unifiés",
"install.federated_avatar_lookup_popup": "Activer la recherche unifiée d'avatars en utilisant le service open source unifié basé sur libravatar.",
"install.disable_registration": "Désactiver le formulaire d'inscription",
"install.disable_registration_popup": "Désactiver les nouvelles inscriptions. Seuls les administrateurs pourront créer de nouveaux comptes utilisateurs.",
"install.allow_only_external_registration_popup": "N'autoriser l'inscription qu'à partir des services externes",
@ -871,7 +866,7 @@
"settings.permissions_list": "Autorisations :",
"settings.manage_oauth2_applications": "Gérer les applications OAuth2",
"settings.edit_oauth2_application": "Modifier l'application OAuth2",
"settings.oauth2_applications_desc": "Les applications OAuth2 permettent à votre application tierce d'authentifier en toute sécurité les utilisateurs de cette instance Gitea.",
"settings.oauth2_applications_desc": "OAuth2 permet a une application tierce dauthentifier les utilisateurs de cette instance Gitea.",
"settings.remove_oauth2_application": "Supprimer l'application OAuth2",
"settings.remove_oauth2_application_desc": "La suppression d'une application OAuth2 révoquera l'accès à tous les jetons d'accès signés. Continuer ?",
"settings.remove_oauth2_application_success": "L'application a été supprimée.",
@ -890,7 +885,7 @@
"settings.oauth2_regenerate_secret_hint": "Avez-vous perdu votre secret ?",
"settings.oauth2_client_secret_hint": "Le secret ne sera plus affiché après avoir quitté ou actualisé cette page. Veuillez vous assurer que vous l'avez enregistré.",
"settings.oauth2_application_edit": "Éditer",
"settings.oauth2_application_create_description": "Les applications OAuth2 permettent à votre application tierce d'accéder aux comptes d'utilisateurs de cette instance.",
"settings.oauth2_application_create_description": "OAuth2 permet à des applications tierces daccéder aux comptes utilisateurs de cette instance.",
"settings.oauth2_application_remove_description": "La suppression d'une application OAuth2 l'empêchera d'accéder aux comptes d'utilisateurs autorisés sur cette instance. Poursuivre ?",
"settings.oauth2_application_locked": "Gitea préinstalle des applications OAuth2 au démarrage si elles sont activées dans la configuration. Pour éviter des comportements inattendus, celles-ci ne peuvent être éditées ni supprimées. Veuillez vous référer à la documentation OAuth2 pour plus d'informations.",
"settings.authorized_oauth2_applications": "Applications OAuth2 autorisées",
@ -1524,6 +1519,7 @@
"repo.issues.commented_at": "a commenté <a href=\"#%s\"> %s</a>.",
"repo.issues.delete_comment_confirm": "Êtes-vous certain de vouloir supprimer ce commentaire?",
"repo.issues.context.copy_link": "Copier le lien",
"repo.issues.context.copy_source": "Copier la source",
"repo.issues.context.quote_reply": "Citer et répondre",
"repo.issues.context.reference_issue": "Référencer dans un nouveau ticket",
"repo.issues.context.edit": "Éditer",
@ -3192,7 +3188,6 @@
"admin.config.custom_conf": "Chemin du fichier de configuration",
"admin.config.custom_file_root_path": "Emplacement personnalisé du fichier racine",
"admin.config.domain": "Domaine du serveur",
"admin.config.offline_mode": "Mode hors-ligne",
"admin.config.disable_router_log": "Désactiver la Journalisation du Routeur",
"admin.config.run_user": "Exécuter avec l'utilisateur",
"admin.config.run_mode": "Mode d'Éxécution",
@ -3278,6 +3273,13 @@
"admin.config.cache_test_failed": "Impossible dinterroger le cache : %v.",
"admin.config.cache_test_slow": "Test du cache réussi, mais la réponse est lente : %s.",
"admin.config.cache_test_succeeded": "Test du cache réussi, réponse obtenue en %s.",
"admin.config.common.start_time": "Heure de début",
"admin.config.common.end_time": "Heure de fin",
"admin.config.common.skip_time_check": "Laisser le temps vide (effacer le champ) pour passer la vérification",
"admin.config.instance_maintenance": "Maintenance de linstance",
"admin.config.instance_maintenance_mode.admin_web_access_only": "Permettre uniquement aux administrateurs daccéder à linterface web",
"admin.config.instance_web_banner.enabled": "Afficher la bannière",
"admin.config.instance_web_banner.message_placeholder": "Message de bannière (supporte markdown)",
"admin.config.session_config": "Configuration de session",
"admin.config.session_provider": "Fournisseur de session",
"admin.config.provider_config": "Configuration du fournisseur",
@ -3288,7 +3290,7 @@
"admin.config.cookie_life_time": "Expiration du cookie",
"admin.config.picture_config": "Configuration de l'avatar",
"admin.config.picture_service": "Service d'Imagerie",
"admin.config.disable_gravatar": "Désactiver Gravatar",
"admin.config.enable_gravatar": "Activer Gravatar",
"admin.config.enable_federated_avatar": "Activer les avatars unifiés",
"admin.config.open_with_editor_app_help": "Les éditeurs disponibles via « Ouvrir avec ». Si laissé vide, la valeur par défaut sera utilisée. Développez pour voir la valeur par défaut.",
"admin.config.git_guide_remote_name": "Nom du dépôt distant pour les commandes git dans le guide",
@ -3672,6 +3674,8 @@
"actions.runners.reset_registration_token_confirm": "Voulez-vous révoquer le jeton actuel et en générer un nouveau ?",
"actions.runners.reset_registration_token_success": "Le jeton dinscription de lexécuteur a été réinitialisé avec succès",
"actions.runs.all_workflows": "Tous les flux de travail",
"actions.runs.workflow_run_count_1": "%d exécution du workflow",
"actions.runs.workflow_run_count_n": "%d exécutions du workflow",
"actions.runs.commit": "Révision",
"actions.runs.scheduled": "Planifié",
"actions.runs.pushed_by": "soumis par",

View File

@ -84,6 +84,7 @@
"save": "Sábháil",
"add": "Cuir",
"add_all": "Cuir Gach",
"dismiss": "Díbhe",
"remove": "Bain",
"remove_all": "Bain Gach",
"remove_label_str": "Bain mír “%s”",
@ -224,7 +225,7 @@
"startpage.lightweight": "Éadrom",
"startpage.lightweight_desc": "Tá íosta riachtanais íseal ag Gitea agus is féidir leo rith ar Raspberry Pi saor. Sábháil fuinneamh do mheaisín!",
"startpage.license": "Foinse Oscailte",
"startpage.license_desc": "Téigh go bhfaighidh <a target=\"_blank\" rel=\"noopener noreferrer\" href=\"%[1]s\">%[2]s</a>! Bí linn trí <a target=\"_blank\" rel=\"noopener noreferrer\" href=\"%[3]s\">cur leis</a> chun an tionscadal seo a fheabhsú fós. Ná bíodh cúthail ort a bheith i do rannpháirtí!",
"startpage.license_desc": "Téigh agus faigh <a target=\"_blank\" rel=\"noopener noreferrer\" href=\"%[1]s\">%[2]s</a>! Bí linn trí <a target=\"_blank\" rel=\"noopener noreferrer\" href=\"%[3]s\">cur leis</a> chun an tionscadal seo a dhéanamh níos fearr fós. Ná bíodh leisce ort cur leis!",
"install.install": "Suiteáil",
"install.installing_desc": "Suiteáil anois, fan go fóill…",
"install.title": "Cumraíocht Tosaigh",
@ -284,12 +285,6 @@
"install.register_confirm": "Deimhniú Ríomhphoist a cheangal le Clárú",
"install.mail_notify": "Cumasaigh Fógraí Ríomhphoist",
"install.server_service_title": "Socruithe Freastalaí agus Seirbhíse Tríú Páirtí",
"install.offline_mode": "Cumasaigh Mód Áitiúil",
"install.offline_mode_popup": "Díchumasaigh líonraí seachadta ábhair tríú páirtí agus freastal ar na hacmhainní go léir go háitiúil.",
"install.disable_gravatar": "Díchumasaigh Gravatar",
"install.disable_gravatar_popup": "Díchumasaigh foinsí abhatár Gravatar agus tríú páirtí. Úsáidfear abhatár réamhshocraithe mura n-uaslódálann úsáideoir abhatár go háitiúil.",
"install.federated_avatar_lookup": "Cumasaigh Abhatáir Chónaidhme",
"install.federated_avatar_lookup_popup": "Cumasaigh cuardach avatar cónaidhme ag baint úsáide as Libravatar.",
"install.disable_registration": "Díchumasaigh Féin-Chlárú",
"install.disable_registration_popup": "Díchumasaigh féinchlárú úsáideora. Ní bheidh ach riarthóirí in ann cuntais úsáideora nua a chruthú.",
"install.allow_only_external_registration_popup": "Ceadaigh Clárú Trí Sheirbhísí Seachtracha amháin",
@ -871,7 +866,7 @@
"settings.permissions_list": "Ceadanna:",
"settings.manage_oauth2_applications": "Bainistigh Feidhmchláir OAuth2",
"settings.edit_oauth2_application": "Cuir Feidhmchlár OAuth2 in eagar",
"settings.oauth2_applications_desc": "Cumasaíonn feidhmchláir OAuth2 dfheidhmchlár tríú páirtí úsáideoirí a fhíordheimhniú go slán ag an ásc Gitea seo.",
"settings.oauth2_applications_desc": "Cuireann feidhmchláir OAuth2 ar chumas dfheidhmchlár tríú páirtí úsáideoirí a fhíordheimhniú go slán ag an gcás Gitea seo.",
"settings.remove_oauth2_application": "Bain Feidhmchlár OAuth2",
"settings.remove_oauth2_application_desc": "Ag baint feidhmchlár OAuth2, cúlghairfear rochtain ar gach comhartha rochtana sínithe. Lean ar aghaidh?",
"settings.remove_oauth2_application_success": "Scriosadh an feidhmchlár.",
@ -890,7 +885,7 @@
"settings.oauth2_regenerate_secret_hint": "Chaill tú do rún?",
"settings.oauth2_client_secret_hint": "Ní thaispeánfar an rún arís tar éis duit an leathanach seo a fhágáil nó a athnuachan. Déan cinnte le do thoil gur shábháil tú é.",
"settings.oauth2_application_edit": "Cuir in eagar",
"settings.oauth2_application_create_description": "Tugann feidhmchláir OAuth2 rochtain d'iarratas tríú páirtí ar chuntais úsáideora ar an gcás seo.",
"settings.oauth2_application_create_description": "Tugann feidhmchláir OAuth2 rochtain do dfheidhmchlár tríú páirtí ar chuntais úsáideora ar an gcás seo.",
"settings.oauth2_application_remove_description": "Cuirfear feidhmchlár OAuth2 a bhaint cosc air rochtain a fháil ar chuntais úsáideora údaraithe ar an gcás seo. Lean ar aghaidh?",
"settings.oauth2_application_locked": "Réamhchláraíonn Gitea roinnt feidhmchlár OAuth2 ar thosú má tá sé cumasaithe i gcumraíocht. Chun iompar gan choinne a chosc, ní féidir iad seo a chur in eagar ná a bhaint. Féach do thoil do dhoiciméadú OAuth2 le haghaidh tuilleadh faisnéise.",
"settings.authorized_oauth2_applications": "Feidhmchláir Údaraithe OAuth2",
@ -1524,6 +1519,7 @@
"repo.issues.commented_at": "trácht <a href=\"#%s\">%s</a> ",
"repo.issues.delete_comment_confirm": "An bhfuil tú cinnte gur mhaith leat an trácht seo a scriosadh?",
"repo.issues.context.copy_link": "Cóipeáil Nasc",
"repo.issues.context.copy_source": "Cóipeáil Foinse",
"repo.issues.context.quote_reply": "Luaigh Freagra",
"repo.issues.context.reference_issue": "Tagairt in Eagrán Nua",
"repo.issues.context.edit": "Cuir in eagar",
@ -3192,7 +3188,6 @@
"admin.config.custom_conf": "Cosán Comhad Cumraíochta",
"admin.config.custom_file_root_path": "Cosán Fréamh Comhad Saincheaptha",
"admin.config.domain": "Fearann Freastalaí",
"admin.config.offline_mode": "Mód Áitiúil",
"admin.config.disable_router_log": "Díchumasaigh Loga an Ródaire",
"admin.config.run_user": "Rith Mar Ainm úsáideora",
"admin.config.run_mode": "Mód Rith",
@ -3278,6 +3273,13 @@
"admin.config.cache_test_failed": "Theip ar an taisce a thaiscéaladh: %v.",
"admin.config.cache_test_slow": "D'éirigh leis an tástáil taisce, ach tá an freagra mall: %s.",
"admin.config.cache_test_succeeded": "D'éirigh leis an tástáil taisce, fuair sé freagra i %s.",
"admin.config.common.start_time": "Am tosaithe",
"admin.config.common.end_time": "Am deiridh",
"admin.config.common.skip_time_check": "Fág an t-am folamh (glan an réimse) chun seiceáil ama a scipeáil",
"admin.config.instance_maintenance": "Cothabháil Cásanna",
"admin.config.instance_maintenance_mode.admin_web_access_only": "Lig don riarthóir amháin rochtain a fháil ar chomhéadan gréasáin",
"admin.config.instance_web_banner.enabled": "Taispeáin meirge",
"admin.config.instance_web_banner.message_placeholder": "Teachtaireacht meirge (tacaíonn sé le Markdown)",
"admin.config.session_config": "Cumraíocht Seisiúin",
"admin.config.session_provider": "Soláthraí Seisiúin",
"admin.config.provider_config": "Cumraíocht Soláthraí",
@ -3288,7 +3290,7 @@
"admin.config.cookie_life_time": "Am Saoil Fianán",
"admin.config.picture_config": "Cumraíocht Pictiúr agus Avatar",
"admin.config.picture_service": "Seirbhís Pictiúr",
"admin.config.disable_gravatar": "Díchumasaigh Gravatar",
"admin.config.enable_gravatar": "Cumasaigh Gravatar",
"admin.config.enable_federated_avatar": "Cumasaigh Avatars Cónaidhme",
"admin.config.open_with_editor_app_help": "Na heagarthóirí \"Oscailte le\" don roghchlár Clón. Má fhágtar folamh é, úsáidfear an réamhshocrú. Leathnaigh chun an réamhshocrú a fheiceáil.",
"admin.config.git_guide_remote_name": "Ainm iargúlta stórais le haghaidh orduithe git sa treoir",
@ -3672,6 +3674,8 @@
"actions.runners.reset_registration_token_confirm": "Ar mhaith leat an comhartha reatha a neamhbhailiú agus ceann nua a ghiniúint?",
"actions.runners.reset_registration_token_success": "D'éirigh le hathshocrú comhartha clárúcháin an dara háit",
"actions.runs.all_workflows": "Gach Sreafaí Oibre",
"actions.runs.workflow_run_count_1": "%d rith sreabha oibre",
"actions.runs.workflow_run_count_n": "%d rith sreabha oibre",
"actions.runs.commit": "Tiomantas",
"actions.runs.scheduled": "Sceidealaithe",
"actions.runs.pushed_by": "bhrú ag",

File diff suppressed because it is too large Load Diff

View File

@ -1399,17 +1399,17 @@
"repo.issues.new.clear_labels": "清除选中标签",
"repo.issues.new.projects": "项目",
"repo.issues.new.clear_projects": "清除项目",
"repo.issues.new.no_projects": "暂无项目",
"repo.issues.new.no_projects": "未选择项目",
"repo.issues.new.open_projects": "开启中的项目",
"repo.issues.new.closed_projects": "已关闭的项目",
"repo.issues.new.no_items": "无可选项",
"repo.issues.new.milestone": "里程碑",
"repo.issues.new.no_milestone": "未选择里程碑",
"repo.issues.new.clear_milestone": "取消选中里程碑",
"repo.issues.new.assignees": "指派成员",
"repo.issues.new.clear_assignees": "取消指派成员",
"repo.issues.new.no_assignees": "未指派员",
"repo.issues.new.no_reviewers": "评审人",
"repo.issues.new.assignees": "指派",
"repo.issues.new.clear_assignees": "取消指派",
"repo.issues.new.no_assignees": "未指派员",
"repo.issues.new.no_reviewers": "未指定评审人",
"repo.issues.new.blocked_user": "无法创建工单,因为您已被仓库所有者屏蔽。",
"repo.issues.edit.already_changed": "无法保存对工单的更改。其内容似乎已被其他用户更改。请刷新页面并重新编辑以避免覆盖他们的更改。",
"repo.issues.edit.blocked_user": "无法编辑内容,因为您已被仓库所有者或工单创建者屏蔽。",
@ -1465,9 +1465,9 @@
"repo.issues.filter_milestone_closed": "已关闭的里程碑",
"repo.issues.filter_project": "项目",
"repo.issues.filter_project_all": "所有项目",
"repo.issues.filter_project_none": "未加项目",
"repo.issues.filter_project_none": "项目",
"repo.issues.filter_assignee": "指派人筛选",
"repo.issues.filter_assignee_no_assignee": "未指派任何人",
"repo.issues.filter_assignee_no_assignee": "未指派任何人",
"repo.issues.filter_assignee_any_assignee": "已有指派",
"repo.issues.filter_poster": "作者",
"repo.issues.filter_user_placeholder": "搜索用户",
@ -1487,8 +1487,8 @@
"repo.issues.filter_sort.leastupdate": "最早更新",
"repo.issues.filter_sort.mostcomment": "最多评论",
"repo.issues.filter_sort.leastcomment": "最少评论",
"repo.issues.filter_sort.nearduedate": "到期日从近到远",
"repo.issues.filter_sort.farduedate": "到期日从远到近",
"repo.issues.filter_sort.nearduedate": "截止日期从近到远",
"repo.issues.filter_sort.farduedate": "截止日期从远到近",
"repo.issues.filter_sort.moststars": "点赞由多到少",
"repo.issues.filter_sort.feweststars": "点赞由少到多",
"repo.issues.filter_sort.mostforks": "派生由多到少",
@ -1519,6 +1519,7 @@
"repo.issues.commented_at": "评论于 <a href=\"#%s\">%s</a>",
"repo.issues.delete_comment_confirm": "您确定要删除该条评论吗?",
"repo.issues.context.copy_link": "复制链接",
"repo.issues.context.copy_source": "复制原文",
"repo.issues.context.quote_reply": "引用回复",
"repo.issues.context.reference_issue": "在新工单中引用",
"repo.issues.context.edit": "编辑",
@ -1927,8 +1928,8 @@
"repo.milestones.deletion_desc": "删除该里程碑将会移除所有工单中相关的信息。是否继续?",
"repo.milestones.deletion_success": "里程碑已删除。",
"repo.milestones.filter_sort.name": "名称",
"repo.milestones.filter_sort.earliest_due_data": "到期日从远到近",
"repo.milestones.filter_sort.latest_due_date": "到期日从近到远",
"repo.milestones.filter_sort.earliest_due_data": "截止日期从远到近",
"repo.milestones.filter_sort.latest_due_date": "截止日期从近到远",
"repo.milestones.filter_sort.least_complete": "完成度从低到高",
"repo.milestones.filter_sort.most_complete": "完成度从高到低",
"repo.milestones.filter_sort.most_issues": "工单从多到少",
@ -2011,7 +2012,7 @@
"repo.activity.title.issues_closed_from": "%[2]s 关闭了 %[1]s",
"repo.activity.title.issues_created_by": "%[2]s 创建了 %[1]s",
"repo.activity.closed_issue_label": "已关闭",
"repo.activity.new_issues_count_1": "开启的工单",
"repo.activity.new_issues_count_1": "开启的工单",
"repo.activity.new_issues_count_n": "已打开的工单",
"repo.activity.new_issue_label": "打开的",
"repo.activity.title.unresolved_conv_1": "%d 未解决的会话",
@ -3673,6 +3674,8 @@
"actions.runners.reset_registration_token_confirm": "是否吊销当前令牌并生成一个新令牌?",
"actions.runners.reset_registration_token_success": "成功重置运行器注册令牌",
"actions.runs.all_workflows": "所有工作流",
"actions.runs.workflow_run_count_1": "%d 次工作流运行",
"actions.runs.workflow_run_count_n": "%d 次工作流运行",
"actions.runs.commit": "提交",
"actions.runs.scheduled": "已计划的",
"actions.runs.pushed_by": "推送者",

View File

@ -1,6 +1,6 @@
{
"type": "module",
"packageManager": "pnpm@10.30.1",
"packageManager": "pnpm@10.30.3",
"engines": {
"node": ">= 22.6.0",
"pnpm": ">= 10.0.0"
@ -22,7 +22,7 @@
"@techknowlogick/license-checker-webpack-plugin": "0.3.0",
"add-asset-webpack-plugin": "3.1.1",
"ansi_up": "6.0.6",
"asciinema-player": "3.15.0",
"asciinema-player": "3.15.1",
"chart.js": "4.5.1",
"chartjs-adapter-dayjs-4": "1.0.4",
"chartjs-plugin-zoom": "2.2.0",
@ -39,7 +39,7 @@
"idiomorph": "0.7.4",
"jquery": "4.0.0",
"js-yaml": "4.1.1",
"katex": "0.16.28",
"katex": "0.16.37",
"mermaid": "11.12.3",
"mini-css-extract-plugin": "2.10.0",
"monaco-editor": "0.55.1",
@ -47,10 +47,10 @@
"online-3d-viewer": "0.18.0",
"pdfobject": "2.3.1",
"perfect-debounce": "2.1.0",
"postcss": "8.5.6",
"postcss": "8.5.8",
"postcss-loader": "8.2.1",
"sortablejs": "1.15.7",
"swagger-ui-dist": "5.31.2",
"swagger-ui-dist": "5.32.0",
"tailwindcss": "3.4.17",
"throttle-debounce": "5.0.2",
"tippy.js": "6.3.7",
@ -58,32 +58,32 @@
"tributejs": "5.1.3",
"uint8-to-base64": "0.2.1",
"vanilla-colorful": "0.7.2",
"vue": "3.5.28",
"vue": "3.5.29",
"vue-bar-graph": "2.2.0",
"vue-chartjs": "5.3.3",
"vue-loader": "17.4.2",
"webpack": "5.105.2",
"webpack": "5.105.4",
"webpack-cli": "6.0.1",
"wrap-ansi": "10.0.0"
},
"devDependencies": {
"@eslint-community/eslint-plugin-eslint-comments": "4.6.0",
"@eslint-community/eslint-plugin-eslint-comments": "4.7.1",
"@eslint/json": "0.14.0",
"@playwright/test": "1.58.2",
"@stylistic/eslint-plugin": "5.9.0",
"@stylistic/eslint-plugin": "5.10.0",
"@stylistic/stylelint-plugin": "5.0.1",
"@types/codemirror": "5.60.17",
"@types/dropzone": "5.7.9",
"@types/jquery": "3.5.33",
"@types/jquery": "4.0.0",
"@types/js-yaml": "4.0.9",
"@types/katex": "0.16.8",
"@types/node": "25.3.0",
"@types/node": "25.3.5",
"@types/pdfobject": "2.2.5",
"@types/sortablejs": "1.15.9",
"@types/swagger-ui-dist": "3.30.6",
"@types/throttle-debounce": "5.0.2",
"@types/toastify-js": "1.12.4",
"@typescript-eslint/parser": "8.56.0",
"@typescript-eslint/parser": "8.56.1",
"@vitejs/plugin-vue": "6.0.4",
"@vitest/eslint-plugin": "1.6.9",
"eslint": "9.39.2",
@ -91,30 +91,30 @@
"eslint-plugin-array-func": "5.1.0",
"eslint-plugin-github": "6.0.0",
"eslint-plugin-import-x": "4.16.1",
"eslint-plugin-playwright": "2.7.0",
"eslint-plugin-playwright": "2.9.0",
"eslint-plugin-regexp": "3.0.0",
"eslint-plugin-sonarjs": "4.0.0",
"eslint-plugin-sonarjs": "4.0.1",
"eslint-plugin-unicorn": "63.0.0",
"eslint-plugin-vue": "10.8.0",
"eslint-plugin-vue-scoped-css": "2.12.0",
"eslint-plugin-vue-scoped-css": "3.0.0",
"eslint-plugin-wc": "3.1.0",
"globals": "17.3.0",
"happy-dom": "20.7.0",
"globals": "17.4.0",
"happy-dom": "20.8.3",
"jiti": "2.6.1",
"markdownlint-cli": "0.47.0",
"material-icon-theme": "5.31.0",
"markdownlint-cli": "0.48.0",
"material-icon-theme": "5.32.0",
"nolyfill": "1.0.44",
"postcss-html": "1.8.1",
"spectral-cli-bundle": "1.0.7",
"stylelint": "17.3.0",
"stylelint": "17.4.0",
"stylelint-config-recommended": "18.0.0",
"stylelint-declaration-block-no-ignored-properties": "3.0.0",
"stylelint-declaration-strict-value": "1.11.0",
"stylelint-declaration-strict-value": "1.11.1",
"stylelint-value-no-unknown-custom-properties": "6.1.1",
"svgo": "4.0.0",
"svgo": "4.0.1",
"typescript": "5.9.3",
"typescript-eslint": "8.56.0",
"updates": "17.6.2",
"typescript-eslint": "8.56.1",
"updates": "17.8.3",
"vite-string-plugin": "2.0.1",
"vitest": "4.0.18",
"vue-tsc": "3.2.5"

1233
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@ -103,7 +103,7 @@ func init() {
func ArtifactsRoutes(prefix string) *web.Router {
m := web.NewRouter()
m.Use(ArtifactContexter())
m.AfterRouting(ArtifactContexter())
r := artifactRoutes{
prefix: prefix,
@ -241,7 +241,7 @@ func (ar artifactRoutes) uploadArtifact(ctx *ArtifactContext) {
}
// get upload file size
fileRealTotalSize, contentLength := getUploadFileSize(ctx)
fileRealTotalSize := getUploadFileSize(ctx)
// get artifact retention days
expiredDays := setting.Actions.ArtifactRetentionDays
@ -265,17 +265,17 @@ func (ar artifactRoutes) uploadArtifact(ctx *ArtifactContext) {
return
}
// save chunk to storage, if success, return chunk stotal size
// save chunk to storage, if success, return chunks total size
// if artifact is not gzip when uploading, chunksTotalSize == fileRealTotalSize
// if artifact is gzip when uploading, chunksTotalSize < fileRealTotalSize
chunksTotalSize, err := saveUploadChunk(ar.fs, ctx, artifact, contentLength, runID)
chunksTotalSize, err := saveUploadChunkV3GetTotalSize(ar.fs, ctx, artifact, runID)
if err != nil {
log.Error("Error save upload chunk: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error save upload chunk")
return
}
// update artifact size if zero or not match, over write artifact size
// update artifact size if zero or not match, overwrite artifact size
if artifact.FileSize == 0 ||
artifact.FileCompressedSize == 0 ||
artifact.FileSize != fileRealTotalSize ||

View File

@ -12,7 +12,7 @@ import (
"fmt"
"hash"
"io"
"path/filepath"
"path"
"sort"
"strings"
"time"
@ -20,18 +20,73 @@ import (
"code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
)
func saveUploadChunkBase(st storage.ObjectStorage, ctx *ArtifactContext,
artifact *actions.ActionArtifact,
contentSize, runID, start, end, length int64, checkMd5 bool,
) (int64, error) {
type saveUploadChunkOptions struct {
start int64
end *int64
checkMd5 bool
}
func makeTmpPathNameV3(runID int64) string {
return fmt.Sprintf("tmp-upload/run-%d", runID)
}
func makeTmpPathNameV4(runID int64) string {
return fmt.Sprintf("tmp-upload/run-%d-v4", runID)
}
func makeChunkFilenameV3(runID, artifactID, start int64, endPtr *int64) string {
var end int64
if endPtr != nil {
end = *endPtr
}
return fmt.Sprintf("%d-%d-%d-%d.chunk", runID, artifactID, start, end)
}
func parseChunkFileItemV3(st storage.ObjectStorage, fpath string) (*chunkFileItem, error) {
baseName := path.Base(fpath)
if !strings.HasSuffix(baseName, ".chunk") {
return nil, errSkipChunkFile
}
var item chunkFileItem
var unusedRunID int64
if _, err := fmt.Sscanf(baseName, "%d-%d-%d-%d.chunk", &unusedRunID, &item.ArtifactID, &item.Start, &item.End); err != nil {
return nil, err
}
item.Path = fpath
if item.End == 0 {
fi, err := st.Stat(item.Path)
if err != nil {
return nil, err
}
item.Size = fi.Size()
item.End = item.Start + item.Size - 1
} else {
item.Size = item.End - item.Start + 1
}
return &item, nil
}
func saveUploadChunkV3(st storage.ObjectStorage, ctx *ArtifactContext, artifact *actions.ActionArtifact,
runID int64, opts saveUploadChunkOptions,
) (writtenSize int64, retErr error) {
// build chunk store path
storagePath := fmt.Sprintf("tmp%d/%d-%d-%d-%d.chunk", runID, runID, artifact.ID, start, end)
storagePath := fmt.Sprintf("%s/%s", makeTmpPathNameV3(runID), makeChunkFilenameV3(runID, artifact.ID, opts.start, opts.end))
// "end" is optional, so "contentSize=-1" means read until EOF
contentSize := int64(-1)
if opts.end != nil {
contentSize = *opts.end - opts.start + 1
}
var r io.Reader = ctx.Req.Body
var hasher hash.Hash
if checkMd5 {
if opts.checkMd5 {
// use io.TeeReader to avoid reading all body to md5 sum.
// it writes data to hasher after reading end
// if hash is not matched, delete the read-end result
@ -41,76 +96,81 @@ func saveUploadChunkBase(st storage.ObjectStorage, ctx *ArtifactContext,
// save chunk to storage
writtenSize, err := st.Save(storagePath, r, contentSize)
if err != nil {
return -1, fmt.Errorf("save chunk to storage error: %v", err)
return 0, fmt.Errorf("save chunk to storage error: %v", err)
}
var checkErr error
if checkMd5 {
defer func() {
if retErr != nil {
if err := st.Delete(storagePath); err != nil {
log.Error("Error deleting chunk: %s, %v", storagePath, err)
}
}
}()
if contentSize != -1 && writtenSize != contentSize {
return writtenSize, fmt.Errorf("writtenSize %d does not match contentSize %d", writtenSize, contentSize)
}
if opts.checkMd5 {
// check md5
reqMd5String := ctx.Req.Header.Get(artifactXActionsResultsMD5Header)
chunkMd5String := base64.StdEncoding.EncodeToString(hasher.Sum(nil))
log.Info("[artifact] check chunk md5, sum: %s, header: %s", chunkMd5String, reqMd5String)
log.Debug("[artifact] check chunk md5, sum: %s, header: %s", chunkMd5String, reqMd5String)
// if md5 not match, delete the chunk
if reqMd5String != chunkMd5String {
checkErr = errors.New("md5 not match")
return writtenSize, errors.New("md5 not match")
}
}
if writtenSize != contentSize {
checkErr = errors.Join(checkErr, fmt.Errorf("writtenSize %d not match contentSize %d", writtenSize, contentSize))
}
if checkErr != nil {
if err := st.Delete(storagePath); err != nil {
log.Error("Error deleting chunk: %s, %v", storagePath, err)
}
return -1, checkErr
}
log.Info("[artifact] save chunk %s, size: %d, artifact id: %d, start: %d, end: %d",
storagePath, contentSize, artifact.ID, start, end)
// return chunk total size
return length, nil
log.Debug("[artifact] save chunk %s, size: %d, artifact id: %d, start: %d, size: %d", storagePath, writtenSize, artifact.ID, opts.start, contentSize)
return writtenSize, nil
}
func saveUploadChunk(st storage.ObjectStorage, ctx *ArtifactContext,
artifact *actions.ActionArtifact,
contentSize, runID int64,
) (int64, error) {
func saveUploadChunkV3GetTotalSize(st storage.ObjectStorage, ctx *ArtifactContext, artifact *actions.ActionArtifact, runID int64) (totalSize int64, _ error) {
// parse content-range header, format: bytes 0-1023/146515
contentRange := ctx.Req.Header.Get("Content-Range")
start, end, length := int64(0), int64(0), int64(0)
if _, err := fmt.Sscanf(contentRange, "bytes %d-%d/%d", &start, &end, &length); err != nil {
log.Warn("parse content range error: %v, content-range: %s", err, contentRange)
return -1, fmt.Errorf("parse content range error: %v", err)
var start, end int64
if _, err := fmt.Sscanf(contentRange, "bytes %d-%d/%d", &start, &end, &totalSize); err != nil {
return 0, fmt.Errorf("parse content range error: %v", err)
}
return saveUploadChunkBase(st, ctx, artifact, contentSize, runID, start, end, length, true)
_, err := saveUploadChunkV3(st, ctx, artifact, runID, saveUploadChunkOptions{start: start, end: &end, checkMd5: true})
if err != nil {
return 0, err
}
return totalSize, nil
}
func appendUploadChunk(st storage.ObjectStorage, ctx *ArtifactContext,
artifact *actions.ActionArtifact,
start, contentSize, runID int64,
) (int64, error) {
end := start + contentSize - 1
return saveUploadChunkBase(st, ctx, artifact, contentSize, runID, start, end, contentSize, false)
// Returns uploaded length
func appendUploadChunkV3(st storage.ObjectStorage, ctx *ArtifactContext, artifact *actions.ActionArtifact, runID, start int64) (int64, error) {
opts := saveUploadChunkOptions{start: start}
if ctx.Req.ContentLength > 0 {
end := start + ctx.Req.ContentLength - 1
opts.end = &end
}
return saveUploadChunkV3(st, ctx, artifact, runID, opts)
}
type chunkFileItem struct {
RunID int64
ArtifactID int64
Start int64
End int64
Path string
// these offset/size related fields might be missing when parsing, they will be filled in the listing functions
Size int64
Start int64
End int64 // inclusive: Size=10, Start=0, End=9
ChunkName string // v4 only
}
func listChunksByRunID(st storage.ObjectStorage, runID int64) (map[int64][]*chunkFileItem, error) {
storageDir := fmt.Sprintf("tmp%d", runID)
func listV3UnorderedChunksMapByRunID(st storage.ObjectStorage, runID int64) (map[int64][]*chunkFileItem, error) {
storageDir := makeTmpPathNameV3(runID)
var chunks []*chunkFileItem
if err := st.IterateObjects(storageDir, func(fpath string, obj storage.Object) error {
baseName := filepath.Base(fpath)
// when read chunks from storage, it only contains storage dir and basename,
// no matter the subdirectory setting in storage config
item := chunkFileItem{Path: storageDir + "/" + baseName}
if _, err := fmt.Sscanf(baseName, "%d-%d-%d-%d.chunk", &item.RunID, &item.ArtifactID, &item.Start, &item.End); err != nil {
return fmt.Errorf("parse content range error: %v", err)
item, err := parseChunkFileItemV3(st, fpath)
if errors.Is(err, errSkipChunkFile) {
return nil
} else if err != nil {
return fmt.Errorf("unable to parse chunk name: %v", fpath)
}
chunks = append(chunks, &item)
chunks = append(chunks, item)
return nil
}); err != nil {
return nil, err
@ -123,52 +183,78 @@ func listChunksByRunID(st storage.ObjectStorage, runID int64) (map[int64][]*chun
return chunksMap, nil
}
func listChunksByRunIDV4(st storage.ObjectStorage, runID, artifactID int64, blist *BlockList) ([]*chunkFileItem, error) {
storageDir := fmt.Sprintf("tmpv4%d", runID)
var chunks []*chunkFileItem
chunkMap := map[string]*chunkFileItem{}
dummy := &chunkFileItem{}
for _, name := range blist.Latest {
chunkMap[name] = dummy
func listOrderedChunksForArtifact(st storage.ObjectStorage, runID, artifactID int64, blist *BlockList) ([]*chunkFileItem, error) {
emptyListAsError := func(chunks []*chunkFileItem) ([]*chunkFileItem, error) {
if len(chunks) == 0 {
return nil, fmt.Errorf("no chunk found for artifact id: %d", artifactID)
}
return chunks, nil
}
storageDir := makeTmpPathNameV4(runID)
var chunks []*chunkFileItem
var chunkMapV4 map[string]*chunkFileItem
if blist != nil {
// make a dummy map for quick lookup of chunk names, the values are nil now and will be filled after iterating storage objects
chunkMapV4 = map[string]*chunkFileItem{}
for _, name := range blist.Latest {
chunkMapV4[name] = nil
}
}
if err := st.IterateObjects(storageDir, func(fpath string, obj storage.Object) error {
baseName := filepath.Base(fpath)
if !strings.HasPrefix(baseName, "block-") {
item, err := parseChunkFileItemV4(st, artifactID, fpath)
if errors.Is(err, errSkipChunkFile) {
return nil
} else if err != nil {
return fmt.Errorf("unable to parse chunk name: %v", fpath)
}
// when read chunks from storage, it only contains storage dir and basename,
// no matter the subdirectory setting in storage config
item := chunkFileItem{Path: storageDir + "/" + baseName, ArtifactID: artifactID}
var size int64
var b64chunkName string
if _, err := fmt.Sscanf(baseName, "block-%d-%d-%s", &item.RunID, &size, &b64chunkName); err != nil {
return fmt.Errorf("parse content range error: %v", err)
}
rchunkName, err := base64.URLEncoding.DecodeString(b64chunkName)
if err != nil {
return fmt.Errorf("failed to parse chunkName: %v", err)
}
chunkName := string(rchunkName)
item.End = item.Start + size - 1
if _, ok := chunkMap[chunkName]; ok {
chunkMap[chunkName] = &item
// Single chunk upload with block id
if _, ok := chunkMapV4[item.ChunkName]; ok {
chunkMapV4[item.ChunkName] = item
} else if chunkMapV4 == nil {
if chunks != nil {
return errors.New("blockmap is required for chunks > 1")
}
chunks = []*chunkFileItem{item}
}
return nil
}); err != nil {
return nil, err
}
for i, name := range blist.Latest {
chunk, ok := chunkMap[name]
if !ok || chunk.Path == "" {
return nil, fmt.Errorf("missing Chunk (%d/%d): %s", i, len(blist.Latest), name)
if blist == nil && chunks == nil {
chunkUnorderedItemsMapV3, err := listV3UnorderedChunksMapByRunID(st, runID)
if err != nil {
return nil, err
}
chunks = append(chunks, chunk)
if i > 0 {
chunk.Start = chunkMap[blist.Latest[i-1]].End + 1
chunk.End += chunk.Start
chunks = chunkUnorderedItemsMapV3[artifactID]
sort.Slice(chunks, func(i, j int) bool {
return chunks[i].Start < chunks[j].Start
})
return emptyListAsError(chunks)
}
if len(chunks) == 0 && blist != nil {
for i, name := range blist.Latest {
chunk := chunkMapV4[name]
if chunk == nil {
return nil, fmt.Errorf("missing chunk (%d/%d): %s", i, len(blist.Latest), name)
}
chunks = append(chunks, chunk)
}
}
return chunks, nil
for i, chunk := range chunks {
if i == 0 {
chunk.End += chunk.Size - 1
} else {
chunk.Start = chunkMapV4[blist.Latest[i-1]].End + 1
chunk.End = chunk.Start + chunk.Size - 1
}
}
return emptyListAsError(chunks)
}
func mergeChunksForRun(ctx *ArtifactContext, st storage.ObjectStorage, runID int64, artifactName string) error {
@ -181,13 +267,13 @@ func mergeChunksForRun(ctx *ArtifactContext, st storage.ObjectStorage, runID int
return err
}
// read all uploading chunks from storage
chunksMap, err := listChunksByRunID(st, runID)
unorderedChunksMap, err := listV3UnorderedChunksMapByRunID(st, runID)
if err != nil {
return err
}
// range db artifacts to merge chunks
for _, art := range artifacts {
chunks, ok := chunksMap[art.ID]
chunks, ok := unorderedChunksMap[art.ID]
if !ok {
log.Debug("artifact %d chunks not found", art.ID)
continue
@ -239,12 +325,14 @@ func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st st
}
mergedReader := io.MultiReader(readers...)
shaPrefix := "sha256:"
var hash hash.Hash
var hashSha256 hash.Hash
if strings.HasPrefix(checksum, shaPrefix) {
hash = sha256.New()
hashSha256 = sha256.New()
} else if checksum != "" {
setting.PanicInDevOrTesting("unsupported checksum format: %s, will skip the checksum verification", checksum)
}
if hash != nil {
mergedReader = io.TeeReader(mergedReader, hash)
if hashSha256 != nil {
mergedReader = io.TeeReader(mergedReader, hashSha256)
}
// if chunk is gzip, use gz as extension
@ -274,8 +362,8 @@ func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st st
}
}()
if hash != nil {
rawChecksum := hash.Sum(nil)
if hashSha256 != nil {
rawChecksum := hashSha256.Sum(nil)
actualChecksum := hex.EncodeToString(rawChecksum)
if !strings.HasSuffix(checksum, actualChecksum) {
return fmt.Errorf("update artifact error checksum is invalid %v vs %v", checksum, actualChecksum)

View File

@ -20,8 +20,8 @@ const (
artifactXActionsResultsMD5Header = "x-actions-results-md5"
)
// The rules are from https://github.com/actions/toolkit/blob/main/packages/artifact/src/internal/path-and-artifact-name-validation.ts#L32
var invalidArtifactNameChars = strings.Join([]string{"\\", "/", "\"", ":", "<", ">", "|", "*", "?", "\r", "\n"}, "")
// The rules are from https://github.com/actions/toolkit/blob/main/packages/artifact/src/internal/upload/path-and-artifact-name-validation.ts
const invalidArtifactNameChars = "\\/\":<>|*?\r\n"
func validateArtifactName(ctx *ArtifactContext, artifactName string) bool {
if strings.ContainsAny(artifactName, invalidArtifactNameChars) {
@ -84,11 +84,10 @@ func parseArtifactItemPath(ctx *ArtifactContext) (string, string, bool) {
// getUploadFileSize returns the size of the file to be uploaded.
// The raw size is the size of the file as reported by the header X-TFS-FileLength.
func getUploadFileSize(ctx *ArtifactContext) (int64, int64) {
contentLength := ctx.Req.ContentLength
func getUploadFileSize(ctx *ArtifactContext) int64 {
xTfsLength, _ := strconv.ParseInt(ctx.Req.Header.Get(artifactXTfsFileLengthHeader), 10, 64)
if xTfsLength > 0 {
return xTfsLength, contentLength
return xTfsLength
}
return contentLength, contentLength
return ctx.Req.ContentLength
}

View File

@ -90,10 +90,12 @@ import (
"crypto/sha256"
"encoding/base64"
"encoding/xml"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"path"
"strconv"
"strings"
"time"
@ -109,7 +111,7 @@ import (
"code.gitea.io/gitea/services/context"
"google.golang.org/protobuf/encoding/protojson"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/known/timestamppb"
)
@ -157,33 +159,81 @@ func ArtifactsV4Routes(prefix string) *web.Router {
return m
}
func (r artifactV4Routes) buildSignature(endp, expires, artifactName string, taskID, artifactID int64) []byte {
func (r *artifactV4Routes) buildSignature(endpoint, expires, artifactName string, taskID, artifactID int64) []byte {
mac := hmac.New(sha256.New, setting.GetGeneralTokenSigningSecret())
mac.Write([]byte(endp))
mac.Write([]byte(endpoint))
mac.Write([]byte(expires))
mac.Write([]byte(artifactName))
fmt.Fprint(mac, taskID)
fmt.Fprint(mac, artifactID)
_, _ = fmt.Fprint(mac, taskID)
_, _ = fmt.Fprint(mac, artifactID)
return mac.Sum(nil)
}
func (r artifactV4Routes) buildArtifactURL(ctx *ArtifactContext, endp, artifactName string, taskID, artifactID int64) string {
func (r *artifactV4Routes) buildArtifactURL(ctx *ArtifactContext, endpoint, artifactName string, taskID, artifactID int64) string {
expires := time.Now().Add(60 * time.Minute).Format("2006-01-02 15:04:05.999999999 -0700 MST")
uploadURL := strings.TrimSuffix(httplib.GuessCurrentAppURL(ctx), "/") + strings.TrimSuffix(r.prefix, "/") +
"/" + endp + "?sig=" + base64.URLEncoding.EncodeToString(r.buildSignature(endp, expires, artifactName, taskID, artifactID)) + "&expires=" + url.QueryEscape(expires) + "&artifactName=" + url.QueryEscape(artifactName) + "&taskID=" + strconv.FormatInt(taskID, 10) + "&artifactID=" + strconv.FormatInt(artifactID, 10)
"/" + endpoint +
"?sig=" + base64.RawURLEncoding.EncodeToString(r.buildSignature(endpoint, expires, artifactName, taskID, artifactID)) +
"&expires=" + url.QueryEscape(expires) +
"&artifactName=" + url.QueryEscape(artifactName) +
"&taskID=" + strconv.FormatInt(taskID, 10) +
"&artifactID=" + strconv.FormatInt(artifactID, 10)
return uploadURL
}
func (r artifactV4Routes) verifySignature(ctx *ArtifactContext, endp string) (*actions.ActionTask, string, bool) {
func makeBlockFilenameV4(runID, artifactID, size int64, blockID string) string {
sizeInName := max(size, 0) // do not use "-1" in filename
return fmt.Sprintf("block-%d-%d-%d-%s", runID, artifactID, sizeInName, base64.URLEncoding.EncodeToString([]byte(blockID)))
}
var errSkipChunkFile = errors.New("skip this chunk file")
func parseChunkFileItemV4(st storage.ObjectStorage, artifactID int64, fpath string) (*chunkFileItem, error) {
baseName := path.Base(fpath)
if !strings.HasPrefix(baseName, "block-") {
return nil, errSkipChunkFile
}
var item chunkFileItem
var unusedRunID int64
var b64chunkName string
_, err := fmt.Sscanf(baseName, "block-%d-%d-%d-%s", &unusedRunID, &item.ArtifactID, &item.Size, &b64chunkName)
if err != nil {
return nil, err
}
if item.ArtifactID != artifactID {
return nil, errSkipChunkFile
}
chunkName, err := base64.URLEncoding.DecodeString(b64chunkName)
if err != nil {
return nil, err
}
item.ChunkName = string(chunkName)
item.Path = fpath
if item.Size <= 0 {
fi, err := st.Stat(item.Path)
if err != nil {
return nil, err
}
item.Size = fi.Size()
}
return &item, nil
}
func (r *artifactV4Routes) verifySignature(ctx *ArtifactContext, endp string) (*actions.ActionTask, string, bool) {
rawTaskID := ctx.Req.URL.Query().Get("taskID")
rawArtifactID := ctx.Req.URL.Query().Get("artifactID")
sig := ctx.Req.URL.Query().Get("sig")
expires := ctx.Req.URL.Query().Get("expires")
artifactName := ctx.Req.URL.Query().Get("artifactName")
dsig, _ := base64.URLEncoding.DecodeString(sig)
taskID, _ := strconv.ParseInt(rawTaskID, 10, 64)
artifactID, _ := strconv.ParseInt(rawArtifactID, 10, 64)
dsig, errSig := base64.RawURLEncoding.DecodeString(sig)
taskID, errTask := strconv.ParseInt(rawTaskID, 10, 64)
artifactID, errArtifactID := strconv.ParseInt(rawArtifactID, 10, 64)
err := errors.Join(errSig, errTask, errArtifactID)
if err != nil {
log.Error("Error decoding signature values: %v", err)
ctx.HTTPError(http.StatusBadRequest, "Error decoding signature values")
return nil, "", false
}
expecedsig := r.buildSignature(endp, expires, artifactName, taskID, artifactID)
if !hmac.Equal(dsig, expecedsig) {
log.Error("Error unauthorized")
@ -226,7 +276,7 @@ func (r *artifactV4Routes) getArtifactByName(ctx *ArtifactContext, runID int64,
return &art, nil
}
func (r *artifactV4Routes) parseProtbufBody(ctx *ArtifactContext, req protoreflect.ProtoMessage) bool {
func (r *artifactV4Routes) parseProtobufBody(ctx *ArtifactContext, req protoreflect.ProtoMessage) bool {
body, err := io.ReadAll(ctx.Req.Body)
if err != nil {
log.Error("Error decode request body: %v", err)
@ -242,7 +292,7 @@ func (r *artifactV4Routes) parseProtbufBody(ctx *ArtifactContext, req protorefle
return true
}
func (r *artifactV4Routes) sendProtbufBody(ctx *ArtifactContext, req protoreflect.ProtoMessage) {
func (r *artifactV4Routes) sendProtobufBody(ctx *ArtifactContext, req protoreflect.ProtoMessage) {
resp, err := protojson.Marshal(req)
if err != nil {
log.Error("Error encode response body: %v", err)
@ -257,7 +307,7 @@ func (r *artifactV4Routes) sendProtbufBody(ctx *ArtifactContext, req protoreflec
func (r *artifactV4Routes) createArtifact(ctx *ArtifactContext) {
var req CreateArtifactRequest
if ok := r.parseProtbufBody(ctx, &req); !ok {
if ok := r.parseProtobufBody(ctx, &req); !ok {
return
}
_, _, ok := validateRunIDV4(ctx, req.WorkflowRunBackendId)
@ -291,7 +341,7 @@ func (r *artifactV4Routes) createArtifact(ctx *ArtifactContext) {
Ok: true,
SignedUploadUrl: r.buildArtifactURL(ctx, "UploadArtifact", artifactName, ctx.ActionTask.ID, artifact.ID),
}
r.sendProtbufBody(ctx, &respData)
r.sendProtobufBody(ctx, &respData)
}
func (r *artifactV4Routes) uploadArtifact(ctx *ArtifactContext) {
@ -303,34 +353,34 @@ func (r *artifactV4Routes) uploadArtifact(ctx *ArtifactContext) {
comp := ctx.Req.URL.Query().Get("comp")
switch comp {
case "block", "appendBlock":
blockid := ctx.Req.URL.Query().Get("blockid")
if blockid == "" {
// get artifact by name
artifact, err := r.getArtifactByName(ctx, task.Job.RunID, artifactName)
// get artifact by name
artifact, err := r.getArtifactByName(ctx, task.Job.RunID, artifactName)
if err != nil {
log.Error("Error artifact not found: %v", err)
ctx.HTTPError(http.StatusNotFound, "Error artifact not found")
return
}
blockID := ctx.Req.URL.Query().Get("blockid")
if blockID == "" {
uploadedLength, err := appendUploadChunkV3(r.fs, ctx, artifact, artifact.RunID, artifact.FileSize)
if err != nil {
log.Error("Error artifact not found: %v", err)
ctx.HTTPError(http.StatusNotFound, "Error artifact not found")
log.Error("Error appending chunk %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error appending Chunk")
return
}
_, err = appendUploadChunk(r.fs, ctx, artifact, artifact.FileSize, ctx.Req.ContentLength, artifact.RunID)
if err != nil {
log.Error("Error runner api getting task: task is not running")
ctx.HTTPError(http.StatusInternalServerError, "Error runner api getting task: task is not running")
return
}
artifact.FileCompressedSize += ctx.Req.ContentLength
artifact.FileSize += ctx.Req.ContentLength
artifact.FileCompressedSize += uploadedLength
artifact.FileSize += uploadedLength
if err := actions.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
log.Error("Error UpdateArtifactByID: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error UpdateArtifactByID")
return
}
} else {
_, err := r.fs.Save(fmt.Sprintf("tmpv4%d/block-%d-%d-%s", task.Job.RunID, task.Job.RunID, ctx.Req.ContentLength, base64.URLEncoding.EncodeToString([]byte(blockid))), ctx.Req.Body, -1)
blockFilename := makeBlockFilenameV4(task.Job.RunID, artifact.ID, ctx.Req.ContentLength, blockID)
_, err := r.fs.Save(fmt.Sprintf("%s/%s", makeTmpPathNameV4(task.Job.RunID), blockFilename), ctx.Req.Body, ctx.Req.ContentLength)
if err != nil {
log.Error("Error runner api getting task: task is not running")
ctx.HTTPError(http.StatusInternalServerError, "Error runner api getting task: task is not running")
log.Error("Error uploading block blob %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error uploading block blob")
return
}
}
@ -338,10 +388,10 @@ func (r *artifactV4Routes) uploadArtifact(ctx *ArtifactContext) {
case "blocklist":
rawArtifactID := ctx.Req.URL.Query().Get("artifactID")
artifactID, _ := strconv.ParseInt(rawArtifactID, 10, 64)
_, err := r.fs.Save(fmt.Sprintf("tmpv4%d/%d-%d-blocklist", task.Job.RunID, task.Job.RunID, artifactID), ctx.Req.Body, -1)
_, err := r.fs.Save(fmt.Sprintf("%s/%d-%d-blocklist", makeTmpPathNameV4(task.Job.RunID), task.Job.RunID, artifactID), ctx.Req.Body, -1)
if err != nil {
log.Error("Error runner api getting task: task is not running")
ctx.HTTPError(http.StatusInternalServerError, "Error runner api getting task: task is not running")
log.Error("Error uploading blocklist %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error uploading blocklist")
return
}
ctx.JSON(http.StatusCreated, "created")
@ -357,7 +407,7 @@ type Latest struct {
}
func (r *artifactV4Routes) readBlockList(runID, artifactID int64) (*BlockList, error) {
blockListName := fmt.Sprintf("tmpv4%d/%d-%d-blocklist", runID, runID, artifactID)
blockListName := fmt.Sprintf("%s/%d-%d-blocklist", makeTmpPathNameV4(runID), runID, artifactID)
s, err := r.fs.Open(blockListName)
if err != nil {
return nil, err
@ -367,17 +417,22 @@ func (r *artifactV4Routes) readBlockList(runID, artifactID int64) (*BlockList, e
blockList := &BlockList{}
err = xdec.Decode(blockList)
_ = s.Close()
delerr := r.fs.Delete(blockListName)
if delerr != nil {
log.Warn("Failed to delete blockList %s: %v", blockListName, delerr)
}
return blockList, err
if err != nil {
return nil, err
}
return blockList, nil
}
func (r *artifactV4Routes) finalizeArtifact(ctx *ArtifactContext) {
var req FinalizeArtifactRequest
if ok := r.parseProtbufBody(ctx, &req); !ok {
if ok := r.parseProtobufBody(ctx, &req); !ok {
return
}
_, runID, ok := validateRunIDV4(ctx, req.WorkflowRunBackendId)
@ -394,30 +449,20 @@ func (r *artifactV4Routes) finalizeArtifact(ctx *ArtifactContext) {
}
var chunks []*chunkFileItem
blockList, err := r.readBlockList(runID, artifact.ID)
blockList, blockListErr := r.readBlockList(runID, artifact.ID)
chunks, err = listOrderedChunksForArtifact(r.fs, runID, artifact.ID, blockList)
if err != nil {
log.Warn("Failed to read BlockList, fallback to old behavior: %v", err)
chunkMap, err := listChunksByRunID(r.fs, runID)
if err != nil {
log.Error("Error merge chunks: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error merge chunks")
return
}
chunks, ok = chunkMap[artifact.ID]
if !ok {
log.Error("Error merge chunks")
ctx.HTTPError(http.StatusInternalServerError, "Error merge chunks")
return
}
} else {
chunks, err = listChunksByRunIDV4(r.fs, runID, artifact.ID, blockList)
if err != nil {
log.Error("Error merge chunks: %v", err)
ctx.HTTPError(http.StatusInternalServerError, "Error merge chunks")
return
}
artifact.FileSize = chunks[len(chunks)-1].End + 1
artifact.FileCompressedSize = chunks[len(chunks)-1].End + 1
log.Error("Error list chunks: %v", errors.Join(blockListErr, err))
ctx.HTTPError(http.StatusInternalServerError, "Error list chunks")
return
}
artifact.FileSize = chunks[len(chunks)-1].End + 1
artifact.FileCompressedSize = chunks[len(chunks)-1].End + 1
if req.Size != artifact.FileSize {
log.Error("Error merge chunks size mismatch")
ctx.HTTPError(http.StatusInternalServerError, "Error merge chunks size mismatch")
return
}
checksum := ""
@ -434,13 +479,13 @@ func (r *artifactV4Routes) finalizeArtifact(ctx *ArtifactContext) {
Ok: true,
ArtifactId: artifact.ID,
}
r.sendProtbufBody(ctx, &respData)
r.sendProtobufBody(ctx, &respData)
}
func (r *artifactV4Routes) listArtifacts(ctx *ArtifactContext) {
var req ListArtifactsRequest
if ok := r.parseProtbufBody(ctx, &req); !ok {
if ok := r.parseProtobufBody(ctx, &req); !ok {
return
}
_, runID, ok := validateRunIDV4(ctx, req.WorkflowRunBackendId)
@ -485,13 +530,13 @@ func (r *artifactV4Routes) listArtifacts(ctx *ArtifactContext) {
respData := ListArtifactsResponse{
Artifacts: list,
}
r.sendProtbufBody(ctx, &respData)
r.sendProtobufBody(ctx, &respData)
}
func (r *artifactV4Routes) getSignedArtifactURL(ctx *ArtifactContext) {
var req GetSignedArtifactURLRequest
if ok := r.parseProtbufBody(ctx, &req); !ok {
if ok := r.parseProtobufBody(ctx, &req); !ok {
return
}
_, runID, ok := validateRunIDV4(ctx, req.WorkflowRunBackendId)
@ -525,7 +570,7 @@ func (r *artifactV4Routes) getSignedArtifactURL(ctx *ArtifactContext) {
if respData.SignedUrl == "" {
respData.SignedUrl = r.buildArtifactURL(ctx, "DownloadArtifact", artifactName, ctx.ActionTask.ID, artifact.ID)
}
r.sendProtbufBody(ctx, &respData)
r.sendProtobufBody(ctx, &respData)
}
func (r *artifactV4Routes) downloadArtifact(ctx *ArtifactContext) {
@ -555,7 +600,7 @@ func (r *artifactV4Routes) downloadArtifact(ctx *ArtifactContext) {
func (r *artifactV4Routes) deleteArtifact(ctx *ArtifactContext) {
var req DeleteArtifactRequest
if ok := r.parseProtbufBody(ctx, &req); !ok {
if ok := r.parseProtobufBody(ctx, &req); !ok {
return
}
_, runID, ok := validateRunIDV4(ctx, req.WorkflowRunBackendId)
@ -582,5 +627,5 @@ func (r *artifactV4Routes) deleteArtifact(ctx *ArtifactContext) {
Ok: true,
ArtifactId: artifact.ID,
}
r.sendProtbufBody(ctx, &respData)
r.sendProtobufBody(ctx, &respData)
}

View File

@ -270,7 +270,7 @@ func (s *Service) UpdateLog(
rows := req.Msg.Rows[ack-req.Msg.Index:]
ns, err := actions.WriteLogs(ctx, task.LogFilename, task.LogSize, rows)
if err != nil {
return nil, status.Errorf(codes.Internal, "write logs: %v", err)
return nil, status.Errorf(codes.Internal, "unable to append logs to dbfs file: %v", err)
}
task.LogLength += int64(len(rows))
for _, n := range ns {

View File

@ -94,7 +94,7 @@ func verifyAuth(r *web.Router, authMethods []auth.Method) {
}
authGroup := auth.NewGroup(authMethods...)
r.Use(func(ctx *context.Context) {
r.AfterRouting(func(ctx *context.Context) {
var err error
ctx.Doer, err = authGroup.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session)
if err != nil {
@ -111,7 +111,7 @@ func verifyAuth(r *web.Router, authMethods []auth.Method) {
func CommonRoutes() *web.Router {
r := web.NewRouter()
r.Use(context.PackageContexter())
r.AfterRouting(context.PackageContexter())
verifyAuth(r, []auth.Method{
&auth.OAuth2{},
@ -533,7 +533,7 @@ func CommonRoutes() *web.Router {
func ContainerRoutes() *web.Router {
r := web.NewRouter()
r.Use(context.PackageContexter())
r.AfterRouting(context.PackageContexter())
verifyAuth(r, []auth.Method{
&auth.Basic{},

View File

@ -50,7 +50,7 @@ func ListUnadoptedRepositories(ctx *context.APIContext) {
return
}
ctx.SetTotalCountHeader(int64(count))
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, repoNames)
}

View File

@ -51,7 +51,7 @@ func GetAllEmails(ctx *context.APIContext) {
results[i] = convert.ToEmailSearch(emails[i])
}
ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
ctx.SetLinkHeader(maxResults, listOptions.PageSize)
ctx.SetTotalCountHeader(maxResults)
ctx.JSON(http.StatusOK, &results)
}

View File

@ -77,7 +77,7 @@ func ListHooks(ctx *context.APIContext) {
}
hooks[i] = h
}
ctx.SetLinkHeader(int(total), listOptions.PageSize)
ctx.SetLinkHeader(total, listOptions.PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, hooks)
}

View File

@ -117,7 +117,7 @@ func GetAllOrgs(ctx *context.APIContext) {
orgs[i] = convert.ToOrganization(ctx, organization.OrgFromUser(users[i]))
}
ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
ctx.SetLinkHeader(maxResults, listOptions.PageSize)
ctx.SetTotalCountHeader(maxResults)
ctx.JSON(http.StatusOK, &orgs)
}

View File

@ -534,7 +534,7 @@ func SearchUsers(ctx *context.APIContext) {
results[i] = convert.ToUser(ctx, users[i], ctx.Doer)
}
ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
ctx.SetLinkHeader(maxResults, listOptions.PageSize)
ctx.SetTotalCountHeader(maxResults)
ctx.JSON(http.StatusOK, &results)
}

View File

@ -77,7 +77,6 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
@ -756,13 +755,9 @@ func buildAuthGroup() *auth.Group {
&auth.Basic{}, // FIXME: this should be removed once we don't allow basic auth in API
)
if setting.Service.EnableReverseProxyAuthAPI {
group.Add(&auth.ReverseProxy{})
group.Add(&auth.ReverseProxy{}) // TODO: does it still make sense to support reverse proxy auth in API?
}
if setting.IsWindows && auth_model.IsSSPIEnabled(graceful.GetManager().ShutdownContext()) {
group.Add(&auth.SSPI{}) // it MUST be the last, see the comment of SSPI
}
// others: API doesn't support SSPI auth because the caller should use token
return group
}
@ -872,9 +867,9 @@ func checkDeprecatedAuthMethods(ctx *context.APIContext) {
func Routes() *web.Router {
m := web.NewRouter()
m.Use(securityHeaders())
m.BeforeRouting(securityHeaders())
if setting.CORSConfig.Enabled {
m.Use(cors.Handler(cors.Options{
m.BeforeRouting(cors.Handler(cors.Options{
AllowedOrigins: setting.CORSConfig.AllowDomain,
AllowedMethods: setting.CORSConfig.Methods,
AllowCredentials: setting.CORSConfig.AllowCredentials,
@ -882,47 +877,48 @@ func Routes() *web.Router {
MaxAge: int(setting.CORSConfig.MaxAge.Seconds()),
}))
}
m.Use(context.APIContexter())
m.Use(checkDeprecatedAuthMethods)
m.AfterRouting(context.APIContexter())
m.AfterRouting(checkDeprecatedAuthMethods)
// Get user from session if logged in.
m.Use(apiAuth(buildAuthGroup()))
m.AfterRouting(apiAuth(buildAuthGroup()))
m.Use(verifyAuthWithOptions(&common.VerifyOptions{
m.AfterRouting(verifyAuthWithOptions(&common.VerifyOptions{
SignInRequired: setting.Service.RequireSignInViewStrict,
}))
addActionsRoutes := func(
m *web.Router,
reqChecker func(ctx *context.APIContext),
reqReaderCheck func(ctx *context.APIContext),
reqOwnerCheck func(ctx *context.APIContext),
act actions.API,
) {
m.Group("/actions", func() {
m.Group("/secrets", func() {
m.Get("", reqToken(), reqChecker, act.ListActionsSecrets)
m.Get("", reqToken(), reqOwnerCheck, act.ListActionsSecrets)
m.Combo("/{secretname}").
Put(reqToken(), reqChecker, bind(api.CreateOrUpdateSecretOption{}), act.CreateOrUpdateSecret).
Delete(reqToken(), reqChecker, act.DeleteSecret)
Put(reqToken(), reqOwnerCheck, bind(api.CreateOrUpdateSecretOption{}), act.CreateOrUpdateSecret).
Delete(reqToken(), reqOwnerCheck, act.DeleteSecret)
})
m.Group("/variables", func() {
m.Get("", reqToken(), reqChecker, act.ListVariables)
m.Get("", reqToken(), reqOwnerCheck, act.ListVariables)
m.Combo("/{variablename}").
Get(reqToken(), reqChecker, act.GetVariable).
Delete(reqToken(), reqChecker, act.DeleteVariable).
Post(reqToken(), reqChecker, bind(api.CreateVariableOption{}), act.CreateVariable).
Put(reqToken(), reqChecker, bind(api.UpdateVariableOption{}), act.UpdateVariable)
Get(reqToken(), reqOwnerCheck, act.GetVariable).
Delete(reqToken(), reqOwnerCheck, act.DeleteVariable).
Post(reqToken(), reqOwnerCheck, bind(api.CreateVariableOption{}), act.CreateVariable).
Put(reqToken(), reqOwnerCheck, bind(api.UpdateVariableOption{}), act.UpdateVariable)
})
m.Group("/runners", func() {
m.Get("", reqToken(), reqChecker, act.ListRunners)
m.Post("/registration-token", reqToken(), reqChecker, act.CreateRegistrationToken)
m.Get("/{runner_id}", reqToken(), reqChecker, act.GetRunner)
m.Delete("/{runner_id}", reqToken(), reqChecker, act.DeleteRunner)
m.Get("", reqToken(), reqOwnerCheck, act.ListRunners)
m.Post("/registration-token", reqToken(), reqOwnerCheck, act.CreateRegistrationToken)
m.Get("/{runner_id}", reqToken(), reqOwnerCheck, act.GetRunner)
m.Delete("/{runner_id}", reqToken(), reqOwnerCheck, act.DeleteRunner)
})
m.Get("/runs", reqToken(), reqChecker, act.ListWorkflowRuns)
m.Get("/jobs", reqToken(), reqChecker, act.ListWorkflowJobs)
m.Get("/runs", reqToken(), reqReaderCheck, act.ListWorkflowRuns)
m.Get("/jobs", reqToken(), reqReaderCheck, act.ListWorkflowJobs)
})
}
@ -1164,7 +1160,8 @@ func Routes() *web.Router {
m.Post("/reject", repo.RejectTransfer)
}, reqToken())
addActionsRoutes(m, reqOwner(), repo.NewAction()) // it adds the routes for secrets/variables and runner management
// Adds the routes for secrets/variables and runner management
addActionsRoutes(m, reqRepoReader(unit.TypeActions), reqOwner(), repo.NewAction())
m.Group("/actions/workflows", func() {
m.Get("", repo.ActionsListRepositoryWorkflows)
@ -1636,6 +1633,7 @@ func Routes() *web.Router {
})
addActionsRoutes(
m,
reqOrgMembership(),
reqOrgOwnership(),
org.NewAction(),
)

View File

@ -125,7 +125,7 @@ func ListRepoNotifications(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(totalCount), opts.PageSize)
ctx.SetLinkHeader(totalCount, opts.PageSize)
ctx.SetTotalCountHeader(totalCount)
ctx.JSON(http.StatusOK, convert.ToNotifications(ctx, nl))
}

View File

@ -86,7 +86,7 @@ func ListNotifications(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(totalCount), opts.PageSize)
ctx.SetLinkHeader(totalCount, opts.PageSize)
ctx.SetTotalCountHeader(totalCount)
ctx.JSON(http.StatusOK, convert.ToNotifications(ctx, nl))
}

View File

@ -67,7 +67,7 @@ func (Action) ListActionsSecrets(ctx *context.APIContext) {
}
}
ctx.SetLinkHeader(int(count), opts.PageSize)
ctx.SetLinkHeader(count, opts.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiSecrets)
}
@ -240,7 +240,7 @@ func (Action) ListVariables(ctx *context.APIContext) {
Description: v.Description,
}
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, variables)
}

View File

@ -45,7 +45,7 @@ func listMembers(ctx *context.APIContext, isMember bool) {
apiMembers[i] = convert.ToUser(ctx, member, ctx.Doer)
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiMembers)
}

View File

@ -42,7 +42,7 @@ func listUserOrgs(ctx *context.APIContext, u *user_model.User) {
apiOrgs[i] = convert.ToOrganization(ctx, orgs[i])
}
ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
ctx.SetLinkHeader(maxResults, listOptions.PageSize)
ctx.SetTotalCountHeader(maxResults)
ctx.JSON(http.StatusOK, &apiOrgs)
}
@ -135,7 +135,7 @@ func GetUserOrgsPermissions(ctx *context.APIContext) {
op := api.OrganizationPermissions{}
if !organization.HasOrgOrUserVisible(ctx, o, ctx.ContextUser) {
if !organization.HasOrgOrUserVisible(ctx, o, ctx.Doer) {
ctx.APIErrorNotFound("HasOrgOrUserVisible", nil)
return
}
@ -215,7 +215,7 @@ func GetAll(ctx *context.APIContext) {
orgs[i] = convert.ToOrganization(ctx, organization.OrgFromUser(publicOrgs[i]))
}
ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
ctx.SetLinkHeader(maxResults, listOptions.PageSize)
ctx.SetTotalCountHeader(maxResults)
ctx.JSON(http.StatusOK, &orgs)
}

View File

@ -70,7 +70,7 @@ func ListTeams(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiTeams)
}
@ -111,7 +111,7 @@ func ListUserTeams(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiTeams)
}
@ -411,7 +411,7 @@ func GetTeamMembers(ctx *context.APIContext) {
members[i] = convert.ToUser(ctx, member, ctx.Doer)
}
ctx.SetLinkHeader(ctx.Org.Team.NumMembers, listOptions.PageSize)
ctx.SetLinkHeader(int64(ctx.Org.Team.NumMembers), listOptions.PageSize)
ctx.SetTotalCountHeader(int64(ctx.Org.Team.NumMembers))
ctx.JSON(http.StatusOK, members)
}
@ -583,7 +583,7 @@ func GetTeamRepos(ctx *context.APIContext) {
}
repos[i] = convert.ToRepo(ctx, repo, permission)
}
ctx.SetLinkHeader(team.NumRepos, listOptions.PageSize)
ctx.SetLinkHeader(int64(team.NumRepos), listOptions.PageSize)
ctx.SetTotalCountHeader(int64(team.NumRepos))
ctx.JSON(http.StatusOK, repos)
}
@ -827,7 +827,7 @@ func SearchTeam(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
ctx.SetLinkHeader(maxResults, listOptions.PageSize)
ctx.SetTotalCountHeader(maxResults)
ctx.JSON(http.StatusOK, map[string]any{
"ok": true,
@ -882,7 +882,7 @@ func ListTeamActivityFeeds(ctx *context.APIContext) {
ctx.APIErrorInternal(err)
return
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, convert.ToActivities(ctx, feeds, ctx.Doer))
}

View File

@ -68,7 +68,7 @@ func ListPackages(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiPackages)
}
@ -249,7 +249,7 @@ func ListPackageVersions(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiPackages)
}

View File

@ -91,7 +91,7 @@ func (Action) ListActionsSecrets(ctx *context.APIContext) {
Created: v.CreatedUnix.AsTime(),
}
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiSecrets)
}
@ -506,7 +506,7 @@ func (Action) ListVariables(ctx *context.APIContext) {
}
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, variables)
}
@ -811,7 +811,7 @@ func ListActionTasks(ctx *context.APIContext) {
res.Entries[i] = convertedTask
}
ctx.SetLinkHeader(int(total), listOptions.PageSize)
ctx.SetLinkHeader(total, listOptions.PageSize)
ctx.SetTotalCountHeader(total) // Duplicates api response field but it's better to set it for consistency
ctx.JSON(http.StatusOK, &res)
}

View File

@ -375,7 +375,7 @@ func ListBranches(ctx *context.APIContext) {
}
}
ctx.SetLinkHeader(int(totalNumOfBranches), listOptions.PageSize)
ctx.SetLinkHeader(totalNumOfBranches, listOptions.PageSize)
ctx.SetTotalCountHeader(totalNumOfBranches)
ctx.JSON(http.StatusOK, apiBranches)
}

View File

@ -290,7 +290,7 @@ func GetAllCommits(ctx *context.APIContext) {
}
}
ctx.SetLinkHeader(int(commitsCountTotal), listOptions.PageSize)
ctx.SetLinkHeader(commitsCountTotal, listOptions.PageSize)
ctx.SetTotalCountHeader(commitsCountTotal)
// kept for backwards compatibility

View File

@ -299,7 +299,7 @@ func SearchIssues(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(total), limit)
ctx.SetLinkHeader(total, limit)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, convert.ToAPIIssueList(ctx, ctx.Doer, issues))
}
@ -527,7 +527,7 @@ func ListIssues(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(total), listOptions.PageSize)
ctx.SetLinkHeader(total, listOptions.PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, convert.ToAPIIssueList(ctx, ctx.Doer, issues))
}

View File

@ -81,7 +81,7 @@ func GetIssueDependencies(ctx *context.APIContext) {
canWrite := ctx.Repo.Permission.CanWriteIssuesOrPulls(issue.IsPull)
blockerIssues := make([]*issues_model.Issue, 0, listOptions.PageSize)
blockerIssues := make([]*issues_model.Issue, 0, min(listOptions.PageSize, setting.API.MaxResponseItems))
// 2. Get the issues this issue depends on, i.e. the `<#b>`: `<issue> <- <#b>`
blockersInfo, total, err := issue.BlockedByDependencies(ctx, listOptions)
@ -140,7 +140,7 @@ func GetIssueDependencies(ctx *context.APIContext) {
}
blockerIssues = append(blockerIssues, &blocker.Issue)
}
ctx.SetLinkHeader(int(total), listOptions.PageSize)
ctx.SetLinkHeader(total, listOptions.PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, convert.ToAPIIssueList(ctx, ctx.Doer, blockerIssues))
}

View File

@ -179,7 +179,7 @@ func ListPushMirrors(ctx *context.APIContext) {
responsePushMirrors = append(responsePushMirrors, m)
}
}
ctx.SetLinkHeader(len(responsePushMirrors), utils.GetListOptions(ctx).PageSize)
ctx.SetLinkHeader(int64(len(responsePushMirrors)), utils.GetListOptions(ctx).PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, responsePushMirrors)
}

View File

@ -154,7 +154,7 @@ func ListPullRequests(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
ctx.SetLinkHeader(maxResults, listOptions.PageSize)
ctx.SetTotalCountHeader(maxResults)
ctx.JSON(http.StatusOK, &apiPrs)
}
@ -1449,7 +1449,7 @@ func GetPullRequestCommits(ctx *context.APIContext) {
apiCommits = append(apiCommits, apiCommit)
}
ctx.SetLinkHeader(totalNumberOfCommits, listOptions.PageSize)
ctx.SetLinkHeader(int64(totalNumberOfCommits), listOptions.PageSize)
ctx.SetTotalCountHeader(int64(totalNumberOfCommits))
ctx.RespHeader().Set("X-Page", strconv.Itoa(listOptions.Page))
@ -1591,7 +1591,7 @@ func GetPullRequestFiles(ctx *context.APIContext) {
apiFiles = append(apiFiles, convert.ToChangedFile(diff.Files[i], pr.BaseRepo, endCommitID))
}
ctx.SetLinkHeader(totalNumberOfFiles, listOptions.PageSize)
ctx.SetLinkHeader(int64(totalNumberOfFiles), listOptions.PageSize)
ctx.SetTotalCountHeader(int64(totalNumberOfFiles))
ctx.RespHeader().Set("X-Page", strconv.Itoa(listOptions.Page))

View File

@ -202,7 +202,7 @@ func ListReleases(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(int(filteredCount), listOptions.PageSize)
ctx.SetLinkHeader(filteredCount, listOptions.PageSize)
ctx.SetTotalCountHeader(filteredCount)
ctx.JSON(http.StatusOK, rels)
}

View File

@ -230,7 +230,7 @@ func Search(ctx *context.APIContext) {
}
results[i] = convert.ToRepo(ctx, repo, permission)
}
ctx.SetLinkHeader(int(count), opts.PageSize)
ctx.SetLinkHeader(count, opts.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, api.SearchResults{
OK: true,

View File

@ -206,7 +206,7 @@ func getCommitStatuses(ctx *context.APIContext, commitID string) {
apiStatuses = append(apiStatuses, convert.ToCommitStatus(ctx, status))
}
ctx.SetLinkHeader(int(maxResults), listOptions.PageSize)
ctx.SetLinkHeader(maxResults, listOptions.PageSize)
ctx.SetTotalCountHeader(maxResults)
ctx.JSON(http.StatusOK, apiStatuses)
@ -269,7 +269,7 @@ func GetCombinedCommitStatusByRef(ctx *context.APIContext) {
ctx.APIErrorInternal(fmt.Errorf("CountLatestCommitStatus[%s, %s]: %w", repo.FullName(), refCommit.CommitID, err))
return
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
combiStatus := convert.ToCombinedStatus(ctx, refCommit.Commit.ID.String(), statuses,

View File

@ -333,7 +333,7 @@ func ListWikiPages(ctx *context.APIContext) {
pages = append(pages, wiki_service.ToWikiPageMetaData(wikiName, c, ctx.Repo.Repository))
}
ctx.SetLinkHeader(len(entries), limit)
ctx.SetLinkHeader(int64(len(entries)), limit)
ctx.SetTotalCountHeader(int64(len(entries)))
ctx.JSON(http.StatusOK, pages)
}

View File

@ -79,7 +79,7 @@ func ListJobs(ctx *context.APIContext, ownerID, repoID, runID int64) {
}
res.Entries[i] = convertedWorkflowJob
}
ctx.SetLinkHeader(int(total), listOptions.PageSize)
ctx.SetLinkHeader(total, listOptions.PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, &res)
}
@ -185,7 +185,7 @@ func ListRuns(ctx *context.APIContext, ownerID, repoID int64) {
}
res.Entries[i] = convertedRun
}
ctx.SetLinkHeader(int(total), listOptions.PageSize)
ctx.SetLinkHeader(total, listOptions.PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, &res)
}

View File

@ -36,7 +36,7 @@ func ListBlocks(ctx *context.APIContext, blocker *user_model.User) {
users = append(users, convert.ToUser(ctx, b.Blockee, blocker))
}
ctx.SetLinkHeader(int(total), listOptions.PageSize)
ctx.SetLinkHeader(total, listOptions.PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, &users)
}

View File

@ -354,7 +354,7 @@ func ListVariables(ctx *context.APIContext) {
}
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, variables)
}

View File

@ -31,7 +31,7 @@ func listUserFollowers(ctx *context.APIContext, u *user_model.User) {
return
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
responseAPIUsers(ctx, users)
}
@ -97,7 +97,7 @@ func listUserFollowing(ctx *context.APIContext, u *user_model.User) {
return
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
responseAPIUsers(ctx, users)
}

View File

@ -94,7 +94,7 @@ func listPublicKeys(ctx *context.APIContext, user *user_model.User) {
}
}
ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetLinkHeader(count, listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, &apiKeys)
}

View File

@ -47,7 +47,7 @@ func listUserRepos(ctx *context.APIContext, u *user_model.User, private bool) {
}
}
ctx.SetLinkHeader(int(count), opts.PageSize)
ctx.SetLinkHeader(count, opts.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, &apiRepos)
}
@ -130,7 +130,7 @@ func ListMyRepos(ctx *context.APIContext) {
results[i] = convert.ToRepo(ctx, repo, permission)
}
ctx.SetLinkHeader(int(count), opts.ListOptions.PageSize)
ctx.SetLinkHeader(count, opts.ListOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, &results)
}

View File

@ -76,7 +76,7 @@ func GetStarredRepos(ctx *context.APIContext) {
return
}
ctx.SetLinkHeader(ctx.ContextUser.NumStars, utils.GetListOptions(ctx).PageSize)
ctx.SetLinkHeader(int64(ctx.ContextUser.NumStars), utils.GetListOptions(ctx).PageSize)
ctx.SetTotalCountHeader(int64(ctx.ContextUser.NumStars))
ctx.JSON(http.StatusOK, &repos)
}
@ -108,7 +108,7 @@ func GetMyStarredRepos(ctx *context.APIContext) {
ctx.APIErrorInternal(err)
}
ctx.SetLinkHeader(ctx.Doer.NumStars, utils.GetListOptions(ctx).PageSize)
ctx.SetLinkHeader(int64(ctx.Doer.NumStars), utils.GetListOptions(ctx).PageSize)
ctx.SetTotalCountHeader(int64(ctx.Doer.NumStars))
ctx.JSON(http.StatusOK, &repos)
}

Some files were not shown because too many files have changed in this diff Show More