
Merge branch 'go-gitea:main' into main

badhezi 2025-10-06 15:53:29 +03:00 committed by GitHub
commit be195659f6
GPG Key ID: B5690EEEBB952194
62 changed files with 860 additions and 971 deletions

View File

@ -151,6 +151,7 @@ func runCreateUser(ctx context.Context, c *cli.Command) error {
if err != nil {
return err
}
// codeql[disable-next-line=go/clear-text-logging]
fmt.Printf("generated random password is '%s'\n", password)
} else if userType == user_model.UserTypeIndividual {
return errors.New("must set either password or random-password flag")

View File

@ -58,6 +58,7 @@ func runMustChangePassword(ctx context.Context, c *cli.Command) error {
return err
}
// codeql[disable-next-line=go/clear-text-logging]
fmt.Printf("Updated %d users setting MustChangePassword to %t\n", n, mustChangePassword)
return nil
}

View File

@ -91,6 +91,7 @@ func runGenerateSecretKey(_ context.Context, c *cli.Command) error {
return err
}
// codeql[disable-next-line=go/clear-text-logging]
fmt.Printf("%s", secretKey)
if isatty.IsTerminal(os.Stdout.Fd()) {

View File

@ -186,7 +186,7 @@ Gitea or set your environment appropriately.`, "")
userID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPusherID), 10, 64)
prID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPRID), 10, 64)
deployKeyID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvDeployKeyID), 10, 64)
actionPerm, _ := strconv.ParseInt(os.Getenv(repo_module.EnvActionPerm), 10, 64)
actionPerm, _ := strconv.Atoi(os.Getenv(repo_module.EnvActionPerm))
hookOptions := private.HookOptions{
UserID: userID,
@ -196,7 +196,7 @@ Gitea or set your environment appropriately.`, "")
GitPushOptions: pushOptions(),
PullRequestID: prID,
DeployKeyID: deployKeyID,
ActionPerm: int(actionPerm),
ActionPerm: actionPerm,
}
scanner := bufio.NewScanner(os.Stdin)
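strconv.Atoi is documented as equivalent to strconv.ParseInt(s, 10, 0) converted to int, so reading the permission straight into an int drops the later int(...) narrowing. A tiny standalone check (the environment variable name below is hypothetical, for illustration only; the real one comes from repo_module.EnvActionPerm):

package main

import (
    "fmt"
    "os"
    "strconv"
)

func main() {
    // hypothetical variable name, set here only so the example is self-contained
    os.Setenv("EXAMPLE_ACTION_PERM", "2")
    actionPerm, err := strconv.Atoi(os.Getenv("EXAMPLE_ACTION_PERM"))
    fmt.Printf("%T %d %v\n", actionPerm, actionPerm, err) // int 2 <nil>
}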

go.mod
View File

@ -295,7 +295,7 @@ replace github.com/jaytaylor/html2text => github.com/Necoro/html2text v0.0.0-202
replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1
replace github.com/nektos/act => gitea.com/gitea/act v0.261.6
replace github.com/nektos/act => gitea.com/gitea/act v0.261.7-0.20251003180512-ac6e4b751763
// TODO: the only difference is in `PutObject`: the fork doesn't use `NewVerifyingReader(r, sha256.New(), oid, expectedSize)`, need to figure out why
replace github.com/charmbracelet/git-lfs-transfer => gitea.com/gitea/git-lfs-transfer v0.2.0

go.sum
View File

@ -31,8 +31,8 @@ dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
gitea.com/gitea/act v0.261.6 h1:CjZwKOyejonNFDmsXOw3wGm5Vet573hHM6VMLsxtvPY=
gitea.com/gitea/act v0.261.6/go.mod h1:Pg5C9kQY1CEA3QjthjhlrqOC/QOT5NyWNjOjRHw23Ok=
gitea.com/gitea/act v0.261.7-0.20251003180512-ac6e4b751763 h1:ohdxegvslDEllZmRNDqpKun6L4Oq81jNdEDtGgHEV2c=
gitea.com/gitea/act v0.261.7-0.20251003180512-ac6e4b751763/go.mod h1:Pg5C9kQY1CEA3QjthjhlrqOC/QOT5NyWNjOjRHw23Ok=
gitea.com/gitea/git-lfs-transfer v0.2.0 h1:baHaNoBSRaeq/xKayEXwiDQtlIjps4Ac/Ll4KqLMB40=
gitea.com/gitea/git-lfs-transfer v0.2.0/go.mod h1:UrXUCm3xLQkq15fu7qlXHUMlrhdlXHoi13KH2Dfiits=
gitea.com/gitea/go-xsd-duration v0.0.0-20220703122237-02e73435a078 h1:BAFmdZpRW7zMQZQDClaCWobRj9uL1MR3MzpCVJvc5s4=

View File

@ -213,3 +213,15 @@
is_deleted: false
deleted_by_id: 0
deleted_unix: 0
-
id: 26
repo_id: 10
name: 'feature/1'
commit_id: '65f1bf27bc3bf70f64657658635e66094edbcb4d'
commit_message: 'Initial commit'
commit_time: 1489950479
pusher_id: 2
is_deleted: false
deleted_by_id: 0
deleted_unix: 0

View File

@ -605,7 +605,7 @@ func (repo *Repository) IsGenerated() bool {
// RepoPath returns repository path by given user and repository name.
func RepoPath(userName, repoName string) string { //revive:disable-line:exported
return filepath.Join(user_model.UserPath(userName), strings.ToLower(repoName)+".git")
return filepath.Join(setting.RepoRootPath, filepath.Clean(strings.ToLower(userName)), filepath.Clean(strings.ToLower(repoName)+".git"))
}
// RepoPath returns the repository path

View File

@ -980,7 +980,7 @@ func GetInactiveUsers(ctx context.Context, olderThan time.Duration) ([]*User, er
// UserPath returns the path absolute path of user repositories.
func UserPath(userName string) string { //revive:disable-line:exported
return filepath.Join(setting.RepoRootPath, strings.ToLower(userName))
return filepath.Join(setting.RepoRootPath, filepath.Clean(strings.ToLower(userName)))
}
// GetUserByID returns the user object by given ID if exists.

View File

@ -61,17 +61,11 @@ func NewArgon2Hasher(config string) *Argon2Hasher {
return nil
}
parsed, err := parseUIntParam(vals[0], "time", "argon2", config, nil)
hasher.time = uint32(parsed)
parsed, err = parseUIntParam(vals[1], "memory", "argon2", config, err)
hasher.memory = uint32(parsed)
parsed, err = parseUIntParam(vals[2], "threads", "argon2", config, err)
hasher.threads = uint8(parsed)
parsed, err = parseUIntParam(vals[3], "keyLen", "argon2", config, err)
hasher.keyLen = uint32(parsed)
var err error
hasher.time, err = parseUintParam[uint32](vals[0], "time", "argon2", config, nil)
hasher.memory, err = parseUintParam[uint32](vals[1], "memory", "argon2", config, err)
hasher.threads, err = parseUintParam[uint8](vals[2], "threads", "argon2", config, err)
hasher.keyLen, err = parseUintParam[uint32](vals[3], "keyLen", "argon2", config, err)
if err != nil {
return nil
}

View File

@ -7,6 +7,7 @@ import (
"strconv"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
)
func parseIntParam(value, param, algorithmName, config string, previousErr error) (int, error) {
@ -18,11 +19,12 @@ func parseIntParam(value, param, algorithmName, config string, previousErr error
return parsed, previousErr // <- Keep the previous error as this function should still return an error once everything has been checked if any call failed
}
func parseUIntParam(value, param, algorithmName, config string, previousErr error) (uint64, error) { //nolint:unparam // algorithmName is always argon2
parsed, err := strconv.ParseUint(value, 10, 64)
func parseUintParam[T uint32 | uint8](value, param, algorithmName, config string, previousErr error) (ret T, _ error) {
_, isUint32 := any(ret).(uint32)
parsed, err := strconv.ParseUint(value, 10, util.Iif(isUint32, 32, 8))
if err != nil {
log.Error("invalid integer for %s representation in %s hash spec %s", param, algorithmName, config)
return 0, err
}
return parsed, previousErr // <- Keep the previous error as this function should still return an error once everything has been checked if any call failed
return T(parsed), previousErr // <- Keep the previous error as this function should still return an error once everything has been checked if any call failed
}
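The generic replacement derives the ParseUint bit size from the instantiated type, so an out-of-range value fails at parse time instead of being silently truncated by a later uint8/uint32 conversion. A minimal standalone sketch of the same pattern, with a local iif helper standing in for Gitea's util.Iif:

package main

import (
    "fmt"
    "strconv"
)

// iif is a tiny stand-in for util.Iif used by the real helper.
func iif[T any](cond bool, a, b T) T {
    if cond {
        return a
    }
    return b
}

// parseUintParam mirrors the generic pattern: the bit size passed to
// strconv.ParseUint is derived from the concrete type parameter.
func parseUintParam[T uint32 | uint8](value string) (T, error) {
    var ret T
    _, isUint32 := any(ret).(uint32)
    parsed, err := strconv.ParseUint(value, 10, iif(isUint32, 32, 8))
    if err != nil {
        return 0, err
    }
    return T(parsed), nil
}

func main() {
    t, _ := parseUintParam[uint32]("65536")  // fits in 32 bits
    _, err := parseUintParam[uint8]("65536") // out of range for 8 bits
    fmt.Println(t, err)
}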

View File

@ -72,7 +72,7 @@ func newRequest(ctx context.Context, method, url string, body io.ReadCloser) (*h
// Adding padding will make requests more secure, however is also slower
// because artificial responses will be added to the response
// For more information, see https://www.troyhunt.com/enhancing-pwned-passwords-privacy-with-padding/
func (c *Client) CheckPassword(pw string, padding bool) (int, error) {
func (c *Client) CheckPassword(pw string, padding bool) (int64, error) {
if pw == "" {
return -1, ErrEmptyPassword
}
@ -111,7 +111,7 @@ func (c *Client) CheckPassword(pw string, padding bool) (int, error) {
if err != nil {
return -1, err
}
return int(count), nil
return count, nil
}
}
return 0, nil
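CheckPassword now returns the count as the int64 presumably produced by strconv.ParseInt instead of narrowing it to int. For context, a minimal sketch of parsing one line of a Pwned Passwords range response; the SUFFIX:COUNT line format is an assumption about the upstream API, not something shown in this diff:

package main

import (
    "fmt"
    "strconv"
    "strings"
)

// parseRangeLine parses one "SUFFIX:COUNT" line from a range response
// and returns the count without narrowing it to int.
func parseRangeLine(line string) (suffix string, count int64, err error) {
    suffix, countStr, ok := strings.Cut(line, ":")
    if !ok {
        return "", 0, fmt.Errorf("malformed line %q", line)
    }
    count, err = strconv.ParseInt(strings.TrimSpace(countStr), 10, 64)
    return suffix, count, err
}

func main() {
    fmt.Println(parseRangeLine("0018A45C4D1DEF81644B54AB7F969B88D65:52"))
}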

View File

@ -37,25 +37,25 @@ func TestPassword(t *testing.T) {
count, err := client.CheckPassword("", false)
assert.ErrorIs(t, err, ErrEmptyPassword, "blank input should return ErrEmptyPassword")
assert.Equal(t, -1, count)
assert.EqualValues(t, -1, count)
count, err = client.CheckPassword("pwned", false)
assert.NoError(t, err)
assert.Equal(t, 1, count)
assert.EqualValues(t, 1, count)
count, err = client.CheckPassword("notpwned", false)
assert.NoError(t, err)
assert.Equal(t, 0, count)
assert.EqualValues(t, 0, count)
count, err = client.CheckPassword("paddedpwned", true)
assert.NoError(t, err)
assert.Equal(t, 1, count)
assert.EqualValues(t, 1, count)
count, err = client.CheckPassword("paddednotpwned", true)
assert.NoError(t, err)
assert.Equal(t, 0, count)
assert.EqualValues(t, 0, count)
count, err = client.CheckPassword("paddednotpwnedzero", true)
assert.NoError(t, err)
assert.Equal(t, 0, count)
assert.EqualValues(t, 0, count)
}

View File

@ -45,7 +45,7 @@ func GetHook(repoPath, name string) (*Hook, error) {
}
h := &Hook{
name: name,
path: filepath.Join(repoPath, "hooks", name+".d", name),
path: filepath.Join(repoPath, filepath.Join("hooks", name+".d", name)),
}
isFile, err := util.IsFile(h.path)
if err != nil {

View File

@ -18,6 +18,7 @@ func GetLevel() Level {
}
func Log(skip int, level Level, format string, v ...any) {
// codeql[disable-next-line=go/clear-text-logging]
GetLogger(DEFAULT).Log(skip+1, &Event{Level: level}, format, v...)
}

View File

@ -20,6 +20,7 @@ func BaseLoggerToGeneralLogger(b BaseLogger) Logger {
var _ Logger = (*baseToLogger)(nil)
func (s *baseToLogger) Log(skip int, event *Event, format string, v ...any) {
// codeql[disable-next-line=go/clear-text-logging]
s.base.Log(skip+1, event, format, v...)
}

View File

@ -65,7 +65,7 @@ func decodeEnvSectionKey(encoded string) (ok bool, section, key string) {
decodedBytes := make([]byte, len(toDecode)/2)
for i := 0; i < len(toDecode)/2; i++ {
// Can ignore error here as we know these should be hexadecimal from the regexp
byteInt, _ := strconv.ParseInt(toDecode[2*i:2*i+2], 16, 0)
byteInt, _ := strconv.ParseInt(toDecode[2*i:2*i+2], 16, 8)
decodedBytes[i] = byte(byteInt)
}
if inKey {

View File

@ -19,7 +19,7 @@ type TempDir struct {
}
func (td *TempDir) JoinPath(elems ...string) string {
return filepath.Join(append([]string{td.base, td.sub}, elems...)...)
return filepath.Join(append([]string{td.base, td.sub}, filepath.Join(elems...))...)
}
// MkdirAllSub works like os.MkdirAll, but the base directory must exist

View File

@ -62,6 +62,9 @@ sub = Changed Sub String
found := lang1.HasKey("no-such")
assert.False(t, found)
assert.NoError(t, ls.Close())
res := lang1.TrHTML("<no-such>")
assert.Equal(t, "&lt;no-such&gt;", string(res))
}
func TestLocaleStoreMoreSource(t *testing.T) {

View File

@ -6,6 +6,7 @@ package i18n
import (
"errors"
"fmt"
"html"
"html/template"
"slices"
@ -109,8 +110,7 @@ func (store *localeStore) Close() error {
}
func (l *locale) TrString(trKey string, trArgs ...any) string {
format := trKey
var format string
idx, ok := l.store.trKeyToIdxMap[trKey]
if ok {
if msg, ok := l.idxToMsgMap[idx]; ok {
@ -122,7 +122,9 @@ func (l *locale) TrString(trKey string, trArgs ...any) string {
}
}
}
if format == "" {
format = html.EscapeString(trKey)
}
msg, err := Format(format, trArgs...)
if err != nil {
log.Error("Error whilst formatting %q in %s: %v", trKey, l.langName, err)
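With the new fallback, an unknown translation key is HTML-escaped before it is used as the format string, so TrHTML cannot echo markup from a caller-supplied key (matching the TrHTML("<no-such>") assertion in the test above). A small standard-library sketch of that fallback:

package main

import (
    "fmt"
    "html"
)

// trFallback mimics the new behavior: when no translation is found,
// the raw key is escaped before it becomes the output format string.
func trFallback(trKey, found string) string {
    format := found
    if format == "" {
        format = html.EscapeString(trKey)
    }
    return format
}

func main() {
    fmt.Println(trFallback("<no-such>", "")) // &lt;no-such&gt;
}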

View File

@ -26,13 +26,14 @@ func HexToRBGColor(colorString string) (float64, float64, float64) {
if len(hexString) == 8 {
hexString = hexString[0:6]
}
color, err := strconv.ParseUint(hexString, 16, 64)
color, err := strconv.ParseUint(hexString, 16, 32)
color32 := uint32(color)
if err != nil {
return 0, 0, 0
}
r := float64(uint8(0xFF & (uint32(color) >> 16)))
g := float64(uint8(0xFF & (uint32(color) >> 8)))
b := float64(uint8(0xFF & uint32(color)))
r := float64(uint8(0xFF & (color32 >> 16)))
g := float64(uint8(0xFF & (color32 >> 8)))
b := float64(uint8(0xFF & color32))
return r, g, b
}
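The parse is now capped at 32 bits and the masking is done on a single uint32. A standalone sketch with a worked example, assuming the "#" prefix and any alpha digits were already stripped as the surrounding function does: hexToRGB("ef4444") yields 239, 68, 68.

package main

import (
    "fmt"
    "strconv"
)

func hexToRGB(hexString string) (r, g, b float64) {
    // parse at most 32 bits, matching the new bitSize argument
    color, err := strconv.ParseUint(hexString, 16, 32)
    if err != nil {
        return 0, 0, 0
    }
    color32 := uint32(color)
    r = float64(uint8(0xFF & (color32 >> 16)))
    g = float64(uint8(0xFF & (color32 >> 8)))
    b = float64(uint8(0xFF & color32))
    return r, g, b
}

func main() {
    fmt.Println(hexToRGB("ef4444")) // 239 68 68
}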

View File

@ -2228,7 +2228,6 @@ settings.event_pull_request_merge=Sloučení pull requestu
settings.event_package=Balíček
settings.event_package_desc=Balíček vytvořen nebo odstraněn v repozitáři.
settings.branch_filter=Filtr větví
settings.branch_filter_desc=Povolené větve pro události nahrání, vytvoření větve a smazání větve jsou určeny pomocí zástupného vzoru. Pokud je prázdný nebo <code>*</code>, všechny události jsou ohlášeny. Podívejte se na dokumentaci syntaxe na <a href="%[1]s">github.com/gobwas/glob</a>. Příklady: <code>master</code>, <code>{master,release*}</code>.
settings.authorization_header=Autorizační hlavička
settings.authorization_header_desc=Pokud vyplněno, bude připojeno k požadavkům jako autorizační hlavička. Příklady: %s.
settings.active=Aktivní

View File

@ -2269,7 +2269,6 @@ settings.event_workflow_job_desc=Gitea Actions Workflow Job in Warteschlange, wa
settings.event_package=Paket
settings.event_package_desc=Paket wurde in einem Repository erstellt oder gelöscht.
settings.branch_filter=Branch-Filter
settings.branch_filter_desc=Whitelist für Branches für Push-, Erzeugungs- und Löschevents, als Glob-Pattern beschrieben. Es werden Events für alle Branches gemeldet, falls das Pattern <code>*</code> ist, oder falls es leer ist. Siehe die <a href="%[1]s">%[2]s</a> Dokumentation für die Syntax (Englisch). Beispiele: <code>master</code>, <code>{master,release*}</code>.
settings.authorization_header=Authorization-Header
settings.authorization_header_desc=Wird, falls vorhanden, als Authorization-Header mitgesendet. Beispiele: %s.
settings.active=Aktiv

View File

@ -109,6 +109,7 @@ copy_path=Copier le chemin
copy_success=Copié !
copy_error=Échec de la copie
copy_type_unsupported=Ce type de fichier ne peut pas être copié
copy_filename=Copier le nom du fichier
write=Écrire
preview=Aperçu
@ -2433,7 +2434,6 @@ settings.event_workflow_job_desc=Travaux du flux de travail Gitea Actions en fil
settings.event_package=Paquet
settings.event_package_desc=Paquet créé ou supprimé.
settings.branch_filter=Filtre de branche
settings.branch_filter_desc=Liste de branches et motifs globs autorisant la soumission, la création et suppression de branches. Laisser vide ou utiliser <code>*</code> englobent toutes les branches. Voir la <a href="%[1]s">%[2]s</a>. Exemples : <code>master</code>, <code>{master,release*}</code>.
settings.authorization_header=En-tête « Authorization »
settings.authorization_header_desc=Si présent, sera ajouté aux requêtes comme en-tête d'authentification. Exemples : %s.
settings.active=Actif
@ -3733,6 +3733,9 @@ settings.link.select=Sélectionner un dépôt
settings.link.button=Actualiser le lien du dépôt
settings.link.success=Le lien du dépôt a été mis à jour avec succès.
settings.link.error=Impossible de mettre à jour le lien du dépôt.
settings.link.repo_not_found=Dépôt %s non trouvé.
settings.unlink.error=Impossible de supprimer le lien du dépôt.
settings.unlink.success=Le lien du dépôt a été supprimé.
settings.delete=Supprimer le paquet
settings.delete.description=Supprimer un paquet est permanent et irréversible.
settings.delete.notice=Vous êtes sur le point de supprimer %s (%s). Cette opération est irréversible, êtes-vous sûr ?

View File

@ -109,6 +109,7 @@ copy_path=Cóipeáil cosán
copy_success=Cóipeáil!
copy_error=Theip ar an gcóipeáil
copy_type_unsupported=Ní féidir an cineál comhaid seo a chóipeáil
copy_filename=Cóipeáil ainm comhaid
write=Scríobh
preview=Réamhamharc
@ -2433,7 +2434,9 @@ settings.event_workflow_job_desc=Gitea Actions Sreabhadh oibre post ciúáilte,
settings.event_package=Pacáiste
settings.event_package_desc=Pacáiste a cruthaíodh nó a scriosadh i stóras.
settings.branch_filter=Scagaire brainse
settings.branch_filter_desc=Liosta bán brainse le haghaidh brú, cruthú brainse agus imeachtaí scriosta brainse, sonraithe mar phatrún glob. Má tá sé folamh nó <code>*</code>, tuairiscítear imeachtaí do gach brainse. Féach <a href="%[1]s">%[2]s</a> doiciméadú le haghaidh comhréire. Samplaí: <code>máistir</code>, <code>{master,release*}</code>.
settings.branch_filter_desc_1=Liosta ceadanna brainse (agus ainm tagartha) le haghaidh imeachtaí brú, cruthaithe brainse agus scriosadh brainse, sonraithe mar phatrún glob. Más folamh nó <code>*</code> é, tuairiscítear imeachtaí do na brainsí agus na clibeanna uile.
settings.branch_filter_desc_2=Úsáid réimír <code>refs/heads/</code> nó <code>refs/tags/</code> chun ainmneacha tagartha iomlána a mheaitseáil.
settings.branch_filter_desc_doc=Féach ar dhoiciméadú <a href="%[1]s">%[2]s</a> le haghaidh comhréir.
settings.authorization_header=Ceanntásc Údaraithe
settings.authorization_header_desc=Cuirfear san áireamh mar cheanntásc údaraithe d'iarratais nuair a bheidh ann Samplaí: %s.
settings.active=Gníomhach

View File

@ -2433,7 +2433,6 @@ settings.event_workflow_job_desc=Gitea Actions のワークフロージョブが
settings.event_package=パッケージ
settings.event_package_desc=リポジトリにパッケージが作成または削除されたとき。
settings.branch_filter=ブランチ フィルター
settings.branch_filter_desc=プッシュ、ブランチ作成、ブランチ削除のイベントを通知するブランチを、globパターンで指定するホワイトリストです。 空か<code>*</code>のときは、すべてのブランチのイベントを通知します。 文法については <a href="%[1]s">%[2]s</a> を参照してください。 例: <code>master</code> 、 <code>{master,release*}</code>
settings.authorization_header=Authorizationヘッダー
settings.authorization_header_desc=入力した場合、リクエストにAuthorizationヘッダーとして付加します。 例: %s
settings.active=有効

View File

@ -109,6 +109,7 @@ copy_path=Copiar caminho
copy_success=Copiado!
copy_error=Falha ao copiar
copy_type_unsupported=Este tipo de ficheiro não pode ser copiado
copy_filename=Copiar o nome do ficheiro
write=Escrever
preview=Pré-visualizar
@ -2433,7 +2434,9 @@ settings.event_workflow_job_desc=O trabalho da sequência de trabalho das opera
settings.event_package=Pacote
settings.event_package_desc=Pacote criado ou eliminado num repositório.
settings.branch_filter=Filtro de ramos
settings.branch_filter_desc=Lista dos ramos a serem considerados nos eventos de envio e de criação e eliminação de ramos, especificada como um padrão glob. Se estiver em branco ou for <code>*</code>, serão reportados eventos para todos os ramos. Veja a <a href="%[1]s">documentação</a> para ver os detalhes da sintaxe. Exemplos: <code>main</code>, <code>{main,release*}</code>.
settings.branch_filter_desc_1=A lista de permissão do ramo (e nome de referência) para eventos de envio, criação de ramos e eliminação de ramos, especificada como padrão glob. Se estiver em branco ou for <code>*</code>, serão reportados eventos para todos os ramos e etiquetas.
settings.branch_filter_desc_2=Use um prefixo <code>refs/heads/</code> ou <code>refs/tags/</code> para corresponder aos nomes completos de referência.
settings.branch_filter_desc_doc=Consulte a documentação <a href="%[1]s">%[2]s</a> para ver a sintaxe.
settings.authorization_header=Cabeçalho de Autorização
settings.authorization_header_desc=Será incluído como cabeçalho de autorização para pedidos, quando estiver presente. Exemplos: %s.
settings.active=Em funcionamento

File diff suppressed because it is too large.

View File

@ -2434,7 +2434,6 @@ settings.event_workflow_job_desc=Gitea 工作流队列中、等待中、正在
settings.event_package=软件包
settings.event_package_desc=软件包在仓库中已创建或删除。
settings.branch_filter=分支过滤
settings.branch_filter_desc=推送、创建,删除分支事件的分支白名单,使用 glob 表达式匹配指定。若为空或 <code>*</code>,则会报告所有分支的事件。语法文档见 <a href="%[1]s">%[2]s</a>。示例:<code>master</code>、<code>{master,release*}</code>。
settings.authorization_header=授权标头
settings.authorization_header_desc=当存在时将被作为授权标头包含在内。例如: %s。
settings.active=激活

View File

@ -2213,7 +2213,6 @@ settings.event_pull_request_merge=合併請求合併
settings.event_package=套件
settings.event_package_desc=套件已在儲存庫中建立或刪除。
settings.branch_filter=分支篩選
settings.branch_filter_desc=推送、建立分支、刪除分支事件的白名單,請使用 glob 比對模式。如果留白或輸入<code>*</code>,所有分支的事件都會被回報。語法參見 <a href="https://pkg.go.dev/github.com/gobwas/glob#Compile">github.com/gobwas/glob</a>。範例:<code>master</code>, <code>{master,release*}</code>。
settings.authorization_header=Authorization 標頭
settings.authorization_header_desc=存在時將將包含此 Authorization 標頭在請求中。例: %s。
settings.active=啟用

View File

@ -67,7 +67,6 @@
"devDependencies": {
"@eslint-community/eslint-plugin-eslint-comments": "4.5.0",
"@playwright/test": "1.55.1",
"@stoplight/spectral-cli": "6.15.0",
"@stylistic/eslint-plugin": "5.4.0",
"@stylistic/stylelint-plugin": "4.0.0",
"@types/codemirror": "5.60.16",
@ -102,6 +101,7 @@
"material-icon-theme": "5.27.0",
"nolyfill": "1.0.44",
"postcss-html": "1.8.0",
"spectral-cli-bundle": "1.0.3",
"stylelint": "16.24.0",
"stylelint-config-recommended": "17.0.0",
"stylelint-declaration-block-no-ignored-properties": "2.8.0",

pnpm-lock.yaml (generated)

File diff suppressed because it is too large.

View File

@ -169,7 +169,7 @@ func MoveIssuePin(ctx *context.APIContext) {
return
}
err = issues_model.MovePin(ctx, issue, int(ctx.PathParamInt64("position")))
err = issues_model.MovePin(ctx, issue, ctx.PathParamInt("position"))
if err != nil {
ctx.APIErrorInternal(err)
return

View File

@ -6,6 +6,7 @@ package devtest
import (
mathRand "math/rand/v2"
"net/http"
"slices"
"strconv"
"strings"
"time"
@ -17,25 +18,29 @@ import (
"code.gitea.io/gitea/services/context"
)
func generateMockStepsLog(logCur actions.LogCursor) (stepsLog []*actions.ViewStepLog) {
mockedLogs := []string{
"::group::test group for: step={step}, cursor={cursor}",
"in group msg for: step={step}, cursor={cursor}",
"in group msg for: step={step}, cursor={cursor}",
"in group msg for: step={step}, cursor={cursor}",
"::endgroup::",
type generateMockStepsLogOptions struct {
mockCountFirst int
mockCountGeneral int
groupRepeat int
}
func generateMockStepsLog(logCur actions.LogCursor, opts generateMockStepsLogOptions) (stepsLog []*actions.ViewStepLog) {
var mockedLogs []string
mockedLogs = append(mockedLogs, "::group::test group for: step={step}, cursor={cursor}")
mockedLogs = append(mockedLogs, slices.Repeat([]string{"in group msg for: step={step}, cursor={cursor}"}, opts.groupRepeat)...)
mockedLogs = append(mockedLogs, "::endgroup::")
mockedLogs = append(mockedLogs,
"message for: step={step}, cursor={cursor}",
"message for: step={step}, cursor={cursor}",
"##[group]test group for: step={step}, cursor={cursor}",
"in group msg for: step={step}, cursor={cursor}",
"##[endgroup]",
}
cur := logCur.Cursor // usually the cursor is the "file offset", but here we abuse it as "line number" to make the mock easier, intentionally
mockCount := util.Iif(logCur.Step == 0, 3, 1)
if logCur.Step == 1 && logCur.Cursor == 0 {
mockCount = 30 // for the first batch, return as many as possible to test the auto-expand and auto-scroll
}
for i := 0; i < mockCount; i++ {
)
// usually the cursor is the "file offset", but here we abuse it as "line number" to make the mock easier, intentionally
cur := logCur.Cursor
// for the first batch, return as many as possible to test the auto-expand and auto-scroll
mockCount := util.Iif(logCur.Cursor == 0, opts.mockCountFirst, opts.mockCountGeneral)
for range mockCount {
logStr := mockedLogs[int(cur)%len(mockedLogs)]
cur++
logStr = strings.ReplaceAll(logStr, "{step}", strconv.Itoa(logCur.Step))
@ -127,21 +132,28 @@ func MockActionsRunsJobs(ctx *context.Context) {
Duration: "3h",
})
var mockLogOptions []generateMockStepsLogOptions
resp.State.CurrentJob.Steps = append(resp.State.CurrentJob.Steps, &actions.ViewJobStep{
Summary: "step 0 (mock slow)",
Duration: time.Hour.String(),
Status: actions_model.StatusRunning.String(),
})
mockLogOptions = append(mockLogOptions, generateMockStepsLogOptions{mockCountFirst: 30, mockCountGeneral: 1, groupRepeat: 3})
resp.State.CurrentJob.Steps = append(resp.State.CurrentJob.Steps, &actions.ViewJobStep{
Summary: "step 1 (mock fast)",
Duration: time.Hour.String(),
Status: actions_model.StatusRunning.String(),
})
mockLogOptions = append(mockLogOptions, generateMockStepsLogOptions{mockCountFirst: 30, mockCountGeneral: 3, groupRepeat: 20})
resp.State.CurrentJob.Steps = append(resp.State.CurrentJob.Steps, &actions.ViewJobStep{
Summary: "step 2 (mock error)",
Duration: time.Hour.String(),
Status: actions_model.StatusRunning.String(),
})
mockLogOptions = append(mockLogOptions, generateMockStepsLogOptions{mockCountFirst: 30, mockCountGeneral: 3, groupRepeat: 3})
if len(req.LogCursors) == 0 {
ctx.JSON(http.StatusOK, resp)
return
@ -156,7 +168,7 @@ func MockActionsRunsJobs(ctx *context.Context) {
}
doSlowResponse = doSlowResponse || logCur.Step == 0
doErrorResponse = doErrorResponse || logCur.Step == 2
resp.Logs.StepsLog = append(resp.Logs.StepsLog, generateMockStepsLog(logCur)...)
resp.Logs.StepsLog = append(resp.Logs.StepsLog, generateMockStepsLog(logCur, mockLogOptions[logCur.Step])...)
}
if doErrorResponse {
if mathRand.Float64() > 0.5 {

View File

@ -35,7 +35,7 @@ type RepoSearchOptions struct {
// This function is also used to render the Admin Repository Management page.
func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) {
// Sitemap index for sitemap paths
page := int(ctx.PathParamInt64("idx"))
page := ctx.PathParamInt("idx")
isSitemap := ctx.PathParam("idx") != ""
if page <= 1 {
page = ctx.FormInt("page")

View File

@ -34,7 +34,7 @@ func isKeywordValid(keyword string) bool {
// RenderUserSearch render user search page
func RenderUserSearch(ctx *context.Context, opts user_model.SearchUserOptions, tplName templates.TplName) {
// Sitemap index for sitemap paths
opts.Page = int(ctx.PathParamInt64("idx"))
opts.Page = ctx.PathParamInt("idx")
isSitemap := ctx.PathParam("idx") != ""
if opts.Page <= 1 {
opts.Page = ctx.FormInt("page")

View File

@ -25,33 +25,28 @@ func Activity(ctx *context.Context) {
ctx.Data["PageIsPulse"] = true
ctx.Data["Period"] = ctx.PathParam("period")
timeUntil := time.Now()
var timeFrom time.Time
switch ctx.Data["Period"] {
period, timeFrom := "weekly", timeUntil.Add(-time.Hour*168)
switch ctx.PathParam("period") {
case "daily":
timeFrom = timeUntil.Add(-time.Hour * 24)
period, timeFrom = "daily", timeUntil.Add(-time.Hour*24)
case "halfweekly":
timeFrom = timeUntil.Add(-time.Hour * 72)
period, timeFrom = "halfweekly", timeUntil.Add(-time.Hour*72)
case "weekly":
timeFrom = timeUntil.Add(-time.Hour * 168)
period, timeFrom = "weekly", timeUntil.Add(-time.Hour*168)
case "monthly":
timeFrom = timeUntil.AddDate(0, -1, 0)
period, timeFrom = "monthly", timeUntil.AddDate(0, -1, 0)
case "quarterly":
timeFrom = timeUntil.AddDate(0, -3, 0)
period, timeFrom = "quarterly", timeUntil.AddDate(0, -3, 0)
case "semiyearly":
timeFrom = timeUntil.AddDate(0, -6, 0)
period, timeFrom = "semiyearly", timeUntil.AddDate(0, -6, 0)
case "yearly":
timeFrom = timeUntil.AddDate(-1, 0, 0)
default:
ctx.Data["Period"] = "weekly"
timeFrom = timeUntil.Add(-time.Hour * 168)
period, timeFrom = "yearly", timeUntil.AddDate(-1, 0, 0)
}
ctx.Data["DateFrom"] = timeFrom
ctx.Data["DateUntil"] = timeUntil
ctx.Data["PeriodText"] = ctx.Tr("repo.activity.period." + ctx.Data["Period"].(string))
ctx.Data["Period"] = period
ctx.Data["PeriodText"] = ctx.Tr("repo.activity.period." + period)
canReadCode := ctx.Repo.CanRead(unit.TypeCode)
if canReadCode {
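The rewrite assigns the "weekly" default up front and then overrides the normalized period name and the start time together in each case, so no separate default branch is needed. An abbreviated standalone sketch (only a few of the periods shown):

package main

import (
    "fmt"
    "time"
)

// periodRange mirrors the rewritten switch: default to "weekly" and
// override both the normalized period name and the start time together.
func periodRange(pathParam string, until time.Time) (string, time.Time) {
    period, from := "weekly", until.Add(-time.Hour*168)
    switch pathParam {
    case "daily":
        period, from = "daily", until.Add(-time.Hour*24)
    case "monthly":
        period, from = "monthly", until.AddDate(0, -1, 0)
    case "yearly":
        period, from = "yearly", until.AddDate(-1, 0, 0)
    }
    return period, from
}

func main() {
    now := time.Now()
    p, from := periodRange("no-such-period", now)
    fmt.Println(p, now.Sub(from)) // weekly 168h0m0s
}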

View File

@ -376,7 +376,7 @@ func (h *serviceHandler) sendFile(ctx *context.Context, contentType, file string
ctx.Resp.WriteHeader(http.StatusBadRequest)
return
}
reqFile := filepath.Join(h.getRepoDir(), file)
reqFile := filepath.Join(h.getRepoDir(), filepath.Clean(file))
fi, err := os.Stat(reqFile)
if os.IsNotExist(err) {
@ -395,13 +395,12 @@ func (h *serviceHandler) sendFile(ctx *context.Context, contentType, file string
var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`)
func prepareGitCmdWithAllowedService(service string) (*gitcmd.Command, error) {
if service == "receive-pack" {
return gitcmd.NewCommand("receive-pack"), nil
if service == ServiceTypeReceivePack {
return gitcmd.NewCommand(ServiceTypeReceivePack), nil
}
if service == "upload-pack" {
return gitcmd.NewCommand("upload-pack"), nil
if service == ServiceTypeUploadPack {
return gitcmd.NewCommand(ServiceTypeUploadPack), nil
}
return nil, fmt.Errorf("service %q is not allowed", service)
}
@ -464,11 +463,16 @@ func serviceRPC(ctx *context.Context, h *serviceHandler, service string) {
}
}
const (
ServiceTypeUploadPack = "upload-pack"
ServiceTypeReceivePack = "receive-pack"
)
// ServiceUploadPack implements Git Smart HTTP protocol
func ServiceUploadPack(ctx *context.Context) {
h := httpBase(ctx)
if h != nil {
serviceRPC(ctx, h, "upload-pack")
serviceRPC(ctx, h, ServiceTypeUploadPack)
}
}
@ -476,16 +480,18 @@ func ServiceUploadPack(ctx *context.Context) {
func ServiceReceivePack(ctx *context.Context) {
h := httpBase(ctx)
if h != nil {
serviceRPC(ctx, h, "receive-pack")
serviceRPC(ctx, h, ServiceTypeReceivePack)
}
}
func getServiceType(ctx *context.Context) string {
serviceType := ctx.Req.FormValue("service")
if !strings.HasPrefix(serviceType, "git-") {
return ""
switch ctx.Req.FormValue("service") {
case "git-" + ServiceTypeUploadPack:
return ServiceTypeUploadPack
case "git-" + ServiceTypeReceivePack:
return ServiceTypeReceivePack
}
return strings.TrimPrefix(serviceType, "git-")
return ""
}
func updateServerInfo(ctx gocontext.Context, dir string) []byte {
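getServiceType now maps the "?service=" form value onto an explicit allow-list and returns an empty string for anything else, instead of accepting any "git-"-prefixed value. A standalone sketch of the lookup using the same constants:

package main

import "fmt"

const (
    ServiceTypeUploadPack  = "upload-pack"
    ServiceTypeReceivePack = "receive-pack"
)

// getServiceType maps the "?service=" form value to a known service,
// returning "" for anything that is not explicitly allowed.
func getServiceType(formValue string) string {
    switch formValue {
    case "git-" + ServiceTypeUploadPack:
        return ServiceTypeUploadPack
    case "git-" + ServiceTypeReceivePack:
        return ServiceTypeReceivePack
    }
    return ""
}

func main() {
    fmt.Println(getServiceType("git-upload-pack")) // upload-pack
    fmt.Println(getServiceType("git-evil"))        // (empty: rejected)
}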

View File

@ -1583,7 +1583,16 @@ func UpdatePullRequestTarget(ctx *context.Context) {
}
if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.Doer, targetBranch); err != nil {
if issues_model.IsErrPullRequestAlreadyExists(err) {
switch {
case git_model.IsErrBranchNotExist(err):
errorMessage := ctx.Tr("form.target_branch_not_exist")
ctx.Flash.Error(errorMessage)
ctx.JSON(http.StatusBadRequest, map[string]any{
"error": err.Error(),
"user_error": errorMessage,
})
case issues_model.IsErrPullRequestAlreadyExists(err):
err := err.(issues_model.ErrPullRequestAlreadyExists)
RepoRelPath := ctx.Repo.Owner.Name + "/" + ctx.Repo.Repository.Name
@ -1594,7 +1603,7 @@ func UpdatePullRequestTarget(ctx *context.Context) {
"error": err.Error(),
"user_error": errorMessage,
})
} else if issues_model.IsErrIssueIsClosed(err) {
case issues_model.IsErrIssueIsClosed(err):
errorMessage := ctx.Tr("repo.pulls.is_closed")
ctx.Flash.Error(errorMessage)
@ -1602,7 +1611,7 @@ func UpdatePullRequestTarget(ctx *context.Context) {
"error": err.Error(),
"user_error": errorMessage,
})
} else if pull_service.IsErrPullRequestHasMerged(err) {
case pull_service.IsErrPullRequestHasMerged(err):
errorMessage := ctx.Tr("repo.pulls.has_merged")
ctx.Flash.Error(errorMessage)
@ -1610,7 +1619,7 @@ func UpdatePullRequestTarget(ctx *context.Context) {
"error": err.Error(),
"user_error": errorMessage,
})
} else if git_model.IsErrBranchesEqual(err) {
case git_model.IsErrBranchesEqual(err):
errorMessage := ctx.Tr("repo.pulls.nothing_to_compare")
ctx.Flash.Error(errorMessage)
@ -1618,7 +1627,7 @@ func UpdatePullRequestTarget(ctx *context.Context) {
"error": err.Error(),
"user_error": errorMessage,
})
} else {
default:
ctx.ServerError("UpdatePullRequestTarget", err)
}
return

View File

@ -279,7 +279,7 @@ func handleRepoViewSubmodule(ctx *context.Context, commitSubmoduleFile *git.Comm
ctx.Data["NotFoundPrompt"] = redirectLink
ctx.NotFound(nil)
} else {
ctx.Redirect(submoduleWebLink.CommitWebLink)
ctx.RedirectToCurrentSite(redirectLink)
}
}

View File

@ -31,7 +31,7 @@ func AvatarByUsernameSize(ctx *context.Context) {
return
}
}
cacheableRedirect(ctx, user.AvatarLinkWithSize(ctx, int(ctx.PathParamInt64("size"))))
cacheableRedirect(ctx, user.AvatarLinkWithSize(ctx, ctx.PathParamInt("size")))
}
// AvatarByEmailHash redirects the browser to the email avatar link

View File

@ -26,6 +26,7 @@ import (
"github.com/nektos/act/pkg/jobparser"
"github.com/nektos/act/pkg/model"
"gopkg.in/yaml.v3"
)
func EnableOrDisableWorkflow(ctx *context.APIContext, workflowID string, isEnable bool) error {
@ -136,9 +137,24 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
return err
}
singleWorkflow := &jobparser.SingleWorkflow{}
if err := yaml.Unmarshal(content, singleWorkflow); err != nil {
return fmt.Errorf("failed to unmarshal workflow content: %w", err)
}
// get inputs from post
workflow := &model.Workflow{
RawOn: singleWorkflow.RawOn,
}
inputsWithDefaults := make(map[string]any)
if workflowDispatch := workflow.WorkflowDispatchConfig(); workflowDispatch != nil {
if err = processInputs(workflowDispatch, inputsWithDefaults); err != nil {
return err
}
}
giteaCtx := GenerateGiteaContext(run, nil)
workflows, err = jobparser.Parse(content, jobparser.WithGitContext(giteaCtx.ToGitHubContext()))
workflows, err = jobparser.Parse(content, jobparser.WithGitContext(giteaCtx.ToGitHubContext()), jobparser.WithInputs(inputsWithDefaults))
if err != nil {
return err
}
@ -154,17 +170,6 @@ func DispatchActionWorkflow(ctx reqctx.RequestContext, doer *user_model.User, re
)
}
// get inputs from post
workflow := &model.Workflow{
RawOn: workflows[0].RawOn,
}
inputsWithDefaults := make(map[string]any)
if workflowDispatch := workflow.WorkflowDispatchConfig(); workflowDispatch != nil {
if err = processInputs(workflowDispatch, inputsWithDefaults); err != nil {
return err
}
}
// ctx.Req.PostForm -> WorkflowDispatchPayload.Inputs -> ActionRun.EventPayload -> runner: ghc.Event
// https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
// https://docs.github.com/en/webhooks/webhook-events-and-payloads#workflow_dispatch

View File

@ -10,6 +10,7 @@ import (
"strings"
"text/template"
"time"
"unicode"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
@ -37,6 +38,16 @@ const keyOfRequestIDInTemplate = ".RequestID"
// So, we accept a Request ID with a maximum character length of 40
const maxRequestIDByteLength = 40
func isSafeRequestID(id string) bool {
for _, r := range id {
safe := unicode.IsPrint(r)
if !safe {
return false
}
}
return true
}
func parseRequestIDFromRequestHeader(req *http.Request) string {
requestID := "-"
for _, key := range setting.Log.RequestIDHeaders {
@ -45,6 +56,9 @@ func parseRequestIDFromRequestHeader(req *http.Request) string {
break
}
}
if !isSafeRequestID(requestID) {
return "-"
}
if len(requestID) > maxRequestIDByteLength {
requestID = requestID[:maxRequestIDByteLength] + "..."
}
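Rejecting any non-printable rune keeps header-supplied request IDs from injecting control characters (for example a newline) into access-log lines. A short standalone demo of the same check:

package main

import (
    "fmt"
    "unicode"
)

func isSafeRequestID(id string) bool {
    for _, r := range id {
        if !unicode.IsPrint(r) {
            return false
        }
    }
    return true
}

func main() {
    fmt.Println(isSafeRequestID("req-123 abc"))         // true
    fmt.Println(isSafeRequestID("req-123\ninjected=1")) // false: newline is not printable
}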

View File

@ -69,3 +69,8 @@ func TestAccessLogger(t *testing.T) {
recorder.record(time.Date(2000, 1, 2, 3, 4, 5, 0, time.UTC), &testAccessLoggerResponseWriterMock{}, req)
assert.Equal(t, []string{`remote-addr - - [02/Jan/2000:03:04:05 +0000] "GET /path https" 200 123123 "referer" "user-agent"`}, mockLogger.logs)
}
func TestAccessLoggerRequestID(t *testing.T) {
assert.False(t, isSafeRequestID("\x00"))
assert.True(t, isSafeRequestID("a b-c"))
}

View File

@ -37,6 +37,11 @@ func (b *Base) PathParamInt64(p string) int64 {
return v
}
func (b *Base) PathParamInt(p string) int {
v, _ := strconv.Atoi(b.PathParam(p))
return v
}
// SetPathParam set request path params into routes
func (b *Base) SetPathParam(name, value string) {
if strings.HasPrefix(name, ":") {

View File

@ -63,6 +63,7 @@ func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *iss
var data issues_model.PushActionContent
if opts.IsForcePush {
data.CommitIDs = []string{oldCommitID, newCommitID}
data.IsForcePush = true
} else {
data.CommitIDs, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
if err != nil {

View File

@ -243,6 +243,17 @@ func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer
}
}
exist, err := git_model.IsBranchExist(ctx, pr.BaseRepoID, targetBranch)
if err != nil {
return err
}
if !exist {
return git_model.ErrBranchNotExist{
RepoID: pr.BaseRepoID,
BranchName: targetBranch,
}
}
// Check if branches are equal
branchesEqual, err := IsHeadEqualWithBranch(ctx, pr, targetBranch)
if err != nil {

View File

@ -139,7 +139,7 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
}
if err := gitcmd.NewCommand("fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
if err := gitcmd.NewCommand("fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+baseBranch, git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+"original_"+baseBranch).
Run(ctx, prCtx.RunOpts()); err != nil {
log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
cancel()
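Fully qualifying both sides of the refspec with refs/heads/ avoids ref-name ambiguity on the remote, for example when a tag and a branch share the same name. A small sketch of the strings that get built, assuming git.BranchPrefix is "refs/heads/" as in Gitea's git module:

package main

import "fmt"

const BranchPrefix = "refs/heads/" // assumption: mirrors git.BranchPrefix

// fetchRefspecs builds the two fully qualified refspecs used by the fetch.
func fetchRefspecs(baseBranch, tmpBranch string) []string {
    return []string{
        BranchPrefix + baseBranch + ":" + BranchPrefix + tmpBranch,
        BranchPrefix + baseBranch + ":" + BranchPrefix + "original_" + tmpBranch,
    }
}

func main() {
    fmt.Println(fetchRefspecs("main", "base"))
    // [refs/heads/main:refs/heads/base refs/heads/main:refs/heads/original_base]
}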

View File

@ -0,0 +1,85 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package integration
import (
"fmt"
"net/http"
"net/url"
"testing"
actions_model "code.gitea.io/gitea/models/actions"
auth_model "code.gitea.io/gitea/models/auth"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"github.com/stretchr/testify/assert"
)
func TestWorkflowWithInputsContext(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
session := loginUser(t, user2.Name)
token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
apiRepo := createActionsTestRepo(t, token, "actions-inputs-context", false)
repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: apiRepo.ID})
httpContext := NewAPITestContext(t, user2.Name, repo.Name, auth_model.AccessTokenScopeWriteRepository)
defer doAPIDeleteRepository(httpContext)(t)
wRunner := newMockRunner()
wRunner.registerAsRepoRunner(t, user2.Name, repo.Name, "windows-runner", []string{"windows-runner"}, false)
lRunner := newMockRunner()
lRunner.registerAsRepoRunner(t, user2.Name, repo.Name, "linux-runner", []string{"linux-runner"}, false)
wf1TreePath := ".gitea/workflows/test-inputs-context.yml"
wf1FileContent := `name: Test Inputs Context
on:
workflow_dispatch:
inputs:
os:
description: 'OS'
required: true
type: choice
options:
- linux
- windows
run-name: Build APP on ${{ inputs.os }}
jobs:
build:
runs-on: ${{ inputs.os }}-runner
steps:
- run: echo 'Start building APP'
`
opts1 := getWorkflowCreateFileOptions(user2, repo.DefaultBranch, "create %s"+wf1TreePath, wf1FileContent)
createWorkflowFile(t, token, user2.Name, repo.Name, wf1TreePath, opts1)
// run the workflow with os=windows
urlStr := fmt.Sprintf("/%s/%s/actions/run?workflow=%s", user2.Name, repo.Name, "test-inputs-context.yml")
req := NewRequestWithValues(t, "POST", urlStr, map[string]string{
"_csrf": GetUserCSRFToken(t, session),
"ref": "refs/heads/main",
"os": "windows",
})
session.MakeRequest(t, req, http.StatusSeeOther)
// linux-runner cannot fetch the task
lRunner.fetchNoTask(t)
task := wRunner.fetchTask(t)
_, _, run := getTaskAndJobAndRunByTaskID(t, task.Id)
assert.Equal(t, "Build APP on windows", run.Title)
})
}
func getTaskAndJobAndRunByTaskID(t *testing.T, taskID int64) (*actions_model.ActionTask, *actions_model.ActionRunJob, *actions_model.ActionRun) {
actionTask := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionTask{ID: taskID})
actionRunJob := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRunJob{ID: actionTask.JobID})
actionRun := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRun{ID: actionRunJob.RunID})
return actionTask, actionRunJob, actionRun
}

View File

@ -93,7 +93,20 @@ func (r *mockRunner) registerAsRepoRunner(t *testing.T, ownerName, repoName, run
}
func (r *mockRunner) fetchTask(t *testing.T, timeout ...time.Duration) *runnerv1.Task {
fetchTimeout := 10 * time.Second
task := r.tryFetchTask(t, timeout...)
assert.NotNil(t, task, "failed to fetch a task")
return task
}
func (r *mockRunner) fetchNoTask(t *testing.T, timeout ...time.Duration) {
task := r.tryFetchTask(t, timeout...)
assert.Nil(t, task, "a task is fetched")
}
const defaultFetchTaskTimeout = 1 * time.Second
func (r *mockRunner) tryFetchTask(t *testing.T, timeout ...time.Duration) *runnerv1.Task {
fetchTimeout := defaultFetchTaskTimeout
if len(timeout) > 0 {
fetchTimeout = timeout[0]
}
@ -108,9 +121,9 @@ func (r *mockRunner) fetchTask(t *testing.T, timeout ...time.Duration) *runnerv1
task = resp.Msg.Task
break
}
time.Sleep(time.Second)
time.Sleep(200 * time.Millisecond)
}
assert.NotNil(t, task, "failed to fetch a task")
return task
}

View File

@ -23,6 +23,7 @@ import (
"code.gitea.io/gitea/tests"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func withKeyFile(t *testing.T, keyname string, callback func(string)) {
@ -160,20 +161,27 @@ func doGitPushTestRepositoryFail(dstPath string, args ...string) func(*testing.T
}
}
func doGitAddSomeCommits(dstPath, branch string) func(*testing.T) {
return func(t *testing.T) {
doGitCheckoutBranch(dstPath, branch)(t)
type localGitAddCommitOptions struct {
LocalRepoPath string
CheckoutBranch string
TreeFilePath string
TreeFileContent string
}
assert.NoError(t, os.WriteFile(filepath.Join(dstPath, fmt.Sprintf("file-%s.txt", branch)), []byte("file "+branch), 0o644))
assert.NoError(t, git.AddChanges(t.Context(), dstPath, true))
func doGitCheckoutWriteFileCommit(opts localGitAddCommitOptions) func(*testing.T) {
return func(t *testing.T) {
doGitCheckoutBranch(opts.LocalRepoPath, opts.CheckoutBranch)(t)
localFilePath := filepath.Join(opts.LocalRepoPath, opts.TreeFilePath)
require.NoError(t, os.WriteFile(localFilePath, []byte(opts.TreeFileContent), 0o644))
require.NoError(t, git.AddChanges(t.Context(), opts.LocalRepoPath, true))
signature := git.Signature{
Email: "test@test.test",
Name: "test",
}
assert.NoError(t, git.CommitChanges(t.Context(), dstPath, git.CommitChangesOptions{
require.NoError(t, git.CommitChanges(t.Context(), opts.LocalRepoPath, git.CommitChangesOptions{
Committer: &signature,
Author: &signature,
Message: "update " + branch,
Message: fmt.Sprintf("update %s @ %s", opts.TreeFilePath, opts.CheckoutBranch),
}))
}
}

View File

@ -58,8 +58,12 @@ func testGitPush(t *testing.T, u *url.URL) {
for i := range 5 {
branchName := fmt.Sprintf("branch-%d", i)
pushed = append(pushed, branchName)
doGitAddSomeCommits(gitPath, branchName)(t)
doGitCheckoutWriteFileCommit(localGitAddCommitOptions{
LocalRepoPath: gitPath,
CheckoutBranch: branchName,
TreeFilePath: fmt.Sprintf("file-%s.txt", branchName),
TreeFileContent: "file " + branchName,
})(t)
}
for i := 5; i < 10; i++ {

View File

@ -0,0 +1,109 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package integration
import (
"fmt"
"net/http"
"net/url"
"os"
"testing"
"time"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/unittest"
issues_service "code.gitea.io/gitea/services/issue"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func testWaitForPullRequestStatus(t *testing.T, prIssue *issues_model.Issue, expectedStatus issues_model.PullRequestStatus) (retIssue *issues_model.Issue) {
require.Eventually(t, func() bool {
prIssueCond := *prIssue
retIssue = unittest.AssertExistsAndLoadBean(t, &prIssueCond)
require.NoError(t, retIssue.LoadPullRequest(t.Context()))
return retIssue.PullRequest.Status == expectedStatus
}, 5*time.Second, 20*time.Millisecond)
return retIssue
}
func testPullCommentRebase(t *testing.T, u *url.URL, session *TestSession) {
testPRTitle := "Test PR for rebase comment"
// make a change on forked branch
testEditFile(t, session, "user1", "repo1", "test-branch/rebase", "README.md", "Hello, World (Edited)\n")
testPullCreate(t, session, "user1", "repo1", false, "test-branch/rebase", "test-branch/rebase", testPRTitle)
// create a conflict on base repo branch
testEditFile(t, session, "user2", "repo1", "test-branch/rebase", "README.md", "Hello, World (Edited Conflicted)\n")
// Now the pull request status should be conflicted
testWaitForPullRequestStatus(t, &issues_model.Issue{Title: testPRTitle}, issues_model.PullRequestStatusConflict)
dstPath := t.TempDir()
u.Path = "/user2/repo1.git"
doGitClone(dstPath, u)(t)
doGitCheckoutBranch(dstPath, "test-branch/rebase")(t)
doGitCreateBranch(dstPath, "local-branch/rebase")(t)
content, _ := os.ReadFile(dstPath + "/README.md")
require.Equal(t, "Hello, World (Edited Conflicted)\n", string(content))
doGitCheckoutWriteFileCommit(localGitAddCommitOptions{
LocalRepoPath: dstPath,
CheckoutBranch: "local-branch/rebase",
TreeFilePath: "README.md",
TreeFileContent: "Hello, World (Edited Conflict Resolved)\n",
})(t)
// do force push
u.Path = "/user1/repo1.git"
u.User = url.UserPassword("user1", userPassword)
doGitAddRemote(dstPath, "base-repo", u)(t)
doGitPushTestRepositoryFail(dstPath, "base-repo", "local-branch/rebase:test-branch/rebase")(t)
doGitPushTestRepository(dstPath, "--force", "base-repo", "local-branch/rebase:test-branch/rebase")(t)
// reload the pr
prIssue := testWaitForPullRequestStatus(t, &issues_model.Issue{Title: testPRTitle}, issues_model.PullRequestStatusMergeable)
comments, err := issues_model.FindComments(t.Context(), &issues_model.FindCommentsOptions{
IssueID: prIssue.ID,
Type: issues_model.CommentTypeUndefined, // get all comments type
})
require.NoError(t, err)
lastComment := comments[len(comments)-1]
assert.NoError(t, issues_service.LoadCommentPushCommits(t.Context(), lastComment))
assert.True(t, lastComment.IsForcePush)
}
func testPullCommentRetarget(t *testing.T, u *url.URL, session *TestSession) {
testPRTitle := "Test PR for retarget comment"
// keep a non-conflict branch
testCreateBranch(t, session, "user2", "repo1", "branch/test-branch/retarget", "test-branch/retarget-no-conflict", http.StatusSeeOther)
// make a change on forked branch
testEditFile(t, session, "user1", "repo1", "test-branch/retarget", "README.md", "Hello, World (Edited)\n")
testPullCreate(t, session, "user1", "repo1", false, "test-branch/retarget", "test-branch/retarget", testPRTitle)
// create a conflict line on user2/repo1 README.md
testEditFile(t, session, "user2", "repo1", "test-branch/retarget", "README.md", "Hello, World (Edited Conflicted)\n")
// Now the pull request status should be conflicted
prIssue := testWaitForPullRequestStatus(t, &issues_model.Issue{Title: testPRTitle}, issues_model.PullRequestStatusConflict)
// do retarget
req := NewRequestWithValues(t, "POST", fmt.Sprintf("/user2/repo1/pull/%d/target_branch", prIssue.PullRequest.Index), map[string]string{
"_csrf": GetUserCSRFToken(t, session),
"target_branch": "test-branch/retarget-no-conflict",
})
session.MakeRequest(t, req, http.StatusOK)
testWaitForPullRequestStatus(t, &issues_model.Issue{Title: testPRTitle}, issues_model.PullRequestStatusMergeable)
}
func TestPullComment(t *testing.T) {
onGiteaRun(t, func(t *testing.T, u *url.URL) {
session := loginUser(t, "user1")
testCreateBranch(t, session, "user2", "repo1", "branch/master", "test-branch/rebase", http.StatusSeeOther)
testCreateBranch(t, session, "user2", "repo1", "branch/master", "test-branch/retarget", http.StatusSeeOther)
testRepoFork(t, session, "user2", "repo1", "user1", "repo1", "")
t.Run("RebaseComment", func(t *testing.T) { testPullCommentRebase(t, u, session) })
t.Run("RetargetComment", func(t *testing.T) { testPullCommentRetarget(t, u, session) })
})
}

View File

@ -260,14 +260,16 @@ func TestCreateAgitPullWithReadPermission(t *testing.T) {
u.Path = "user2/repo1.git"
u.User = url.UserPassword("user4", userPassword)
t.Run("Clone", doGitClone(dstPath, u))
doGitClone(dstPath, u)(t)
doGitCheckoutWriteFileCommit(localGitAddCommitOptions{
LocalRepoPath: dstPath,
CheckoutBranch: "master",
TreeFilePath: "new-file-for-agit.txt",
TreeFileContent: "temp content",
})(t)
t.Run("add commit", doGitAddSomeCommits(dstPath, "master"))
t.Run("do agit pull create", func(t *testing.T) {
err := gitcmd.NewCommand("push", "origin", "HEAD:refs/for/master", "-o").AddDynamicArguments("topic="+"test-topic").Run(t.Context(), &gitcmd.RunOpts{Dir: dstPath})
assert.NoError(t, err)
})
err := gitcmd.NewCommand("push", "origin", "HEAD:refs/for/master", "-o").AddDynamicArguments("topic="+"test-topic").Run(t.Context(), &gitcmd.RunOpts{Dir: dstPath})
assert.NoError(t, err)
})
}

View File

@ -1144,8 +1144,8 @@ $.api.settings = {
},
regExp : {
required : /\{\$*[A-z0-9]+\}/g,
optional : /\{\/\$*[A-z0-9]+\}/g,
required : /\{\$*[_A-Za-z0-9]+\}/g, // GITEA-PATCH: use "_A-Za-z" instead of "A-z" for variable name matching
optional : /\{\/\$*[_A-Za-z0-9]+\}/g, // GITEA-PATCH: use "_A-Za-z" instead of "A-z" for variable name matching
},
className: {
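The GITEA-PATCH comment refers to a classic character-class pitfall: [A-z] spans ASCII 65..122 and therefore also matches the six punctuation characters between "Z" and "a". The patch is to Fomantic UI's JavaScript, but the class semantics are the same in Go, so a quick demonstration:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // [A-z] covers ASCII 65..122, which also includes [ \ ] ^ _ `
    loose := regexp.MustCompile(`^[A-z0-9]+$`)
    strict := regexp.MustCompile(`^[_A-Za-z0-9]+$`)
    fmt.Println(loose.MatchString("my^var"))  // true: "^" slips through
    fmt.Println(strict.MatchString("my^var")) // false
    fmt.Println(strict.MatchString("my_var")) // true: "_" is allowed explicitly
}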

View File

@ -66,7 +66,7 @@ $.fn.dropdown = function(parameters) {
moduleNamespace = 'module-' + namespace,
$module = $(this),
$context = $(settings.context),
$context = (typeof settings.context === 'string') ? $(document).find(settings.context) : $(settings.context), // GITEA-PATCH: use "jQuery.find(selector)" instead of "jQuery(selector)"
$text = $module.find(selector.text),
$search = $module.find(selector.search),
$sizer = $module.find(selector.sizer),

View File

@ -64,7 +64,7 @@ $.fn.modal = function(parameters) {
moduleNamespace = 'module-' + namespace,
$module = $(this),
$context = $(settings.context),
$context = (typeof settings.context === 'string') ? $(document).find(settings.context) : $(settings.context), // GITEA-PATCH: use "jQuery.find(selector)" instead of "jQuery(selector)"
$close = $module.find(selector.close),
$allModals,

View File

@ -2,7 +2,7 @@
import {SvgIcon} from '../svg.ts';
import ActionRunStatus from './ActionRunStatus.vue';
import {defineComponent, type PropType} from 'vue';
import {createElementFromAttrs, toggleElem} from '../utils/dom.ts';
import {addDelegatedEventListener, createElementFromAttrs, toggleElem} from '../utils/dom.ts';
import {formatDatetime} from '../utils/time.ts';
import {renderAnsi} from '../render/ansi.ts';
import {POST, DELETE} from '../modules/fetch.ts';
@ -40,6 +40,12 @@ type Step = {
status: RunStatus,
}
type JobStepState = {
cursor: string|null,
expanded: boolean,
manuallyCollapsed: boolean, // whether the user manually collapsed the step, used to avoid auto-expanding it again
}
function parseLineCommand(line: LogLine): LogLineCommand | null {
for (const prefix of LogLinePrefixesGroup) {
if (line.message.startsWith(prefix)) {
@ -54,9 +60,10 @@ function parseLineCommand(line: LogLine): LogLineCommand | null {
return null;
}
function isLogElementInViewport(el: Element): boolean {
function isLogElementInViewport(el: Element, {extraViewPortHeight}={extraViewPortHeight: 0}): boolean {
const rect = el.getBoundingClientRect();
return rect.top >= 0 && rect.bottom <= window.innerHeight; // only check height but not width
// only check whether bottom is in viewport, because the log element can be a log group which is usually tall
return 0 <= rect.bottom && rect.bottom <= window.innerHeight + extraViewPortHeight;
}
type LocaleStorageOptions = {
@ -104,7 +111,7 @@ export default defineComponent({
// internal state
loadingAbortController: null as AbortController | null,
intervalID: null as IntervalId | null,
currentJobStepsStates: [] as Array<Record<string, any>>,
currentJobStepsStates: [] as Array<JobStepState>,
artifacts: [] as Array<Record<string, any>>,
menuVisible: false,
isFullScreen: false,
@ -181,6 +188,19 @@ export default defineComponent({
// load job data and then auto-reload periodically
// need to await first loadJob so this.currentJobStepsStates is initialized and can be used in hashChangeListener
await this.loadJob();
// auto-scroll to the bottom of the log group when it is opened
// "toggle" event doesn't bubble, so we need to use 'click' event delegation to handle it
addDelegatedEventListener(this.elStepsContainer(), 'click', 'summary.job-log-group-summary', (el, _) => {
if (!this.optionAlwaysAutoScroll) return;
const elJobLogGroup = el.closest('details.job-log-group') as HTMLDetailsElement;
setTimeout(() => {
if (elJobLogGroup.open && !isLogElementInViewport(elJobLogGroup)) {
elJobLogGroup.scrollIntoView({behavior: 'smooth', block: 'end'});
}
}, 0);
});
this.intervalID = setInterval(() => this.loadJob(), 1000);
document.body.addEventListener('click', this.closeDropdown);
this.hashChangeListener();
@ -252,6 +272,8 @@ export default defineComponent({
this.currentJobStepsStates[idx].expanded = !this.currentJobStepsStates[idx].expanded;
if (this.currentJobStepsStates[idx].expanded) {
this.loadJobForce(); // try to load the data immediately instead of waiting for next timer interval
} else if (this.currentJob.steps[idx].status === 'running') {
this.currentJobStepsStates[idx].manuallyCollapsed = true;
}
},
// cancel a run
@ -293,7 +315,8 @@ export default defineComponent({
const el = this.getJobStepLogsContainer(stepIndex);
// if the logs container is empty, then auto-scroll if the step is expanded
if (!el.lastChild) return this.currentJobStepsStates[stepIndex].expanded;
return isLogElementInViewport(el.lastChild as Element);
// use extraViewPortHeight to tolerate some extra "virtual view port" height (for example: the last line is partially visible)
return isLogElementInViewport(el.lastChild as Element, {extraViewPortHeight: 5});
},
appendLogs(stepIndex: number, startTime: number, logLines: LogLine[]) {
@ -343,7 +366,6 @@ export default defineComponent({
const abortController = new AbortController();
this.loadingAbortController = abortController;
try {
const isFirstLoad = !this.run.status;
const job = await this.fetchJobData(abortController);
if (this.loadingAbortController !== abortController) return;
@ -353,10 +375,15 @@ export default defineComponent({
// sync the currentJobStepsStates to store the job step states
for (let i = 0; i < this.currentJob.steps.length; i++) {
const expanded = isFirstLoad && this.optionAlwaysExpandRunning && this.currentJob.steps[i].status === 'running';
const autoExpand = this.optionAlwaysExpandRunning && this.currentJob.steps[i].status === 'running';
if (!this.currentJobStepsStates[i]) {
// initial states for job steps
this.currentJobStepsStates[i] = {cursor: null, expanded};
this.currentJobStepsStates[i] = {cursor: null, expanded: autoExpand, manuallyCollapsed: false};
} else {
// if the step is not manually collapsed by user, then auto-expand it if option is enabled
if (autoExpand && !this.currentJobStepsStates[i].manuallyCollapsed) {
this.currentJobStepsStates[i].expanded = true;
}
}
}
@ -380,7 +407,10 @@ export default defineComponent({
if (!autoScrollStepIndexes.get(stepIndex)) continue;
autoScrollJobStepElement = this.getJobStepLogsContainer(stepIndex);
}
autoScrollJobStepElement?.lastElementChild.scrollIntoView({behavior: 'smooth', block: 'nearest'});
const lastLogElem = autoScrollJobStepElement?.lastElementChild;
if (lastLogElem && !isLogElementInViewport(lastLogElem)) {
lastLogElem.scrollIntoView({behavior: 'smooth', block: 'end'});
}
// clear the interval timer if the job is done
if (this.run.done && this.intervalID) {
@ -408,9 +438,13 @@ export default defineComponent({
if (this.menuVisible) this.menuVisible = false;
},
elStepsContainer(): HTMLElement {
return this.$refs.stepsContainer as HTMLElement;
},
toggleTimeDisplay(type: 'seconds' | 'stamp') {
this.timeVisible[`log-time-${type}`] = !this.timeVisible[`log-time-${type}`];
for (const el of (this.$refs.steps as HTMLElement).querySelectorAll(`.log-time-${type}`)) {
for (const el of this.elStepsContainer().querySelectorAll(`.log-time-${type}`)) {
toggleElem(el, this.timeVisible[`log-time-${type}`]);
}
},
@ -419,6 +453,7 @@ export default defineComponent({
this.isFullScreen = !this.isFullScreen;
toggleFullScreen('.action-view-right', this.isFullScreen, '.action-view-body');
},
async hashChangeListener() {
const selectedLogStep = window.location.hash;
if (!selectedLogStep) return;
@ -431,7 +466,7 @@ export default defineComponent({
// so logline can be selected by querySelector
await this.loadJob();
}
const logLine = (this.$refs.steps as HTMLElement).querySelector(selectedLogStep);
const logLine = this.elStepsContainer().querySelector(selectedLogStep);
if (!logLine) return;
logLine.querySelector<HTMLAnchorElement>('.line-num').click();
},
@ -566,7 +601,7 @@ export default defineComponent({
</div>
</div>
</div>
<div class="job-step-container" ref="steps" v-if="currentJob.steps.length">
<div class="job-step-container" ref="stepsContainer" v-if="currentJob.steps.length">
<div class="job-step-section" v-for="(jobStep, i) in currentJob.steps" :key="i">
<div class="job-step-summary" @click.stop="isExpandable(jobStep.status) && toggleStepLogs(i)" :class="[currentJobStepsStates[i].expanded ? 'selected' : '', isExpandable(jobStep.status) && 'step-expandable']">
<!-- If the job is done and the job step log is loaded for the first time, show the loading icon

View File

@ -26,13 +26,13 @@ test('textareaSplitLines', () => {
test('markdownHandleIndention', () => {
const testInput = (input: string, expected?: string) => {
const inputPos = input.indexOf('|');
input = input.replace('|', '');
input = input.replaceAll('|', '');
const ret = markdownHandleIndention({value: input, selStart: inputPos, selEnd: inputPos});
if (expected === null) {
expect(ret).toEqual({handled: false});
} else {
const expectedPos = expected.indexOf('|');
expected = expected.replace('|', '');
expected = expected.replaceAll('|', '');
expect(ret).toEqual({
handled: true,
valueSelection: {value: expected, selStart: expectedPos, selEnd: expectedPos},

View File

@ -333,7 +333,7 @@ export function initRepoPullRequestReview() {
let ntr = tr.nextElementSibling;
if (!ntr?.classList.contains('add-comment')) {
ntr = createElementFromHTML(`
<tr class="add-comment" data-line-type="${lineType}">
<tr class="add-comment" data-line-type="${htmlEscape(lineType)}">
${isSplit ? `
<td class="add-comment-left" colspan="4"></td>
<td class="add-comment-right" colspan="4"></td>

View File

@ -14,4 +14,7 @@ export function linkLabelAndInput(label: Element, input: Element) {
}
}
export const fomanticQuery = $;
export function fomanticQuery(s: string | Element | NodeListOf<Element>): ReturnType<typeof $> {
// intentionally make it only work for query selector, it isn't used for creating HTML elements (for safety)
return typeof s === 'string' ? $(document).find(s) : $(s);
}

View File

@ -35,7 +35,12 @@ export function isDarkTheme(): boolean {
/** strip <tags> from a string */
export function stripTags(text: string): string {
return text.replace(/<[^>]*>?/g, '');
let prev = '';
while (prev !== text) {
prev = text;
text = text.replace(/<[^>]*>?/g, '');
}
return text;
}
export function parseIssueHref(href: string): IssuePathInfo {