mirror of https://github.com/go-gitea/gitea.git synced 2025-07-20 03:58:26 +02:00

Merge branch 'go-gitea:main' into main

Commit bfa2d101c4 by badhezi, 2025-05-28 10:05:30 +03:00, committed by GitHub
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
73 changed files with 888 additions and 636 deletions


@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
services:
pgsql:
image: postgres:12
image: postgres:14
env:
POSTGRES_DB: test
POSTGRES_PASSWORD: postgres


@ -24,7 +24,7 @@ import (
)
const (
hookBatchSize = 30
hookBatchSize = 500
)
var (


@ -5,6 +5,7 @@ package actions
import (
"context"
"errors"
"fmt"
"strings"
"time"
@ -298,6 +299,23 @@ func DeleteRunner(ctx context.Context, id int64) error {
return err
}
// DeleteEphemeralRunner deletes an ephemeral runner by the given ID.
func DeleteEphemeralRunner(ctx context.Context, id int64) error {
runner, err := GetRunnerByID(ctx, id)
if err != nil {
if errors.Is(err, util.ErrNotExist) {
return nil
}
return err
}
if !runner.Ephemeral {
return nil
}
_, err = db.DeleteByID[ActionRunner](ctx, id)
return err
}
// CreateRunner creates new runner.
func CreateRunner(ctx context.Context, t *ActionRunner) error {
if t.OwnerID != 0 && t.RepoID != 0 {


@ -336,6 +336,11 @@ func UpdateTask(ctx context.Context, task *ActionTask, cols ...string) error {
sess.Cols(cols...)
}
_, err := sess.Update(task)
// Automatically delete the ephemeral runner if the task is done
if err == nil && task.Status.IsDone() && util.SliceContainsString(cols, "status") {
return DeleteEphemeralRunner(ctx, task.RunnerID)
}
return err
}
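
Read together with the runner hunk above, this change wires task completion to ephemeral-runner cleanup: once a finished task's "status" column is updated, the runner that executed it is deleted (non-ephemeral runners are untouched). A minimal sketch of that call path, assuming a hypothetical service-layer caller that marks a task as finished (not part of this diff):

```go
package actions_service // illustrative placement, not from this diff

import (
	"context"

	actions_model "code.gitea.io/gitea/models/actions"
	"code.gitea.io/gitea/modules/timeutil"
)

// markTaskDone is a hypothetical helper showing the new cleanup path.
func markTaskDone(ctx context.Context, task *actions_model.ActionTask) error {
	task.Status = actions_model.StatusSuccess
	task.Stopped = timeutil.TimeStampNow()
	// Because the task is now done and "status" is among the updated columns,
	// UpdateTask also calls DeleteEphemeralRunner for task.RunnerID.
	return actions_model.UpdateTask(ctx, task, "status", "stopped")
}
```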


@ -38,3 +38,14 @@
repo_id: 0
description: "This runner is going to be deleted"
agent_labels: '["runner_to_be_deleted","linux"]'
-
id: 34350
name: runner_to_be_deleted-org-ephemeral
uuid: 3FF231BD-FBB7-4E4B-9602-E6F28363EF20
token_hash: 3FF231BD-FBB7-4E4B-9602-E6F28363EF20
ephemeral: true
version: "1.0.0"
owner_id: 3
repo_id: 0
description: "This runner is going to be deleted"
agent_labels: '["runner_to_be_deleted","linux"]'


@ -117,6 +117,26 @@
log_length: 707
log_size: 90179
log_expired: 0
-
id: 52
job_id: 196
attempt: 1
runner_id: 34350
status: 6 # running
started: 1683636528
stopped: 1683636626
repo_id: 4
owner_id: 1
commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
is_fork_pull_request: 0
token_hash: f8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc4784222
token_salt: ffffffffff
token_last_eight: ffffffff
log_filename: artifact-test2/2f/47.log
log_in_storage: 1
log_length: 707
log_size: 90179
log_expired: 0
-
id: 53
job_id: 198


@ -7,6 +7,7 @@
target_url: https://example.com/builds/
description: My awesome CI-service
context: ci/awesomeness
context_hash: c65f4d64a3b14a3eced0c9b36799e66e1bd5ced7
creator_id: 2
-
@ -18,6 +19,7 @@
target_url: https://example.com/converage/
description: My awesome Coverage service
context: cov/awesomeness
context_hash: 3929ac7bccd3fa1bf9b38ddedb77973b1b9a8cfe
creator_id: 2
-
@ -29,6 +31,7 @@
target_url: https://example.com/converage/
description: My awesome Coverage service
context: cov/awesomeness
context_hash: 3929ac7bccd3fa1bf9b38ddedb77973b1b9a8cfe
creator_id: 2
-
@ -40,6 +43,7 @@
target_url: https://example.com/builds/
description: My awesome CI-service
context: ci/awesomeness
context_hash: c65f4d64a3b14a3eced0c9b36799e66e1bd5ced7
creator_id: 2
-
@ -51,4 +55,5 @@
target_url: https://example.com/builds/
description: My awesome deploy service
context: deploy/awesomeness
context_hash: ae9547713a6665fc4261d0756904932085a41cf2
creator_id: 2


@ -298,27 +298,37 @@ type CommitStatusIndex struct {
MaxIndex int64 `xorm:"index"`
}
func makeRepoCommitQuery(ctx context.Context, repoID int64, sha string) *xorm.Session {
return db.GetEngine(ctx).Table(&CommitStatus{}).
Where("repo_id = ?", repoID).And("sha = ?", sha)
}
// GetLatestCommitStatus returns all statuses with a unique context for a given commit.
func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, int64, error) {
getBase := func() *xorm.Session {
return db.GetEngine(ctx).Table(&CommitStatus{}).
Where("repo_id = ?", repoID).And("sha = ?", sha)
}
func GetLatestCommitStatus(ctx context.Context, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, error) {
indices := make([]int64, 0, 10)
sess := getBase().Select("max( `index` ) as `index`").
GroupBy("context_hash").OrderBy("max( `index` ) desc")
sess := makeRepoCommitQuery(ctx, repoID, sha).
Select("max( `index` ) as `index`").
GroupBy("context_hash").
OrderBy("max( `index` ) desc")
if !listOptions.IsListAll() {
sess = db.SetSessionPagination(sess, &listOptions)
}
count, err := sess.FindAndCount(&indices)
if err != nil {
return nil, count, err
if err := sess.Find(&indices); err != nil {
return nil, err
}
statuses := make([]*CommitStatus, 0, len(indices))
if len(indices) == 0 {
return statuses, count, nil
return statuses, nil
}
return statuses, count, getBase().And(builder.In("`index`", indices)).Find(&statuses)
err := makeRepoCommitQuery(ctx, repoID, sha).And(builder.In("`index`", indices)).Find(&statuses)
return statuses, err
}
func CountLatestCommitStatus(ctx context.Context, repoID int64, sha string) (int64, error) {
return makeRepoCommitQuery(ctx, repoID, sha).
Select("count(context_hash)").
GroupBy("context_hash").
Count()
}
// GetLatestCommitStatusForPairs returns all statuses with a unique context for a given list of repo-sha pairs
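
The hunk above splits the old combined call: GetLatestCommitStatus now returns only the statuses (one per context), and the total number of distinct contexts comes from the new CountLatestCommitStatus. A minimal caller sketch under that split (the wrapping function is illustrative, not from this diff):

```go
package commitstatus_example // illustrative placement, not from this diff

import (
	"context"

	"code.gitea.io/gitea/models/db"
	git_model "code.gitea.io/gitea/models/git"
)

func latestStatusesWithTotal(ctx context.Context, repoID int64, sha string) ([]*git_model.CommitStatus, int64, error) {
	// One latest status per context, paginated.
	statuses, err := git_model.GetLatestCommitStatus(ctx, repoID, sha, db.ListOptions{Page: 1, PageSize: 20})
	if err != nil {
		return nil, 0, err
	}
	// Total number of distinct contexts, now queried separately.
	total, err := git_model.CountLatestCommitStatus(ctx, repoID, sha)
	return statuses, total, err
}
```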


@ -55,7 +55,7 @@ func GetLatestCommitStatusForRepoAndSHAs(ctx context.Context, repoSHAs []RepoSHA
}
func UpdateCommitStatusSummary(ctx context.Context, repoID int64, sha string) error {
commitStatuses, _, err := GetLatestCommitStatus(ctx, repoID, sha, db.ListOptionsAll)
commitStatuses, err := GetLatestCommitStatus(ctx, repoID, sha, db.ListOptionsAll)
if err != nil {
return err
}


@ -26,7 +26,7 @@ func TestGetCommitStatuses(t *testing.T) {
repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
sha1 := "1234123412341234123412341234123412341234"
sha1 := "1234123412341234123412341234123412341234" // the mocked commit ID in test fixtures
statuses, maxResults, err := db.FindAndCount[git_model.CommitStatus](db.DefaultContext, &git_model.CommitStatusOptions{
ListOptions: db.ListOptions{Page: 1, PageSize: 50},
@ -256,3 +256,26 @@ func TestCommitStatusesHideActionsURL(t *testing.T) {
assert.Empty(t, statuses[0].TargetURL)
assert.Equal(t, "https://mycicd.org/1", statuses[1].TargetURL)
}
func TestGetCountLatestCommitStatus(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
sha1 := "1234123412341234123412341234123412341234" // the mocked commit ID in test fixtures
commitStatuses, err := git_model.GetLatestCommitStatus(db.DefaultContext, repo1.ID, sha1, db.ListOptions{
Page: 1,
PageSize: 2,
})
assert.NoError(t, err)
assert.Len(t, commitStatuses, 2)
assert.Equal(t, structs.CommitStatusFailure, commitStatuses[0].State)
assert.Equal(t, "ci/awesomeness", commitStatuses[0].Context)
assert.Equal(t, structs.CommitStatusError, commitStatuses[1].State)
assert.Equal(t, "deploy/awesomeness", commitStatuses[1].Context)
count, err := git_model.CountLatestCommitStatus(db.DefaultContext, repo1.ID, sha1)
assert.NoError(t, err)
assert.EqualValues(t, 3, count)
}


@ -88,6 +88,8 @@ func applySorts(sess *xorm.Session, sortType string, priorityRepoID int64) {
sess.Asc("issue.created_unix").Asc("issue.id")
case "recentupdate":
sess.Desc("issue.updated_unix").Desc("issue.created_unix").Desc("issue.id")
case "recentclose":
sess.Desc("issue.closed_unix").Desc("issue.created_unix").Desc("issue.id")
case "leastupdate":
sess.Asc("issue.updated_unix").Asc("issue.created_unix").Asc("issue.id")
case "mostcomment":


@ -152,7 +152,8 @@ func PullRequests(ctx context.Context, baseRepoID int64, opts *PullRequestsOptio
applySorts(findSession, opts.SortType, 0)
findSession = db.SetSessionPagination(findSession, opts)
prs := make([]*PullRequest, 0, opts.PageSize)
return prs, maxResults, findSession.Find(&prs)
found := findSession.Find(&prs)
return prs, maxResults, found
}
// PullRequestList defines a list of pull requests


@ -14,6 +14,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestPullRequest_LoadAttributes(t *testing.T) {
@ -76,6 +77,47 @@ func TestPullRequestsNewest(t *testing.T) {
}
}
func TestPullRequests_Closed_RecentSortType(t *testing.T) {
// Issue ID | Closed At. | Updated At
// 2 | 1707270001 | 1707270001
// 3 | 1707271000 | 1707279999
// 11 | 1707279999 | 1707275555
tests := []struct {
sortType string
expectedIssueIDOrder []int64
}{
{"recentupdate", []int64{3, 11, 2}},
{"recentclose", []int64{11, 3, 2}},
}
assert.NoError(t, unittest.PrepareTestDatabase())
_, err := db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707270001, updated_unix = 1707270001, is_closed = true WHERE id = 2")
require.NoError(t, err)
_, err = db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707271000, updated_unix = 1707279999, is_closed = true WHERE id = 3")
require.NoError(t, err)
_, err = db.Exec(db.DefaultContext, "UPDATE issue SET closed_unix = 1707279999, updated_unix = 1707275555, is_closed = true WHERE id = 11")
require.NoError(t, err)
for _, test := range tests {
t.Run(test.sortType, func(t *testing.T) {
prs, _, err := issues_model.PullRequests(db.DefaultContext, 1, &issues_model.PullRequestsOptions{
ListOptions: db.ListOptions{
Page: 1,
},
State: "closed",
SortType: test.sortType,
})
require.NoError(t, err)
if assert.Len(t, prs, len(test.expectedIssueIDOrder)) {
for i := range test.expectedIssueIDOrder {
assert.Equal(t, test.expectedIssueIDOrder[i], prs[i].IssueID)
}
}
})
}
}
func TestLoadRequestedReviewers(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())


@ -161,6 +161,11 @@ func UpdateRelease(ctx context.Context, rel *Release) error {
return err
}
func UpdateReleaseNumCommits(ctx context.Context, rel *Release) error {
_, err := db.GetEngine(ctx).ID(rel.ID).Cols("num_commits").Update(rel)
return err
}
// AddReleaseAttachments adds a release attachments
func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs []string) (err error) {
// Check attachments
@ -418,8 +423,8 @@ func UpdateReleasesMigrationsByType(ctx context.Context, gitServiceType structs.
return err
}
// PushUpdateDeleteTagsContext updates a number of delete tags with context
func PushUpdateDeleteTagsContext(ctx context.Context, repo *Repository, tags []string) error {
// PushUpdateDeleteTags updates the releases for a batch of deleted tags
func PushUpdateDeleteTags(ctx context.Context, repo *Repository, tags []string) error {
if len(tags) == 0 {
return nil
}
@ -448,58 +453,6 @@ func PushUpdateDeleteTagsContext(ctx context.Context, repo *Repository, tags []s
return nil
}
// PushUpdateDeleteTag must be called for any push actions to delete tag
func PushUpdateDeleteTag(ctx context.Context, repo *Repository, tagName string) error {
rel, err := GetRelease(ctx, repo.ID, tagName)
if err != nil {
if IsErrReleaseNotExist(err) {
return nil
}
return fmt.Errorf("GetRelease: %w", err)
}
if rel.IsTag {
if _, err = db.DeleteByID[Release](ctx, rel.ID); err != nil {
return fmt.Errorf("Delete: %w", err)
}
} else {
rel.IsDraft = true
rel.NumCommits = 0
rel.Sha1 = ""
if _, err = db.GetEngine(ctx).ID(rel.ID).AllCols().Update(rel); err != nil {
return fmt.Errorf("Update: %w", err)
}
}
return nil
}
// SaveOrUpdateTag must be called for any push actions to add tag
func SaveOrUpdateTag(ctx context.Context, repo *Repository, newRel *Release) error {
rel, err := GetRelease(ctx, repo.ID, newRel.TagName)
if err != nil && !IsErrReleaseNotExist(err) {
return fmt.Errorf("GetRelease: %w", err)
}
if rel == nil {
rel = newRel
if _, err = db.GetEngine(ctx).Insert(rel); err != nil {
return fmt.Errorf("InsertOne: %w", err)
}
} else {
rel.Sha1 = newRel.Sha1
rel.CreatedUnix = newRel.CreatedUnix
rel.NumCommits = newRel.NumCommits
rel.IsDraft = false
if rel.IsTag && newRel.PublisherID > 0 {
rel.PublisherID = newRel.PublisherID
}
if _, err = db.GetEngine(ctx).ID(rel.ID).AllCols().Update(rel); err != nil {
return fmt.Errorf("Update: %w", err)
}
}
return nil
}
// RemapExternalUser ExternalUserRemappable interface
func (r *Release) RemapExternalUser(externalName string, externalID, userID int64) error {
r.OriginalAuthor = externalName


@ -137,7 +137,7 @@ func (opts *SearchUserOptions) toSearchQueryBase(ctx context.Context) *xorm.Sess
// SearchUsers takes options i.e. keyword and part of user name to search,
// it returns results in given range and number of total results.
func SearchUsers(ctx context.Context, opts *SearchUserOptions) (users []*User, _ int64, _ error) {
func SearchUsers(ctx context.Context, opts SearchUserOptions) (users []*User, _ int64, _ error) {
sessCount := opts.toSearchQueryBase(ctx)
defer sessCount.Close()
count, err := sessCount.Count(new(User))
@ -152,7 +152,7 @@ func SearchUsers(ctx context.Context, opts *SearchUserOptions) (users []*User, _
sessQuery := opts.toSearchQueryBase(ctx).OrderBy(opts.OrderBy.String())
defer sessQuery.Close()
if opts.Page > 0 {
sessQuery = db.SetSessionPagination(sessQuery, opts)
sessQuery = db.SetSessionPagination(sessQuery, &opts)
}
// the sql may contain JOIN, so we must only select User related columns


@ -88,7 +88,7 @@ func TestCanCreateOrganization(t *testing.T) {
func TestSearchUsers(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
testSuccess := func(opts *user_model.SearchUserOptions, expectedUserOrOrgIDs []int64) {
testSuccess := func(opts user_model.SearchUserOptions, expectedUserOrOrgIDs []int64) {
users, _, err := user_model.SearchUsers(db.DefaultContext, opts)
assert.NoError(t, err)
cassText := fmt.Sprintf("ids: %v, opts: %v", expectedUserOrOrgIDs, opts)
@ -100,61 +100,61 @@ func TestSearchUsers(t *testing.T) {
}
// test orgs
testOrgSuccess := func(opts *user_model.SearchUserOptions, expectedOrgIDs []int64) {
testOrgSuccess := func(opts user_model.SearchUserOptions, expectedOrgIDs []int64) {
opts.Type = user_model.UserTypeOrganization
testSuccess(opts, expectedOrgIDs)
}
testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1, PageSize: 2}},
testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1, PageSize: 2}},
[]int64{3, 6})
testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 2, PageSize: 2}},
testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 2, PageSize: 2}},
[]int64{7, 17})
testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 3, PageSize: 2}},
testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 3, PageSize: 2}},
[]int64{19, 25})
testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 4, PageSize: 2}},
testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 4, PageSize: 2}},
[]int64{26, 41})
testOrgSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 5, PageSize: 2}},
testOrgSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 5, PageSize: 2}},
[]int64{42})
testOrgSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 6, PageSize: 2}},
testOrgSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 6, PageSize: 2}},
[]int64{})
// test users
testUserSuccess := func(opts *user_model.SearchUserOptions, expectedUserIDs []int64) {
testUserSuccess := func(opts user_model.SearchUserOptions, expectedUserIDs []int64) {
opts.Type = user_model.UserTypeIndividual
testSuccess(opts, expectedUserIDs)
}
testUserSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}},
testUserSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}},
[]int64{1, 2, 4, 5, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37, 38, 39, 40})
testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(false)},
testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(false)},
[]int64{9})
testUserSuccess(&user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
testUserSuccess(user_model.SearchUserOptions{OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
[]int64{1, 2, 4, 5, 8, 10, 11, 12, 13, 14, 15, 16, 18, 20, 21, 24, 27, 28, 29, 30, 32, 34, 37, 38, 39, 40})
testUserSuccess(&user_model.SearchUserOptions{Keyword: "user1", OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
testUserSuccess(user_model.SearchUserOptions{Keyword: "user1", OrderBy: "id ASC", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
[]int64{1, 10, 11, 12, 13, 14, 15, 16, 18})
// order by name asc default
testUserSuccess(&user_model.SearchUserOptions{Keyword: "user1", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
testUserSuccess(user_model.SearchUserOptions{Keyword: "user1", ListOptions: db.ListOptions{Page: 1}, IsActive: optional.Some(true)},
[]int64{1, 10, 11, 12, 13, 14, 15, 16, 18})
testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsAdmin: optional.Some(true)},
testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsAdmin: optional.Some(true)},
[]int64{1})
testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsRestricted: optional.Some(true)},
testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsRestricted: optional.Some(true)},
[]int64{29})
testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsProhibitLogin: optional.Some(true)},
testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsProhibitLogin: optional.Some(true)},
[]int64{37})
testUserSuccess(&user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsTwoFactorEnabled: optional.Some(true)},
testUserSuccess(user_model.SearchUserOptions{ListOptions: db.ListOptions{Page: 1}, IsTwoFactorEnabled: optional.Some(true)},
[]int64{24})
}


@ -132,18 +132,22 @@ func (r *BlameReader) Close() error {
}
// CreateBlameReader creates reader for given repository, commit and file
func CreateBlameReader(ctx context.Context, objectFormat ObjectFormat, repoPath string, commit *Commit, file string, bypassBlameIgnore bool) (*BlameReader, error) {
reader, stdout, err := os.Pipe()
if err != nil {
return nil, err
}
func CreateBlameReader(ctx context.Context, objectFormat ObjectFormat, repoPath string, commit *Commit, file string, bypassBlameIgnore bool) (rd *BlameReader, err error) {
var ignoreRevsFileName string
var ignoreRevsFileCleanup func()
defer func() {
if err != nil && ignoreRevsFileCleanup != nil {
ignoreRevsFileCleanup()
}
}()
cmd := NewCommandNoGlobals("blame", "--porcelain")
var ignoreRevsFileName string
var ignoreRevsFileCleanup func() // TODO: maybe it should check the returned err in a defer func to make sure the cleanup could always be executed correctly
if DefaultFeatures().CheckVersionAtLeast("2.23") && !bypassBlameIgnore {
ignoreRevsFileName, ignoreRevsFileCleanup = tryCreateBlameIgnoreRevsFile(commit)
ignoreRevsFileName, ignoreRevsFileCleanup, err = tryCreateBlameIgnoreRevsFile(commit)
if err != nil && !IsErrNotExist(err) {
return nil, err
}
if ignoreRevsFileName != "" {
// Possible improvement: use --ignore-revs-file /dev/stdin on unix
// There is no equivalent on Windows. May be implemented if Gitea uses an external git backend.
@ -154,6 +158,10 @@ func CreateBlameReader(ctx context.Context, objectFormat ObjectFormat, repoPath
cmd.AddDynamicArguments(commit.ID.String()).AddDashesAndList(file)
done := make(chan error, 1)
reader, stdout, err := os.Pipe()
if err != nil {
return nil, err
}
go func() {
stderr := bytes.Buffer{}
// TODO: it doesn't work for directories (the directories shouldn't be "blamed"), and the "err" should be returned by "Read" but not by "Close"
@ -182,33 +190,29 @@ func CreateBlameReader(ctx context.Context, objectFormat ObjectFormat, repoPath
}, nil
}
func tryCreateBlameIgnoreRevsFile(commit *Commit) (string, func()) {
func tryCreateBlameIgnoreRevsFile(commit *Commit) (string, func(), error) {
entry, err := commit.GetTreeEntryByPath(".git-blame-ignore-revs")
if err != nil {
log.Error("Unable to get .git-blame-ignore-revs file: GetTreeEntryByPath: %v", err)
return "", nil
return "", nil, err
}
r, err := entry.Blob().DataAsync()
if err != nil {
log.Error("Unable to get .git-blame-ignore-revs file data: DataAsync: %v", err)
return "", nil
return "", nil, err
}
defer r.Close()
f, cleanup, err := setting.AppDataTempDir("git-repo-content").CreateTempFileRandom("git-blame-ignore-revs")
if err != nil {
log.Error("Unable to get .git-blame-ignore-revs file data: CreateTempFileRandom: %v", err)
return "", nil
return "", nil, err
}
filename := f.Name()
_, err = io.Copy(f, r)
_ = f.Close()
if err != nil {
cleanup()
log.Error("Unable to get .git-blame-ignore-revs file data: Copy: %v", err)
return "", nil
return "", nil, err
}
return filename, cleanup
return filename, cleanup, nil
}
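
After this refactor, CreateBlameReader propagates errors from tryCreateBlameIgnoreRevsFile (ignoring only the not-exist case) and, via the named return and deferred cleanup, removes the temporary .git-blame-ignore-revs file itself when construction fails. A minimal usage sketch, assuming the object format, repository path, commit and file are already resolved by the caller (the wrapper below is illustrative, not from this diff):

```go
package blame_example // illustrative placement, not from this diff

import (
	"context"

	"code.gitea.io/gitea/modules/git"
)

func blameFile(ctx context.Context, objectFormat git.ObjectFormat, repoPath string, commit *git.Commit, file string) error {
	// On failure, CreateBlameReader now cleans up its temporary ignore-revs
	// file internally; the caller is only responsible for closing the reader.
	reader, err := git.CreateBlameReader(ctx, objectFormat, repoPath, commit, file, false /* bypassBlameIgnore */)
	if err != nil {
		return err
	}
	// Blame parts would be consumed here; elided, since only the
	// construction/cleanup contract changes in this diff.
	return reader.Close()
}
```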


@ -34,7 +34,7 @@ type Commit struct {
// CommitSignature represents a git commit signature part.
type CommitSignature struct {
Signature string
Payload string // TODO check if can be reconstruct from the rest of commit information to not have duplicate data
Payload string
}
// Message returns the commit message. Same as retrieving CommitMessage directly.


@ -6,10 +6,44 @@ package git
import (
"bufio"
"bytes"
"fmt"
"io"
"strings"
)
const (
commitHeaderGpgsig = "gpgsig"
commitHeaderGpgsigSha256 = "gpgsig-sha256"
)
func assignCommitFields(gitRepo *Repository, commit *Commit, headerKey string, headerValue []byte) error {
if len(headerValue) > 0 && headerValue[len(headerValue)-1] == '\n' {
headerValue = headerValue[:len(headerValue)-1] // remove trailing newline
}
switch headerKey {
case "tree":
objID, err := NewIDFromString(string(headerValue))
if err != nil {
return fmt.Errorf("invalid tree ID %q: %w", string(headerValue), err)
}
commit.Tree = *NewTree(gitRepo, objID)
case "parent":
objID, err := NewIDFromString(string(headerValue))
if err != nil {
return fmt.Errorf("invalid parent ID %q: %w", string(headerValue), err)
}
commit.Parents = append(commit.Parents, objID)
case "author":
commit.Author.Decode(headerValue)
case "committer":
commit.Committer.Decode(headerValue)
case commitHeaderGpgsig, commitHeaderGpgsigSha256:
// if there are duplicate "gpgsig" and "gpgsig-sha256" headers, then the signature must have already been invalid
// so we don't need to handle duplicate headers here
commit.Signature = &CommitSignature{Signature: string(headerValue)}
}
return nil
}
// CommitFromReader will generate a Commit from a provided reader
// We need this to interpret commits from cat-file or cat-file --batch
//
@ -21,90 +55,46 @@ func CommitFromReader(gitRepo *Repository, objectID ObjectID, reader io.Reader)
Committer: &Signature{},
}
payloadSB := new(strings.Builder)
signatureSB := new(strings.Builder)
messageSB := new(strings.Builder)
message := false
pgpsig := false
bufReader, ok := reader.(*bufio.Reader)
if !ok {
bufReader = bufio.NewReader(reader)
}
readLoop:
bufReader := bufio.NewReader(reader)
inHeader := true
var payloadSB, messageSB bytes.Buffer
var headerKey string
var headerValue []byte
for {
line, err := bufReader.ReadBytes('\n')
if err != nil {
if err == io.EOF {
if message {
_, _ = messageSB.Write(line)
if err != nil && err != io.EOF {
return nil, fmt.Errorf("unable to read commit %q: %w", objectID.String(), err)
}
if len(line) == 0 {
break
}
if inHeader {
inHeader = !(len(line) == 1 && line[0] == '\n') // still in header if line is not just a newline
k, v, _ := bytes.Cut(line, []byte{' '})
if len(k) != 0 || !inHeader {
if headerKey != "" {
if err = assignCommitFields(gitRepo, commit, headerKey, headerValue); err != nil {
return nil, fmt.Errorf("unable to parse commit %q: %w", objectID.String(), err)
}
}
_, _ = payloadSB.Write(line)
break readLoop
headerKey = string(k) // it also resets the headerValue to empty string if not inHeader
headerValue = v
} else {
headerValue = append(headerValue, v...)
}
return nil, err
}
if pgpsig {
if len(line) > 0 && line[0] == ' ' {
_, _ = signatureSB.Write(line[1:])
continue
}
pgpsig = false
}
if !message {
// This is probably not correct but is copied from go-gits interpretation...
trimmed := bytes.TrimSpace(line)
if len(trimmed) == 0 {
message = true
if headerKey != commitHeaderGpgsig && headerKey != commitHeaderGpgsigSha256 {
_, _ = payloadSB.Write(line)
continue
}
split := bytes.SplitN(trimmed, []byte{' '}, 2)
var data []byte
if len(split) > 1 {
data = split[1]
}
switch string(split[0]) {
case "tree":
commit.Tree = *NewTree(gitRepo, MustIDFromString(string(data)))
_, _ = payloadSB.Write(line)
case "parent":
commit.Parents = append(commit.Parents, MustIDFromString(string(data)))
_, _ = payloadSB.Write(line)
case "author":
commit.Author = &Signature{}
commit.Author.Decode(data)
_, _ = payloadSB.Write(line)
case "committer":
commit.Committer = &Signature{}
commit.Committer.Decode(data)
_, _ = payloadSB.Write(line)
case "encoding":
_, _ = payloadSB.Write(line)
case "gpgsig":
fallthrough
case "gpgsig-sha256": // FIXME: no intertop, so only 1 exists at present.
_, _ = signatureSB.Write(data)
_ = signatureSB.WriteByte('\n')
pgpsig = true
}
} else {
_, _ = messageSB.Write(line)
_, _ = payloadSB.Write(line)
}
}
commit.CommitMessage = messageSB.String()
commit.Signature = &CommitSignature{
Signature: signatureSB.String(),
Payload: payloadSB.String(),
}
if len(commit.Signature.Signature) == 0 {
commit.Signature = nil
}
commit.CommitMessage = messageSB.String()
if commit.Signature != nil {
commit.Signature.Payload = payloadSB.String()
}
return commit, nil
}
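
The rewritten parser treats a raw commit object as header lines, a blank separator, and the message, folding space-prefixed continuation lines into the current header; that is how the multi-line gpgsig / gpgsig-sha256 signature is collected while being excluded from the payload. A hedged sketch of the input shape it expects, using an abbreviated object assembled from the test values below (the wrapper function is illustrative):

```go
package commitparse_example // illustrative placement, not from this diff

import (
	"strings"

	"code.gitea.io/gitea/modules/git"
)

func parseExampleCommit(gitRepo *git.Repository, objectID git.ObjectID) (*git.Commit, error) {
	// Abbreviated object; a real signature spans many continuation lines,
	// each carried on a line that starts with a single space.
	raw := `tree f1a6cb52b2d16773290cefe49ad0684b50a4f930
parent 37991dec2c8e592043f47155ce4808d4580f9123
author silverwind <me@silverwind.io> 1563741793 +0200
committer silverwind <me@silverwind.io> 1563741793 +0200
gpgsig -----BEGIN PGP SIGNATURE-----
 ...signature body, one leading space per continuation line...
 -----END PGP SIGNATURE-----

empty commit`
	// CommitMessage becomes "empty commit"; Signature.Payload is roughly the
	// raw object minus the gpgsig header and its continuation lines.
	return git.CommitFromReader(gitRepo, objectID, strings.NewReader(raw))
}
```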


@ -60,8 +60,7 @@ func TestGetFullCommitIDErrorSha256(t *testing.T) {
}
func TestCommitFromReaderSha256(t *testing.T) {
commitString := `9433b2a62b964c17a4485ae180f45f595d3e69d31b786087775e28c6b6399df0 commit 1114
tree e7f9e96dd79c09b078cac8b303a7d3b9d65ff9b734e86060a4d20409fd379f9e
commitString := `tree e7f9e96dd79c09b078cac8b303a7d3b9d65ff9b734e86060a4d20409fd379f9e
parent 26e9ccc29fad747e9c5d9f4c9ddeb7eff61cc45ef6a8dc258cbeb181afc055e8
author Adam Majer <amajer@suse.de> 1698676906 +0100
committer Adam Majer <amajer@suse.de> 1698676906 +0100
@ -112,8 +111,7 @@ VAEUo6ecdDxSpyt2naeg9pKus/BRi7P6g4B1hkk/zZstUX/QP4IQuAJbXjkvsC+X
HKRr3NlRM/DygzTyj0gN74uoa0goCIbyAQhiT42nm0cuhM7uN/W0ayrlZjGF1cbR
8NCJUL2Nwj0ywKIavC99Ipkb8AsFwpVT6U6effs6
=xybZ
-----END PGP SIGNATURE-----
`, commitFromReader.Signature.Signature)
-----END PGP SIGNATURE-----`, commitFromReader.Signature.Signature)
assert.Equal(t, `tree e7f9e96dd79c09b078cac8b303a7d3b9d65ff9b734e86060a4d20409fd379f9e
parent 26e9ccc29fad747e9c5d9f4c9ddeb7eff61cc45ef6a8dc258cbeb181afc055e8
author Adam Majer <amajer@suse.de> 1698676906 +0100


@ -59,8 +59,7 @@ func TestGetFullCommitIDError(t *testing.T) {
}
func TestCommitFromReader(t *testing.T) {
commitString := `feaf4ba6bc635fec442f46ddd4512416ec43c2c2 commit 1074
tree f1a6cb52b2d16773290cefe49ad0684b50a4f930
commitString := `tree f1a6cb52b2d16773290cefe49ad0684b50a4f930
parent 37991dec2c8e592043f47155ce4808d4580f9123
author silverwind <me@silverwind.io> 1563741793 +0200
committer silverwind <me@silverwind.io> 1563741793 +0200
@ -108,8 +107,7 @@ sD53z/f0J+We4VZjY+pidvA9BGZPFVdR3wd3xGs8/oH6UWaLJAMGkLG6dDb3qDLm
mfeFhT57UbE4qukTDIQ0Y0WM40UYRTakRaDY7ubhXgLgx09Cnp9XTVMsHgT6j9/i
1pxsB104XLWjQHTjr1JtiaBQEwFh9r2OKTcpvaLcbNtYpo7CzOs=
=FRsO
-----END PGP SIGNATURE-----
`, commitFromReader.Signature.Signature)
-----END PGP SIGNATURE-----`, commitFromReader.Signature.Signature)
assert.Equal(t, `tree f1a6cb52b2d16773290cefe49ad0684b50a4f930
parent 37991dec2c8e592043f47155ce4808d4580f9123
author silverwind <me@silverwind.io> 1563741793 +0200
@ -126,8 +124,7 @@ empty commit`, commitFromReader.Signature.Payload)
}
func TestCommitWithEncodingFromReader(t *testing.T) {
commitString := `feaf4ba6bc635fec442f46ddd4512416ec43c2c2 commit 1074
tree ca3fad42080dd1a6d291b75acdfc46e5b9b307e5
commitString := `tree ca3fad42080dd1a6d291b75acdfc46e5b9b307e5
parent 47b24e7ab977ed31c5a39989d570847d6d0052af
author KN4CK3R <admin@oldschoolhack.me> 1711702962 +0100
committer KN4CK3R <admin@oldschoolhack.me> 1711702962 +0100
@ -172,8 +169,7 @@ SONRzusmu5n3DgV956REL7x62h7JuqmBz/12HZkr0z0zgXkcZ04q08pSJATX5N1F
yN+tWxTsWg+zhDk96d5Esdo9JMjcFvPv0eioo30GAERaz1hoD7zCMT4jgUFTQwgz
jw4YcO5u
=r3UU
-----END PGP SIGNATURE-----
`, commitFromReader.Signature.Signature)
-----END PGP SIGNATURE-----`, commitFromReader.Signature.Signature)
assert.Equal(t, `tree ca3fad42080dd1a6d291b75acdfc46e5b9b307e5
parent 47b24e7ab977ed31c5a39989d570847d6d0052af
author KN4CK3R <admin@oldschoolhack.me> 1711702962 +0100


@ -9,13 +9,10 @@ import (
"fmt"
"io"
"strings"
"time"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/lfs"
@ -59,118 +56,6 @@ func SyncRepoTags(ctx context.Context, repoID int64) error {
return SyncReleasesWithTags(ctx, repo, gitRepo)
}
// SyncReleasesWithTags synchronizes release table with repository tags
func SyncReleasesWithTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository) error {
log.Debug("SyncReleasesWithTags: in Repo[%d:%s/%s]", repo.ID, repo.OwnerName, repo.Name)
// optimized procedure for pull-mirrors which saves a lot of time (in
// particular for repos with many tags).
if repo.IsMirror {
return pullMirrorReleaseSync(ctx, repo, gitRepo)
}
existingRelTags := make(container.Set[string])
opts := repo_model.FindReleasesOptions{
IncludeDrafts: true,
IncludeTags: true,
ListOptions: db.ListOptions{PageSize: 50},
RepoID: repo.ID,
}
for page := 1; ; page++ {
opts.Page = page
rels, err := db.Find[repo_model.Release](gitRepo.Ctx, opts)
if err != nil {
return fmt.Errorf("unable to GetReleasesByRepoID in Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
}
if len(rels) == 0 {
break
}
for _, rel := range rels {
if rel.IsDraft {
continue
}
commitID, err := gitRepo.GetTagCommitID(rel.TagName)
if err != nil && !git.IsErrNotExist(err) {
return fmt.Errorf("unable to GetTagCommitID for %q in Repo[%d:%s/%s]: %w", rel.TagName, repo.ID, repo.OwnerName, repo.Name, err)
}
if git.IsErrNotExist(err) || commitID != rel.Sha1 {
if err := repo_model.PushUpdateDeleteTag(ctx, repo, rel.TagName); err != nil {
return fmt.Errorf("unable to PushUpdateDeleteTag: %q in Repo[%d:%s/%s]: %w", rel.TagName, repo.ID, repo.OwnerName, repo.Name, err)
}
} else {
existingRelTags.Add(strings.ToLower(rel.TagName))
}
}
}
_, err := gitRepo.WalkReferences(git.ObjectTag, 0, 0, func(sha1, refname string) error {
tagName := strings.TrimPrefix(refname, git.TagPrefix)
if existingRelTags.Contains(strings.ToLower(tagName)) {
return nil
}
if err := PushUpdateAddTag(ctx, repo, gitRepo, tagName, sha1, refname); err != nil {
// sometimes, some tags will be sync failed. i.e. https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tag/?h=v2.6.11
// this is a tree object, not a tag object which created before git
log.Error("unable to PushUpdateAddTag: %q to Repo[%d:%s/%s]: %v", tagName, repo.ID, repo.OwnerName, repo.Name, err)
}
return nil
})
return err
}
// PushUpdateAddTag must be called for any push actions to add tag
func PushUpdateAddTag(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, tagName, sha1, refname string) error {
tag, err := gitRepo.GetTagWithID(sha1, tagName)
if err != nil {
return fmt.Errorf("unable to GetTag: %w", err)
}
commit, err := gitRepo.GetTagCommit(tag.Name)
if err != nil {
return fmt.Errorf("unable to get tag Commit: %w", err)
}
sig := tag.Tagger
if sig == nil {
sig = commit.Author
}
if sig == nil {
sig = commit.Committer
}
var author *user_model.User
createdAt := time.Unix(1, 0)
if sig != nil {
author, err = user_model.GetUserByEmail(ctx, sig.Email)
if err != nil && !user_model.IsErrUserNotExist(err) {
return fmt.Errorf("unable to GetUserByEmail for %q: %w", sig.Email, err)
}
createdAt = sig.When
}
commitsCount, err := commit.CommitsCount()
if err != nil {
return fmt.Errorf("unable to get CommitsCount: %w", err)
}
rel := repo_model.Release{
RepoID: repo.ID,
TagName: tagName,
LowerTagName: strings.ToLower(tagName),
Sha1: commit.ID.String(),
NumCommits: commitsCount,
CreatedUnix: timeutil.TimeStamp(createdAt.Unix()),
IsTag: true,
}
if author != nil {
rel.PublisherID = author.ID
}
return repo_model.SaveOrUpdateTag(ctx, repo, &rel)
}
// StoreMissingLfsObjectsInRepository downloads missing LFS objects
func StoreMissingLfsObjectsInRepository(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, lfsClient lfs.Client) error {
contentStore := lfs.NewContentStore()
@ -286,18 +171,19 @@ func (shortRelease) TableName() string {
return "release"
}
// pullMirrorReleaseSync is a pull-mirror specific tag<->release table
// SyncReleasesWithTags is a tag<->release table
// synchronization which overwrites all Releases from the repository tags. This
// can be relied on since a pull-mirror is always identical to its
// upstream. Hence, after each sync we want the pull-mirror release set to be
// upstream. Hence, after each sync we want the release set to be
// identical to the upstream tag set. This is much more efficient for
// repositories like https://github.com/vim/vim (with over 13000 tags).
func pullMirrorReleaseSync(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository) error {
log.Trace("pullMirrorReleaseSync: rebuilding releases for pull-mirror Repo[%d:%s/%s]", repo.ID, repo.OwnerName, repo.Name)
tags, numTags, err := gitRepo.GetTagInfos(0, 0)
func SyncReleasesWithTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository) error {
log.Debug("SyncReleasesWithTags: in Repo[%d:%s/%s]", repo.ID, repo.OwnerName, repo.Name)
tags, _, err := gitRepo.GetTagInfos(0, 0)
if err != nil {
return fmt.Errorf("unable to GetTagInfos in pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
}
var added, deleted, updated int
err = db.WithTx(ctx, func(ctx context.Context) error {
dbReleases, err := db.Find[shortRelease](ctx, repo_model.FindReleasesOptions{
RepoID: repo.ID,
@ -318,9 +204,7 @@ func pullMirrorReleaseSync(ctx context.Context, repo *repo_model.Repository, git
TagName: tag.Name,
LowerTagName: strings.ToLower(tag.Name),
Sha1: tag.Object.String(),
// NOTE: ignored, since NumCommits are unused
// for pull-mirrors (only relevant when
// displaying releases, IsTag: false)
// NOTE: ignored; the NumCommits value is calculated and cached on demand when the UI requires it.
NumCommits: -1,
CreatedUnix: timeutil.TimeStamp(tag.Tagger.When.Unix()),
IsTag: true,
@ -349,13 +233,14 @@ func pullMirrorReleaseSync(ctx context.Context, repo *repo_model.Repository, git
return fmt.Errorf("unable to update tag %s for pull-mirror Repo[%d:%s/%s]: %w", tag.Name, repo.ID, repo.OwnerName, repo.Name, err)
}
}
added, deleted, updated = len(deletes), len(updates), len(inserts)
return nil
})
if err != nil {
return fmt.Errorf("unable to rebuild release table for pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
}
log.Trace("pullMirrorReleaseSync: done rebuilding %d releases", numTags)
log.Trace("SyncReleasesWithTags: %d tags added, %d tags deleted, %d tags updated", added, deleted, updated)
return nil
}
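
Two pieces of this diff fit together here: SyncReleasesWithTags now writes NumCommits as -1 for every synced tag, and the new UpdateReleaseNumCommits (added in models/repo/release.go above) lets the value be filled in lazily. A hedged sketch of that on-demand path; where it is triggered (e.g. when rendering a release list) is assumed, not shown in this diff:

```go
package release_example // illustrative placement, not from this diff

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/git"
)

func ensureNumCommits(ctx context.Context, gitRepo *git.Repository, rel *repo_model.Release) error {
	if rel.NumCommits != -1 {
		return nil // already calculated and cached
	}
	commit, err := gitRepo.GetTagCommit(rel.TagName)
	if err != nil {
		return err
	}
	if rel.NumCommits, err = commit.CommitsCount(); err != nil {
		return err
	}
	// Persist only the num_commits column, via the helper added above.
	return repo_model.UpdateReleaseNumCommits(ctx, rel)
}
```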

modules/util/map.go (new file, +13 lines)

@ -0,0 +1,13 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package util
func GetMapValueOrDefault[T any](m map[string]any, key string, defaultValue T) T {
if value, ok := m[key]; ok {
if v, ok := value.(T); ok {
return v
}
}
return defaultValue
}
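
For the new helper, the type parameter is inferred from the default value, so a missing key, a nil value, or a value of the wrong dynamic type all fall back to the default. A small usage sketch with illustrative map contents (the test file that follows exercises the same cases):

```go
package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/util"
)

func main() {
	payload := map[string]any{"timeout": "30s", "retries": nil}       // illustrative data
	fmt.Println(util.GetMapValueOrDefault(payload, "timeout", "10s")) // "30s": present and a string
	fmt.Println(util.GetMapValueOrDefault(payload, "retries", 3))     // 3: stored value is nil, not an int
	fmt.Println(util.GetMapValueOrDefault(payload, "workers", 4))     // 4: key is missing
}
```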

modules/util/map_test.go (new file, +26 lines)

@ -0,0 +1,26 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package util
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestGetMapValueOrDefault(t *testing.T) {
testMap := map[string]any{
"key1": "value1",
"key2": 42,
"key3": nil,
}
assert.Equal(t, "value1", GetMapValueOrDefault(testMap, "key1", "default"))
assert.Equal(t, 42, GetMapValueOrDefault(testMap, "key2", 0))
assert.Equal(t, "default", GetMapValueOrDefault(testMap, "key4", "default"))
assert.Equal(t, 100, GetMapValueOrDefault(testMap, "key5", 100))
assert.Equal(t, "default", GetMapValueOrDefault(testMap, "key3", "default"))
}


@ -1228,6 +1228,7 @@ migrate.migrating_issues = Migrating Issues
migrate.migrating_pulls = Migrating Pull Requests
migrate.cancel_migrating_title = Cancel Migration
migrate.cancel_migrating_confirm = Do you want to cancel this migration?
migrating_status = Migrating status
mirror_from = mirror of
forked_from = forked from


@ -1228,6 +1228,7 @@ migrate.migrating_issues=Saincheisteanna Imirce
migrate.migrating_pulls=Iarratais Tarraingthe á n-Imirce
migrate.cancel_migrating_title=Cealaigh Imirce
migrate.cancel_migrating_confirm=Ar mhaith leat an imirce seo a chealú?
migrating_status=Stádas imirce
mirror_from=scáthán de
forked_from=forcailte ó


@ -1190,7 +1190,7 @@ issues.context.edit=Modifica
issues.context.delete=Elimina
issues.reopen_issue=Riapri
issues.create_comment=Commento
issues.closed_at=`chiuso questo probleam <a id="%[1]s" href="#%[1]s">%[2]s</a>`
issues.closed_at=`ha chiuso questo problema <a id="%[1]s" href="#%[1]s">%[2]s</a>`
issues.reopened_at=`riaperto questo problema <a id="%[1]s" href="#%[1]s">%[2]s</a>`
issues.commit_ref_at=`ha fatto riferimento a questa issue dal commit <a id="%[1]s" href="#%[1]s">%[2]s</a>`
issues.ref_issue_from=`<a href="%[3]s">ha fatto riferimento a questo problema %[4]s</a> <a id="%[1]s" href="#%[1]s">%[2]s</a>`


@ -3809,6 +3809,9 @@ runs.no_workflows.documentation=Gitea Actions の詳細については、<a targ
runs.no_runs=ワークフローはまだ実行されていません。
runs.empty_commit_message=(空のコミットメッセージ)
runs.expire_log_message=ログは古すぎるため消去されています。
runs.delete=ワークフローの実行を削除
runs.delete.description=このワークフローを完全に削除してもよろしいですか?この操作は元に戻せません。
runs.not_done=このワークフローの実行は完了していません。
workflow.disable=ワークフローを無効にする
workflow.disable_success=ワークフロー '%s' が無効になりました。


@ -1523,7 +1523,7 @@ issues.remove_labels=removeu os rótulos %s %s
issues.add_remove_labels=adicionou o(s) rótulo(s) %s e removeu %s %s
issues.add_milestone_at=`adicionou esta questão à etapa <b>%s</b> %s`
issues.add_project_at=`adicionou esta questão ao planeamento <b>%s</b> %s`
issues.move_to_column_of_project=`isto foi movido para %s dentro de %s em %s`
issues.move_to_column_of_project=`moveu isto para %s em %s %s`
issues.change_milestone_at=`modificou a etapa de <b>%s</b> para <b>%s</b> %s`
issues.change_project_at=`modificou o planeamento de <b>%s</b> para <b>%s</b> %s`
issues.remove_milestone_at=`removeu esta questão da etapa <b>%s</b> %s`


@ -113,9 +113,11 @@ copy_type_unsupported=Bu dosya türü kopyalanamaz
write=Yaz
preview=Önizleme
loading=Yükleniyor…
files=Dosyalar
error=Hata
error404=Ulaşmaya çalıştığınız sayfa <strong>mevcut değil</strong> veya <strong>görüntüleme yetkiniz yok</strong>.
error503=Sunucu isteğinizi gerçekleştiremedi. Lütfen daha sonra tekrar deneyin.
go_back=Geri Git
invalid_data=Geçersiz veri: %v
@ -128,6 +130,7 @@ pin=Sabitle
unpin=Sabitlemeyi kaldır
artifacts=Yapılar
expired=Süresi doldu
confirm_delete_artifact=%s yapısını silmek istediğinizden emin misiniz?
archived=Arşivlenmiş
@ -169,6 +172,10 @@ search=Ara...
type_tooltip=Arama türü
fuzzy=Bulanık
fuzzy_tooltip=Arama terimine benzeyen sonuçları da içer
words=Kelimeler
words_tooltip=Sadece arama terimi kelimeleriyle eşleşen sonuçları içer
regexp=Regexp
regexp_tooltip=Sadece regexp arama terimiyle tamamen eşleşen sonuçları içer
exact=Tam
exact_tooltip=Sadece arama terimiyle tamamen eşleşen sonuçları içer
repo_kind=Depoları ara...
@ -235,13 +242,17 @@ network_error=Ağ hatası
[startpage]
app_desc=Zahmetsiz, kendi sunucunuzda barındırabileceğiniz Git servisi
install=Kurulumu kolay
install_desc=Platformunuz için <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.com/installation/install-from-binary">ikili dosyayı çalıştırın</a>, <a target="_blank" rel="noopener noreferrer" href="https://github.com/go-gitea/gitea/tree/master/docker">Docker</a> ile yükleyin veya <a target="_blank" rel="noopener noreferrer" href="https://docs.gitea.com/installation/install-from-package">paket</a> olarak edinin.
platform=Farklı platformlarda çalışabilir
platform_desc=Gitea <a target="_blank" rel="noopener noreferrer" href="%s">Go</a> ile derleme yapılabilecek her yerde çalışmaktadır: Windows, macOS, Linux, ARM, vb. Hangisini seviyorsanız onu seçin!
lightweight=Hafif
lightweight_desc=Gitea'nın minimal gereksinimleri çok düşüktür ve ucuz bir Raspberry Pi üzerinde çalışabilmektedir. Makine enerjinizden tasarruf edin!
license=Açık Kaynak
license_desc=Gidin ve <a target="_blank" rel="noopener noreferrer" href="https://code.gitea.io/gitea">code.gitea.io/gitea</a>'yı edinin! Bu projeyi daha da iyi yapmak için <a target="_blank" rel="noopener noreferrer" href="https://github.com/go-gitea/gitea">katkıda bulunarak</a> bize katılın. Katkıda bulunmaktan çekinmeyin!
[install]
install=Kurulum
installing_desc=Şimdi kuruluyor, lütfen bekleyin...
title=Başlangıç Yapılandırması
docker_helper=Eğer Gitea'yı Docker içerisinde çalıştırıyorsanız, lütfen herhangi bir değişiklik yapmadan önce <a target="_blank" rel="noopener noreferrer" href="%s">belgeleri</a> okuyun.
require_db_desc=Gitea MySQL, PostgreSQL, MSSQL, SQLite3 veya TiDB (MySQL protokolü) gerektirir.
@ -352,6 +363,7 @@ enable_update_checker=Güncelleme Denetleyicisini Etkinleştir
enable_update_checker_helper=Düzenli olarak gitea.io'ya bağlanarak yeni yayınlanan sürümleri denetler.
env_config_keys=Ortam Yapılandırma
env_config_keys_prompt=Aşağıdaki ortam değişkenleri de yapılandırma dosyanıza eklenecektir:
config_write_file_prompt=Bu yapılandırma seçenekleri şuraya yazılacak: %s
[home]
nav_menu=Gezinti Menüsü
@ -380,6 +392,12 @@ show_only_public=Yalnızca açık olanlar gösteriliyor
issues.in_your_repos=Depolarınızda
guide_title=Etkinlik yok
guide_desc=Herhangi bir depo veya kullanıcı takip etmiyorsunuz, bu yüzden görüntülenecek bir içerik yok. Aşağıdaki bağlantıları kullanarak ilgi çekici depo ve kullanıcıları keşfedebilirsiniz.
explore_repos=Depoları keşfet
explore_users=Kullanıcıları keşfet
empty_org=Henüz bir organizasyon yok.
empty_repo=Henüz bir depo yok.
[explore]
repos=Depolar
@ -433,6 +451,7 @@ use_scratch_code=Bir çizgi kodu kullanınız
twofa_scratch_used=Geçici kodunuzu kullandınız. İki aşamalı ayarlar sayfasına yönlendirildiniz, burada aygıt kaydınızı kaldırabilir veya yeni bir geçici kod oluşturabilirsiniz.
twofa_passcode_incorrect=Şifreniz yanlış. Aygıtınızı yanlış yerleştirdiyseniz, oturum açmak için çizgi kodunuzu kullanın.
twofa_scratch_token_incorrect=Çizgi kodunuz doğru değildir.
twofa_required=Depolara erişmek için iki aşama doğrulama kullanmanız veya tekrar oturum açmayı denemeniz gereklidir.
login_userpass=Oturum Aç
login_openid=Açık Kimlik
oauth_signup_tab=Yeni Hesap Oluştur
@ -441,6 +460,7 @@ oauth_signup_submit=Hesabı Tamamla
oauth_signin_tab=Mevcut Hesaba Bağla
oauth_signin_title=Bağlantılı Hesabı Yetkilendirmek için Giriş Yapın
oauth_signin_submit=Hesabı Bağla
oauth.signin.error.general=Yetkilendirme isteğini işlerken bir hata oluştu: %s. Eğer hata devam ederse lütfen site yöneticisiyle bağlantıya geçin.
oauth.signin.error.access_denied=Yetkilendirme isteği reddedildi.
oauth.signin.error.temporarily_unavailable=Yetkilendirme sunucusu geçici olarak erişilemez olduğu için yetkilendirme başarısız oldu. Lütfen daha sonra tekrar deneyin.
oauth_callback_unable_auto_reg=Otomatik kayıt etkin ancak OAuth2 Sağlayıcı %[1] eksik sahalar döndürdü: %[2]s, otomatik olarak hesap oluşturulamıyor, lütfen bir hesap oluşturun veya bağlantı verin, veya site yöneticisiyle iletişim kurun.
@ -457,10 +477,12 @@ authorize_application=Uygulamayı Yetkilendir
authorize_redirect_notice=Bu uygulamayı yetkilendirirseniz %s adresine yönlendirileceksiniz.
authorize_application_created_by=Bu uygulama %s tarafından oluşturuldu.
authorize_application_description=Erişime izin verirseniz, özel depolar ve organizasyonlar da dahil olmak üzere tüm hesap bilgilerinize erişebilir ve yazabilir.
authorize_application_with_scopes=Kapsamlar: %s
authorize_title=Hesabınıza erişmesi için "%s" yetkilendirilsin mi?
authorization_failed=Yetkilendirme başarısız oldu
authorization_failed_desc=Geçersiz bir istek tespit ettiğimiz için yetkilendirme başarısız oldu. Lütfen izin vermeye çalıştığınız uygulamanın sağlayıcısı ile iletişim kurun.
sspi_auth_failed=SSPI kimlik doğrulaması başarısız oldu
password_pwned=Seçtiğiniz parola, daha önce herkese açık veri ihlallerinde açığa çıkan bir <a target="_blank" rel="noopener noreferrer" href="%s">çalınan parola listesindedir</a>. Lütfen farklı bir parola ile tekrar deneyin ve başka yerlerde de bu parolayı değiştirmeyi düşünün.
password_pwned_err=HaveIBeenPwned'e yapılan istek tamamlanamadı
last_admin=Son yöneticiyi silemezsiniz. En azından bir yönetici olmalıdır.
signin_passkey=Bir parola anahtarı ile oturum aç
@ -583,6 +605,8 @@ lang_select_error=Listeden bir dil seçin.
username_been_taken=Bu kullanıcı adı daha önce alınmış.
username_change_not_local_user=Yerel olmayan kullanıcılar kendi kullanıcı adlarını değiştiremezler.
change_username_disabled=Kullanıcı adı değişikliği devre dışıdır.
change_full_name_disabled=Tam ad değişikliği devre dışıdır.
username_has_not_been_changed=Kullanıcı adı değişmedi
repo_name_been_taken=Depo adı zaten kullanılıyor.
repository_force_private=Gizliyi Zorla devrede: gizli depolar herkese açık yapılamaz.
@ -632,6 +656,7 @@ org_still_own_repo=Bu organizasyon hala bir veya daha fazla depoya sahip, önce
org_still_own_packages=Bu organizasyon hala bir veya daha fazla pakete sahip, önce onları silin.
target_branch_not_exist=Hedef dal mevcut değil.
target_ref_not_exist=Hedef referans mevcut değil %s
admin_cannot_delete_self=Yöneticiyken kendinizi silemezsiniz. Lütfen önce yönetici haklarınızı kaldırın.
@ -698,14 +723,18 @@ applications=Uygulamalar
orgs=Organizasyonları Yönet
repos=Depolar
delete=Hesabı Sil
twofa=İki Aşamalı Kimlik Doğrulama (TOTP)
account_link=Bağlı Hesaplar
organization=Organizasyonlar
uid=UID
webauthn=İki-Aşamalı Kimlik Doğrulama (Güvenlik Anahtarları)
public_profile=Herkese Açık Profil
biography_placeholder=Bize kendiniz hakkında birşeyler söyleyin! (Markdown kullanabilirsiniz)
location_placeholder=Yaklaşık konumunuzu başkalarıyla paylaşın
profile_desc=Profilinizin başkalarına nasıl gösterildiğini yönetin. Ana e-posta adresiniz bildirimler, parola kurtarma ve web tabanlı Git işlemleri için kullanılacaktır.
password_username_disabled=Yerel olmayan kullanıcılara kullanıcı adlarını değiştirme izni verilmemiştir. Daha fazla bilgi edinmek için lütfen site yöneticisi ile iletişime geçiniz.
password_full_name_disabled=Tam adınızı değiştirme izniniz yoktur. Daha fazla bilgi edinmek için lütfen site yöneticisi ile iletişime geçiniz.
full_name=Ad Soyad
website=Web Sitesi
location=Konum
@ -755,6 +784,7 @@ uploaded_avatar_not_a_image=Yüklenen dosya bir resim dosyası değil.
uploaded_avatar_is_too_big=Yüklenen dosyanın boyutu (%d KiB), azami boyutu (%d KiB) aşıyor.
update_avatar_success=Profil resminiz değiştirildi.
update_user_avatar_success=Kullanıcının avatarı güncellendi.
cropper_prompt=Kaydetmeden önce resmi düzenleyebilirsiniz. Düzenlenen resim PNG biçiminde kaydedilecektir.
change_password=Parolayı Güncelle
old_password=Mevcut Parola
@ -797,6 +827,7 @@ add_email_success=Yeni e-posta adresi eklendi.
email_preference_set_success=E-posta tercihi başarıyla ayarlandı.
add_openid_success=Yeni OpenID adresi eklendi.
keep_email_private=E-posta Adresini Gizle
keep_email_private_popup=Bu, e-posta adresinizi profilde, değişiklik isteği yaptığınızda veya web arayüzünde dosya düzenlediğinizde gizleyecektir. İtilen işlemeler değişmeyecektir.
openid_desc=OpenID, kimlik doğrulama işlemini harici bir sağlayıcıya devretmenize olanak sağlar.
manage_ssh_keys=SSH Anahtarlarını Yönet
@ -898,6 +929,9 @@ permission_not_set=Ayarlanmadı
permission_no_access=Erişim Yok
permission_read=Okunmuş
permission_write=Okuma ve Yazma
permission_anonymous_read=Anonim Okuma
permission_everyone_read=Herkes Okuyabilir
permission_everyone_write=Herkes Yazabilir
access_token_desc=Seçili token izinleri, yetkilendirmeyi ilgili <a %s>API</a> yollarıyla sınırlandıracaktır. Daha fazla bilgi için <a %s>belgeleri</a> okuyun.
at_least_one_permission=Bir token oluşturmak için en azından bir izin seçmelisiniz
permissions_list=İzinler:
@ -925,6 +959,7 @@ oauth2_client_secret_hint=Bu sayfadan ayrıldıktan veya yeniledikten sonra gizl
oauth2_application_edit=Düzenle
oauth2_application_create_description=OAuth2 uygulamaları, üçüncü taraf uygulamanıza bu durumda kullanıcı hesaplarına erişim sağlar.
oauth2_application_remove_description=Bir OAuth2 uygulamasının kaldırılması, bu sunucudaki yetkili kullanıcı hesaplarına erişmesini önler. Devam edilsin mi?
oauth2_application_locked=Gitea kimi OAuth2 uygulamalarının başlangıçta ön kaydını, yapılandırmada etkinleştirilmişse yapabilir. Beklenmeyen davranışı önlemek için bunlar ne düzenlenmeli ne de kaldırılmalı. Daha fazla bilgi için OAuth2 belgesine bakın.
authorized_oauth2_applications=Yetkili OAuth2 Uygulamaları
authorized_oauth2_applications_description=Kişisel Gitea hesabınıza bu üçüncü parti uygulamalara erişim izni verdiniz. Lütfen artık ihtiyaç duyulmayan uygulamalara erişimi iptal edin.
@ -933,13 +968,17 @@ revoke_oauth2_grant=Erişimi İptal Et
revoke_oauth2_grant_description=Bu üçüncü taraf uygulamasına erişimin iptal edilmesi bu uygulamanın verilerinize erişmesini önleyecektir. Emin misiniz?
revoke_oauth2_grant_success=Erişim başarıyla kaldırıldı.
twofa_desc=İki aşamalı kimlik doğrulama, hesabınızın güvenliğini artırır.
twofa_recovery_tip=Aygıtınızı kaybetmeniz durumunda, hesabınıza tekrar erişmek için tek kullanımlık kurtarma anahtarını kullanabileceksiniz.
twofa_is_enrolled=Hesabınız şu anda iki faktörlü kimlik doğrulaması içinde <strong>kaydedilmiş</strong>.
twofa_not_enrolled=Hesabınız şu anda iki faktörlü kimlik doğrulaması içinde kaydedilmemiş.
twofa_disable=İki Aşamalı Doğrulamayı Devre Dışı Bırak
twofa_scratch_token_regenerate=Geçici Kodu Yeniden Üret
twofa_scratch_token_regenerated=Geçici kodunuz şimdi %s. Güvenli bir yerde saklayın, tekrar gösterilmeyecektir.
twofa_enroll=İki Faktörlü Kimlik Doğrulamaya Kaydolun
twofa_disable_note=Gerekirse iki faktörlü kimlik doğrulamayı devre dışı bırakabilirsiniz.
twofa_disable_desc=İki faktörlü kimlik doğrulamayı devre dışı bırakmak hesabınızı daha az güvenli hale getirir. Devam edilsin mi?
regenerate_scratch_token_desc=Geçici kodunuzu kaybettiyseniz veya oturum açmak için kullandıysanız, buradan sıfırlayabilirsiniz.
twofa_disabled=İki faktörlü kimlik doğrulama devre dışı bırakıldı.
scan_this_image=Kim doğrulama uygulamanızla bu görüntüyü tarayın:
or_enter_secret=Veya gizli şeyi girin: %s
@ -993,6 +1032,8 @@ new_repo_helper=Bir depo, sürüm geçmişi dahil tüm proje dosyalarını içer
owner=Sahibi
owner_helper=Bazı organizasyonlar, en çok depo sayısı sınırı nedeniyle açılır menüde görünmeyebilir.
repo_name=Depo İsmi
repo_name_profile_public_hint=.profile herkese açık organizasyonunuzun profiline herkesin görüntüleyebileceği bir README.md dosyası eklemek için kullanabileceğiniz özel bir depodur. Başlamak için herkese açık olduğundan ve profile dizininde README ile başladığınızdan emin olun.
repo_name_profile_private_hint=.profile-private organizasyonunuzun üye profiline sadece organizasyon üyelerinin görüntüleyebileceği bir README.md eklemek için kullanabileceğiniz özel bir depodur. Başlamak için özel olduğundan ve profil dizininde README ile başladığınızdan emin olun.
repo_size=Depo Boyutu
template=Şablon
template_select=Bir şablon seçin.
@ -1011,6 +1052,8 @@ fork_to_different_account=Başka bir hesaba çatalla
fork_visibility_helper=Çatallanmış bir deponun görünürlüğü değiştirilemez.
fork_branch=Çatala klonlanacak dal
all_branches=Tüm dallar
view_all_branches=Tüm dalları görüntüle
view_all_tags=Tüm etiketleri görüntüle
fork_no_valid_owners=Geçerli bir sahibi olmadığı için bu depo çatallanamaz.
fork.blocked_user=Depo çatallanamıyor, depo sahibi tarafından engellenmişsiniz.
use_template=Bu şablonu kullan
@ -1022,6 +1065,8 @@ generate_repo=Depo Oluştur
generate_from=Şuradan Oluştur
repo_desc=Açıklama
repo_desc_helper=Kısa açıklama girin (isteğe bağlı)
repo_no_desc=Hiçbir açıklama sağlanmadı
repo_lang=Dil
repo_gitignore_helper=.gitignore şablonlarını seç.
repo_gitignore_helper_desc=Sık kullanılan diller için bir şablon listesinden hangi dosyaların izlenmeyeceğini seçin. Her dilin oluşturma araçları tarafından oluşturulan tipik yapılar, varsayılan olarak .gitignore dosyasına dahil edilmiştir.
issue_labels=Konu Etiketleri
@ -1029,6 +1074,7 @@ issue_labels_helper=Bir konu etiket seti seçin.
license=Lisans
license_helper=Bir lisans dosyası seçin.
license_helper_desc=Bir lisans, başkalarının kodunuzla neler yapıp yapamayacağını yönetir. Projeniz için hangisinin doğru olduğundan emin değil misiniz? <a target="_blank" rel="noopener noreferrer" href="%s">Lisans seçme</a> konusuna bakın
multiple_licenses=Çoklu Lisans
object_format=Nesne Biçimi
object_format_helper=Deponun nesne biçimi. Daha sonra değiştirilemez. SHA1 en uyumlu olandır.
readme=README
@ -1082,15 +1128,20 @@ delete_preexisting_success=%s içindeki kabul edilmeyen dosyalar silindi
blame_prior=Bu değişiklikten önceki suçu görüntüle
blame.ignore_revs=<a href="%s">.git-blame-ignore-revs</a> dosyasındaki sürümler yok sayılıyor. Bunun yerine normal sorumlu görüntüsü için <a href="%s">buraya tıklayın</a>.
blame.ignore_revs.failed=<a href="%s">.git-blame-ignore-revs</a> dosyasındaki sürümler yok sayılamadı.
user_search_tooltip=En fazla 30 kullanıcı görüntüler
tree_path_not_found=%[1] yolu, %[2]s deposunda mevcut değil
transfer.accept=Aktarımı Kabul Et
transfer.accept_desc=`"%s" tarafına aktar`
transfer.reject=Aktarımı Reddet
transfer.reject_desc=`"%s" tarafına aktarımı iptal et`
transfer.no_permission_to_accept=Bu aktarımı kabul etme izniniz yok.
transfer.no_permission_to_reject=Bu aktarımı reddetme izniniz yok.
desc.private=Özel
desc.public=Genel
desc.public_access=Herkese Açık Erişim
desc.template=Şablon
desc.internal=Dahili
desc.archived=Arşivlenmiş
@ -1160,6 +1211,10 @@ migrate.gogs.description=Notabug.org veya diğer Gogs sunucularından veri aktar
migrate.onedev.description=Code.onedev.io ve diğer OneDev sunucularından veri aktar.
migrate.codebase.description=Codebasehq.com sitesinden veri aktar.
migrate.gitbucket.description=GitBucket sunucularından veri aktar.
migrate.codecommit.aws_access_key_id=AWS Erişim Anahtarı Kimliği
migrate.codecommit.aws_secret_access_key=AWS Gizli Erişim Anahtarı
migrate.codecommit.https_git_credentials_username=HTTPS Git Kimliği Kullanıcı Adı
migrate.codecommit.https_git_credentials_password=HTTPS Git Kimliği Parolası
migrate.migrating_git=Git Verilerini Taşıma
migrate.migrating_topics=Konuları Taşıma
migrate.migrating_milestones=Kilometre Taşlarını Taşıma
@ -1193,6 +1248,7 @@ create_new_repo_command=Komut satırında yeni bir depo oluşturuluyor
push_exist_repo=Komut satırından mevcut bir depo itiliyor
empty_message=Bu depoda herhangi bir içerik yok.
broken_message=Bu deponun altındaki Git verisi okunamıyor. Bu sunucunun yöneticisiyle bağlantıya geçin veya bu depoyu silin.
no_branch=Bu deponun hiç bir dalı yok.
code=Kod
code.desc=Kaynak koda, dosyalara, işlemelere ve dallara eriş.
@ -1302,6 +1358,8 @@ editor.new_branch_name_desc=Yeni dal ismi…
editor.cancel=İptal
editor.filename_cannot_be_empty=Dosya adı boş olamaz.
editor.filename_is_invalid=Dosya adı geçersiz: "%s".
editor.commit_email=İşleme e-postası
editor.invalid_commit_email=İşleme e-postası hatalı.
editor.branch_does_not_exist=Bu depoda "%s" dalı yok.
editor.branch_already_exists=Bu depoda "%s" dalı zaten var.
editor.directory_is_a_file=Dizin adı "%s" zaten bu depoda bir dosya adı olarak kullanılmaktadır.
@ -1350,6 +1408,7 @@ commits.signed_by_untrusted_user_unmatched=İşleyici ile eşleşmeyen güvenilm
commits.gpg_key_id=GPG Anahtar Kimliği
commits.ssh_key_fingerprint=SSH Anahtar Parmak İzi
commits.view_path=Geçmişte bu noktayı görüntüle
commits.view_file_diff=Bu dosyanın bu işlemedeki değişikliklerini görüntüle
commit.operations=İşlemler
commit.revert=Geri Al
@ -1410,6 +1469,8 @@ issues.filter_milestones=Kilometre Taşı Süzgeci
issues.filter_projects=Projeyi Süz
issues.filter_labels=Etiket Süzgeci
issues.filter_reviewers=Gözden Geçiren Süzgeci
issues.filter_no_results=Sonuç yok
issues.filter_no_results_placeholder=Arama filtrelerinizi ayarlamayı deneyin.
issues.new=Yeni Konu
issues.new.title_empty=Başlık boş olamaz
issues.new.labels=Etiketler
@ -1427,6 +1488,7 @@ issues.new.clear_milestone=Kilometre Taşlarını Temizle
issues.new.assignees=Atananlar
issues.new.clear_assignees=Atamaları Temizle
issues.new.no_assignees=Atanan Kişi Yok
issues.new.no_reviewers=Gözden geçiren yok
issues.new.blocked_user=Konu oluşturulamıyor, depo sahibi tarafından engellenmişsiniz.
issues.edit.already_changed=Konuya yapılan değişiklikler kaydedilemiyor. İçerik başka kullanıcı tarafından değiştirilmiş gözüküyor. Diğerlerinin değişikliklerinin üzerine yazmamak için lütfen sayfayı yenileyin ve tekrar düzenlemeye çalışın
issues.edit.blocked_user=İçerik düzenlenemiyor, gönderen veya depo sahibi tarafından engellenmişsiniz.
@ -1483,6 +1545,7 @@ issues.filter_project=Proje
issues.filter_project_all=Tüm projeler
issues.filter_project_none=Proje yok
issues.filter_assignee=Atanan
issues.filter_assignee_no_assignee=Hiç kimseye atanmamış
issues.filter_poster=Yazar
issues.filter_type=Tür
issues.filter_type.all_issues=Tüm konular
@ -2029,6 +2092,7 @@ contributors.contribution_type.deletions=Silmeler
settings=Ayarlar
settings.desc=Ayarlar, deponun ayarlarını yönetebileceğiniz yerdir
settings.options=Depo
settings.public_access=Herkese Açık Erişim
settings.collaboration=Katkıcılar
settings.collaboration.admin=Yönetici
settings.collaboration.write=Yazma

View File

@ -101,7 +101,7 @@ func GetAllOrgs(ctx *context.APIContext) {
listOptions := utils.GetListOptions(ctx)
users, maxResults, err := user_model.SearchUsers(ctx, &user_model.SearchUserOptions{
users, maxResults, err := user_model.SearchUsers(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
Type: user_model.UserTypeOrganization,
OrderBy: db.SearchOrderByAlphabetically,

View File

@ -423,7 +423,7 @@ func SearchUsers(ctx *context.APIContext) {
listOptions := utils.GetListOptions(ctx)
users, maxResults, err := user_model.SearchUsers(ctx, &user_model.SearchUserOptions{
users, maxResults, err := user_model.SearchUsers(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
Type: user_model.UserTypeIndividual,
LoginName: ctx.FormTrim("login_name"),

View File

@ -384,13 +384,13 @@ func (Action) CreateVariable(ctx *context.APIContext) {
// "$ref": "#/definitions/CreateVariableOption"
// responses:
// "201":
// description: response when creating an org-level variable
// "204":
// description: response when creating an org-level variable
// description: successfully created the org-level variable
// "400":
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
// "409":
// description: variable name already exists.
// "500":
// "$ref": "#/responses/error"
opt := web.GetForm(ctx).(*api.CreateVariableOption)
@ -419,7 +419,7 @@ func (Action) CreateVariable(ctx *context.APIContext) {
return
}
ctx.Status(http.StatusNoContent)
ctx.Status(http.StatusCreated)
}
// UpdateVariable update an org-level variable

View File

@ -201,7 +201,7 @@ func GetAll(ctx *context.APIContext) {
listOptions := utils.GetListOptions(ctx)
publicOrgs, maxResults, err := user_model.SearchUsers(ctx, &user_model.SearchUserOptions{
publicOrgs, maxResults, err := user_model.SearchUsers(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
ListOptions: listOptions,
Type: user_model.UserTypeOrganization,

View File

@ -339,12 +339,12 @@ func (Action) CreateVariable(ctx *context.APIContext) {
// responses:
// "201":
// description: response when creating a repo-level variable
// "204":
// description: response when creating a repo-level variable
// "400":
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
// "409":
// description: variable name already exists.
// "500":
// "$ref": "#/responses/error"
opt := web.GetForm(ctx).(*api.CreateVariableOption)
@ -373,7 +373,7 @@ func (Action) CreateVariable(ctx *context.APIContext) {
return
}
ctx.Status(http.StatusNoContent)
ctx.Status(http.StatusCreated)
}
// UpdateVariable update a repo-level variable

View File

@ -73,7 +73,7 @@ func ListPullRequests(ctx *context.APIContext) {
// in: query
// description: Type of sort
// type: string
// enum: [oldest, recentupdate, leastupdate, mostcomment, leastcomment, priority]
// enum: [oldest, recentupdate, recentclose, leastupdate, mostcomment, leastcomment, priority]
// - name: milestone
// in: query
// description: ID of the milestone

View File

@ -258,19 +258,24 @@ func GetCombinedCommitStatusByRef(ctx *context.APIContext) {
repo := ctx.Repo.Repository
statuses, count, err := git_model.GetLatestCommitStatus(ctx, repo.ID, refCommit.Commit.ID.String(), utils.GetListOptions(ctx))
statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, refCommit.Commit.ID.String(), utils.GetListOptions(ctx))
if err != nil {
ctx.APIErrorInternal(fmt.Errorf("GetLatestCommitStatus[%s, %s]: %w", repo.FullName(), refCommit.CommitID, err))
return
}
count, err := git_model.CountLatestCommitStatus(ctx, repo.ID, refCommit.Commit.ID.String())
if err != nil {
ctx.APIErrorInternal(fmt.Errorf("CountLatestCommitStatus[%s, %s]: %w", repo.FullName(), refCommit.CommitID, err))
return
}
ctx.SetTotalCountHeader(count)
if len(statuses) == 0 {
ctx.JSON(http.StatusOK, &api.CombinedStatus{})
return
}
combiStatus := convert.ToCombinedStatus(ctx, statuses, convert.ToRepo(ctx, repo, ctx.Repo.Permission))
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, combiStatus)
}

View File

@ -127,13 +127,11 @@ func CreateVariable(ctx *context.APIContext) {
// "$ref": "#/definitions/CreateVariableOption"
// responses:
// "201":
// description: response when creating a variable
// "204":
// description: response when creating a variable
// description: successfully created the user-level variable
// "400":
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
// "409":
// description: variable name already exists.
opt := web.GetForm(ctx).(*api.CreateVariableOption)
@ -162,7 +160,7 @@ func CreateVariable(ctx *context.APIContext) {
return
}
ctx.Status(http.StatusNoContent)
ctx.Status(http.StatusCreated)
}
// UpdateVariable update a user-level variable which is created by current doer

View File

@ -73,7 +73,7 @@ func Search(ctx *context.APIContext) {
if ctx.PublicOnly {
visible = []structs.VisibleType{structs.VisibleTypePublic}
}
users, maxResults, err = user_model.SearchUsers(ctx, &user_model.SearchUserOptions{
users, maxResults, err = user_model.SearchUsers(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
Keyword: ctx.FormTrim("q"),
UID: uid,

View File

@ -27,7 +27,7 @@ func Organizations(ctx *context.Context) {
ctx.SetFormString("sort", UserSearchDefaultAdminSort)
}
explore.RenderUserSearch(ctx, &user_model.SearchUserOptions{
explore.RenderUserSearch(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
Type: user_model.UserTypeOrganization,
IncludeReserved: true, // administrator needs to list all accounts include reserved

View File

@ -64,7 +64,7 @@ func Users(ctx *context.Context) {
"SortType": sortType,
}
explore.RenderUserSearch(ctx, &user_model.SearchUserOptions{
explore.RenderUserSearch(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
Type: user_model.UserTypeIndividual,
ListOptions: db.ListOptions{

View File

@ -44,7 +44,7 @@ func Organizations(ctx *context.Context) {
ctx.SetFormString("sort", sortOrder)
}
RenderUserSearch(ctx, &user_model.SearchUserOptions{
RenderUserSearch(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
Type: user_model.UserTypeOrganization,
ListOptions: db.ListOptions{PageSize: setting.UI.ExplorePagingNum},

View File

@ -32,7 +32,7 @@ func isKeywordValid(keyword string) bool {
}
// RenderUserSearch render user search page
func RenderUserSearch(ctx *context.Context, opts *user_model.SearchUserOptions, tplName templates.TplName) {
func RenderUserSearch(ctx *context.Context, opts user_model.SearchUserOptions, tplName templates.TplName) {
// Sitemap index for sitemap paths
opts.Page = int(ctx.PathParamInt64("idx"))
isSitemap := ctx.PathParam("idx") != ""
@ -151,7 +151,7 @@ func Users(ctx *context.Context) {
ctx.SetFormString("sort", sortOrder)
}
RenderUserSearch(ctx, &user_model.SearchUserOptions{
RenderUserSearch(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
Type: user_model.UserTypeIndividual,
ListOptions: db.ListOptions{PageSize: setting.UI.ExplorePagingNum},

View File

@ -68,7 +68,7 @@ func Home(ctx *context.Context) {
func HomeSitemap(ctx *context.Context) {
m := sitemap.NewSitemapIndex()
if !setting.Service.Explore.DisableUsersPage {
_, cnt, err := user_model.SearchUsers(ctx, &user_model.SearchUserOptions{
_, cnt, err := user_model.SearchUsers(ctx, user_model.SearchUserOptions{
Type: user_model.UserTypeIndividual,
ListOptions: db.ListOptions{PageSize: 1},
IsActive: optional.Some(true),

View File

@ -377,7 +377,7 @@ func Diff(ctx *context.Context) {
ctx.Data["FileIconPoolHTML"] = renderedIconPool.RenderToHTML()
}
statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, commitID, db.ListOptionsAll)
statuses, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, commitID, db.ListOptionsAll)
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
}

View File

@ -291,7 +291,7 @@ func prepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue)
if len(compareInfo.Commits) != 0 {
sha := compareInfo.Commits[0].ID.String()
commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, sha, db.ListOptionsAll)
commitStatuses, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, sha, db.ListOptionsAll)
if err != nil {
ctx.ServerError("GetLatestCommitStatus", err)
return nil
@ -358,7 +358,7 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
ctx.ServerError(fmt.Sprintf("GetRefCommitID(%s)", pull.GetGitRefName()), err)
return nil
}
commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
commitStatuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
if err != nil {
ctx.ServerError("GetLatestCommitStatus", err)
return nil
@ -454,7 +454,7 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
return nil
}
commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
commitStatuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
if err != nil {
ctx.ServerError("GetLatestCommitStatus", err)
return nil

View File

@ -130,7 +130,7 @@ func getReleaseInfos(ctx *context.Context, opts *repo_model.FindReleasesOptions)
}
if canReadActions {
statuses, _, err := git_model.GetLatestCommitStatus(ctx, r.Repo.ID, r.Sha1, db.ListOptionsAll)
statuses, err := git_model.GetLatestCommitStatus(ctx, r.Repo.ID, r.Sha1, db.ListOptionsAll)
if err != nil {
return nil, err
}

View File

@ -131,7 +131,7 @@ func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool {
ctx.Data["LatestCommitVerification"] = verification
ctx.Data["LatestCommitUser"] = user_model.ValidateCommitWithEmail(ctx, latestCommit)
statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, latestCommit.ID.String(), db.ListOptionsAll)
statuses, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, latestCommit.ID.String(), db.ListOptionsAll)
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
}

View File

@ -16,7 +16,7 @@ import (
// SearchCandidates searches candidate users for dropdown list
func SearchCandidates(ctx *context.Context) {
users, _, err := user_model.SearchUsers(ctx, &user_model.SearchUserOptions{
users, _, err := user_model.SearchUsers(ctx, user_model.SearchUserOptions{
Actor: ctx.Doer,
Keyword: ctx.FormTrim("q"),
Type: user_model.UserTypeIndividual,

View File

@ -155,6 +155,22 @@ func CleanupEphemeralRunners(ctx context.Context) error {
return nil
}
// CleanupEphemeralRunnersByPickedTaskOfRepo removes all ephemeral runners that have active/finished tasks on the given repository
func CleanupEphemeralRunnersByPickedTaskOfRepo(ctx context.Context, repoID int64) error {
subQuery := builder.Select("`action_runner`.id").
From(builder.Select("*").From("`action_runner`"), "`action_runner`"). // mysql needs this redundant subquery
Join("INNER", "`action_task`", "`action_task`.`runner_id` = `action_runner`.`id`").
Where(builder.And(builder.Eq{"`action_runner`.`ephemeral`": true}, builder.Eq{"`action_task`.`repo_id`": repoID}))
b := builder.Delete(builder.In("id", subQuery)).From("`action_runner`")
res, err := db.GetEngine(ctx).Exec(b)
if err != nil {
return fmt.Errorf("find runners: %w", err)
}
affected, _ := res.RowsAffected()
log.Info("Removed %d runners", affected)
return nil
}
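For readers less familiar with xorm's builder package, the delete above corresponds roughly to the raw SQL sketched below (an assumption based on the quoted identifiers in the builder calls, not verified builder output). The derived table is what the "mysql needs this redundant subquery" comment refers to: MySQL refuses a DELETE whose subquery reads the target table directly (error 1093), so the inner SELECT wrapper is added to satisfy it.

// Sketch only, not part of the diff: approximate SQL for the builder query above.
const cleanupEphemeralRunnersByRepoSQL = `
DELETE FROM action_runner WHERE id IN (
    SELECT action_runner.id
    FROM (SELECT * FROM action_runner) AS action_runner
    INNER JOIN action_task ON action_task.runner_id = action_runner.id
    WHERE action_runner.ephemeral = ? AND action_task.repo_id = ?
)`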
// DeleteRun deletes workflow run, including all logs and artifacts.
func DeleteRun(ctx context.Context, run *actions_model.ActionRun) error {
if !run.Status.IsDone() {

View File

@ -92,7 +92,7 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er
}
ctxname := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event)
state := toCommitStatus(job.Status)
if statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil {
if statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll); err == nil {
for _, v := range statuses {
if v.Context == ctxname {
if v.State == state {

View File

@ -15,6 +15,7 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"github.com/nektos/act/pkg/model"
)
@ -167,34 +168,34 @@ func mergeTwoOutputs(o1, o2 map[string]string) map[string]string {
func (g *GiteaContext) ToGitHubContext() *model.GithubContext {
return &model.GithubContext{
Event: (*g)["event"].(map[string]any),
EventPath: (*g)["event_path"].(string),
Workflow: (*g)["workflow"].(string),
RunID: (*g)["run_id"].(string),
RunNumber: (*g)["run_number"].(string),
Actor: (*g)["actor"].(string),
Repository: (*g)["repository"].(string),
EventName: (*g)["event_name"].(string),
Sha: (*g)["sha"].(string),
Ref: (*g)["ref"].(string),
RefName: (*g)["ref_name"].(string),
RefType: (*g)["ref_type"].(string),
HeadRef: (*g)["head_ref"].(string),
BaseRef: (*g)["base_ref"].(string),
Event: util.GetMapValueOrDefault(*g, "event", map[string]any(nil)),
EventPath: util.GetMapValueOrDefault(*g, "event_path", ""),
Workflow: util.GetMapValueOrDefault(*g, "workflow", ""),
RunID: util.GetMapValueOrDefault(*g, "run_id", ""),
RunNumber: util.GetMapValueOrDefault(*g, "run_number", ""),
Actor: util.GetMapValueOrDefault(*g, "actor", ""),
Repository: util.GetMapValueOrDefault(*g, "repository", ""),
EventName: util.GetMapValueOrDefault(*g, "event_name", ""),
Sha: util.GetMapValueOrDefault(*g, "sha", ""),
Ref: util.GetMapValueOrDefault(*g, "ref", ""),
RefName: util.GetMapValueOrDefault(*g, "ref_name", ""),
RefType: util.GetMapValueOrDefault(*g, "ref_type", ""),
HeadRef: util.GetMapValueOrDefault(*g, "head_ref", ""),
BaseRef: util.GetMapValueOrDefault(*g, "base_ref", ""),
Token: "", // deliberately omitted for security
Workspace: (*g)["workspace"].(string),
Action: (*g)["action"].(string),
ActionPath: (*g)["action_path"].(string),
ActionRef: (*g)["action_ref"].(string),
ActionRepository: (*g)["action_repository"].(string),
Job: (*g)["job"].(string),
Workspace: util.GetMapValueOrDefault(*g, "workspace", ""),
Action: util.GetMapValueOrDefault(*g, "action", ""),
ActionPath: util.GetMapValueOrDefault(*g, "action_path", ""),
ActionRef: util.GetMapValueOrDefault(*g, "action_ref", ""),
ActionRepository: util.GetMapValueOrDefault(*g, "action_repository", ""),
Job: util.GetMapValueOrDefault(*g, "job", ""),
JobName: "", // not present in GiteaContext
RepositoryOwner: (*g)["repository_owner"].(string),
RetentionDays: (*g)["retention_days"].(string),
RepositoryOwner: util.GetMapValueOrDefault(*g, "repository_owner", ""),
RetentionDays: util.GetMapValueOrDefault(*g, "retention_days", ""),
RunnerPerflog: "", // not present in GiteaContext
RunnerTrackingID: "", // not present in GiteaContext
ServerURL: (*g)["server_url"].(string),
APIURL: (*g)["api_url"].(string),
GraphQLURL: (*g)["graphql_url"].(string),
ServerURL: util.GetMapValueOrDefault(*g, "server_url", ""),
APIURL: util.GetMapValueOrDefault(*g, "api_url", ""),
GraphQLURL: util.GetMapValueOrDefault(*g, "graphql_url", ""),
}
}
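The rewrite above replaces unchecked type assertions such as (*g)["workflow"].(string), which panic when a key is missing or holds an unexpected type, with a tolerant lookup helper from modules/util. A minimal sketch of what such a generic helper might look like (assumed shape, for illustration only; the real implementation may differ):

// GetMapValueOrDefault-style helper: return the value for key if it is present and
// has the requested type, otherwise fall back to the caller-supplied default.
func GetMapValueOrDefault[T any](m map[string]any, key string, defaultValue T) T {
	if v, ok := m[key]; ok {
		if typed, ok := v.(T); ok {
			return typed
		}
	}
	return defaultValue
}

With this shape, util.GetMapValueOrDefault(*g, "event", map[string]any(nil)) yields a nil map instead of panicking when the "event" key was never set.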

View File

@ -936,6 +936,15 @@ func RepoRefByType(detectRefType git.RefType) func(*Context) {
ctx.ServerError("GetCommitsCount", err)
return
}
if ctx.Repo.RefFullName.IsTag() {
rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, ctx.Repo.RefFullName.TagName())
if err == nil && rel.NumCommits <= 0 {
rel.NumCommits = ctx.Repo.CommitsCount
if err := repo_model.UpdateReleaseNumCommits(ctx, rel); err != nil {
log.Error("UpdateReleaseNumCommits", err)
}
}
}
ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount
ctx.Repo.GitRepo.LastCommitCache = git.NewLastCommitCache(ctx.Repo.CommitsCount, ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, cache.GetCache())
}
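The backfill above relies on -1 being stored as a "not yet computed" sentinel when tags are pushed (see the pushUpdateAddTags change later in this diff), so the potentially expensive commit count is only calculated the first time someone views the tag. A plausible sketch of what repo_model.UpdateReleaseNumCommits does under that design (assumed; the real function may differ):

// Persist only the lazily computed counter for an existing release.
func UpdateReleaseNumCommits(ctx context.Context, rel *Release) error {
	_, err := db.GetEngine(ctx).ID(rel.ID).Cols("num_commits").Update(rel)
	return err
}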

View File

@ -84,7 +84,7 @@ func ParseCommitsWithStatus(ctx context.Context, oldCommits []*asymkey_model.Sig
commit := &git_model.SignCommitWithStatuses{
SignCommit: c,
}
statuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptions{})
statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptionsAll)
if err != nil {
return nil, err
}

View File

@ -54,6 +54,8 @@ func ToggleAssigneeWithNotify(ctx context.Context, issue *issues_model.Issue, do
if err != nil {
return false, nil, err
}
issue.AssigneeID = assigneeID
issue.Assignee = assignee
notify_service.IssueChangeAssignee(ctx, doer, issue, assignee, removed, comment)

View File

@ -180,11 +180,15 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag
continue
}
target := orig.PullRequestTargets[0]
description := ""
if orig.Description != nil {
description = *orig.Description
}
pr := &base.PullRequest{
Number: number,
Title: *orig.Title,
PosterName: c.getUsernameFromARN(*orig.AuthorArn),
Content: *orig.Description,
Content: description,
State: "open",
Created: *orig.CreationDate,
Updated: *orig.LastActivityDate,
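The guard above exists because the CodeCommit SDK models the pull request description as a *string that may be nil when the author left it empty; dereferencing it unconditionally, as the old *orig.Description did, panics in that case. A generic nil-safe dereference helper would express the same idea (sketch only, not part of the diff; if this downloader uses aws-sdk-go-v2, aws.ToString offers the same behaviour for *string):

// derefOr returns *p when p is non-nil, otherwise the supplied default.
func derefOr[T any](p *T, def T) T {
	if p == nil {
		return def
	}
	return *p
}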

View File

@ -151,7 +151,7 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR
return "", errors.Wrap(err, "LoadBaseRepo")
}
commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
commitStatuses, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
if err != nil {
return "", errors.Wrap(err, "GetLatestCommitStatus")
}

View File

@ -1004,7 +1004,7 @@ func getAllCommitStatus(ctx context.Context, gitRepo *git.Repository, pr *issues
return nil, nil, shaErr
}
statuses, _, err = git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
statuses, err = git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
lastStatus = git_model.CalcCommitStatus(statuses)
return statuses, lastStatus, err
}

View File

@ -27,6 +27,7 @@ import (
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
actions_service "code.gitea.io/gitea/services/actions"
asymkey_service "code.gitea.io/gitea/services/asymkey"
"xorm.io/builder"
@ -133,6 +134,14 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID
return err
}
// CleanupEphemeralRunnersByPickedTaskOfRepo deletes ephemeral global/org/user runners that have started any task of this repo.
// The "cannot pick a second task" hardening for ephemeral runners expects that task objects remain available until runner deletion,
// so the affected ephemeral global/org/user runners are deleted here.
// &actions_model.ActionRunner{RepoID: repoID} only handles ephemeral repository runners.
if err := actions_service.CleanupEphemeralRunnersByPickedTaskOfRepo(ctx, repoID); err != nil {
return fmt.Errorf("cleanupEphemeralRunners: %w", err)
}
if err := db.DeleteBeans(ctx,
&access_model.Access{RepoID: repo.ID},
&activities_model.Action{RepoID: repo.ID},

View File

@ -121,7 +121,7 @@ func (graph *Graph) LoadAndProcessCommits(ctx context.Context, repository *repo_
return repo_model.IsOwnerMemberCollaborator(ctx, repository, user.ID)
}, &keyMap)
statuses, _, err := git_model.GetLatestCommitStatus(ctx, repository.ID, c.Commit.ID.String(), db.ListOptions{})
statuses, err := git_model.GetLatestCommitStatus(ctx, repository.ID, c.Commit.ID.String(), db.ListOptionsAll)
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
} else {

View File

@ -149,9 +149,9 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
return repo, fmt.Errorf("SyncRepoBranchesWithRepo: %v", err)
}
// if releases migration is not requested, we will sync all tags here
// otherwise, the releases sync will be done out of this function
if !opts.Releases {
// note: this will greatly improve release (tag) sync
// for pull-mirrors with many tags
repo.IsMirror = opts.Mirror
if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
log.Error("Failed to synchronize tags to releases for repository: %v", err)

View File

@ -344,7 +344,7 @@ func pushDeleteBranch(ctx context.Context, repo *repo_model.Repository, pusher *
// PushUpdateAddDeleteTags updates a number of added and delete tags
func PushUpdateAddDeleteTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, addTags, delTags []string) error {
return db.WithTx(ctx, func(ctx context.Context) error {
if err := repo_model.PushUpdateDeleteTagsContext(ctx, repo, delTags); err != nil {
if err := repo_model.PushUpdateDeleteTags(ctx, repo, delTags); err != nil {
return err
}
return pushUpdateAddTags(ctx, repo, gitRepo, addTags)
@ -415,11 +415,6 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo
createdAt = sig.When
}
commitsCount, err := commit.CommitsCount()
if err != nil {
return fmt.Errorf("CommitsCount: %w", err)
}
rel, has := relMap[lowerTag]
parts := strings.SplitN(tag.Message, "\n", 2)
@ -435,7 +430,7 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo
LowerTagName: lowerTag,
Target: "",
Sha1: commit.ID.String(),
NumCommits: commitsCount,
NumCommits: -1, // the commits count will be updated when the UI needs it
Note: note,
IsDraft: false,
IsPrerelease: false,
@ -450,7 +445,6 @@ func pushUpdateAddTags(ctx context.Context, repo *repo_model.Repository, gitRepo
} else {
rel.Sha1 = commit.ID.String()
rel.CreatedUnix = timeutil.TimeStamp(createdAt.Unix())
rel.NumCommits = commitsCount
if rel.IsTag {
rel.Title = parts[0]
rel.Note = note

View File

@ -36,15 +36,23 @@
<div class="run-list-meta">{{svg "octicon-calendar" 16}}{{DateUtils.TimeSince $run.Updated}}</div>
<div class="run-list-meta">{{svg "octicon-stopwatch" 16}}{{$run.Duration}}</div>
</div>
{{if and ($.AllowDeleteWorkflowRuns) ($run.Status.IsDone)}}
<button class="btn interact-bg link-action tw-p-2"
data-url="{{$run.Link}}/delete"
data-modal-confirm="{{ctx.Locale.Tr "actions.runs.delete.description"}}"
data-tooltip-content="{{ctx.Locale.Tr "actions.runs.delete"}}"
>
{{svg "octicon-trash"}}
</button>
{{end}}
<div class="ui dropdown jump tw-p-2">
{{svg "octicon-kebab-horizontal"}}
<div class="menu flex-items-menu">
<!-- TODO: This redundant link should be replaced by something else in the future,
because we have not figured out how to add a "View Workflow" entry or anything similar to what GitHub offers.
Related: https://github.com/go-gitea/gitea/pull/34530 -->
<a class="item" href="{{$run.Link}}">{{svg "octicon-play"}}{{ctx.Locale.Tr "view"}}</a>
{{if and $.AllowDeleteWorkflowRuns $run.Status.IsDone}}
<a class="item link-action"
data-url="{{$run.Link}}/delete"
data-modal-confirm="{{ctx.Locale.Tr "actions.runs.delete.description"}}"
>
{{svg "octicon-trash"}}{{ctx.Locale.Tr "actions.runs.delete"}}
</a>
{{end}}
</div>
</div>
</div>
</div>
{{end}}

View File

@ -226,11 +226,19 @@
</a>
{{end}}
</div>
{{else if .Permission.IsAdmin}}
{{else}}
<div class="overflow-menu-items">
{{if(and .Repository.IsBeingCreated (.Permission.CanRead ctx.Consts.RepoUnitTypeCode))}}
<a class="{{if not .PageIsRepoSettings}}active {{end}}item" href="{{.RepoLink}}">
{{svg "octicon-clock"}} {{ctx.Locale.Tr "repo.migrating_status"}}
</a>
{{end}}
{{if .Permission.IsAdmin}}
<a class="{{if .PageIsRepoSettings}}active {{end}} item" href="{{.RepoLink}}/settings">
{{svg "octicon-tools"}} {{ctx.Locale.Tr "repo.settings"}}
</a>
{{end}}
</div>
{{end}}
</overflow-menu>

View File

@ -61,7 +61,7 @@
{{if $release.CreatedUnix}}
<span class="time">{{DateUtils.TimeSince $release.CreatedUnix}}</span>
{{end}}
{{if and (not $release.IsDraft) ($.Permission.CanRead ctx.Consts.RepoUnitTypeCode)}}
{{if and (gt $release.NumCommits 0) (not $release.IsDraft) ($.Permission.CanRead ctx.Consts.RepoUnitTypeCode)}}
| <span class="ahead"><a href="{{$.RepoLink}}/compare/{{$release.TagName | PathEscapeSegments}}...{{$release.TargetBehind | PathEscapeSegments}}">{{ctx.Locale.Tr "repo.release.ahead.commits" $release.NumCommitsBehind}}</a> {{ctx.Locale.Tr "repo.release.ahead.target" $release.TargetBehind}}</span>
{{end}}
</p>

View File

@ -2259,16 +2259,16 @@
],
"responses": {
"201": {
"description": "response when creating an org-level variable"
},
"204": {
"description": "response when creating an org-level variable"
"description": "successfully created the org-level variable"
},
"400": {
"$ref": "#/responses/error"
},
"404": {
"$ref": "#/responses/notFound"
"409": {
"description": "variable name already exists."
},
"500": {
"$ref": "#/responses/error"
}
}
},
@ -5263,14 +5263,14 @@
"201": {
"description": "response when creating a repo-level variable"
},
"204": {
"description": "response when creating a repo-level variable"
},
"400": {
"$ref": "#/responses/error"
},
"404": {
"$ref": "#/responses/notFound"
"409": {
"description": "variable name already exists."
},
"500": {
"$ref": "#/responses/error"
}
}
},
@ -12871,6 +12871,7 @@
"enum": [
"oldest",
"recentupdate",
"recentclose",
"leastupdate",
"mostcomment",
"leastcomment",
@ -17863,16 +17864,13 @@
],
"responses": {
"201": {
"description": "response when creating a variable"
},
"204": {
"description": "response when creating a variable"
"description": "successfully created the user-level variable"
},
"400": {
"$ref": "#/responses/error"
},
"404": {
"$ref": "#/responses/notFound"
"409": {
"description": "variable name already exists."
}
}
},

View File

@ -633,7 +633,7 @@ jobs:
assert.NotEmpty(t, addFileResp)
sha = addFileResp.Commit.SHA
assert.Eventually(t, func() bool {
latestCommitStatuses, _, err := git_model.GetLatestCommitStatus(db.DefaultContext, repo.ID, sha, db.ListOptionsAll)
latestCommitStatuses, err := git_model.GetLatestCommitStatus(db.DefaultContext, repo.ID, sha, db.ListOptionsAll)
assert.NoError(t, err)
if len(latestCommitStatuses) == 0 {
return false
@ -676,7 +676,7 @@ jobs:
}
func checkCommitStatusAndInsertFakeStatus(t *testing.T, repo *repo_model.Repository, sha string) {
latestCommitStatuses, _, err := git_model.GetLatestCommitStatus(db.DefaultContext, repo.ID, sha, db.ListOptionsAll)
latestCommitStatuses, err := git_model.GetLatestCommitStatus(db.DefaultContext, repo.ID, sha, db.ListOptionsAll)
assert.NoError(t, err)
assert.Len(t, latestCommitStatuses, 1)
assert.Equal(t, api.CommitStatusPending, latestCommitStatuses[0].State)

View File

@ -41,8 +41,6 @@ func testActionsRunnerAdmin(t *testing.T) {
runnerList := api.ActionRunnersResponse{}
DecodeJSON(t, runnerListResp, &runnerList)
assert.Len(t, runnerList.Entries, 4)
idx := slices.IndexFunc(runnerList.Entries, func(e *api.ActionRunner) bool { return e.ID == 34349 })
require.NotEqual(t, -1, idx)
expectedRunner := runnerList.Entries[idx]
@ -160,16 +158,20 @@ func testActionsRunnerOwner(t *testing.T) {
runnerList := api.ActionRunnersResponse{}
DecodeJSON(t, runnerListResp, &runnerList)
assert.Len(t, runnerList.Entries, 1)
assert.Equal(t, "runner_to_be_deleted-org", runnerList.Entries[0].Name)
assert.Equal(t, int64(34347), runnerList.Entries[0].ID)
assert.False(t, runnerList.Entries[0].Ephemeral)
assert.Len(t, runnerList.Entries[0].Labels, 2)
assert.Equal(t, "runner_to_be_deleted", runnerList.Entries[0].Labels[0].Name)
assert.Equal(t, "linux", runnerList.Entries[0].Labels[1].Name)
idx := slices.IndexFunc(runnerList.Entries, func(e *api.ActionRunner) bool { return e.ID == 34347 })
require.NotEqual(t, -1, idx)
expectedRunner := runnerList.Entries[idx]
require.NotNil(t, expectedRunner)
assert.Equal(t, "runner_to_be_deleted-org", expectedRunner.Name)
assert.Equal(t, int64(34347), expectedRunner.ID)
assert.False(t, expectedRunner.Ephemeral)
assert.Len(t, expectedRunner.Labels, 2)
assert.Equal(t, "runner_to_be_deleted", expectedRunner.Labels[0].Name)
assert.Equal(t, "linux", expectedRunner.Labels[1].Name)
// Verify get the runner by id
req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/orgs/org3/actions/runners/%d", runnerList.Entries[0].ID)).AddTokenAuth(token)
req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/orgs/org3/actions/runners/%d", expectedRunner.ID)).AddTokenAuth(token)
runnerResp := MakeRequest(t, req, http.StatusOK)
runner := api.ActionRunner{}
@ -183,11 +185,11 @@ func testActionsRunnerOwner(t *testing.T) {
assert.Equal(t, "linux", runner.Labels[1].Name)
// Verify delete the runner by id
req = NewRequest(t, "DELETE", fmt.Sprintf("/api/v1/orgs/org3/actions/runners/%d", runnerList.Entries[0].ID)).AddTokenAuth(token)
req = NewRequest(t, "DELETE", fmt.Sprintf("/api/v1/orgs/org3/actions/runners/%d", expectedRunner.ID)).AddTokenAuth(token)
MakeRequest(t, req, http.StatusNoContent)
// Verify runner deletion
req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/orgs/org3/actions/runners/%d", runnerList.Entries[0].ID)).AddTokenAuth(token)
req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/orgs/org3/actions/runners/%d", expectedRunner.ID)).AddTokenAuth(token)
MakeRequest(t, req, http.StatusNotFound)
})

View File

@ -35,11 +35,11 @@ func TestAPIRepoVariables(t *testing.T) {
},
{
Name: "_",
ExpectedStatus: http.StatusNoContent,
ExpectedStatus: http.StatusCreated,
},
{
Name: "TEST_VAR",
ExpectedStatus: http.StatusNoContent,
ExpectedStatus: http.StatusCreated,
},
{
Name: "test_var",
@ -81,7 +81,7 @@ func TestAPIRepoVariables(t *testing.T) {
req := NewRequestWithJSON(t, "POST", url, api.CreateVariableOption{
Value: "initial_val",
}).AddTokenAuth(token)
MakeRequest(t, req, http.StatusNoContent)
MakeRequest(t, req, http.StatusCreated)
cases := []struct {
Name string
@ -138,7 +138,7 @@ func TestAPIRepoVariables(t *testing.T) {
req := NewRequestWithJSON(t, "POST", url, api.CreateVariableOption{
Value: "initial_val",
}).AddTokenAuth(token)
MakeRequest(t, req, http.StatusNoContent)
MakeRequest(t, req, http.StatusCreated)
req = NewRequest(t, "DELETE", url).AddTokenAuth(token)
MakeRequest(t, req, http.StatusNoContent)

View File

@ -29,11 +29,11 @@ func TestAPIUserVariables(t *testing.T) {
},
{
Name: "_",
ExpectedStatus: http.StatusNoContent,
ExpectedStatus: http.StatusCreated,
},
{
Name: "TEST_VAR",
ExpectedStatus: http.StatusNoContent,
ExpectedStatus: http.StatusCreated,
},
{
Name: "test_var",
@ -75,7 +75,7 @@ func TestAPIUserVariables(t *testing.T) {
req := NewRequestWithJSON(t, "POST", url, api.CreateVariableOption{
Value: "initial_val",
}).AddTokenAuth(token)
MakeRequest(t, req, http.StatusNoContent)
MakeRequest(t, req, http.StatusCreated)
cases := []struct {
Name string
@ -132,7 +132,7 @@ func TestAPIUserVariables(t *testing.T) {
req := NewRequestWithJSON(t, "POST", url, api.CreateVariableOption{
Value: "initial_val",
}).AddTokenAuth(token)
MakeRequest(t, req, http.StatusNoContent)
MakeRequest(t, req, http.StatusCreated)
req = NewRequest(t, "DELETE", url).AddTokenAuth(token)
MakeRequest(t, req, http.StatusNoContent)

View File

@ -0,0 +1,79 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package integration
import (
"testing"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/util"
repo_service "code.gitea.io/gitea/services/repository"
user_service "code.gitea.io/gitea/services/user"
"code.gitea.io/gitea/tests"
"github.com/stretchr/testify/assert"
)
func TestEphemeralActionsRunnerDeletion(t *testing.T) {
t.Run("ByTaskCompletion", testEphemeralActionsRunnerDeletionByTaskCompletion)
t.Run("ByRepository", testEphemeralActionsRunnerDeletionByRepository)
t.Run("ByUser", testEphemeralActionsRunnerDeletionByUser)
}
// Test that the ephemeral runner is deleted when the task is finished
func testEphemeralActionsRunnerDeletionByTaskCompletion(t *testing.T) {
defer tests.PrepareTestEnv(t)()
_, err := actions_model.GetRunnerByID(t.Context(), 34350)
assert.NoError(t, err)
task := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionTask{ID: 52})
assert.Equal(t, actions_model.StatusRunning, task.Status)
task.Status = actions_model.StatusSuccess
err = actions_model.UpdateTask(t.Context(), task, "status")
assert.NoError(t, err)
_, err = actions_model.GetRunnerByID(t.Context(), 34350)
assert.ErrorIs(t, err, util.ErrNotExist)
}
func testEphemeralActionsRunnerDeletionByRepository(t *testing.T) {
defer tests.PrepareTestEnv(t)()
_, err := actions_model.GetRunnerByID(t.Context(), 34350)
assert.NoError(t, err)
task := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionTask{ID: 52})
assert.Equal(t, actions_model.StatusRunning, task.Status)
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
err = repo_service.DeleteRepositoryDirectly(t.Context(), user, task.RepoID, true)
assert.NoError(t, err)
_, err = actions_model.GetRunnerByID(t.Context(), 34350)
assert.ErrorIs(t, err, util.ErrNotExist)
}
// Test that the ephemeral runner is deleted when a user is deleted
func testEphemeralActionsRunnerDeletionByUser(t *testing.T) {
defer tests.PrepareTestEnv(t)()
_, err := actions_model.GetRunnerByID(t.Context(), 34350)
assert.NoError(t, err)
task := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionTask{ID: 52})
assert.Equal(t, actions_model.StatusRunning, task.Status)
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
err = user_service.DeleteUser(t.Context(), user, true)
assert.NoError(t, err)
_, err = actions_model.GetRunnerByID(t.Context(), 34350)
assert.ErrorIs(t, err, util.ErrNotExist)
}

View File

@ -151,6 +151,15 @@ func testNewIssue(t *testing.T, session *TestSession, user, repo, title, content
return issueURL
}
func testIssueAssign(t *testing.T, session *TestSession, repoLink string, issueID, assigneeID int64) {
req := NewRequestWithValues(t, "POST", fmt.Sprintf(repoLink+"/issues/assignee?issue_ids=%d", issueID), map[string]string{
"_csrf": GetUserCSRFToken(t, session),
"id": strconv.FormatInt(assigneeID, 10),
"action": "", // empty action means assign
})
session.MakeRequest(t, req, http.StatusOK)
}
func testIssueAddComment(t *testing.T, session *TestSession, issueURL, content, status string) int64 {
req := NewRequest(t, "GET", issueURL)
resp := session.MakeRequest(t, req, http.StatusOK)

View File

@ -131,19 +131,19 @@ func (m *mockWebhookProvider) Close() {
}
func Test_WebhookCreate(t *testing.T) {
var payloads []api.CreatePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.CreatePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = string(webhook_module.HookEventCreate)
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.CreatePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.CreatePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = string(webhook_module.HookEventCreate)
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -163,19 +163,19 @@ func Test_WebhookCreate(t *testing.T) {
}
func Test_WebhookDelete(t *testing.T) {
var payloads []api.DeletePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.DeletePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "delete"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.DeletePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.DeletePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "delete"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -196,19 +196,19 @@ func Test_WebhookDelete(t *testing.T) {
}
func Test_WebhookFork(t *testing.T) {
var payloads []api.ForkPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.ForkPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "fork"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.ForkPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.ForkPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "fork"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user1")
@ -228,19 +228,19 @@ func Test_WebhookFork(t *testing.T) {
}
func Test_WebhookIssueComment(t *testing.T) {
var payloads []api.IssueCommentPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.IssueCommentPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "issue_comment"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.IssueCommentPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.IssueCommentPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "issue_comment"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -312,19 +312,19 @@ func Test_WebhookIssueComment(t *testing.T) {
}
func Test_WebhookRelease(t *testing.T) {
var payloads []api.ReleasePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.ReleasePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "release"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.ReleasePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.ReleasePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "release"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -345,19 +345,19 @@ func Test_WebhookRelease(t *testing.T) {
}
func Test_WebhookPush(t *testing.T) {
var payloads []api.PushPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.PushPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "push"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.PushPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.PushPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "push"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -416,19 +416,19 @@ func Test_WebhookPushDevBranch(t *testing.T) {
}
func Test_WebhookIssue(t *testing.T) {
var payloads []api.IssuePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.IssuePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "issues"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.IssuePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.IssuePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "issues"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -445,6 +445,45 @@ func Test_WebhookIssue(t *testing.T) {
assert.Equal(t, "user2/repo1", payloads[0].Issue.Repo.FullName)
assert.Equal(t, "Title1", payloads[0].Issue.Title)
assert.Equal(t, "Description1", payloads[0].Issue.Body)
assert.Positive(t, payloads[0].Issue.Created.Unix())
assert.Positive(t, payloads[0].Issue.Updated.Unix())
})
}
func Test_WebhookIssueAssign(t *testing.T) {
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.PullRequestPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.PullRequestPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "pull_request_assign"
}, http.StatusOK)
defer provider.Close()
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
repo1 := unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 1})
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
testAPICreateWebhookForRepo(t, session, "user2", "repo1", provider.URL(), "pull_request_assign")
// 2. trigger the webhook, issue 2 is a pull request
testIssueAssign(t, session, repo1.Link(), 2, user2.ID)
// 3. validate the webhook is triggered
assert.Equal(t, "pull_request_assign", triggeredEvent)
assert.Len(t, payloads, 1)
assert.EqualValues(t, "assigned", payloads[0].Action)
assert.Equal(t, "repo1", payloads[0].PullRequest.Base.Repository.Name)
assert.Equal(t, "user2/repo1", payloads[0].PullRequest.Base.Repository.FullName)
assert.Equal(t, "issue2", payloads[0].PullRequest.Title)
assert.Equal(t, "content for the second issue", payloads[0].PullRequest.Body)
assert.Equal(t, user2.ID, payloads[0].PullRequest.Assignee.ID)
})
}
@ -521,19 +560,19 @@ func Test_WebhookIssueMilestone(t *testing.T) {
}
func Test_WebhookPullRequest(t *testing.T) {
var payloads []api.PullRequestPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.PullRequestPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "pull_request"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.PullRequestPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.PullRequestPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "pull_request"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -558,19 +597,19 @@ func Test_WebhookPullRequest(t *testing.T) {
}
func Test_WebhookPullRequestComment(t *testing.T) {
var payloads []api.IssueCommentPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.IssueCommentPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "pull_request_comment"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.IssueCommentPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.IssueCommentPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "pull_request_comment"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -596,19 +635,19 @@ func Test_WebhookPullRequestComment(t *testing.T) {
}
func Test_WebhookWiki(t *testing.T) {
var payloads []api.WikiPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.WikiPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "wiki"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.WikiPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.WikiPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "wiki"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -628,19 +667,19 @@ func Test_WebhookWiki(t *testing.T) {
}
func Test_WebhookRepository(t *testing.T) {
var payloads []api.RepositoryPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.RepositoryPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "repository"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.RepositoryPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.RepositoryPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "repository"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user1")
@ -660,19 +699,19 @@ func Test_WebhookRepository(t *testing.T) {
}
func Test_WebhookPackage(t *testing.T) {
var payloads []api.PackagePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.PackagePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "package"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.PackagePayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
content, _ := io.ReadAll(r.Body)
var payload api.PackagePayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "package"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user1")
@ -697,24 +736,24 @@ func Test_WebhookPackage(t *testing.T) {
}
func Test_WebhookStatus(t *testing.T) {
var payloads []api.CommitStatusPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
assert.Contains(t, r.Header["X-Github-Event-Type"], "status", "X-GitHub-Event-Type should contain status")
assert.Contains(t, r.Header["X-Github-Hook-Installation-Target-Type"], "repository", "X-GitHub-Hook-Installation-Target-Type should contain repository")
assert.Contains(t, r.Header["X-Gitea-Event-Type"], "status", "X-Gitea-Event-Type should contain status")
assert.Contains(t, r.Header["X-Gitea-Hook-Installation-Target-Type"], "repository", "X-Gitea-Hook-Installation-Target-Type should contain repository")
assert.Contains(t, r.Header["X-Gogs-Event-Type"], "status", "X-Gogs-Event-Type should contain status")
content, _ := io.ReadAll(r.Body)
var payload api.CommitStatusPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "status"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.CommitStatusPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
assert.Contains(t, r.Header["X-Github-Event-Type"], "status", "X-GitHub-Event-Type should contain status")
assert.Contains(t, r.Header["X-Github-Hook-Installation-Target-Type"], "repository", "X-GitHub-Hook-Installation-Target-Type should contain repository")
assert.Contains(t, r.Header["X-Gitea-Event-Type"], "status", "X-Gitea-Event-Type should contain status")
assert.Contains(t, r.Header["X-Gitea-Hook-Installation-Target-Type"], "repository", "X-Gitea-Hook-Installation-Target-Type should contain repository")
assert.Contains(t, r.Header["X-Gogs-Event-Type"], "status", "X-Gogs-Event-Type should contain status")
content, _ := io.ReadAll(r.Body)
var payload api.CommitStatusPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "status"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -750,16 +789,16 @@ func Test_WebhookStatus(t *testing.T) {
}
func Test_WebhookStatus_NoWrongTrigger(t *testing.T) {
var trigger string
provider := newMockWebhookProvider(func(r *http.Request) {
assert.NotContains(t, r.Header["X-Github-Event-Type"], "status", "X-GitHub-Event-Type should not contain status")
assert.NotContains(t, r.Header["X-Gitea-Event-Type"], "status", "X-Gitea-Event-Type should not contain status")
assert.NotContains(t, r.Header["X-Gogs-Event-Type"], "status", "X-Gogs-Event-Type should not contain status")
trigger = "push"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var trigger string
provider := newMockWebhookProvider(func(r *http.Request) {
assert.NotContains(t, r.Header["X-Github-Event-Type"], "status", "X-GitHub-Event-Type should not contain status")
assert.NotContains(t, r.Header["X-Gitea-Event-Type"], "status", "X-Gitea-Event-Type should not contain status")
assert.NotContains(t, r.Header["X-Gogs-Event-Type"], "status", "X-Gogs-Event-Type should not contain status")
trigger = "push"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
session := loginUser(t, "user2")
@ -775,22 +814,22 @@ func Test_WebhookStatus_NoWrongTrigger(t *testing.T) {
}
func Test_WebhookWorkflowJob(t *testing.T) {
var payloads []api.WorkflowJobPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
assert.Contains(t, r.Header["X-Github-Event-Type"], "workflow_job", "X-GitHub-Event-Type should contain workflow_job")
assert.Contains(t, r.Header["X-Gitea-Event-Type"], "workflow_job", "X-Gitea-Event-Type should contain workflow_job")
assert.Contains(t, r.Header["X-Gogs-Event-Type"], "workflow_job", "X-Gogs-Event-Type should contain workflow_job")
content, _ := io.ReadAll(r.Body)
var payload api.WorkflowJobPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "workflow_job"
}, http.StatusOK)
defer provider.Close()
onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
var payloads []api.WorkflowJobPayload
var triggeredEvent string
provider := newMockWebhookProvider(func(r *http.Request) {
assert.Contains(t, r.Header["X-Github-Event-Type"], "workflow_job", "X-GitHub-Event-Type should contain workflow_job")
assert.Contains(t, r.Header["X-Gitea-Event-Type"], "workflow_job", "X-Gitea-Event-Type should contain workflow_job")
assert.Contains(t, r.Header["X-Gogs-Event-Type"], "workflow_job", "X-Gogs-Event-Type should contain workflow_job")
content, _ := io.ReadAll(r.Body)
var payload api.WorkflowJobPayload
err := json.Unmarshal(content, &payload)
assert.NoError(t, err)
payloads = append(payloads, payload)
triggeredEvent = "workflow_job"
}, http.StatusOK)
defer provider.Close()
// 1. create a new webhook with special webhook for repo1
user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
session := loginUser(t, "user2")