0
0
mirror of https://github.com/go-gitea/gitea.git synced 2026-05-12 11:13:21 +02:00

Merge branch 'main' into main

This commit is contained in:
Karthik Bhandary 2026-04-08 09:34:16 +05:30 committed by GitHub
commit 7af8f5f920
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 707 additions and 61 deletions

View File

@ -175,14 +175,15 @@ RUN_USER = ; git
;; The port number the builtin SSH server should listen on, defaults to SSH_PORT
;SSH_LISTEN_PORT =
;;
;; Root path of SSH directory, default is '~/.ssh', but you have to use '/home/git/.ssh'.
;; Root path of SSH user directory for the system's standalone SSH server if Gitea is not using its builtin SSH server.
;; Default is the '.ssh' directory in the run user's home directory.
;SSH_ROOT_PATH =
;;
;; Gitea will create a authorized_keys file by default when it is not using the internal ssh server
;; Gitea will create an authorized_keys file by default when it is not using the builtin SSH server
;; If you intend to use the AuthorizedKeysCommand functionality then you should turn this off.
;SSH_CREATE_AUTHORIZED_KEYS_FILE = true
;;
;; Gitea will create a authorized_principals file by default when it is not using the internal ssh server
;; Gitea will create an authorized_principals file by default when it is not using the builtin SSH server
;; If you intend to use the AuthorizedPrincipalsCommand functionality then you should turn this off.
;SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE = true
;;
@ -1178,16 +1179,16 @@ LEVEL = Info
;[repository.release]
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types.
;; Comma-separated list of allowed release attachment file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types.
;ALLOWED_TYPES =
;;
;; Number of releases that are displayed on release page
;DEFAULT_PAGING_NUM = 10
;;
;; Max size of each file in megabytes. Defaults to 2GB
;; Max size of each release attachment file in megabytes. Defaults to 2GB
;FILE_MAX_SIZE = 2048
;;
;; Max number of files per upload. Defaults to 5
;; Max number of release attachment files per upload. Defaults to 5
;MAX_FILES = 5
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@ -1994,16 +1995,18 @@ LEVEL = Info
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; Whether issue and pull request attachments are enabled. Defaults to `true`
;; Whether issue, pull-request and release attachments are enabled. Defaults to `true`
;; ALLOWED_TYPES/MAX_SIZE/MAX_FILES in this section only affect issue and pull-request attachments, not release attachments.
;; Release attachment has its own config options in [repository.release] section.
;ENABLED = true
;;
;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types.
;; Comma-separated list of allowed issue/pull-request attachment file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types.
;ALLOWED_TYPES = .avif,.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip
;;
;; Max size of each file. Defaults to 100MB
;; Max size of each issue/pull-request attachment file. Defaults to 100MB
;MAX_SIZE = 100
;;
;; Max number of files per upload. Defaults to 5
;; Max number of issue/pull-request attachment files per upload. Defaults to 5
;MAX_FILES = 5
;;
;; Storage type for attachments, `local` for local disk or `minio` for s3 compatible

View File

@ -1,2 +1,8 @@
#!/bin/bash
# $1 = exit code of the run script, $2 = signal
# On abnormal exit, pause briefly before the supervisor restarts sshd.
if test "$1" -ne 0; then
    # avoid immediately restarting the sshd service, which may cause CPU 100% if the error (permission, configuration) is not fixed
    echo "openssh failed with exit code $1 - waiting a short delay before attempting a restart"
    sleep 3
fi
# always report success so the supervisor proceeds with the restart
exit 0

View File

@ -46,10 +46,11 @@ type Package struct {
}
type VersionMetadata struct {
License string `json:"license,omitempty"`
ProjectURL string `json:"project_url,omitempty"`
Summary string `json:"summary,omitempty"`
Description string `json:"description,omitempty"`
License string `json:"license,omitempty"`
ProjectURL string `json:"project_url,omitempty"`
Summary string `json:"summary,omitempty"`
Description string `json:"description,omitempty"`
Updates []*Update `json:"updates,omitempty"`
}
type FileMetadata struct {
@ -296,3 +297,43 @@ func getChangelogs(h *rpmutils.RpmHeader) []*Changelog {
}
return changelogs
}
// DateAttr holds a date value that is serialized as the "date" XML attribute
// of an updateinfo element and as a plain "date" field in JSON metadata.
type DateAttr struct {
	Date string `xml:"date,attr" json:"date"`
}
// Update describes one errata/advisory entry (an <update> element) of an RPM
// repository's updateinfo.xml. The same shape is stored as JSON inside the
// package version metadata.
type Update struct {
	From        string        `xml:"from,attr" json:"from"`       // advisory issuer (e.g. a contact address)
	Status      string        `xml:"status,attr" json:"status"`   // advisory status (e.g. "stable")
	Type        string        `xml:"type,attr" json:"type"`       // advisory type (e.g. "security")
	Version     string        `xml:"version,attr" json:"version"` // advisory revision, not the package version
	ID          string        `xml:"id" json:"id"`                // advisory identifier (e.g. a CVE ID); used as the merge key
	Title       string        `xml:"title" json:"title"`
	Severity    string        `xml:"severity" json:"severity"`
	Description string        `xml:"description" json:"description"`
	Issued      *DateAttr     `xml:"issued" json:"issued"`   // first publication date; may be nil until set
	Updated     *DateAttr     `xml:"updated" json:"updated"` // last modification date; may be nil until set
	References  []*Reference  `xml:"references>reference" json:"references"`
	PkgList     []*Collection `xml:"pkglist>collection" json:"pkg_list"`
}
// Reference is a single external link of an advisory (a <reference> element),
// e.g. a CVE or bug-tracker entry.
type Reference struct {
	Href  string `xml:"href,attr" json:"href"`
	ID    string `xml:"id,attr" json:"id"`
	Title string `xml:"title,attr" json:"title"`
	Type  string `xml:"type,attr" json:"type"` // reference kind, e.g. "cve"
}
// Collection groups the packages affected by an advisory (a <collection>
// element inside <pkglist>); collections are matched by their Short name.
type Collection struct {
	Short    string           `xml:"short,attr" json:"short"`
	Packages []*UpdatePackage `xml:"package" json:"packages"`
}
// UpdatePackage identifies one package build affected by an advisory; the
// (Name, Version, Release, Arch) tuple is used for deduplication when merging.
type UpdatePackage struct {
	Arch     string `xml:"arch,attr" json:"arch"`
	Name     string `xml:"name,attr" json:"name"`
	Release  string `xml:"release,attr" json:"release"`
	Src      string `xml:"src,attr" json:"src"` // source RPM file name
	Version  string `xml:"version,attr" json:"version"`
	Filename string `xml:"filename" json:"filename"`
}

View File

@ -173,6 +173,8 @@
"search.org_kind": "Cuardaigh eagraíochtaí…",
"search.team_kind": "Cuardaigh foirne…",
"search.code_kind": "Cuardaigh cód…",
"search.code_empty": "Tosaigh cuardach cóid.",
"search.code_empty_description": "Cuir isteach eochairfhocal chun cuardach a dhéanamh ar fud an chóid.",
"search.code_search_unavailable": "Níl cuardach cód ar fáil faoi láthair. Déan teagmháil le riarthóir an láithreáin.",
"search.code_search_by_git_grep": "Soláthraíonn “git grep” torthaí cuardaigh cód reatha. D'fhéadfadh torthaí níos fearr a bheith ann má chuireann riarthóir an láithreáin ar chumas Innéacsaithe",
"search.package_kind": "Cuardaigh pacáistí…",
@ -1216,7 +1218,7 @@
"repo.ambiguous_runes_description": "Tá carachtair Unicode sa chomhad seo a d'fhéadfadh a bheith mearbhall le carachtair eile. Má cheapann tú go bhfuil sé seo d'aon ghnó, is féidir leat neamhaird a dhéanamh go sábháilte don rabhadh seo. Úsáid an cnaipe Escape chun iad a nochtadh.",
"repo.invisible_runes_line": "Tá carachtair unicode dofheicthe ag an líne seo ",
"repo.ambiguous_runes_line": "Tá carachtair unicode débhríoch ag an líne seo ",
"repo.ambiguous_character": "Is féidir %[1]c [U+%04[1]X] a mheascadh le %[2]c [U+%04[2]X]",
"repo.ambiguous_character": "Is féidir mearbhall a dhéanamh idir %[1]s agus %[2]s",
"repo.escape_control_characters": "Éalú",
"repo.unescape_control_characters": "Dí-Éalú",
"repo.file_copy_permalink": "Cóipeáil Buan-nasc",
@ -1357,10 +1359,13 @@
"repo.projects.desc": "Saincheisteanna a bhainistiú agus tionscadail a tharraingt isteach.",
"repo.projects.description": "Cur síos (roghnach)",
"repo.projects.description_placeholder": "Cur síos",
"repo.projects.empty": "Gan aon tionscadail go fóill.",
"repo.projects.empty_description": "Cruthaigh tionscadal chun saincheisteanna agus iarratais tarraingthe a chomhordú.",
"repo.projects.create": "Cruthaigh Tionscadal",
"repo.projects.title": "Teideal",
"repo.projects.new": "Tionscadal Nua",
"repo.projects.new_subheader": "Déan do chuid oibre a chomhordú, a rianú agus a nuashonrú in aon áit amháin, ionas go bhfanann na tionscadail trédhearcach agus de réir sceidil.",
"repo.projects.no_results": "Níl aon tionscadail a oireann do do chuardach.",
"repo.projects.create_success": "Tá an tionscadal \"%s\" cruthaithe.",
"repo.projects.deletion": "Scrios tionscadal",
"repo.projects.deletion_desc": "Má scriostar tionscadal, bainfear de gach saincheist a bhaineann leis é. Lean ort?",
@ -2722,6 +2727,8 @@
"org.members": "Comhaltaí",
"org.teams": "Foirne",
"org.code": "Cód",
"org.repos.empty": "Gan aon stórtha fós.",
"org.repos.empty_description": "Cruthaigh stórlann chun cód a roinnt leis an eagraíocht.",
"org.lower_members": "comhaltaí",
"org.lower_repositories": "stórais",
"org.create_new_team": "Foireann Nua",
@ -2858,6 +2865,8 @@
"org.worktime.date_range_end": "Dáta deiridh",
"org.worktime.query": "Ceist",
"org.worktime.time": "Am",
"org.worktime.empty": "Gan aon sonraí ama oibre go fóill.",
"org.worktime.empty_description": "Coigeartaigh an raon dáta chun an t-am rianaithe a fheiceáil.",
"org.worktime.by_repositories": "De réir stórtha",
"org.worktime.by_milestones": "De réir clocha míle",
"org.worktime.by_members": "Ag baill",
@ -3506,6 +3515,7 @@
"packages.dependencies": "Spleithiúlachtaí",
"packages.keywords": "Eochairfhocail",
"packages.details": "Sonraí",
"packages.name": "Ainm an Phacáiste",
"packages.details.author": "Údar",
"packages.details.project_site": "Suíomh an Tionscadail",
"packages.details.repository_site": "Suíomh Stóras",
@ -3601,6 +3611,18 @@
"packages.swift.registry": "Socraigh an clárlann seo ón líne ordaithe:",
"packages.swift.install": "Cuir an pacáiste i do <code>chomhad Package.swift</code>:",
"packages.swift.install2": "agus reáchtáil an t-ordú seo a leanas:",
"packages.terraform.install": "Socraigh do stát chun an cúltaca HTTP a úsáid",
"packages.terraform.install2": "agus reáchtáil an t-ordú seo a leanas:",
"packages.terraform.lock_status": "Stádas Glas",
"packages.terraform.locked_by": "Glasáilte ag %s",
"packages.terraform.unlocked": "Díghlasáilte",
"packages.terraform.lock": "Glas",
"packages.terraform.unlock": "Díghlasáil",
"packages.terraform.lock.success": "Glasáladh staid Terraform go rathúil.",
"packages.terraform.unlock.success": "Díghlasáladh staid Terraform go rathúil.",
"packages.terraform.lock.error.already_locked": "Tá staid Terraform faoi ghlas cheana féin.",
"packages.terraform.delete.locked": "Tá staid an Terraform faoi ghlas agus ní féidir é a scriosadh.",
"packages.terraform.delete.latest": "Ní féidir an leagan is déanaí de staid Terraform a scriosadh.",
"packages.vagrant.install": "Chun bosca Vagrant a chur leis, reáchtáil an t-ordú seo a leanas:",
"packages.settings.link": "Nasc an pacáiste seo le stóras",
"packages.settings.link.description": "Má nascann tú pacáiste le stórlann, beidh an pacáiste le feiceáil i liosta pacáistí an stórlainne. Ní féidir ach stórlanna faoin úinéir céanna a nascadh. Má fhágtar an réimse folamh, bainfear an nasc.",
@ -3614,8 +3636,13 @@
"packages.settings.delete": "Scrios pacáiste",
"packages.settings.delete.description": "Tá pacáiste a scriosadh buan agus ní féidir é a chur ar ais.",
"packages.settings.delete.notice": "Tá tú ar tí %s (%s) a scriosadh. Tá an oibríocht seo dochúlaithe, an bhfuil tú cinnte?",
"packages.settings.delete.notice.package": "Tá tú ar tí %s agus a leaganacha uile a scriosadh. Ní féidir an oibríocht seo a aisiompú, an bhfuil tú cinnte?",
"packages.settings.delete.success": "Tá an pacáiste scriosta.",
"packages.settings.delete.version.success": "Scriosadh an leagan pacáiste.",
"packages.settings.delete.error": "Theip ar an pacáiste a scriosadh.",
"packages.settings.delete.version": "Scrios an leagan",
"packages.settings.delete.confirm": "Cuir isteach ainm an phacáiste le deimhniú",
"packages.settings.delete.invalid_package_name": "Tá ainm an phacáiste a chuir tú isteach mícheart.",
"packages.owner.settings.cargo.title": "Innéacs Clárlann Lasta",
"packages.owner.settings.cargo.initialize": "Innéacs a chur i dtosach",
"packages.owner.settings.cargo.initialize.description": "Tá gá le stóras innéacs speisialta Git chun an clárlann Cargo a úsáid. Tríd an rogha seo, cruthófar an stóras (nó athchruthófar é) agus cumrófar é go huathoibríoch.",

View File

@ -473,6 +473,7 @@ func CommonRoutes() *web.Router {
g.MatchPath("HEAD", "/<group:*>/repodata/<filename>", rpm.CheckRepositoryFileExistence)
g.MatchPath("GET", "/<group:*>/repodata/<filename>", rpm.GetRepositoryFile)
g.MatchPath("PUT", "/<group:*>/upload", reqPackageAccess(perm.AccessModeWrite), rpm.UploadPackageFile)
g.MatchPath("POST", "/<group:*>/package/<name>/<version>/errata", reqPackageAccess(perm.AccessModeWrite), rpm.UploadErrata)
// this URL pattern is only used internally in the RPM index, it is generated by us, the filename part is not really used (can be anything)
g.MatchPath("HEAD,GET", "/<group:*>/package/<name>/<version>/<architecture>/<filename>", rpm.DownloadPackageFile)
g.MatchPath("HEAD,GET", "/<group:*>/package/<name>/<version>/<architecture>", rpm.DownloadPackageFile)

View File

@ -10,6 +10,7 @@ import (
"io"
"net/http"
"strings"
"time"
"code.gitea.io/gitea/models/db"
packages_model "code.gitea.io/gitea/models/packages"
@ -316,3 +317,146 @@ func DeletePackageFile(webctx *context.Context) {
webctx.Status(http.StatusNoContent)
}
// UploadErrata handles uploading errata (advisory) information for an existing
// RPM package version. The request body is a JSON array of rpm_module.Update
// entries. Entries whose ID matches a stored advisory are merged into it
// (collections matched by short name, packages deduplicated); new entries are
// appended with issued/updated timestamps defaulted to now. On success the
// repository metadata is rebuilt so updateinfo.xml reflects the change.
func UploadErrata(ctx *context.Context) {
	name := ctx.PathParam("name")
	version := ctx.PathParam("version")
	group := ctx.PathParam("group")

	var updates []*rpm_module.Update
	if err := json.NewDecoder(ctx.Req.Body).Decode(&updates); err != nil {
		apiError(ctx, http.StatusBadRequest, err)
		return
	}

	pv, err := packages_model.GetVersionByNameAndVersion(ctx,
		ctx.Package.Owner.ID,
		packages_model.TypeRpm,
		name,
		version,
	)
	if err != nil {
		if errors.Is(err, util.ErrNotExist) {
			apiError(ctx, http.StatusNotFound, err)
		} else {
			apiError(ctx, http.StatusInternalServerError, err)
		}
		return
	}

	var vm *rpm_module.VersionMetadata
	if pv.MetadataJSON != "" {
		if err := json.Unmarshal([]byte(pv.MetadataJSON), &vm); err != nil {
			apiError(ctx, http.StatusInternalServerError, err)
			return
		}
	}
	if vm == nil {
		// Covers both an empty MetadataJSON and a stored literal JSON "null",
		// which would otherwise leave vm nil and panic on vm.Updates below.
		vm = &rpm_module.VersionMetadata{}
	}

	now := time.Now().Format("2006-01-02 15:04:05")
	for _, u := range updates {
		if u == nil {
			continue
		}
		// Sanitize to remove nil elements from the JSON payload.
		u.PkgList = sanitizeErrataPkgList(u.PkgList)

		if existing := findErrataUpdate(vm.Updates, u.ID); existing != nil {
			mergeErrataUpdate(existing, u, now)
			continue
		}
		if u.Issued == nil {
			u.Issued = &rpm_module.DateAttr{Date: now}
		}
		if u.Updated == nil {
			u.Updated = &rpm_module.DateAttr{Date: now}
		}
		vm.Updates = append(vm.Updates, u)
	}

	vmBytes, err := json.Marshal(vm)
	if err != nil {
		apiError(ctx, http.StatusInternalServerError, err)
		return
	}
	pv.MetadataJSON = string(vmBytes)
	if err := packages_model.UpdateVersion(ctx, pv); err != nil {
		apiError(ctx, http.StatusInternalServerError, err)
		return
	}
	if err := rpm_service.BuildSpecificRepositoryFiles(ctx, ctx.Package.Owner.ID, group); err != nil {
		apiError(ctx, http.StatusInternalServerError, err)
		return
	}
	ctx.Status(http.StatusOK)
}

// sanitizeErrataPkgList drops nil collections and nil packages that may appear
// in a hand-written JSON payload (e.g. explicit "null" array elements).
func sanitizeErrataPkgList(pkgList []*rpm_module.Collection) []*rpm_module.Collection {
	var cleaned []*rpm_module.Collection
	for _, coll := range pkgList {
		if coll == nil {
			continue
		}
		var pkgs []*rpm_module.UpdatePackage
		for _, pkg := range coll.Packages {
			if pkg == nil {
				continue
			}
			pkgs = append(pkgs, pkg)
		}
		coll.Packages = pkgs
		cleaned = append(cleaned, coll)
	}
	return cleaned
}

// findErrataUpdate returns the stored update with the given advisory ID, or
// nil if none matches. Nil entries (possible in legacy stored metadata) are
// skipped instead of panicking.
func findErrataUpdate(updates []*rpm_module.Update, id string) *rpm_module.Update {
	for _, u := range updates {
		if u != nil && u.ID == id {
			return u
		}
	}
	return nil
}

// mergeErrataUpdate merges the incoming update u into the stored update
// in place: collections are matched by their Short name, packages are
// deduplicated by (name, version, release, arch), the descriptive fields are
// overwritten by the incoming values and the "updated" timestamp is refreshed.
func mergeErrataUpdate(existing, u *rpm_module.Update, now string) {
	for _, newColl := range u.PkgList {
		if newColl == nil {
			continue
		}
		collFound := false
		for _, existingColl := range existing.PkgList {
			if existingColl == nil || existingColl.Short != newColl.Short {
				continue
			}
			// Same collection: append only packages not already present.
			for _, newPkg := range newColl.Packages {
				if newPkg == nil {
					continue
				}
				if !containsErrataPackage(existingColl.Packages, newPkg) {
					existingColl.Packages = append(existingColl.Packages, newPkg)
				}
			}
			collFound = true
			break
		}
		if !collFound {
			existing.PkgList = append(existing.PkgList, newColl)
		}
	}
	existing.From = u.From
	existing.Status = u.Status
	existing.Type = u.Type
	existing.Version = u.Version
	existing.Title = u.Title
	existing.Severity = u.Severity
	existing.Description = u.Description
	existing.References = u.References
	existing.Updated = &rpm_module.DateAttr{Date: now}
}

// containsErrataPackage reports whether pkgs already contains a package with
// the same name, version, release and architecture as pkg.
func containsErrataPackage(pkgs []*rpm_module.UpdatePackage, pkg *rpm_module.UpdatePackage) bool {
	for _, p := range pkgs {
		if p != nil &&
			p.Name == pkg.Name &&
			p.Version == pkg.Version &&
			p.Release == pkg.Release &&
			p.Arch == pkg.Arch {
			return true
		}
	}
	return false
}

View File

@ -186,7 +186,7 @@ func CreateIssueAttachment(ctx *context.APIContext) {
}
uploaderFile := attachment_service.NewLimitedUploaderKnownSize(file, header.Size)
attachment, err := attachment_service.UploadAttachmentGeneralSizeLimit(ctx, uploaderFile, setting.Attachment.AllowedTypes, &repo_model.Attachment{
attachment, err := attachment_service.UploadAttachmentForIssue(ctx, uploaderFile, &repo_model.Attachment{
Name: filename,
UploaderID: ctx.Doer.ID,
RepoID: ctx.Repo.Repository.ID,

View File

@ -193,7 +193,7 @@ func CreateIssueCommentAttachment(ctx *context.APIContext) {
}
uploaderFile := attachment_service.NewLimitedUploaderKnownSize(file, header.Size)
attachment, err := attachment_service.UploadAttachmentGeneralSizeLimit(ctx, uploaderFile, setting.Attachment.AllowedTypes, &repo_model.Attachment{
attachment, err := attachment_service.UploadAttachmentForIssue(ctx, uploaderFile, &repo_model.Attachment{
Name: filename,
UploaderID: ctx.Doer.ID,
RepoID: ctx.Repo.Repository.ID,

View File

@ -242,7 +242,7 @@ func CreateReleaseAttachment(ctx *context.APIContext) {
}
// Create a new attachment and save the file
attach, err := attachment_service.UploadAttachmentReleaseSizeLimit(ctx, uploaderFile, setting.Repository.Release.AllowedTypes, &repo_model.Attachment{
attach, err := attachment_service.UploadAttachmentForRelease(ctx, uploaderFile, &repo_model.Attachment{
Name: filename,
UploaderID: ctx.Doer.ID,
RepoID: ctx.Repo.Repository.ID,

View File

@ -23,16 +23,16 @@ import (
// UploadIssueAttachment response for Issue/PR attachments
func UploadIssueAttachment(ctx *context.Context) {
uploadAttachment(ctx, ctx.Repo.Repository.ID, setting.Attachment.AllowedTypes)
uploadAttachment(ctx, ctx.Repo.Repository.ID, attachment.UploadAttachmentForIssue)
}
// UploadReleaseAttachment response for uploading release attachments
func UploadReleaseAttachment(ctx *context.Context) {
uploadAttachment(ctx, ctx.Repo.Repository.ID, setting.Repository.Release.AllowedTypes)
uploadAttachment(ctx, ctx.Repo.Repository.ID, attachment.UploadAttachmentForRelease)
}
// UploadAttachment response for uploading attachments
func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) {
func uploadAttachment(ctx *context.Context, repoID int64, uploadFunc attachment.UploadAttachmentFunc) {
if !setting.Attachment.Enabled {
ctx.HTTPError(http.StatusNotFound, "attachment is not enabled")
return
@ -46,7 +46,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) {
defer file.Close()
uploaderFile := attachment.NewLimitedUploaderKnownSize(file, header.Size)
attach, err := attachment.UploadAttachmentReleaseSizeLimit(ctx, uploaderFile, allowedTypes, &repo_model.Attachment{
attach, err := uploadFunc(ctx, uploaderFile, &repo_model.Attachment{
Name: header.Filename,
UploaderID: ctx.Doer.ID,
RepoID: repoID,
@ -56,7 +56,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) {
ctx.HTTPError(http.StatusBadRequest, err.Error())
return
}
ctx.ServerError("UploadAttachmentReleaseSizeLimit", err)
ctx.ServerError("uploadAttachment(uploadFunc)", err)
return
}
@ -119,7 +119,7 @@ func DeleteAttachment(ctx *context.Context) {
})
}
// GetAttachment serve attachments with the given UUID
// ServeAttachment serve attachments with the given UUID
func ServeAttachment(ctx *context.Context, uuid string) {
attach, err := repo_model.GetAttachmentByUUID(ctx, uuid)
if err != nil {

View File

@ -57,22 +57,26 @@ func NewComment(ctx *context.Context) {
return
}
redirect := fmt.Sprintf("%s/%s/%d", ctx.Repo.RepoLink, issueType, issue.Index)
attachments := util.Iif(setting.Attachment.Enabled, form.Files, nil)
// Can allow empty comments if there are attachments or a status change (close, reopen, approve, reject)
// So, only stop if there is no content, no attachments, and no status change.
if form.Content == "" && len(attachments) == 0 && form.Status == "" {
ctx.JSONError(ctx.Tr("repo.issues.comment_no_content"))
return
}
comment, err := issue_service.CreateIssueComment(ctx, ctx.Doer, ctx.Repo.Repository, issue, form.Content, attachments)
if err != nil {
if errors.Is(err, user_model.ErrBlockedUser) {
ctx.JSONError(ctx.Tr("repo.issues.comment.blocked_user"))
} else {
ctx.ServerError("CreateIssueComment", err)
// allow empty content if there are attachments
if form.Content != "" || len(attachments) > 0 {
comment, err := issue_service.CreateIssueComment(ctx, ctx.Doer, ctx.Repo.Repository, issue, form.Content, attachments)
if err != nil {
if errors.Is(err, user_model.ErrBlockedUser) {
ctx.JSONError(ctx.Tr("repo.issues.comment.blocked_user"))
} else {
ctx.ServerError("CreateIssueComment", err)
}
return
}
// redirect to the comment's hashtag
redirect += "#" + comment.HashTag()
} else if form.Status == "" {
// if no status change (close, reopen), it is a plain comment, and content is required
// "approve/reject" are handled differently in SubmitReview
ctx.JSONError(ctx.Tr("repo.issues.comment_no_content"))
return
}
@ -86,6 +90,7 @@ func NewComment(ctx *context.Context) {
!(issue.IsPull && issue.PullRequest.HasMerged) {
// Duplication and conflict check should apply to reopen pull request.
var branchOtherUnmergedPR *issues_model.PullRequest
var err error
if form.Status == "reopen" && issue.IsPull {
pull := issue.PullRequest
branchOtherUnmergedPR, err = issues_model.GetUnmergedPullRequest(ctx, pull.HeadRepoID, pull.BaseRepoID, pull.HeadBranch, pull.BaseBranch, pull.Flow)
@ -179,11 +184,6 @@ func NewComment(ctx *context.Context) {
}
} // end if: handle close or reopen
// Redirect to the comment, add hashtag if it exists
redirect := fmt.Sprintf("%s/%s/%d", ctx.Repo.RepoLink, issueType, issue.Index)
if comment != nil {
redirect += "#" + comment.HashTag()
}
ctx.JSONRedirect(redirect)
}

View File

@ -54,12 +54,17 @@ func NewLimitedUploaderMaxBytesReader(r io.ReadCloser, w http.ResponseWriter) *U
return &UploaderFile{rd: r, size: -1, respWriter: w}
}
func UploadAttachmentGeneralSizeLimit(ctx context.Context, file *UploaderFile, allowedTypes string, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
return uploadAttachment(ctx, file, allowedTypes, setting.Attachment.MaxSize<<20, attach)
// UploadAttachmentFunc uploads a prepared attachment file, applying the
// allowed-types and size limits of a specific attachment category
// (issue/pull-request vs. release).
type UploadAttachmentFunc func(ctx context.Context, file *UploaderFile, attach *repo_model.Attachment) (*repo_model.Attachment, error)
// UploadAttachmentForIssue uploads an issue/pull-request attachment, enforcing
// the [attachment] section's allowed types and max size (MaxSize is in MiB,
// hence the <<20 shift to bytes).
func UploadAttachmentForIssue(ctx context.Context, file *UploaderFile, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
	return uploadAttachment(ctx, file, setting.Attachment.AllowedTypes, setting.Attachment.MaxSize<<20, attach)
}
func UploadAttachmentReleaseSizeLimit(ctx context.Context, file *UploaderFile, allowedTypes string, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
return uploadAttachment(ctx, file, allowedTypes, setting.Repository.Release.FileMaxSize<<20, attach)
// UploadAttachmentForRelease uploads a release attachment, enforcing the
// [repository.release] section's allowed types and max file size (FileMaxSize
// is in MiB, hence the <<20 shift to bytes).
func UploadAttachmentForRelease(ctx context.Context, file *UploaderFile, attach *repo_model.Attachment) (*repo_model.Attachment, error) {
	// FIXME: although the release attachment has different settings from the issue attachment,
	// it still uses the same attachment table, the same storage and the same upload logic
	// So if the "issue attachment [attachment]" is not enabled, it will also affect the release attachment, which is not expected.
	return uploadAttachment(ctx, file, setting.Repository.Release.AllowedTypes, setting.Repository.Release.FileMaxSize<<20, attach)
}
func uploadAttachment(ctx context.Context, file *UploaderFile, allowedTypes string, maxFileSize int64, attach *repo_model.Attachment) (*repo_model.Attachment, error) {

View File

@ -88,7 +88,7 @@ func (h *ReplyHandler) Handle(ctx context.Context, content *MailContent, doer *u
for _, attachment := range content.Attachments {
attachmentBuf := bytes.NewReader(attachment.Content)
uploaderFile := attachment_service.NewLimitedUploaderKnownSize(attachmentBuf, attachmentBuf.Size())
a, err := attachment_service.UploadAttachmentGeneralSizeLimit(ctx, uploaderFile, setting.Attachment.AllowedTypes, &repo_model.Attachment{
a, err := attachment_service.UploadAttachmentForIssue(ctx, uploaderFile, &repo_model.Attachment{
Name: attachment.Name,
UploaderID: doer.ID,
RepoID: issue.Repo.ID,

View File

@ -13,6 +13,7 @@ import (
"errors"
"fmt"
"io"
"slices"
"strings"
"time"
@ -241,15 +242,22 @@ func BuildSpecificRepositoryFiles(ctx context.Context, ownerID int64, group stri
return err
}
data := []*repoData{primary, filelists, other}
updates := collectUpdateInfoUpdates(pfs, cache)
if len(updates) > 0 {
updateInfo, err := buildUpdateInfo(ctx, pv, updates, group)
if err != nil {
return err
}
data = append(data, updateInfo)
}
return buildRepomd(
ctx,
pv,
ownerID,
[]*repoData{
primary,
filelists,
other,
},
data,
group,
)
}
@ -563,6 +571,93 @@ func buildOther(ctx context.Context, pv *packages_model.PackageVersion, pfs []*p
}, group)
}
// collectUpdateInfoUpdates gathers the errata updates of every distinct
// package version appearing in pfs, looking metadata up through the cache c.
// Each version contributes its updates at most once, regardless of how many
// files belong to it.
func collectUpdateInfoUpdates(pfs []*packages_model.PackageFile, c packageCache) (updates []*rpm_module.Update) {
	handled := make(map[int64]bool)
	for _, pf := range pfs {
		pd := c[pf]
		if pd.Version == nil || pd.VersionMetadata.Updates == nil || handled[pd.Version.ID] {
			continue
		}
		handled[pd.Version.ID] = true
		updates = append(updates, pd.VersionMetadata.Updates...)
	}
	return updates
}
// buildUpdateInfo builds the updateinfo.xml repodata file: advisories sharing
// the same ID are merged (their package lists deduplicated), sorted by ID for
// deterministic output, and stored via addDataAsFileToRepo. Cached Update
// values are deep-copied before merging so the shared metadata cache is never
// mutated.
func buildUpdateInfo(ctx context.Context, pv *packages_model.PackageVersion, updates []*rpm_module.Update, group string) (*repoData, error) {
	// Group updates by ID to merge package lists
	type updateKey struct {
		ID string
	}
	updateMap := make(map[updateKey]*rpm_module.Update)
	for _, u := range updates {
		key := updateKey{ID: u.ID}
		if existing, ok := updateMap[key]; ok {
			// A copy with this ID is already in the map: fold this update's
			// collections into it.
			for _, newColl := range u.PkgList {
				collFound := false
				for j, existingColl := range existing.PkgList {
					if existingColl.Short == newColl.Short {
						// Same collection: append only packages not already
						// present, matched by (name, version, release, arch).
						for _, newPkg := range newColl.Packages {
							pkgFound := false
							for _, existingPkg := range existingColl.Packages {
								if existingPkg.Name == newPkg.Name &&
									existingPkg.Version == newPkg.Version &&
									existingPkg.Release == newPkg.Release &&
									existingPkg.Arch == newPkg.Arch {
									pkgFound = true
									break
								}
							}
							if !pkgFound {
								existing.PkgList[j].Packages = append(existing.PkgList[j].Packages, newPkg)
							}
						}
						collFound = true
						break
					}
				}
				if !collFound {
					// Unseen collection: copy it (and its package slice) so
					// later appends cannot mutate the cached original.
					collCopy := *newColl
					collCopy.Packages = append([]*rpm_module.UpdatePackage(nil), newColl.Packages...)
					existing.PkgList = append(existing.PkgList, &collCopy)
				}
			}
		} else {
			// Create a shallow copy so we don't mutate the original cached pointer
			uCopy := *u
			// Deep copy PkgList and Collections to avoid mutating cache
			// Note: References is shallow-copied, but safe as long as it remains immutable
			uCopy.PkgList = make([]*rpm_module.Collection, len(u.PkgList))
			for i, coll := range u.PkgList {
				collCopy := *coll
				collCopy.Packages = append([]*rpm_module.UpdatePackage(nil), coll.Packages...)
				uCopy.PkgList[i] = &collCopy
			}
			updateMap[key] = &uCopy
		}
	}
	var mergedUpdates []*rpm_module.Update
	for _, u := range updateMap {
		mergedUpdates = append(mergedUpdates, u)
	}
	// Map iteration order is random; sort by advisory ID so the generated
	// file content is deterministic.
	slices.SortFunc(mergedUpdates, func(a, b *rpm_module.Update) int {
		return strings.Compare(a.ID, b.ID)
	})
	// Local wrapper providing the <updates> root element and its namespace.
	type updateInfo struct {
		XMLName xml.Name             `xml:"updates"`
		Xmlns   string               `xml:"xmlns,attr"`
		Updates []*rpm_module.Update `xml:"update"`
	}
	return addDataAsFileToRepo(ctx, pv, "updateinfo", &updateInfo{
		Xmlns:   "http://linux.duke.edu/metadata/updateinfo",
		Updates: mergedUpdates,
	}, group)
}
// writtenCounter counts all written bytes
type writtenCounter struct {
written int64

View File

@ -12,12 +12,14 @@ import (
"io"
"net/http"
"net/http/httptest"
"slices"
"strings"
"testing"
"code.gitea.io/gitea/models/packages"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/json"
rpm_module "code.gitea.io/gitea/modules/packages/rpm"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/tests"
@ -74,6 +76,15 @@ Mu0UFYgZ/bYnuvn/vz4wtCz8qMwsHUvP0PX3tbYFUctAPdrY6tiiDtcCddDECahx7SuVNP5dpmb5
content, err := io.ReadAll(zr)
assert.NoError(t, err)
decodeGzipXML := func(t testing.TB, resp *httptest.ResponseRecorder, v any) {
t.Helper()
zr, err := gzip.NewReader(resp.Body)
assert.NoError(t, err)
assert.NoError(t, xml.NewDecoder(zr).Decode(v))
}
rootURL := fmt.Sprintf("/api/packages/%s/rpm", user.Name)
for _, group := range []string{"", "el9", "el9/stable"} {
@ -247,15 +258,6 @@ gpgkey=%sapi/packages/%s/rpm/repository.key`,
assert.Contains(t, resp.Body.String(), "-----BEGIN PGP SIGNATURE-----")
})
decodeGzipXML := func(t testing.TB, resp *httptest.ResponseRecorder, v any) {
t.Helper()
zr, err := gzip.NewReader(resp.Body)
assert.NoError(t, err)
assert.NoError(t, xml.NewDecoder(zr).Decode(v))
}
t.Run("primary.xml.gz", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
@ -420,6 +422,328 @@ gpgkey=%sapi/packages/%s/rpm/repository.key`,
})
})
t.Run("Errata", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
type updateInfo struct {
XMLName xml.Name `xml:"updates"`
Xmlns string `xml:"xmlns,attr"`
Updates []*rpm_module.Update `xml:"update"`
}
errataURL := fmt.Sprintf("%s/package/%s/%s/errata", groupURL, packageName, packageVersion)
advisory := rpm_module.Update{
From: "security@example.com",
Status: "stable",
Type: "security",
Version: "1.0",
ID: "CVE-2023-1234",
Title: "Test Security Update",
Severity: "Important",
Description: "This is a test security update.",
References: []*rpm_module.Reference{
{
Href: "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-1234",
ID: "CVE-2023-1234",
Title: "CVE-2023-1234",
Type: "cve",
},
},
PkgList: []*rpm_module.Collection{
{
Short: "el9",
Packages: []*rpm_module.UpdatePackage{
{
Arch: packageArchitecture,
Name: packageName,
Release: "1",
Src: "gitea-test-1.0.2-1.src.rpm",
Version: "1.0.2",
Filename: fmt.Sprintf("%s-%s.%s.rpm", packageName, packageVersion, packageArchitecture),
},
},
},
},
}
t.Run("Success", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
updates := []*rpm_module.Update{&advisory}
body, err := json.Marshal(updates)
assert.NoError(t, err)
req := NewRequestWithBody(t, "POST", errataURL, bytes.NewReader(body)).
AddBasicAuth(user.Name)
MakeRequest(t, req, http.StatusOK)
url := groupURL + "/repodata"
// Check repomd.xml contains updateinfo
req = NewRequest(t, "GET", url+"/repomd.xml")
resp := MakeRequest(t, req, http.StatusOK)
type testRepoData struct {
Type string `xml:"type,attr"`
}
var repomd struct {
Data []*testRepoData `xml:"data"`
}
err = xml.NewDecoder(resp.Body).Decode(&repomd)
require.NoError(t, err)
found := slices.IndexFunc(repomd.Data, func(s *testRepoData) bool {
return s.Type == "updateinfo"
}) >= 0
assert.True(t, found, "updateinfo not found in repomd.xml")
// Now check updateinfo.xml.gz
req = NewRequest(t, "GET", url+"/updateinfo.xml.gz")
resp = MakeRequest(t, req, http.StatusOK)
var result updateInfo
decodeGzipXML(t, resp, &result)
assert.Equal(t, "http://linux.duke.edu/metadata/updateinfo", result.Xmlns)
assert.Len(t, result.Updates, 1)
assert.Equal(t, "CVE-2023-1234", result.Updates[0].ID)
assert.NotEmpty(t, result.Updates[0].Issued.Date)
assert.NotEmpty(t, result.Updates[0].Updated.Date)
})
t.Run("InvalidJSON", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
req := NewRequestWithBody(t, "POST", errataURL, strings.NewReader("invalid json")).
AddBasicAuth(user.Name)
MakeRequest(t, req, http.StatusBadRequest)
})
t.Run("NullElementsInJSON", func(t *testing.T) {
	defer tests.PrintCurrentTest(t)()

	// Send a payload with null elements inside the arrays; the server
	// must skip the nulls and store only the real entries.
	payload := `[
		{
			"id": "CVE-2023-5678",
			"from": "security@example.com",
			"status": "stable",
			"type": "security",
			"version": "1.0",
			"title": "Test Null Elements",
			"severity": "Important",
			"description": "Test null elements",
			"pkg_list": [
				null,
				{
					"short": "el9",
					"packages": [
						null,
						{
							"arch": "x86_64",
							"name": "gitea",
							"release": "1",
							"src": "gitea-1.0.0-1.src.rpm",
							"version": "1.0.0",
							"filename": "gitea-1.0.0-1.x86_64.rpm"
						}
					]
				}
			]
		}
	]`
	req := NewRequestWithBody(t, "POST", errataURL, strings.NewReader(payload)).
		AddBasicAuth(user.Name)
	MakeRequest(t, req, http.StatusOK)

	// Verify it was stored correctly (skipping nulls)
	url := groupURL + "/repodata"
	req = NewRequest(t, "GET", url+"/updateinfo.xml.gz")
	resp := MakeRequest(t, req, http.StatusOK)

	var result updateInfo
	decodeGzipXML(t, resp, &result)

	// We need to find the new advisory CVE-2023-5678
	var newAdvisory *rpm_module.Update
	for _, u := range result.Updates {
		if u.ID == "CVE-2023-5678" {
			newAdvisory = u
			break
		}
	}
	// require: the dereferences below would panic on nil / short slices.
	require.NotNil(t, newAdvisory)
	require.Len(t, newAdvisory.PkgList, 1)
	assert.Equal(t, "el9", newAdvisory.PkgList[0].Short)
	require.Len(t, newAdvisory.PkgList[0].Packages, 1)
	assert.Equal(t, "gitea", newAdvisory.PkgList[0].Packages[0].Name)
})
t.Run("PackageNotFound", func(t *testing.T) {
	defer tests.PrintCurrentTest(t)()

	// Posting errata for a version that does not exist must return 404.
	badURL := fmt.Sprintf("%s/package/%s/non-existent-version/errata", groupURL, packageName)
	updates := []*rpm_module.Update{&advisory}
	body, err := json.Marshal(updates)
	// require: a marshal failure would make the request below meaningless.
	require.NoError(t, err)

	req := NewRequestWithBody(t, "POST", badURL, bytes.NewReader(body)).
		AddBasicAuth(user.Name)
	MakeRequest(t, req, http.StatusNotFound)
})
t.Run("MergeAdvisories", func(t *testing.T) {
	defer tests.PrintCurrentTest(t)()

	// Upload a second advisory with the same ID but a different package;
	// the server must merge both packages into one "el9" collection.
	advisory2 := advisory
	advisory2.PkgList = []*rpm_module.Collection{
		{
			Short: "el9",
			Packages: []*rpm_module.UpdatePackage{
				{
					Arch:     packageArchitecture,
					Name:     "another-package",
					Release:  "1",
					Src:      "another-package-1.0.0-1.src.rpm",
					Version:  "1.0.0",
					Filename: "another-package-1.0.0-1.x86_64.rpm",
				},
			},
		},
	}
	updates := []*rpm_module.Update{&advisory2}
	body, err := json.Marshal(updates)
	require.NoError(t, err)

	req := NewRequestWithBody(t, "POST", errataURL, bytes.NewReader(body)).
		AddBasicAuth(user.Name)
	MakeRequest(t, req, http.StatusOK)

	// Check updateinfo.xml.gz again
	url := groupURL + "/repodata"
	req = NewRequest(t, "GET", url+"/updateinfo.xml.gz")
	resp := MakeRequest(t, req, http.StatusOK)

	var result updateInfo
	decodeGzipXML(t, resp, &result)

	var targetUpdate *rpm_module.Update
	for _, u := range result.Updates {
		if u.ID == "CVE-2023-1234" {
			targetUpdate = u
			break
		}
	}
	// require: the dereferences below would panic on nil / short slices.
	require.NotNil(t, targetUpdate)

	// Verify that package lists are merged into the same collection.
	require.Len(t, targetUpdate.PkgList, 1)
	require.Len(t, targetUpdate.PkgList[0].Packages, 2)
	// Check the located advisory, not Updates[0] — the advisory is not
	// guaranteed to be the first entry in the updateinfo document.
	assert.Equal(t, "another-package", targetUpdate.PkgList[0].Packages[1].Name)
})
t.Run("NewCollection", func(t *testing.T) {
	defer tests.PrintCurrentTest(t)()

	// Upload a third advisory with the same ID but a different collection
	// ("el8"); the server must add it alongside the existing "el9" one.
	advisory3 := advisory
	advisory3.PkgList = []*rpm_module.Collection{
		{
			Short: "el8",
			Packages: []*rpm_module.UpdatePackage{
				{
					Arch:     packageArchitecture,
					Name:     packageName,
					Release:  "1",
					Src:      "gitea-test-1.0.2-1.src.rpm",
					Version:  "1.0.2",
					Filename: fmt.Sprintf("%s-%s.%s.rpm", packageName, packageVersion, packageArchitecture),
				},
			},
		},
	}
	updates := []*rpm_module.Update{&advisory3}
	body, err := json.Marshal(updates)
	require.NoError(t, err)

	req := NewRequestWithBody(t, "POST", errataURL, bytes.NewReader(body)).
		AddBasicAuth(user.Name)
	MakeRequest(t, req, http.StatusOK)

	// Check updateinfo.xml.gz again
	url := groupURL + "/repodata"
	req = NewRequest(t, "GET", url+"/updateinfo.xml.gz")
	resp := MakeRequest(t, req, http.StatusOK)

	var result updateInfo
	decodeGzipXML(t, resp, &result)

	var targetUpdate *rpm_module.Update
	for _, u := range result.Updates {
		if u.ID == "CVE-2023-1234" {
			targetUpdate = u
			break
		}
	}
	// require: the dereference and indexing below would panic on nil / short slices.
	require.NotNil(t, targetUpdate)

	// Verify that we now have 2 collections.
	require.Len(t, targetUpdate.PkgList, 2)
	// Collection order is not guaranteed (map iteration is random),
	// so just check that both shorts are present.
	shorts := []string{targetUpdate.PkgList[0].Short, targetUpdate.PkgList[1].Short}
	assert.Contains(t, shorts, "el9")
	assert.Contains(t, shorts, "el8")
})
t.Run("Idempotency", func(t *testing.T) {
	defer tests.PrintCurrentTest(t)()

	// Posting the same advisory twice must not duplicate collections or packages.
	updates := []*rpm_module.Update{&advisory}
	body, err := json.Marshal(updates)
	require.NoError(t, err)

	// Post twice
	req := NewRequestWithBody(t, "POST", errataURL, bytes.NewReader(body)).
		AddBasicAuth(user.Name)
	MakeRequest(t, req, http.StatusOK)
	req = NewRequestWithBody(t, "POST", errataURL, bytes.NewReader(body)).
		AddBasicAuth(user.Name)
	MakeRequest(t, req, http.StatusOK)

	// Check updateinfo.xml.gz
	url := groupURL + "/repodata"
	req = NewRequest(t, "GET", url+"/updateinfo.xml.gz")
	resp := MakeRequest(t, req, http.StatusOK)

	var result updateInfo
	decodeGzipXML(t, resp, &result)

	var targetUpdate *rpm_module.Update
	for _, u := range result.Updates {
		if u.ID == "CVE-2023-1234" {
			targetUpdate = u
			break
		}
	}
	// require: the dereferences below would panic on nil.
	require.NotNil(t, targetUpdate)
	// Still the two collections created by the earlier subtests, nothing duplicated.
	require.Len(t, targetUpdate.PkgList, 2)

	var el9Coll *rpm_module.Collection
	for _, coll := range targetUpdate.PkgList {
		if coll.Short == "el9" {
			el9Coll = coll
			break
		}
	}
	require.NotNil(t, el9Coll)
	// Still the two packages merged earlier; the double post added nothing.
	assert.Len(t, el9Coll.Packages, 2)
})
})
t.Run("Delete", func(t *testing.T) {
defer tests.PrintCurrentTest(t)()