diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 0b20da7c16..4f82a5d8c6 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -13,7 +13,7 @@ "ghcr.io/devcontainers/features/git-lfs:1.2.5": {}, "ghcr.io/jsburckhardt/devcontainer-features/uv:1": {}, "ghcr.io/devcontainers/features/python:1": { - "version": "3.13" + "version": "3.14" }, "ghcr.io/warrenbuckley/codespace-features/sqlite:1": {} }, diff --git a/.github/labeler.yml b/.github/labeler.yml index c940afef0c..64127e50e9 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -46,7 +46,7 @@ modifies/internal: - ".gitpod.yml" - ".markdownlint.yaml" - ".spectral.yaml" - - "stylelint.config.ts" + - "stylelint.config.*" - ".yamllint.yaml" - ".github/**" - ".gitea/**" @@ -89,4 +89,4 @@ topic/code-linting: - ".markdownlint.yaml" - ".spectral.yaml" - ".yamllint.yaml" - - "stylelint.config.ts" + - "stylelint.config.*" diff --git a/.github/workflows/cron-licenses.yml b/.github/workflows/cron-licenses.yml index c5dd70a1f8..ee1c3e0c75 100644 --- a/.github/workflows/cron-licenses.yml +++ b/.github/workflows/cron-licenses.yml @@ -20,7 +20,7 @@ jobs: - run: make generate-gitignore timeout-minutes: 40 - name: push translations to repo - uses: appleboy/git-push-action@v1.0.0 + uses: appleboy/git-push-action@v1.2.0 with: author_email: "teabot@gitea.io" author_name: GiteaBot diff --git a/.github/workflows/cron-translations.yml b/.github/workflows/cron-translations.yml index d87ba8b20d..56a30fb5ba 100644 --- a/.github/workflows/cron-translations.yml +++ b/.github/workflows/cron-translations.yml @@ -29,7 +29,7 @@ jobs: - name: update locales run: ./build/update-locales.sh - name: push translations to repo - uses: appleboy/git-push-action@v1.0.0 + uses: appleboy/git-push-action@v1.2.0 with: author_email: "teabot@gitea.io" author_name: GiteaBot diff --git a/.github/workflows/pull-compliance.yml b/.github/workflows/pull-compliance.yml index 
99ff95e9c6..fb81622bd6 100644 --- a/.github/workflows/pull-compliance.yml +++ b/.github/workflows/pull-compliance.yml @@ -39,7 +39,7 @@ jobs: steps: - uses: actions/checkout@v6 - uses: astral-sh/setup-uv@v7 - - run: uv python install 3.12 + - run: uv python install 3.14 - uses: pnpm/action-setup@v4 - uses: actions/setup-node@v6 with: @@ -59,7 +59,7 @@ jobs: steps: - uses: actions/checkout@v6 - uses: astral-sh/setup-uv@v7 - - run: uv python install 3.12 + - run: uv python install 3.14 - run: make deps-py - run: make lint-yaml diff --git a/.golangci.yml b/.golangci.yml index e9b9a03c43..62c1d005fa 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -48,6 +48,10 @@ linters: desc: do not use the ini package, use gitea's config system instead - pkg: gitea.com/go-chi/cache desc: do not use the go-chi cache package, use gitea's cache system + - pkg: github.com/pkg/errors + desc: use builtin errors package instead + - pkg: github.com/go-ap/errors + desc: use builtin errors package instead nolintlint: allow-unused: false require-explanation: true diff --git a/CHANGELOG.md b/CHANGELOG.md index e3374e14c3..f0d93452ae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,39 @@ This changelog goes through the changes that have been made in each release without substantial changes to our git log; to see the highlights of what has been added to each release, please refer to the [blog](https://blog.gitea.com). 
-## [1.25.3](https://github.com/go-gitea/gitea/releases/tag/1.25.3) - 2025-12-17 +## [1.25.4](https://github.com/go-gitea/gitea/releases/tag/v1.25.4) - 2026-01-15 + +* SECURITY + * Release attachments must belong to the intended repo (#36347) (#36375) + * Fix permission check on org project operations (#36318) (#36373) + * Clean watches when make a repository private and check permission when send release emails (#36319) (#36370) + * Add more check for stopwatch read or list (#36340) (#36368) + * Fix openid setting check (#36346) (#36361) + * Fix cancel auto merge bug (#36341) (#36356) + * Fix delete attachment check (#36320) (#36355) + * LFS locks must belong to the intended repo (#36344) (#36349) + * Fix bug on notification read (#36339) #36387 +* ENHANCEMENTS + * Add more routes to the "expensive" list (#36290) + * Make "commit statuses" API accept slashes in "ref" (#36264) (#36275) +* BUGFIXES + * Fix git http service handling (#36396) + * Fix markdown newline handling during IME composition (#36421) (#36424) + * Fix missing repository id when migrating release attachments (#36389) + * Fix bug when compare in the pull request (#36363) (#36372) + * Fix incorrect text content detection (#36364) (#36369) + * Fill missing `has_code` in repository api (#36338) (#36359) + * Fix notifications pagination query parameters (#36351) (#36358) + * Fix some trivial problems (#36336) (#36337) + * Prevent panic when GitLab release has more links than sources (#36295) (#36305) + * Fix stats bug when syncing release (#36285) (#36294) + * Always honor user's choice for "delete branch after merge" (#36281) (#36286) + * Use the requested host for LFS links (#36242) (#36258) + * Fix panic when get editor config file (#36241) (#36247) + * Fix regression in writing authorized principals (#36213) (#36218) + * Fix WebAuthn error checking (#36219) (#36235) + +## [1.25.3](https://github.com/go-gitea/gitea/releases/tag/v1.25.3) - 2025-12-17 * SECURITY * Bump toolchain to go1.25.5, misc 
fixes (#36082) @@ -31,7 +63,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com). * Fix error handling in mailer and wiki services (#36041) (#36053) * Fix bugs when comparing and creating pull request (#36166) (#36144) -## [1.25.2](https://github.com/go-gitea/gitea/releases/tag/1.25.2) - 2025-11-23 +## [1.25.2](https://github.com/go-gitea/gitea/releases/tag/v1.25.2) - 2025-11-23 * SECURITY * Upgrade golang.org/x/crypto to 0.45.0 (#35985) (#35988) @@ -418,7 +450,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com). * Hide href attribute of a tag if there is no target_url (#34556) (#34684) * Fix tag target (#34781) #34783 -## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/1.24.0) - 2025-05-26 +## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/v1.24.0) - 2025-05-26 * BREAKING * Make Gitea always use its internal config, ignore `/etc/gitconfig` (#33076) @@ -788,7 +820,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com). * Bump x/net (#32896) (#32900) * Only activity tab needs heatmap data loading (#34652) -## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/1.23.8) - 2025-05-11 +## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/v1.23.8) - 2025-05-11 * SECURITY * Fix a bug when uploading file via lfs ssh command (#34408) (#34411) @@ -815,7 +847,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com). * Bump go version in go.mod (#34160) * remove hardcoded 'code' string in clone_panel.tmpl (#34153) (#34158) -## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/1.23.7) - 2025-04-07 +## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/v1.23.7) - 2025-04-07 * Enhancements * Add a config option to block "expensive" pages for anonymous users (#34024) (#34071) @@ -913,7 +945,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com). 
* BUGFIXES * Fix a bug caused by status webhook template #33512 -## [1.23.2](https://github.com/go-gitea/gitea/releases/tag/1.23.2) - 2025-02-04 +## [1.23.2](https://github.com/go-gitea/gitea/releases/tag/v1.23.2) - 2025-02-04 * BREAKING * Add tests for webhook and fix some webhook bugs (#33396) (#33442) @@ -3443,7 +3475,7 @@ Key highlights of this release encompass significant changes categorized under ` * Improve decryption failure message (#24573) (#24575) * Makefile: Use portable !, not GNUish -not, with find(1). (#24565) (#24572) -## [1.19.3](https://github.com/go-gitea/gitea/releases/tag/1.19.3) - 2023-05-03 +## [1.19.3](https://github.com/go-gitea/gitea/releases/tag/v1.19.3) - 2023-05-03 * SECURITY * Use golang 1.20.4 to fix CVE-2023-24539, CVE-2023-24540, and CVE-2023-29400 @@ -3456,7 +3488,7 @@ Key highlights of this release encompass significant changes categorized under ` * Fix incorrect CurrentUser check for docker rootless (#24435) * Getting the tag list does not require being signed in (#24413) (#24416) -## [1.19.2](https://github.com/go-gitea/gitea/releases/tag/1.19.2) - 2023-04-26 +## [1.19.2](https://github.com/go-gitea/gitea/releases/tag/v1.19.2) - 2023-04-26 * SECURITY * Require repo scope for PATs for private repos and basic authentication (#24362) (#24364) @@ -3955,7 +3987,7 @@ Key highlights of this release encompass significant changes categorized under ` * Display attachments of review comment when comment content is blank (#23035) (#23046) * Return empty url for submodule tree entries (#23043) (#23048) -## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/1.18.4) - 2023-02-20 +## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/v1.18.4) - 2023-02-20 * SECURITY * Provide the ability to set password hash algorithm parameters (#22942) (#22943) @@ -4382,7 +4414,7 @@ Key highlights of this release encompass significant changes categorized under ` * Fix the mode of custom dir to 0700 in docker-rootless (#20861) (#20867) * Fix UI 
mis-align for PR commit history (#20845) (#20859) -## [1.17.1](https://github.com/go-gitea/gitea/releases/tag/1.17.1) - 2022-08-17 +## [1.17.1](https://github.com/go-gitea/gitea/releases/tag/v1.17.1) - 2022-08-17 * SECURITY * Correctly escape within tribute.js (#20831) (#20832) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9d696bf6b1..52e4aefb6b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -80,7 +80,7 @@ The more detailed and specific you are, the faster we can fix the issue. \ It is really helpful if you can reproduce your problem on a site running on the latest commits, i.e. , as perhaps your problem has already been fixed on a current version. \ Please follow the guidelines described in [How to Report Bugs Effectively](http://www.chiark.greenend.org.uk/~sgtatham/bugs.html) for your report. -Please be kind, remember that Gitea comes at no cost to you, and you're getting free help. +Please be kind—remember that Gitea comes at no cost to you, and you're getting free help. 
### Types of issues diff --git a/Makefile b/Makefile index ee75906b7f..49474b0c5c 100644 --- a/Makefile +++ b/Makefile @@ -32,14 +32,14 @@ XGO_VERSION := go-1.25.x AIR_PACKAGE ?= github.com/air-verse/air@v1 EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3 GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.9.2 -GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2 +GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.8.0 GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.15 MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.7.0 SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.33.1 XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1 GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1 -ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1.7.9 +ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1.7.10 DOCKER_IMAGE ?= gitea/gitea DOCKER_TAG ?= latest @@ -100,7 +100,7 @@ GITHUB_REF_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) # Enable typescript support in Node.js before 22.18 # TODO: Remove this once we can raise the minimum Node.js version to 22.18 (alpine >= 3.23) -NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell node -v 2>/dev/null | cut -c2- | tr '.' ' ')) +NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell node -v 2>/dev/null | cut -c2- | sed 's/-.*//' | tr '.' ' ')) ifeq ($(shell test "$(NODE_VERSION)" -lt "022018000"; echo $$?),0) NODE_VARS := NODE_OPTIONS="--experimental-strip-types" else @@ -211,16 +211,6 @@ help: Makefile ## print Makefile help information. 
@printf " \033[36m%-46s\033[0m %s\n" "test[#TestSpecificName]" "run unit test" @printf " \033[36m%-46s\033[0m %s\n" "test-sqlite[#TestSpecificName]" "run integration test for sqlite" -.PHONY: go-check -go-check: - $(eval MIN_GO_VERSION_STR := $(shell grep -Eo '^go\s+[0-9]+\.[0-9]+' go.mod | cut -d' ' -f2)) - $(eval MIN_GO_VERSION := $(shell printf "%03d%03d" $(shell echo '$(MIN_GO_VERSION_STR)' | tr '.' ' '))) - $(eval GO_VERSION := $(shell printf "%03d%03d" $(shell $(GO) version | grep -Eo '[0-9]+\.[0-9]+' | tr '.' ' ');)) - @if [ "$(GO_VERSION)" -lt "$(MIN_GO_VERSION)" ]; then \ - echo "Gitea requires Go $(MIN_GO_VERSION_STR) or greater to build. You can get it at https://go.dev/dl/"; \ - exit 1; \ - fi - .PHONY: git-check git-check: @if git lfs >/dev/null 2>&1 ; then : ; else \ @@ -228,20 +218,6 @@ git-check: exit 1; \ fi -.PHONY: node-check -node-check: - $(eval MIN_NODE_VERSION_STR := $(shell grep -Eo '"node":.*[0-9.]+"' package.json | sed -n 's/.*[^0-9.]\([0-9.]*\)"/\1/p')) - $(eval MIN_NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell echo '$(MIN_NODE_VERSION_STR)' | tr '.' ' '))) - $(eval PNPM_MISSING := $(shell hash pnpm > /dev/null 2>&1 || echo 1)) - @if [ "$(NODE_VERSION)" -lt "$(MIN_NODE_VERSION)" ]; then \ - echo "Gitea requires Node.js $(MIN_NODE_VERSION_STR) or greater to build. You can get it at https://nodejs.org/en/download/"; \ - exit 1; \ - fi - @if [ "$(PNPM_MISSING)" = "1" ]; then \ - echo "Gitea requires pnpm to build. 
You can install it at https://pnpm.io/installation"; \ - exit 1; \ - fi - .PHONY: clean-all clean-all: clean ## delete backend, frontend and integration files rm -rf $(WEBPACK_DEST_ENTRIES) node_modules @@ -341,11 +317,13 @@ lint-backend-fix: lint-go-fix lint-go-gitea-vet lint-editorconfig ## lint backen lint-js: node_modules ## lint js files $(NODE_VARS) pnpm exec eslint --color --max-warnings=0 $(ESLINT_FILES) $(NODE_VARS) pnpm exec vue-tsc + $(NODE_VARS) pnpm exec knip --no-progress --cache .PHONY: lint-js-fix lint-js-fix: node_modules ## lint js files and fix issues $(NODE_VARS) pnpm exec eslint --color --max-warnings=0 $(ESLINT_FILES) --fix $(NODE_VARS) pnpm exec vue-tsc + $(NODE_VARS) pnpm exec knip --no-progress --cache --fix .PHONY: lint-css lint-css: node_modules ## lint css files @@ -426,12 +404,12 @@ watch: ## watch everything and continuously rebuild @bash tools/watch.sh .PHONY: watch-frontend -watch-frontend: node-check node_modules ## watch frontend files and continuously rebuild +watch-frontend: node_modules ## watch frontend files and continuously rebuild @rm -rf $(WEBPACK_DEST_ENTRIES) NODE_ENV=development $(NODE_VARS) pnpm exec webpack --watch --progress --disable-interpret .PHONY: watch-backend -watch-backend: go-check ## watch backend files and continuously rebuild +watch-backend: ## watch backend files and continuously rebuild GITEA_RUN_MODE=dev $(GO) run $(AIR_PACKAGE) -c .air.toml .PHONY: test @@ -749,7 +727,7 @@ build: frontend backend ## build everything frontend: $(WEBPACK_DEST) ## build frontend files .PHONY: backend -backend: go-check generate-backend $(EXECUTABLE) ## build backend files +backend: generate-backend $(EXECUTABLE) ## build backend files # We generate the backend before the frontend in case we in future we want to generate things in the frontend from generated files in backend .PHONY: generate @@ -860,7 +838,7 @@ node_modules: pnpm-lock.yaml update: update-js update-py ## update js and py dependencies .PHONY: update-js 
-update-js: node-check | node_modules ## update js dependencies +update-js: node_modules ## update js dependencies $(NODE_VARS) pnpm exec updates -u -f package.json rm -rf node_modules pnpm-lock.yaml $(NODE_VARS) pnpm install @@ -869,7 +847,7 @@ update-js: node-check | node_modules ## update js dependencies @touch node_modules .PHONY: update-py -update-py: node-check | node_modules ## update py dependencies +update-py: node_modules ## update py dependencies $(NODE_VARS) pnpm exec updates -u -f pyproject.toml rm -rf .venv uv.lock uv sync @@ -879,14 +857,14 @@ update-py: node-check | node_modules ## update py dependencies webpack: $(WEBPACK_DEST) ## build webpack files $(WEBPACK_DEST): $(WEBPACK_SOURCES) $(WEBPACK_CONFIGS) pnpm-lock.yaml - @$(MAKE) -s node-check node_modules + @$(MAKE) -s node_modules @rm -rf $(WEBPACK_DEST_ENTRIES) @echo "Running webpack..." @BROWSERSLIST_IGNORE_OLD_DATA=true $(NODE_VARS) pnpm exec webpack --disable-interpret @touch $(WEBPACK_DEST) .PHONY: svg -svg: node-check | node_modules ## build svg files +svg: node_modules ## build svg files rm -rf $(SVG_DEST_DIR) node tools/generate-svg.ts diff --git a/README.zh-cn.md b/README.zh-cn.md index f34b25b945..8d9531e8e4 100644 --- a/README.zh-cn.md +++ b/README.zh-cn.md @@ -46,7 +46,7 @@ `build` 目标分为两个子目标: - `make backend` 需要 [Go Stable](https://go.dev/dl/),所需版本在 [go.mod](/go.mod) 中定义。 -- `make frontend` 需要 [Node.js LTS](https://nodejs.org/en/download/) 或更高版本。 +- `make frontend` 需要 [Node.js LTS](https://nodejs.org/en/download/) 或更高版本以及 [pnpm](https://pnpm.io/installation)。 需要互联网连接来下载 go 和 npm 模块。从包含预构建前端文件的官方源代码压缩包构建时,不会触发 `frontend` 目标,因此可以在没有 Node.js 的情况下构建。 diff --git a/README.zh-tw.md b/README.zh-tw.md index 9de3f85dd5..875d31e28a 100644 --- a/README.zh-tw.md +++ b/README.zh-tw.md @@ -46,7 +46,7 @@ `build` 目標分為兩個子目標: - `make backend` 需要 [Go Stable](https://go.dev/dl/),所需版本在 [go.mod](/go.mod) 中定義。 -- `make frontend` 需要 [Node.js LTS](https://nodejs.org/en/download/) 或更高版本。 +- `make frontend` 需要 
[Node.js LTS](https://nodejs.org/en/download/) 或更高版本以及 [pnpm](https://pnpm.io/installation)。 需要互聯網連接來下載 go 和 npm 模塊。從包含預構建前端文件的官方源代碼壓縮包構建時,不會觸發 `frontend` 目標,因此可以在沒有 Node.js 的情況下構建。 diff --git a/assets/go-licenses.json b/assets/go-licenses.json index b105757683..5ee65f2c8a 100644 --- a/assets/go-licenses.json +++ b/assets/go-licenses.json @@ -409,16 +409,6 @@ "path": "github.com/dimiro1/reply/LICENSE", "licenseText": "MIT License\n\nCopyright (c) Discourse\nCopyright (c) Claudemiro\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n" }, - { - "name": "github.com/djherbis/buffer", - "path": "github.com/djherbis/buffer/LICENSE.txt", - "licenseText": "The MIT License (MIT)\n\nCopyright (c) 2015 Dustin H\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n" - }, - { - "name": "github.com/djherbis/nio/v3", - "path": "github.com/djherbis/nio/v3/LICENSE.txt", - "licenseText": "The MIT License (MIT)\n\nCopyright (c) 2015 Dustin H\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n" - }, { "name": "github.com/dlclark/regexp2", "path": "github.com/dlclark/regexp2/LICENSE", diff --git a/cmd/hook.go b/cmd/hook.go index 1845ade625..6004f679ac 100644 --- a/cmd/hook.go +++ b/cmd/hook.go @@ -163,6 +163,14 @@ func (n *nilWriter) WriteString(s string) (int, error) { return len(s), nil } +func parseGitHookCommitRefLine(line string) (oldCommitID, newCommitID string, refFullName git.RefName, ok bool) { + fields := strings.Split(line, " ") + if len(fields) != 3 { + return "", "", "", false + } + return fields[0], fields[1], git.RefName(fields[2]), true +} + func runHookPreReceive(ctx context.Context, c *cli.Command) error { if isInternal, _ := strconv.ParseBool(os.Getenv(repo_module.EnvIsInternal)); isInternal { return nil @@ -228,14 +236,11 @@ Gitea or set your environment appropriately.`, "") continue } - fields := bytes.Fields(scanner.Bytes()) - if len(fields) != 3 { + oldCommitID, newCommitID, refFullName, ok := parseGitHookCommitRefLine(scanner.Text()) + if !ok { continue } - oldCommitID := string(fields[0]) - newCommitID := string(fields[1]) - refFullName := git.RefName(fields[2]) total++ lastline++ @@ -313,7 +318,7 @@ func runHookPostReceive(ctx context.Context, c *cli.Command) error { setup(ctx, c.Bool("debug")) // First of all run update-server-info no matter what - if _, _, err := gitcmd.NewCommand("update-server-info").RunStdString(ctx); err != nil { + if err := gitcmd.NewCommand("update-server-info").RunWithStderr(ctx); err != nil { return fmt.Errorf("failed to call 'git update-server-info': %w", err) } @@ -378,16 +383,13 @@ Gitea or set your environment appropriately.`, "") continue } - fields := bytes.Fields(scanner.Bytes()) - if len(fields) != 3 { + var ok bool + 
oldCommitIDs[count], newCommitIDs[count], refFullNames[count], ok = parseGitHookCommitRefLine(scanner.Text()) + if !ok { continue } fmt.Fprintf(out, ".") - oldCommitIDs[count] = string(fields[0]) - newCommitIDs[count] = string(fields[1]) - refFullNames[count] = git.RefName(fields[2]) - commitID, _ := git.NewIDFromString(newCommitIDs[count]) if refFullNames[count] == git.BranchPrefix+"master" && !commitID.IsZero() && count == total { masterPushed = true @@ -594,8 +596,8 @@ Gitea or set your environment appropriately.`, "") hookOptions.RefFullNames = make([]git.RefName, 0, hookBatchSize) for { - // note: pktLineTypeUnknow means pktLineTypeFlush and pktLineTypeData all allowed - rs, err = readPktLine(ctx, reader, pktLineTypeUnknow) + // note: pktLineTypeUnknown means pktLineTypeFlush and pktLineTypeData all allowed + rs, err = readPktLine(ctx, reader, pktLineTypeUnknown) if err != nil { return err } @@ -614,7 +616,7 @@ Gitea or set your environment appropriately.`, "") if hasPushOptions { for { - rs, err = readPktLine(ctx, reader, pktLineTypeUnknow) + rs, err = readPktLine(ctx, reader, pktLineTypeUnknown) if err != nil { return err } @@ -711,8 +713,8 @@ Gitea or set your environment appropriately.`, "") type pktLineType int64 const ( - // UnKnow type - pktLineTypeUnknow pktLineType = 0 + // Unknown type + pktLineTypeUnknown pktLineType = 0 // flush-pkt "0000" pktLineTypeFlush pktLineType = iota // data line diff --git a/cmd/hook_test.go b/cmd/hook_test.go index 86cd4834f2..fefc33c01c 100644 --- a/cmd/hook_test.go +++ b/cmd/hook_test.go @@ -39,3 +39,17 @@ func TestPktLine(t *testing.T) { assert.NoError(t, err) assert.Equal(t, []byte("0007a\nb"), w.Bytes()) } + +func TestParseGitHookCommitRefLine(t *testing.T) { + oldCommitID, newCommitID, refName, ok := parseGitHookCommitRefLine("a b c") + assert.True(t, ok) + assert.Equal(t, "a", oldCommitID) + assert.Equal(t, "b", newCommitID) + assert.Equal(t, "c", string(refName)) + + _, _, _, ok = 
parseGitHookCommitRefLine("a\tb\tc") + assert.False(t, ok) + + _, _, _, ok = parseGitHookCommitRefLine("a b") + assert.False(t, ok) +} diff --git a/cmd/web.go b/cmd/web.go index 6e39db2178..61cfb87130 100644 --- a/cmd/web.go +++ b/cmd/web.go @@ -8,14 +8,13 @@ import ( "fmt" "net" "net/http" + "net/http/pprof" "os" "path/filepath" "strconv" "strings" "time" - _ "net/http/pprof" // Used for debugging if enabled and a web server is running - "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/gtprof" @@ -23,6 +22,7 @@ import ( "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/public" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/routers" "code.gitea.io/gitea/routers/install" @@ -234,22 +234,22 @@ func serveInstalled(c *cli.Command) error { } func servePprof() { - // FIXME: it shouldn't use the global DefaultServeMux, and it should use a proper context - http.DefaultServeMux.Handle("/debug/fgprof", fgprof.Handler()) + mux := http.NewServeMux() + mux.HandleFunc("/debug/pprof/", pprof.Index) + mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) + mux.HandleFunc("/debug/pprof/profile", pprof.Profile) + mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) + mux.HandleFunc("/debug/pprof/trace", pprof.Trace) + mux.Handle("/debug/fgprof", fgprof.Handler()) + // FIXME: it should use a proper context _, _, finished := process.GetManager().AddTypedContext(context.TODO(), "Web: PProf Server", process.SystemProcessType, true) // The pprof server is for debug purpose only, it shouldn't be exposed on public network. At the moment, it's not worth introducing a configurable option for it. 
log.Info("Starting pprof server on localhost:6060") - log.Info("Stopped pprof server: %v", http.ListenAndServe("localhost:6060", nil)) + log.Info("Stopped pprof server: %v", http.ListenAndServe("localhost:6060", mux)) finished() } func runWeb(ctx context.Context, cmd *cli.Command) error { - defer func() { - if panicked := recover(); panicked != nil { - log.Fatal("PANIC: %v\n%s", panicked, log.Stack(2)) - } - }() - if subCmdName, valid := isValidDefaultSubCommand(cmd); !valid { return fmt.Errorf("unknown command: %s", subCmdName) } @@ -269,6 +269,10 @@ func runWeb(ctx context.Context, cmd *cli.Command) error { createPIDFile(cmd.String("pid")) } + // init the HTML renderer and load templates, if error happens, it will report the error immediately and exit with error log + // in dev mode, it won't exit, but watch the template files for changes + _ = templates.PageRenderer() + if !setting.InstallLock { if err := serveInstall(cmd); err != nil { return err diff --git a/custom/conf/app.example.ini b/custom/conf/app.example.ini index 40c066c2b1..3eaffde970 100644 --- a/custom/conf/app.example.ini +++ b/custom/conf/app.example.ini @@ -737,11 +737,8 @@ LEVEL = Info ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Git Operation timeout in seconds ;[git.timeout] -;DEFAULT = 360 ;MIGRATE = 600 ;MIRROR = 300 -;CLONE = 300 -;PULL = 300 ;GC = 60 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -2488,8 +2485,9 @@ LEVEL = Info ;[highlight.mapping] ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; Extension mapping to highlight class -;; e.g. 
.toml=ini +;; Extension mapping to highlight class, for example: +;; .toml = ini +;; .my-js = JavaScript ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; diff --git a/eslint.config.ts b/eslint.config.ts index 253a7f4555..2f710bd936 100644 --- a/eslint.config.ts +++ b/eslint.config.ts @@ -18,7 +18,18 @@ import {defineConfig, globalIgnores} from 'eslint/config'; const jsExts = ['js', 'mjs', 'cjs'] as const; const tsExts = ['ts', 'mts', 'cts'] as const; -const restrictedSyntax = ['WithStatement', 'ForInStatement', 'LabeledStatement', 'SequenceExpression']; + +const restrictedGlobals = [ + {name: 'localStorage', message: 'Use `modules/user-settings.ts` instead.'}, + {name: 'fetch', message: 'Use `modules/fetch.ts` instead.'}, +]; + +const restrictedProperties = [ + {object: 'window', property: 'localStorage', message: 'Use `modules/user-settings.ts` instead.'}, + {object: 'globalThis', property: 'localStorage', message: 'Use `modules/user-settings.ts` instead.'}, + {object: 'window', property: 'fetch', message: 'Use `modules/fetch.ts` instead.'}, + {object: 'globalThis', property: 'fetch', message: 'Use `modules/fetch.ts` instead.'}, +]; export default defineConfig([ globalIgnores([ @@ -32,10 +43,6 @@ export default defineConfig([ languageOptions: { ecmaVersion: 'latest', sourceType: 'module', - globals: { - ...globals.browser, - ...globals.node, - }, parser: typescriptParser, parserOptions: { sourceType: 'module', @@ -69,7 +76,7 @@ export default defineConfig([ 'import-x/resolver': {'typescript': true}, }, rules: { - '@eslint-community/eslint-comments/disable-enable-pair': [2], + '@eslint-community/eslint-comments/disable-enable-pair': [0], '@eslint-community/eslint-comments/no-aggregating-enable': [2], '@eslint-community/eslint-comments/no-duplicate-disable': [2], '@eslint-community/eslint-comments/no-restricted-disable': [0], @@ -233,7 +240,7 @@ export default defineConfig([ '@typescript-eslint/no-unused-vars': 
[2, {vars: 'all', args: 'all', caughtErrors: 'all', ignoreRestSiblings: false, argsIgnorePattern: '^_', varsIgnorePattern: '^_', caughtErrorsIgnorePattern: '^_', destructuredArrayIgnorePattern: '^_'}], '@typescript-eslint/no-use-before-define': [2, {functions: false, classes: true, variables: true, allowNamedExports: true, typedefs: false, enums: false, ignoreTypeReferences: true}], '@typescript-eslint/no-useless-constructor': [0], - '@typescript-eslint/no-useless-default-assignment': [0], // https://github.com/typescript-eslint/typescript-eslint/issues/11847 + '@typescript-eslint/no-useless-default-assignment': [2], '@typescript-eslint/no-useless-empty-export': [0], '@typescript-eslint/no-wrapper-object-types': [2], '@typescript-eslint/non-nullable-type-assertion-style': [0], @@ -264,6 +271,7 @@ export default defineConfig([ '@typescript-eslint/restrict-template-expressions': [0], '@typescript-eslint/return-await': [0], '@typescript-eslint/strict-boolean-expressions': [0], + '@typescript-eslint/strict-void-return': [0], '@typescript-eslint/switch-exhaustiveness-check': [0], '@typescript-eslint/triple-slash-reference': [2], '@typescript-eslint/typedef': [0], @@ -362,7 +370,7 @@ export default defineConfig([ 'import-x/no-self-import': [2], 'import-x/no-unassigned-import': [0], 'import-x/no-unresolved': [2, {commonjs: true, ignore: ['\\?.+$']}], - // 'import-x/no-unused-modules': [2, {unusedExports: true}], // not compatible with eslint 9 + 'import-x/no-unused-modules': [0], // incompatible with eslint 9 'import-x/no-useless-path-segments': [2, {commonjs: true}], 'import-x/no-webpack-loader-syntax': [2], 'import-x/order': [0], @@ -558,9 +566,10 @@ export default defineConfig([ 'no-redeclare': [0], // must be disabled for typescript overloads 'no-regex-spaces': [2], 'no-restricted-exports': [0], - 'no-restricted-globals': [2, 'addEventListener', 'blur', 'close', 'closed', 'confirm', 'defaultStatus', 'defaultstatus', 'error', 'event', 'external', 'find', 'focus', 
'frameElement', 'frames', 'history', 'innerHeight', 'innerWidth', 'isFinite', 'isNaN', 'length', 'locationbar', 'menubar', 'moveBy', 'moveTo', 'name', 'onblur', 'onerror', 'onfocus', 'onload', 'onresize', 'onunload', 'open', 'opener', 'opera', 'outerHeight', 'outerWidth', 'pageXOffset', 'pageYOffset', 'parent', 'print', 'removeEventListener', 'resizeBy', 'resizeTo', 'screen', 'screenLeft', 'screenTop', 'screenX', 'screenY', 'scroll', 'scrollbars', 'scrollBy', 'scrollTo', 'scrollX', 'scrollY', 'status', 'statusbar', 'stop', 'toolbar', 'top'], + 'no-restricted-globals': [2, ...restrictedGlobals], + 'no-restricted-properties': [2, ...restrictedProperties], 'no-restricted-imports': [0], - 'no-restricted-syntax': [2, ...restrictedSyntax, {selector: 'CallExpression[callee.name="fetch"]', message: 'use modules/fetch.ts instead'}], + 'no-restricted-syntax': [2, 'WithStatement', 'ForInStatement', 'LabeledStatement', 'SequenceExpression'], 'no-return-assign': [0], 'no-script-url': [2], 'no-self-assign': [2, {props: true}], @@ -926,12 +935,6 @@ export default defineConfig([ 'vue/require-typed-ref': [2], }, }, - { - files: ['web_src/js/modules/fetch.ts', 'web_src/js/standalone/**/*'], - rules: { - 'no-restricted-syntax': [2, ...restrictedSyntax], - }, - }, { files: ['**/*.test.ts', 'web_src/js/test/setup.ts'], plugins: {vitest}, @@ -989,38 +992,19 @@ export default defineConfig([ 'vitest/valid-title': [2], }, }, - { - files: ['web_src/js/types.ts'], - rules: { - 'import-x/no-unused-modules': [0], - }, - }, { files: ['**/*.d.ts'], rules: { - 'import-x/no-unused-modules': [0], '@typescript-eslint/consistent-type-definitions': [0], '@typescript-eslint/consistent-type-imports': [0], }, }, { - files: ['*.config.*'], - rules: { - 'import-x/no-unused-modules': [0], - }, - }, - { - files: ['web_src/**/*', 'docs/**/*'], - languageOptions: {globals: globals.browser}, + files: ['*', 'tools/**/*'], + languageOptions: {globals: globals.nodeBuiltin}, }, { files: ['web_src/**/*'], - 
languageOptions: { - globals: { - ...globals.browser, - __webpack_public_path__: true, - process: false, // https://github.com/webpack/webpack/issues/15833 - }, - }, + languageOptions: {globals: {...globals.browser, ...globals.webpack}}, }, ]); diff --git a/go.mod b/go.mod index a89c2a5c73..16154ad6cf 100644 --- a/go.mod +++ b/go.mod @@ -2,7 +2,7 @@ module code.gitea.io/gitea go 1.25.0 -toolchain go1.25.5 +toolchain go1.25.6 // rfc5280 said: "The serial number is an integer assigned by the CA to each certificate." // But some CAs use negative serial number, just relax the check. related: @@ -28,7 +28,7 @@ require ( github.com/ProtonMail/go-crypto v1.3.0 github.com/PuerkitoBio/goquery v1.10.3 github.com/SaveTheRbtz/zstd-seekable-format-go/pkg v0.8.0 - github.com/alecthomas/chroma/v2 v2.21.1 + github.com/alecthomas/chroma/v2 v2.23.0 github.com/aws/aws-sdk-go-v2/credentials v1.18.10 github.com/aws/aws-sdk-go-v2/service/codecommit v1.32.2 github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb @@ -39,8 +39,6 @@ require ( github.com/charmbracelet/git-lfs-transfer v0.1.1-0.20251013092601-6327009efd21 github.com/chi-middleware/proxy v1.1.1 github.com/dimiro1/reply v0.0.0-20200315094148-d0136a4c9e21 - github.com/djherbis/buffer v1.2.0 - github.com/djherbis/nio/v3 v3.0.1 github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 github.com/dustin/go-humanize v1.0.1 github.com/editorconfig/editorconfig-core-go/v2 v2.6.3 @@ -95,7 +93,6 @@ require ( github.com/olivere/elastic/v7 v7.0.32 github.com/opencontainers/go-digest v1.0.0 github.com/opencontainers/image-spec v1.1.1 - github.com/pkg/errors v0.9.1 github.com/pquerna/otp v1.5.0 github.com/prometheus/client_golang v1.23.0 github.com/quasoft/websspi v1.1.2 @@ -113,7 +110,7 @@ require ( github.com/wneessen/go-mail v0.7.2 github.com/xeipuuv/gojsonschema v1.2.0 github.com/yohcop/openid-go v1.0.1 - github.com/yuin/goldmark v1.7.13 + github.com/yuin/goldmark v1.7.16 github.com/yuin/goldmark-highlighting/v2 
v2.0.0-20230729083705-37449abec8cc github.com/yuin/goldmark-meta v1.1.0 gitlab.com/gitlab-org/api/client-go v0.142.4 @@ -251,6 +248,7 @@ require ( github.com/philhofer/fwd v1.2.0 // indirect github.com/pierrec/lz4/v4 v4.1.22 // indirect github.com/pjbgf/sha1cd v0.4.0 // indirect + github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/client_model v0.6.2 // indirect github.com/prometheus/common v0.65.0 // indirect diff --git a/go.sum b/go.sum index ac70239339..acd5d8912e 100644 --- a/go.sum +++ b/go.sum @@ -98,8 +98,8 @@ github.com/SaveTheRbtz/zstd-seekable-format-go/pkg v0.8.0/go.mod h1:1HmmMEVsr+0R github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0= github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k= github.com/alecthomas/chroma/v2 v2.2.0/go.mod h1:vf4zrexSH54oEjJ7EdB65tGNHmH3pGZmVkgTP5RHvAs= -github.com/alecthomas/chroma/v2 v2.21.1 h1:FaSDrp6N+3pphkNKU6HPCiYLgm8dbe5UXIXcoBhZSWA= -github.com/alecthomas/chroma/v2 v2.21.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o= +github.com/alecthomas/chroma/v2 v2.23.0 h1:u/Orux1J0eLuZDeQ44froV8smumheieI0EofhbyKhhk= +github.com/alecthomas/chroma/v2 v2.23.0/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o= github.com/alecthomas/repr v0.0.0-20220113201626-b1b626ac65ae/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8= github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs= github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= @@ -260,11 +260,6 @@ github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/r github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= github.com/dimiro1/reply v0.0.0-20200315094148-d0136a4c9e21 h1:PdsjTl0Cg+ZJgOx/CFV5NNgO1ThTreqdgKYiDCMHJwA= github.com/dimiro1/reply 
v0.0.0-20200315094148-d0136a4c9e21/go.mod h1:xJvkyD6Y2rZapGvPJLYo9dyx1s5dxBEDPa8T3YTuOk0= -github.com/djherbis/buffer v1.1.0/go.mod h1:VwN8VdFkMY0DCALdY8o00d3IZ6Amz/UNVMWcSaJT44o= -github.com/djherbis/buffer v1.2.0 h1:PH5Dd2ss0C7CRRhQCZ2u7MssF+No9ide8Ye71nPHcrQ= -github.com/djherbis/buffer v1.2.0/go.mod h1:fjnebbZjCUpPinBRD+TDwXSOeNQ7fPQWLfGQqiAiUyE= -github.com/djherbis/nio/v3 v3.0.1 h1:6wxhnuppteMa6RHA4L81Dq7ThkZH8SwnDzXDYy95vB4= -github.com/djherbis/nio/v3 v3.0.1/go.mod h1:Ng4h80pbZFMla1yKzm61cF0tqqilXZYrogmWgZxOcmg= github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= @@ -792,8 +787,8 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/goldmark v1.4.15/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/yuin/goldmark v1.7.13 h1:GPddIs617DnBLFFVJFgpo1aBfe/4xcvMc3SB5t/D0pA= -github.com/yuin/goldmark v1.7.13/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg= +github.com/yuin/goldmark v1.7.16 h1:n+CJdUxaFMiDUNnWC3dMWCIQJSkxH4uz3ZwQBkAlVNE= +github.com/yuin/goldmark v1.7.16/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg= github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc h1:+IAOyRda+RLrxa1WC7umKOZRsGq4QrFFMYApOeHzQwQ= github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc/go.mod h1:ovIvrum6DQJA4QsJSovrkC4saKHQVs7TvcaeO8AIl5I= github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= diff --git a/knip.config.ts b/knip.config.ts new file mode 100644 index 0000000000..3f63cbc775 --- /dev/null +++ b/knip.config.ts @@ -0,0 +1,18 @@ +import type 
{KnipConfig} from 'knip'; + +export default { + entry: [ + '*.ts', + 'tools/**/*.ts', + 'tests/e2e/**/*.ts', + ], + ignoreDependencies: [ + // dependencies used in Makefile or tools + '@primer/octicons', + 'markdownlint-cli', + 'nolyfill', + 'spectral-cli-bundle', + 'vue-tsc', + 'webpack-cli', + ], +} satisfies KnipConfig; diff --git a/models/git/branch_test.go b/models/git/branch_test.go index 5be435172b..7728d72f3e 100644 --- a/models/git/branch_test.go +++ b/models/git/branch_test.go @@ -114,7 +114,7 @@ func TestFindRenamedBranch(t *testing.T) { assert.True(t, exist) assert.Equal(t, "master", branch.To) - _, exist, err = git_model.FindRenamedBranch(t.Context(), 1, "unknow") + _, exist, err = git_model.FindRenamedBranch(t.Context(), 1, "unknown") assert.NoError(t, err) assert.False(t, exist) } diff --git a/models/git/lfs_lock.go b/models/git/lfs_lock.go index 184e616915..aabed6b7fa 100644 --- a/models/git/lfs_lock.go +++ b/models/git/lfs_lock.go @@ -101,10 +101,10 @@ func GetLFSLock(ctx context.Context, repo *repo_model.Repository, path string) ( return rel, nil } -// GetLFSLockByID returns release by given id. -func GetLFSLockByID(ctx context.Context, id int64) (*LFSLock, error) { +// GetLFSLockByIDAndRepo returns lfs lock by given id and repository id. +func GetLFSLockByIDAndRepo(ctx context.Context, id, repoID int64) (*LFSLock, error) { lock := new(LFSLock) - has, err := db.GetEngine(ctx).ID(id).Get(lock) + has, err := db.GetEngine(ctx).ID(id).And("repo_id = ?", repoID).Get(lock) if err != nil { return nil, err } else if !has { @@ -153,7 +153,7 @@ func CountLFSLockByRepoID(ctx context.Context, repoID int64) (int64, error) { // DeleteLFSLockByID deletes a lock by given ID. 
func DeleteLFSLockByID(ctx context.Context, id int64, repo *repo_model.Repository, u *user_model.User, force bool) (*LFSLock, error) { return db.WithTx2(ctx, func(ctx context.Context) (*LFSLock, error) { - lock, err := GetLFSLockByID(ctx, id) + lock, err := GetLFSLockByIDAndRepo(ctx, id, repo.ID) if err != nil { return nil, err } diff --git a/models/git/lfs_lock_test.go b/models/git/lfs_lock_test.go new file mode 100644 index 0000000000..c88e89be47 --- /dev/null +++ b/models/git/lfs_lock_test.go @@ -0,0 +1,82 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package git + +import ( + "fmt" + "testing" + "time" + + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createTestLock(t *testing.T, repo *repo_model.Repository, owner *user_model.User) *LFSLock { + t.Helper() + + path := fmt.Sprintf("%s-%d-%d", t.Name(), repo.ID, time.Now().UnixNano()) + lock, err := CreateLFSLock(t.Context(), repo, &LFSLock{ + OwnerID: owner.ID, + Path: path, + }) + require.NoError(t, err) + return lock +} + +func TestGetLFSLockByIDAndRepo(t *testing.T) { + require.NoError(t, unittest.PrepareTestDatabase()) + + repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) + repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3}) + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}) + + lockRepo1 := createTestLock(t, repo1, user2) + lockRepo3 := createTestLock(t, repo3, user4) + + fetched, err := GetLFSLockByIDAndRepo(t.Context(), lockRepo1.ID, repo1.ID) + require.NoError(t, err) + assert.Equal(t, lockRepo1.ID, fetched.ID) + assert.Equal(t, repo1.ID, fetched.RepoID) + + _, err = GetLFSLockByIDAndRepo(t.Context(), lockRepo1.ID, repo3.ID) + 
assert.Error(t, err) + assert.True(t, IsErrLFSLockNotExist(err)) + + _, err = GetLFSLockByIDAndRepo(t.Context(), lockRepo3.ID, repo1.ID) + assert.Error(t, err) + assert.True(t, IsErrLFSLockNotExist(err)) +} + +func TestDeleteLFSLockByIDRequiresRepoMatch(t *testing.T) { + require.NoError(t, unittest.PrepareTestDatabase()) + + repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) + repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3}) + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}) + + lockRepo1 := createTestLock(t, repo1, user2) + lockRepo3 := createTestLock(t, repo3, user4) + + _, err := DeleteLFSLockByID(t.Context(), lockRepo3.ID, repo1, user2, true) + assert.Error(t, err) + assert.True(t, IsErrLFSLockNotExist(err)) + + existing, err := GetLFSLockByIDAndRepo(t.Context(), lockRepo3.ID, repo3.ID) + require.NoError(t, err) + assert.Equal(t, lockRepo3.ID, existing.ID) + + deleted, err := DeleteLFSLockByID(t.Context(), lockRepo3.ID, repo3, user4, true) + require.NoError(t, err) + assert.Equal(t, lockRepo3.ID, deleted.ID) + + deleted, err = DeleteLFSLockByID(t.Context(), lockRepo1.ID, repo1, user2, false) + require.NoError(t, err) + assert.Equal(t, lockRepo1.ID, deleted.ID) +} diff --git a/models/issues/comment.go b/models/issues/comment.go index fd0500833e..9c249d2c05 100644 --- a/models/issues/comment.go +++ b/models/issues/comment.go @@ -20,6 +20,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" + "code.gitea.io/gitea/modules/htmlutil" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/references" @@ -233,11 +234,17 @@ func (r RoleInRepo) LocaleHelper(lang translation.Locale) string { return lang.TrString("repo.issues.role." 
+ string(r) + "_helper") } +type SpecialDoerNameType string + +const SpecialDoerNameCodeOwners SpecialDoerNameType = "CODEOWNERS" + // CommentMetaData stores metadata for a comment, these data will not be changed once inserted into database type CommentMetaData struct { ProjectColumnID int64 `json:"project_column_id,omitempty"` ProjectColumnTitle string `json:"project_column_title,omitempty"` ProjectTitle string `json:"project_title,omitempty"` + + SpecialDoerName SpecialDoerNameType `json:"special_doer_name,omitempty"` // e.g. "CODEOWNERS" for CODEOWNERS-triggered review requests } // Comment represents a comment in commit and issue page. @@ -764,6 +771,37 @@ func (c *Comment) CodeCommentLink(ctx context.Context) string { return fmt.Sprintf("%s/files#%s", c.Issue.Link(), c.HashTag()) } +func (c *Comment) MetaSpecialDoerTr(locale translation.Locale) template.HTML { + if c.CommentMetaData == nil { + return "" + } + if c.CommentMetaData.SpecialDoerName == SpecialDoerNameCodeOwners { + return locale.Tr("repo.issues.review.codeowners_rules") + } + return htmlutil.HTMLFormat("%s", c.CommentMetaData.SpecialDoerName) +} + +func (c *Comment) TimelineRequestedReviewTr(locale translation.Locale, createdStr template.HTML) template.HTML { + if c.AssigneeID > 0 { + // it guarantees LoadAssigneeUserAndTeam has been called, and c.Assignee is Ghost user but not nil if the user doesn't exist + if c.RemovedAssignee { + if c.PosterID == c.AssigneeID { + return locale.Tr("repo.issues.review.remove_review_request_self", createdStr) + } + return locale.Tr("repo.issues.review.remove_review_request", c.Assignee.GetDisplayName(), createdStr) + } + return locale.Tr("repo.issues.review.add_review_request", c.Assignee.GetDisplayName(), createdStr) + } + teamName := "Ghost Team" + if c.AssigneeTeam != nil { + teamName = c.AssigneeTeam.Name + } + if c.RemovedAssignee { + return locale.Tr("repo.issues.review.remove_review_request", teamName, createdStr) + } + return 
locale.Tr("repo.issues.review.add_review_request", teamName, createdStr) +} + // CreateComment creates comment with context func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) { return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) { @@ -780,6 +818,11 @@ func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, ProjectTitle: opts.ProjectTitle, } } + if opts.SpecialDoerName != "" { + commentMetaData = &CommentMetaData{ + SpecialDoerName: opts.SpecialDoerName, + } + } comment := &Comment{ Type: opts.Type, @@ -976,6 +1019,7 @@ type CreateCommentOptions struct { RefIsPull bool IsForcePush bool Invalidated bool + SpecialDoerName SpecialDoerNameType // e.g. "CODEOWNERS" for CODEOWNERS-triggered review requests } // GetCommentByID returns the comment by given ID. diff --git a/models/issues/pull.go b/models/issues/pull.go index 1ffcd683d5..18977ed212 100644 --- a/models/issues/pull.go +++ b/models/issues/pull.go @@ -666,9 +666,10 @@ func HasWorkInProgressPrefix(title string) bool { return false } -// IsFilesConflicted determines if the Pull Request has changes conflicting with the target branch. +// IsFilesConflicted determines if the Pull Request has changes conflicting with the target branch. +// Sometimes a conflict may not list any files func (pr *PullRequest) IsFilesConflicted() bool { - return len(pr.ConflictedFiles) > 0 + return pr.Status == PullRequestStatusConflict } // GetWorkInProgressPrefix returns the prefix used to mark the pull request as a work in progress. 
diff --git a/models/issues/pull_test.go b/models/issues/pull_test.go index 7089af253b..25b27cbe9c 100644 --- a/models/issues/pull_test.go +++ b/models/issues/pull_test.go @@ -130,7 +130,7 @@ func TestLoadRequestedReviewers(t *testing.T) { user1, err := user_model.GetUserByID(t.Context(), 1) assert.NoError(t, err) - comment, err := issues_model.AddReviewRequest(t.Context(), issue, user1, &user_model.User{}) + comment, err := issues_model.AddReviewRequest(t.Context(), issue, user1, &user_model.User{}, false) assert.NoError(t, err) assert.NotNil(t, comment) diff --git a/models/issues/review.go b/models/issues/review.go index b758fa5ffa..d8caa4d13a 100644 --- a/models/issues/review.go +++ b/models/issues/review.go @@ -643,7 +643,7 @@ func InsertReviews(ctx context.Context, reviews []*Review) error { } // AddReviewRequest add a review request from one reviewer -func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_model.User) (*Comment, error) { +func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_model.User, isCodeOwners bool) (*Comment, error) { return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) { sess := db.GetEngine(ctx) @@ -702,6 +702,7 @@ func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_mo RemovedAssignee: false, // Use RemovedAssignee as !isRequest AssigneeID: reviewer.ID, // Use AssigneeID as reviewer ID ReviewID: review.ID, + SpecialDoerName: util.Iif(isCodeOwners, SpecialDoerNameCodeOwners, ""), }) if err != nil { return nil, err @@ -767,7 +768,7 @@ func restoreLatestOfficialReview(ctx context.Context, issueID, reviewerID int64) } // AddTeamReviewRequest add a review request from one team -func AddTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organization.Team, doer *user_model.User) (*Comment, error) { +func AddTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organization.Team, doer *user_model.User, isCodeOwners bool) (*Comment, 
error) { return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) { review, err := GetTeamReviewerByIssueIDAndTeamID(ctx, issue.ID, reviewer.ID) if err != nil && !IsErrReviewNotExist(err) { @@ -812,6 +813,7 @@ func AddTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organizat RemovedAssignee: false, // Use RemovedAssignee as !isRequest AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID ReviewID: review.ID, + SpecialDoerName: util.Iif(isCodeOwners, SpecialDoerNameCodeOwners, ""), }) if err != nil { return nil, fmt.Errorf("CreateComment(): %w", err) diff --git a/models/issues/review_test.go b/models/issues/review_test.go index 6795ea8e66..092d88d174 100644 --- a/models/issues/review_test.go +++ b/models/issues/review_test.go @@ -321,14 +321,28 @@ func TestAddReviewRequest(t *testing.T) { pull.HasMerged = false assert.NoError(t, pull.UpdateCols(t.Context(), "has_merged")) issue.IsClosed = true - _, err = issues_model.AddReviewRequest(t.Context(), issue, reviewer, &user_model.User{}) + _, err = issues_model.AddReviewRequest(t.Context(), issue, reviewer, &user_model.User{}, false) assert.Error(t, err) assert.True(t, issues_model.IsErrReviewRequestOnClosedPR(err)) pull.HasMerged = true assert.NoError(t, pull.UpdateCols(t.Context(), "has_merged")) issue.IsClosed = false - _, err = issues_model.AddReviewRequest(t.Context(), issue, reviewer, &user_model.User{}) + _, err = issues_model.AddReviewRequest(t.Context(), issue, reviewer, &user_model.User{}, false) assert.Error(t, err) assert.True(t, issues_model.IsErrReviewRequestOnClosedPR(err)) + + // Test CODEOWNERS review request stores metadata correctly + pull2 := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) + assert.NoError(t, pull2.LoadIssue(t.Context())) + issue2 := pull2.Issue + assert.NoError(t, issue2.LoadRepo(t.Context())) + reviewer2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 7}) + doer := unittest.AssertExistsAndLoadBean(t, 
&user_model.User{ID: 2}) + + comment, err := issues_model.AddReviewRequest(t.Context(), issue2, reviewer2, doer, true) + assert.NoError(t, err) + assert.NotNil(t, comment) + assert.NotNil(t, comment.CommentMetaData) + assert.Equal(t, issues_model.SpecialDoerNameCodeOwners, comment.CommentMetaData.SpecialDoerName) } diff --git a/models/issues/stopwatch.go b/models/issues/stopwatch.go index 761b8f91a0..f119951b09 100644 --- a/models/issues/stopwatch.go +++ b/models/issues/stopwatch.go @@ -12,6 +12,8 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" + + "xorm.io/builder" ) // Stopwatch represents a stopwatch for time tracking. @@ -232,3 +234,14 @@ func CancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) ( }) return ok, err } + +// RemoveStopwatchesByRepoID removes all stopwatches for a user in a specific repository +// this function should be called before removing all the issues of the repository +func RemoveStopwatchesByRepoID(ctx context.Context, userID, repoID int64) error { + _, err := db.GetEngine(ctx). + Where("`stopwatch`.user_id = ?", userID). + And(builder.In("`stopwatch`.issue_id", + builder.Select("id").From("issue").Where(builder.Eq{"repo_id": repoID}))). 
+ Delete(new(Stopwatch)) + return err +} diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go index fa11acaee2..9975729fd6 100644 --- a/models/migrations/migrations.go +++ b/models/migrations/migrations.go @@ -399,6 +399,7 @@ func prepareMigrationTasks() []*migration { newMigration(323, "Add support for actions concurrency", v1_26.AddActionsConcurrency), newMigration(324, "Fix closed milestone completeness for milestones with no issues", v1_26.FixClosedMilestoneCompleteness), + newMigration(325, "Fix missed repo_id when migrate attachments", v1_26.FixMissedRepoIDWhenMigrateAttachments), } return preparedMigrations } diff --git a/models/migrations/v1_21/v276.go b/models/migrations/v1_21/v276.go index 3ab7e22cd0..be24b31902 100644 --- a/models/migrations/v1_21/v276.go +++ b/models/migrations/v1_21/v276.go @@ -5,14 +5,10 @@ package v1_21 import ( "context" - "fmt" - "path/filepath" - "strings" - "code.gitea.io/gitea/modules/git" - giturl "code.gitea.io/gitea/modules/git/url" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" "xorm.io/xorm" ) @@ -163,16 +159,13 @@ func migratePushMirrors(x *xorm.Engine) error { } func getRemoteAddress(ownerName, repoName, remoteName string) (string, error) { - repoPath := filepath.Join(setting.RepoRootPath, strings.ToLower(ownerName), strings.ToLower(repoName)+".git") - if exist, _ := util.IsExist(repoPath); !exist { + ctx := context.Background() + relativePath := repo_model.RelativePath(ownerName, repoName) + if exist, _ := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(relativePath)); !exist { return "", nil } - remoteURL, err := git.GetRemoteAddress(context.Background(), repoPath, remoteName) - if err != nil { - return "", fmt.Errorf("get remote %s's address of %s/%s failed: %v", remoteName, ownerName, repoName, err) - } - u, err := giturl.ParseGitURL(remoteURL) + u, err := 
gitrepo.GitRemoteGetURL(ctx, repo_model.StorageRepo(relativePath), remoteName) if err != nil { return "", err } diff --git a/models/migrations/v1_26/v325.go b/models/migrations/v1_26/v325.go new file mode 100644 index 0000000000..d81540f44e --- /dev/null +++ b/models/migrations/v1_26/v325.go @@ -0,0 +1,18 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package v1_26 + +import ( + "xorm.io/xorm" +) + +func FixMissedRepoIDWhenMigrateAttachments(x *xorm.Engine) error { + _, err := x.Exec("UPDATE `attachment` SET `repo_id` = (SELECT `repo_id` FROM `issue` WHERE `issue`.`id` = `attachment`.`issue_id`) WHERE `issue_id` > 0 AND (`repo_id` IS NULL OR `repo_id` = 0);") + if err != nil { + return err + } + + _, err = x.Exec("UPDATE `attachment` SET `repo_id` = (SELECT `repo_id` FROM `release` WHERE `release`.`id` = `attachment`.`release_id`) WHERE `release_id` > 0 AND (`repo_id` IS NULL OR `repo_id` = 0);") + return err +} diff --git a/models/migrations/v1_26/v325_test.go b/models/migrations/v1_26/v325_test.go new file mode 100644 index 0000000000..d4a66fee81 --- /dev/null +++ b/models/migrations/v1_26/v325_test.go @@ -0,0 +1,45 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package v1_26 + +import ( + "testing" + + "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/modules/timeutil" + + "github.com/stretchr/testify/require" +) + +func Test_FixMissedRepoIDWhenMigrateAttachments(t *testing.T) { + type Attachment struct { + ID int64 `xorm:"pk autoincr"` + UUID string `xorm:"uuid UNIQUE"` + RepoID int64 `xorm:"INDEX"` // this should not be zero + IssueID int64 `xorm:"INDEX"` // maybe zero when creating + ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating + UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added + CommentID int64 `xorm:"INDEX"` + Name string + DownloadCount int64 `xorm:"DEFAULT 0"` + Size int64 `xorm:"DEFAULT 0"` + CreatedUnix timeutil.TimeStamp `xorm:"created"` + } + + type Issue struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"INDEX"` + } + + type Release struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"INDEX"` + } + + // Prepare and load the testing database + x, deferrable := base.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release)) + defer deferrable() + + require.NoError(t, FixMissedRepoIDWhenMigrateAttachments(x)) +} diff --git a/models/migrations/v1_9/v82.go b/models/migrations/v1_9/v82.go index f0307bf07a..8796b0563d 100644 --- a/models/migrations/v1_9/v82.go +++ b/models/migrations/v1_9/v82.go @@ -6,11 +6,10 @@ package v1_9 import ( "context" "fmt" - "path/filepath" - "strings" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/gitrepo" "xorm.io/xorm" ) @@ -34,16 +33,6 @@ func FixReleaseSha1OnReleaseTable(ctx context.Context, x *xorm.Engine) error { Name string } - // UserPath returns the path absolute path of user repositories. 
- UserPath := func(userName string) string { - return filepath.Join(setting.RepoRootPath, strings.ToLower(userName)) - } - - // RepoPath returns repository path by given user and repository name. - RepoPath := func(userName, repoName string) string { - return filepath.Join(UserPath(userName), strings.ToLower(repoName)+".git") - } - // Update release sha1 const batchSize = 100 sess := x.NewSession() @@ -99,7 +88,7 @@ func FixReleaseSha1OnReleaseTable(ctx context.Context, x *xorm.Engine) error { userCache[repo.OwnerID] = user } - gitRepo, err = git.OpenRepository(ctx, RepoPath(user.Name, repo.Name)) + gitRepo, err = gitrepo.OpenRepository(ctx, repo_model.StorageRepo(repo_model.RelativePath(user.Name, repo.Name))) if err != nil { return err } diff --git a/models/project/column.go b/models/project/column.go index 9b9d874997..79f6dfe911 100644 --- a/models/project/column.go +++ b/models/project/column.go @@ -213,6 +213,18 @@ func GetColumn(ctx context.Context, columnID int64) (*Column, error) { return column, nil } +func GetColumnByIDAndProjectID(ctx context.Context, columnID, projectID int64) (*Column, error) { + column := new(Column) + has, err := db.GetEngine(ctx).ID(columnID).And("project_id=?", projectID).Get(column) + if err != nil { + return nil, err + } else if !has { + return nil, ErrProjectColumnNotExist{ColumnID: columnID} + } + + return column, nil +} + // UpdateColumn updates a project column func UpdateColumn(ctx context.Context, column *Column) error { var fieldToUpdate []string diff --git a/models/project/project.go b/models/project/project.go index c003664fa3..7646c3dd71 100644 --- a/models/project/project.go +++ b/models/project/project.go @@ -302,6 +302,18 @@ func GetProjectByID(ctx context.Context, id int64) (*Project, error) { return p, nil } +func GetProjectByIDAndOwner(ctx context.Context, id, ownerID int64) (*Project, error) { + p := new(Project) + has, err := db.GetEngine(ctx).ID(id).And("owner_id = ?", ownerID).Get(p) + if err != nil { + return 
nil, err + } else if !has { + return nil, ErrProjectNotExist{ID: id} + } + + return p, nil +} + // GetProjectForRepoByID returns the projects in a repository func GetProjectForRepoByID(ctx context.Context, repoID, id int64) (*Project, error) { p := new(Project) diff --git a/models/renderhelper/repo_file.go b/models/renderhelper/repo_file.go index e0375ed280..f1df8e89e0 100644 --- a/models/renderhelper/repo_file.go +++ b/models/renderhelper/repo_file.go @@ -70,6 +70,6 @@ func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository, "repo": helper.opts.DeprecatedRepoName, }) } - rctx = rctx.WithHelper(helper) + rctx = rctx.WithHelper(helper).WithEnableHeadingIDGeneration(true) return rctx } diff --git a/models/renderhelper/repo_wiki.go b/models/renderhelper/repo_wiki.go index b75f1b9701..218b1e4a67 100644 --- a/models/renderhelper/repo_wiki.go +++ b/models/renderhelper/repo_wiki.go @@ -71,7 +71,7 @@ func NewRenderContextRepoWiki(ctx context.Context, repo *repo_model.Repository, "markupAllowShortIssuePattern": "true", }) } - rctx = rctx.WithHelper(helper) + rctx = rctx.WithHelper(helper).WithEnableHeadingIDGeneration(true) helper.ctx = rctx return rctx } diff --git a/models/repo/attachment.go b/models/repo/attachment.go index 835bee5402..27856f2d2e 100644 --- a/models/repo/attachment.go +++ b/models/repo/attachment.go @@ -166,6 +166,11 @@ func GetAttachmentByReleaseIDFileName(ctx context.Context, releaseID int64, file return attach, nil } +func GetUnlinkedAttachmentsByUserID(ctx context.Context, userID int64) ([]*Attachment, error) { + attachments := make([]*Attachment, 0, 10) + return attachments, db.GetEngine(ctx).Where("uploader_id = ? AND issue_id = 0 AND release_id = 0 AND comment_id = 0", userID).Find(&attachments) +} + // DeleteAttachment deletes the given attachment and optionally the associated file. 
func DeleteAttachment(ctx context.Context, a *Attachment, remove bool) error { _, err := DeleteAttachments(ctx, []*Attachment{a}, remove) diff --git a/models/repo/attachment_test.go b/models/repo/attachment_test.go index d41008344d..07f4c587a7 100644 --- a/models/repo/attachment_test.go +++ b/models/repo/attachment_test.go @@ -101,3 +101,19 @@ func TestGetAttachmentsByUUIDs(t *testing.T) { assert.Equal(t, int64(1), attachList[0].IssueID) assert.Equal(t, int64(5), attachList[1].IssueID) } + +func TestGetUnlinkedAttachmentsByUserID(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + attachments, err := repo_model.GetUnlinkedAttachmentsByUserID(t.Context(), 8) + assert.NoError(t, err) + assert.Len(t, attachments, 1) + assert.Equal(t, int64(10), attachments[0].ID) + assert.Zero(t, attachments[0].IssueID) + assert.Zero(t, attachments[0].ReleaseID) + assert.Zero(t, attachments[0].CommentID) + + attachments, err = repo_model.GetUnlinkedAttachmentsByUserID(t.Context(), 1) + assert.NoError(t, err) + assert.Empty(t, attachments) +} diff --git a/models/repo/release.go b/models/repo/release.go index 67aa390e6d..e2010c8a38 100644 --- a/models/repo/release.go +++ b/models/repo/release.go @@ -93,15 +93,25 @@ func init() { db.RegisterModel(new(Release)) } -// LoadAttributes load repo and publisher attributes for a release -func (r *Release) LoadAttributes(ctx context.Context) error { - var err error - if r.Repo == nil { - r.Repo, err = GetRepositoryByID(ctx, r.RepoID) - if err != nil { - return err - } +// LegacyAttachmentMissingRepoIDCutoff marks the date when repo_id started to be written during uploads +// (2026-01-16T00:00:00Z). Older rows might have repo_id=0 and should be tolerated once. 
+const LegacyAttachmentMissingRepoIDCutoff timeutil.TimeStamp = 1768521600 + +func (r *Release) LoadRepo(ctx context.Context) (err error) { + if r.Repo != nil { + return nil } + + r.Repo, err = GetRepositoryByID(ctx, r.RepoID) + return err +} + +// LoadAttributes load repo and publisher attributes for a release +func (r *Release) LoadAttributes(ctx context.Context) (err error) { + if err := r.LoadRepo(ctx); err != nil { + return err + } + if r.Publisher == nil { r.Publisher, err = user_model.GetUserByID(ctx, r.PublisherID) if err != nil { @@ -168,6 +178,11 @@ func UpdateReleaseNumCommits(ctx context.Context, rel *Release) error { // AddReleaseAttachments adds a release attachments func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs []string) (err error) { + rel, err := GetReleaseByID(ctx, releaseID) + if err != nil { + return err + } + // Check attachments attachments, err := GetAttachmentsByUUIDs(ctx, attachmentUUIDs) if err != nil { @@ -175,6 +190,17 @@ func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs } for i := range attachments { + if attachments[i].RepoID == 0 && attachments[i].CreatedUnix < LegacyAttachmentMissingRepoIDCutoff { + attachments[i].RepoID = rel.RepoID + if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Cols("repo_id").Update(attachments[i]); err != nil { + return fmt.Errorf("update attachment repo_id [%d]: %w", attachments[i].ID, err) + } + } + + if attachments[i].RepoID != rel.RepoID { + return util.NewPermissionDeniedErrorf("attachment belongs to different repository") + } + if attachments[i].ReleaseID != 0 { return util.NewPermissionDeniedErrorf("release permission denied") } diff --git a/models/repo/release_test.go b/models/repo/release_test.go index 01f0fb3cff..2a09ffb36d 100644 --- a/models/repo/release_test.go +++ b/models/repo/release_test.go @@ -6,7 +6,9 @@ package repo import ( "testing" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unittest" + 
"code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" ) @@ -37,3 +39,54 @@ func Test_FindTagsByCommitIDs(t *testing.T) { assert.Equal(t, "delete-tag", rels[1].TagName) assert.Equal(t, "v1.0", rels[2].TagName) } + +func TestAddReleaseAttachmentsRejectsDifferentRepo(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + uuid := "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12" // attachment 2 belongs to repo 2 + err := AddReleaseAttachments(t.Context(), 1, []string{uuid}) + assert.Error(t, err) + assert.ErrorIs(t, err, util.ErrPermissionDenied) + + attach, err := GetAttachmentByUUID(t.Context(), uuid) + assert.NoError(t, err) + assert.Zero(t, attach.ReleaseID, "attachment should not be linked to release on failure") +} + +func TestAddReleaseAttachmentsAllowsLegacyMissingRepoID(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + legacyUUID := "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a20" // attachment 10 has repo_id 0 + err := AddReleaseAttachments(t.Context(), 1, []string{legacyUUID}) + assert.NoError(t, err) + + attach, err := GetAttachmentByUUID(t.Context(), legacyUUID) + assert.NoError(t, err) + assert.EqualValues(t, 1, attach.RepoID) + assert.EqualValues(t, 1, attach.ReleaseID) +} + +func TestAddReleaseAttachmentsRejectsRecentZeroRepoID(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + recentUUID := "a0eebc99-9c0b-4ef8-bb6d-6bb9bd3800aa" + attachment := &Attachment{ + UUID: recentUUID, + RepoID: 0, + IssueID: 0, + ReleaseID: 0, + CommentID: 0, + Name: "recent-zero", + CreatedUnix: LegacyAttachmentMissingRepoIDCutoff + 1, + } + assert.NoError(t, db.Insert(t.Context(), attachment)) + + err := AddReleaseAttachments(t.Context(), 1, []string{recentUUID}) + assert.Error(t, err) + assert.ErrorIs(t, err, util.ErrPermissionDenied) + + attach, err := GetAttachmentByUUID(t.Context(), recentUUID) + assert.NoError(t, err) + assert.Zero(t, attach.ReleaseID) + assert.Zero(t, attach.RepoID) +} diff --git 
a/models/repo/watch.go b/models/repo/watch.go index a616544cae..1e63d5c3d2 100644 --- a/models/repo/watch.go +++ b/models/repo/watch.go @@ -176,3 +176,13 @@ func WatchIfAuto(ctx context.Context, userID, repoID int64, isWrite bool) error } return watchRepoMode(ctx, watch, WatchModeAuto) } + +// ClearRepoWatches clears all watches for a repository and from the user that watched it. +// Used when a repository is set to private. +func ClearRepoWatches(ctx context.Context, repoID int64) error { + if _, err := db.Exec(ctx, "UPDATE `repository` SET num_watches = 0 WHERE id = ?", repoID); err != nil { + return err + } + + return db.DeleteBeans(ctx, Watch{RepoID: repoID}) +} diff --git a/models/repo/watch_test.go b/models/repo/watch_test.go index 19e363f6b0..97576fb787 100644 --- a/models/repo/watch_test.go +++ b/models/repo/watch_test.go @@ -13,6 +13,7 @@ import ( "code.gitea.io/gitea/modules/setting" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestIsWatching(t *testing.T) { @@ -119,3 +120,21 @@ func TestWatchIfAuto(t *testing.T) { assert.NoError(t, err) assert.Len(t, watchers, prevCount) } + +func TestClearRepoWatches(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + const repoID int64 = 1 + watchers, err := repo_model.GetRepoWatchersIDs(t.Context(), repoID) + require.NoError(t, err) + require.NotEmpty(t, watchers) + + assert.NoError(t, repo_model.ClearRepoWatches(t.Context(), repoID)) + + watchers, err = repo_model.GetRepoWatchersIDs(t.Context(), repoID) + assert.NoError(t, err) + assert.Empty(t, watchers) + + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID}) + assert.Zero(t, repo.NumWatches) +} diff --git a/models/user/openid.go b/models/user/openid.go index 420c67ca18..5baa48c824 100644 --- a/models/user/openid.go +++ b/models/user/openid.go @@ -102,7 +102,13 @@ func DeleteUserOpenID(ctx context.Context, openid *UserOpenID) (err error) { } // ToggleUserOpenIDVisibility toggles 
visibility of an openid address of given user. -func ToggleUserOpenIDVisibility(ctx context.Context, id int64) (err error) { - _, err = db.GetEngine(ctx).Exec("update `user_open_id` set `show` = not `show` where `id` = ?", id) - return err +func ToggleUserOpenIDVisibility(ctx context.Context, id int64, user *User) error { + affected, err := db.GetEngine(ctx).Exec("update `user_open_id` set `show` = not `show` where `id` = ? AND uid = ?", id, user.ID) + if err != nil { + return err + } + if n, _ := affected.RowsAffected(); n != 1 { + return util.NewNotExistErrorf("OpenID is unknown") + } + return nil } diff --git a/models/user/openid_test.go b/models/user/openid_test.go index fa260e7a9e..6d2260324f 100644 --- a/models/user/openid_test.go +++ b/models/user/openid_test.go @@ -8,6 +8,7 @@ import ( "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -33,12 +34,14 @@ func TestGetUserOpenIDs(t *testing.T) { func TestToggleUserOpenIDVisibility(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) + user, err := user_model.GetUserByID(t.Context(), int64(2)) + require.NoError(t, err) oids, err := user_model.GetUserOpenIDs(t.Context(), int64(2)) require.NoError(t, err) require.Len(t, oids, 1) assert.True(t, oids[0].Show) - err = user_model.ToggleUserOpenIDVisibility(t.Context(), oids[0].ID) + err = user_model.ToggleUserOpenIDVisibility(t.Context(), oids[0].ID, user) require.NoError(t, err) oids, err = user_model.GetUserOpenIDs(t.Context(), int64(2)) @@ -46,7 +49,7 @@ func TestToggleUserOpenIDVisibility(t *testing.T) { require.Len(t, oids, 1) assert.False(t, oids[0].Show) - err = user_model.ToggleUserOpenIDVisibility(t.Context(), oids[0].ID) + err = user_model.ToggleUserOpenIDVisibility(t.Context(), oids[0].ID, user) require.NoError(t, err) oids, err = user_model.GetUserOpenIDs(t.Context(), int64(2)) @@ -55,3 +58,13 @@ 
func TestToggleUserOpenIDVisibility(t *testing.T) { assert.True(t, oids[0].Show) } } + +func TestToggleUserOpenIDVisibilityRequiresOwnership(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + unauthorizedUser, err := user_model.GetUserByID(t.Context(), int64(2)) + require.NoError(t, err) + + err = user_model.ToggleUserOpenIDVisibility(t.Context(), int64(1), unauthorizedUser) + require.Error(t, err) + assert.ErrorIs(t, err, util.ErrNotExist) +} diff --git a/modules/analyze/code_language.go b/modules/analyze/code_language.go index 74e7a06d06..d8589861d3 100644 --- a/modules/analyze/code_language.go +++ b/modules/analyze/code_language.go @@ -4,12 +4,13 @@ package analyze import ( - "path/filepath" + "path" "github.com/go-enry/go-enry/v2" ) // GetCodeLanguage detects code language based on file name and content +// It can be slow when the content is used for detection func GetCodeLanguage(filename string, content []byte) string { if language, ok := enry.GetLanguageByExtension(filename); ok { return language @@ -23,5 +24,5 @@ func GetCodeLanguage(filename string, content []byte) string { return enry.OtherLanguage } - return enry.GetLanguage(filepath.Base(filename), content) + return enry.GetLanguage(path.Base(filename), content) } diff --git a/modules/assetfs/layered.go b/modules/assetfs/layered.go index ce55475bd9..41e4ca7376 100644 --- a/modules/assetfs/layered.go +++ b/modules/assetfs/layered.go @@ -6,9 +6,7 @@ package assetfs import ( "context" "fmt" - "io" "io/fs" - "net/http" "os" "path/filepath" "sort" @@ -25,7 +23,7 @@ import ( // Layer represents a layer in a layered asset file-system. It has a name and works like http.FileSystem type Layer struct { name string - fs http.FileSystem + fs fs.FS localPath string } @@ -34,7 +32,7 @@ func (l *Layer) Name() string { } // Open opens the named file. The caller is responsible for closing the file. 
-func (l *Layer) Open(name string) (http.File, error) { +func (l *Layer) Open(name string) (fs.File, error) { return l.fs.Open(name) } @@ -48,12 +46,12 @@ func Local(name, base string, sub ...string) *Layer { panic(fmt.Sprintf("Unable to get absolute path for %q: %v", base, err)) } root := util.FilePathJoinAbs(base, sub...) - return &Layer{name: name, fs: http.Dir(root), localPath: root} + return &Layer{name: name, fs: os.DirFS(root), localPath: root} } // Bindata returns a new Layer with the given name, it serves files from the given bindata asset. func Bindata(name string, fs fs.FS) *Layer { - return &Layer{name: name, fs: http.FS(fs)} + return &Layer{name: name, fs: fs} } // LayeredFS is a layered asset file-system. It works like http.FileSystem, but it can have multiple layers. @@ -69,7 +67,7 @@ func Layered(layers ...*Layer) *LayeredFS { } // Open opens the named file. The caller is responsible for closing the file. -func (l *LayeredFS) Open(name string) (http.File, error) { +func (l *LayeredFS) Open(name string) (fs.File, error) { for _, layer := range l.layers { f, err := layer.Open(name) if err == nil || !os.IsNotExist(err) { @@ -89,40 +87,34 @@ func (l *LayeredFS) ReadFile(elems ...string) ([]byte, error) { func (l *LayeredFS) ReadLayeredFile(elems ...string) ([]byte, string, error) { name := util.PathJoinRel(elems...) 
for _, layer := range l.layers { - f, err := layer.Open(name) + bs, err := fs.ReadFile(layer, name) if os.IsNotExist(err) { continue } else if err != nil { return nil, layer.name, err } - bs, err := io.ReadAll(f) - _ = f.Close() return bs, layer.name, err } return nil, "", fs.ErrNotExist } -func shouldInclude(info fs.FileInfo, fileMode ...bool) bool { - if util.IsCommonHiddenFileName(info.Name()) { +func shouldInclude(dirEntry fs.DirEntry, fileMode ...bool) bool { + if util.IsCommonHiddenFileName(dirEntry.Name()) { return false } if len(fileMode) == 0 { return true } else if len(fileMode) == 1 { - return fileMode[0] == !info.Mode().IsDir() + return fileMode[0] == !dirEntry.IsDir() } panic("too many arguments for fileMode in shouldInclude") } -func readDir(layer *Layer, name string) ([]fs.FileInfo, error) { - f, err := layer.Open(name) - if os.IsNotExist(err) { +func readDirOptional(layer *Layer, name string) (entries []fs.DirEntry, err error) { + if entries, err = fs.ReadDir(layer, name); os.IsNotExist(err) { return nil, nil - } else if err != nil { - return nil, err } - defer f.Close() - return f.Readdir(-1) + return entries, err } // ListFiles lists files/directories in the given directory. The fileMode controls the returned files. @@ -133,13 +125,13 @@ func readDir(layer *Layer, name string) ([]fs.FileInfo, error) { func (l *LayeredFS) ListFiles(name string, fileMode ...bool) ([]string, error) { fileSet := make(container.Set[string]) for _, layer := range l.layers { - infos, err := readDir(layer, name) + entries, err := readDirOptional(layer, name) if err != nil { return nil, err } - for _, info := range infos { - if shouldInclude(info, fileMode...) { - fileSet.Add(info.Name()) + for _, entry := range entries { + if shouldInclude(entry, fileMode...) 
{ + fileSet.Add(entry.Name()) } } } @@ -163,16 +155,16 @@ func listAllFiles(layers []*Layer, name string, fileMode ...bool) ([]string, err var list func(dir string) error list = func(dir string) error { for _, layer := range layers { - infos, err := readDir(layer, dir) + entries, err := readDirOptional(layer, dir) if err != nil { return err } - for _, info := range infos { - path := util.PathJoinRelX(dir, info.Name()) - if shouldInclude(info, fileMode...) { + for _, entry := range entries { + path := util.PathJoinRelX(dir, entry.Name()) + if shouldInclude(entry, fileMode...) { fileSet.Add(path) } - if info.IsDir() { + if entry.IsDir() { if err = list(path); err != nil { return err } diff --git a/modules/eventsource/manager_run.go b/modules/eventsource/manager_run.go index f66dc78c7e..4a42224dda 100644 --- a/modules/eventsource/manager_run.go +++ b/modules/eventsource/manager_run.go @@ -9,6 +9,7 @@ import ( activities_model "code.gitea.io/gitea/models/activities" issues_model "code.gitea.io/gitea/models/issues" + user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" @@ -91,7 +92,13 @@ loop: } for _, userStopwatches := range usersStopwatches { - apiSWs, err := convert.ToStopWatches(ctx, userStopwatches.StopWatches) + u, err := user_model.GetUserByID(ctx, userStopwatches.UserID) + if err != nil { + log.Error("Unable to get user %d: %v", userStopwatches.UserID, err) + continue + } + + apiSWs, err := convert.ToStopWatches(ctx, u, userStopwatches.StopWatches) if err != nil { if !issues_model.IsErrIssueNotExist(err) { log.Error("Unable to APIFormat stopwatches: %v", err) diff --git a/modules/git/attribute/batch.go b/modules/git/attribute/batch.go index 27befdfa25..b1e6387ade 100644 --- a/modules/git/attribute/batch.go +++ b/modules/git/attribute/batch.go @@ -7,7 +7,7 @@ import ( "bytes" "context" "fmt" - "os" + "io" "path/filepath" "time" @@ -20,7 +20,7 @@ import ( type 
BatchChecker struct { attributesNum int repo *git.Repository - stdinWriter *os.File + stdinWriter io.WriteCloser stdOut *nulSeparatedAttributeWriter ctx context.Context cancel context.CancelFunc @@ -60,10 +60,7 @@ func NewBatchChecker(repo *git.Repository, treeish string, attributes []string) }, } - stdinReader, stdinWriter, err := os.Pipe() - if err != nil { - return nil, err - } + stdinWriter, stdinWriterClose := cmd.MakeStdinPipe() checker.stdinWriter = stdinWriter lw := new(nulSeparatedAttributeWriter) @@ -71,23 +68,19 @@ func NewBatchChecker(repo *git.Repository, treeish string, attributes []string) lw.closed = make(chan struct{}) checker.stdOut = lw - go func() { - defer func() { - _ = stdinReader.Close() - _ = lw.Close() - }() - stdErr := new(bytes.Buffer) - err := cmd.WithEnv(envs). - WithDir(repo.Path). - WithStdin(stdinReader). - WithStdout(lw). - WithStderr(stdErr). - Run(ctx) + cmd.WithEnv(envs). + WithDir(repo.Path). + WithStdoutCopy(lw) - if err != nil && !git.IsErrCanceledOrKilled(err) { + go func() { + defer stdinWriterClose() + defer checker.cancel() + defer lw.Close() + + err := cmd.RunWithStderr(ctx) + if err != nil && !gitcmd.IsErrorCanceledOrKilled(err) { log.Error("Attribute checker for commit %s exits with error: %v", treeish, err) } - checker.cancel() }() return checker, nil diff --git a/modules/git/attribute/checker.go b/modules/git/attribute/checker.go index 49c0eb90ef..3eea31e813 100644 --- a/modules/git/attribute/checker.go +++ b/modules/git/attribute/checker.go @@ -68,18 +68,14 @@ func CheckAttributes(ctx context.Context, gitRepo *git.Repository, treeish strin } defer cancel() - stdOut := new(bytes.Buffer) - stdErr := new(bytes.Buffer) - - if err := cmd.WithEnv(append(os.Environ(), envs...)). + stdout, _, err := cmd.WithEnv(append(os.Environ(), envs...)). WithDir(gitRepo.Path). - WithStdout(stdOut). - WithStderr(stdErr). 
- Run(ctx); err != nil { - return nil, fmt.Errorf("failed to run check-attr: %w\n%s\n%s", err, stdOut.String(), stdErr.String()) + RunStdBytes(ctx) + if err != nil { + return nil, fmt.Errorf("failed to run check-attr: %w", err) } - fields := bytes.Split(stdOut.Bytes(), []byte{'\000'}) + fields := bytes.Split(stdout, []byte{'\000'}) if len(fields)%3 != 1 { return nil, errors.New("wrong number of fields in return from check-attr") } diff --git a/modules/git/batch.go b/modules/git/batch.go deleted file mode 100644 index f9e1748b54..0000000000 --- a/modules/git/batch.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2024 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package git - -import ( - "bufio" - "context" -) - -type Batch struct { - cancel context.CancelFunc - Reader *bufio.Reader - Writer WriteCloserError -} - -// NewBatch creates a new batch for the given repository, the Close must be invoked before release the batch -func NewBatch(ctx context.Context, repoPath string) (*Batch, error) { - // Now because of some insanity with git cat-file not immediately failing if not run in a valid git directory we need to run git rev-parse first! - if err := ensureValidGitRepository(ctx, repoPath); err != nil { - return nil, err - } - - var batch Batch - batch.Writer, batch.Reader, batch.cancel = catFileBatch(ctx, repoPath) - return &batch, nil -} - -func NewBatchCheck(ctx context.Context, repoPath string) (*Batch, error) { - // Now because of some insanity with git cat-file not immediately failing if not run in a valid git directory we need to run git rev-parse first! 
- if err := ensureValidGitRepository(ctx, repoPath); err != nil { - return nil, err - } - - var check Batch - check.Writer, check.Reader, check.cancel = catFileBatchCheck(ctx, repoPath) - return &check, nil -} - -func (b *Batch) Close() { - if b.cancel != nil { - b.cancel() - b.Reader = nil - b.Writer = nil - b.cancel = nil - } -} diff --git a/modules/git/batch_reader.go b/modules/git/batch_reader.go deleted file mode 100644 index b5cec130d5..0000000000 --- a/modules/git/batch_reader.go +++ /dev/null @@ -1,324 +0,0 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package git - -import ( - "bufio" - "bytes" - "context" - "io" - "math" - "strconv" - "strings" - - "code.gitea.io/gitea/modules/git/gitcmd" - "code.gitea.io/gitea/modules/log" - - "github.com/djherbis/buffer" - "github.com/djherbis/nio/v3" -) - -// WriteCloserError wraps an io.WriteCloser with an additional CloseWithError function -type WriteCloserError interface { - io.WriteCloser - CloseWithError(err error) error -} - -// ensureValidGitRepository runs git rev-parse in the repository path - thus ensuring that the repository is a valid repository. -// Run before opening git cat-file. -// This is needed otherwise the git cat-file will hang for invalid repositories. -func ensureValidGitRepository(ctx context.Context, repoPath string) error { - stderr := strings.Builder{} - err := gitcmd.NewCommand("rev-parse"). - WithDir(repoPath). - WithStderr(&stderr). 
- Run(ctx) - if err != nil { - return gitcmd.ConcatenateError(err, (&stderr).String()) - } - return nil -} - -// catFileBatchCheck opens git cat-file --batch-check in the provided repo and returns a stdin pipe, a stdout reader and cancel function -func catFileBatchCheck(ctx context.Context, repoPath string) (WriteCloserError, *bufio.Reader, func()) { - batchStdinReader, batchStdinWriter := io.Pipe() - batchStdoutReader, batchStdoutWriter := io.Pipe() - ctx, ctxCancel := context.WithCancel(ctx) - closed := make(chan struct{}) - cancel := func() { - ctxCancel() - _ = batchStdoutReader.Close() - _ = batchStdinWriter.Close() - <-closed - } - - // Ensure cancel is called as soon as the provided context is cancelled - go func() { - <-ctx.Done() - cancel() - }() - - go func() { - stderr := strings.Builder{} - err := gitcmd.NewCommand("cat-file", "--batch-check"). - WithDir(repoPath). - WithStdin(batchStdinReader). - WithStdout(batchStdoutWriter). - WithStderr(&stderr). - WithUseContextTimeout(true). - Run(ctx) - if err != nil { - _ = batchStdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String())) - _ = batchStdinReader.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String())) - } else { - _ = batchStdoutWriter.Close() - _ = batchStdinReader.Close() - } - close(closed) - }() - - // For simplicities sake we'll use a buffered reader to read from the cat-file --batch-check - batchReader := bufio.NewReader(batchStdoutReader) - - return batchStdinWriter, batchReader, cancel -} - -// catFileBatch opens git cat-file --batch in the provided repo and returns a stdin pipe, a stdout reader and cancel function -func catFileBatch(ctx context.Context, repoPath string) (WriteCloserError, *bufio.Reader, func()) { - // We often want to feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary. 
- // so let's create a batch stdin and stdout - batchStdinReader, batchStdinWriter := io.Pipe() - batchStdoutReader, batchStdoutWriter := nio.Pipe(buffer.New(32 * 1024)) - ctx, ctxCancel := context.WithCancel(ctx) - closed := make(chan struct{}) - cancel := func() { - ctxCancel() - _ = batchStdinWriter.Close() - _ = batchStdoutReader.Close() - <-closed - } - - // Ensure cancel is called as soon as the provided context is cancelled - go func() { - <-ctx.Done() - cancel() - }() - - go func() { - stderr := strings.Builder{} - err := gitcmd.NewCommand("cat-file", "--batch"). - WithDir(repoPath). - WithStdin(batchStdinReader). - WithStdout(batchStdoutWriter). - WithStderr(&stderr). - WithUseContextTimeout(true). - Run(ctx) - if err != nil { - _ = batchStdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String())) - _ = batchStdinReader.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String())) - } else { - _ = batchStdoutWriter.Close() - _ = batchStdinReader.Close() - } - close(closed) - }() - - // For simplicities sake we'll us a buffered reader to read from the cat-file --batch - batchReader := bufio.NewReaderSize(batchStdoutReader, 32*1024) - - return batchStdinWriter, batchReader, cancel -} - -// ReadBatchLine reads the header line from cat-file --batch -// We expect: SP SP LF -// then leaving the rest of the stream " LF" to be read -func ReadBatchLine(rd *bufio.Reader) (sha []byte, typ string, size int64, err error) { - typ, err = rd.ReadString('\n') - if err != nil { - return sha, typ, size, err - } - if len(typ) == 1 { - typ, err = rd.ReadString('\n') - if err != nil { - return sha, typ, size, err - } - } - idx := strings.IndexByte(typ, ' ') - if idx < 0 { - log.Debug("missing space typ: %s", typ) - return sha, typ, size, ErrNotExist{ID: string(sha)} - } - sha = []byte(typ[:idx]) - typ = typ[idx+1:] - - idx = strings.IndexByte(typ, ' ') - if idx < 0 { - return sha, typ, size, ErrNotExist{ID: string(sha)} - } - - sizeStr := typ[idx+1 : 
len(typ)-1] - typ = typ[:idx] - - size, err = strconv.ParseInt(sizeStr, 10, 64) - return sha, typ, size, err -} - -// ReadTagObjectID reads a tag object ID hash from a cat-file --batch stream, throwing away the rest of the stream. -func ReadTagObjectID(rd *bufio.Reader, size int64) (string, error) { - var id string - var n int64 -headerLoop: - for { - line, err := rd.ReadBytes('\n') - if err != nil { - return "", err - } - n += int64(len(line)) - idx := bytes.Index(line, []byte{' '}) - if idx < 0 { - continue - } - - if string(line[:idx]) == "object" { - id = string(line[idx+1 : len(line)-1]) - break headerLoop - } - } - - // Discard the rest of the tag - return id, DiscardFull(rd, size-n+1) -} - -// ReadTreeID reads a tree ID from a cat-file --batch stream, throwing away the rest of the stream. -func ReadTreeID(rd *bufio.Reader, size int64) (string, error) { - var id string - var n int64 -headerLoop: - for { - line, err := rd.ReadBytes('\n') - if err != nil { - return "", err - } - n += int64(len(line)) - idx := bytes.Index(line, []byte{' '}) - if idx < 0 { - continue - } - - if string(line[:idx]) == "tree" { - id = string(line[idx+1 : len(line)-1]) - break headerLoop - } - } - - // Discard the rest of the commit - return id, DiscardFull(rd, size-n+1) -} - -// git tree files are a list: -// SP NUL -// -// Unfortunately this 20-byte notation is somewhat in conflict to all other git tools -// Therefore we need some method to convert these binary hashes to hex hashes - -// constant hextable to help quickly convert between binary and hex representation -const hextable = "0123456789abcdef" - -// BinToHexHeash converts a binary Hash into a hex encoded one. Input and output can be the -// same byte slice to support in place conversion without allocations. 
-// This is at least 100x quicker that hex.EncodeToString -func BinToHex(objectFormat ObjectFormat, sha, out []byte) []byte { - for i := objectFormat.FullLength()/2 - 1; i >= 0; i-- { - v := sha[i] - vhi, vlo := v>>4, v&0x0f - shi, slo := hextable[vhi], hextable[vlo] - out[i*2], out[i*2+1] = shi, slo - } - return out -} - -// ParseCatFileTreeLine reads an entry from a tree in a cat-file --batch stream -// This carefully avoids allocations - except where fnameBuf is too small. -// It is recommended therefore to pass in an fnameBuf large enough to avoid almost all allocations -// -// Each line is composed of: -// SP NUL -// -// We don't attempt to convert the raw HASH to save a lot of time -func ParseCatFileTreeLine(objectFormat ObjectFormat, rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) { - var readBytes []byte - - // Read the Mode & fname - readBytes, err = rd.ReadSlice('\x00') - if err != nil { - return mode, fname, sha, n, err - } - idx := bytes.IndexByte(readBytes, ' ') - if idx < 0 { - log.Debug("missing space in readBytes ParseCatFileTreeLine: %s", readBytes) - return mode, fname, sha, n, &ErrNotExist{} - } - - n += idx + 1 - copy(modeBuf, readBytes[:idx]) - if len(modeBuf) >= idx { - modeBuf = modeBuf[:idx] - } else { - modeBuf = append(modeBuf, readBytes[len(modeBuf):idx]...) - } - mode = modeBuf - - readBytes = readBytes[idx+1:] - - // Deal with the fname - copy(fnameBuf, readBytes) - if len(fnameBuf) > len(readBytes) { - fnameBuf = fnameBuf[:len(readBytes)] - } else { - fnameBuf = append(fnameBuf, readBytes[len(fnameBuf):]...) - } - for err == bufio.ErrBufferFull { - readBytes, err = rd.ReadSlice('\x00') - fnameBuf = append(fnameBuf, readBytes...) 
- } - n += len(fnameBuf) - if err != nil { - return mode, fname, sha, n, err - } - fnameBuf = fnameBuf[:len(fnameBuf)-1] - fname = fnameBuf - - // Deal with the binary hash - idx = 0 - length := objectFormat.FullLength() / 2 - for idx < length { - var read int - read, err = rd.Read(shaBuf[idx:length]) - n += read - if err != nil { - return mode, fname, sha, n, err - } - idx += read - } - sha = shaBuf - return mode, fname, sha, n, err -} - -func DiscardFull(rd *bufio.Reader, discard int64) error { - if discard > math.MaxInt32 { - n, err := rd.Discard(math.MaxInt32) - discard -= int64(n) - if err != nil { - return err - } - } - for discard > 0 { - n, err := rd.Discard(int(discard)) - discard -= int64(n) - if err != nil { - return err - } - } - return nil -} diff --git a/modules/git/blob_nogogit.go b/modules/git/blob_nogogit.go index af3ce376d6..837b30fd88 100644 --- a/modules/git/blob_nogogit.go +++ b/modules/git/blob_nogogit.go @@ -6,8 +6,6 @@ package git import ( - "bufio" - "bytes" "io" "code.gitea.io/gitea/modules/log" @@ -25,38 +23,28 @@ type Blob struct { // DataAsync gets a ReadCloser for the contents of a blob without reading it all. // Calling the Close function on the result will discard all unread output. 
-func (b *Blob) DataAsync() (io.ReadCloser, error) { - wr, rd, cancel, err := b.repo.CatFileBatch(b.repo.Ctx) +func (b *Blob) DataAsync() (_ io.ReadCloser, retErr error) { + batch, cancel, err := b.repo.CatFileBatch(b.repo.Ctx) if err != nil { return nil, err } + defer func() { + // if there was an error, cancel the batch right away, + // otherwise let the caller close it + if retErr != nil { + cancel() + } + }() - _, err = wr.Write([]byte(b.ID.String() + "\n")) + info, contentReader, err := batch.QueryContent(b.ID.String()) if err != nil { - cancel() - return nil, err - } - _, _, size, err := ReadBatchLine(rd) - if err != nil { - cancel() return nil, err } b.gotSize = true - b.size = size - - if size < 4096 { - bs, err := io.ReadAll(io.LimitReader(rd, size)) - defer cancel() - if err != nil { - return nil, err - } - _, err = rd.Discard(1) - return io.NopCloser(bytes.NewReader(bs)), err - } - + b.size = info.Size return &blobReader{ - rd: rd, - n: size, + rd: contentReader, + n: info.Size, cancel: cancel, }, nil } @@ -67,30 +55,24 @@ func (b *Blob) Size() int64 { return b.size } - wr, rd, cancel, err := b.repo.CatFileBatchCheck(b.repo.Ctx) + batch, cancel, err := b.repo.CatFileBatch(b.repo.Ctx) if err != nil { log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err) return 0 } defer cancel() - _, err = wr.Write([]byte(b.ID.String() + "\n")) + info, err := batch.QueryInfo(b.ID.String()) if err != nil { log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err) return 0 } - _, _, b.size, err = ReadBatchLine(rd) - if err != nil { - log.Debug("error whilst reading size for %s in %s. 
Error: %v", b.ID.String(), b.repo.Path, err) - return 0 - } - b.gotSize = true - + b.size = info.Size return b.size } type blobReader struct { - rd *bufio.Reader + rd BufferedReader n int64 cancel func() } diff --git a/modules/git/catfile_batch.go b/modules/git/catfile_batch.go new file mode 100644 index 0000000000..d13179f3ec --- /dev/null +++ b/modules/git/catfile_batch.go @@ -0,0 +1,52 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package git + +import ( + "context" + "io" +) + +type BufferedReader interface { + io.Reader + Buffered() int + Peek(n int) ([]byte, error) + Discard(n int) (int, error) + ReadString(sep byte) (string, error) + ReadSlice(sep byte) ([]byte, error) + ReadBytes(sep byte) ([]byte, error) +} + +type CatFileObject struct { + ID string + Type string + Size int64 +} + +type CatFileBatch interface { + // QueryInfo queries the object info from the git repository by its object name using "git cat-file --batch" family commands. + // "git cat-file" accepts "" for the object name, it can be a ref name, object id, etc. https://git-scm.com/docs/gitrevisions + // In Gitea, we only use the simple ref name or object id, no other complex rev syntax like "suffix" or "git describe" although they are supported by git. + QueryInfo(obj string) (*CatFileObject, error) + + // QueryContent is similar to QueryInfo, it queries the object info and additionally returns a reader for its content. + // FIXME: this design still follows the old pattern: the returned BufferedReader is very fragile, + // callers should carefully maintain its lifecycle and discard all unread data. 
+ // TODO: It needs to be refactored to a fully managed Reader stream in the future, don't let callers manually Close or Discard + QueryContent(obj string) (*CatFileObject, BufferedReader, error) +} + +type CatFileBatchCloser interface { + CatFileBatch + Close() +} + +// NewBatch creates a "batch object provider (CatFileBatch)" for the given repository path to retrieve object info and content efficiently. + // The CatFileBatch and the readers created by it should only be used in the same goroutine. +func NewBatch(ctx context.Context, repoPath string) (CatFileBatchCloser, error) { + if DefaultFeatures().SupportCatFileBatchCommand { + return newCatFileBatchCommand(ctx, repoPath) + } + return newCatFileBatchLegacy(ctx, repoPath) +} diff --git a/modules/git/catfile_batch_command.go b/modules/git/catfile_batch_command.go new file mode 100644 index 0000000000..710561f045 --- /dev/null +++ b/modules/git/catfile_batch_command.go @@ -0,0 +1,66 @@ +// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT + +package git + +import ( + "context" + "os" + "path/filepath" + + "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/util" +) + +// catFileBatchCommand implements the CatFileBatch interface using the "cat-file --batch-command" command +// for git version >= 2.36 +// ref: https://git-scm.com/docs/git-cat-file#Documentation/git-cat-file.txt---batch-command +type catFileBatchCommand struct { + ctx context.Context + repoPath string + batch *catFileBatchCommunicator +} + +var _ CatFileBatch = (*catFileBatchCommand)(nil) + +func newCatFileBatchCommand(ctx context.Context, repoPath string) (*catFileBatchCommand, error) { + if _, err := os.Stat(repoPath); err != nil { + return nil, util.NewNotExistErrorf("repo %q doesn't exist", filepath.Base(repoPath)) + } + return &catFileBatchCommand{ctx: ctx, repoPath: repoPath}, nil +} + +func (b *catFileBatchCommand) getBatch() *catFileBatchCommunicator { + if b.batch != nil { + return b.batch + } + b.batch = newCatFileBatch(b.ctx, b.repoPath, gitcmd.NewCommand("cat-file", "--batch-command")) + return b.batch +} + +func (b *catFileBatchCommand) QueryContent(obj string) (*CatFileObject, BufferedReader, error) { + _, err := b.getBatch().reqWriter.Write([]byte("contents " + obj + "\n")) + if err != nil { + return nil, nil, err + } + info, err := catFileBatchParseInfoLine(b.getBatch().respReader) + if err != nil { + return nil, nil, err + } + return info, b.getBatch().respReader, nil +} + +func (b *catFileBatchCommand) QueryInfo(obj string) (*CatFileObject, error) { + _, err := b.getBatch().reqWriter.Write([]byte("info " + obj + "\n")) + if err != nil { + return nil, err + } + return catFileBatchParseInfoLine(b.getBatch().respReader) +} + +func (b *catFileBatchCommand) Close() { + if b.batch != nil { + b.batch.Close() + b.batch = nil + } +} diff --git a/modules/git/catfile_batch_legacy.go b/modules/git/catfile_batch_legacy.go new file mode 100644 index 0000000000..795fc4ce3d --- 
/dev/null +++ b/modules/git/catfile_batch_legacy.go @@ -0,0 +1,81 @@ +// Copyright 2024 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package git + +import ( + "context" + "io" + "os" + "path/filepath" + + "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/util" +) + +// catFileBatchLegacy implements the CatFileBatch interface using the "cat-file --batch" command and "cat-file --batch-check" command +// for git version < 2.36 +// to align with "--batch-command", it creates the two commands for querying object contents and object info separately +// ref: https://git-scm.com/docs/git-cat-file#Documentation/git-cat-file.txt---batch +type catFileBatchLegacy struct { + ctx context.Context + repoPath string + batchContent *catFileBatchCommunicator + batchCheck *catFileBatchCommunicator +} + +var _ CatFileBatchCloser = (*catFileBatchLegacy)(nil) + +func newCatFileBatchLegacy(ctx context.Context, repoPath string) (*catFileBatchLegacy, error) { + if _, err := os.Stat(repoPath); err != nil { + return nil, util.NewNotExistErrorf("repo %q doesn't exist", filepath.Base(repoPath)) + } + return &catFileBatchLegacy{ctx: ctx, repoPath: repoPath}, nil +} + +func (b *catFileBatchLegacy) getBatchContent() *catFileBatchCommunicator { + if b.batchContent != nil { + return b.batchContent + } + b.batchContent = newCatFileBatch(b.ctx, b.repoPath, gitcmd.NewCommand("cat-file", "--batch")) + return b.batchContent +} + +func (b *catFileBatchLegacy) getBatchCheck() *catFileBatchCommunicator { + if b.batchCheck != nil { + return b.batchCheck + } + b.batchCheck = newCatFileBatch(b.ctx, b.repoPath, gitcmd.NewCommand("cat-file", "--batch-check")) + return b.batchCheck +} + +func (b *catFileBatchLegacy) QueryContent(obj string) (*CatFileObject, BufferedReader, error) { + _, err := io.WriteString(b.getBatchContent().reqWriter, obj+"\n") + if err != nil { + return nil, nil, err + } + info, err := 
catFileBatchParseInfoLine(b.getBatchContent().respReader) + if err != nil { + return nil, nil, err + } + return info, b.getBatchContent().respReader, nil +} + +func (b *catFileBatchLegacy) QueryInfo(obj string) (*CatFileObject, error) { + _, err := io.WriteString(b.getBatchCheck().reqWriter, obj+"\n") + if err != nil { + return nil, err + } + return catFileBatchParseInfoLine(b.getBatchCheck().respReader) +} + +func (b *catFileBatchLegacy) Close() { + if b.batchContent != nil { + b.batchContent.Close() + b.batchContent = nil + } + if b.batchCheck != nil { + b.batchCheck.Close() + b.batchCheck = nil + } +} diff --git a/modules/git/catfile_batch_reader.go b/modules/git/catfile_batch_reader.go new file mode 100644 index 0000000000..7d2e496ace --- /dev/null +++ b/modules/git/catfile_batch_reader.go @@ -0,0 +1,243 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package git + +import ( + "bufio" + "bytes" + "context" + "errors" + "io" + "math" + "strconv" + "strings" + + "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/log" +) + +type catFileBatchCommunicator struct { + cancel context.CancelFunc + reqWriter io.Writer + respReader *bufio.Reader + debugGitCmd *gitcmd.Command +} + +func (b *catFileBatchCommunicator) Close() { + if b.cancel != nil { + b.cancel() + b.cancel = nil + } +} + +// newCatFileBatch opens git cat-file --batch in the provided repo and returns a stdin pipe, a stdout reader and cancel function +func newCatFileBatch(ctx context.Context, repoPath string, cmdCatFile *gitcmd.Command) (ret *catFileBatchCommunicator) { + ctx, ctxCancel := context.WithCancelCause(ctx) + + // We often want to feed the commits in order into cat-file --batch, followed by their trees and subtrees as necessary. 
+ stdinWriter, stdoutReader, pipeClose := cmdCatFile.MakeStdinStdoutPipe() + ret = &catFileBatchCommunicator{ + debugGitCmd: cmdCatFile, + cancel: func() { ctxCancel(nil) }, + reqWriter: stdinWriter, + respReader: bufio.NewReaderSize(stdoutReader, 32*1024), // use a buffered reader for rich operations + } + + err := cmdCatFile.WithDir(repoPath).StartWithStderr(ctx) + if err != nil { + log.Error("Unable to start git command %v: %v", cmdCatFile.LogString(), err) + // ideally here it should return the error, but it would require refactoring all callers + // so just return a dummy communicator that does nothing, almost the same behavior as before, not bad + ctxCancel(err) + pipeClose() + return ret + } + + go func() { + err := cmdCatFile.WaitWithStderr() + if err != nil && !errors.Is(err, context.Canceled) { + log.Error("cat-file --batch command failed in repo %s, error: %v", repoPath, err) + } + ctxCancel(err) + pipeClose() + }() + + return ret +} + +// catFileBatchParseInfoLine reads the header line from cat-file --batch +// We expect: SP SP LF +// then leaving the rest of the stream " LF" to be read +func catFileBatchParseInfoLine(rd BufferedReader) (*CatFileObject, error) { + typ, err := rd.ReadString('\n') + if err != nil { + return nil, err + } + if len(typ) == 1 { + typ, err = rd.ReadString('\n') + if err != nil { + return nil, err + } + } + idx := strings.IndexByte(typ, ' ') + if idx < 0 { + return nil, ErrNotExist{} + } + sha := typ[:idx] + typ = typ[idx+1:] + + idx = strings.IndexByte(typ, ' ') + if idx < 0 { + return nil, ErrNotExist{ID: sha} + } + + sizeStr := typ[idx+1 : len(typ)-1] + typ = typ[:idx] + + size, err := strconv.ParseInt(sizeStr, 10, 64) + return &CatFileObject{ID: sha, Type: typ, Size: size}, err +} + +// ReadTagObjectID reads a tag object ID hash from a cat-file --batch stream, throwing away the rest of the stream. 
+func ReadTagObjectID(rd BufferedReader, size int64) (string, error) { + var id string + var n int64 +headerLoop: + for { + line, err := rd.ReadBytes('\n') + if err != nil { + return "", err + } + n += int64(len(line)) + idx := bytes.Index(line, []byte{' '}) + if idx < 0 { + continue + } + + if string(line[:idx]) == "object" { + id = string(line[idx+1 : len(line)-1]) + break headerLoop + } + } + + // Discard the rest of the tag + return id, DiscardFull(rd, size-n+1) +} + +// ReadTreeID reads a tree ID from a cat-file --batch stream, throwing away the rest of the stream. +func ReadTreeID(rd BufferedReader, size int64) (string, error) { + var id string + var n int64 +headerLoop: + for { + line, err := rd.ReadBytes('\n') + if err != nil { + return "", err + } + n += int64(len(line)) + idx := bytes.Index(line, []byte{' '}) + if idx < 0 { + continue + } + + if string(line[:idx]) == "tree" { + id = string(line[idx+1 : len(line)-1]) + break headerLoop + } + } + + // Discard the rest of the commit + return id, DiscardFull(rd, size-n+1) +} + +// git tree files are a list: +// SP NUL +// +// Unfortunately this 20-byte notation is somewhat in conflict to all other git tools +// Therefore we need some method to convert these binary hashes to hex hashes + +// ParseCatFileTreeLine reads an entry from a tree in a cat-file --batch stream +// This carefully avoids allocations - except where fnameBuf is too small. 
+// It is recommended therefore to pass in an fnameBuf large enough to avoid almost all allocations +// +// Each line is composed of: +// SP NUL +// +// We don't attempt to convert the raw HASH to save a lot of time +func ParseCatFileTreeLine(objectFormat ObjectFormat, rd BufferedReader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) { + var readBytes []byte + + // Read the Mode & fname + readBytes, err = rd.ReadSlice('\x00') + if err != nil { + return mode, fname, sha, n, err + } + idx := bytes.IndexByte(readBytes, ' ') + if idx < 0 { + log.Debug("missing space in readBytes ParseCatFileTreeLine: %s", readBytes) + return mode, fname, sha, n, &ErrNotExist{} + } + + n += idx + 1 + copy(modeBuf, readBytes[:idx]) + if len(modeBuf) >= idx { + modeBuf = modeBuf[:idx] + } else { + modeBuf = append(modeBuf, readBytes[len(modeBuf):idx]...) + } + mode = modeBuf + + readBytes = readBytes[idx+1:] + + // Deal with the fname + copy(fnameBuf, readBytes) + if len(fnameBuf) > len(readBytes) { + fnameBuf = fnameBuf[:len(readBytes)] + } else { + fnameBuf = append(fnameBuf, readBytes[len(fnameBuf):]...) + } + for err == bufio.ErrBufferFull { + readBytes, err = rd.ReadSlice('\x00') + fnameBuf = append(fnameBuf, readBytes...) 
+ } + n += len(fnameBuf) + if err != nil { + return mode, fname, sha, n, err + } + fnameBuf = fnameBuf[:len(fnameBuf)-1] + fname = fnameBuf + + // Deal with the binary hash + idx = 0 + length := objectFormat.FullLength() / 2 + for idx < length { + var read int + read, err = rd.Read(shaBuf[idx:length]) + n += read + if err != nil { + return mode, fname, sha, n, err + } + idx += read + } + sha = shaBuf + return mode, fname, sha, n, err +} + +func DiscardFull(rd BufferedReader, discard int64) error { + if discard > math.MaxInt32 { + n, err := rd.Discard(math.MaxInt32) + discard -= int64(n) + if err != nil { + return err + } + } + for discard > 0 { + n, err := rd.Discard(int(discard)) + discard -= int64(n) + if err != nil { + return err + } + } + return nil +} diff --git a/modules/git/catfile_batch_test.go b/modules/git/catfile_batch_test.go new file mode 100644 index 0000000000..8f6b1f5eff --- /dev/null +++ b/modules/git/catfile_batch_test.go @@ -0,0 +1,89 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package git + +import ( + "io" + "path/filepath" + "sync" + "testing" + + "code.gitea.io/gitea/modules/test" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCatFileBatch(t *testing.T) { + defer test.MockVariableValue(&DefaultFeatures().SupportCatFileBatchCommand)() + DefaultFeatures().SupportCatFileBatchCommand = false + t.Run("LegacyCheck", testCatFileBatch) + DefaultFeatures().SupportCatFileBatchCommand = true + t.Run("BatchCommand", testCatFileBatch) +} + +func testCatFileBatch(t *testing.T) { + t.Run("CorruptedGitRepo", func(t *testing.T) { + tmpDir := t.TempDir() + batch, err := NewBatch(t.Context(), tmpDir) + // as long as the directory exists, no error, because we can't really know whether the git repo is valid until we run commands + require.NoError(t, err) + defer batch.Close() + + _, err = batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449") + require.Error(t, err) + _, err = batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449") + require.Error(t, err) + }) + + batch, err := NewBatch(t.Context(), filepath.Join(testReposDir, "repo1_bare")) + require.NoError(t, err) + defer batch.Close() + + t.Run("QueryInfo", func(t *testing.T) { + info, err := batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449") + require.NoError(t, err) + assert.Equal(t, "e2129701f1a4d54dc44f03c93bca0a2aec7c5449", info.ID) + assert.Equal(t, "blob", info.Type) + assert.EqualValues(t, 6, info.Size) + }) + + t.Run("QueryContent", func(t *testing.T) { + info, rd, err := batch.QueryContent("e2129701f1a4d54dc44f03c93bca0a2aec7c5449") + require.NoError(t, err) + assert.Equal(t, "e2129701f1a4d54dc44f03c93bca0a2aec7c5449", info.ID) + assert.Equal(t, "blob", info.Type) + assert.EqualValues(t, 6, info.Size) + + content, err := io.ReadAll(io.LimitReader(rd, info.Size)) + require.NoError(t, err) + require.Equal(t, "file1\n", string(content)) + }) + + t.Run("QueryTerminated", func(t *testing.T) { + 
var c *catFileBatchCommunicator + switch b := batch.(type) { + case *catFileBatchLegacy: + c = b.batchCheck + _, _ = c.reqWriter.Write([]byte("in-complete-line-")) + case *catFileBatchCommand: + c = b.batch + _, _ = c.reqWriter.Write([]byte("info")) + default: + t.FailNow() + return + } + + wg := sync.WaitGroup{} + wg.Go(func() { + buf := make([]byte, 100) + _, _ = c.respReader.Read(buf) + n, errRead := c.respReader.Read(buf) + assert.Zero(t, n) + assert.ErrorIs(t, errRead, io.EOF) // the pipe is closed due to command being killed + }) + c.debugGitCmd.DebugKill() + wg.Wait() + }) +} diff --git a/modules/git/commit.go b/modules/git/commit.go index 1917a72bbf..e66a33ef98 100644 --- a/modules/git/commit.go +++ b/modules/git/commit.go @@ -120,7 +120,7 @@ func CommitChanges(ctx context.Context, repoPath string, opts CommitChangesOptio _, _, err := cmd.WithDir(repoPath).RunStdString(ctx) // No stderr but exit status 1 means nothing to commit. - if err != nil && err.Error() == "exit status 1" { + if gitcmd.IsErrorExitCode(err, 1) { return nil } return err @@ -315,7 +315,7 @@ func GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, err WithDir(repoPath). 
RunStdString(ctx) if err != nil { - if strings.Contains(err.Error(), "exit status 128") { + if gitcmd.IsErrorExitCode(err, 128) { return "", ErrNotExist{shortID, ""} } return "", err diff --git a/modules/git/commit_info_test.go b/modules/git/commit_info_test.go index 14a4174544..1e1697b006 100644 --- a/modules/git/commit_info_test.go +++ b/modules/git/commit_info_test.go @@ -30,28 +30,57 @@ func cloneRepo(tb testing.TB, url string) (string, error) { } func testGetCommitsInfo(t *testing.T, repo1 *Repository) { + type expectedEntryInfo struct { + CommitID string + Size int64 + } + // these test case are specific to the repo1 test repo testCases := []struct { CommitID string Path string - ExpectedIDs map[string]string + ExpectedIDs map[string]expectedEntryInfo ExpectedTreeCommit string }{ - {"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", "", map[string]string{ - "file1.txt": "95bb4d39648ee7e325106df01a621c530863a653", - "file2.txt": "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", + {"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", "", map[string]expectedEntryInfo{ + "file1.txt": { + CommitID: "95bb4d39648ee7e325106df01a621c530863a653", + Size: 6, + }, + "file2.txt": { + CommitID: "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", + Size: 6, + }, }, "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2"}, - {"2839944139e0de9737a044f78b0e4b40d989a9e3", "", map[string]string{ - "file1.txt": "2839944139e0de9737a044f78b0e4b40d989a9e3", - "branch1.txt": "9c9aef8dd84e02bc7ec12641deb4c930a7c30185", + {"2839944139e0de9737a044f78b0e4b40d989a9e3", "", map[string]expectedEntryInfo{ + "file1.txt": { + CommitID: "2839944139e0de9737a044f78b0e4b40d989a9e3", + Size: 15, + }, + "branch1.txt": { + CommitID: "9c9aef8dd84e02bc7ec12641deb4c930a7c30185", + Size: 8, + }, }, "2839944139e0de9737a044f78b0e4b40d989a9e3"}, - {"5c80b0245c1c6f8343fa418ec374b13b5d4ee658", "branch2", map[string]string{ - "branch2.txt": "5c80b0245c1c6f8343fa418ec374b13b5d4ee658", + {"5c80b0245c1c6f8343fa418ec374b13b5d4ee658", "branch2", 
map[string]expectedEntryInfo{ + "branch2.txt": { + CommitID: "5c80b0245c1c6f8343fa418ec374b13b5d4ee658", + Size: 8, + }, }, "5c80b0245c1c6f8343fa418ec374b13b5d4ee658"}, - {"feaf4ba6bc635fec442f46ddd4512416ec43c2c2", "", map[string]string{ - "file1.txt": "95bb4d39648ee7e325106df01a621c530863a653", - "file2.txt": "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", - "foo": "37991dec2c8e592043f47155ce4808d4580f9123", + {"feaf4ba6bc635fec442f46ddd4512416ec43c2c2", "", map[string]expectedEntryInfo{ + "file1.txt": { + CommitID: "95bb4d39648ee7e325106df01a621c530863a653", + Size: 6, + }, + "file2.txt": { + CommitID: "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", + Size: 6, + }, + "foo": { + CommitID: "37991dec2c8e592043f47155ce4808d4580f9123", + Size: 0, + }, }, "feaf4ba6bc635fec442f46ddd4512416ec43c2c2"}, } for _, testCase := range testCases { @@ -93,11 +122,12 @@ func testGetCommitsInfo(t *testing.T, repo1 *Repository) { for _, commitInfo := range commitsInfo { entry := commitInfo.Entry commit := commitInfo.Commit - expectedID, ok := testCase.ExpectedIDs[entry.Name()] + expectedInfo, ok := testCase.ExpectedIDs[entry.Name()] if !assert.True(t, ok) { continue } - assert.Equal(t, expectedID, commit.ID.String()) + assert.Equal(t, expectedInfo.CommitID, commit.ID.String()) + assert.Equal(t, expectedInfo.Size, entry.Size(), entry.Name()) } } } diff --git a/modules/git/diff.go b/modules/git/diff.go index 309d8f4615..a198695fc0 100644 --- a/modules/git/diff.go +++ b/modules/git/diff.go @@ -5,11 +5,9 @@ package git import ( "bufio" - "bytes" "context" "fmt" "io" - "os" "regexp" "strconv" "strings" @@ -17,34 +15,64 @@ import ( "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" ) -// RawDiffType type of a raw diff. +// RawDiffType output format: diff or patch type RawDiffType string -// RawDiffType possible values. 
const ( RawDiffNormal RawDiffType = "diff" RawDiffPatch RawDiffType = "patch" ) // GetRawDiff dumps diff results of repository in given commit ID to io.Writer. -func GetRawDiff(repo *Repository, commitID string, diffType RawDiffType, writer io.Writer) error { - return GetRepoRawDiffForFile(repo, "", commitID, diffType, "", writer) -} - -// GetRepoRawDiffForFile dumps diff results of file in given commit ID to io.Writer according given repository -func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diffType RawDiffType, file string, writer io.Writer) error { - commit, err := repo.GetCommit(endCommit) +func GetRawDiff(repo *Repository, commitID string, diffType RawDiffType, writer io.Writer) (retErr error) { + diffOutput, diffFinish, err := getRepoRawDiffForFile(repo.Ctx, repo, "", commitID, diffType, "") if err != nil { return err } + defer func() { + err := diffFinish() + if retErr == nil { + retErr = err // only return command's error if no previous error + } + }() + _, err = io.Copy(writer, diffOutput) + return err +} + +// GetFileDiffCutAroundLine cuts the old or new part of the diff of a file around a specific line number +func GetFileDiffCutAroundLine( + repo *Repository, startCommit, endCommit, treePath string, + line int64, old bool, numbersOfLine int, +) (_ string, retErr error) { + diffOutput, diffFinish, err := getRepoRawDiffForFile(repo.Ctx, repo, startCommit, endCommit, RawDiffNormal, treePath) + if err != nil { + return "", err + } + defer func() { + err := diffFinish() + if retErr == nil { + retErr = err // only return command's error if no previous error + } + }() + return CutDiffAroundLine(diffOutput, line, old, numbersOfLine) +} + +// getRepoRawDiffForFile returns an io.Reader for the diff results of file in given commit ID +// and a "finish" function to wait for the git command and clean up resources after reading is done. 
+func getRepoRawDiffForFile(ctx context.Context, repo *Repository, startCommit, endCommit string, diffType RawDiffType, file string) (io.Reader, func() gitcmd.RunStdError, error) { + commit, err := repo.GetCommit(endCommit) + if err != nil { + return nil, nil, err + } var files []string if len(file) > 0 { files = append(files, file) } - cmd := gitcmd.NewCommand() + cmd := gitcmd.NewCommand().WithDir(repo.Path) switch diffType { case RawDiffNormal: if len(startCommit) != 0 { @@ -56,7 +84,7 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff } else { c, err := commit.Parent(0) if err != nil { - return err + return nil, nil, err } cmd.AddArguments("diff"). AddOptionFormat("--find-renames=%s", setting.Git.DiffRenameSimilarityThreshold). @@ -71,23 +99,25 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff } else { c, err := commit.Parent(0) if err != nil { - return err + return nil, nil, err } query := fmt.Sprintf("%s...%s", endCommit, c.ID.String()) cmd.AddArguments("format-patch", "--no-signature", "--stdout").AddDynamicArguments(query).AddDashesAndList(files...) } default: - return fmt.Errorf("invalid diffType: %s", diffType) + return nil, nil, util.NewInvalidArgumentErrorf("invalid diff type: %s", diffType) } - stderr := new(bytes.Buffer) - if err = cmd.WithDir(repo.Path). - WithStdout(writer). - WithStderr(stderr). 
- Run(repo.Ctx); err != nil { - return fmt.Errorf("Run: %w - %s", err, stderr) + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + err = cmd.StartWithStderr(ctx) + if err != nil { + stdoutReaderClose() + return nil, nil, err } - return nil + return stdoutReader, func() gitcmd.RunStdError { + stdoutReaderClose() + return cmd.WaitWithStderr() + }, nil } // ParseDiffHunkString parse the diff hunk content and return @@ -290,30 +320,15 @@ func GetAffectedFiles(repo *Repository, branchName, oldCommitID, newCommitID str } oldCommitID = startCommitID } - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - log.Error("Unable to create os.Pipe for %s", repo.Path) - return nil, err - } - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() affectedFiles := make([]string, 0, 32) // Run `git diff --name-only` to get the names of the changed files - err = gitcmd.NewCommand("diff", "--name-only").AddDynamicArguments(oldCommitID, newCommitID). - WithEnv(env). - WithDir(repo.Path). - WithStdout(stdoutWriter). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - // Close the writer end of the pipe to begin processing - _ = stdoutWriter.Close() - defer func() { - // Close the reader on return to terminate the git command if necessary - _ = stdoutReader.Close() - }() + cmd := gitcmd.NewCommand("diff", "--name-only").AddDynamicArguments(oldCommitID, newCommitID) + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() + err := cmd.WithEnv(env).WithDir(repo.Path). 
+ WithPipelineFunc(func(ctx gitcmd.Context) error { // Now scan the output from the command scanner := bufio.NewScanner(stdoutReader) for scanner.Scan() { diff --git a/modules/git/error.go b/modules/git/error.go index d4b5412da9..1b7bdca043 100644 --- a/modules/git/error.go +++ b/modules/git/error.go @@ -4,8 +4,6 @@ package git import ( - "context" - "errors" "fmt" "strings" @@ -143,10 +141,3 @@ func IsErrMoreThanOne(err error) bool { func (err *ErrMoreThanOne) Error() string { return fmt.Sprintf("ErrMoreThanOne Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut) } - -func IsErrCanceledOrKilled(err error) bool { - // When "cancel()" a git command's context, the returned error of "Run()" could be one of them: - // - context.Canceled - // - *exec.ExitError: "signal: killed" - return err != nil && (errors.Is(err, context.Canceled) || err.Error() == "signal: killed") -} diff --git a/modules/git/foreachref/parser.go b/modules/git/foreachref/parser.go index fa2ef316c7..913431795f 100644 --- a/modules/git/foreachref/parser.go +++ b/modules/git/foreachref/parser.go @@ -30,9 +30,11 @@ type Parser struct { func NewParser(r io.Reader, format Format) *Parser { scanner := bufio.NewScanner(r) - // default MaxScanTokenSize = 64 kiB may be too small for some references, - // so allow the buffer to grow up to 4x if needed - scanner.Buffer(nil, 4*bufio.MaxScanTokenSize) + // default Scanner.MaxScanTokenSize = 64 kiB may be too small for some references, + // so allow the buffer to be large enough in case the ref has long content (e.g.: a tag with long message) + // as long as it doesn't exceed some reasonable limit (4 MiB here, or MAX_DISPLAY_FILE_SIZE=8MiB), it is OK + // there are still some choices: 1. add a config option for the limit; 2. 
don't use scanner and write our own parser to fully handle large contents + scanner.Buffer(nil, 4*1024*1024) // in addition to the reference delimiter we specified in the --format, // `git for-each-ref` will always add a newline after every reference. diff --git a/modules/git/git.go b/modules/git/git.go index 6d2c643b33..37371ac59f 100644 --- a/modules/git/git.go +++ b/modules/git/git.go @@ -12,25 +12,27 @@ import ( "path/filepath" "runtime" "strings" - "time" "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/tempdir" "github.com/hashicorp/go-version" ) -const RequiredVersion = "2.0.0" // the minimum Git version required +const RequiredVersion = "2.6.0" // the minimum Git version required type Features struct { gitVersion *version.Version - UsingGogit bool - SupportProcReceive bool // >= 2.29 - SupportHashSha256 bool // >= 2.42, SHA-256 repositories no longer an ‘experimental curiosity’ - SupportedObjectFormats []ObjectFormat // sha1, sha256 - SupportCheckAttrOnBare bool // >= 2.40 + UsingGogit bool + SupportProcReceive bool // >= 2.29 + SupportHashSha256 bool // >= 2.42, SHA-256 repositories no longer an ‘experimental curiosity’ + SupportedObjectFormats []ObjectFormat // sha1, sha256 + SupportCheckAttrOnBare bool // >= 2.40 + SupportCatFileBatchCommand bool // >= 2.36, support `git cat-file --batch-command` + SupportGitMergeTree bool // >= 2.40 // we also need "--merge-base" } var defaultFeatures *Features @@ -75,6 +77,8 @@ func loadGitVersionFeatures() (*Features, error) { features.SupportedObjectFormats = append(features.SupportedObjectFormats, Sha256ObjectFormat) } features.SupportCheckAttrOnBare = features.CheckVersionAtLeast("2.40") + features.SupportCatFileBatchCommand = features.CheckVersionAtLeast("2.36") + features.SupportGitMergeTree = features.CheckVersionAtLeast("2.40") // we also need "--merge-base" return features, nil } @@ -137,10 +141,6 @@ func 
InitSimple() error { log.Warn("git module has been initialized already, duplicate init may work but it's better to fix it") } - if setting.Git.Timeout.Default > 0 { - gitcmd.SetDefaultCommandExecutionTimeout(time.Duration(setting.Git.Timeout.Default) * time.Second) - } - if err := gitcmd.SetExecutablePath(setting.Git.Path); err != nil { return err } @@ -176,3 +176,25 @@ func InitFull() (err error) { return syncGitConfig(context.Background()) } + +// RunGitTests helps to init the git module and run tests. +// FIXME: GIT-PACKAGE-DEPENDENCY: the dependency is not right, setting.Git.HomePath is initialized in this package but used in gitcmd package +func RunGitTests(m interface{ Run() int }) { + fatalf := func(exitCode int, format string, args ...any) { + _, _ = fmt.Fprintf(os.Stderr, format, args...) + os.Exit(exitCode) + } + gitHomePath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("git-home") + if err != nil { + fatalf(1, "unable to create temp dir: %s", err.Error()) + } + defer cleanup() + + setting.Git.HomePath = gitHomePath + if err = InitFull(); err != nil { + fatalf(1, "failed to call Init: %s", err.Error()) + } + if exitCode := m.Run(); exitCode != 0 { + fatalf(exitCode, "run test failed, ExitCode=%d", exitCode) + } +} diff --git a/modules/git/git_test.go b/modules/git/git_test.go index 7a8ca74b01..44c018dd74 100644 --- a/modules/git/git_test.go +++ b/modules/git/git_test.go @@ -4,42 +4,14 @@ package git import ( - "fmt" - "os" "testing" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/tempdir" - "github.com/hashicorp/go-version" "github.com/stretchr/testify/assert" ) -func testRun(m *testing.M) error { - gitHomePath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("git-home") - if err != nil { - return fmt.Errorf("unable to create temp dir: %w", err) - } - defer cleanup() - - setting.Git.HomePath = gitHomePath - - if err = InitFull(); err != nil { - return fmt.Errorf("failed to call Init: %w", err) - } 
- - exitCode := m.Run() - if exitCode != 0 { - return fmt.Errorf("run test failed, ExitCode=%d", exitCode) - } - return nil -} - func TestMain(m *testing.M) { - if err := testRun(m); err != nil { - _, _ = fmt.Fprintf(os.Stderr, "Test failed: %v", err) - os.Exit(1) - } + RunGitTests(m) } func TestParseGitVersion(t *testing.T) { diff --git a/modules/git/gitcmd/command.go b/modules/git/gitcmd/command.go index ff2827bd6c..f780cdf6c9 100644 --- a/modules/git/gitcmd/command.go +++ b/modules/git/gitcmd/command.go @@ -13,7 +13,6 @@ import ( "os" "os/exec" "path/filepath" - "runtime" "strings" "time" @@ -29,24 +28,32 @@ import ( // In most cases, it shouldn't be used. Use AddXxx function instead type TrustedCmdArgs []internal.CmdArg -// defaultCommandExecutionTimeout default command execution timeout duration -var defaultCommandExecutionTimeout = 360 * time.Second - -func SetDefaultCommandExecutionTimeout(timeout time.Duration) { - defaultCommandExecutionTimeout = timeout -} - -// DefaultLocale is the default LC_ALL to run git commands in. -const DefaultLocale = "C" - // Command represents a command with its subcommands or arguments. 
type Command struct { + callerInfo string prog string args []string - brokenArgs []string - cmd *exec.Cmd // for debug purpose only + preErrors []error configArgs []string opts runOpts + + cmd *exec.Cmd + + cmdCtx context.Context + cmdCancel process.CancelCauseFunc + cmdFinished process.FinishedFunc + cmdStartTime time.Time + + parentPipeFiles []*os.File + parentPipeReaders []*os.File + childrenPipeFiles []*os.File + + // only os.Pipe and in-memory buffers can work with Stdin safely, see https://github.com/golang/go/issues/77227 if the command would exit unexpectedly + cmdStdin io.Reader + cmdStdout io.Writer + cmdStderr io.Writer + + cmdManagedStderr *bytes.Buffer } func logArgSanitize(arg string) string { @@ -97,6 +104,10 @@ func NewCommand(args ...internal.CmdArg) *Command { } } +func (c *Command) handlePreErrorBrokenCommand(arg string) { + c.preErrors = append(c.preErrors, util.ErrorWrap(ErrBrokenCommand, `broken git command argument %q`, arg)) +} + // isSafeArgumentValue checks if the argument is safe to be used as a value (not an option) func isSafeArgumentValue(s string) bool { return s == "" || s[0] != '-' @@ -124,7 +135,7 @@ func (c *Command) AddArguments(args ...internal.CmdArg) *Command { // The values are treated as dynamic argument values. It equals to: AddArguments("--opt") then AddDynamicArguments(val). func (c *Command) AddOptionValues(opt internal.CmdArg, args ...string) *Command { if !isValidArgumentOption(string(opt)) { - c.brokenArgs = append(c.brokenArgs, string(opt)) + c.handlePreErrorBrokenCommand(string(opt)) return c } c.args = append(c.args, string(opt)) @@ -136,12 +147,12 @@ func (c *Command) AddOptionValues(opt internal.CmdArg, args ...string) *Command // For example: AddOptionFormat("--opt=%s %s", val1, val2) means 1 argument: {"--opt=val1 val2"}. 
func (c *Command) AddOptionFormat(opt string, args ...any) *Command { if !isValidArgumentOption(opt) { - c.brokenArgs = append(c.brokenArgs, opt) + c.handlePreErrorBrokenCommand(opt) return c } // a quick check to make sure the format string matches the number of arguments, to find low-level mistakes ASAP if strings.Count(strings.ReplaceAll(opt, "%%", ""), "%") != len(args) { - c.brokenArgs = append(c.brokenArgs, opt) + c.handlePreErrorBrokenCommand(opt) return c } s := fmt.Sprintf(opt, args...) @@ -155,10 +166,10 @@ func (c *Command) AddOptionFormat(opt string, args ...any) *Command { func (c *Command) AddDynamicArguments(args ...string) *Command { for _, arg := range args { if !isSafeArgumentValue(arg) { - c.brokenArgs = append(c.brokenArgs, arg) + c.handlePreErrorBrokenCommand(arg) } } - if len(c.brokenArgs) != 0 { + if len(c.preErrors) != 0 { return c } c.args = append(c.args, args...) @@ -178,7 +189,7 @@ func (c *Command) AddDashesAndList(list ...string) *Command { func (c *Command) AddConfig(key, value string) *Command { kv := key + "=" + value if !isSafeArgumentValue(kv) { - c.brokenArgs = append(c.brokenArgs, key) + c.handlePreErrorBrokenCommand(kv) } else { c.configArgs = append(c.configArgs, "-c", kv) } @@ -195,11 +206,9 @@ func ToTrustedCmdArgs(args []string) TrustedCmdArgs { return ret } -// runOpts represents parameters to run the command. If UseContextTimeout is specified, then Timeout is ignored. type runOpts struct { - Env []string - Timeout time.Duration - UseContextTimeout bool + Env []string + Timeout time.Duration // Dir is the working dir for the git command, however: // FIXME: this could be incorrect in many cases, for example: @@ -209,21 +218,7 @@ type runOpts struct { // The correct approach is to use `--git-dir" global argument Dir string - Stdout, Stderr io.Writer - - // Stdin is used for passing input to the command - // The caller must make sure the Stdin writer is closed properly to finish the Run function. 
- // Otherwise, the Run function may hang for long time or forever, especially when the Git's context deadline is not the same as the caller's. - // Some common mistakes: - // * `defer stdinWriter.Close()` then call `cmd.Run()`: the Run() would never return if the command is killed by timeout - // * `go { case <- parentContext.Done(): stdinWriter.Close() }` with `cmd.Run(DefaultTimeout)`: the command would have been killed by timeout but the Run doesn't return until stdinWriter.Close() - // * `go { if stdoutReader.Read() err != nil: stdinWriter.Close() }` with `cmd.Run()`: the stdoutReader may never return error if the command is killed by timeout - // In the future, ideally the git module itself should have full control of the stdin, to avoid such problems and make it easier to refactor to a better architecture. - Stdin io.Reader - - PipelineFunc func(context.Context, context.CancelFunc) error - - callerInfo string + PipelineFunc func(Context) error } func commonBaseEnvs() []string { @@ -254,7 +249,7 @@ func commonBaseEnvs() []string { // CommonGitCmdEnvs returns the common environment variables for a "git" command. func CommonGitCmdEnvs() []string { return append(commonBaseEnvs(), []string{ - "LC_ALL=" + DefaultLocale, + "LC_ALL=C", // ensure git output is in English, error messages are parsed in English "GIT_TERMINAL_PROMPT=0", // avoid prompting for credentials interactively, supported since git v2.3 }...) 
} @@ -281,42 +276,102 @@ func (c *Command) WithTimeout(timeout time.Duration) *Command { return c } -func (c *Command) WithStdout(stdout io.Writer) *Command { - c.opts.Stdout = stdout +func (c *Command) makeStdoutStderr(w *io.Writer) (PipeReader, func()) { + pr, pw, err := os.Pipe() + if err != nil { + c.preErrors = append(c.preErrors, err) + return &pipeNull{err}, func() {} + } + c.childrenPipeFiles = append(c.childrenPipeFiles, pw) + c.parentPipeFiles = append(c.parentPipeFiles, pr) + c.parentPipeReaders = append(c.parentPipeReaders, pr) + *w /* stdout, stderr */ = pw + return &pipeReader{f: pr}, func() { pr.Close() } +} + +// MakeStdinPipe creates a writer for the command's stdin. +// The returned closer function must be called by the caller to close the pipe. +func (c *Command) MakeStdinPipe() (writer PipeWriter, closer func()) { + pr, pw, err := os.Pipe() + if err != nil { + c.preErrors = append(c.preErrors, err) + return &pipeNull{err}, func() {} + } + c.childrenPipeFiles = append(c.childrenPipeFiles, pr) + c.parentPipeFiles = append(c.parentPipeFiles, pw) + c.cmdStdin = pr + return &pipeWriter{pw}, func() { pw.Close() } +} + +// MakeStdoutPipe creates a reader for the command's stdout. +// The returned closer function must be called by the caller to close the pipe. +// After the pipe reader is closed, the unread data will be discarded. +func (c *Command) MakeStdoutPipe() (reader PipeReader, closer func()) { + return c.makeStdoutStderr(&c.cmdStdout) +} + +// MakeStderrPipe is like MakeStdoutPipe, but for stderr. 
+func (c *Command) MakeStderrPipe() (reader PipeReader, closer func()) { + return c.makeStdoutStderr(&c.cmdStderr) +} + +func (c *Command) MakeStdinStdoutPipe() (stdin PipeWriter, stdout PipeReader, closer func()) { + stdin, stdinClose := c.MakeStdinPipe() + stdout, stdoutClose := c.MakeStdoutPipe() + return stdin, stdout, func() { + stdinClose() + stdoutClose() + } +} + +func (c *Command) WithStdinBytes(stdin []byte) *Command { + c.cmdStdin = bytes.NewReader(stdin) return c } -func (c *Command) WithStderr(stderr io.Writer) *Command { - c.opts.Stderr = stderr +func (c *Command) WithStdoutBuffer(w PipeBufferWriter) *Command { + c.cmdStdout = w return c } -func (c *Command) WithStdin(stdin io.Reader) *Command { - c.opts.Stdin = stdin +// WithStdinCopy and WithStdoutCopy are general functions that accept any io.Reader / io.Writer. +// In this case, Golang exec.Cmd will start new internal goroutines to do io.Copy between pipes and provided Reader/Writer. +// If the reader or writer is blocked and never returns, then the io.Copy won't finish, then exec.Cmd.Wait won't return, which may cause deadlocks. +// A typical deadlock example is: +// * `r,w:=io.Pipe(); cmd.Stdin=r; defer w.Close(); cmd.Run()`: the Run() will never return because stdin reader is blocked forever and w.Close() will never be called. +// If the reader/writer won't block forever (for example: read from a file or buffer), then these functions are safe to use. +func (c *Command) WithStdinCopy(w io.Reader) *Command { + c.cmdStdin = w return c } -func (c *Command) WithPipelineFunc(f func(context.Context, context.CancelFunc) error) *Command { +func (c *Command) WithStdoutCopy(w io.Writer) *Command { + c.cmdStdout = w + return c +} + +// WithPipelineFunc sets the pipeline function for the command. +// The pipeline function will be called in the Run / Wait function after the command is started successfully. +// The function can read/write from/to the command's stdio pipes (if any). 
+// The pipeline function can cancel (kill) the command by calling ctx.CancelPipeline before the command finishes. +// The returned error of Run / Wait can be joined errors from the pipeline function, context cause, and command exit error. +// Caller can get the pipeline function's error (if any) by UnwrapPipelineError. +func (c *Command) WithPipelineFunc(f func(ctx Context) error) *Command { c.opts.PipelineFunc = f return c } -func (c *Command) WithUseContextTimeout(useContextTimeout bool) *Command { - c.opts.UseContextTimeout = useContextTimeout - return c -} - // WithParentCallerInfo can be used to set the caller info (usually function name) of the parent function of the caller. // For most cases, "Run" family functions can get its caller info automatically // But if you need to call "Run" family functions in a wrapper function: "FeatureFunc -> GeneralWrapperFunc -> RunXxx", // then you can to call this function in GeneralWrapperFunc to set the caller info of FeatureFunc. // The caller info can only be set once. func (c *Command) WithParentCallerInfo(optInfo ...string) *Command { - if c.opts.callerInfo != "" { + if c.callerInfo != "" { return c } if len(optInfo) > 0 { - c.opts.callerInfo = optInfo[0] + c.callerInfo = optInfo[0] return c } skip := 1 /*parent "wrap/run" functions*/ + 1 /*this function*/ @@ -325,135 +380,174 @@ func (c *Command) WithParentCallerInfo(optInfo ...string) *Command { if pos := strings.LastIndex(callerInfo, "/"); pos >= 0 { callerInfo = callerInfo[pos+1:] } - c.opts.callerInfo = callerInfo + c.callerInfo = callerInfo return c } -// Run runs the command -func (c *Command) Run(ctx context.Context) error { - if len(c.brokenArgs) != 0 { - log.Error("git command is broken: %s, broken args: %s", c.LogString(), strings.Join(c.brokenArgs, " ")) - return ErrBrokenCommand +func (c *Command) Start(ctx context.Context) (retErr error) { + if c.cmd != nil { + // this is a programming error, it will cause serious deadlock problems, so it must be fixed. 
+ panic("git command has already been started") } - // We must not change the provided options - timeout := c.opts.Timeout - if timeout <= 0 { - timeout = defaultCommandExecutionTimeout + defer func() { + c.closePipeFiles(c.childrenPipeFiles) + if retErr != nil { + // release the pipes to avoid resource leak since the command failed to start + c.closePipeFiles(c.parentPipeFiles) + // if error occurs, we must also finish the task, otherwise, cmdFinished will be called in "Wait" function + if c.cmdFinished != nil { + c.cmdFinished() + } + } + }() + + if len(c.preErrors) != 0 { + // In most cases, such error shouldn't happen. If it happens, log it as error level with more details + err := errors.Join(c.preErrors...) + log.Error("git command: %s, error: %s", c.LogString(), err) + return err } cmdLogString := c.LogString() - if c.opts.callerInfo == "" { + if c.callerInfo == "" { c.WithParentCallerInfo() } // these logs are for debugging purposes only, so no guarantee of correctness or stability - desc := fmt.Sprintf("git.Run(by:%s, repo:%s): %s", c.opts.callerInfo, logArgSanitize(c.opts.Dir), cmdLogString) + desc := fmt.Sprintf("git.Run(by:%s, repo:%s): %s", c.callerInfo, logArgSanitize(c.opts.Dir), cmdLogString) log.Debug("git.Command: %s", desc) _, span := gtprof.GetTracer().Start(ctx, gtprof.TraceSpanGitRun) defer span.End() - span.SetAttributeString(gtprof.TraceAttrFuncCaller, c.opts.callerInfo) + span.SetAttributeString(gtprof.TraceAttrFuncCaller, c.callerInfo) span.SetAttributeString(gtprof.TraceAttrGitCommand, cmdLogString) - var cancel context.CancelFunc - var finished context.CancelFunc - - if c.opts.UseContextTimeout { - ctx, cancel, finished = process.GetManager().AddContext(ctx, desc) + if c.opts.Timeout <= 0 { + c.cmdCtx, c.cmdCancel, c.cmdFinished = process.GetManager().AddContext(ctx, desc) } else { - ctx, cancel, finished = process.GetManager().AddContextTimeout(ctx, timeout, desc) + c.cmdCtx, c.cmdCancel, c.cmdFinished = 
process.GetManager().AddContextTimeout(ctx, c.opts.Timeout, desc) } - defer finished() - startTime := time.Now() + c.cmdStartTime = time.Now() - cmd := exec.CommandContext(ctx, c.prog, append(c.configArgs, c.args...)...) - c.cmd = cmd // for debug purpose only + c.cmd = exec.CommandContext(c.cmdCtx, c.prog, append(c.configArgs, c.args...)...) if c.opts.Env == nil { - cmd.Env = os.Environ() + c.cmd.Env = os.Environ() } else { - cmd.Env = c.opts.Env + c.cmd.Env = c.opts.Env } - process.SetSysProcAttribute(cmd) - cmd.Env = append(cmd.Env, CommonGitCmdEnvs()...) - cmd.Dir = c.opts.Dir - cmd.Stdout = c.opts.Stdout - cmd.Stderr = c.opts.Stderr - cmd.Stdin = c.opts.Stdin - if err := cmd.Start(); err != nil { - return err + process.SetSysProcAttribute(c.cmd) + c.cmd.Env = append(c.cmd.Env, CommonGitCmdEnvs()...) + c.cmd.Dir = c.opts.Dir + c.cmd.Stdout = c.cmdStdout + c.cmd.Stdin = c.cmdStdin + c.cmd.Stderr = c.cmdStderr + return c.cmd.Start() +} + +func (c *Command) closePipeFiles(files []*os.File) { + for _, f := range files { + _ = f.Close() } +} + +func (c *Command) discardPipeReaders(files []*os.File) { + for _, f := range files { + _, _ = io.Copy(io.Discard, f) + } +} + +func (c *Command) Wait() error { + defer func() { + // The reader in another goroutine might be still reading the stdout, so we shouldn't close the pipes here + // MakeStdoutPipe returns a closer function to force callers to close the pipe correctly + // Here we only need to mark the command as finished + c.cmdFinished() + }() if c.opts.PipelineFunc != nil { - err := c.opts.PipelineFunc(ctx, cancel) - if err != nil { - cancel() - _ = cmd.Wait() - return err + errPipeline := c.opts.PipelineFunc(&cmdContext{Context: c.cmdCtx, cmd: c}) + + if context.Cause(c.cmdCtx) == nil { + // if the context is not canceled explicitly, we need to discard the unread data, + // and wait for the command to exit normally, and then get its exit code + c.discardPipeReaders(c.parentPipeReaders) + } // else: canceled command 
will be killed, and the exit code is caused by kill + + // after the pipeline function returns, we can safely close the pipes, then wait for the command to exit + c.closePipeFiles(c.parentPipeFiles) + errWait := c.cmd.Wait() + errCause := context.Cause(c.cmdCtx) // in case the cause is set during Wait(), get the final cancel cause + + if unwrapped, ok := UnwrapPipelineError(errCause); ok { + if unwrapped != errPipeline { + panic("unwrapped context pipeline error should be the same one returned by pipeline function") + } + if unwrapped == nil { + // the pipeline function declares that there is no error, and it cancels (kills) the command ahead, + // so we should ignore the errors from "wait" and "cause" + errWait, errCause = nil, nil + } } + + // some legacy code still need to access the error returned by pipeline function by "==" but not "errors.Is" + // so we need to make sure the original error is able to be unwrapped by UnwrapPipelineError + return errors.Join(wrapPipelineError(errPipeline), errCause, errWait) } - err := cmd.Wait() - elapsed := time.Since(startTime) + // there might be other goroutines using the context or pipes, so we just wait for the command to finish + errWait := c.cmd.Wait() + elapsed := time.Since(c.cmdStartTime) if elapsed > time.Second { - log.Debug("slow git.Command.Run: %s (%s)", c, elapsed) + log.Debug("slow git.Command.Run: %s (%s)", c, elapsed) // TODO: no need to log this for long-running commands } - // We need to check if the context is canceled by the program on Windows. - // This is because Windows does not have signal checking when terminating the process. - // It always returns exit code 1, unlike Linux, which has many exit codes for signals. - // `err.Error()` returns "exit status 1" when using the `git check-attr` command after the context is canceled. 
- if runtime.GOOS == "windows" && - err != nil && - (err.Error() == "" || err.Error() == "exit status 1") && - cmd.ProcessState.ExitCode() == 1 && - ctx.Err() == context.Canceled { - return ctx.Err() - } + // Here the logic is different from "PipelineFunc" case, + // because PipelineFunc can return error if it fails, it knows whether it succeeds or fails. + // But in normal case, the caller just runs the git command, the command's exit code is the source of truth. + // If the caller need to know whether the command error is caused by cancellation, it should check the "err" by itself. + errCause := context.Cause(c.cmdCtx) + return errors.Join(errCause, errWait) +} - if err != nil && ctx.Err() != context.DeadlineExceeded { +func (c *Command) StartWithStderr(ctx context.Context) RunStdError { + if c.cmdStderr != nil { + panic("caller-provided stderr receiver doesn't work with managed stderr buffer") + } + c.cmdManagedStderr = &bytes.Buffer{} + c.cmdStderr = c.cmdManagedStderr + err := c.Start(ctx) + if err != nil { + return &runStdError{err: err} + } + return nil +} + +func (c *Command) WaitWithStderr() RunStdError { + if c.cmdManagedStderr == nil { + panic("managed stderr buffer is not initialized") + } + errWait := c.Wait() + if errWait == nil { + // if no exec error but only stderr output, the stderr output is still saved in "c.cmdManagedStderr" and can be read later + return nil + } + return &runStdError{err: errWait, stderr: util.UnsafeBytesToString(c.cmdManagedStderr.Bytes())} +} + +func (c *Command) RunWithStderr(ctx context.Context) RunStdError { + if err := c.StartWithStderr(ctx); err != nil { + return &runStdError{err: err} + } + return c.WaitWithStderr() +} + +func (c *Command) Run(ctx context.Context) (err error) { + if err = c.Start(ctx); err != nil { return err } - - return ctx.Err() -} - -type RunStdError interface { - error - Unwrap() error - Stderr() string -} - -type runStdError struct { - err error - stderr string - errMsg string -} - -func (r 
*runStdError) Error() string { - // FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message - // But a lof of code only checks `strings.Contains(err.Error(), "git error")` - if r.errMsg == "" { - r.errMsg = ConcatenateError(r.err, r.stderr).Error() - } - return r.errMsg -} - -func (r *runStdError) Unwrap() error { - return r.err -} - -func (r *runStdError) Stderr() string { - return r.stderr -} - -func IsErrorExitCode(err error, code int) bool { - var exitError *exec.ExitError - if errors.As(err, &exitError) { - return exitError.ExitCode() == code - } - return false + return c.Wait() } // RunStdString runs the command and returns stdout/stderr as string. and store stderr to returned error (err combined with stderr). @@ -467,22 +561,16 @@ func (c *Command) RunStdBytes(ctx context.Context) (stdout, stderr []byte, runEr return c.WithParentCallerInfo().runStdBytes(ctx) } -func (c *Command) runStdBytes(ctx context.Context) ( /*stdout*/ []byte /*stderr*/, []byte /*runErr*/, RunStdError) { - if c.opts.Stdout != nil || c.opts.Stderr != nil { - // we must panic here, otherwise there would be bugs if developers set Stdin/Stderr by mistake, and it would be very difficult to debug +func (c *Command) runStdBytes(ctx context.Context) ([]byte, []byte, RunStdError) { + if c.cmdStdout != nil || c.cmdStderr != nil { + // it must panic here, otherwise there would be bugs if developers set other Stdin/Stderr by mistake, and it would be very difficult to debug panic("stdout and stderr field must be nil when using RunStdBytes") } stdoutBuf := &bytes.Buffer{} - stderrBuf := &bytes.Buffer{} - err := c.WithParentCallerInfo(). - WithStdout(stdoutBuf). - WithStderr(stderrBuf). 
- Run(ctx) - if err != nil { - // FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message - // But a lot of code depends on it, so we have to keep this behavior - return nil, stderrBuf.Bytes(), &runStdError{err: err, stderr: util.UnsafeBytesToString(stderrBuf.Bytes())} - } - // even if there is no err, there could still be some stderr output - return stdoutBuf.Bytes(), stderrBuf.Bytes(), nil + err := c.WithParentCallerInfo().WithStdoutBuffer(stdoutBuf).RunWithStderr(ctx) + return stdoutBuf.Bytes(), c.cmdManagedStderr.Bytes(), err +} + +func (c *Command) DebugKill() { + _ = c.cmd.Process.Kill() } diff --git a/modules/git/gitcmd/command_race_test.go b/modules/git/gitcmd/command_race_test.go deleted file mode 100644 index c2f0b124a2..0000000000 --- a/modules/git/gitcmd/command_race_test.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2017 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -//go:build race - -package gitcmd - -import ( - "context" - "testing" - "time" -) - -func TestRunWithContextNoTimeout(t *testing.T) { - maxLoops := 10 - - // 'git --version' does not block so it must be finished before the timeout triggered. - cmd := NewCommand("--version") - for i := 0; i < maxLoops; i++ { - if err := cmd.Run(t.Context()); err != nil { - t.Fatal(err) - } - } -} - -func TestRunWithContextTimeout(t *testing.T) { - maxLoops := 10 - - // 'git hash-object --stdin' blocks on stdin so we can have the timeout triggered. 
- cmd := NewCommand("hash-object", "--stdin") - for i := 0; i < maxLoops; i++ { - if err := cmd.WithTimeout(1 * time.Millisecond).Run(t.Context()); err != nil { - if err != context.DeadlineExceeded { - t.Fatalf("Testing %d/%d: %v", i, maxLoops, err) - } - } - } -} diff --git a/modules/git/gitcmd/command_test.go b/modules/git/gitcmd/command_test.go index 1ba8b2e3e4..86771f499f 100644 --- a/modules/git/gitcmd/command_test.go +++ b/modules/git/gitcmd/command_test.go @@ -4,17 +4,22 @@ package gitcmd import ( + "context" "fmt" "os" "testing" + "time" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/tempdir" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestMain(m *testing.M) { + // FIXME: GIT-PACKAGE-DEPENDENCY: the dependency is not right. + // "setting.Git.HomePath" is initialized in "git" package but really used in "gitcmd" package gitHomePath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("git-home") if err != nil { _, _ = fmt.Fprintf(os.Stderr, "unable to create temp dir: %v", err) @@ -42,7 +47,7 @@ func TestRunWithContextStd(t *testing.T) { assert.Equal(t, stderr, err.Stderr()) assert.Equal(t, "fatal: Not a valid object name no-such\n", err.Stderr()) // FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message - assert.Equal(t, "exit status 128 - fatal: Not a valid object name no-such\n", err.Error()) + assert.Equal(t, "exit status 128 - fatal: Not a valid object name no-such", err.Error()) assert.Empty(t, stdout) } } @@ -54,7 +59,7 @@ func TestRunWithContextStd(t *testing.T) { assert.Equal(t, string(stderr), err.Stderr()) assert.Equal(t, "fatal: Not a valid object name no-such\n", err.Stderr()) // FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message - assert.Equal(t, "exit status 128 - fatal: Not a valid object name no-such\n", err.Error()) + assert.Equal(t, "exit status 128 - fatal: Not a valid object name no-such", 
err.Error()) assert.Empty(t, stdout) } } @@ -97,3 +102,29 @@ func TestCommandString(t *testing.T) { cmd = NewCommand("url: https://a:b@c/", "/root/dir-a/dir-b") assert.Equal(t, cmd.prog+` "url: https://sanitized-credential@c/" .../dir-a/dir-b`, cmd.LogString()) } + +func TestRunStdError(t *testing.T) { + e := &runStdError{stderr: "some error"} + var err RunStdError = e + + var asErr RunStdError + require.ErrorAs(t, err, &asErr) + require.Equal(t, "some error", asErr.Stderr()) + + require.ErrorAs(t, fmt.Errorf("wrapped %w", err), &asErr) +} + +func TestRunWithContextTimeout(t *testing.T) { + t.Run("NoTimeout", func(t *testing.T) { + // 'git --version' does not block so it must be finished before the timeout triggered. + err := NewCommand("--version").Run(t.Context()) + require.NoError(t, err) + }) + t.Run("WithTimeout", func(t *testing.T) { + cmd := NewCommand("hash-object", "--stdin") + _, _, pipeClose := cmd.MakeStdinStdoutPipe() + defer pipeClose() + err := cmd.WithTimeout(1 * time.Millisecond).Run(t.Context()) + require.ErrorIs(t, err, context.DeadlineExceeded) + }) +} diff --git a/modules/git/gitcmd/context.go b/modules/git/gitcmd/context.go new file mode 100644 index 0000000000..a32f92ff3a --- /dev/null +++ b/modules/git/gitcmd/context.go @@ -0,0 +1,32 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package gitcmd + +import ( + "context" +) + +type Context interface { + context.Context + + // CancelPipeline is a helper function to cancel the command context (kill the command) with a specific error cause, + // it returns the same error for convenience to break the PipelineFunc easily + CancelPipeline(err error) error + + // In the future, this interface will be extended to support stdio pipe readers/writers +} + +type cmdContext struct { + context.Context + cmd *Command +} + +func (c *cmdContext) CancelPipeline(err error) error { + // pipelineError is used to distinguish between: + // * context canceled by pipeline caller with/without error (normal cancellation) + // * context canceled by parent context (still context.Canceled error) + // * other causes + c.cmd.cmdCancel(pipelineError{err}) + return err +} diff --git a/modules/git/gitcmd/error.go b/modules/git/gitcmd/error.go new file mode 100644 index 0000000000..066b37f10d --- /dev/null +++ b/modules/git/gitcmd/error.go @@ -0,0 +1,101 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package gitcmd + +import ( + "context" + "errors" + "fmt" + "os/exec" + "strings" +) + +type RunStdError interface { + error + Unwrap() error + Stderr() string +} + +type runStdError struct { + err error // usually the low-level error like `*exec.ExitError` + stderr string // git command's stderr output + errMsg string // the cached error message for Error() method +} + +func (r *runStdError) Error() string { + // FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message + // But a lot of code only checks `strings.Contains(err.Error(), "git error")` + if r.errMsg == "" { + r.errMsg = fmt.Sprintf("%s - %s", r.err.Error(), strings.TrimSpace(r.stderr)) + } + return r.errMsg +} + +func (r *runStdError) Unwrap() error { + return r.err +} + +func (r *runStdError) Stderr() string { + return r.stderr +} + +func ErrorAsStderr(err error) (string, bool) { + var runErr RunStdError + if errors.As(err, &runErr) { + return runErr.Stderr(), true + } + return "", false +} + +func StderrHasPrefix(err error, prefix string) bool { + stderr, ok := ErrorAsStderr(err) + if !ok { + return false + } + return strings.HasPrefix(stderr, prefix) +} + +func IsErrorExitCode(err error, code int) bool { + var exitError *exec.ExitError + if errors.As(err, &exitError) { + return exitError.ExitCode() == code + } + return false +} + +func IsErrorSignalKilled(err error) bool { + var exitError *exec.ExitError + return errors.As(err, &exitError) && exitError.String() == "signal: killed" +} + +func IsErrorCanceledOrKilled(err error) bool { + // When "cancel()" a git command's context, the returned error of "Run()" could be one of them: + // - context.Canceled + // - *exec.ExitError: "signal: killed" + // TODO: in the future, we need to use unified error type from gitcmd.Run to check whether it is manually canceled + return errors.Is(err, context.Canceled) || IsErrorSignalKilled(err) +} + +type pipelineError struct { + error +} + +func 
(e pipelineError) Unwrap() error { + return e.error +} + +func wrapPipelineError(err error) error { + if err == nil { + return nil + } + return pipelineError{err} +} + +func UnwrapPipelineError(err error) (error, bool) { //nolint:revive // this is for error unwrapping + var pe pipelineError + if errors.As(err, &pe) { + return pe.error, true + } + return nil, false +} diff --git a/modules/git/gitcmd/pipe.go b/modules/git/gitcmd/pipe.go new file mode 100644 index 0000000000..a1e91fdf8e --- /dev/null +++ b/modules/git/gitcmd/pipe.go @@ -0,0 +1,79 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package gitcmd + +import ( + "io" + "os" +) + +type PipeBufferReader interface { + Read(p []byte) (n int, err error) + Bytes() []byte +} + +type PipeBufferWriter interface { + Write(p []byte) (n int, err error) + Bytes() []byte +} + +type PipeReader interface { + io.ReadCloser + internalOnly() +} + +type pipeReader struct { + f *os.File +} + +func (r *pipeReader) internalOnly() {} + +func (r *pipeReader) Read(p []byte) (n int, err error) { + return r.f.Read(p) +} + +func (r *pipeReader) Close() error { + return r.f.Close() +} + +type PipeWriter interface { + io.WriteCloser + internalOnly() +} + +type pipeWriter struct { + f *os.File +} + +func (w *pipeWriter) internalOnly() {} + +func (w *pipeWriter) Close() error { + return w.f.Close() +} + +func (w *pipeWriter) Write(p []byte) (n int, err error) { + return w.f.Write(p) +} + +func (w *pipeWriter) DrainBeforeClose() error { + return nil +} + +type pipeNull struct { + err error +} + +func (p *pipeNull) internalOnly() {} + +func (p *pipeNull) Read([]byte) (n int, err error) { + return 0, p.err +} + +func (p *pipeNull) Write([]byte) (n int, err error) { + return 0, p.err +} + +func (p *pipeNull) Close() error { + return nil +} diff --git a/modules/git/gitcmd/utils.go b/modules/git/gitcmd/utils.go deleted file mode 100644 index ee24eb6a9a..0000000000 --- a/modules/git/gitcmd/utils.go 
+++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2025 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package gitcmd - -import "fmt" - -// ConcatenateError concatenats an error with stderr string -func ConcatenateError(err error, stderr string) error { - if len(stderr) == 0 { - return err - } - return fmt.Errorf("%w - %s", err, stderr) -} diff --git a/modules/git/grep.go b/modules/git/grep.go index ed69a788a4..051a7a1d40 100644 --- a/modules/git/grep.go +++ b/modules/git/grep.go @@ -5,11 +5,9 @@ package git import ( "bufio" - "bytes" "context" "errors" "fmt" - "os" "slices" "strconv" "strings" @@ -42,15 +40,6 @@ type GrepOptions struct { } func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepOptions) ([]*GrepResult, error) { - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - return nil, fmt.Errorf("unable to create os pipe to grep: %w", err) - } - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - /* The output is like this ( "^@" means \x00): @@ -83,14 +72,11 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO cmd.AddDynamicArguments(util.IfZero(opts.RefName, "HEAD")) cmd.AddDashesAndList(opts.PathspecList...) opts.MaxResultLimit = util.IfZero(opts.MaxResultLimit, 50) - stderr := bytes.Buffer{} - err = cmd.WithDir(repo.Path). - WithStdout(stdoutWriter). - WithStderr(&stderr). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() - defer stdoutReader.Close() + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() + err := cmd.WithDir(repo.Path). 
+ WithPipelineFunc(func(ctx gitcmd.Context) error { isInBlock := false rd := bufio.NewReaderSize(stdoutReader, util.IfZero(opts.MaxLineLength, 16*1024)) var res *GrepResult @@ -116,8 +102,7 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO } if line == "" { if len(results) >= opts.MaxResultLimit { - cancel() - break + return ctx.CancelPipeline(nil) } isInBlock = false continue @@ -133,17 +118,17 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO } return nil }). - Run(ctx) + RunWithStderr(ctx) // git grep exits by cancel (killed), usually it is caused by the limit of results - if gitcmd.IsErrorExitCode(err, -1) && stderr.Len() == 0 { + if gitcmd.IsErrorExitCode(err, -1) && err.Stderr() == "" { return results, nil } // git grep exits with 1 if no results are found - if gitcmd.IsErrorExitCode(err, 1) && stderr.Len() == 0 { + if gitcmd.IsErrorExitCode(err, 1) && err.Stderr() == "" { return nil, nil } if err != nil && !errors.Is(err, context.Canceled) { - return nil, fmt.Errorf("unable to run git grep: %w, stderr: %s", err, stderr.String()) + return nil, fmt.Errorf("unable to run git grep: %w", err) } return results, nil } diff --git a/modules/git/languagestats/language_stats_nogogit.go b/modules/git/languagestats/language_stats_nogogit.go index 94cf9fff8c..1dbf184af6 100644 --- a/modules/git/languagestats/language_stats_nogogit.go +++ b/modules/git/languagestats/language_stats_nogogit.go @@ -22,33 +22,28 @@ import ( func GetLanguageStats(repo *git.Repository, commitID string) (map[string]int64, error) { // We will feed the commit IDs in order into cat-file --batch, followed by blobs as necessary. 
// so let's create a batch stdin and stdout - batchStdinWriter, batchReader, cancel, err := repo.CatFileBatch(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { return nil, err } defer cancel() - writeID := func(id string) error { - _, err := batchStdinWriter.Write([]byte(id + "\n")) - return err - } - - if err := writeID(commitID); err != nil { + commitInfo, batchReader, err := batch.QueryContent(commitID) + if err != nil { return nil, err } - shaBytes, typ, size, err := git.ReadBatchLine(batchReader) - if typ != "commit" { + if commitInfo.Type != "commit" { log.Debug("Unable to get commit for: %s. Err: %v", commitID, err) return nil, git.ErrNotExist{ID: commitID} } - sha, err := git.NewIDFromString(string(shaBytes)) + sha, err := git.NewIDFromString(commitInfo.ID) if err != nil { log.Debug("Unable to get commit for: %s. Err: %v", commitID, err) return nil, git.ErrNotExist{ID: commitID} } - commit, err := git.CommitFromReader(repo, sha, io.LimitReader(batchReader, size)) + commit, err := git.CommitFromReader(repo, sha, io.LimitReader(batchReader, commitInfo.Size)) if err != nil { log.Debug("Unable to get commit for: %s. 
Err: %v", commitID, err) return nil, err @@ -144,20 +139,16 @@ func GetLanguageStats(repo *git.Repository, commitID string) (map[string]int64, // If content can not be read or file is too big just do detection by filename if f.Size() <= bigFileSize { - if err := writeID(f.ID.String()); err != nil { - return nil, err - } - _, _, size, err := git.ReadBatchLine(batchReader) + info, _, err := batch.QueryContent(f.ID.String()) if err != nil { - log.Debug("Error reading blob: %s Err: %v", f.ID.String(), err) return nil, err } - sizeToRead := size + sizeToRead := info.Size discard := int64(1) - if size > fileSizeLimit { + if info.Size > fileSizeLimit { sizeToRead = fileSizeLimit - discard = size - fileSizeLimit + 1 + discard = info.Size - fileSizeLimit + 1 } _, err = contentBuf.ReadFrom(io.LimitReader(batchReader, sizeToRead)) diff --git a/modules/git/languagestats/main_test.go b/modules/git/languagestats/main_test.go index b8f9ded005..bf860f2a18 100644 --- a/modules/git/languagestats/main_test.go +++ b/modules/git/languagestats/main_test.go @@ -4,37 +4,11 @@ package languagestats import ( - "fmt" - "os" "testing" "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" ) -func testRun(m *testing.M) error { - gitHomePath, err := os.MkdirTemp(os.TempDir(), "git-home") - if err != nil { - return fmt.Errorf("unable to create temp dir: %w", err) - } - defer util.RemoveAll(gitHomePath) - setting.Git.HomePath = gitHomePath - - if err = git.InitFull(); err != nil { - return fmt.Errorf("failed to call Init: %w", err) - } - - exitCode := m.Run() - if exitCode != 0 { - return fmt.Errorf("run test failed, ExitCode=%d", exitCode) - } - return nil -} - func TestMain(m *testing.M) { - if err := testRun(m); err != nil { - _, _ = fmt.Fprintf(os.Stderr, "Test failed: %v", err) - os.Exit(1) - } + git.RunGitTests(m) } diff --git a/modules/git/log_name_status.go b/modules/git/log_name_status.go index 72e513000b..8acfc96f26 100644 --- 
a/modules/git/log_name_status.go +++ b/modules/git/log_name_status.go @@ -15,25 +15,12 @@ import ( "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/git/gitcmd" - - "github.com/djherbis/buffer" - "github.com/djherbis/nio/v3" + "code.gitea.io/gitea/modules/log" ) // LogNameStatusRepo opens git log --raw in the provided repo and returns a stdin pipe, a stdout reader and cancel function func LogNameStatusRepo(ctx context.Context, repository, head, treepath string, paths ...string) (*bufio.Reader, func()) { - // We often want to feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary. - // so let's create a batch stdin and stdout - stdoutReader, stdoutWriter := nio.Pipe(buffer.New(32 * 1024)) - // Lets also create a context so that we can absolutely ensure that the command should die when we're done - ctx, ctxCancel := context.WithCancel(ctx) - - cancel := func() { - ctxCancel() - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - } cmd := gitcmd.NewCommand() cmd.AddArguments("log", "--name-status", "-c", "--format=commit%x00%H %P%x00", "--parents", "--no-renames", "-t", "-z").AddDynamicArguments(head) @@ -63,24 +50,21 @@ func LogNameStatusRepo(ctx context.Context, repository, head, treepath string, p } cmd.AddDashesAndList(files...) + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + ctx, ctxCancel := context.WithCancel(ctx) go func() { - stderr := strings.Builder{} - err := cmd.WithDir(repository). - WithStdout(stdoutWriter). - WithStderr(&stderr). 
- Run(ctx) - if err != nil { - _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String())) - return + err := cmd.WithDir(repository).RunWithStderr(ctx) + if err != nil && !errors.Is(err, context.Canceled) { + log.Error("Unable to run git command %v: %v", cmd.LogString(), err) } - - _ = stdoutWriter.Close() }() - // For simplicities sake we'll us a buffered reader to read from the cat-file --batch bufReader := bufio.NewReaderSize(stdoutReader, 32*1024) - return bufReader, cancel + return bufReader, func() { + ctxCancel() + stdoutReaderClose() + } } // LogNameStatusRepoParser parses a git log raw output from LogRawRepo diff --git a/modules/git/parse.go b/modules/git/parse.go index d4ff0ecb23..94020e690d 100644 --- a/modules/git/parse.go +++ b/modules/git/parse.go @@ -46,8 +46,8 @@ func parseLsTreeLine(line []byte) (*LsTreeEntry, error) { entry.Size = optional.Some(size) } - entry.EntryMode, err = ParseEntryMode(string(entryMode)) - if err != nil || entry.EntryMode == EntryModeNoEntry { + entry.EntryMode = ParseEntryMode(string(entryMode)) + if entry.EntryMode == EntryModeNoEntry { return nil, fmt.Errorf("invalid ls-tree output (invalid mode): %q, err: %w", line, err) } diff --git a/modules/git/parse_treeentry.go b/modules/git/parse_treeentry.go index e14d9f17b5..d46cd3344d 100644 --- a/modules/git/parse_treeentry.go +++ b/modules/git/parse_treeentry.go @@ -4,7 +4,6 @@ package git import ( - "bufio" "bytes" "fmt" "io" @@ -47,7 +46,7 @@ func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) { return entries, nil } -func catBatchParseTreeEntries(objectFormat ObjectFormat, ptree *Tree, rd *bufio.Reader, sz int64) ([]*TreeEntry, error) { +func catBatchParseTreeEntries(objectFormat ObjectFormat, ptree *Tree, rd BufferedReader, sz int64) ([]*TreeEntry, error) { fnameBuf := make([]byte, 4096) modeBuf := make([]byte, 40) shaBuf := make([]byte, objectFormat.FullLength()) diff --git a/modules/git/pipeline/catfile.go 
b/modules/git/pipeline/catfile.go index a4d1ff64cf..3d005e28f1 100644 --- a/modules/git/pipeline/catfile.go +++ b/modules/git/pipeline/catfile.go @@ -5,81 +5,34 @@ package pipeline import ( "bufio" - "bytes" "context" - "fmt" "io" "strconv" "strings" - "sync" "code.gitea.io/gitea/modules/git/gitcmd" - "code.gitea.io/gitea/modules/log" ) // CatFileBatchCheck runs cat-file with --batch-check -func CatFileBatchCheck(ctx context.Context, shasToCheckReader *io.PipeReader, catFileCheckWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath string) { - defer wg.Done() - defer shasToCheckReader.Close() - defer catFileCheckWriter.Close() - - stderr := new(bytes.Buffer) - var errbuf strings.Builder - cmd := gitcmd.NewCommand("cat-file", "--batch-check") - if err := cmd.WithDir(tmpBasePath). - WithStdin(shasToCheckReader). - WithStdout(catFileCheckWriter). - WithStderr(stderr). - Run(ctx); err != nil { - _ = catFileCheckWriter.CloseWithError(fmt.Errorf("git cat-file --batch-check [%s]: %w - %s", tmpBasePath, err, errbuf.String())) - } +func CatFileBatchCheck(ctx context.Context, cmd *gitcmd.Command, tmpBasePath string) error { + cmd.AddArguments("cat-file", "--batch-check") + return cmd.WithDir(tmpBasePath).RunWithStderr(ctx) } // CatFileBatchCheckAllObjects runs cat-file with --batch-check --batch-all -func CatFileBatchCheckAllObjects(ctx context.Context, catFileCheckWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath string, errChan chan<- error) { - defer wg.Done() - defer catFileCheckWriter.Close() - - stderr := new(bytes.Buffer) - var errbuf strings.Builder - cmd := gitcmd.NewCommand("cat-file", "--batch-check", "--batch-all-objects") - if err := cmd.WithDir(tmpBasePath). - WithStdout(catFileCheckWriter). - WithStderr(stderr). 
- Run(ctx); err != nil { - log.Error("git cat-file --batch-check --batch-all-object [%s]: %v - %s", tmpBasePath, err, errbuf.String()) - err = fmt.Errorf("git cat-file --batch-check --batch-all-object [%s]: %w - %s", tmpBasePath, err, errbuf.String()) - _ = catFileCheckWriter.CloseWithError(err) - errChan <- err - } +func CatFileBatchCheckAllObjects(ctx context.Context, cmd *gitcmd.Command, tmpBasePath string) error { + return cmd.AddArguments("cat-file", "--batch-check", "--batch-all-objects").WithDir(tmpBasePath).RunWithStderr(ctx) } // CatFileBatch runs cat-file --batch -func CatFileBatch(ctx context.Context, shasToBatchReader *io.PipeReader, catFileBatchWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath string) { - defer wg.Done() - defer shasToBatchReader.Close() - defer catFileBatchWriter.Close() - - stderr := new(bytes.Buffer) - var errbuf strings.Builder - if err := gitcmd.NewCommand("cat-file", "--batch"). - WithDir(tmpBasePath). - WithStdin(shasToBatchReader). - WithStdout(catFileBatchWriter). - WithStderr(stderr). 
- Run(ctx); err != nil { - _ = shasToBatchReader.CloseWithError(fmt.Errorf("git rev-list [%s]: %w - %s", tmpBasePath, err, errbuf.String())) - } +func CatFileBatch(ctx context.Context, cmd *gitcmd.Command, tmpBasePath string) error { + return cmd.AddArguments("cat-file", "--batch").WithDir(tmpBasePath).RunWithStderr(ctx) } // BlobsLessThan1024FromCatFileBatchCheck reads a pipeline from cat-file --batch-check and returns the blobs <1024 in size -func BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader *io.PipeReader, shasToBatchWriter *io.PipeWriter, wg *sync.WaitGroup) { - defer wg.Done() - defer catFileCheckReader.Close() - scanner := bufio.NewScanner(catFileCheckReader) - defer func() { - _ = shasToBatchWriter.CloseWithError(scanner.Err()) - }() +func BlobsLessThan1024FromCatFileBatchCheck(in io.ReadCloser, out io.WriteCloser) error { + defer out.Close() + scanner := bufio.NewScanner(in) for scanner.Scan() { line := scanner.Text() if len(line) == 0 { @@ -95,12 +48,12 @@ func BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader *io.PipeReader, s } toWrite := []byte(fields[0] + "\n") for len(toWrite) > 0 { - n, err := shasToBatchWriter.Write(toWrite) + n, err := out.Write(toWrite) if err != nil { - _ = catFileCheckReader.CloseWithError(err) - break + return err } toWrite = toWrite[n:] } } + return scanner.Err() } diff --git a/modules/git/pipeline/lfs_common.go b/modules/git/pipeline/lfs_common.go index 188e7d4d65..914aefbeaf 100644 --- a/modules/git/pipeline/lfs_common.go +++ b/modules/git/pipeline/lfs_common.go @@ -4,7 +4,6 @@ package pipeline import ( - "fmt" "time" "code.gitea.io/gitea/modules/git" @@ -26,7 +25,3 @@ type lfsResultSlice []*LFSResult func (a lfsResultSlice) Len() int { return len(a) } func (a lfsResultSlice) Swap(i, j int) { a[i], a[j] = a[j], a[i] } func (a lfsResultSlice) Less(i, j int) bool { return a[j].When.After(a[i].When) } - -func lfsError(msg string, err error) error { - return fmt.Errorf("LFS error occurred, %s: err: %w", msg, 
err) -} diff --git a/modules/git/pipeline/lfs_gogit.go b/modules/git/pipeline/lfs_gogit.go index adcf8ed09c..c12397569c 100644 --- a/modules/git/pipeline/lfs_gogit.go +++ b/modules/git/pipeline/lfs_gogit.go @@ -6,11 +6,10 @@ package pipeline import ( - "bufio" + "fmt" "io" "sort" "strings" - "sync" "code.gitea.io/gitea/modules/git" @@ -24,7 +23,6 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err resultsMap := map[string]*LFSResult{} results := make([]*LFSResult, 0) - basePath := repo.Path gogitRepo := repo.GoGitRepo() commitsIter, err := gogitRepo.Log(&gogit.LogOptions{ @@ -32,7 +30,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err All: true, }) if err != nil { - return nil, lfsError("failed to get GoGit CommitsIter", err) + return nil, fmt.Errorf("LFS error occurred, failed to get GoGit CommitsIter: err: %w", err) } err = commitsIter.ForEach(func(gitCommit *object.Commit) error { @@ -66,7 +64,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err return nil }) if err != nil && err != io.EOF { - return nil, lfsError("failure in CommitIter.ForEach", err) + return nil, fmt.Errorf("LFS error occurred, failure in CommitIter.ForEach: %w", err) } for _, result := range resultsMap { @@ -82,65 +80,6 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err } sort.Sort(lfsResultSlice(results)) - - // Should really use a go-git function here but name-rev is not completed and recapitulating it is not simple - shasToNameReader, shasToNameWriter := io.Pipe() - nameRevStdinReader, nameRevStdinWriter := io.Pipe() - errChan := make(chan error, 1) - wg := sync.WaitGroup{} - wg.Add(3) - - go func() { - defer wg.Done() - scanner := bufio.NewScanner(nameRevStdinReader) - i := 0 - for scanner.Scan() { - line := scanner.Text() - if len(line) == 0 { - continue - } - result := results[i] - result.FullCommitName = line - result.BranchName = strings.Split(line, 
"~")[0] - i++ - } - }() - go NameRevStdin(repo.Ctx, shasToNameReader, nameRevStdinWriter, &wg, basePath) - go func() { - defer wg.Done() - defer shasToNameWriter.Close() - for _, result := range results { - i := 0 - if i < len(result.SHA) { - n, err := shasToNameWriter.Write([]byte(result.SHA)[i:]) - if err != nil { - errChan <- err - break - } - i += n - } - n := 0 - for n < 1 { - n, err = shasToNameWriter.Write([]byte{'\n'}) - if err != nil { - errChan <- err - break - } - - } - - } - }() - - wg.Wait() - - select { - case err, has := <-errChan: - if has { - return nil, lfsError("unable to obtain name for LFS files", err) - } - default: - } - - return results, nil + err = fillResultNameRev(repo.Ctx, repo.Path, results) + return results, err } diff --git a/modules/git/pipeline/lfs_nogogit.go b/modules/git/pipeline/lfs_nogogit.go index 4881a2be64..91bda0d0e5 100644 --- a/modules/git/pipeline/lfs_nogogit.go +++ b/modules/git/pipeline/lfs_nogogit.go @@ -8,46 +8,34 @@ package pipeline import ( "bufio" "bytes" + "encoding/hex" "io" "sort" "strings" - "sync" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git/gitcmd" ) // FindLFSFile finds commits that contain a provided pointer file hash -func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, error) { +func FindLFSFile(repo *git.Repository, objectID git.ObjectID) (results []*LFSResult, _ error) { + cmd := gitcmd.NewCommand("rev-list", "--all") + revListReader, revListReaderClose := cmd.MakeStdoutPipe() + defer revListReaderClose() + err := cmd.WithDir(repo.Path). 
+ WithPipelineFunc(func(context gitcmd.Context) (err error) { + results, err = findLFSFileFunc(repo, objectID, revListReader) + return err + }).RunWithStderr(repo.Ctx) + return results, err +} + +func findLFSFileFunc(repo *git.Repository, objectID git.ObjectID, revListReader io.Reader) ([]*LFSResult, error) { resultsMap := map[string]*LFSResult{} results := make([]*LFSResult, 0) - - basePath := repo.Path - - // Use rev-list to provide us with all commits in order - revListReader, revListWriter := io.Pipe() - defer func() { - _ = revListWriter.Close() - _ = revListReader.Close() - }() - - go func() { - stderr := strings.Builder{} - err := gitcmd.NewCommand("rev-list", "--all"). - WithDir(repo.Path). - WithStdout(revListWriter). - WithStderr(&stderr). - Run(repo.Ctx) - if err != nil { - _ = revListWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String())) - } else { - _ = revListWriter.Close() - } - }() - // Next feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary. 
// so let's create a batch stdin and stdout - batchStdinWriter, batchReader, cancel, err := repo.CatFileBatch(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { return nil, err } @@ -55,7 +43,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err // We'll use a scanner for the revList because it's simpler than a bufio.Reader scan := bufio.NewScanner(revListReader) - trees := [][]byte{} + trees := []string{} paths := []string{} fnameBuf := make([]byte, 4096) @@ -64,14 +52,10 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err for scan.Scan() { // Get the next commit ID - commitID := scan.Bytes() + commitID := scan.Text() // push the commit to the cat-file --batch process - _, err := batchStdinWriter.Write(commitID) - if err != nil { - return nil, err - } - _, err = batchStdinWriter.Write([]byte{'\n'}) + info, batchReader, err := batch.QueryContent(commitID) if err != nil { return nil, err } @@ -81,26 +65,20 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err commitReadingLoop: for { - _, typ, size, err := git.ReadBatchLine(batchReader) - if err != nil { - return nil, err - } - - switch typ { + switch info.Type { case "tag": // This shouldn't happen but if it does well just get the commit and try again - id, err := git.ReadTagObjectID(batchReader, size) + id, err := git.ReadTagObjectID(batchReader, info.Size) if err != nil { return nil, err } - _, err = batchStdinWriter.Write([]byte(id + "\n")) - if err != nil { + if info, batchReader, err = batch.QueryContent(id); err != nil { return nil, err } continue case "commit": // Read in the commit to get its tree and in case this is one of the last used commits - curCommit, err = git.CommitFromReader(repo, git.MustIDFromString(string(commitID)), io.LimitReader(batchReader, size)) + curCommit, err = git.CommitFromReader(repo, git.MustIDFromString(commitID), io.LimitReader(batchReader, info.Size)) if err 
!= nil { return nil, err } @@ -108,13 +86,13 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err return nil, err } - if _, err := batchStdinWriter.Write([]byte(curCommit.Tree.ID.String() + "\n")); err != nil { + if info, _, err = batch.QueryContent(curCommit.Tree.ID.String()); err != nil { return nil, err } curPath = "" case "tree": var n int64 - for n < size { + for n < info.Size { mode, fname, binObjectID, count, err := git.ParseCatFileTreeLine(objectID.Type(), batchReader, modeBuf, fnameBuf, workingShaBuf) if err != nil { return nil, err @@ -130,9 +108,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err } resultsMap[curCommit.ID.String()+":"+curPath+string(fname)] = &result } else if string(mode) == git.EntryModeTree.String() { - hexObjectID := make([]byte, objectID.Type().FullLength()) - git.BinToHex(objectID.Type(), binObjectID, hexObjectID) - trees = append(trees, hexObjectID) + trees = append(trees, hex.EncodeToString(binObjectID)) paths = append(paths, curPath+string(fname)+"/") } } @@ -140,11 +116,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err return nil, err } if len(trees) > 0 { - _, err := batchStdinWriter.Write(trees[len(trees)-1]) - if err != nil { - return nil, err - } - _, err = batchStdinWriter.Write([]byte("\n")) + info, _, err = batch.QueryContent(trees[len(trees)-1]) if err != nil { return nil, err } @@ -155,7 +127,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err break commitReadingLoop } default: - if err := git.DiscardFull(batchReader, size+1); err != nil { + if err := git.DiscardFull(batchReader, info.Size+1); err != nil { return nil, err } } @@ -179,56 +151,6 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err } sort.Sort(lfsResultSlice(results)) - - // Should really use a go-git function here but name-rev is not completed and recapitulating it is not simple - 
shasToNameReader, shasToNameWriter := io.Pipe() - nameRevStdinReader, nameRevStdinWriter := io.Pipe() - errChan := make(chan error, 1) - wg := sync.WaitGroup{} - wg.Add(3) - - go func() { - defer wg.Done() - scanner := bufio.NewScanner(nameRevStdinReader) - i := 0 - for scanner.Scan() { - line := scanner.Text() - if len(line) == 0 { - continue - } - result := results[i] - result.FullCommitName = line - result.BranchName = strings.Split(line, "~")[0] - i++ - } - }() - go NameRevStdin(repo.Ctx, shasToNameReader, nameRevStdinWriter, &wg, basePath) - go func() { - defer wg.Done() - defer shasToNameWriter.Close() - for _, result := range results { - _, err := shasToNameWriter.Write([]byte(result.SHA)) - if err != nil { - errChan <- err - break - } - _, err = shasToNameWriter.Write([]byte{'\n'}) - if err != nil { - errChan <- err - break - } - } - }() - - wg.Wait() - - select { - case err, has := <-errChan: - if has { - return nil, lfsError("unable to obtain name for LFS files", err) - } - default: - } - - return results, nil + err = fillResultNameRev(repo.Ctx, repo.Path, results) + return results, err } diff --git a/modules/git/pipeline/lfs_test.go b/modules/git/pipeline/lfs_test.go new file mode 100644 index 0000000000..30fe2f93c2 --- /dev/null +++ b/modules/git/pipeline/lfs_test.go @@ -0,0 +1,38 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package pipeline + +import ( + "testing" + "time" + + "code.gitea.io/gitea/modules/git" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestFindLFSFile(t *testing.T) { + repoPath := "../../../tests/gitea-repositories-meta/user2/lfs.git" + gitRepo, err := git.OpenRepository(t.Context(), repoPath) + require.NoError(t, err) + defer gitRepo.Close() + + objectID := git.MustIDFromString("2b6c6c4eaefa24b22f2092c3d54b263ff26feb58") + + stats, err := FindLFSFile(gitRepo, objectID) + require.NoError(t, err) + + tm, err := time.Parse(time.RFC3339, "2022-12-21T17:56:42-05:00") + require.NoError(t, err) + + assert.Len(t, stats, 1) + assert.Equal(t, "CONTRIBUTING.md", stats[0].Name) + assert.Equal(t, "73cf03db6ece34e12bf91e8853dc58f678f2f82d", stats[0].SHA) + assert.Equal(t, "Initial commit", stats[0].Summary) + assert.Equal(t, tm, stats[0].When) + assert.Empty(t, stats[0].ParentHashes) + assert.Equal(t, "master", stats[0].BranchName) + assert.Equal(t, "master", stats[0].FullCommitName) +} diff --git a/modules/git/pipeline/main_test.go b/modules/git/pipeline/main_test.go new file mode 100644 index 0000000000..fa5832b68c --- /dev/null +++ b/modules/git/pipeline/main_test.go @@ -0,0 +1,14 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package pipeline + +import ( + "testing" + + "code.gitea.io/gitea/modules/git" +) + +func TestMain(m *testing.M) { + git.RunGitTests(m) +} diff --git a/modules/git/pipeline/namerev.go b/modules/git/pipeline/namerev.go index 782b5f0531..24de442940 100644 --- a/modules/git/pipeline/namerev.go +++ b/modules/git/pipeline/namerev.go @@ -4,30 +4,54 @@ package pipeline import ( - "bytes" + "bufio" "context" - "fmt" - "io" + "errors" "strings" - "sync" "code.gitea.io/gitea/modules/git/gitcmd" + + "golang.org/x/sync/errgroup" ) -// NameRevStdin runs name-rev --stdin -func NameRevStdin(ctx context.Context, shasToNameReader *io.PipeReader, nameRevStdinWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath string) { - defer wg.Done() - defer shasToNameReader.Close() - defer nameRevStdinWriter.Close() +func fillResultNameRev(ctx context.Context, basePath string, results []*LFSResult) error { + // Should really use a go-git function here but name-rev is not completed and recapitulating it is not simple + wg := errgroup.Group{} + cmd := gitcmd.NewCommand("name-rev", "--stdin", "--name-only", "--always").WithDir(basePath) + stdin, stdinClose := cmd.MakeStdinPipe() + stdout, stdoutClose := cmd.MakeStdoutPipe() + defer stdinClose() + defer stdoutClose() - stderr := new(bytes.Buffer) - var errbuf strings.Builder - if err := gitcmd.NewCommand("name-rev", "--stdin", "--name-only", "--always"). - WithDir(tmpBasePath). - WithStdin(shasToNameReader). - WithStdout(nameRevStdinWriter). - WithStderr(stderr). 
- Run(ctx); err != nil { - _ = shasToNameReader.CloseWithError(fmt.Errorf("git name-rev [%s]: %w - %s", tmpBasePath, err, errbuf.String())) - } + wg.Go(func() error { + scanner := bufio.NewScanner(stdout) + i := 0 + for scanner.Scan() { + line := scanner.Text() + if len(line) == 0 { + continue + } + result := results[i] + result.FullCommitName = line + result.BranchName = strings.Split(line, "~")[0] + i++ + } + return scanner.Err() + }) + wg.Go(func() error { + defer stdinClose() + for _, result := range results { + _, err := stdin.Write([]byte(result.SHA)) + if err != nil { + return err + } + _, err = stdin.Write([]byte{'\n'}) + if err != nil { + return err + } + } + return nil + }) + err := cmd.RunWithStderr(ctx) + return errors.Join(err, wg.Wait()) } diff --git a/modules/git/pipeline/revlist.go b/modules/git/pipeline/revlist.go index 755b165a65..28d4751bd8 100644 --- a/modules/git/pipeline/revlist.go +++ b/modules/git/pipeline/revlist.go @@ -5,63 +5,26 @@ package pipeline import ( "bufio" - "bytes" "context" - "fmt" "io" "strings" - "sync" "code.gitea.io/gitea/modules/git/gitcmd" - "code.gitea.io/gitea/modules/log" ) -// RevListAllObjects runs rev-list --objects --all and writes to a pipewriter -func RevListAllObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync.WaitGroup, basePath string, errChan chan<- error) { - defer wg.Done() - defer revListWriter.Close() - - stderr := new(bytes.Buffer) - var errbuf strings.Builder - cmd := gitcmd.NewCommand("rev-list", "--objects", "--all") - if err := cmd.WithDir(basePath). - WithStdout(revListWriter). - WithStderr(stderr). 
- Run(ctx); err != nil { - log.Error("git rev-list --objects --all [%s]: %v - %s", basePath, err, errbuf.String()) - err = fmt.Errorf("git rev-list --objects --all [%s]: %w - %s", basePath, err, errbuf.String()) - _ = revListWriter.CloseWithError(err) - errChan <- err - } -} - // RevListObjects run rev-list --objects from headSHA to baseSHA -func RevListObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath, headSHA, baseSHA string, errChan chan<- error) { - defer wg.Done() - defer revListWriter.Close() - stderr := new(bytes.Buffer) - var errbuf strings.Builder - cmd := gitcmd.NewCommand("rev-list", "--objects").AddDynamicArguments(headSHA) +func RevListObjects(ctx context.Context, cmd *gitcmd.Command, tmpBasePath, headSHA, baseSHA string) error { + cmd.AddArguments("rev-list", "--objects").AddDynamicArguments(headSHA) if baseSHA != "" { cmd = cmd.AddArguments("--not").AddDynamicArguments(baseSHA) } - if err := cmd.WithDir(tmpBasePath). - WithStdout(revListWriter). - WithStderr(stderr). 
- Run(ctx); err != nil { - log.Error("git rev-list [%s]: %v - %s", tmpBasePath, err, errbuf.String()) - errChan <- fmt.Errorf("git rev-list [%s]: %w - %s", tmpBasePath, err, errbuf.String()) - } + return cmd.WithDir(tmpBasePath).RunWithStderr(ctx) } // BlobsFromRevListObjects reads a RevListAllObjects and only selects blobs -func BlobsFromRevListObjects(revListReader *io.PipeReader, shasToCheckWriter *io.PipeWriter, wg *sync.WaitGroup) { - defer wg.Done() - defer revListReader.Close() - scanner := bufio.NewScanner(revListReader) - defer func() { - _ = shasToCheckWriter.CloseWithError(scanner.Err()) - }() +func BlobsFromRevListObjects(in io.ReadCloser, out io.WriteCloser) error { + defer out.Close() + scanner := bufio.NewScanner(in) for scanner.Scan() { line := scanner.Text() if len(line) == 0 { @@ -73,12 +36,12 @@ func BlobsFromRevListObjects(revListReader *io.PipeReader, shasToCheckWriter *io } toWrite := []byte(fields[0] + "\n") for len(toWrite) > 0 { - n, err := shasToCheckWriter.Write(toWrite) + n, err := out.Write(toWrite) if err != nil { - _ = revListReader.CloseWithError(err) - break + return err } toWrite = toWrite[n:] } } + return scanner.Err() } diff --git a/modules/git/ref.go b/modules/git/ref.go index 56b2db858a..7b63d06b38 100644 --- a/modules/git/ref.go +++ b/modules/git/ref.go @@ -220,3 +220,14 @@ func (ref RefName) RefWebLinkPath() string { } return string(refType) + "/" + util.PathEscapeSegments(ref.ShortName()) } + +func ParseRefSuffix(ref string) (string, string) { + // Partially support https://git-scm.com/docs/gitrevisions + if idx := strings.Index(ref, "@{"); idx != -1 { + return ref[:idx], ref[idx:] + } + if idx := strings.Index(ref, "^"); idx != -1 { + return ref[:idx], ref[idx:] + } + return ref, "" +} diff --git a/modules/git/remote.go b/modules/git/remote.go index 1999ad4b94..ae56c5576a 100644 --- a/modules/git/remote.go +++ b/modules/git/remote.go @@ -74,9 +74,9 @@ func (err *ErrInvalidCloneAddr) Unwrap() error { func 
IsRemoteNotExistError(err error) bool { // see: https://github.com/go-gitea/gitea/issues/32889#issuecomment-2571848216 // Should not add space in the end, sometimes git will add a `:` - prefix1 := "exit status 128 - fatal: No such remote" // git < 2.30 - prefix2 := "exit status 2 - error: No such remote" // git >= 2.30 - return strings.HasPrefix(err.Error(), prefix1) || strings.HasPrefix(err.Error(), prefix2) + prefix1 := "fatal: No such remote" // git < 2.30, exit status 128 + prefix2 := "error: No such remote" // git >= 2.30. exit status 2 + return gitcmd.StderrHasPrefix(err, prefix1) || gitcmd.StderrHasPrefix(err, prefix2) } // ParseRemoteAddr checks if given remote address is valid, diff --git a/modules/git/repo.go b/modules/git/repo.go index 579accf92e..1e31eb1b80 100644 --- a/modules/git/repo.go +++ b/modules/git/repo.go @@ -8,7 +8,6 @@ import ( "bytes" "context" "fmt" - "io" "net/url" "os" "path" @@ -83,22 +82,19 @@ func InitRepository(ctx context.Context, repoPath string, bare bool, objectForma // IsEmpty Check if repository is empty. func (repo *Repository) IsEmpty() (bool, error) { - var errbuf, output strings.Builder - if err := gitcmd.NewCommand(). + stdout, _, err := gitcmd.NewCommand(). AddOptionFormat("--git-dir=%s", repo.Path). AddArguments("rev-list", "-n", "1", "--all"). WithDir(repo.Path). - WithStdout(&output). - WithStderr(&errbuf). 
- Run(repo.Ctx); err != nil { - if (err.Error() == "exit status 1" && strings.TrimSpace(errbuf.String()) == "") || err.Error() == "exit status 129" { + RunStdString(repo.Ctx) + if err != nil { + if (gitcmd.IsErrorExitCode(err, 1) && err.Stderr() == "") || gitcmd.IsErrorExitCode(err, 129) { // git 2.11 exits with 129 if the repo is empty return true, nil } - return true, fmt.Errorf("check empty: %w - %s", err, errbuf.String()) + return true, fmt.Errorf("check empty: %w", err) } - - return strings.TrimSpace(output.String()) == "", nil + return strings.TrimSpace(stdout) == "", nil } // CloneRepoOptions options when clone a repository @@ -171,21 +167,16 @@ func Clone(ctx context.Context, from, to string, opts CloneRepoOptions) error { } } - stderr := new(bytes.Buffer) - if err := cmd. + return cmd. WithTimeout(opts.Timeout). WithEnv(envs). - WithStdout(io.Discard). - WithStderr(stderr). - Run(ctx); err != nil { - return gitcmd.ConcatenateError(err, stderr.String()) - } - return nil + RunWithStderr(ctx) } // PushOptions options when push to remote type PushOptions struct { Remote string + LocalRefName string Branch string Force bool ForceWithLease string @@ -207,7 +198,13 @@ func Push(ctx context.Context, repoPath string, opts PushOptions) error { } remoteBranchArgs := []string{opts.Remote} if len(opts.Branch) > 0 { - remoteBranchArgs = append(remoteBranchArgs, opts.Branch) + var refspec string + if opts.LocalRefName != "" { + refspec = fmt.Sprintf("%s:%s", opts.LocalRefName, opts.Branch) + } else { + refspec = opts.Branch + } + remoteBranchArgs = append(remoteBranchArgs, refspec) } cmd.AddDashesAndList(remoteBranchArgs...) diff --git a/modules/git/repo_archive.go b/modules/git/repo_archive.go deleted file mode 100644 index 8a9eec9e6a..0000000000 --- a/modules/git/repo_archive.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2015 The Gogs Authors. All rights reserved. -// Copyright 2020 The Gitea Authors. All rights reserved. 
-// SPDX-License-Identifier: MIT - -package git - -import ( - "context" - "fmt" - "io" - "path/filepath" - "strings" - - "code.gitea.io/gitea/modules/git/gitcmd" -) - -// ArchiveType archive types -type ArchiveType int - -const ( - ArchiveUnknown ArchiveType = iota - ArchiveZip // 1 - ArchiveTarGz // 2 - ArchiveBundle // 3 -) - -// String converts an ArchiveType to string: the extension of the archive file without prefix dot -func (a ArchiveType) String() string { - switch a { - case ArchiveZip: - return "zip" - case ArchiveTarGz: - return "tar.gz" - case ArchiveBundle: - return "bundle" - } - return "unknown" -} - -func SplitArchiveNameType(s string) (string, ArchiveType) { - switch { - case strings.HasSuffix(s, ".zip"): - return strings.TrimSuffix(s, ".zip"), ArchiveZip - case strings.HasSuffix(s, ".tar.gz"): - return strings.TrimSuffix(s, ".tar.gz"), ArchiveTarGz - case strings.HasSuffix(s, ".bundle"): - return strings.TrimSuffix(s, ".bundle"), ArchiveBundle - } - return s, ArchiveUnknown -} - -// CreateArchive create archive content to the target path -func (repo *Repository) CreateArchive(ctx context.Context, format ArchiveType, target io.Writer, usePrefix bool, commitID string) error { - if format.String() == "unknown" { - return fmt.Errorf("unknown format: %v", format) - } - - cmd := gitcmd.NewCommand("archive") - if usePrefix { - cmd.AddOptionFormat("--prefix=%s", filepath.Base(strings.TrimSuffix(repo.Path, ".git"))+"/") - } - cmd.AddOptionFormat("--format=%s", format.String()) - cmd.AddDynamicArguments(commitID) - - var stderr strings.Builder - err := cmd.WithDir(repo.Path). - WithStdout(target). - WithStderr(&stderr). 
- Run(ctx) - if err != nil { - return gitcmd.ConcatenateError(err, stderr.String()) - } - return nil -} diff --git a/modules/git/repo_base_nogogit.go b/modules/git/repo_base_nogogit.go index 17c71da5ef..775bbd4a09 100644 --- a/modules/git/repo_base_nogogit.go +++ b/modules/git/repo_base_nogogit.go @@ -7,9 +7,9 @@ package git import ( - "bufio" "context" "path/filepath" + "sync" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/util" @@ -23,11 +23,9 @@ type Repository struct { tagCache *ObjectCache[*Tag] - batchInUse bool - batch *Batch - - checkInUse bool - check *Batch + mu sync.Mutex + catFileBatchCloser CatFileBatchCloser + catFileBatchInUse bool Ctx context.Context LastCommitCache *LastCommitCache @@ -56,69 +54,47 @@ func OpenRepository(ctx context.Context, repoPath string) (*Repository, error) { }, nil } -// CatFileBatch obtains a CatFileBatch for this repository -func (repo *Repository) CatFileBatch(ctx context.Context) (WriteCloserError, *bufio.Reader, func(), error) { - if repo.batch == nil { - var err error - repo.batch, err = NewBatch(ctx, repo.Path) +// CatFileBatch obtains a "batch object provider" for this repository. +// It reuses an existing one if available, otherwise creates a new one. 
+func (repo *Repository) CatFileBatch(ctx context.Context) (_ CatFileBatch, closeFunc func(), err error) { + repo.mu.Lock() + defer repo.mu.Unlock() + + if repo.catFileBatchCloser == nil { + repo.catFileBatchCloser, err = NewBatch(ctx, repo.Path) if err != nil { - return nil, nil, nil, err + repo.catFileBatchCloser = nil // otherwise it is "interface(nil)" and will cause wrong logic + return nil, nil, err } } - if !repo.batchInUse { - repo.batchInUse = true - return repo.batch.Writer, repo.batch.Reader, func() { - repo.batchInUse = false + if !repo.catFileBatchInUse { + repo.catFileBatchInUse = true + return CatFileBatch(repo.catFileBatchCloser), func() { + repo.mu.Lock() + defer repo.mu.Unlock() + repo.catFileBatchInUse = false }, nil } log.Debug("Opening temporary cat file batch for: %s", repo.Path) tempBatch, err := NewBatch(ctx, repo.Path) if err != nil { - return nil, nil, nil, err + return nil, nil, err } - return tempBatch.Writer, tempBatch.Reader, tempBatch.Close, nil -} - -// CatFileBatchCheck obtains a CatFileBatchCheck for this repository -func (repo *Repository) CatFileBatchCheck(ctx context.Context) (WriteCloserError, *bufio.Reader, func(), error) { - if repo.check == nil { - var err error - repo.check, err = NewBatchCheck(ctx, repo.Path) - if err != nil { - return nil, nil, nil, err - } - } - - if !repo.checkInUse { - repo.checkInUse = true - return repo.check.Writer, repo.check.Reader, func() { - repo.checkInUse = false - }, nil - } - - log.Debug("Opening temporary cat file batch-check for: %s", repo.Path) - tempBatchCheck, err := NewBatchCheck(ctx, repo.Path) - if err != nil { - return nil, nil, nil, err - } - return tempBatchCheck.Writer, tempBatchCheck.Reader, tempBatchCheck.Close, nil + return tempBatch, tempBatch.Close, nil } func (repo *Repository) Close() error { if repo == nil { return nil } - if repo.batch != nil { - repo.batch.Close() - repo.batch = nil - repo.batchInUse = false - } - if repo.check != nil { - repo.check.Close() - repo.check 
= nil - repo.checkInUse = false + repo.mu.Lock() + defer repo.mu.Unlock() + if repo.catFileBatchCloser != nil { + repo.catFileBatchCloser.Close() + repo.catFileBatchCloser = nil + repo.catFileBatchInUse = false } repo.LastCommitCache = nil repo.tagCache = nil diff --git a/modules/git/repo_base_nogogit_test.go b/modules/git/repo_base_nogogit_test.go new file mode 100644 index 0000000000..a12bbb73c2 --- /dev/null +++ b/modules/git/repo_base_nogogit_test.go @@ -0,0 +1,26 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +//go:build !gogit + +package git + +import ( + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestRepoCatFileBatch(t *testing.T) { + t.Run("MissingRepoAndClose", func(t *testing.T) { + repo, err := OpenRepository(t.Context(), filepath.Join(testReposDir, "repo1_bare")) + require.NoError(t, err) + repo.Path = "/no-such" // when the repo is missing (it usually occurs during testing because the fixtures are synced frequently) + _, _, err = repo.CatFileBatch(t.Context()) + require.Error(t, err) + require.NoError(t, repo.Close()) // shouldn't panic + }) + + // TODO: test more methods and concurrency queries +} diff --git a/modules/git/repo_branch_nogogit.go b/modules/git/repo_branch_nogogit.go index f1b26b06ab..f925aab3e4 100644 --- a/modules/git/repo_branch_nogogit.go +++ b/modules/git/repo_branch_nogogit.go @@ -8,7 +8,6 @@ package git import ( "bufio" - "bytes" "context" "io" "strings" @@ -18,24 +17,24 @@ import ( ) // IsObjectExist returns true if the given object exists in the repository. 
+// FIXME: this function doesn't seem right, it is only used by GarbageCollectLFSMetaObjectsForRepo func (repo *Repository) IsObjectExist(name string) bool { if name == "" { return false } - wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { - log.Debug("Error writing to CatFileBatchCheck %v", err) + log.Debug("Error opening CatFileBatch %v", err) return false } defer cancel() - _, err = wr.Write([]byte(name + "\n")) + info, err := batch.QueryInfo(name) if err != nil { - log.Debug("Error writing to CatFileBatchCheck %v", err) + log.Debug("Error checking object info %v", err) return false } - sha, _, _, err := ReadBatchLine(rd) - return err == nil && bytes.HasPrefix(sha, []byte(strings.TrimSpace(name))) + return strings.HasPrefix(info.ID, name) // FIXME: this logic doesn't seem right, why "HasPrefix" } // IsReferenceExist returns true if given reference exists in the repository. @@ -44,18 +43,13 @@ func (repo *Repository) IsReferenceExist(name string) bool { return false } - wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { - log.Debug("Error writing to CatFileBatchCheck %v", err) + log.Error("Error opening CatFileBatch %v", err) return false } defer cancel() - _, err = wr.Write([]byte(name + "\n")) - if err != nil { - log.Debug("Error writing to CatFileBatchCheck %v", err) - return false - } - _, _, _, err = ReadBatchLine(rd) + _, err = batch.QueryInfo(name) return err == nil } @@ -100,94 +94,81 @@ func callShowRef(ctx context.Context, repoPath, trimPrefix string, extraArgs git } func WalkShowRef(ctx context.Context, repoPath string, extraArgs gitcmd.TrustedCmdArgs, skip, limit int, walkfn func(sha1, refname string) error) (countAll int, err error) { - stdoutReader, stdoutWriter := io.Pipe() - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - - go func() { - stderrBuilder := &strings.Builder{} 
- args := gitcmd.TrustedCmdArgs{"for-each-ref", "--format=%(objectname) %(refname)"} - args = append(args, extraArgs...) - err := gitcmd.NewCommand(args...). - WithDir(repoPath). - WithStdout(stdoutWriter). - WithStderr(stderrBuilder). - Run(ctx) - if err != nil { - if stderrBuilder.Len() == 0 { - _ = stdoutWriter.Close() - return - } - _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, stderrBuilder.String())) - } else { - _ = stdoutWriter.Close() - } - }() - i := 0 - bufReader := bufio.NewReader(stdoutReader) - for i < skip { - _, isPrefix, err := bufReader.ReadLine() - if err == io.EOF { - return i, nil - } - if err != nil { - return 0, err - } - if !isPrefix { - i++ - } + args := gitcmd.TrustedCmdArgs{"for-each-ref", "--format=%(objectname) %(refname)"} + args = append(args, extraArgs...) + cmd := gitcmd.NewCommand(args...) + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() + cmd.WithDir(repoPath). + WithPipelineFunc(func(gitcmd.Context) error { + bufReader := bufio.NewReader(stdoutReader) + for i < skip { + _, isPrefix, err := bufReader.ReadLine() + if err == io.EOF { + return nil + } + if err != nil { + return err + } + if !isPrefix { + i++ + } + } + for limit == 0 || i < skip+limit { + // The output of show-ref is simply a list: + // SP LF + sha, err := bufReader.ReadString(' ') + if err == io.EOF { + return nil + } + if err != nil { + return err + } + + branchName, err := bufReader.ReadString('\n') + if err == io.EOF { + // This shouldn't happen... 
but we'll tolerate it for the sake of peace + return nil + } + if err != nil { + return err + } + + if len(branchName) > 0 { + branchName = branchName[:len(branchName)-1] + } + + if len(sha) > 0 { + sha = sha[:len(sha)-1] + } + + err = walkfn(sha, branchName) + if err != nil { + return err + } + i++ + } + // count all refs + for limit != 0 { + _, isPrefix, err := bufReader.ReadLine() + if err == io.EOF { + return nil + } + if err != nil { + return err + } + if !isPrefix { + i++ + } + } + return nil + }) + err = cmd.RunWithStderr(ctx) + if errPipeline, ok := gitcmd.UnwrapPipelineError(err); ok { + return i, errPipeline // keep the old behavior: return pipeline error directly } - for limit == 0 || i < skip+limit { - // The output of show-ref is simply a list: - // SP LF - sha, err := bufReader.ReadString(' ') - if err == io.EOF { - return i, nil - } - if err != nil { - return 0, err - } - - branchName, err := bufReader.ReadString('\n') - if err == io.EOF { - // This shouldn't happen... but we'll tolerate it for the sake of peace - return i, nil - } - if err != nil { - return i, err - } - - if len(branchName) > 0 { - branchName = branchName[:len(branchName)-1] - } - - if len(sha) > 0 { - sha = sha[:len(sha)-1] - } - - err = walkfn(sha, branchName) - if err != nil { - return i, err - } - i++ - } - // count all refs - for limit != 0 { - _, isPrefix, err := bufReader.ReadLine() - if err == io.EOF { - return i, nil - } - if err != nil { - return 0, err - } - if !isPrefix { - i++ - } - } - return i, nil + return i, err } // GetRefsBySha returns all references filtered with prefix that belong to a sha commit hash diff --git a/modules/git/repo_commit.go b/modules/git/repo_commit.go index 4a441429f4..c10f73690c 100644 --- a/modules/git/repo_commit.go +++ b/modules/git/repo_commit.go @@ -226,66 +226,55 @@ type CommitsByFileAndRangeOptions struct { // CommitsByFileAndRange return the commits according revision file and the page func (repo *Repository) CommitsByFileAndRange(opts 
CommitsByFileAndRangeOptions) ([]*Commit, error) { - stdoutReader, stdoutWriter := io.Pipe() - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - go func() { - stderr := strings.Builder{} - gitCmd := gitcmd.NewCommand("rev-list"). - AddOptionFormat("--max-count=%d", setting.Git.CommitsRangeSize). - AddOptionFormat("--skip=%d", (opts.Page-1)*setting.Git.CommitsRangeSize) - gitCmd.AddDynamicArguments(opts.Revision) + gitCmd := gitcmd.NewCommand("rev-list"). + AddOptionFormat("--max-count=%d", setting.Git.CommitsRangeSize). + AddOptionFormat("--skip=%d", (opts.Page-1)*setting.Git.CommitsRangeSize) + gitCmd.AddDynamicArguments(opts.Revision) - if opts.Not != "" { - gitCmd.AddOptionValues("--not", opts.Not) - } - if opts.Since != "" { - gitCmd.AddOptionFormat("--since=%s", opts.Since) - } - if opts.Until != "" { - gitCmd.AddOptionFormat("--until=%s", opts.Until) - } - - gitCmd.AddDashesAndList(opts.File) - err := gitCmd.WithDir(repo.Path). - WithStdout(stdoutWriter). - WithStderr(&stderr). - Run(repo.Ctx) - if err != nil { - _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String())) - } else { - _ = stdoutWriter.Close() - } - }() - - objectFormat, err := repo.GetObjectFormat() - if err != nil { - return nil, err + if opts.Not != "" { + gitCmd.AddOptionValues("--not", opts.Not) } + if opts.Since != "" { + gitCmd.AddOptionFormat("--since=%s", opts.Since) + } + if opts.Until != "" { + gitCmd.AddOptionFormat("--until=%s", opts.Until) + } + gitCmd.AddDashesAndList(opts.File) - length := objectFormat.FullLength() - commits := []*Commit{} - shaline := make([]byte, length+1) - for { - n, err := io.ReadFull(stdoutReader, shaline) - if err != nil || n < length { - if err == io.EOF { - err = nil + var commits []*Commit + stdoutReader, stdoutReaderClose := gitCmd.MakeStdoutPipe() + defer stdoutReaderClose() + err := gitCmd.WithDir(repo.Path). 
+ WithPipelineFunc(func(context gitcmd.Context) error { + objectFormat, err := repo.GetObjectFormat() + if err != nil { + return err } - return commits, err - } - objectID, err := NewIDFromString(string(shaline[0:length])) - if err != nil { - return nil, err - } - commit, err := repo.getCommit(objectID) - if err != nil { - return nil, err - } - commits = append(commits, commit) - } + + length := objectFormat.FullLength() + shaline := make([]byte, length+1) + for { + n, err := io.ReadFull(stdoutReader, shaline) + if err != nil || n < length { + if err == io.EOF { + err = nil + } + return err + } + objectID, err := NewIDFromString(string(shaline[0:length])) + if err != nil { + return err + } + commit, err := repo.getCommit(objectID) + if err != nil { + return err + } + commits = append(commits, commit) + } + }). + RunWithStderr(repo.Ctx) + return commits, err } // FilesCountBetween return the number of files changed between two commits diff --git a/modules/git/repo_commit_gogit.go b/modules/git/repo_commit_gogit.go index c84aabde1a..550d153722 100644 --- a/modules/git/repo_commit_gogit.go +++ b/modules/git/repo_commit_gogit.go @@ -67,16 +67,6 @@ func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) { return NewIDFromString(actualCommitID) } -// IsCommitExist returns true if given commit exists in current repository. 
-func (repo *Repository) IsCommitExist(name string) bool { - hash, err := repo.ConvertToGitID(name) - if err != nil { - return false - } - _, err = repo.gogitRepo.CommitObject(plumbing.Hash(hash.RawValue())) - return err == nil -} - func (repo *Repository) getCommit(id ObjectID) (*Commit, error) { var tagObject *object.Tag diff --git a/modules/git/repo_commit_nogogit.go b/modules/git/repo_commit_nogogit.go index 3f27833fa6..2ddb527502 100644 --- a/modules/git/repo_commit_nogogit.go +++ b/modules/git/repo_commit_nogogit.go @@ -6,7 +6,6 @@ package git import ( - "bufio" "errors" "io" "strings" @@ -37,50 +36,31 @@ func (repo *Repository) ResolveReference(name string) (string, error) { // GetRefCommitID returns the last commit ID string of given reference (branch or tag). func (repo *Repository) GetRefCommitID(name string) (string, error) { - wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { return "", err } defer cancel() - _, err = wr.Write([]byte(name + "\n")) - if err != nil { - return "", err - } - shaBs, _, _, err := ReadBatchLine(rd) + info, err := batch.QueryInfo(name) if IsErrNotExist(err) { return "", ErrNotExist{name, ""} + } else if err != nil { + return "", err } - - return string(shaBs), nil -} - -// IsCommitExist returns true if given commit exists in current repository. -func (repo *Repository) IsCommitExist(name string) bool { - if err := ensureValidGitRepository(repo.Ctx, repo.Path); err != nil { - log.Error("IsCommitExist: %v", err) - return false - } - _, _, err := gitcmd.NewCommand("cat-file", "-e"). - AddDynamicArguments(name). - WithDir(repo.Path). 
- RunStdString(repo.Ctx) - return err == nil + return info.ID, nil } func (repo *Repository) getCommit(id ObjectID) (*Commit, error) { - wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { return nil, err } defer cancel() - - _, _ = wr.Write([]byte(id.String() + "\n")) - - return repo.getCommitFromBatchReader(wr, rd, id) + return repo.getCommitWithBatch(batch, id) } -func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio.Reader, id ObjectID) (*Commit, error) { - _, typ, size, err := ReadBatchLine(rd) +func (repo *Repository) getCommitWithBatch(batch CatFileBatch, id ObjectID) (*Commit, error) { + info, rd, err := batch.QueryContent(id.String()) if err != nil { if errors.Is(err, io.EOF) || IsErrNotExist(err) { return nil, ErrNotExist{ID: id.String()} @@ -88,13 +68,13 @@ func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio. return nil, err } - switch typ { + switch info.Type { case "missing": return nil, ErrNotExist{ID: id.String()} case "tag": // then we need to parse the tag // and load the commit - data, err := io.ReadAll(io.LimitReader(rd, size)) + data, err := io.ReadAll(io.LimitReader(rd, info.Size)) if err != nil { return nil, err } @@ -106,19 +86,9 @@ func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio. if err != nil { return nil, err } - - if _, err := wr.Write([]byte(tag.Object.String() + "\n")); err != nil { - return nil, err - } - - commit, err := repo.getCommitFromBatchReader(wr, rd, tag.Object) - if err != nil { - return nil, err - } - - return commit, nil + return repo.getCommitWithBatch(batch, tag.Object) case "commit": - commit, err := CommitFromReader(repo, id, io.LimitReader(rd, size)) + commit, err := CommitFromReader(repo, id, io.LimitReader(rd, info.Size)) if err != nil { return nil, err } @@ -129,8 +99,8 @@ func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio. 
return commit, nil default: - log.Debug("Unknown typ: %s", typ) - if err := DiscardFull(rd, size+1); err != nil { + log.Debug("Unknown cat-file object type: %s", info.Type) + if err := DiscardFull(rd, info.Size+1); err != nil { return nil, err } return nil, ErrNotExist{ @@ -152,16 +122,12 @@ func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) { } } - wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { return nil, err } defer cancel() - _, err = wr.Write([]byte(commitID + "\n")) - if err != nil { - return nil, err - } - sha, _, _, err := ReadBatchLine(rd) + info, err := batch.QueryInfo(commitID) if err != nil { if IsErrNotExist(err) { return nil, ErrNotExist{commitID, ""} @@ -169,5 +135,5 @@ func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) { return nil, err } - return MustIDFromString(string(sha)), nil + return MustIDFromString(info.ID), nil } diff --git a/modules/git/repo_compare.go b/modules/git/repo_compare.go index f60696a763..aa25e2ec20 100644 --- a/modules/git/repo_compare.go +++ b/modules/git/repo_compare.go @@ -18,32 +18,6 @@ import ( "code.gitea.io/gitea/modules/git/gitcmd" ) -// GetMergeBase checks and returns merge base of two branches and the reference used as base. -func (repo *Repository) GetMergeBase(tmpRemote, base, head string) (string, string, error) { - if tmpRemote == "" { - tmpRemote = "origin" - } - - if tmpRemote != "origin" { - tmpBaseName := RemotePrefix + tmpRemote + "/tmp_" + base - // Fetch commit into a temporary branch in order to be able to handle commits and tags - _, _, err := gitcmd.NewCommand("fetch", "--no-tags"). - AddDynamicArguments(tmpRemote). - AddDashesAndList(base + ":" + tmpBaseName). - WithDir(repo.Path). - RunStdString(repo.Ctx) - if err == nil { - base = tmpBaseName - } - } - - stdout, _, err := gitcmd.NewCommand("merge-base"). - AddDashesAndList(base, head). - WithDir(repo.Path). 
- RunStdString(repo.Ctx) - return strings.TrimSpace(stdout), base, err -} - type lineCountWriter struct { numLines int } @@ -60,7 +34,6 @@ func (l *lineCountWriter) Write(p []byte) (n int, err error) { func (repo *Repository) GetDiffNumChangedFiles(base, head string, directComparison bool) (int, error) { // Now there is git diff --shortstat but this appears to be slower than simply iterating with --nameonly w := &lineCountWriter{} - stderr := new(bytes.Buffer) separator := "..." if directComparison { @@ -72,25 +45,22 @@ func (repo *Repository) GetDiffNumChangedFiles(base, head string, directComparis AddDynamicArguments(base + separator + head). AddArguments("--"). WithDir(repo.Path). - WithStdout(w). - WithStderr(stderr). - Run(repo.Ctx); err != nil { - if strings.Contains(stderr.String(), "no merge base") { + WithStdoutCopy(w). + RunWithStderr(repo.Ctx); err != nil { + if strings.Contains(err.Stderr(), "no merge base") { // git >= 2.28 now returns an error if base and head have become unrelated. // previously it would return the results of git diff -z --name-only base head so let's try that... w = &lineCountWriter{} - stderr.Reset() if err = gitcmd.NewCommand("diff", "-z", "--name-only"). AddDynamicArguments(base, head). AddArguments("--"). WithDir(repo.Path). - WithStdout(w). - WithStderr(stderr). - Run(repo.Ctx); err == nil { + WithStdoutCopy(w). + RunWithStderr(repo.Ctx); err == nil { return w.numLines, nil } } - return 0, fmt.Errorf("%w: Stderr: %s", err, stderr) + return 0, err } return w.numLines, nil } @@ -99,11 +69,9 @@ var patchCommits = regexp.MustCompile(`^From\s(\w+)\s`) // GetDiff generates and returns patch data between given revisions, optimized for human readability func (repo *Repository) GetDiff(compareArg string, w io.Writer) error { - stderr := new(bytes.Buffer) return gitcmd.NewCommand("diff", "-p").AddDynamicArguments(compareArg). WithDir(repo.Path). - WithStdout(w). - WithStderr(stderr). + WithStdoutCopy(w). 
Run(repo.Ctx) } @@ -112,17 +80,15 @@ func (repo *Repository) GetDiffBinary(compareArg string, w io.Writer) error { return gitcmd.NewCommand("diff", "-p", "--binary", "--histogram"). AddDynamicArguments(compareArg). WithDir(repo.Path). - WithStdout(w). + WithStdoutCopy(w). Run(repo.Ctx) } // GetPatch generates and returns format-patch data between given revisions, able to be used with `git apply` func (repo *Repository) GetPatch(compareArg string, w io.Writer) error { - stderr := new(bytes.Buffer) return gitcmd.NewCommand("format-patch", "--binary", "--stdout").AddDynamicArguments(compareArg). WithDir(repo.Path). - WithStdout(w). - WithStderr(stderr). + WithStdoutCopy(w). Run(repo.Ctx) } diff --git a/modules/git/repo_index.go b/modules/git/repo_index.go index 4068f86bb2..1d040d5e0a 100644 --- a/modules/git/repo_index.go +++ b/modules/git/repo_index.go @@ -101,21 +101,17 @@ func (repo *Repository) RemoveFilesFromIndex(filenames ...string) error { return err } cmd := gitcmd.NewCommand("update-index", "--remove", "-z", "--index-info") - stdout := new(bytes.Buffer) - stderr := new(bytes.Buffer) - buffer := new(bytes.Buffer) + input := new(bytes.Buffer) for _, file := range filenames { if file != "" { // using format: mode SP type SP sha1 TAB path - buffer.WriteString("0 blob " + objectFormat.EmptyObjectID().String() + "\t" + file + "\000") + input.WriteString("0 blob " + objectFormat.EmptyObjectID().String() + "\t" + file + "\000") } } return cmd. WithDir(repo.Path). - WithStdin(bytes.NewReader(buffer.Bytes())). - WithStdout(stdout). - WithStderr(stderr). - Run(repo.Ctx) + WithStdinBytes(input.Bytes()). 
+ RunWithStderr(repo.Ctx) } type IndexObjectInfo struct { @@ -127,19 +123,15 @@ type IndexObjectInfo struct { // AddObjectsToIndex adds the provided object hashes to the index at the provided filenames func (repo *Repository) AddObjectsToIndex(objects ...IndexObjectInfo) error { cmd := gitcmd.NewCommand("update-index", "--add", "--replace", "-z", "--index-info") - stdout := new(bytes.Buffer) - stderr := new(bytes.Buffer) - buffer := new(bytes.Buffer) + input := new(bytes.Buffer) for _, object := range objects { // using format: mode SP type SP sha1 TAB path - buffer.WriteString(object.Mode + " blob " + object.Object.String() + "\t" + object.Filename + "\000") + input.WriteString(object.Mode + " blob " + object.Object.String() + "\t" + object.Filename + "\000") } return cmd. WithDir(repo.Path). - WithStdin(bytes.NewReader(buffer.Bytes())). - WithStdout(stdout). - WithStderr(stderr). - Run(repo.Ctx) + WithStdinBytes(input.Bytes()). + RunWithStderr(repo.Ctx) } // AddObjectToIndex adds the provided object hash to the index at the provided filename diff --git a/modules/git/repo_object.go b/modules/git/repo_object.go index 2a39a3c4d8..38e16b4646 100644 --- a/modules/git/repo_object.go +++ b/modules/git/repo_object.go @@ -5,8 +5,6 @@ package git import ( - "bytes" - "io" "strings" "code.gitea.io/gitea/modules/git/gitcmd" @@ -33,18 +31,12 @@ func (o ObjectType) Bytes() []byte { return []byte(o) } -type EmptyReader struct{} - -func (EmptyReader) Read(p []byte) (int, error) { - return 0, io.EOF -} - func (repo *Repository) GetObjectFormat() (ObjectFormat, error) { if repo != nil && repo.objectFormat != nil { return repo.objectFormat, nil } - str, err := repo.hashObject(EmptyReader{}, false) + str, err := repo.hashObjectBytes(nil, false) if err != nil { return nil, err } @@ -58,32 +50,28 @@ func (repo *Repository) GetObjectFormat() (ObjectFormat, error) { return repo.objectFormat, nil } -// HashObject takes a reader and returns hash for that reader -func (repo *Repository) 
HashObject(reader io.Reader) (ObjectID, error) { - idStr, err := repo.hashObject(reader, true) +// HashObjectBytes returns hash for the content +func (repo *Repository) HashObjectBytes(buf []byte) (ObjectID, error) { + idStr, err := repo.hashObjectBytes(buf, true) if err != nil { return nil, err } return NewIDFromString(idStr) } -func (repo *Repository) hashObject(reader io.Reader, save bool) (string, error) { +func (repo *Repository) hashObjectBytes(buf []byte, save bool) (string, error) { var cmd *gitcmd.Command if save { cmd = gitcmd.NewCommand("hash-object", "-w", "--stdin") } else { cmd = gitcmd.NewCommand("hash-object", "--stdin") } - stdout := new(bytes.Buffer) - stderr := new(bytes.Buffer) - err := cmd. + stdout, _, err := cmd. WithDir(repo.Path). - WithStdin(reader). - WithStdout(stdout). - WithStderr(stderr). - Run(repo.Ctx) + WithStdinBytes(buf). + RunStdString(repo.Ctx) if err != nil { return "", err } - return strings.TrimSpace(stdout.String()), nil + return strings.TrimSpace(stdout), nil } diff --git a/modules/git/repo_ref_nogogit.go b/modules/git/repo_ref_nogogit.go index 09bb0df7b8..c58992fa9d 100644 --- a/modules/git/repo_ref_nogogit.go +++ b/modules/git/repo_ref_nogogit.go @@ -15,75 +15,61 @@ import ( // GetRefsFiltered returns all references of the repository that matches patterm exactly or starting with. func (repo *Repository) GetRefsFiltered(pattern string) ([]*Reference, error) { - stdoutReader, stdoutWriter := io.Pipe() - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - - go func() { - stderrBuilder := &strings.Builder{} - err := gitcmd.NewCommand("for-each-ref"). - WithDir(repo.Path). - WithStdout(stdoutWriter). - WithStderr(stderrBuilder). 
- Run(repo.Ctx) - if err != nil { - _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, stderrBuilder.String())) - } else { - _ = stdoutWriter.Close() - } - }() - refs := make([]*Reference, 0) - bufReader := bufio.NewReader(stdoutReader) - for { - // The output of for-each-ref is simply a list: - // SP TAB LF - sha, err := bufReader.ReadString(' ') - if err == io.EOF { - break - } - if err != nil { - return nil, err - } - sha = sha[:len(sha)-1] + cmd := gitcmd.NewCommand("for-each-ref") + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() + err := cmd.WithDir(repo.Path). + WithPipelineFunc(func(context gitcmd.Context) error { + bufReader := bufio.NewReader(stdoutReader) + for { + // The output of for-each-ref is simply a list: + // SP TAB LF + sha, err := bufReader.ReadString(' ') + if err == io.EOF { + break + } + if err != nil { + return err + } + sha = sha[:len(sha)-1] - typ, err := bufReader.ReadString('\t') - if err == io.EOF { - // This should not happen, but we'll tolerate it - break - } - if err != nil { - return nil, err - } - typ = typ[:len(typ)-1] + typ, err := bufReader.ReadString('\t') + if err == io.EOF { + // This should not happen, but we'll tolerate it + break + } + if err != nil { + return err + } + typ = typ[:len(typ)-1] - refName, err := bufReader.ReadString('\n') - if err == io.EOF { - // This should not happen, but we'll tolerate it - break - } - if err != nil { - return nil, err - } - refName = refName[:len(refName)-1] + refName, err := bufReader.ReadString('\n') + if err == io.EOF { + // This should not happen, but we'll tolerate it + break + } + if err != nil { + return err + } + refName = refName[:len(refName)-1] - // refName cannot be HEAD but can be remotes or stash - if strings.HasPrefix(refName, RemotePrefix) || refName == "/refs/stash" { - continue - } + // refName cannot be HEAD but can be remotes or stash + if strings.HasPrefix(refName, RemotePrefix) || refName == "/refs/stash" { + continue 
+ } - if pattern == "" || strings.HasPrefix(refName, pattern) { - r := &Reference{ - Name: refName, - Object: MustIDFromString(sha), - Type: typ, - repo: repo, + if pattern == "" || strings.HasPrefix(refName, pattern) { + r := &Reference{ + Name: refName, + Object: MustIDFromString(sha), + Type: typ, + repo: repo, + } + refs = append(refs, r) + } } - refs = append(refs, r) - } - } - - return refs, nil + return nil + }).RunWithStderr(repo.Ctx) + return refs, err } diff --git a/modules/git/repo_stats.go b/modules/git/repo_stats.go index cfb35288fe..1dd77f05d4 100644 --- a/modules/git/repo_stats.go +++ b/modules/git/repo_stats.go @@ -5,9 +5,7 @@ package git import ( "bufio" - "context" "fmt" - "os" "sort" "strconv" "strings" @@ -55,15 +53,6 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) } stats.CommitCountInAllBranches = c - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - return nil, err - } - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - gitCmd := gitcmd.NewCommand("log", "--numstat", "--no-merges", "--pretty=format:---%n%h%n%aN%n%aE%n", "--date=iso"). AddOptionFormat("--since=%s", since) if len(branch) == 0 { @@ -72,13 +61,11 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) gitCmd.AddArguments("--first-parent").AddDynamicArguments(branch) } - stderr := new(strings.Builder) + stdoutReader, stdoutReaderClose := gitCmd.MakeStdoutPipe() + defer stdoutReaderClose() err = gitCmd. WithDir(repo.Path). - WithStdout(stdoutWriter). - WithStderr(stderr). 
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() + WithPipelineFunc(func(ctx gitcmd.Context) error { scanner := bufio.NewScanner(stdoutReader) scanner.Split(bufio.ScanLines) stats.CommitCount = 0 @@ -129,7 +116,6 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) } } if err = scanner.Err(); err != nil { - _ = stdoutReader.Close() return fmt.Errorf("GetCodeActivityStats scan: %w", err) } a := make([]*CodeActivityAuthor, 0, len(authors)) @@ -143,12 +129,11 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) stats.AuthorCount = int64(len(authors)) stats.ChangedFiles = int64(len(files)) stats.Authors = a - _ = stdoutReader.Close() return nil }). - Run(repo.Ctx) + RunWithStderr(repo.Ctx) if err != nil { - return nil, fmt.Errorf("Failed to get GetCodeActivityStats for repository.\nError: %w\nStderr: %s", err, stderr) + return nil, fmt.Errorf("GetCodeActivityStats: %w", err) } return stats, nil diff --git a/modules/git/repo_tag.go b/modules/git/repo_tag.go index 4ad0c6e5ab..2599236ae0 100644 --- a/modules/git/repo_tag.go +++ b/modules/git/repo_tag.go @@ -6,7 +6,6 @@ package git import ( "fmt" - "io" "strings" "code.gitea.io/gitea/modules/git/foreachref" @@ -115,51 +114,42 @@ func (repo *Repository) GetTagInfos(page, pageSize int) ([]*Tag, int, error) { // https://git-scm.com/docs/git-for-each-ref#Documentation/git-for-each-ref.txt-refname forEachRefFmt := foreachref.NewFormat("objecttype", "refname:lstrip=2", "object", "objectname", "creator", "contents", "contents:signature") - stdoutReader, stdoutWriter := io.Pipe() - defer stdoutReader.Close() - defer stdoutWriter.Close() - stderr := strings.Builder{} - - go func() { - err := gitcmd.NewCommand("for-each-ref"). - AddOptionFormat("--format=%s", forEachRefFmt.Flag()). - AddArguments("--sort", "-*creatordate", "refs/tags"). - WithDir(repo.Path). - WithStdout(stdoutWriter). - WithStderr(&stderr). 
- Run(repo.Ctx) - if err != nil { - _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, stderr.String())) - } else { - _ = stdoutWriter.Close() - } - }() - var tags []*Tag - parser := forEachRefFmt.Parser(stdoutReader) - for { - ref := parser.Next() - if ref == nil { - break - } + var tagsTotal int + cmd := gitcmd.NewCommand("for-each-ref") + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() + err := cmd.AddOptionFormat("--format=%s", forEachRefFmt.Flag()). + AddArguments("--sort", "-*creatordate", "refs/tags"). + WithDir(repo.Path). + WithPipelineFunc(func(context gitcmd.Context) error { + parser := forEachRefFmt.Parser(stdoutReader) + for { + ref := parser.Next() + if ref == nil { + break + } - tag, err := parseTagRef(ref) - if err != nil { - return nil, 0, fmt.Errorf("GetTagInfos: parse tag: %w", err) - } - tags = append(tags, tag) - } - if err := parser.Err(); err != nil { - return nil, 0, fmt.Errorf("GetTagInfos: parse output: %w", err) - } + tag, err := parseTagRef(ref) + if err != nil { + return fmt.Errorf("GetTagInfos: parse tag: %w", err) + } + tags = append(tags, tag) + } + if err := parser.Err(); err != nil { + return fmt.Errorf("GetTagInfos: parse output: %w", err) + } - sortTagsByTime(tags) - tagsTotal := len(tags) - if page != 0 { - tags = util.PaginateSlice(tags, page, pageSize).([]*Tag) - } + sortTagsByTime(tags) + tagsTotal = len(tags) + if page != 0 { + tags = util.PaginateSlice(tags, page, pageSize).([]*Tag) + } + return nil + }). + RunWithStderr(repo.Ctx) - return tags, tagsTotal, nil + return tags, tagsTotal, err } // parseTagRef parses a tag from a 'git for-each-ref'-produced reference. 
diff --git a/modules/git/repo_tag_nogogit.go b/modules/git/repo_tag_nogogit.go index 5f79b68a9a..a9ac040821 100644 --- a/modules/git/repo_tag_nogogit.go +++ b/modules/git/repo_tag_nogogit.go @@ -24,23 +24,19 @@ func (repo *Repository) IsTagExist(name string) bool { // GetTagType gets the type of the tag, either commit (simple) or tag (annotated) func (repo *Repository) GetTagType(id ObjectID) (string, error) { - wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { return "", err } defer cancel() - _, err = wr.Write([]byte(id.String() + "\n")) - if err != nil { - return "", err - } - _, typ, _, err := ReadBatchLine(rd) + info, err := batch.QueryInfo(id.String()) if err != nil { if IsErrNotExist(err) { return "", ErrNotExist{ID: id.String()} } return "", err } - return typ, nil + return info.Type, nil } func (repo *Repository) getTag(tagID ObjectID, name string) (*Tag, error) { @@ -88,22 +84,20 @@ func (repo *Repository) getTag(tagID ObjectID, name string) (*Tag, error) { } // The tag is an annotated tag with a message. 
- wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { return nil, err } defer cancel() - if _, err := wr.Write([]byte(tagID.String() + "\n")); err != nil { - return nil, err - } - _, typ, size, err := ReadBatchLine(rd) + info, rd, err := batch.QueryContent(tagID.String()) if err != nil { if errors.Is(err, io.EOF) || IsErrNotExist(err) { return nil, ErrNotExist{ID: tagID.String()} } return nil, err } + typ, size := info.Type, info.Size if typ != "tag" { if err := DiscardFull(rd, size+1); err != nil { return nil, err diff --git a/modules/git/repo_tree.go b/modules/git/repo_tree.go index 964342ba00..e65e2441ed 100644 --- a/modules/git/repo_tree.go +++ b/modules/git/repo_tree.go @@ -58,16 +58,12 @@ func (repo *Repository) CommitTree(author, committer *Signature, tree *Tree, opt cmd.AddArguments("--no-gpg-sign") } - stdout := new(bytes.Buffer) - stderr := new(bytes.Buffer) - err := cmd.WithEnv(env). + stdout, _, err := cmd.WithEnv(env). WithDir(repo.Path). - WithStdin(messageBytes). - WithStdout(stdout). - WithStderr(stderr). - Run(repo.Ctx) + WithStdinBytes(messageBytes.Bytes()). 
+ RunStdString(repo.Ctx) if err != nil { - return nil, gitcmd.ConcatenateError(err, stderr.String()) + return nil, err } - return NewIDFromString(strings.TrimSpace(stdout.String())) + return NewIDFromString(strings.TrimSpace(stdout)) } diff --git a/modules/git/repo_tree_nogogit.go b/modules/git/repo_tree_nogogit.go index 1954f85162..82a61072c9 100644 --- a/modules/git/repo_tree_nogogit.go +++ b/modules/git/repo_tree_nogogit.go @@ -10,24 +10,21 @@ import ( ) func (repo *Repository) getTree(id ObjectID) (*Tree, error) { - wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx) + batch, cancel, err := repo.CatFileBatch(repo.Ctx) if err != nil { return nil, err } defer cancel() - _, _ = wr.Write([]byte(id.String() + "\n")) - - // ignore the SHA - _, typ, size, err := ReadBatchLine(rd) + info, rd, err := batch.QueryContent(id.String()) if err != nil { return nil, err } - switch typ { + switch info.Type { case "tag": resolvedID := id - data, err := io.ReadAll(io.LimitReader(rd, size)) + data, err := io.ReadAll(io.LimitReader(rd, info.Size)) if err != nil { return nil, err } @@ -36,17 +33,14 @@ func (repo *Repository) getTree(id ObjectID) (*Tree, error) { return nil, err } - if _, err := wr.Write([]byte(tag.Object.String() + "\n")); err != nil { - return nil, err - } - commit, err := repo.getCommitFromBatchReader(wr, rd, tag.Object) + commit, err := repo.getCommitWithBatch(batch, tag.Object) if err != nil { return nil, err } commit.Tree.ResolvedID = resolvedID return &commit.Tree, nil case "commit": - commit, err := CommitFromReader(repo, id, io.LimitReader(rd, size)) + commit, err := CommitFromReader(repo, id, io.LimitReader(rd, info.Size)) if err != nil { return nil, err } @@ -62,14 +56,14 @@ func (repo *Repository) getTree(id ObjectID) (*Tree, error) { if err != nil { return nil, err } - tree.entries, err = catBatchParseTreeEntries(objectFormat, tree, rd, size) + tree.entries, err = catBatchParseTreeEntries(objectFormat, tree, rd, info.Size) if err != nil { return nil, err 
} tree.entriesParsed = true return tree, nil default: - if err := DiscardFull(rd, size+1); err != nil { + if err := DiscardFull(rd, info.Size+1); err != nil { return nil, err } return nil, ErrNotExist{ diff --git a/modules/git/submodule.go b/modules/git/submodule.go index 45059eae77..ed69cbe55d 100644 --- a/modules/git/submodule.go +++ b/modules/git/submodule.go @@ -7,7 +7,6 @@ import ( "bufio" "context" "fmt" - "os" "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/log" @@ -21,23 +20,15 @@ type TemplateSubmoduleCommit struct { // GetTemplateSubmoduleCommits returns a list of submodules paths and their commits from a repository // This function is only for generating new repos based on existing template, the template couldn't be too large. func GetTemplateSubmoduleCommits(ctx context.Context, repoPath string) (submoduleCommits []TemplateSubmoduleCommit, _ error) { - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - return nil, err - } - - err = gitcmd.NewCommand("ls-tree", "-r", "--", "HEAD"). - WithDir(repoPath). - WithStdout(stdoutWriter). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() - defer stdoutReader.Close() - + cmd := gitcmd.NewCommand("ls-tree", "-r", "--", "HEAD") + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() + err := cmd.WithDir(repoPath). 
+ WithPipelineFunc(func(ctx gitcmd.Context) error { scanner := bufio.NewScanner(stdoutReader) for scanner.Scan() { entry, err := parseLsTreeLine(scanner.Bytes()) if err != nil { - cancel() return err } if entry.EntryMode == EntryModeCommit { diff --git a/modules/git/tree_entry_mode.go b/modules/git/tree_entry_mode.go index f36c07bc2a..2ceba11374 100644 --- a/modules/git/tree_entry_mode.go +++ b/modules/git/tree_entry_mode.go @@ -4,7 +4,6 @@ package git import ( - "fmt" "strconv" ) @@ -55,21 +54,38 @@ func (e EntryMode) IsExecutable() bool { return e == EntryModeExec } -func ParseEntryMode(mode string) (EntryMode, error) { +func ParseEntryMode(mode string) EntryMode { switch mode { case "000000": - return EntryModeNoEntry, nil + return EntryModeNoEntry case "100644": - return EntryModeBlob, nil + return EntryModeBlob case "100755": - return EntryModeExec, nil + return EntryModeExec case "120000": - return EntryModeSymlink, nil + return EntryModeSymlink case "160000": - return EntryModeCommit, nil - case "040000", "040755": // git uses 040000 for tree object, but some users may get 040755 for unknown reasons - return EntryModeTree, nil + return EntryModeCommit + case "040000": + return EntryModeTree default: - return 0, fmt.Errorf("unparsable entry mode: %s", mode) + // git uses 040000 for tree object, but some users may get 040755 from non-standard git implementations + m, _ := strconv.ParseInt(mode, 8, 32) + modeInt := EntryMode(m) + switch modeInt & 0o770000 { + case 0o040000: + return EntryModeTree + case 0o160000: + return EntryModeCommit + case 0o120000: + return EntryModeSymlink + case 0o100000: + if modeInt&0o777 == 0o755 { + return EntryModeExec + } + return EntryModeBlob + default: + return EntryModeNoEntry + } } } diff --git a/modules/git/tree_entry_nogogit.go b/modules/git/tree_entry_nogogit.go index fd2f3c567f..0a19b38d3e 100644 --- a/modules/git/tree_entry_nogogit.go +++ b/modules/git/tree_entry_nogogit.go @@ -15,23 +15,19 @@ func (te *TreeEntry) Size() 
int64 { return te.size } - wr, rd, cancel, err := te.ptree.repo.CatFileBatchCheck(te.ptree.repo.Ctx) + batch, cancel, err := te.ptree.repo.CatFileBatch(te.ptree.repo.Ctx) if err != nil { log.Debug("error whilst reading size for %s in %s. Error: %v", te.ID.String(), te.ptree.repo.Path, err) return 0 } defer cancel() - _, err = wr.Write([]byte(te.ID.String() + "\n")) - if err != nil { - log.Debug("error whilst reading size for %s in %s. Error: %v", te.ID.String(), te.ptree.repo.Path, err) - return 0 - } - _, _, te.size, err = ReadBatchLine(rd) + info, err := batch.QueryInfo(te.ID.String()) if err != nil { log.Debug("error whilst reading size for %s in %s. Error: %v", te.ID.String(), te.ptree.repo.Path, err) return 0 } + te.size = info.Size te.sized = true return te.size } diff --git a/modules/git/tree_entry_test.go b/modules/git/tree_entry_test.go index b28abfb545..3df6eeab68 100644 --- a/modules/git/tree_entry_test.go +++ b/modules/git/tree_entry_test.go @@ -22,8 +22,36 @@ func TestEntriesCustomSort(t *testing.T) { &TreeEntry{name: "b-file", entryMode: EntryModeBlob}, } expected := slices.Clone(entries) - rand.Shuffle(len(entries), func(i, j int) { entries[i], entries[j] = entries[j], entries[i] }) - assert.NotEqual(t, expected, entries) + for slices.Equal(expected, entries) { + rand.Shuffle(len(entries), func(i, j int) { entries[i], entries[j] = entries[j], entries[i] }) + } entries.CustomSort(strings.Compare) assert.Equal(t, expected, entries) } + +func TestParseEntryMode(t *testing.T) { + tests := []struct { + modeStr string + expectMod EntryMode + }{ + {"000000", EntryModeNoEntry}, + {"000755", EntryModeNoEntry}, + + {"100644", EntryModeBlob}, + {"100755", EntryModeExec}, + + {"120000", EntryModeSymlink}, + {"120755", EntryModeSymlink}, + {"160000", EntryModeCommit}, + {"160755", EntryModeCommit}, + + {"040000", EntryModeTree}, + {"040755", EntryModeTree}, + + {"777777", EntryModeNoEntry}, // invalid mode + } + for _, test := range tests { + mod := 
ParseEntryMode(test.modeStr) + assert.Equal(t, test.expectMod, mod, "modeStr: %s", test.modeStr) + } +} diff --git a/modules/git/tree_nogogit.go b/modules/git/tree_nogogit.go index d0ddb1d041..d50c1ad629 100644 --- a/modules/git/tree_nogogit.go +++ b/modules/git/tree_nogogit.go @@ -27,30 +27,29 @@ func (t *Tree) ListEntries() (Entries, error) { } if t.repo != nil { - wr, rd, cancel, err := t.repo.CatFileBatch(t.repo.Ctx) + batch, cancel, err := t.repo.CatFileBatch(t.repo.Ctx) if err != nil { return nil, err } defer cancel() - _, _ = wr.Write([]byte(t.ID.String() + "\n")) - _, typ, sz, err := ReadBatchLine(rd) + info, rd, err := batch.QueryContent(t.ID.String()) if err != nil { return nil, err } - if typ == "commit" { - treeID, err := ReadTreeID(rd, sz) + + if info.Type == "commit" { + treeID, err := ReadTreeID(rd, info.Size) if err != nil && err != io.EOF { return nil, err } - _, _ = wr.Write([]byte(treeID + "\n")) - _, typ, sz, err = ReadBatchLine(rd) + info, rd, err = batch.QueryContent(treeID) if err != nil { return nil, err } } - if typ == "tree" { - t.entries, err = catBatchParseTreeEntries(t.ID.Type(), t, rd, sz) + if info.Type == "tree" { + t.entries, err = catBatchParseTreeEntries(t.ID.Type(), t, rd, info.Size) if err != nil { return nil, err } @@ -59,7 +58,7 @@ func (t *Tree) ListEntries() (Entries, error) { } // Not a tree just use ls-tree instead - if err := DiscardFull(rd, sz+1); err != nil { + if err := DiscardFull(rd, info.Size+1); err != nil { return nil, err } } diff --git a/modules/gitrepo/archive.go b/modules/gitrepo/archive.go index b78922e126..191a1bd2c0 100644 --- a/modules/gitrepo/archive.go +++ b/modules/gitrepo/archive.go @@ -8,7 +8,9 @@ import ( "fmt" "io" "os" + "path" "path/filepath" + "slices" "strings" "code.gitea.io/gitea/modules/git/gitcmd" @@ -16,7 +18,7 @@ import ( ) // CreateArchive create archive content to the target path -func CreateArchive(ctx context.Context, repo Repository, format string, target io.Writer, usePrefix bool, 
commitID string) error { +func CreateArchive(ctx context.Context, repo Repository, format string, target io.Writer, usePrefix bool, commitID string, paths []string) error { if format == "unknown" { return fmt.Errorf("unknown format: %v", format) } @@ -28,11 +30,13 @@ func CreateArchive(ctx context.Context, repo Repository, format string, target i cmd.AddOptionFormat("--format=%s", format) cmd.AddDynamicArguments(commitID) - var stderr strings.Builder - if err := RunCmd(ctx, repo, cmd.WithStdout(target).WithStderr(&stderr)); err != nil { - return gitcmd.ConcatenateError(err, stderr.String()) + paths = slices.Clone(paths) + for i := range paths { + // although "git archive" already ensures the paths won't go outside the repo, we still clean them here for safety + paths[i] = path.Clean(paths[i]) } - return nil + cmd.AddDynamicArguments(paths...) + return RunCmdWithStderr(ctx, repo, cmd.WithStdoutCopy(target)) } // CreateBundle create bundle content to the target path diff --git a/modules/gitrepo/blame.go b/modules/gitrepo/blame.go index bd64c748d4..2352da1760 100644 --- a/modules/gitrepo/blame.go +++ b/modules/gitrepo/blame.go @@ -8,20 +8,19 @@ import ( "bytes" "context" "io" - "os" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git/gitcmd" - "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" ) func LineBlame(ctx context.Context, repo Repository, revision, file string, line uint) (string, error) { - return RunCmdString(ctx, repo, + stdout, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("blame"). AddOptionFormat("-L %d,%d", line, line). AddOptionValues("-p", revision). 
AddDashesAndList(file)) + return stdout, err } // BlamePart represents block of blame - continuous lines with one sha @@ -34,8 +33,6 @@ type BlamePart struct { // BlameReader returns part of file blame one by one type BlameReader struct { - output io.WriteCloser - reader io.ReadCloser bufferedReader *bufio.Reader done chan error lastSha *string @@ -131,34 +128,42 @@ func (r *BlameReader) Close() error { err := <-r.done r.bufferedReader = nil - _ = r.reader.Close() - _ = r.output.Close() - for _, cleanup := range r.cleanupFuncs { - if cleanup != nil { - cleanup() - } - } + r.cleanup() return err } +func (r *BlameReader) cleanup() { + for _, cleanup := range r.cleanupFuncs { + cleanup() + } +} + // CreateBlameReader creates reader for given repository, commit and file -func CreateBlameReader(ctx context.Context, objectFormat git.ObjectFormat, repo Repository, commit *git.Commit, file string, bypassBlameIgnore bool) (rd *BlameReader, err error) { - var ignoreRevsFileName string - var ignoreRevsFileCleanup func() +func CreateBlameReader(ctx context.Context, objectFormat git.ObjectFormat, repo Repository, commit *git.Commit, file string, bypassBlameIgnore bool) (rd *BlameReader, retErr error) { defer func() { - if err != nil && ignoreRevsFileCleanup != nil { - ignoreRevsFileCleanup() + if retErr != nil { + rd.cleanup() } }() + rd = &BlameReader{ + done: make(chan error, 1), + objectFormat: objectFormat, + } + cmd := gitcmd.NewCommand("blame", "--porcelain") + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + rd.bufferedReader = bufio.NewReader(stdoutReader) + rd.cleanupFuncs = append(rd.cleanupFuncs, stdoutReaderClose) + if git.DefaultFeatures().CheckVersionAtLeast("2.23") && !bypassBlameIgnore { - ignoreRevsFileName, ignoreRevsFileCleanup, err = tryCreateBlameIgnoreRevsFile(commit) + ignoreRevsFileName, ignoreRevsFileCleanup, err := tryCreateBlameIgnoreRevsFile(commit) if err != nil && !git.IsErrNotExist(err) { return nil, err - } - if ignoreRevsFileName != "" 
{ + } else if err == nil { + rd.ignoreRevsFile = ignoreRevsFileName + rd.cleanupFuncs = append(rd.cleanupFuncs, ignoreRevsFileCleanup) // Possible improvement: use --ignore-revs-file /dev/stdin on unix // There is no equivalent on Windows. May be implemented if Gitea uses an external git backend. cmd.AddOptionValues("--ignore-revs-file", ignoreRevsFileName) @@ -167,35 +172,12 @@ func CreateBlameReader(ctx context.Context, objectFormat git.ObjectFormat, repo cmd.AddDynamicArguments(commit.ID.String()).AddDashesAndList(file) - done := make(chan error, 1) - reader, stdout, err := os.Pipe() - if err != nil { - return nil, err - } go func() { - stderr := bytes.Buffer{} // TODO: it doesn't work for directories (the directories shouldn't be "blamed"), and the "err" should be returned by "Read" but not by "Close" - err := RunCmd(ctx, repo, cmd.WithUseContextTimeout(true). - WithStdout(stdout). - WithStderr(&stderr), - ) - done <- err - _ = stdout.Close() - if err != nil { - log.Error("Error running git blame (dir: %v): %v, stderr: %v", repoPath, err, stderr.String()) - } + rd.done <- RunCmdWithStderr(ctx, repo, cmd) }() - bufferedReader := bufio.NewReader(reader) - return &BlameReader{ - output: stdout, - reader: reader, - bufferedReader: bufferedReader, - done: done, - ignoreRevsFile: ignoreRevsFileName, - objectFormat: objectFormat, - cleanupFuncs: []func(){ignoreRevsFileCleanup}, - }, nil + return rd, nil } func tryCreateBlameIgnoreRevsFile(commit *git.Commit) (string, func(), error) { diff --git a/modules/gitrepo/branch.go b/modules/gitrepo/branch.go index e05d75caf8..4c40d1fba3 100644 --- a/modules/gitrepo/branch.go +++ b/modules/gitrepo/branch.go @@ -36,14 +36,14 @@ func GetBranchCommitID(ctx context.Context, repo Repository, branch string) (str // SetDefaultBranch sets default branch of repository. func SetDefaultBranch(ctx context.Context, repo Repository, name string) error { - _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "HEAD"). 
+ _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "HEAD"). AddDynamicArguments(git.BranchPrefix+name)) return err } // GetDefaultBranch gets default branch of repository. func GetDefaultBranch(ctx context.Context, repo Repository) (string, error) { - stdout, err := RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "HEAD")) + stdout, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "HEAD")) if err != nil { return "", err } @@ -56,7 +56,7 @@ func GetDefaultBranch(ctx context.Context, repo Repository) (string, error) { // IsReferenceExist returns true if given reference exists in the repository. func IsReferenceExist(ctx context.Context, repo Repository, name string) bool { - _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("show-ref", "--verify").AddDashesAndList(name)) + _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("show-ref", "--verify").AddDashesAndList(name)) return err == nil } @@ -76,7 +76,7 @@ func DeleteBranch(ctx context.Context, repo Repository, name string, force bool) } cmd.AddDashesAndList(name) - _, err := RunCmdString(ctx, repo, cmd) + _, _, err := RunCmdString(ctx, repo, cmd) return err } @@ -85,12 +85,12 @@ func CreateBranch(ctx context.Context, repo Repository, branch, oldbranchOrCommi cmd := gitcmd.NewCommand("branch") cmd.AddDashesAndList(branch, oldbranchOrCommit) - _, err := RunCmdString(ctx, repo, cmd) + _, _, err := RunCmdString(ctx, repo, cmd) return err } // RenameBranch rename a branch func RenameBranch(ctx context.Context, repo Repository, from, to string) error { - _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("branch", "-m").AddDynamicArguments(from, to)) + _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("branch", "-m").AddDynamicArguments(from, to)) return err } diff --git a/modules/gitrepo/cat_file.go b/modules/gitrepo/cat_file.go index c6ac74756f..42ca23acde 100644 --- a/modules/gitrepo/cat_file.go +++ b/modules/gitrepo/cat_file.go @@ -9,6 +9,6 @@ import ( 
"code.gitea.io/gitea/modules/git" ) -func NewBatch(ctx context.Context, repo Repository) (*git.Batch, error) { +func NewBatch(ctx context.Context, repo Repository) (git.CatFileBatchCloser, error) { return git.NewBatch(ctx, repoPath(repo)) } diff --git a/modules/gitrepo/command.go b/modules/gitrepo/command.go index d4cb6093fc..fd21b9a725 100644 --- a/modules/gitrepo/command.go +++ b/modules/gitrepo/command.go @@ -13,11 +13,14 @@ func RunCmd(ctx context.Context, repo Repository, cmd *gitcmd.Command) error { return cmd.WithDir(repoPath(repo)).WithParentCallerInfo().Run(ctx) } -func RunCmdString(ctx context.Context, repo Repository, cmd *gitcmd.Command) (string, error) { - res, _, err := cmd.WithDir(repoPath(repo)).WithParentCallerInfo().RunStdString(ctx) - return res, err +func RunCmdString(ctx context.Context, repo Repository, cmd *gitcmd.Command) (string, string, gitcmd.RunStdError) { + return cmd.WithDir(repoPath(repo)).WithParentCallerInfo().RunStdString(ctx) } -func RunCmdBytes(ctx context.Context, repo Repository, cmd *gitcmd.Command) ([]byte, []byte, error) { +func RunCmdBytes(ctx context.Context, repo Repository, cmd *gitcmd.Command) ([]byte, []byte, gitcmd.RunStdError) { return cmd.WithDir(repoPath(repo)).WithParentCallerInfo().RunStdBytes(ctx) } + +func RunCmdWithStderr(ctx context.Context, repo Repository, cmd *gitcmd.Command) gitcmd.RunStdError { + return cmd.WithDir(repoPath(repo)).WithParentCallerInfo().RunWithStderr(ctx) +} diff --git a/modules/gitrepo/commit.go b/modules/gitrepo/commit.go index da0f3b85a2..0ab17862fe 100644 --- a/modules/gitrepo/commit.go +++ b/modules/gitrepo/commit.go @@ -88,7 +88,7 @@ func AllCommitsCount(ctx context.Context, repo Repository, hidePRRefs bool, file cmd.AddDashesAndList(files...) 
} - stdout, err := RunCmdString(ctx, repo, cmd) + stdout, _, err := RunCmdString(ctx, repo, cmd) if err != nil { return 0, err } @@ -102,7 +102,7 @@ func GetFullCommitID(ctx context.Context, repo Repository, shortID string) (stri // GetLatestCommitTime returns time for latest commit in repository (across all branches) func GetLatestCommitTime(ctx context.Context, repo Repository) (time.Time, error) { - stdout, err := RunCmdString(ctx, repo, + stdout, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("for-each-ref", "--sort=-committerdate", git.BranchPrefix, "--count", "1", "--format=%(committerdate)")) if err != nil { return time.Time{}, err diff --git a/modules/gitrepo/commit_file.go b/modules/gitrepo/commit_file.go index cd4bb340d0..437b3b51ad 100644 --- a/modules/gitrepo/commit_file.go +++ b/modules/gitrepo/commit_file.go @@ -5,7 +5,6 @@ package gitrepo import ( "bufio" - "bytes" "context" "io" @@ -68,24 +67,20 @@ func parseCommitFileStatus(fileStatus *CommitFileStatus, stdout io.Reader) { // GetCommitFileStatus returns file status of commit in given repository. func GetCommitFileStatus(ctx context.Context, repo Repository, commitID string) (*CommitFileStatus, error) { - stdout, w := io.Pipe() + cmd := gitcmd.NewCommand("log", "--name-status", "-m", "--pretty=format:", "--first-parent", "--no-renames", "-z", "-1") + stdout, stdoutClose := cmd.MakeStdoutPipe() + defer stdoutClose() done := make(chan struct{}) fileStatus := NewCommitFileStatus() go func() { parseCommitFileStatus(fileStatus, stdout) close(done) }() - - stderr := new(bytes.Buffer) - err := gitcmd.NewCommand("log", "--name-status", "-m", "--pretty=format:", "--first-parent", "--no-renames", "-z", "-1"). - AddDynamicArguments(commitID). + err := cmd.AddDynamicArguments(commitID). WithDir(repoPath(repo)). - WithStdout(w). - WithStderr(stderr). 
- Run(ctx) - w.Close() // Close writer to exit parsing goroutine + RunWithStderr(ctx) if err != nil { - return nil, gitcmd.ConcatenateError(err, stderr.String()) + return nil, err } <-done diff --git a/modules/gitrepo/compare.go b/modules/gitrepo/compare.go index b8e4c30d6c..06cf880d99 100644 --- a/modules/gitrepo/compare.go +++ b/modules/gitrepo/compare.go @@ -22,7 +22,7 @@ type DivergeObject struct { func GetDivergingCommits(ctx context.Context, repo Repository, baseBranch, targetBranch string) (*DivergeObject, error) { cmd := gitcmd.NewCommand("rev-list", "--count", "--left-right"). AddDynamicArguments(baseBranch + "..." + targetBranch).AddArguments("--") - stdout, err1 := RunCmdString(ctx, repo, cmd) + stdout, _, err1 := RunCmdString(ctx, repo, cmd) if err1 != nil { return nil, err1 } diff --git a/modules/gitrepo/config.go b/modules/gitrepo/config.go index bc1746fc3f..9be3ef94ae 100644 --- a/modules/gitrepo/config.go +++ b/modules/gitrepo/config.go @@ -12,7 +12,7 @@ import ( ) func GitConfigGet(ctx context.Context, repo Repository, key string) (string, error) { - result, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config", "--get"). + result, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config", "--get"). AddDynamicArguments(key)) if err != nil { return "", err @@ -27,7 +27,7 @@ func getRepoConfigLockKey(repoStoragePath string) string { // GitConfigAdd add a git configuration key to a specific value for the given repository. func GitConfigAdd(ctx context.Context, repo Repository, key, value string) error { return globallock.LockAndDo(ctx, getRepoConfigLockKey(repo.RelativePath()), func(ctx context.Context) error { - _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config", "--add"). + _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config", "--add"). 
AddDynamicArguments(key, value)) return err }) @@ -38,7 +38,7 @@ func GitConfigAdd(ctx context.Context, repo Repository, key, value string) error // If the key exists, it will be updated to the new value. func GitConfigSet(ctx context.Context, repo Repository, key, value string) error { return globallock.LockAndDo(ctx, getRepoConfigLockKey(repo.RelativePath()), func(ctx context.Context) error { - _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config"). + _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config"). AddDynamicArguments(key, value)) return err }) diff --git a/modules/gitrepo/diff.go b/modules/gitrepo/diff.go index ad7f24762f..0092cf0bb8 100644 --- a/modules/gitrepo/diff.go +++ b/modules/gitrepo/diff.go @@ -4,7 +4,6 @@ package gitrepo import ( - "bytes" "context" "fmt" "io" @@ -22,7 +21,7 @@ func GetDiffShortStatByCmdArgs(ctx context.Context, repo Repository, trustedArgs // we get: // " 9902 files changed, 2034198 insertions(+), 298800 deletions(-)\n" cmd := gitcmd.NewCommand("diff", "--shortstat").AddArguments(trustedArgs...).AddDynamicArguments(dynamicArgs...) - stdout, err := RunCmdString(ctx, repo, cmd) + stdout, _, err := RunCmdString(ctx, repo, cmd) if err != nil { return 0, 0, 0, err } @@ -65,12 +64,8 @@ func parseDiffStat(stdout string) (numFiles, totalAdditions, totalDeletions int, // GetReverseRawDiff dumps the reverse diff results of repository in given commit ID to io.Writer. func GetReverseRawDiff(ctx context.Context, repo Repository, commitID string, writer io.Writer) error { - stderr := new(bytes.Buffer) - if err := RunCmd(ctx, repo, gitcmd.NewCommand("show", "--pretty=format:revert %H%n", "-R"). + return RunCmdWithStderr(ctx, repo, gitcmd.NewCommand("show", "--pretty=format:revert %H%n", "-R"). AddDynamicArguments(commitID). - WithStdout(writer). 
- WithStderr(stderr)); err != nil { - return fmt.Errorf("GetReverseRawDiff: %w - %s", err, stderr) - } - return nil + WithStdoutCopy(writer), + ) } diff --git a/modules/gitrepo/fetch.go b/modules/gitrepo/fetch.go new file mode 100644 index 0000000000..0474d6111e --- /dev/null +++ b/modules/gitrepo/fetch.go @@ -0,0 +1,28 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package gitrepo + +import ( + "context" + + "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/globallock" +) + +// FetchRemoteCommit fetches a specific commit and its related objects from a remote +// repository into the managed repository. +// +// If no reference (branch, tag, or other ref) points to the fetched commit, it will +// be treated as unreachable and cleaned up by `git gc` after the default prune +// expiration period (2 weeks). Ref: https://www.kernel.org/pub/software/scm/git/docs/git-gc.html +// +// This behavior is sufficient for temporary operations, such as determining the +// merge base between commits. +func FetchRemoteCommit(ctx context.Context, repo, remoteRepo Repository, commitID string) error { + return globallock.LockAndDo(ctx, getRepoWriteLockKey(repo.RelativePath()), func(ctx context.Context) error { + return RunCmd(ctx, repo, gitcmd.NewCommand("fetch", "--no-tags"). + AddDynamicArguments(repoPath(remoteRepo)). 
+ AddDynamicArguments(commitID)) + }) +} diff --git a/modules/gitrepo/gitrepo.go b/modules/gitrepo/gitrepo.go index 3a9b0a1c89..535d72ed98 100644 --- a/modules/gitrepo/gitrepo.go +++ b/modules/gitrepo/gitrepo.go @@ -80,7 +80,12 @@ func DeleteRepository(ctx context.Context, repo Repository) error { // RenameRepository renames a repository's name on disk func RenameRepository(ctx context.Context, repo, newRepo Repository) error { - if err := util.Rename(repoPath(repo), repoPath(newRepo)); err != nil { + dstDir := repoPath(newRepo) + if err := os.MkdirAll(filepath.Dir(dstDir), os.ModePerm); err != nil { + return fmt.Errorf("Failed to create dir %s: %w", filepath.Dir(dstDir), err) + } + + if err := util.Rename(repoPath(repo), dstDir); err != nil { return fmt.Errorf("rename repository directory: %w", err) } return nil @@ -116,5 +121,8 @@ func RemoveRepoFileOrDir(ctx context.Context, repo Repository, relativeFileOrDir func CreateRepoFile(ctx context.Context, repo Repository, relativeFilePath string) (io.WriteCloser, error) { absoluteFilePath := filepath.Join(repoPath(repo), relativeFilePath) + if err := os.MkdirAll(filepath.Dir(absoluteFilePath), os.ModePerm); err != nil { + return nil, err + } return os.Create(absoluteFilePath) } diff --git a/modules/gitrepo/main_test.go b/modules/gitrepo/main_test.go index 6e6636ce77..d41a1a6ad4 100644 --- a/modules/gitrepo/main_test.go +++ b/modules/gitrepo/main_test.go @@ -4,29 +4,21 @@ package gitrepo import ( - "os" "path/filepath" "testing" - "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/tempdir" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/test" ) func TestMain(m *testing.M) { - gitHomePath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("git-home") - if err != nil { - log.Fatal("Unable to create temp dir: %v", err) - } - defer cleanup() - // resolve repository path relative to the test directory testRootDir := test.SetupGiteaRoot() 
repoPath = func(repo Repository) string { - return filepath.Join(testRootDir, "/modules/git/tests/repos", repo.RelativePath()) + if filepath.IsAbs(repo.RelativePath()) { + return repo.RelativePath() // for testing purpose only + } + return filepath.Join(testRootDir, "modules/git/tests/repos", repo.RelativePath()) } - - setting.Git.HomePath = gitHomePath - os.Exit(m.Run()) + git.RunGitTests(m) } diff --git a/modules/gitrepo/merge.go b/modules/gitrepo/merge.go new file mode 100644 index 0000000000..8d58e21c8d --- /dev/null +++ b/modules/gitrepo/merge.go @@ -0,0 +1,22 @@ +// Copyright 2025 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package gitrepo + +import ( + "context" + "fmt" + "strings" + + "code.gitea.io/gitea/modules/git/gitcmd" +) + +// MergeBase checks and returns merge base of two commits. +func MergeBase(ctx context.Context, repo Repository, baseCommitID, headCommitID string) (string, error) { + mergeBase, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("merge-base"). + AddDashesAndList(baseCommitID, headCommitID)) + if err != nil { + return "", fmt.Errorf("get merge-base of %s and %s failed: %w", baseCommitID, headCommitID, err) + } + return strings.TrimSpace(mergeBase), nil +} diff --git a/modules/gitrepo/merge_tree.go b/modules/gitrepo/merge_tree.go new file mode 100644 index 0000000000..6151b1179f --- /dev/null +++ b/modules/gitrepo/merge_tree.go @@ -0,0 +1,59 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package gitrepo + +import ( + "bufio" + "context" + "fmt" + + "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/util" +) + +const MaxConflictedDetectFiles = 10 + +// MergeTree performs a merge between two commits (baseRef and headRef) with an optional merge base. +// It returns the resulting tree hash, a list of conflicted files (if any), and an error if the operation fails. 
+// If there are no conflicts, the list of conflicted files will be nil. +func MergeTree(ctx context.Context, repo Repository, baseRef, headRef, mergeBase string) (treeID string, isErrHasConflicts bool, conflictFiles []string, _ error) { + cmd := gitcmd.NewCommand("merge-tree", "--write-tree", "-z", "--name-only", "--no-messages"). + AddOptionFormat("--merge-base=%s", mergeBase). + AddDynamicArguments(baseRef, headRef) + + stdout, stdoutClose := cmd.MakeStdoutPipe() + defer stdoutClose() + cmd.WithPipelineFunc(func(ctx gitcmd.Context) error { + // https://git-scm.com/docs/git-merge-tree/2.38.0#OUTPUT + // For a conflicted merge, the output is: + // NUL + // NUL + // NUL + // ... + scanner := bufio.NewScanner(stdout) + scanner.Split(util.BufioScannerSplit(0)) + for scanner.Scan() { + line := scanner.Text() + if treeID == "" { // first line is tree ID + treeID = line + continue + } + conflictFiles = append(conflictFiles, line) + if len(conflictFiles) >= MaxConflictedDetectFiles { + break + } + } + return scanner.Err() + }) + + err := RunCmdWithStderr(ctx, repo, cmd) + // For a successful, non-conflicted merge, the exit status is 0. When the merge has conflicts, the exit status is 1. + // A merge can have conflicts without having individual files conflict + // https://git-scm.com/docs/git-merge-tree/2.38.0#_mistakes_to_avoid + isErrHasConflicts = gitcmd.IsErrorExitCode(err, 1) + if err == nil || isErrHasConflicts { + return treeID, isErrHasConflicts, conflictFiles, nil + } + return "", false, nil, fmt.Errorf("run merge-tree failed: %w", err) +} diff --git a/modules/gitrepo/merge_tree_test.go b/modules/gitrepo/merge_tree_test.go new file mode 100644 index 0000000000..9327a0c3d8 --- /dev/null +++ b/modules/gitrepo/merge_tree_test.go @@ -0,0 +1,82 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package gitrepo + +import ( + "path/filepath" + "testing" + + "code.gitea.io/gitea/modules/git/gitcmd" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func prepareRepoDirRenameConflict(t *testing.T) string { + repoDir := filepath.Join(t.TempDir(), "repo-dir-rename-conflict.git") + require.NoError(t, gitcmd.NewCommand("init", "--bare").AddDynamicArguments(repoDir).Run(t.Context())) + stdin := `blob +mark :1 +data 2 +b + +blob +mark :2 +data 2 +c + +reset refs/heads/master +commit refs/heads/master +mark :3 +author test 1769202331 -0800 +committer test 1769202331 -0800 +data 2 +O +M 100644 :1 z/b +M 100644 :2 z/c + +commit refs/heads/split +mark :4 +author test 1769202336 -0800 +committer test 1769202336 -0800 +data 2 +A +from :3 +M 100644 :2 w/c +M 100644 :1 y/b +D z/b +D z/c + +blob +mark :5 +data 2 +d + +commit refs/heads/add +mark :6 +author test 1769202342 -0800 +committer test 1769202342 -0800 +data 2 +B +from :3 +M 100644 :5 z/d +` + require.NoError(t, gitcmd.NewCommand("fast-import").WithDir(repoDir).WithStdinBytes([]byte(stdin)).Run(t.Context())) + return repoDir +} + +func TestMergeTreeDirectoryRenameConflictWithoutFiles(t *testing.T) { + repoDir := prepareRepoDirRenameConflict(t) + require.DirExists(t, repoDir) + repo := &mockRepository{path: repoDir} + + mergeBase, err := MergeBase(t.Context(), repo, "add", "split") + require.NoError(t, err) + + treeID, conflicted, conflictedFiles, err := MergeTree(t.Context(), repo, "add", "split", mergeBase) + require.NoError(t, err) + assert.True(t, conflicted) + assert.Empty(t, conflictedFiles) + assert.Equal(t, "5e3dd4cfc5b11e278a35b2daa83b7274175e3ab1", treeID) +} diff --git a/modules/gitrepo/remote.go b/modules/gitrepo/remote.go index ce43988461..3cbc34eedb 100644 --- a/modules/gitrepo/remote.go +++ b/modules/gitrepo/remote.go @@ -6,8 +6,6 @@ package gitrepo import ( "context" "errors" - "io" - "time" "code.gitea.io/gitea/modules/git" 
"code.gitea.io/gitea/modules/git/gitcmd" @@ -36,7 +34,7 @@ func GitRemoteAdd(ctx context.Context, repo Repository, remoteName, remoteURL st return errors.New("unknown remote option: " + string(options[0])) } } - _, err := RunCmdString(ctx, repo, cmd.AddDynamicArguments(remoteName, remoteURL)) + _, _, err := RunCmdString(ctx, repo, cmd.AddDynamicArguments(remoteName, remoteURL)) return err }) } @@ -44,7 +42,7 @@ func GitRemoteAdd(ctx context.Context, repo Repository, remoteName, remoteURL st func GitRemoteRemove(ctx context.Context, repo Repository, remoteName string) error { return globallock.LockAndDo(ctx, getRepoConfigLockKey(repo.RelativePath()), func(ctx context.Context) error { cmd := gitcmd.NewCommand("remote", "rm").AddDynamicArguments(remoteName) - _, err := RunCmdString(ctx, repo, cmd) + _, _, err := RunCmdString(ctx, repo, cmd) return err }) } @@ -60,21 +58,3 @@ func GitRemoteGetURL(ctx context.Context, repo Repository, remoteName string) (* } return giturl.ParseGitURL(addr) } - -// GitRemotePrune prunes the remote branches that no longer exist in the remote repository. -func GitRemotePrune(ctx context.Context, repo Repository, remoteName string, timeout time.Duration, stdout, stderr io.Writer) error { - return RunCmd(ctx, repo, gitcmd.NewCommand("remote", "prune"). - AddDynamicArguments(remoteName). - WithTimeout(timeout). - WithStdout(stdout). - WithStderr(stderr)) -} - -// GitRemoteUpdatePrune updates the remote branches and prunes the ones that no longer exist in the remote repository. -func GitRemoteUpdatePrune(ctx context.Context, repo Repository, remoteName string, timeout time.Duration, stdout, stderr io.Writer) error { - return RunCmd(ctx, repo, gitcmd.NewCommand("remote", "update", "--prune"). - AddDynamicArguments(remoteName). - WithTimeout(timeout). - WithStdout(stdout). 
- WithStderr(stderr)) -} diff --git a/modules/gitrepo/repo_lock.go b/modules/gitrepo/repo_lock.go new file mode 100644 index 0000000000..2eb89ce807 --- /dev/null +++ b/modules/gitrepo/repo_lock.go @@ -0,0 +1,10 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package gitrepo + +// getRepoWriteLockKey returns the global lock key for write operations on the repository. +// Parallel write operations on the same git repository should be avoided to prevent data corruption. +func getRepoWriteLockKey(repoStoragePath string) string { + return "repo-write:" + repoStoragePath +} diff --git a/modules/graceful/manager.go b/modules/graceful/manager.go index ee1872b999..51bd5a2334 100644 --- a/modules/graceful/manager.go +++ b/modules/graceful/manager.go @@ -74,12 +74,6 @@ func (g *Manager) RunWithCancel(rc RunCanceler) { g.RunAtShutdown(context.Background(), rc.Cancel) g.runningServerWaitGroup.Add(1) defer g.runningServerWaitGroup.Done() - defer func() { - if err := recover(); err != nil { - log.Critical("PANIC during RunWithCancel: %v\nStacktrace: %s", err, log.Stack(2)) - g.doShutdown() - } - }() rc.Run() } @@ -89,12 +83,6 @@ func (g *Manager) RunWithCancel(rc RunCanceler) { func (g *Manager) RunWithShutdownContext(run func(context.Context)) { g.runningServerWaitGroup.Add(1) defer g.runningServerWaitGroup.Done() - defer func() { - if err := recover(); err != nil { - log.Critical("PANIC during RunWithShutdownContext: %v\nStacktrace: %s", err, log.Stack(2)) - g.doShutdown() - } - }() ctx := g.ShutdownContext() pprof.SetGoroutineLabels(ctx) // We don't have a label to restore back to but I think this is fine run(ctx) diff --git a/modules/highlight/highlight.go b/modules/highlight/highlight.go index 2b13e9c4ce..68f523c6ca 100644 --- a/modules/highlight/highlight.go +++ b/modules/highlight/highlight.go @@ -12,7 +12,6 @@ import ( "html/template" "io" "path" - "path/filepath" "strings" "sync" @@ -25,35 +24,32 @@ import ( 
"github.com/alecthomas/chroma/v2/formatters/html" "github.com/alecthomas/chroma/v2/lexers" "github.com/alecthomas/chroma/v2/styles" - lru "github.com/hashicorp/golang-lru/v2" + "github.com/go-enry/go-enry/v2" ) // don't index files larger than this many bytes for performance purposes const sizeLimit = 1024 * 1024 +type globalVarsType struct { + highlightMapping map[string]string + githubStyles *chroma.Style +} + var ( - // For custom user mapping - highlightMapping = map[string]string{} - - once sync.Once - - cache *lru.TwoQueueCache[string, any] - - githubStyles = styles.Get("github") + globalVarsMu sync.Mutex + globalVarsPtr *globalVarsType ) -// NewContext loads custom highlight map from local config -func NewContext() { - once.Do(func() { - highlightMapping = setting.GetHighlightMapping() - - // The size 512 is simply a conservative rule of thumb - c, err := lru.New2Q[string, any](512) - if err != nil { - panic(fmt.Sprintf("failed to initialize LRU cache for highlighter: %s", err)) - } - cache = c - }) +func globalVars() *globalVarsType { + // in the future, the globalVars might need to be re-initialized when settings change, so don't use sync.Once here + globalVarsMu.Lock() + defer globalVarsMu.Unlock() + if globalVarsPtr == nil { + globalVarsPtr = &globalVarsType{} + globalVarsPtr.githubStyles = styles.Get("github") + globalVarsPtr.highlightMapping = setting.GetHighlightMapping() + } + return globalVarsPtr } // UnsafeSplitHighlightedLines splits highlighted code into lines preserving HTML tags @@ -88,10 +84,56 @@ func UnsafeSplitHighlightedLines(code template.HTML) (ret [][]byte) { } } -// Code returns an HTML version of code string with chroma syntax highlighting classes and the matched lexer name -func Code(fileName, language, code string) (output template.HTML, lexerName string) { - NewContext() +func getChromaLexerByLanguage(fileName, lang string) chroma.Lexer { + lang, _, _ = strings.Cut(lang, "?") // maybe, the value from gitattributes might contain `?` 
parameters? + ext := path.Ext(fileName) + // the "lang" might come from enry, it has different naming for some languages + switch lang { + case "F#": + lang = "FSharp" + case "Pascal": + lang = "ObjectPascal" + case "C": + if ext == ".C" || ext == ".H" { + lang = "C++" + } + } + // lexers.Get is slow if the language name can't be matched directly: it does extra "Match" call to iterate all lexers + return lexers.Get(lang) +} +// GetChromaLexerWithFallback returns a chroma lexer by given file name, language and code content. All parameters can be optional. +// When code content is provided, it will be slow if no lexer is found by file name or language. +// If no lexer is found, it will return the fallback lexer. +func GetChromaLexerWithFallback(fileName, lang string, code []byte) (lexer chroma.Lexer) { + if lang != "" { + lexer = getChromaLexerByLanguage(fileName, lang) + } + + if lexer == nil { + fileExt := path.Ext(fileName) + if val, ok := globalVars().highlightMapping[fileExt]; ok { + lexer = getChromaLexerByLanguage(fileName, val) // use mapped value to find lexer + } + } + + if lexer == nil { + // when using "code" to detect, analyze.GetCodeLanguage is slower, it iterates many rules to detect language from content + // this is the old logic: use enry to detect language, and use chroma to render, but their naming is different for some languages + enryLanguage := analyze.GetCodeLanguage(fileName, code) + lexer = getChromaLexerByLanguage(fileName, enryLanguage) + if lexer == nil { + if enryLanguage != enry.OtherLanguage { + log.Warn("No chroma lexer found for enry detected language: %s (file: %s), need to fix the language mapping between enry and chroma.", enryLanguage, fileName) + } + lexer = lexers.Match(fileName) // lexers.Match will search by its basename and extname + } + } + + return util.IfZero(lexer, lexers.Fallback) +} + +func renderCode(fileName, language, code string, slowGuess bool) (output template.HTML, lexerName string) { // diff view newline will 
be passed as empty, change to literal '\n' so it can be copied // preserve literal newline in blame view if code == "" || code == "\n" { @@ -102,45 +144,25 @@ func Code(fileName, language, code string) (output template.HTML, lexerName stri return template.HTML(template.HTMLEscapeString(code)), "" } - var lexer chroma.Lexer - - if len(language) > 0 { - lexer = lexers.Get(language) - - if lexer == nil { - // Attempt stripping off the '?' - if before, _, ok := strings.Cut(language, "?"); ok { - lexer = lexers.Get(before) - } - } + var codeForGuessLexer []byte + if slowGuess { + // it is slower to guess lexer by code content, so only do it when necessary + codeForGuessLexer = util.UnsafeStringToBytes(code) } - - if lexer == nil { - if val, ok := highlightMapping[path.Ext(fileName)]; ok { - // use mapped value to find lexer - lexer = lexers.Get(val) - } - } - - if lexer == nil { - if l, ok := cache.Get(fileName); ok { - lexer = l.(chroma.Lexer) - } - } - - if lexer == nil { - lexer = lexers.Match(fileName) - if lexer == nil { - lexer = lexers.Fallback - } - cache.Add(fileName, lexer) - } - - return CodeFromLexer(lexer, code), formatLexerName(lexer.Config().Name) + lexer := GetChromaLexerWithFallback(fileName, language, codeForGuessLexer) + return RenderCodeByLexer(lexer, code), formatLexerName(lexer.Config().Name) } -// CodeFromLexer returns a HTML version of code string with chroma syntax highlighting classes -func CodeFromLexer(lexer chroma.Lexer, code string) template.HTML { +func RenderCodeFast(fileName, language, code string) (output template.HTML, lexerName string) { + return renderCode(fileName, language, code, false) +} + +func RenderCodeSlowGuess(fileName, language, code string) (output template.HTML, lexerName string) { + return renderCode(fileName, language, code, true) +} + +// RenderCodeByLexer returns a HTML version of code string with chroma syntax highlighting classes +func RenderCodeByLexer(lexer chroma.Lexer, code string) template.HTML { formatter := 
html.New(html.WithClasses(true), html.WithLineNumbers(false), html.PreventSurroundingPre(true), @@ -155,7 +177,7 @@ func CodeFromLexer(lexer chroma.Lexer, code string) template.HTML { return template.HTML(template.HTMLEscapeString(code)) } // style not used for live site but need to pass something - err = formatter.Format(htmlw, githubStyles, iterator) + err = formatter.Format(htmlw, globalVars().githubStyles, iterator) if err != nil { log.Error("Can't format code: %v", err) return template.HTML(template.HTMLEscapeString(code)) @@ -167,12 +189,10 @@ func CodeFromLexer(lexer chroma.Lexer, code string) template.HTML { return template.HTML(strings.TrimSuffix(htmlbuf.String(), "\n")) } -// File returns a slice of chroma syntax highlighted HTML lines of code and the matched lexer name -func File(fileName, language string, code []byte) ([]template.HTML, string, error) { - NewContext() - +// RenderFullFile returns a slice of chroma syntax highlighted HTML lines of code and the matched lexer name +func RenderFullFile(fileName, language string, code []byte) ([]template.HTML, string, error) { if len(code) > sizeLimit { - return PlainText(code), "", nil + return RenderPlainText(code), "", nil } formatter := html.New(html.WithClasses(true), @@ -180,31 +200,7 @@ func File(fileName, language string, code []byte) ([]template.HTML, string, erro html.PreventSurroundingPre(true), ) - var lexer chroma.Lexer - - // provided language overrides everything - if language != "" { - lexer = lexers.Get(language) - } - - if lexer == nil { - if val, ok := highlightMapping[filepath.Ext(fileName)]; ok { - lexer = lexers.Get(val) - } - } - - if lexer == nil { - guessLanguage := analyze.GetCodeLanguage(fileName, code) - - lexer = lexers.Get(guessLanguage) - if lexer == nil { - lexer = lexers.Match(fileName) - if lexer == nil { - lexer = lexers.Fallback - } - } - } - + lexer := GetChromaLexerWithFallback(fileName, language, code) lexerName := formatLexerName(lexer.Config().Name) iterator, err := 
lexer.Tokenise(nil, string(code)) @@ -218,7 +214,7 @@ func File(fileName, language string, code []byte) ([]template.HTML, string, erro lines := make([]template.HTML, 0, len(tokensLines)) for _, tokens := range tokensLines { iterator = chroma.Literator(tokens...) - err = formatter.Format(htmlBuf, githubStyles, iterator) + err = formatter.Format(htmlBuf, globalVars().githubStyles, iterator) if err != nil { return nil, "", fmt.Errorf("can't format code: %w", err) } @@ -229,8 +225,8 @@ func File(fileName, language string, code []byte) ([]template.HTML, string, erro return lines, lexerName, nil } -// PlainText returns non-highlighted HTML for code -func PlainText(code []byte) []template.HTML { +// RenderPlainText returns non-highlighted HTML for code +func RenderPlainText(code []byte) []template.HTML { r := bufio.NewReader(bytes.NewReader(code)) m := make([]template.HTML, 0, bytes.Count(code, []byte{'\n'})+1) for { diff --git a/modules/highlight/highlight_test.go b/modules/highlight/highlight_test.go index 52873427a8..f4bdedb2a0 100644 --- a/modules/highlight/highlight_test.go +++ b/modules/highlight/highlight_test.go @@ -112,7 +112,7 @@ c=2 for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - out, lexerName, err := File(tt.name, "", []byte(tt.code)) + out, lexerName, err := RenderFullFile(tt.name, "", []byte(tt.code)) assert.NoError(t, err) assert.Equal(t, tt.want, out) assert.Equal(t, tt.lexerName, lexerName) @@ -176,7 +176,7 @@ c=2`), for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - out := PlainText([]byte(tt.code)) + out := RenderPlainText([]byte(tt.code)) assert.Equal(t, tt.want, out) }) } @@ -199,3 +199,36 @@ func TestUnsafeSplitHighlightedLines(t *testing.T) { assert.Equal(t, "a\n", string(ret[0])) assert.Equal(t, "b\n", string(ret[1])) } + +func TestGetChromaLexer(t *testing.T) { + globalVars().highlightMapping[".my-html"] = "HTML" + t.Cleanup(func() { delete(globalVars().highlightMapping, ".my-html") }) + + cases := []struct { + 
fileName string + language string + content string + expected string + }{ + {"test.py", "", "", "Python"}, + + {"any-file", "javascript", "", "JavaScript"}, + {"any-file", "", "/* vim: set filetype=python */", "Python"}, + {"any-file", "", "", "fallback"}, + + {"test.fs", "", "", "Forth"}, + {"test.fs", "F#", "", "FSharp"}, + {"test.fs", "", "let x = 1", "FSharp"}, + + {"test.c", "", "", "C"}, + {"test.C", "", "", "C++"}, + {"OLD-CODE.PAS", "", "", "ObjectPascal"}, + {"test.my-html", "", "", "HTML"}, + } + for _, c := range cases { + lexer := GetChromaLexerWithFallback(c.fileName, c.language, []byte(c.content)) + if assert.NotNil(t, lexer, "case: %+v", c) { + assert.Equal(t, c.expected, lexer.Config().Name, "case: %+v", c) + } + } +} diff --git a/modules/htmlutil/html.go b/modules/htmlutil/html.go index efbc174b2e..8dbfe0c22e 100644 --- a/modules/htmlutil/html.go +++ b/modules/htmlutil/html.go @@ -6,6 +6,7 @@ package htmlutil import ( "fmt" "html/template" + "io" "slices" "strings" ) @@ -31,7 +32,7 @@ func ParseSizeAndClass(defaultSize int, defaultClass string, others ...any) (int return size, class } -func HTMLFormat(s template.HTML, rawArgs ...any) template.HTML { +func htmlFormatArgs(s template.HTML, rawArgs []any) []any { if !strings.Contains(string(s), "%") || len(rawArgs) == 0 { panic("HTMLFormat requires one or more arguments") } @@ -50,5 +51,35 @@ func HTMLFormat(s template.HTML, rawArgs ...any) template.HTML { args[i] = template.HTMLEscapeString(fmt.Sprint(v)) } } - return template.HTML(fmt.Sprintf(string(s), args...)) + return args +} + +func HTMLFormat(s template.HTML, rawArgs ...any) template.HTML { + return template.HTML(fmt.Sprintf(string(s), htmlFormatArgs(s, rawArgs)...)) +} + +func HTMLPrintf(w io.Writer, s template.HTML, rawArgs ...any) (int, error) { + return fmt.Fprintf(w, string(s), htmlFormatArgs(s, rawArgs)...) 
+} + +func HTMLPrint(w io.Writer, s template.HTML) (int, error) { + return io.WriteString(w, string(s)) +} + +func HTMLPrintTag(w io.Writer, tag template.HTML, attrs map[string]string) (written int, err error) { + n, err := io.WriteString(w, "<"+string(tag)) + written += n + if err != nil { + return written, err + } + for k, v := range attrs { + n, err = fmt.Fprintf(w, ` %s="%s"`, template.HTMLEscapeString(k), template.HTMLEscapeString(v)) + written += n + if err != nil { + return written, err + } + } + n, err = io.WriteString(w, ">") + written += n + return written, err } diff --git a/modules/indexer/code/bleve/bleve.go b/modules/indexer/code/bleve/bleve.go index 5f6a7f6082..010ee39660 100644 --- a/modules/indexer/code/bleve/bleve.go +++ b/modules/indexer/code/bleve/bleve.go @@ -4,7 +4,6 @@ package bleve import ( - "bufio" "context" "fmt" "io" @@ -151,7 +150,7 @@ func NewIndexer(indexDir string) *Indexer { } } -func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserError, batchReader *bufio.Reader, commitSha string, +func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, commitSha string, update internal.FileUpdate, repo *repo_model.Repository, batch *inner_bleve.FlushingBatch, ) error { // Ignore vendored files in code search @@ -164,7 +163,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro var err error if !update.Sized { var stdout string - stdout, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("cat-file", "-s").AddDynamicArguments(update.BlobSha)) + stdout, _, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("cat-file", "-s").AddDynamicArguments(update.BlobSha)) if err != nil { return err } @@ -177,16 +176,11 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro return b.addDelete(update.Filename, repo, batch) } - if _, err := batchWriter.Write([]byte(update.BlobSha + "\n")); err != nil { - return err - } - - _, _, size, err = 
git.ReadBatchLine(batchReader) + info, batchReader, err := catFileBatch.QueryContent(update.BlobSha) if err != nil { return err } - - fileContents, err := io.ReadAll(io.LimitReader(batchReader, size)) + fileContents, err := io.ReadAll(io.LimitReader(batchReader, info.Size)) if err != nil { return err } else if !typesniffer.DetectContentType(fileContents).IsText() { @@ -218,18 +212,17 @@ func (b *Indexer) addDelete(filename string, repo *repo_model.Repository, batch func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *internal.RepoChanges) error { batch := inner_bleve.NewFlushingBatch(b.inner.Indexer, maxBatchSize) if len(changes.Updates) > 0 { - gitBatch, err := gitrepo.NewBatch(ctx, repo) + catfileBatch, err := gitrepo.NewBatch(ctx, repo) if err != nil { return err } - defer gitBatch.Close() + defer catfileBatch.Close() for _, update := range changes.Updates { - if err := b.addUpdate(ctx, gitBatch.Writer, gitBatch.Reader, sha, update, repo, batch); err != nil { + if err := b.addUpdate(ctx, catfileBatch, sha, update, repo, batch); err != nil { return err } } - gitBatch.Close() } for _, filename := range changes.RemovedFilenames { if err := b.addDelete(filename, repo, batch); err != nil { diff --git a/modules/indexer/code/elasticsearch/elasticsearch.go b/modules/indexer/code/elasticsearch/elasticsearch.go index a7027051d2..99f974b646 100644 --- a/modules/indexer/code/elasticsearch/elasticsearch.go +++ b/modules/indexer/code/elasticsearch/elasticsearch.go @@ -4,7 +4,6 @@ package elasticsearch import ( - "bufio" "context" "fmt" "io" @@ -139,7 +138,7 @@ const ( }` ) -func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserError, batchReader *bufio.Reader, sha string, update internal.FileUpdate, repo *repo_model.Repository) ([]elastic.BulkableRequest, error) { +func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, sha string, update internal.FileUpdate, repo *repo_model.Repository) 
([]elastic.BulkableRequest, error) { // Ignore vendored files in code search if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) { return nil, nil @@ -149,7 +148,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro var err error if !update.Sized { var stdout string - stdout, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("cat-file", "-s").AddDynamicArguments(update.BlobSha)) + stdout, _, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("cat-file", "-s").AddDynamicArguments(update.BlobSha)) if err != nil { return nil, err } @@ -162,16 +161,12 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro return []elastic.BulkableRequest{b.addDelete(update.Filename, repo)}, nil } - if _, err := batchWriter.Write([]byte(update.BlobSha + "\n")); err != nil { - return nil, err - } - - _, _, size, err = git.ReadBatchLine(batchReader) + info, batchReader, err := catFileBatch.QueryContent(update.BlobSha) if err != nil { return nil, err } - fileContents, err := io.ReadAll(io.LimitReader(batchReader, size)) + fileContents, err := io.ReadAll(io.LimitReader(batchReader, info.Size)) if err != nil { return nil, err } else if !typesniffer.DetectContentType(fileContents).IsText() { @@ -217,7 +212,7 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st defer batch.Close() for _, update := range changes.Updates { - updateReqs, err := b.addUpdate(ctx, batch.Writer, batch.Reader, sha, update, repo) + updateReqs, err := b.addUpdate(ctx, batch, sha, update, repo) if err != nil { return err } @@ -225,7 +220,6 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st reqs = append(reqs, updateReqs...) 
} } - batch.Close() } for _, filename := range changes.RemovedFilenames { diff --git a/modules/indexer/code/git.go b/modules/indexer/code/git.go index ca9c6a2974..a17b10551d 100644 --- a/modules/indexer/code/git.go +++ b/modules/indexer/code/git.go @@ -18,7 +18,7 @@ import ( ) func getDefaultBranchSha(ctx context.Context, repo *repo_model.Repository) (string, error) { - stdout, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("show-ref", "-s").AddDynamicArguments(git.BranchPrefix+repo.DefaultBranch)) + stdout, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("show-ref", "-s").AddDynamicArguments(git.BranchPrefix+repo.DefaultBranch)) if err != nil { return "", err } @@ -35,7 +35,7 @@ func getRepoChanges(ctx context.Context, repo *repo_model.Repository, revision s needGenesis := len(status.CommitSha) == 0 if !needGenesis { hasAncestorCmd := gitcmd.NewCommand("merge-base").AddDynamicArguments(status.CommitSha, revision) - stdout, _ := gitrepo.RunCmdString(ctx, repo, hasAncestorCmd) + stdout, _, _ := gitrepo.RunCmdString(ctx, repo, hasAncestorCmd) // FIXME: error is not handled needGenesis = len(stdout) == 0 } @@ -101,7 +101,7 @@ func genesisChanges(ctx context.Context, repo *repo_model.Repository, revision s // nonGenesisChanges get changes since the previous indexer update func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revision string) (*internal.RepoChanges, error) { diffCmd := gitcmd.NewCommand("diff", "--name-status").AddDynamicArguments(repo.CodeIndexerStatus.CommitSha, revision) - stdout, runErr := gitrepo.RunCmdString(ctx, repo, diffCmd) + stdout, _, runErr := gitrepo.RunCmdString(ctx, repo, diffCmd) if runErr != nil { // previous commit sha may have been removed by a force push, so // try rebuilding from scratch diff --git a/modules/indexer/code/search.go b/modules/indexer/code/search.go index a7a5d7d2e3..907dd1a537 100644 --- a/modules/indexer/code/search.go +++ b/modules/indexer/code/search.go @@ -72,10 +72,10 @@ 
func writeStrings(buf *bytes.Buffer, strs ...string) error { func HighlightSearchResultCode(filename, language string, lineNums []int, code string) []*ResultLine { // we should highlight the whole code block first, otherwise it doesn't work well with multiple line highlighting - hl, _ := highlight.Code(filename, language, code) + hl, _ := highlight.RenderCodeFast(filename, language, code) highlightedLines := strings.Split(string(hl), "\n") - // The lineNums outputted by highlight.Code might not match the original lineNums, because "highlight" removes the last `\n` + // The lineNums outputted by render might not match the original lineNums, because "highlight" removes the last `\n` lines := make([]*ResultLine, min(len(highlightedLines), len(lineNums))) for i := range lines { lines[i] = &ResultLine{ diff --git a/modules/lfs/pointer_scanner_gogit.go b/modules/lfs/pointer_scanner_gogit.go index e153b8e24e..ccfb16b6c0 100644 --- a/modules/lfs/pointer_scanner_gogit.go +++ b/modules/lfs/pointer_scanner_gogit.go @@ -15,7 +15,7 @@ import ( ) // SearchPointerBlobs scans the whole repository for LFS pointer files -func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan chan<- PointerBlob, errChan chan<- error) { +func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan chan<- PointerBlob) error { gitRepo := repo.GoGitRepo() err := func() error { @@ -49,14 +49,7 @@ func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan c return nil }) }() - if err != nil { - select { - case <-ctx.Done(): - default: - errChan <- err - } - } close(pointerChan) - close(errChan) + return err } diff --git a/modules/lfs/pointer_scanner_nogogit.go b/modules/lfs/pointer_scanner_nogogit.go index c37a93e73b..29f5d0e346 100644 --- a/modules/lfs/pointer_scanner_nogogit.go +++ b/modules/lfs/pointer_scanner_nogogit.go @@ -8,96 +8,84 @@ package lfs import ( "bufio" "context" + "errors" "io" "strconv" "strings" - "sync" 
"code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/git/pipeline" + "code.gitea.io/gitea/modules/util" + + "golang.org/x/sync/errgroup" ) // SearchPointerBlobs scans the whole repository for LFS pointer files -func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan chan<- PointerBlob, errChan chan<- error) { - basePath := repo.Path +func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan chan<- PointerBlob) error { + cmd1AllObjs, cmd3BatchContent := gitcmd.NewCommand(), gitcmd.NewCommand() - catFileCheckReader, catFileCheckWriter := io.Pipe() - shasToBatchReader, shasToBatchWriter := io.Pipe() - catFileBatchReader, catFileBatchWriter := io.Pipe() + cmd1AllObjsStdout, cmd1AllObjsStdoutClose := cmd1AllObjs.MakeStdoutPipe() + defer cmd1AllObjsStdoutClose() - wg := sync.WaitGroup{} - wg.Add(4) - - // Create the go-routines in reverse order. + cmd3BatchContentIn, cmd3BatchContentOut, cmd3BatchContentClose := cmd3BatchContent.MakeStdinStdoutPipe() + defer cmd3BatchContentClose() + // Create the go-routines in reverse order (update: the order is not needed any more, the pipes are properly prepared) + wg := errgroup.Group{} // 4. Take the output of cat-file --batch and check if each file in turn // to see if they're pointers to files in the LFS store - go createPointerResultsFromCatFileBatch(ctx, catFileBatchReader, &wg, pointerChan) + wg.Go(func() error { + return createPointerResultsFromCatFileBatch(cmd3BatchContentOut, pointerChan) + }) // 3. Take the shas of the blobs and batch read them - go pipeline.CatFileBatch(ctx, shasToBatchReader, catFileBatchWriter, &wg, basePath) + wg.Go(func() error { + return pipeline.CatFileBatch(ctx, cmd3BatchContent, repo.Path) + }) // 2. 
From the provided objects restrict to blobs <=1k - go pipeline.BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader, shasToBatchWriter, &wg) + wg.Go(func() error { + return pipeline.BlobsLessThan1024FromCatFileBatchCheck(cmd1AllObjsStdout, cmd3BatchContentIn) + }) // 1. Run batch-check on all objects in the repository - if !git.DefaultFeatures().CheckVersionAtLeast("2.6.0") { - revListReader, revListWriter := io.Pipe() - shasToCheckReader, shasToCheckWriter := io.Pipe() - wg.Add(2) - go pipeline.CatFileBatchCheck(ctx, shasToCheckReader, catFileCheckWriter, &wg, basePath) - go pipeline.BlobsFromRevListObjects(revListReader, shasToCheckWriter, &wg) - go pipeline.RevListAllObjects(ctx, revListWriter, &wg, basePath, errChan) - } else { - go pipeline.CatFileBatchCheckAllObjects(ctx, catFileCheckWriter, &wg, basePath, errChan) - } - wg.Wait() - + wg.Go(func() error { + return pipeline.CatFileBatchCheckAllObjects(ctx, cmd1AllObjs, repo.Path) + }) + err := wg.Wait() close(pointerChan) - close(errChan) + return err } -func createPointerResultsFromCatFileBatch(ctx context.Context, catFileBatchReader *io.PipeReader, wg *sync.WaitGroup, pointerChan chan<- PointerBlob) { - defer wg.Done() +func createPointerResultsFromCatFileBatch(catFileBatchReader io.ReadCloser, pointerChan chan<- PointerBlob) error { defer catFileBatchReader.Close() bufferedReader := bufio.NewReader(catFileBatchReader) buf := make([]byte, 1025) -loop: for { - select { - case <-ctx.Done(): - break loop - default: - } - // File descriptor line: sha sha, err := bufferedReader.ReadString(' ') if err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return util.Iif(errors.Is(err, io.EOF), nil, err) } sha = strings.TrimSpace(sha) // Throw away the blob if _, err := bufferedReader.ReadString(' '); err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return err } sizeStr, err := bufferedReader.ReadString('\n') if err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + 
return err } size, err := strconv.Atoi(sizeStr[:len(sizeStr)-1]) if err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return err } pointerBuf := buf[:size+1] if _, err := io.ReadFull(bufferedReader, pointerBuf); err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return err } pointerBuf = pointerBuf[:size] // Now we need to check if the pointerBuf is an LFS pointer @@ -105,7 +93,6 @@ loop: if !pointer.IsValid() { continue } - pointerChan <- PointerBlob{Hash: sha, Pointer: pointer} } } diff --git a/modules/lfs/shared.go b/modules/lfs/shared.go index cd9488e3db..e04c089e51 100644 --- a/modules/lfs/shared.go +++ b/modules/lfs/shared.go @@ -66,6 +66,21 @@ type Link struct { ExpiresAt *time.Time `json:"expires_at,omitempty"` } +func NewLink(href string) *Link { + return &Link{Href: href} +} + +func (l *Link) WithHeader(k, v string) *Link { + if v == "" { + return l + } + if l.Header == nil { + l.Header = make(map[string]string) + } + l.Header[k] = v + return l +} + // ObjectError defines the JSON structure returned to the client in case of an error. 
type ObjectError struct { Code int `json:"code"` diff --git a/modules/log/init.go b/modules/log/init.go index 3fb5200ad7..ccaab50de3 100644 --- a/modules/log/init.go +++ b/modules/log/init.go @@ -35,10 +35,10 @@ func init() { } } -func newProcessTypedContext(parent context.Context, desc string) (ctx context.Context, cancel context.CancelFunc) { +func newProcessTypedContext(parent context.Context, desc string) (context.Context, context.CancelFunc) { // the "process manager" also calls "log.Trace()" to output logs, so if we want to create new contexts by the manager, we need to disable the trace temporarily process.TraceLogDisable(true) defer process.TraceLogDisable(false) - ctx, _, cancel = process.GetManager().AddTypedContext(parent, desc, process.SystemProcessType, false) - return ctx, cancel + ctx, _, finished := process.GetManager().AddTypedContext(parent, desc, process.SystemProcessType, false) + return ctx, context.CancelFunc(finished) } diff --git a/modules/markup/asciicast/asciicast.go b/modules/markup/asciicast/asciicast.go index d86d61d7c4..b3af5eef09 100644 --- a/modules/markup/asciicast/asciicast.go +++ b/modules/markup/asciicast/asciicast.go @@ -20,14 +20,12 @@ func init() { // See https://github.com/asciinema/asciinema/blob/develop/doc/asciicast-v2.md type Renderer struct{} -// Name implements markup.Renderer func (Renderer) Name() string { return "asciicast" } -// Extensions implements markup.Renderer -func (Renderer) Extensions() []string { - return []string{".cast"} +func (Renderer) FileNamePatterns() []string { + return []string{"*.cast"} } const ( @@ -35,12 +33,10 @@ const ( playerSrcAttr = "data-asciinema-player-src" ) -// SanitizerRules implements markup.Renderer func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule { return []setting.MarkupSanitizerRule{{Element: "div", AllowAttr: playerSrcAttr}} } -// Render implements markup.Renderer func (Renderer) Render(ctx *markup.RenderContext, _ io.Reader, output io.Writer) error { rawURL := 
fmt.Sprintf("%s/%s/%s/raw/%s/%s", setting.AppSubURL, diff --git a/modules/markup/common/footnote.go b/modules/markup/common/footnote.go index 1ece436c66..e552a28237 100644 --- a/modules/markup/common/footnote.go +++ b/modules/markup/common/footnote.go @@ -405,9 +405,9 @@ func (r *FootnoteHTMLRenderer) renderFootnoteLink(w util.BufWriter, source []byt if entering { n := node.(*FootnoteLink) is := strconv.Itoa(n.Index) - _, _ = w.WriteString(``) // FIXME: here and below, need to keep the classes _, _ = w.WriteString(is) @@ -419,7 +419,7 @@ func (r *FootnoteHTMLRenderer) renderFootnoteLink(w util.BufWriter, source []byt func (r *FootnoteHTMLRenderer) renderFootnoteBackLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { if entering { n := node.(*FootnoteBackLink) - _, _ = w.WriteString(` `) _, _ = w.WriteString("↩︎") @@ -431,7 +431,7 @@ func (r *FootnoteHTMLRenderer) renderFootnoteBackLink(w util.BufWriter, source [ func (r *FootnoteHTMLRenderer) renderFootnote(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { n := node.(*Footnote) if entering { - _, _ = w.WriteString(`
  • 0 { - markup.RegisterRenderer(&Renderer{renderer}) - } + markup.RegisterRenderer(&Renderer{renderer}) } } @@ -38,22 +37,18 @@ var ( _ markup.ExternalRenderer = (*Renderer)(nil) ) -// Name returns the external tool name func (p *Renderer) Name() string { return p.MarkupName } -// NeedPostProcess implements markup.Renderer func (p *Renderer) NeedPostProcess() bool { return p.MarkupRenderer.NeedPostProcess } -// Extensions returns the supported extensions of the tool -func (p *Renderer) Extensions() []string { - return p.FileExtensions +func (p *Renderer) FileNamePatterns() []string { + return p.FilePatterns } -// SanitizerRules implements markup.Renderer func (p *Renderer) SanitizerRules() []setting.MarkupSanitizerRule { return p.MarkupSanitizerRules } diff --git a/modules/markup/external/openapi.go b/modules/markup/external/openapi.go new file mode 100644 index 0000000000..ac5eae53ff --- /dev/null +++ b/modules/markup/external/openapi.go @@ -0,0 +1,79 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package external + +import ( + "fmt" + "html" + "io" + + "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" +) + +type openAPIRenderer struct{} + +var ( + _ markup.PostProcessRenderer = (*openAPIRenderer)(nil) + _ markup.ExternalRenderer = (*openAPIRenderer)(nil) +) + +func (p *openAPIRenderer) Name() string { + return "openapi" +} + +func (p *openAPIRenderer) NeedPostProcess() bool { + return false +} + +func (p *openAPIRenderer) FileNamePatterns() []string { + return []string{ + "openapi.yaml", + "openapi.yml", + "openapi.json", + "swagger.yaml", + "swagger.yml", + "swagger.json", + } +} + +func (p *openAPIRenderer) SanitizerRules() []setting.MarkupSanitizerRule { + return nil +} + +func (p *openAPIRenderer) GetExternalRendererOptions() (ret markup.ExternalRendererOptions) { + ret.SanitizerDisabled = true + ret.DisplayInIframe = true + ret.ContentSandbox = "" + return ret +} + +func (p *openAPIRenderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { + content, err := util.ReadWithLimit(input, int(setting.UI.MaxDisplayFileSize)) + if err != nil { + return err + } + // TODO: can extract this to a tmpl file later + _, err = io.WriteString(output, fmt.Sprintf( + ` + + + + + + +
    + + +`, + setting.StaticURLPrefix, + setting.AssetVersion, + html.EscapeString(ctx.RenderOptions.RelativePath), + html.EscapeString(util.UnsafeBytesToString(content)), + setting.StaticURLPrefix, + setting.AssetVersion, + )) + return err +} diff --git a/modules/markup/html.go b/modules/markup/html.go index 51afd4be00..1c2ae6918d 100644 --- a/modules/markup/html.go +++ b/modules/markup/html.go @@ -12,7 +12,9 @@ import ( "strings" "sync" + "code.gitea.io/gitea/modules/htmlutil" "code.gitea.io/gitea/modules/markup/common" + "code.gitea.io/gitea/modules/translation" "golang.org/x/net/html" "golang.org/x/net/html/atom" @@ -60,7 +62,7 @@ var globalVars = sync.OnceValue(func() *globalVarsType { v.shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`) // anyHashPattern splits url containing SHA into parts - v.anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`) + v.anyHashPattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{40,64})((\.\w+)*)(/[-+~%./\w]+)?(\?[-+~%.\w&=]+)?(#[-+~%.\w]+)?`) // comparePattern matches "http://domain/org/repo/compare/COMMIT1...COMMIT2#hash" v.comparePattern = regexp.MustCompile(`https?://(?:\S+/){4,5}([0-9a-f]{7,64})(\.\.\.?)([0-9a-f]{7,64})?(#[-+~_%.a-zA-Z0-9]+)?`) @@ -234,6 +236,49 @@ func postProcessString(ctx *RenderContext, procs []processor, content string) (s return buf.String(), nil } +func RenderTocHeadingItems(ctx *RenderContext, nodeDetailsAttrs map[string]string, out io.Writer) { + locale, ok := ctx.Value(translation.ContextKey).(translation.Locale) + if !ok { + locale = translation.NewLocale("") + } + _, _ = htmlutil.HTMLPrintTag(out, "details", nodeDetailsAttrs) + _, _ = htmlutil.HTMLPrintf(out, "%s\n", locale.TrString("toc")) + + baseLevel := 6 + for _, header := range ctx.TocHeadingItems { + if header.HeadingLevel < baseLevel { + baseLevel = header.HeadingLevel + } + } + + currentLevel := baseLevel + indent := []byte{' ', ' '} + _, _ = 
htmlutil.HTMLPrint(out, "
    \n") + currentLevel-- + } + _, _ = htmlutil.HTMLPrint(out, "\n\n") +} + func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output io.Writer) error { if !ctx.usedByRender && ctx.RenderHelper != nil { defer ctx.RenderHelper.CleanUp() @@ -284,6 +329,9 @@ func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output } // Render everything to buf. + if ctx.TocShowInSection == TocShowInMain && len(ctx.TocHeadingItems) > 0 { + RenderTocHeadingItems(ctx, nil, output) + } for _, node := range newNodes { if err := html.Render(output, node); err != nil { return fmt.Errorf("markup.postProcess: html.Render: %w", err) @@ -314,7 +362,7 @@ func visitNode(ctx *RenderContext, procs []processor, node *html.Node) *html.Nod return node.NextSibling } - processNodeAttrID(node) + processNodeHeadingAndID(ctx, node) processFootnoteNode(ctx, node) // FIXME: the footnote processing should be done in the "footnote.go" renderer directly if isEmojiNode(node) { diff --git a/modules/markup/html_commit.go b/modules/markup/html_commit.go index fe7a034967..0a9b329589 100644 --- a/modules/markup/html_commit.go +++ b/modules/markup/html_commit.go @@ -16,12 +16,14 @@ import ( ) type anyHashPatternResult struct { - PosStart int - PosEnd int - FullURL string - CommitID string - SubPath string - QueryHash string + PosStart int + PosEnd int + FullURL string + CommitID string + CommitExt string + SubPath string + QueryParams string + QueryHash string } func createCodeLink(href, content, class string) *html.Node { @@ -56,7 +58,11 @@ func anyHashPatternExtract(s string) (ret anyHashPatternResult, ok bool) { return ret, false } - ret.PosStart, ret.PosEnd = m[0], m[1] + pos := 0 + + ret.PosStart, ret.PosEnd = m[pos], m[pos+1] + pos += 2 + ret.FullURL = s[ret.PosStart:ret.PosEnd] if strings.HasSuffix(ret.FullURL, ".") { // if url ends in '.', it's very likely that it is not part of the actual url but used to finish a sentence. 
@@ -67,14 +73,24 @@ func anyHashPatternExtract(s string) (ret anyHashPatternResult, ok bool) { } } - ret.CommitID = s[m[2]:m[3]] - if m[5] > 0 { - ret.SubPath = s[m[4]:m[5]] - } + ret.CommitID = s[m[pos]:m[pos+1]] + pos += 2 - lastStart, lastEnd := m[len(m)-2], m[len(m)-1] - if lastEnd > 0 { - ret.QueryHash = s[lastStart:lastEnd][1:] + ret.CommitExt = s[m[pos]:m[pos+1]] + pos += 4 + + if m[pos] > 0 { + ret.SubPath = s[m[pos]:m[pos+1]] + } + pos += 2 + + if m[pos] > 0 { + ret.QueryParams = s[m[pos]:m[pos+1]] + } + pos += 2 + + if m[pos] > 0 { + ret.QueryHash = s[m[pos]:m[pos+1]][1:] } return ret, true } @@ -96,6 +112,9 @@ func fullHashPatternProcessor(ctx *RenderContext, node *html.Node) { continue } text := base.ShortSha(ret.CommitID) + if ret.CommitExt != "" { + text += ret.CommitExt + } if ret.SubPath != "" { text += ret.SubPath } diff --git a/modules/markup/html_link.go b/modules/markup/html_link.go index 7523ebaed0..1702950da8 100644 --- a/modules/markup/html_link.go +++ b/modules/markup/html_link.go @@ -208,7 +208,6 @@ func createDescriptionLink(href, content string) *html.Node { Attr: []html.Attribute{ {Key: "href", Val: href}, {Key: "target", Val: "_blank"}, - {Key: "rel", Val: "noopener noreferrer"}, }, } textNode.Parent = linkNode diff --git a/modules/markup/html_node.go b/modules/markup/html_node.go index 4eb78fdd2b..f98e7429a2 100644 --- a/modules/markup/html_node.go +++ b/modules/markup/html_node.go @@ -6,13 +6,15 @@ package markup import ( "strings" + "code.gitea.io/gitea/modules/markup/common" + "golang.org/x/net/html" ) func isAnchorIDUserContent(s string) bool { // blackfridayExtRegex is for blackfriday extensions create IDs like fn:user-content-footnote // old logic: blackfridayExtRegex = regexp.MustCompile(`[^:]*:user-content-`) - return strings.HasPrefix(s, "user-content-") || strings.Contains(s, ":user-content-") + return strings.HasPrefix(s, "user-content-") || strings.Contains(s, ":user-content-") || isAnchorIDFootnote(s) } func 
isAnchorIDFootnote(s string) bool { @@ -23,16 +25,80 @@ func isAnchorHrefFootnote(s string) bool { return strings.HasPrefix(s, "#fnref:user-content-") || strings.HasPrefix(s, "#fn:user-content-") } -func processNodeAttrID(node *html.Node) { +// isHeadingTag returns true if the node is a heading tag (h1-h6) +func isHeadingTag(node *html.Node) bool { + return node.Type == html.ElementNode && + len(node.Data) == 2 && + node.Data[0] == 'h' && + node.Data[1] >= '1' && node.Data[1] <= '6' +} + +// getNodeText extracts the text content from a node and its children +func getNodeText(node *html.Node, cached **string) string { + if *cached != nil { + return **cached + } + var text strings.Builder + var extractText func(*html.Node) + extractText = func(n *html.Node) { + if n.Type == html.TextNode { + text.WriteString(n.Data) + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + extractText(c) + } + } + extractText(node) + textStr := text.String() + *cached = &textStr + return textStr +} + +func processNodeHeadingAndID(ctx *RenderContext, node *html.Node) { + // TODO: handle duplicate IDs, need to track existing IDs in the document // Add user-content- to IDs and "#" links if they don't already have them, // and convert the link href to a relative link to the host root + attrIDVal := "" for idx, attr := range node.Attr { if attr.Key == "id" { - if !isAnchorIDUserContent(attr.Val) { - node.Attr[idx].Val = "user-content-" + attr.Val + attrIDVal = attr.Val + if !isAnchorIDUserContent(attrIDVal) { + attrIDVal = "user-content-" + attrIDVal + node.Attr[idx].Val = attrIDVal } } } + + if !isHeadingTag(node) || !ctx.RenderOptions.EnableHeadingIDGeneration { + return + } + + // For heading tags (h1-h6) without an id attribute, generate one from the text content. + // This ensures HTML headings like

    Title

    get proper permalink anchors + // matching the behavior of Markdown headings. + // Only enabled for repository files and wiki pages via EnableHeadingIDGeneration option. + var nodeTextCached *string + if attrIDVal == "" { + nodeText := getNodeText(node, &nodeTextCached) + if nodeText != "" { + // Use the same CleanValue function used by Markdown heading ID generation + attrIDVal = string(common.CleanValue([]byte(nodeText))) + if attrIDVal != "" { + attrIDVal = "user-content-" + attrIDVal + node.Attr = append(node.Attr, html.Attribute{Key: "id", Val: attrIDVal}) + } + } + } + if ctx.TocShowInSection != "" { + nodeText := getNodeText(node, &nodeTextCached) + if nodeText != "" && attrIDVal != "" { + ctx.TocHeadingItems = append(ctx.TocHeadingItems, &TocHeadingItem{ + HeadingLevel: int(node.Data[1] - '0'), + AnchorID: attrIDVal, + InnerText: nodeText, + }) + } + } } func processFootnoteNode(ctx *RenderContext, node *html.Node) { diff --git a/modules/markup/html_node_test.go b/modules/markup/html_node_test.go new file mode 100644 index 0000000000..007e3c2a12 --- /dev/null +++ b/modules/markup/html_node_test.go @@ -0,0 +1,104 @@ +// Copyright 2024 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package markup + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestProcessNodeAttrID_HTMLHeadingWithoutID(t *testing.T) { + // Test that HTML headings without id get an auto-generated id from their text content + // when EnableHeadingIDGeneration is true (for repo files and wiki pages) + testCases := []struct { + name string + input string + expected string + }{ + { + name: "h1 without id", + input: `

    Heading without ID

    `, + expected: `

    Heading without ID

    `, + }, + { + name: "h2 without id", + input: `

    Another Heading

    `, + expected: `

    Another Heading

    `, + }, + { + name: "h3 without id", + input: `

    Third Level

    `, + expected: `

    Third Level

    `, + }, + { + name: "h1 with existing id should keep it", + input: `

    Heading with ID

    `, + expected: `

    Heading with ID

    `, + }, + { + name: "h1 with user-content prefix should not double prefix", + input: `

    Already Prefixed

    `, + expected: `

    Already Prefixed

    `, + }, + { + name: "heading with special characters", + input: `

    What is Wine Staging?

    `, + expected: `

    What is Wine Staging?

    `, + }, + { + name: "heading with nested elements", + input: `

    Bold and Italic

    `, + expected: `

    Bold and Italic

    `, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var result strings.Builder + ctx := NewTestRenderContext().WithEnableHeadingIDGeneration(true) + err := PostProcessDefault(ctx, strings.NewReader(tc.input), &result) + assert.NoError(t, err) + assert.Equal(t, tc.expected, strings.TrimSpace(result.String())) + }) + } +} + +func TestProcessNodeAttrID_SkipHeadingIDForComments(t *testing.T) { + // Test that HTML headings in comment-like contexts (issue comments) + // do NOT get auto-generated IDs to avoid duplicate IDs on pages with multiple documents. + // This is controlled by EnableHeadingIDGeneration which defaults to false. + testCases := []struct { + name string + input string + expected string + }{ + { + name: "h1 without id in comment context", + input: `

    Heading without ID

    `, + expected: `

    Heading without ID

    `, + }, + { + name: "h2 without id in comment context", + input: `

    Another Heading

    `, + expected: `

    Another Heading

    `, + }, + { + name: "h1 with existing id should still be prefixed", + input: `

    Heading with ID

    `, + expected: `

    Heading with ID

    `, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var result strings.Builder + // Default context without EnableHeadingIDGeneration (simulates comment rendering) + err := PostProcessDefault(NewTestRenderContext(), strings.NewReader(tc.input), &result) + assert.NoError(t, err) + assert.Equal(t, tc.expected, strings.TrimSpace(result.String())) + }) + } +} diff --git a/modules/markup/html_test.go b/modules/markup/html_test.go index 08b050baae..76013ccd13 100644 --- a/modules/markup/html_test.go +++ b/modules/markup/html_test.go @@ -102,6 +102,16 @@ func TestRender_CrossReferences(t *testing.T) { test( inputURL, `

    0123456789/foo.txt (L2-L3)

    `) + + inputURL = "https://example.com/repo/owner/archive/0123456789012345678901234567890123456789.tar.gz" + test( + inputURL, + `

    0123456789.tar.gz

    `) + + inputURL = "https://example.com/owner/repo/commit/0123456789012345678901234567890123456789.patch?key=val" + test( + inputURL, + `

    0123456789.patch

    `) } func TestRender_links(t *testing.T) { diff --git a/modules/markup/html_toc_test.go b/modules/markup/html_toc_test.go new file mode 100644 index 0000000000..e93cfc9346 --- /dev/null +++ b/modules/markup/html_toc_test.go @@ -0,0 +1,60 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package markup_test + +import ( + "regexp" + "testing" + + "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/markup/markdown" + "code.gitea.io/gitea/modules/test" + + "github.com/stretchr/testify/assert" +) + +func TestToCWithHTML(t *testing.T) { + defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableAdditionalAttributes, true)() + + t1 := `tag link and Bold` + t2 := "code block ``" + t3 := "markdown **bold**" + input := `--- +include_toc: true +--- + +# ` + t1 + ` +## ` + t2 + ` +#### ` + t3 + ` +## last +` + + renderCtx := markup.NewTestRenderContext().WithEnableHeadingIDGeneration(true) + resultHTML, err := markdown.RenderString(renderCtx, input) + assert.NoError(t, err) + result := string(resultHTML) + re := regexp.MustCompile(`(?s)
    .*?
    `) + result = re.ReplaceAllString(result, "\n") + expected := `
    toc + +
    + +

    tag link and Bold

    +

    code block <a>

    +

    markdown bold

    +

    last

    +` + assert.Equal(t, expected, result) +} diff --git a/modules/markup/main_test.go b/modules/markup/main_test.go index 564f55ac11..a8dcff475d 100644 --- a/modules/markup/main_test.go +++ b/modules/markup/main_test.go @@ -14,5 +14,7 @@ import ( func TestMain(m *testing.M) { setting.IsInTesting = true markup.RenderBehaviorForTesting.DisableAdditionalAttributes = true + setting.Markdown.FileNamePatterns = []string{"*.md"} + markup.RefreshFileNamePatterns() os.Exit(m.Run()) } diff --git a/modules/markup/markdown/goldmark.go b/modules/markup/markdown/goldmark.go index b28fa9824e..555a171685 100644 --- a/modules/markup/markdown/goldmark.go +++ b/modules/markup/markdown/goldmark.go @@ -41,11 +41,10 @@ func (g *ASTTransformer) applyElementDir(n ast.Node) { // Transform transforms the given AST tree. func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc parser.Context) { firstChild := node.FirstChild() - tocMode := "" ctx := pc.Get(renderContextKey).(*markup.RenderContext) rc := pc.Get(renderConfigKey).(*RenderConfig) - tocList := make([]Header, 0, 20) + tocMode := "" if rc.yamlNode != nil { metaNode := rc.toMetaNode(g) if metaNode != nil { @@ -60,8 +59,6 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa } switch v := n.(type) { - case *ast.Heading: - g.transformHeading(ctx, v, reader, &tocList) case *ast.Paragraph: g.applyElementDir(v) case *ast.List: @@ -79,19 +76,18 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa return ast.WalkContinue, nil }) - showTocInMain := tocMode == "true" /* old behavior, in main view */ || tocMode == "main" - showTocInSidebar := !showTocInMain && tocMode != "false" // not hidden, not main, then show it in sidebar - if len(tocList) > 0 && (showTocInMain || showTocInSidebar) { - if showTocInMain { - tocNode := createTOCNode(tocList, rc.Lang, nil) - node.InsertBefore(node, firstChild, tocNode) - } else { - tocNode := createTOCNode(tocList, rc.Lang, 
map[string]string{"open": "open"}) - ctx.SidebarTocNode = tocNode + if ctx.RenderOptions.EnableHeadingIDGeneration { + showTocInMain := tocMode == "true" /* old behavior, in main view */ || tocMode == "main" + showTocInSidebar := !showTocInMain && tocMode != "false" // not hidden, not main, then show it in sidebar + switch { + case showTocInMain: + ctx.TocShowInSection = markup.TocShowInMain + case showTocInSidebar: + ctx.TocShowInSection = markup.TocShowInSidebar } } - if len(rc.Lang) > 0 { + if rc.Lang != "" { node.SetAttributeString("lang", []byte(rc.Lang)) } } diff --git a/modules/markup/markdown/markdown.go b/modules/markup/markdown/markdown.go index 3b788432ba..cca44a8774 100644 --- a/modules/markup/markdown/markdown.go +++ b/modules/markup/markdown/markdown.go @@ -5,6 +5,7 @@ package markdown import ( + "bytes" "errors" "html/template" "io" @@ -21,10 +22,12 @@ import ( "github.com/yuin/goldmark" highlighting "github.com/yuin/goldmark-highlighting/v2" meta "github.com/yuin/goldmark-meta" + "github.com/yuin/goldmark/ast" "github.com/yuin/goldmark/extension" "github.com/yuin/goldmark/parser" "github.com/yuin/goldmark/renderer" "github.com/yuin/goldmark/renderer/html" + "github.com/yuin/goldmark/text" "github.com/yuin/goldmark/util" ) @@ -57,7 +60,7 @@ func (l *limitWriter) Write(data []byte) (int, error) { // newParserContext creates a parser.Context with the render context set func newParserContext(ctx *markup.RenderContext) parser.Context { - pc := parser.NewContext(parser.WithIDs(newPrefixedIDs())) + pc := parser.NewContext() pc.Set(renderContextKey, ctx) return pc } @@ -101,12 +104,48 @@ func (r *GlodmarkRender) highlightingRenderer(w util.BufWriter, c highlighting.C } } +type goldmarkEmphasisParser struct { + parser.InlineParser +} + +func goldmarkNewEmphasisParser() parser.InlineParser { + return &goldmarkEmphasisParser{parser.NewEmphasisParser()} +} + +func (s *goldmarkEmphasisParser) Parse(parent ast.Node, block text.Reader, pc parser.Context) ast.Node 
{ + line, _ := block.PeekLine() + if len(line) > 1 && line[0] == '_' { + // a special trick to avoid parsing emphasis in filenames like "module/__init__.py" + end := bytes.IndexByte(line[1:], '_') + mark := bytes.Index(line, []byte("_.py")) + // check whether the "end" matches "_.py" or "__.py" + if mark != -1 && (end == mark || end == mark-1) { + return nil + } + } + return s.InlineParser.Parse(parent, block, pc) +} + +func goldmarkDefaultParser() parser.Parser { + return parser.NewParser(parser.WithBlockParsers(parser.DefaultBlockParsers()...), + parser.WithInlineParsers([]util.PrioritizedValue{ + util.Prioritized(parser.NewCodeSpanParser(), 100), + util.Prioritized(parser.NewLinkParser(), 200), + util.Prioritized(parser.NewAutoLinkParser(), 300), + util.Prioritized(parser.NewRawHTMLParser(), 400), + util.Prioritized(goldmarkNewEmphasisParser(), 500), + }...), + parser.WithParagraphTransformers(parser.DefaultParagraphTransformers()...), + ) +} + // SpecializedMarkdown sets up the Gitea specific markdown extensions func SpecializedMarkdown(ctx *markup.RenderContext) *GlodmarkRender { // TODO: it could use a pool to cache the renderers to reuse them with different contexts // at the moment it is fast enough (see the benchmarks) r := &GlodmarkRender{ctx: ctx} r.goldmarkMarkdown = goldmark.New( + goldmark.WithParser(goldmarkDefaultParser()), goldmark.WithExtensions( extension.NewTable(extension.WithTableCellAlignMethod(extension.TableCellAlignAttribute)), extension.Strikethrough, @@ -131,7 +170,6 @@ func SpecializedMarkdown(ctx *markup.RenderContext) *GlodmarkRender { ), goldmark.WithParserOptions( parser.WithAttribute(), - parser.WithAutoHeadingID(), parser.WithASTTransformers(util.Prioritized(NewASTTransformer(&ctx.RenderInternal), 10000)), ), goldmark.WithRendererOptions(html.WithUnsafe()), @@ -202,30 +240,24 @@ func init() { markup.RegisterRenderer(Renderer{}) } -// Renderer implements markup.Renderer type Renderer struct{} var _ markup.PostProcessRenderer = 
(*Renderer)(nil) -// Name implements markup.Renderer func (Renderer) Name() string { return MarkupName } -// NeedPostProcess implements markup.PostProcessRenderer func (Renderer) NeedPostProcess() bool { return true } -// Extensions implements markup.Renderer -func (Renderer) Extensions() []string { - return setting.Markdown.FileExtensions +func (Renderer) FileNamePatterns() []string { + return setting.Markdown.FileNamePatterns } -// SanitizerRules implements markup.Renderer func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule { return []setting.MarkupSanitizerRule{} } -// Render implements markup.Renderer func (Renderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { return render(ctx, input, output) } diff --git a/modules/markup/markdown/markdown_test.go b/modules/markup/markdown/markdown_test.go index 4eb01bcc2d..47b293e1e9 100644 --- a/modules/markup/markdown/markdown_test.go +++ b/modules/markup/markdown/markdown_test.go @@ -88,6 +88,7 @@ func TestRender_Images(t *testing.T) { } func TestTotal_RenderString(t *testing.T) { + setting.AppURL = AppURL defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableAdditionalAttributes, true)() // Test cases without ambiguous links (It is not right to copy a whole file here, instead it should clearly test what is being tested) @@ -258,7 +259,7 @@ This PR has been generated by [Renovate Bot](https://github.com/renovatebot/reno }, }) for i := range sameCases { - line, err := markdown.RenderString(markup.NewTestRenderContext(localMetas), sameCases[i]) + line, err := markdown.RenderString(markup.NewTestRenderContext(localMetas).WithEnableHeadingIDGeneration(true), sameCases[i]) assert.NoError(t, err) assert.Equal(t, testAnswers[i], string(line)) } @@ -545,5 +546,11 @@ func TestMarkdownLink(t *testing.T) { assert.Equal(t, `

    link1 link2 link3

    +`, string(result)) + + input = "https://example.com/__init__.py" + result, err = markdown.RenderString(markup.NewTestRenderContext("/base", localMetas), input) + assert.NoError(t, err) + assert.Equal(t, `

    https://example.com/__init__.py

    `, string(result)) } diff --git a/modules/markup/markdown/prefixed_id.go b/modules/markup/markdown/prefixed_id.go deleted file mode 100644 index 63d7fadc0a..0000000000 --- a/modules/markup/markdown/prefixed_id.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2024 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package markdown - -import ( - "bytes" - "fmt" - - "code.gitea.io/gitea/modules/container" - "code.gitea.io/gitea/modules/markup/common" - "code.gitea.io/gitea/modules/util" - - "github.com/yuin/goldmark/ast" -) - -type prefixedIDs struct { - values container.Set[string] -} - -// Generate generates a new element id. -func (p *prefixedIDs) Generate(value []byte, kind ast.NodeKind) []byte { - dft := []byte("id") - if kind == ast.KindHeading { - dft = []byte("heading") - } - return p.GenerateWithDefault(value, dft) -} - -// GenerateWithDefault generates a new element id. -func (p *prefixedIDs) GenerateWithDefault(value, dft []byte) []byte { - result := common.CleanValue(value) - if len(result) == 0 { - result = dft - } - if !bytes.HasPrefix(result, []byte("user-content-")) { - result = append([]byte("user-content-"), result...) - } - if p.values.Add(util.UnsafeBytesToString(result)) { - return result - } - for i := 1; ; i++ { - newResult := fmt.Sprintf("%s-%d", result, i) - if p.values.Add(newResult) { - return []byte(newResult) - } - } -} - -// Put puts a given element id to the used ids table. -func (p *prefixedIDs) Put(value []byte) { - p.values.Add(util.UnsafeBytesToString(value)) -} - -func newPrefixedIDs() *prefixedIDs { - return &prefixedIDs{ - values: make(container.Set[string]), - } -} diff --git a/modules/markup/markdown/toc.go b/modules/markup/markdown/toc.go deleted file mode 100644 index a11b9d0390..0000000000 --- a/modules/markup/markdown/toc.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. 
-// SPDX-License-Identifier: MIT - -package markdown - -import ( - "net/url" - - "code.gitea.io/gitea/modules/translation" - - "github.com/yuin/goldmark/ast" -) - -// Header holds the data about a header. -type Header struct { - Level int - Text string - ID string -} - -func createTOCNode(toc []Header, lang string, detailsAttrs map[string]string) ast.Node { - details := NewDetails() - summary := NewSummary() - - for k, v := range detailsAttrs { - details.SetAttributeString(k, []byte(v)) - } - - summary.AppendChild(summary, ast.NewString([]byte(translation.NewLocale(lang).TrString("toc")))) - details.AppendChild(details, summary) - ul := ast.NewList('-') - details.AppendChild(details, ul) - currentLevel := 6 - for _, header := range toc { - if header.Level < currentLevel { - currentLevel = header.Level - } - } - for _, header := range toc { - for currentLevel > header.Level { - ul = ul.Parent().(*ast.List) - currentLevel-- - } - for currentLevel < header.Level { - newL := ast.NewList('-') - ul.AppendChild(ul, newL) - currentLevel++ - ul = newL - } - li := ast.NewListItem(currentLevel * 2) - a := ast.NewLink() - a.Destination = []byte("#" + url.QueryEscape(header.ID)) - a.AppendChild(a, ast.NewString([]byte(header.Text))) - li.AppendChild(li, a) - ul.AppendChild(ul, li) - } - - return details -} diff --git a/modules/markup/markdown/transform_heading.go b/modules/markup/markdown/transform_heading.go deleted file mode 100644 index a229a7b1a4..0000000000 --- a/modules/markup/markdown/transform_heading.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2024 The Gitea Authors. All rights reserved. 
-// SPDX-License-Identifier: MIT - -package markdown - -import ( - "fmt" - - "code.gitea.io/gitea/modules/markup" - "code.gitea.io/gitea/modules/util" - - "github.com/yuin/goldmark/ast" - "github.com/yuin/goldmark/text" -) - -func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]Header) { - for _, attr := range v.Attributes() { - if _, ok := attr.Value.([]byte); !ok { - v.SetAttribute(attr.Name, fmt.Appendf(nil, "%v", attr.Value)) - } - } - txt := v.Text(reader.Source()) //nolint:staticcheck // Text is deprecated - header := Header{ - Text: util.UnsafeBytesToString(txt), - Level: v.Level, - } - if id, found := v.AttributeString("id"); found { - header.ID = util.UnsafeBytesToString(id.([]byte)) - } - *tocList = append(*tocList, header) - g.applyElementDir(v) -} diff --git a/modules/markup/mdstripper/mdstripper.go b/modules/markup/mdstripper/mdstripper.go index 19b852a3ee..bf69051e87 100644 --- a/modules/markup/mdstripper/mdstripper.go +++ b/modules/markup/mdstripper/mdstripper.go @@ -165,7 +165,6 @@ func StripMarkdownBytes(rawBytes []byte) ([]byte, []string) { ), goldmark.WithParserOptions( parser.WithAttribute(), - parser.WithAutoHeadingID(), ), goldmark.WithRendererOptions( html.WithUnsafe(), diff --git a/modules/markup/orgmode/orgmode.go b/modules/markup/orgmode/orgmode.go index 93c335d244..17d994734a 100644 --- a/modules/markup/orgmode/orgmode.go +++ b/modules/markup/orgmode/orgmode.go @@ -5,7 +5,6 @@ package orgmode import ( "fmt" - "html" "html/template" "io" "strings" @@ -17,7 +16,6 @@ import ( "code.gitea.io/gitea/modules/setting" "github.com/alecthomas/chroma/v2" - "github.com/alecthomas/chroma/v2/lexers" "github.com/niklasfasching/go-org/org" ) @@ -33,20 +31,16 @@ var ( _ markup.PostProcessRenderer = (*renderer)(nil) ) -// Name implements markup.Renderer func (renderer) Name() string { return "orgmode" } -// NeedPostProcess implements markup.PostProcessRenderer func (renderer) NeedPostProcess() 
bool { return true } -// Extensions implements markup.Renderer -func (renderer) Extensions() []string { - return []string{".org"} +func (renderer) FileNamePatterns() []string { + return []string{"*.org"} } -// SanitizerRules implements markup.Renderer func (renderer) SanitizerRules() []setting.MarkupSanitizerRule { return []setting.MarkupSanitizerRule{} } @@ -57,40 +51,20 @@ func Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error htmlWriter.HighlightCodeBlock = func(source, lang string, inline bool, params map[string]string) string { defer func() { if err := recover(); err != nil { + // catch the panic, log the error and return empty result log.Error("Panic in HighlightCodeBlock: %v\n%s", err, log.Stack(2)) - panic(err) } }() - w := &strings.Builder{} - lexer := lexers.Get(lang) - if lexer == nil && lang == "" { - lexer = lexers.Analyse(source) - if lexer == nil { - lexer = lexers.Fallback - } - lang = strings.ToLower(lexer.Config().Name) - } + lexer := highlight.GetChromaLexerWithFallback("", lang, nil) // don't use content to detect, it is too slow + lexer = chroma.Coalesce(lexer) + sb := &strings.Builder{} // include language-x class as part of commonmark spec - if err := ctx.RenderInternal.FormatWithSafeAttrs(w, `
    `, lang); err != nil {
    -			return ""
    -		}
    -		if lexer == nil {
    -			if _, err := w.WriteString(html.EscapeString(source)); err != nil {
    -				return ""
    -			}
    -		} else {
    -			lexer = chroma.Coalesce(lexer)
    -			if _, err := w.WriteString(string(highlight.CodeFromLexer(lexer, source))); err != nil {
    -				return ""
    -			}
    -		}
    -		if _, err := w.WriteString("
    "); err != nil { - return "" - } - - return w.String() + _ = ctx.RenderInternal.FormatWithSafeAttrs(sb, `
    `, strings.ToLower(lexer.Config().Name))
    +		_, _ = sb.WriteString(string(highlight.RenderCodeByLexer(lexer, source)))
    +		_, _ = sb.WriteString("
    ") + return sb.String() } w := &orgWriter{rctx: ctx, HTMLWriter: htmlWriter} diff --git a/modules/markup/render.go b/modules/markup/render.go index c645749065..5785dc5ad5 100644 --- a/modules/markup/render.go +++ b/modules/markup/render.go @@ -4,6 +4,7 @@ package markup import ( + "bytes" "context" "fmt" "html/template" @@ -16,9 +17,9 @@ import ( "code.gitea.io/gitea/modules/htmlutil" "code.gitea.io/gitea/modules/markup/internal" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/typesniffer" "code.gitea.io/gitea/modules/util" - "github.com/yuin/goldmark/ast" "golang.org/x/sync/errgroup" ) @@ -54,6 +55,23 @@ type RenderOptions struct { // used by external render. the router "/org/repo/render/..." will output the rendered content in a standalone page InStandalonePage bool + + // EnableHeadingIDGeneration controls whether to auto-generate IDs for HTML headings without id attribute. + // This should be enabled for repository files and wiki pages, but disabled for comments to avoid duplicate IDs. 
+ EnableHeadingIDGeneration bool +} + +type TocShowInSectionType string + +const ( + TocShowInSidebar TocShowInSectionType = "sidebar" + TocShowInMain TocShowInSectionType = "main" +) + +type TocHeadingItem struct { + HeadingLevel int + AnchorID string + InnerText string } // RenderContext represents a render context @@ -63,7 +81,8 @@ type RenderContext struct { // the context might be used by the "render" function, but it might also be used by "postProcess" function usedByRender bool - SidebarTocNode ast.Node + TocShowInSection TocShowInSectionType + TocHeadingItems []*TocHeadingItem RenderHelper RenderHelper RenderOptions RenderOptions @@ -112,6 +131,11 @@ func (ctx *RenderContext) WithInStandalonePage(v bool) *RenderContext { return ctx } +func (ctx *RenderContext) WithEnableHeadingIDGeneration(v bool) *RenderContext { + ctx.RenderOptions.EnableHeadingIDGeneration = v + return ctx +} + func (ctx *RenderContext) WithUseAbsoluteLink(v bool) *RenderContext { ctx.RenderOptions.UseAbsoluteLink = v return ctx @@ -122,22 +146,29 @@ func (ctx *RenderContext) WithHelper(helper RenderHelper) *RenderContext { return ctx } -// FindRendererByContext finds renderer by RenderContext -// TODO: it should be merged with other similar functions like GetRendererByFileName, DetectMarkupTypeByFileName, etc -func FindRendererByContext(ctx *RenderContext) (Renderer, error) { +func (ctx *RenderContext) DetectMarkupRenderer(prefetchBuf []byte) Renderer { if ctx.RenderOptions.MarkupType == "" && ctx.RenderOptions.RelativePath != "" { - ctx.RenderOptions.MarkupType = DetectMarkupTypeByFileName(ctx.RenderOptions.RelativePath) - if ctx.RenderOptions.MarkupType == "" { - return nil, util.NewInvalidArgumentErrorf("unsupported file to render: %q", ctx.RenderOptions.RelativePath) + var sniffedType typesniffer.SniffedType + if len(prefetchBuf) > 0 { + sniffedType = typesniffer.DetectContentType(prefetchBuf) } + ctx.RenderOptions.MarkupType = 
DetectRendererTypeByPrefetch(ctx.RenderOptions.RelativePath, sniffedType, prefetchBuf) } + return renderers[ctx.RenderOptions.MarkupType] +} - renderer := renderers[ctx.RenderOptions.MarkupType] +func (ctx *RenderContext) DetectMarkupRendererByReader(in io.Reader) (Renderer, io.Reader, error) { + prefetchBuf := make([]byte, 512) + n, err := util.ReadAtMost(in, prefetchBuf) + if err != nil && err != io.EOF { + return nil, nil, err + } + prefetchBuf = prefetchBuf[:n] + renderer := ctx.DetectMarkupRenderer(prefetchBuf) if renderer == nil { - return nil, util.NewNotExistErrorf("unsupported markup type: %q", ctx.RenderOptions.MarkupType) + return nil, nil, util.NewInvalidArgumentErrorf("unable to find a render") } - - return renderer, nil + return renderer, io.MultiReader(bytes.NewReader(prefetchBuf), in), nil } func RendererNeedPostProcess(renderer Renderer) bool { @@ -148,12 +179,12 @@ func RendererNeedPostProcess(renderer Renderer) bool { } // Render renders markup file to HTML with all specific handling stuff. 
-func Render(ctx *RenderContext, input io.Reader, output io.Writer) error { - renderer, err := FindRendererByContext(ctx) +func Render(rctx *RenderContext, origInput io.Reader, output io.Writer) error { + renderer, input, err := rctx.DetectMarkupRendererByReader(origInput) if err != nil { return err } - return RenderWithRenderer(ctx, renderer, input, output) + return RenderWithRenderer(rctx, renderer, input, output) } // RenderString renders Markup string to HTML with all specific handling stuff and return string @@ -265,12 +296,14 @@ func Init(renderHelpFuncs *RenderHelperFuncs) { } // since setting maybe changed extensions, this will reload all renderer extensions mapping - extRenderers = make(map[string]Renderer) + fileNameRenderers = make(map[string]Renderer) for _, renderer := range renderers { - for _, ext := range renderer.Extensions() { - extRenderers[strings.ToLower(ext)] = renderer + for _, pattern := range renderer.FileNamePatterns() { + fileNameRenderers[pattern] = renderer } } + + RefreshFileNamePatterns() } func ComposeSimpleDocumentMetas() map[string]string { diff --git a/modules/markup/renderer.go b/modules/markup/renderer.go index 82e9348706..c62c28ad2a 100644 --- a/modules/markup/renderer.go +++ b/modules/markup/renderer.go @@ -14,8 +14,8 @@ import ( // Renderer defines an interface for rendering markup file to HTML type Renderer interface { - Name() string // markup format name - Extensions() []string + Name() string // markup format name, also the renderer type, also the external tool name + FileNamePatterns() []string SanitizerRules() []setting.MarkupSanitizerRule Render(ctx *RenderContext, input io.Reader, output io.Writer) error } @@ -43,26 +43,52 @@ type RendererContentDetector interface { } var ( - extRenderers = make(map[string]Renderer) - renderers = make(map[string]Renderer) + fileNameRenderers = make(map[string]Renderer) + renderers = make(map[string]Renderer) ) // RegisterRenderer registers a new markup file renderer func 
RegisterRenderer(renderer Renderer) { + // TODO: need to handle conflicts renderers[renderer.Name()] = renderer - for _, ext := range renderer.Extensions() { - extRenderers[strings.ToLower(ext)] = renderer +} + +func RefreshFileNamePatterns() { + // TODO: need to handle conflicts + fileNameRenderers = make(map[string]Renderer) + for _, renderer := range renderers { + for _, ext := range renderer.FileNamePatterns() { + fileNameRenderers[strings.ToLower(ext)] = renderer + } } } -// GetRendererByFileName get renderer by filename -func GetRendererByFileName(filename string) Renderer { - extension := strings.ToLower(path.Ext(filename)) - return extRenderers[extension] +func DetectRendererTypeByFilename(filename string) Renderer { + basename := path.Base(strings.ToLower(filename)) + ext1 := path.Ext(basename) + if renderer := fileNameRenderers[basename]; renderer != nil { + return renderer + } + if renderer := fileNameRenderers["*"+ext1]; renderer != nil { + return renderer + } + if basename, ok := strings.CutSuffix(basename, ext1); ok { + ext2 := path.Ext(basename) + if renderer := fileNameRenderers["*"+ext2+ext1]; renderer != nil { + return renderer + } + } + return nil } -// DetectRendererType detects the markup type of the content -func DetectRendererType(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) string { +// DetectRendererTypeByPrefetch detects the markup type of the content +func DetectRendererTypeByPrefetch(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) string { + if filename != "" { + byExt := DetectRendererTypeByFilename(filename) + if byExt != nil { + return byExt.Name() + } + } for _, renderer := range renderers { if detector, ok := renderer.(RendererContentDetector); ok && detector.CanRender(filename, sniffedType, prefetchBuf) { return renderer.Name() @@ -71,18 +97,12 @@ func DetectRendererType(filename string, sniffedType typesniffer.SniffedType, pr return "" } -// DetectMarkupTypeByFileName returns 
the possible markup format type via the filename -func DetectMarkupTypeByFileName(filename string) string { - if parser := GetRendererByFileName(filename); parser != nil { - return parser.Name() - } - return "" -} - func PreviewableExtensions() []string { - extensions := make([]string, 0, len(extRenderers)) - for extension := range extRenderers { - extensions = append(extensions, extension) + exts := make([]string, 0, len(fileNameRenderers)) + for p := range fileNameRenderers { + if s, ok := strings.CutPrefix(p, "*"); ok { + exts = append(exts, s) + } } - return extensions + return exts } diff --git a/modules/markup/sanitizer_description_test.go b/modules/markup/sanitizer_description_test.go index ca72491f26..51833414f4 100644 --- a/modules/markup/sanitizer_description_test.go +++ b/modules/markup/sanitizer_description_test.go @@ -16,7 +16,7 @@ func TestDescriptionSanitizer(t *testing.T) { `THUMBS UP`, `THUMBS UP`, `Hello World`, `Hello World`, `
    `, ``, - `https://example.com`, `https://example.com`, + `https://example.com`, `https://example.com`, `data`, `data`, `Important!`, `Important!`, `
    Click me! Nothing to see here.
    `, `Click me! Nothing to see here.`, diff --git a/modules/migration/release.go b/modules/migration/release.go index f92cf25e7b..e25e7e4428 100644 --- a/modules/migration/release.go +++ b/modules/migration/release.go @@ -10,9 +10,12 @@ import ( // ReleaseAsset represents a release asset type ReleaseAsset struct { - ID int64 - Name string - ContentType *string `yaml:"content_type"` + ID int64 + Name string + + // There was a field "ContentType (content_type)" because Some forges can provide that for assets, + // but we don't need it when migrating, so the field is omitted here. + Size *int DownloadCount *int `yaml:"download_count"` Created time.Time diff --git a/modules/nosql/manager.go b/modules/nosql/manager.go index 375c2b5d00..9d3ab49c9a 100644 --- a/modules/nosql/manager.go +++ b/modules/nosql/manager.go @@ -20,7 +20,7 @@ var manager *Manager // Manager is the nosql connection manager type Manager struct { ctx context.Context - finished context.CancelFunc + finished process.FinishedFunc mutex sync.Mutex RedisConnections map[string]*redisClientHolder diff --git a/modules/packages/nuget/symbol_extractor.go b/modules/packages/nuget/symbol_extractor.go index 9c952e1f10..2eadee5463 100644 --- a/modules/packages/nuget/symbol_extractor.go +++ b/modules/packages/nuget/symbol_extractor.go @@ -142,8 +142,8 @@ func ParseDebugHeaderID(r io.ReadSeeker) (string, error) { if _, err := r.Read(b); err != nil { return "", err } - if i := bytes.IndexByte(b, 0); i != -1 { - buf.Write(b[:i]) + if before, _, ok := bytes.Cut(b, []byte{0}); ok { + buf.Write(before) return buf.String(), nil } buf.Write(b) diff --git a/modules/process/manager.go b/modules/process/manager.go index 661511ce8d..c51d6c76f9 100644 --- a/modules/process/manager.go +++ b/modules/process/manager.go @@ -13,6 +13,7 @@ import ( "time" "code.gitea.io/gitea/modules/gtprof" + "code.gitea.io/gitea/modules/util" ) // TODO: This packages still uses a singleton for the Manager. 
@@ -27,12 +28,14 @@ var ( DefaultContext = context.Background() ) -// IDType is a pid type -type IDType string +type ( + // IDType is a pid type + IDType string -// FinishedFunc is a function that marks that the process is finished and can be removed from the process table -// - it is simply an alias for context.CancelFunc and is only for documentary purposes -type FinishedFunc = context.CancelFunc + CancelCauseFunc func(cause ...error) + // FinishedFunc is a function that marks that the process is finished and can be removed from the process table + FinishedFunc func() +) var ( traceDisabled atomic.Int64 @@ -84,6 +87,10 @@ func GetManager() *Manager { return manager } +func cancelCauseFunc(cancelCause context.CancelCauseFunc) CancelCauseFunc { + return func(cause ...error) { cancelCause(util.OptionalArg(cause)) } +} + // AddContext creates a new context and adds it as a process. Once the process is finished, finished must be called // to remove the process from the process table. It should not be called until the process is finished but must always be called. // @@ -92,11 +99,10 @@ func GetManager() *Manager { // // Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the // process table. 
-func (pm *Manager) AddContext(parent context.Context, description string) (ctx context.Context, cancel context.CancelFunc, finished FinishedFunc) { - ctx, cancel = context.WithCancel(parent) - - ctx, _, finished = pm.Add(ctx, description, cancel, NormalProcessType, true) - +func (pm *Manager) AddContext(parent context.Context, description string) (context.Context, CancelCauseFunc, FinishedFunc) { + ctx, ctxCancel := context.WithCancelCause(parent) + cancel := cancelCauseFunc(ctxCancel) + ctx, _, finished := pm.Add(ctx, description, cancel, NormalProcessType, true) return ctx, cancel, finished } @@ -108,11 +114,10 @@ func (pm *Manager) AddContext(parent context.Context, description string) (ctx c // // Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the // process table. -func (pm *Manager) AddTypedContext(parent context.Context, description, processType string, currentlyRunning bool) (ctx context.Context, cancel context.CancelFunc, finished FinishedFunc) { - ctx, cancel = context.WithCancel(parent) - - ctx, _, finished = pm.Add(ctx, description, cancel, processType, currentlyRunning) - +func (pm *Manager) AddTypedContext(parent context.Context, description, processType string, currentlyRunning bool) (context.Context, CancelCauseFunc, FinishedFunc) { + ctx, ctxCancel := context.WithCancelCause(parent) + cancel := cancelCauseFunc(ctxCancel) + ctx, _, finished := pm.Add(ctx, description, cancel, processType, currentlyRunning) return ctx, cancel, finished } @@ -124,21 +129,23 @@ func (pm *Manager) AddTypedContext(parent context.Context, description, processT // // Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the // process table. 
-func (pm *Manager) AddContextTimeout(parent context.Context, timeout time.Duration, description string) (ctx context.Context, cancel context.CancelFunc, finished FinishedFunc) { +func (pm *Manager) AddContextTimeout(parent context.Context, timeout time.Duration, description string) (context.Context, CancelCauseFunc, FinishedFunc) { if timeout <= 0 { // it's meaningless to use timeout <= 0, and it must be a bug! so we must panic here to tell developers to make the timeout correct panic("the timeout must be greater than zero, otherwise the context will be cancelled immediately") } - - ctx, cancel = context.WithTimeout(parent, timeout) - - ctx, _, finished = pm.Add(ctx, description, cancel, NormalProcessType, true) - + ctx, ctxCancelTimeout := context.WithTimeout(parent, timeout) + ctx, ctxCancelCause := context.WithCancelCause(ctx) + cancel := func(cause ...error) { + ctxCancelCause(util.OptionalArg(cause)) + ctxCancelTimeout() + } + ctx, _, finished := pm.Add(ctx, description, cancel, NormalProcessType, true) return ctx, cancel, finished } // Add create a new process -func (pm *Manager) Add(ctx context.Context, description string, cancel context.CancelFunc, processType string, currentlyRunning bool) (context.Context, IDType, FinishedFunc) { +func (pm *Manager) Add(ctx context.Context, description string, cancel CancelCauseFunc, processType string, currentlyRunning bool) (context.Context, IDType, FinishedFunc) { parentPID := GetParentPID(ctx) pm.mutex.Lock() diff --git a/modules/process/process.go b/modules/process/process.go index 06a28c4a60..d81f5ffa1d 100644 --- a/modules/process/process.go +++ b/modules/process/process.go @@ -4,7 +4,6 @@ package process import ( - "context" "time" ) @@ -21,7 +20,7 @@ type process struct { ParentPID IDType Description string Start time.Time - Cancel context.CancelFunc + Cancel CancelCauseFunc Type string } diff --git a/modules/public/public.go b/modules/public/public.go index a7eace1538..3a5a76637e 100644 --- 
a/modules/public/public.go +++ b/modules/public/public.go @@ -36,7 +36,7 @@ func FileHandlerFunc() http.HandlerFunc { resp.WriteHeader(http.StatusMethodNotAllowed) return } - handleRequest(resp, req, assetFS, req.URL.Path) + handleRequest(resp, req, http.FS(assetFS), req.URL.Path) } } diff --git a/modules/queue/workerqueue.go b/modules/queue/workerqueue.go index 0f5b105551..d8b0722caf 100644 --- a/modules/queue/workerqueue.go +++ b/modules/queue/workerqueue.go @@ -21,7 +21,7 @@ import ( // It can use different underlying (base) queue types type WorkerPoolQueue[T any] struct { ctxRun context.Context - ctxRunCancel context.CancelFunc + ctxRunCancel process.FinishedFunc shutdownDone chan struct{} shutdownTimeout atomic.Int64 // in case some buggy handlers (workers) would hang forever, "shutdown" should finish in predictable time diff --git a/modules/repository/repo.go b/modules/repository/repo.go index ad4a53b858..9013590247 100644 --- a/modules/repository/repo.go +++ b/modules/repository/repo.go @@ -62,7 +62,9 @@ func StoreMissingLfsObjectsInRepository(ctx context.Context, repo *repo_model.Re pointerChan := make(chan lfs.PointerBlob) errChan := make(chan error, 1) - go lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan, errChan) + go func() { + errChan <- lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan) + }() downloadObjects := func(pointers []lfs.Pointer) error { err := lfsClient.Download(ctx, pointers, func(p lfs.Pointer, content io.ReadCloser, objectError error) error { @@ -150,13 +152,12 @@ func StoreMissingLfsObjectsInRepository(ctx context.Context, repo *repo_model.Re } } - err, has := <-errChan - if has { + err := <-errChan + if err != nil { log.Error("Repo[%-v]: Error enumerating LFS objects for repository: %v", repo, err) - return err } - return nil + return err } // shortRelease to reduce load memory, this struct can replace repo_model.Release @@ -233,7 +234,7 @@ func SyncReleasesWithTags(ctx context.Context, repo *repo_model.Repository, gitR return 
fmt.Errorf("unable to update tag %s for pull-mirror Repo[%d:%s/%s]: %w", tag.Name, repo.ID, repo.OwnerName, repo.Name, err) } } - added, deleted, updated = len(deletes), len(updates), len(inserts) + added, deleted, updated = len(inserts), len(deletes), len(updates) return nil }) if err != nil { diff --git a/modules/setting/git.go b/modules/setting/git.go index 318f2c0cac..29fd3daf8a 100644 --- a/modules/setting/git.go +++ b/modules/setting/git.go @@ -33,11 +33,8 @@ var Git = struct { DisablePartialClone bool DiffRenameSimilarityThreshold string Timeout struct { - Default int Migrate int Mirror int - Clone int - Pull int GC int `ini:"GC"` } `ini:"git.timeout"` }{ @@ -56,18 +53,12 @@ var Git = struct { DisablePartialClone: false, DiffRenameSimilarityThreshold: "50%", Timeout: struct { - Default int Migrate int Mirror int - Clone int - Pull int GC int `ini:"GC"` }{ - Default: 360, Migrate: 600, Mirror: 300, - Clone: 300, - Pull: 300, GC: 60, }, } diff --git a/modules/setting/markup.go b/modules/setting/markup.go index caf0d5f8d9..921af60ff5 100644 --- a/modules/setting/markup.go +++ b/modules/setting/markup.go @@ -6,6 +6,7 @@ package setting import ( "regexp" "strings" + "sync" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/util" @@ -43,22 +44,20 @@ var Markdown = struct { RenderOptionsRepoFile MarkdownRenderOptions `ini:"-"` CustomURLSchemes []string `ini:"CUSTOM_URL_SCHEMES"` // Actually it is a "markup" option because it is used in "post processor" - FileExtensions []string + FileNamePatterns []string `ini:"-"` EnableMath bool MathCodeBlockDetection []string MathCodeBlockOptions MarkdownMathCodeBlockOptions `ini:"-"` }{ - FileExtensions: strings.Split(".md,.markdown,.mdown,.mkd,.livemd", ","), - EnableMath: true, + EnableMath: true, } // MarkupRenderer defines the external parser configured in ini type MarkupRenderer struct { - Enabled bool MarkupName string Command string - FileExtensions []string + FilePatterns []string IsInputFile bool 
NeedPostProcess bool MarkupSanitizerRules []MarkupSanitizerRule @@ -77,6 +76,13 @@ type MarkupSanitizerRule struct { func loadMarkupFrom(rootCfg ConfigProvider) { mustMapSetting(rootCfg, "markdown", &Markdown) + + markdownFileExtensions := rootCfg.Section("markdown").Key("FILE_EXTENSIONS").Strings(",") + if len(markdownFileExtensions) == 0 || len(markdownFileExtensions) == 1 && markdownFileExtensions[0] == "" { + markdownFileExtensions = []string{".md", ".markdown", ".mdown", ".mkd", ".livemd"} + } + Markdown.FileNamePatterns = fileExtensionsToPatterns("markdown", markdownFileExtensions) + const none = "none" const renderOptionShortIssuePattern = "short-issue-pattern" @@ -215,21 +221,30 @@ func createMarkupSanitizerRule(name string, sec ConfigSection) (MarkupSanitizerR return rule, true } -func newMarkupRenderer(name string, sec ConfigSection) { - extensionReg := regexp.MustCompile(`\.\w`) +var extensionReg = sync.OnceValue(func() *regexp.Regexp { + return regexp.MustCompile(`^(\.[-\w]+)+$`) +}) - extensions := sec.Key("FILE_EXTENSIONS").Strings(",") - exts := make([]string, 0, len(extensions)) +func fileExtensionsToPatterns(sectionName string, extensions []string) []string { + patterns := make([]string, 0, len(extensions)) for _, extension := range extensions { - if !extensionReg.MatchString(extension) { - log.Warn(sec.Name() + " file extension " + extension + " is invalid. Extension ignored") + if !extensionReg().MatchString(extension) { + log.Warn("Config section %s file extension %s is invalid. 
Extension ignored", sectionName, extension) } else { - exts = append(exts, extension) + patterns = append(patterns, "*"+extension) } } + return patterns +} - if len(exts) == 0 { - log.Warn(sec.Name() + " file extension is empty, markup " + name + " ignored") +func newMarkupRenderer(name string, sec ConfigSection) { + if !sec.Key("ENABLED").MustBool(false) { + return + } + + fileNamePatterns := fileExtensionsToPatterns(name, sec.Key("FILE_EXTENSIONS").Strings(",")) + if len(fileNamePatterns) == 0 { + log.Warn("Config section %s file extension is empty, markup render is ignored", name) return } @@ -262,11 +277,10 @@ func newMarkupRenderer(name string, sec ConfigSection) { } ExternalMarkupRenderers = append(ExternalMarkupRenderers, &MarkupRenderer{ - Enabled: sec.Key("ENABLED").MustBool(false), - MarkupName: name, - FileExtensions: exts, - Command: command, - IsInputFile: sec.Key("IS_INPUT_FILE").MustBool(false), + MarkupName: name, + FilePatterns: fileNamePatterns, + Command: command, + IsInputFile: sec.Key("IS_INPUT_FILE").MustBool(false), RenderContentMode: renderContentMode, RenderContentSandbox: renderContentSandbox, diff --git a/modules/setting/storage.go b/modules/setting/storage.go index ee246158d9..9ee3f01633 100644 --- a/modules/setting/storage.go +++ b/modules/setting/storage.go @@ -172,11 +172,11 @@ func getStorageSectionByType(rootCfg ConfigProvider, typ string) (ConfigSection, targetType := targetSec.Key("STORAGE_TYPE").String() if targetType == "" { if !IsValidStorageType(StorageType(typ)) { - return nil, 0, fmt.Errorf("unknow storage type %q", typ) + return nil, 0, fmt.Errorf("unknown storage type %q", typ) } targetSec.Key("STORAGE_TYPE").SetValue(typ) } else if !IsValidStorageType(StorageType(targetType)) { - return nil, 0, fmt.Errorf("unknow storage type %q for section storage.%v", targetType, typ) + return nil, 0, fmt.Errorf("unknown storage type %q for section storage.%v", targetType, typ) } return targetSec, targetSecIsTyp, nil @@ -202,7 +202,7 @@ 
func getStorageTargetSection(rootCfg ConfigProvider, name, typ string, sec Confi } } - // check stoarge name thirdly + // check storage name thirdly targetSec, _ := rootCfg.GetSection(storageSectionName + "." + name) if targetSec != nil { targetType := targetSec.Key("STORAGE_TYPE").String() diff --git a/modules/structs/pull.go b/modules/structs/pull.go index 7cc58217a0..3ad2f78bd3 100644 --- a/modules/structs/pull.go +++ b/modules/structs/pull.go @@ -140,6 +140,8 @@ type CreatePullRequestOption struct { Reviewers []string `json:"reviewers"` // The list of team reviewer names TeamReviewers []string `json:"team_reviewers"` + // Whether maintainers can edit the pull request + AllowMaintainerEdit *bool `json:"allow_maintainer_edit"` } // EditPullRequestOption options when modify pull request diff --git a/modules/templates/base.go b/modules/templates/base.go index 2c2f35bbed..c8697cc7ef 100644 --- a/modules/templates/base.go +++ b/modules/templates/base.go @@ -4,9 +4,6 @@ package templates import ( - "slices" - "strings" - "code.gitea.io/gitea/modules/assetfs" "code.gitea.io/gitea/modules/setting" ) @@ -18,23 +15,3 @@ func AssetFS() *assetfs.LayeredFS { func CustomAssets() *assetfs.Layer { return assetfs.Local("custom", setting.CustomPath, "templates") } - -func ListWebTemplateAssetNames(assets *assetfs.LayeredFS) ([]string, error) { - files, err := assets.ListAllFiles(".", true) - if err != nil { - return nil, err - } - return slices.DeleteFunc(files, func(file string) bool { - return strings.HasPrefix(file, "mail/") || !strings.HasSuffix(file, ".tmpl") - }), nil -} - -func ListMailTemplateAssetNames(assets *assetfs.LayeredFS) ([]string, error) { - files, err := assets.ListAllFiles(".", true) - if err != nil { - return nil, err - } - return slices.DeleteFunc(files, func(file string) bool { - return !strings.HasPrefix(file, "mail/") || !strings.HasSuffix(file, ".tmpl") - }), nil -} diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 
a7aa321811..11c52bd5a7 100644 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -25,8 +25,6 @@ import ( // NewFuncMap returns functions for injecting to templates func NewFuncMap() template.FuncMap { return map[string]any{ - "ctx": func() any { return nil }, // template context function - "DumpVar": dumpVar, "NIL": func() any { return nil }, diff --git a/modules/templates/helper_test.go b/modules/templates/helper_test.go index 7e3a952e7b..f90818c0ad 100644 --- a/modules/templates/helper_test.go +++ b/modules/templates/helper_test.go @@ -168,3 +168,10 @@ func TestQueryBuild(t *testing.T) { assert.Equal(t, "&a=b&c=d&e=f", string(QueryBuild("&a=b&c=d&e=f", "k", ""))) }) } + +func TestQueryEscape(t *testing.T) { + // this test is a reference for "urlQueryEscape" in JS + in := "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" // all non-letter & non-number chars + expected := "%21%22%23%24%25%26%27%28%29%2A%2B%2C-.%2F%3A%3B%3C%3D%3E%3F%40%5B%5C%5D%5E_%60%7B%7C%7D~" + assert.Equal(t, expected, string(queryEscape(in))) +} diff --git a/modules/templates/htmlrenderer.go b/modules/templates/htmlrenderer.go index 8073a6e5f5..59b95cdd80 100644 --- a/modules/templates/htmlrenderer.go +++ b/modules/templates/htmlrenderer.go @@ -6,21 +6,18 @@ package templates import ( "bufio" "bytes" - "context" "errors" "fmt" + "html/template" "io" - "net/http" "path/filepath" "regexp" "strconv" "strings" - "sync" "sync/atomic" texttemplate "text/template" "code.gitea.io/gitea/modules/assetfs" - "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates/scopedtmpl" @@ -31,58 +28,27 @@ type TemplateExecutor scopedtmpl.TemplateExecutor type TplName string -type HTMLRender struct { +type tmplRender struct { templates atomic.Pointer[scopedtmpl.ScopedTemplate] + + collectTemplateNames func() ([]string, error) + readTemplateContent func(name string) ([]byte, error) } -var ( - htmlRender *HTMLRender - 
htmlRenderOnce sync.Once -) - -var ErrTemplateNotInitialized = errors.New("template system is not initialized, check your log for errors") - -func (h *HTMLRender) HTML(w io.Writer, status int, tplName TplName, data any, ctx context.Context) error { //nolint:revive // we don't use ctx, only pass it to the template executor - name := string(tplName) - if respWriter, ok := w.(http.ResponseWriter); ok { - if respWriter.Header().Get("Content-Type") == "" { - respWriter.Header().Set("Content-Type", "text/html; charset=utf-8") - } - respWriter.WriteHeader(status) - } - t, err := h.TemplateLookup(name, ctx) - if err != nil { - return texttemplate.ExecError{Name: name, Err: err} - } - return t.Execute(w, data) +func (h *tmplRender) Templates() *scopedtmpl.ScopedTemplate { + return h.templates.Load() } -func (h *HTMLRender) TemplateLookup(name string, ctx context.Context) (TemplateExecutor, error) { //nolint:revive // we don't use ctx, only pass it to the template executor - tmpls := h.templates.Load() - if tmpls == nil { - return nil, ErrTemplateNotInitialized - } - m := NewFuncMap() - m["ctx"] = func() any { return ctx } - return tmpls.Executor(name, m) -} - -func (h *HTMLRender) CompileTemplates() error { - assets := AssetFS() - extSuffix := ".tmpl" +func (h *tmplRender) recompileTemplates(dummyFuncMap template.FuncMap) error { tmpls := scopedtmpl.NewScopedTemplate() - tmpls.Funcs(NewFuncMap()) - files, err := ListWebTemplateAssetNames(assets) + tmpls.Funcs(dummyFuncMap) + names, err := h.collectTemplateNames() if err != nil { - return nil + return err } - for _, file := range files { - if !strings.HasSuffix(file, extSuffix) { - continue - } - name := strings.TrimSuffix(file, extSuffix) + for _, name := range names { tmpl := tmpls.New(filepath.ToSlash(name)) - buf, err := assets.ReadFile(file) + buf, err := h.readTemplateContent(name) if err != nil { return err } @@ -95,55 +61,20 @@ func (h *HTMLRender) CompileTemplates() error { return nil } -// HTMLRenderer init once 
and returns the globally shared html renderer -func HTMLRenderer() *HTMLRender { - htmlRenderOnce.Do(initHTMLRenderer) - return htmlRender +func ReloadAllTemplates() error { + return errors.Join(PageRendererReload(), MailRendererReload()) } -func ReloadHTMLTemplates() error { - log.Trace("Reloading HTML templates") - if err := htmlRender.CompileTemplates(); err != nil { - log.Error("Template error: %v\n%s", err, log.Stack(2)) - return err - } - return nil -} - -func initHTMLRenderer() { - rendererType := "static" - if !setting.IsProd { - rendererType = "auto-reloading" - } - log.Debug("Creating %s HTML Renderer", rendererType) - - htmlRender = &HTMLRender{} - if err := htmlRender.CompileTemplates(); err != nil { - p := &templateErrorPrettier{assets: AssetFS()} - wrapTmplErrMsg(p.handleFuncNotDefinedError(err)) - wrapTmplErrMsg(p.handleUnexpectedOperandError(err)) - wrapTmplErrMsg(p.handleExpectedEndError(err)) - wrapTmplErrMsg(p.handleGenericTemplateError(err)) - wrapTmplErrMsg(fmt.Sprintf("CompileTemplates error: %v", err)) - } - - if !setting.IsProd { - go AssetFS().WatchLocalChanges(graceful.GetManager().ShutdownContext(), func() { - _ = ReloadHTMLTemplates() - }) - } -} - -func wrapTmplErrMsg(msg string) { - if msg == "" { +func processStartupTemplateError(err error) { + if err == nil { return } - if setting.IsProd { + if setting.IsProd || setting.IsInTesting { // in prod mode, Gitea must have correct templates to run - log.Fatal("Gitea can't run with template errors: %s", msg) + log.Fatal("Gitea can't run with template errors: %v", err) } // in dev mode, do not need to really exit, because the template errors could be fixed by developer soon and the templates get reloaded - log.Error("There are template errors but Gitea continues to run in dev mode: %s", msg) + log.Error("There are template errors but Gitea continues to run in dev mode: %v", err) } type templateErrorPrettier struct { diff --git a/modules/templates/mail.go b/modules/templates/mail.go new file 
mode 100644 index 0000000000..ca13626468 --- /dev/null +++ b/modules/templates/mail.go @@ -0,0 +1,195 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package templates + +import ( + "html/template" + "io" + "regexp" + "slices" + "strings" + "sync" + texttmpl "text/template" + + "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" +) + +type MailRender struct { + TemplateNames []string + BodyTemplates struct { + HasTemplate func(name string) bool + ExecuteTemplate func(w io.Writer, name string, data any) error + } + + // FIXME: MAIL-TEMPLATE-SUBJECT: only "issue" related messages support using subject from templates + // It is an incomplete implementation from "Use templates for issue e-mail subject and body" https://github.com/go-gitea/gitea/pull/8329 + SubjectTemplates *texttmpl.Template + + tmplRenderer *tmplRender + + mockedBodyTemplates map[string]*template.Template +} + +// mailSubjectTextFuncMap returns functions for injecting to text templates, it's only used for mail subject +func mailSubjectTextFuncMap() texttmpl.FuncMap { + return texttmpl.FuncMap{ + "dict": dict, + "Eval": evalTokens, + + "EllipsisString": util.EllipsisDisplayString, + "AppName": func() string { + return setting.AppName + }, + "AppDomain": func() string { // documented in mail-templates.md + return setting.Domain + }, + } +} + +var mailSubjectSplit = regexp.MustCompile(`(?m)^-{3,}\s*$`) + +func newMailRenderer() (*MailRender, error) { + subjectTemplates := texttmpl.New("") + subjectTemplates.Funcs(mailSubjectTextFuncMap()) + + renderer := &MailRender{ + SubjectTemplates: subjectTemplates, + } + + assetFS := AssetFS() + + renderer.tmplRenderer = &tmplRender{ + collectTemplateNames: func() ([]string, error) { + names, err := assetFS.ListAllFiles(".", true) + if err != nil { + return nil, err + } + names = slices.DeleteFunc(names, func(file 
string) bool { + return !strings.HasPrefix(file, "mail/") || !strings.HasSuffix(file, ".tmpl") + }) + for i, name := range names { + names[i] = strings.TrimSuffix(strings.TrimPrefix(name, "mail/"), ".tmpl") + } + renderer.TemplateNames = names + return names, nil + }, + readTemplateContent: func(name string) ([]byte, error) { + content, err := assetFS.ReadFile("mail/" + name + ".tmpl") + if err != nil { + return nil, err + } + var subjectContent []byte + bodyContent := content + loc := mailSubjectSplit.FindIndex(content) + if loc != nil { + subjectContent, bodyContent = content[0:loc[0]], content[loc[1]:] + } + _, err = renderer.SubjectTemplates.New(name).Parse(string(subjectContent)) + if err != nil { + return nil, err + } + return bodyContent, nil + }, + } + + renderer.BodyTemplates.HasTemplate = func(name string) bool { + if renderer.mockedBodyTemplates[name] != nil { + return true + } + return renderer.tmplRenderer.Templates().HasTemplate(name) + } + + staticFuncMap := NewFuncMap() + renderer.BodyTemplates.ExecuteTemplate = func(w io.Writer, name string, data any) error { + if t, ok := renderer.mockedBodyTemplates[name]; ok { + return t.Execute(w, data) + } + t, err := renderer.tmplRenderer.Templates().Executor(name, staticFuncMap) + if err != nil { + return err + } + return t.Execute(w, data) + } + + err := renderer.tmplRenderer.recompileTemplates(staticFuncMap) + if err != nil { + return nil, err + } + return renderer, nil +} + +func (r *MailRender) MockTemplate(name, subject, body string) func() { + if r.mockedBodyTemplates == nil { + r.mockedBodyTemplates = make(map[string]*template.Template) + } + oldSubject := r.SubjectTemplates + r.SubjectTemplates, _ = r.SubjectTemplates.Clone() + texttmpl.Must(r.SubjectTemplates.New(name).Parse(subject)) + + oldBody, hasOldBody := r.mockedBodyTemplates[name] + mockFuncMap := NewFuncMap() + r.mockedBodyTemplates[name] = template.Must(template.New(name).Funcs(mockFuncMap).Parse(body)) + return func() { + 
r.SubjectTemplates = oldSubject + if hasOldBody { + r.mockedBodyTemplates[name] = oldBody + } else { + delete(r.mockedBodyTemplates, name) + } + } +} + +var ( + globalMailRenderer *MailRender + globalMailRendererMu sync.RWMutex +) + +func MailRendererReload() error { + globalMailRendererMu.Lock() + defer globalMailRendererMu.Unlock() + r, err := newMailRenderer() + if err != nil { + return err + } + globalMailRenderer = r + return nil +} + +func MailRenderer() *MailRender { + globalMailRendererMu.RLock() + r := globalMailRenderer + globalMailRendererMu.RUnlock() + if r != nil { + return r + } + + globalMailRendererMu.Lock() + defer globalMailRendererMu.Unlock() + if globalMailRenderer != nil { + return globalMailRenderer + } + + var err error + globalMailRenderer, err = newMailRenderer() + if err != nil { + log.Fatal("Failed to initialize mail renderer: %v", err) + } + + if !setting.IsProd { + go AssetFS().WatchLocalChanges(graceful.GetManager().ShutdownContext(), func() { + globalMailRendererMu.Lock() + defer globalMailRendererMu.Unlock() + r, err := newMailRenderer() + if err != nil { + log.Error("Mail template error: %v", err) + return + } + globalMailRenderer = r + }) + } + return globalMailRenderer +} diff --git a/modules/templates/mailer.go b/modules/templates/mailer.go deleted file mode 100644 index c43b760777..0000000000 --- a/modules/templates/mailer.go +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2022 The Gitea Authors. All rights reserved. 
-// SPDX-License-Identifier: MIT - -package templates - -import ( - "context" - "fmt" - "html/template" - "regexp" - "strings" - "sync/atomic" - texttmpl "text/template" - - "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" -) - -type MailTemplates struct { - TemplateNames []string - BodyTemplates *template.Template - SubjectTemplates *texttmpl.Template -} - -var mailSubjectSplit = regexp.MustCompile(`(?m)^-{3,}\s*$`) - -// mailSubjectTextFuncMap returns functions for injecting to text templates, it's only used for mail subject -func mailSubjectTextFuncMap() texttmpl.FuncMap { - return texttmpl.FuncMap{ - "dict": dict, - "Eval": evalTokens, - - "EllipsisString": util.EllipsisDisplayString, - "AppName": func() string { - return setting.AppName - }, - "AppDomain": func() string { // documented in mail-templates.md - return setting.Domain - }, - } -} - -func buildSubjectBodyTemplate(stpl *texttmpl.Template, btpl *template.Template, name string, content []byte) error { - // Split template into subject and body - var subjectContent []byte - bodyContent := content - loc := mailSubjectSplit.FindIndex(content) - if loc != nil { - subjectContent = content[0:loc[0]] - bodyContent = content[loc[1]:] - } - if _, err := stpl.New(name).Parse(string(subjectContent)); err != nil { - return fmt.Errorf("failed to parse template [%s/subject]: %w", name, err) - } - if _, err := btpl.New(name).Parse(string(bodyContent)); err != nil { - return fmt.Errorf("failed to parse template [%s/body]: %w", name, err) - } - return nil -} - -// LoadMailTemplates provides the templates required for sending notification mails. 
-func LoadMailTemplates(ctx context.Context, loadedTemplates *atomic.Pointer[MailTemplates]) { - assetFS := AssetFS() - refreshTemplates := func(firstRun bool) { - var templateNames []string - subjectTemplates := texttmpl.New("") - bodyTemplates := template.New("") - - subjectTemplates.Funcs(mailSubjectTextFuncMap()) - bodyTemplates.Funcs(NewFuncMap()) - - if !firstRun { - log.Trace("Reloading mail templates") - } - assetPaths, err := ListMailTemplateAssetNames(assetFS) - if err != nil { - log.Error("Failed to list mail templates: %v", err) - return - } - - for _, assetPath := range assetPaths { - content, layerName, err := assetFS.ReadLayeredFile(assetPath) - if err != nil { - log.Warn("Failed to read mail template %s by %s: %v", assetPath, layerName, err) - continue - } - tmplName := strings.TrimPrefix(strings.TrimSuffix(assetPath, ".tmpl"), "mail/") - if firstRun { - log.Trace("Adding mail template %s: %s by %s", tmplName, assetPath, layerName) - } - templateNames = append(templateNames, tmplName) - if err = buildSubjectBodyTemplate(subjectTemplates, bodyTemplates, tmplName, content); err != nil { - if firstRun { - log.Fatal("Failed to parse mail template, err: %v", err) - } - log.Error("Failed to parse mail template, err: %v", err) - } - } - loaded := &MailTemplates{ - TemplateNames: templateNames, - BodyTemplates: bodyTemplates, - SubjectTemplates: subjectTemplates, - } - loadedTemplates.Store(loaded) - } - - refreshTemplates(true) - - if !setting.IsProd { - // Now subjectTemplates and bodyTemplates are both synchronized - // thus it is safe to call refresh from a different goroutine - go assetFS.WatchLocalChanges(ctx, func() { - refreshTemplates(false) - }) - } -} diff --git a/modules/templates/page.go b/modules/templates/page.go new file mode 100644 index 0000000000..8f6c82fc4b --- /dev/null +++ b/modules/templates/page.go @@ -0,0 +1,98 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package templates + +import ( + "context" + "html/template" + "io" + "net/http" + "slices" + "strings" + "sync" + texttemplate "text/template" + + "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" +) + +type pageRenderer struct { + tmplRenderer *tmplRender +} + +func (r *pageRenderer) funcMap(ctx context.Context) template.FuncMap { + pageFuncMap := NewFuncMap() + pageFuncMap["ctx"] = func() any { return ctx } + return pageFuncMap +} + +func (r *pageRenderer) funcMapDummy() template.FuncMap { + dummyFuncMap := NewFuncMap() + dummyFuncMap["ctx"] = func() any { return nil } // for template compilation only, no context available + return dummyFuncMap +} + +func (r *pageRenderer) TemplateLookup(tmpl string, templateCtx context.Context) (TemplateExecutor, error) { //nolint:revive // we don't use ctx, only pass it to the template executor + return r.tmplRenderer.Templates().Executor(tmpl, r.funcMap(templateCtx)) +} + +func (r *pageRenderer) HTML(w io.Writer, status int, tplName TplName, data any, templateCtx context.Context) error { //nolint:revive // we don't use ctx, only pass it to the template executor + name := string(tplName) + if respWriter, ok := w.(http.ResponseWriter); ok { + if respWriter.Header().Get("Content-Type") == "" { + respWriter.Header().Set("Content-Type", "text/html; charset=utf-8") + } + respWriter.WriteHeader(status) + } + t, err := r.TemplateLookup(name, templateCtx) + if err != nil { + return texttemplate.ExecError{Name: name, Err: err} + } + return t.Execute(w, data) +} + +var PageRenderer = sync.OnceValue(func() *pageRenderer { + rendererType := util.Iif(setting.IsProd, "static", "auto-reloading") + log.Debug("Creating %s HTML Renderer", rendererType) + + assetFS := AssetFS() + tr := &tmplRender{ + collectTemplateNames: func() ([]string, error) { + names, err := assetFS.ListAllFiles(".", true) + if err != nil 
{ + return nil, err + } + names = slices.DeleteFunc(names, func(file string) bool { + return strings.HasPrefix(file, "mail/") || !strings.HasSuffix(file, ".tmpl") + }) + for i, file := range names { + names[i] = strings.TrimSuffix(file, ".tmpl") + } + return names, nil + }, + readTemplateContent: func(name string) ([]byte, error) { + return assetFS.ReadFile(name + ".tmpl") + }, + } + + pr := &pageRenderer{tmplRenderer: tr} + if err := tr.recompileTemplates(pr.funcMapDummy()); err != nil { + processStartupTemplateError(err) + } + + if !setting.IsProd { + go AssetFS().WatchLocalChanges(graceful.GetManager().ShutdownContext(), func() { + if err := tr.recompileTemplates(pr.funcMapDummy()); err != nil { + log.Error("Template error: %v\n%s", err, log.Stack(2)) + } + }) + } + return pr +}) + +func PageRendererReload() error { + return PageRenderer().tmplRenderer.recompileTemplates(PageRenderer().funcMapDummy()) +} diff --git a/modules/templates/scopedtmpl/scopedtmpl.go b/modules/templates/scopedtmpl/scopedtmpl.go index 34e8b9ad70..de066124b9 100644 --- a/modules/templates/scopedtmpl/scopedtmpl.go +++ b/modules/templates/scopedtmpl/scopedtmpl.go @@ -61,6 +61,10 @@ func (t *ScopedTemplate) Freeze() { t.all.Funcs(m) } +func (t *ScopedTemplate) HasTemplate(name string) bool { + return t.all.Lookup(name) != nil +} + func (t *ScopedTemplate) Executor(name string, funcMap template.FuncMap) (TemplateExecutor, error) { t.scopedMu.RLock() scopedTmplSet, ok := t.scopedTemplateSets[name] diff --git a/modules/templates/vars/vars.go b/modules/templates/vars/vars.go index 500078d4b8..60d11ea609 100644 --- a/modules/templates/vars/vars.go +++ b/modules/templates/vars/vars.go @@ -10,25 +10,6 @@ import ( "unicode/utf8" ) -// ErrWrongSyntax represents a wrong syntax with a template -type ErrWrongSyntax struct { - Template string -} - -func (err ErrWrongSyntax) Error() string { - return "wrong syntax found in " + err.Template -} - -// ErrVarMissing represents an error that no matched 
variable -type ErrVarMissing struct { - Template string - Var string -} - -func (err ErrVarMissing) Error() string { - return fmt.Sprintf("the variable %s is missing for %s", err.Var, err.Template) -} - // Expand replaces all variables like {var} by `vars` map, it always returns the expanded string regardless of errors // if error occurs, the error part doesn't change and is returned as it is. func Expand(template string, vars map[string]string) (string, error) { @@ -66,14 +47,14 @@ func Expand(template string, vars map[string]string) (string, error) { posBegin = posEnd if part == "{}" || part[len(part)-1] != '}' { // treat "{}" or "{..." as error - err = ErrWrongSyntax{Template: template} + err = fmt.Errorf("wrong syntax found in %s", template) buf.WriteString(part) } else { // now we get a valid key "{...}" key := part[1 : len(part)-1] keyFirst, _ := utf8.DecodeRuneInString(key) if unicode.IsSpace(keyFirst) || unicode.IsPunct(keyFirst) || unicode.IsControl(keyFirst) { - // the if key doesn't start with a letter, then we do not treat it as a var now + // if the key doesn't start with a letter, then we do not treat it as a var now buf.WriteString(part) } else { // look up in the map @@ -82,7 +63,7 @@ func Expand(template string, vars map[string]string) (string, error) { } else { // write the non-existing var as it is buf.WriteString(part) - err = ErrVarMissing{Template: template, Var: key} + err = fmt.Errorf("the variable %s is missing for %s", key, template) } } } diff --git a/modules/test/utils.go b/modules/test/utils.go index 53c6a3ed52..34c11ff6b2 100644 --- a/modules/test/utils.go +++ b/modules/test/utils.go @@ -4,6 +4,9 @@ package test import ( + "archive/tar" + "compress/gzip" + "io" "net/http" "net/http/httptest" "os" @@ -71,3 +74,31 @@ func SetupGiteaRoot() string { _ = os.Setenv("GITEA_ROOT", giteaRoot) return giteaRoot } + +func ReadAllTarGzContent(r io.Reader) (map[string]string, error) { + gzr, err := gzip.NewReader(r) + if err != nil { + return nil, 
err + } + + content := make(map[string]string) + + tr := tar.NewReader(gzr) + for { + hd, err := tr.Next() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + buf, err := io.ReadAll(tr) + if err != nil { + return nil, err + } + + content[hd.Name] = string(buf) + } + return content, nil +} diff --git a/modules/testlogger/testlogger.go b/modules/testlogger/testlogger.go index 60e281d403..b0f38644a7 100644 --- a/modules/testlogger/testlogger.go +++ b/modules/testlogger/testlogger.go @@ -4,6 +4,7 @@ package testlogger import ( + "context" "fmt" "os" "runtime" @@ -108,30 +109,33 @@ func PrintCurrentTest(t testing.TB, skip ...int) func() { actualSkip := util.OptionalArg(skip) + 1 _, filename, line, _ := runtime.Caller(actualSkip) + getRuntimeStackAll := func() string { + stack := make([]byte, 1024*1024) + n := runtime.Stack(stack, true) + return util.UnsafeBytesToString(stack[:n]) + } + + deferHasRun := false + t.Cleanup(func() { + if !deferHasRun { + Printf("!!! defer function hasn't been run but Cleanup is called\n%s", getRuntimeStackAll()) + } + }) Printf("=== %s (%s:%d)\n", log.NewColoredValue(t.Name()), strings.TrimPrefix(filename, prefix), line) WriterCloser.pushT(t) timeoutChecker := time.AfterFunc(TestTimeout, func() { - l := 128 * 1024 - var stack []byte - for { - stack = make([]byte, l) - n := runtime.Stack(stack, true) - if n <= l { - stack = stack[:n] - break - } - l = n - } - Printf("!!! %s ... timeout: %v ... stacktrace:\n%s\n\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestTimeout, string(stack)) + Printf("!!! %s ... timeout: %v ... stacktrace:\n%s\n\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestTimeout, getRuntimeStackAll()) }) return func() { + deferHasRun = true flushStart := time.Now() slowFlushChecker := time.AfterFunc(TestSlowFlush, func() { Printf("+++ %s ... 
still flushing after %v ...\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestSlowFlush) }) if err := queue.GetManager().FlushAll(t.Context(), -1); err != nil { - t.Errorf("Flushing queues failed with error %v", err) + // if panic occurs, then the t.Context() is also cancelled ahead, so here it shows "context canceled" error. + t.Errorf("Flushing queues failed with error %q, cause %q", err, context.Cause(t.Context())) } slowFlushChecker.Stop() timeoutChecker.Stop() diff --git a/modules/typesniffer/typesniffer.go b/modules/typesniffer/typesniffer.go index 2e8d9c4a1e..0c4867d8f0 100644 --- a/modules/typesniffer/typesniffer.go +++ b/modules/typesniffer/typesniffer.go @@ -107,6 +107,17 @@ func detectFileTypeBox(data []byte) (brands []string, found bool) { return brands, true } +func isEmbeddedOpenType(data []byte) bool { + // https://www.w3.org/submissions/EOT + if len(data) < 80 { + return false + } + version := binary.LittleEndian.Uint32(data[8:]) // Actually this standard is abandoned (for IE6-IE11 only), there are only 3 versions defined + magic := binary.LittleEndian.Uint16(data[34:36]) // MagicNumber: 0x504C ("LP") + reserved := data[64:80] // Reserved 1-4 (each: unsigned long) + return (version == 0x00010000 || version == 0x00020001 || version == 0x00020002) && magic == 0x504C && bytes.Count(reserved, []byte{0}) == len(reserved) +} + // DetectContentType extends http.DetectContentType with more content types. Defaults to text/plain if input is empty. func DetectContentType(data []byte) SniffedType { if len(data) == 0 { @@ -119,6 +130,18 @@ func DetectContentType(data []byte) SniffedType { data = data[:SniffContentSize] } + const typeMsFontObject = "application/vnd.ms-fontobject" + if ct == typeMsFontObject { + // Stupid Golang blindly detects any content with 34th-35th bytes being "LP" as font. + // If it is not really for ".eot" content, we try to detect it again by hiding the "LP", see the test for more details. 
+ if isEmbeddedOpenType(data) { + return SniffedType{typeMsFontObject} + } + data = slices.Clone(data) + data[34] = 'l' + ct = http.DetectContentType(data) + } + vars := globalVars() // SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888 detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html") diff --git a/modules/typesniffer/typesniffer_test.go b/modules/typesniffer/typesniffer_test.go index ad3f78afdc..17d67f41f7 100644 --- a/modules/typesniffer/typesniffer_test.go +++ b/modules/typesniffer/typesniffer_test.go @@ -6,6 +6,7 @@ package typesniffer import ( "encoding/base64" "encoding/hex" + "net/http" "strings" "testing" @@ -154,3 +155,25 @@ func TestDetectContentTypeAvif(t *testing.T) { st := DetectContentType(buf) assert.Equal(t, MimeTypeImageAvif, st.contentType) } + +func TestDetectContentTypeIncorrectFont(t *testing.T) { + s := "Stupid Golang keep detecting 34th LP as font" + // They don't want to have any improvement to it: https://github.com/golang/go/issues/77172 + golangDetected := http.DetectContentType([]byte(s)) + assert.Equal(t, "application/vnd.ms-fontobject", golangDetected) + // We have to make our patch to make it work correctly + ourDetected := DetectContentType([]byte(s)) + assert.Equal(t, "text/plain; charset=utf-8", ourDetected.contentType) + + // For binary content, ensure it still detects as font. 
The content is from "opensans-regular.eot" + b := []byte{ + 0x3d, 0x30, 0x00, 0x00, 0x6b, 0x2f, 0x00, 0x00, 0x02, 0x00, 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x02, 0x0b, 0x06, 0x06, 0x03, 0x05, 0x04, 0x02, 0x02, 0x04, 0x01, 0x00, 0x90, 0x01, 0x00, 0x00, + 0x04, 0x00, 0x4c, 0x50, 0xef, 0x02, 0x00, 0xe0, 0x5b, 0x20, 0x00, 0x40, 0x28, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x9f, 0x01, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x63, 0xf4, 0x17, 0x14, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x12, 0x00, 0x4f, 0x00, 0x70, 0x00, 0x65, 0x00, 0x6e, 0x00, 0x20, 0x00, 0x53, 0x00, + } + assert.Equal(t, "application/vnd.ms-fontobject", http.DetectContentType(b)) + assert.Equal(t, "application/vnd.ms-fontobject", DetectContentType(b).contentType) +} diff --git a/modules/util/buffer.go b/modules/util/buffer.go new file mode 100644 index 0000000000..c5af750292 --- /dev/null +++ b/modules/util/buffer.go @@ -0,0 +1,22 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package util + +import "bytes" + +func BufioScannerSplit(b byte) func(data []byte, atEOF bool) (advance int, token []byte, err error) { + // reference: bufio.ScanLines + return func(data []byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := bytes.IndexByte(data, b); i >= 0 { + return i + 1, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + } +} diff --git a/modules/validation/binding.go b/modules/validation/binding.go index 3ecc532613..3f40e5ec97 100644 --- a/modules/validation/binding.go +++ b/modules/validation/binding.go @@ -219,8 +219,8 @@ func portOnly(hostport string) string { if !ok { return "" } - if i := strings.Index(hostport, "]:"); i != -1 { - return hostport[i+len("]:"):] + if _, after2, ok2 := strings.Cut(hostport, "]:"); ok2 { + return after2 } if strings.Contains(hostport, "]") { return "" diff --git a/modules/web/middleware/cookie.go b/modules/web/middleware/cookie.go index ad9aee6478..f98aceba10 100644 --- a/modules/web/middleware/cookie.go +++ b/modules/web/middleware/cookie.go @@ -14,14 +14,24 @@ import ( "code.gitea.io/gitea/modules/util" ) +const cookieRedirectTo = "redirect_to" + +func GetRedirectToCookie(req *http.Request) string { + return GetSiteCookie(req, cookieRedirectTo) +} + // SetRedirectToCookie convenience function to set the RedirectTo cookie consistently func SetRedirectToCookie(resp http.ResponseWriter, value string) { - SetSiteCookie(resp, "redirect_to", value, 0) + SetSiteCookie(resp, cookieRedirectTo, value, 0) } // DeleteRedirectToCookie convenience function to delete most cookies consistently func DeleteRedirectToCookie(resp http.ResponseWriter) { - SetSiteCookie(resp, "redirect_to", "", -1) + SetSiteCookie(resp, cookieRedirectTo, "", -1) +} + +func RedirectLinkUserLogin(req *http.Request) string { + return setting.AppSubURL + "/user/login?redirect_to=" + 
url.QueryEscape(setting.AppSubURL+req.URL.RequestURI()) } // GetSiteCookie returns given cookie value from request header. diff --git a/options/fileicon/material-icon-rules.json b/options/fileicon/material-icon-rules.json index 6b17e5be67..8ee5dde0b2 100644 --- a/options/fileicon/material-icon-rules.json +++ b/options/fileicon/material-icon-rules.json @@ -460,6 +460,22 @@ ".blog": "folder-docs", "_blog": "folder-docs", "__blog__": "folder-docs", + "knowledge": "folder-docs", + ".knowledge": "folder-docs", + "_knowledge": "folder-docs", + "__knowledge__": "folder-docs", + "diary": "folder-docs", + ".diary": "folder-docs", + "_diary": "folder-docs", + "__diary__": "folder-docs", + "note": "folder-docs", + ".note": "folder-docs", + "_note": "folder-docs", + "__note__": "folder-docs", + "notes": "folder-docs", + ".notes": "folder-docs", + "_notes": "folder-docs", + "__notes__": "folder-docs", "github/workflows": "folder-gh-workflows", ".github/workflows": "folder-gh-workflows", "_github/workflows": "folder-gh-workflows", @@ -916,6 +932,14 @@ ".sql": "folder-database", "_sql": "folder-database", "__sql__": "folder-database", + "migrations": "folder-migrations", + ".migrations": "folder-migrations", + "_migrations": "folder-migrations", + "__migrations__": "folder-migrations", + "migration": "folder-migrations", + ".migration": "folder-migrations", + "_migration": "folder-migrations", + "__migration__": "folder-migrations", "log": "folder-log", ".log": "folder-log", "_log": "folder-log", @@ -1008,6 +1032,14 @@ ".recordings": "folder-audio", "_recordings": "folder-audio", "__recordings__": "folder-audio", + "playlist": "folder-audio", + ".playlist": "folder-audio", + "_playlist": "folder-audio", + "__playlist__": "folder-audio", + "playlists": "folder-audio", + ".playlists": "folder-audio", + "_playlists": "folder-audio", + "__playlists__": "folder-audio", "vid": "folder-video", ".vid": "folder-video", "_vid": "folder-video", @@ -1544,6 +1576,22 @@ ".backends": 
"folder-server", "_backends": "folder-server", "__backends__": "folder-server", + "inventory": "folder-server", + ".inventory": "folder-server", + "_inventory": "folder-server", + "__inventory__": "folder-server", + "inventories": "folder-server", + ".inventories": "folder-server", + "_inventories": "folder-server", + "__inventories__": "folder-server", + "infrastructure": "folder-server", + ".infrastructure": "folder-server", + "_infrastructure": "folder-server", + "__infrastructure__": "folder-server", + "infra": "folder-server", + ".infra": "folder-server", + "_infra": "folder-server", + "__infra__": "folder-server", "client": "folder-client", ".client": "folder-client", "_client": "folder-client", @@ -1992,6 +2040,14 @@ ".calculations": "folder-functions", "_calculations": "folder-functions", "__calculations__": "folder-functions", + "composable": "folder-functions", + ".composable": "folder-functions", + "_composable": "folder-functions", + "__composable__": "folder-functions", + "composables": "folder-functions", + ".composables": "folder-functions", + "_composables": "folder-functions", + "__composables__": "folder-functions", "generator": "folder-generator", ".generator": "folder-generator", "_generator": "folder-generator", @@ -2936,6 +2992,14 @@ ".projects": "folder-project", "_projects": "folder-project", "__projects__": "folder-project", + "proj": "folder-project", + ".proj": "folder-project", + "_proj": "folder-project", + "__proj__": "folder-project", + "projs": "folder-project", + ".projs": "folder-project", + "_projs": "folder-project", + "__projs__": "folder-project", "prompt": "folder-prompts", ".prompt": "folder-prompts", "_prompt": "folder-prompts", @@ -3431,6 +3495,10 @@ "..cursor": "folder-cursor", "_.cursor": "folder-cursor", "__.cursor__": "folder-cursor", + ".gemini": "folder-gemini-ai", + "..gemini": "folder-gemini-ai", + "_.gemini": "folder-gemini-ai", + "__.gemini__": "folder-gemini-ai", "input": "folder-input", ".input": "folder-input", 
"_input": "folder-input", @@ -3447,6 +3515,14 @@ ".in": "folder-input", "_in": "folder-input", "__in__": "folder-input", + "salt": "folder-salt", + ".salt": "folder-salt", + "_salt": "folder-salt", + "__salt__": "folder-salt", + "saltstack": "folder-salt", + ".saltstack": "folder-salt", + "_saltstack": "folder-salt", + "__saltstack__": "folder-salt", "simulations": "folder-simulations", ".simulations": "folder-simulations", "_simulations": "folder-simulations", @@ -3961,6 +4037,22 @@ ".blog": "folder-docs-open", "_blog": "folder-docs-open", "__blog__": "folder-docs-open", + "knowledge": "folder-docs-open", + ".knowledge": "folder-docs-open", + "_knowledge": "folder-docs-open", + "__knowledge__": "folder-docs-open", + "diary": "folder-docs-open", + ".diary": "folder-docs-open", + "_diary": "folder-docs-open", + "__diary__": "folder-docs-open", + "note": "folder-docs-open", + ".note": "folder-docs-open", + "_note": "folder-docs-open", + "__note__": "folder-docs-open", + "notes": "folder-docs-open", + ".notes": "folder-docs-open", + "_notes": "folder-docs-open", + "__notes__": "folder-docs-open", "github/workflows": "folder-gh-workflows-open", ".github/workflows": "folder-gh-workflows-open", "_github/workflows": "folder-gh-workflows-open", @@ -4417,6 +4509,14 @@ ".sql": "folder-database-open", "_sql": "folder-database-open", "__sql__": "folder-database-open", + "migrations": "folder-migrations-open", + ".migrations": "folder-migrations-open", + "_migrations": "folder-migrations-open", + "__migrations__": "folder-migrations-open", + "migration": "folder-migrations-open", + ".migration": "folder-migrations-open", + "_migration": "folder-migrations-open", + "__migration__": "folder-migrations-open", "log": "folder-log-open", ".log": "folder-log-open", "_log": "folder-log-open", @@ -4509,6 +4609,14 @@ ".recordings": "folder-audio-open", "_recordings": "folder-audio-open", "__recordings__": "folder-audio-open", + "playlist": "folder-audio-open", + ".playlist": 
"folder-audio-open", + "_playlist": "folder-audio-open", + "__playlist__": "folder-audio-open", + "playlists": "folder-audio-open", + ".playlists": "folder-audio-open", + "_playlists": "folder-audio-open", + "__playlists__": "folder-audio-open", "vid": "folder-video-open", ".vid": "folder-video-open", "_vid": "folder-video-open", @@ -5045,6 +5153,22 @@ ".backends": "folder-server-open", "_backends": "folder-server-open", "__backends__": "folder-server-open", + "inventory": "folder-server-open", + ".inventory": "folder-server-open", + "_inventory": "folder-server-open", + "__inventory__": "folder-server-open", + "inventories": "folder-server-open", + ".inventories": "folder-server-open", + "_inventories": "folder-server-open", + "__inventories__": "folder-server-open", + "infrastructure": "folder-server-open", + ".infrastructure": "folder-server-open", + "_infrastructure": "folder-server-open", + "__infrastructure__": "folder-server-open", + "infra": "folder-server-open", + ".infra": "folder-server-open", + "_infra": "folder-server-open", + "__infra__": "folder-server-open", "client": "folder-client-open", ".client": "folder-client-open", "_client": "folder-client-open", @@ -5493,6 +5617,14 @@ ".calculations": "folder-functions-open", "_calculations": "folder-functions-open", "__calculations__": "folder-functions-open", + "composable": "folder-functions-open", + ".composable": "folder-functions-open", + "_composable": "folder-functions-open", + "__composable__": "folder-functions-open", + "composables": "folder-functions-open", + ".composables": "folder-functions-open", + "_composables": "folder-functions-open", + "__composables__": "folder-functions-open", "generator": "folder-generator-open", ".generator": "folder-generator-open", "_generator": "folder-generator-open", @@ -6437,6 +6569,14 @@ ".projects": "folder-project-open", "_projects": "folder-project-open", "__projects__": "folder-project-open", + "proj": "folder-project-open", + ".proj": 
"folder-project-open", + "_proj": "folder-project-open", + "__proj__": "folder-project-open", + "projs": "folder-project-open", + ".projs": "folder-project-open", + "_projs": "folder-project-open", + "__projs__": "folder-project-open", "prompt": "folder-prompts-open", ".prompt": "folder-prompts-open", "_prompt": "folder-prompts-open", @@ -6932,6 +7072,10 @@ "..cursor": "folder-cursor-open", "_.cursor": "folder-cursor-open", "__.cursor__": "folder-cursor-open", + ".gemini": "folder-gemini-ai-open", + "..gemini": "folder-gemini-ai-open", + "_.gemini": "folder-gemini-ai-open", + "__.gemini__": "folder-gemini-ai-open", "input": "folder-input-open", ".input": "folder-input-open", "_input": "folder-input-open", @@ -6948,6 +7092,14 @@ ".in": "folder-input-open", "_in": "folder-input-open", "__in__": "folder-input-open", + "salt": "folder-salt-open", + ".salt": "folder-salt-open", + "_salt": "folder-salt-open", + "__salt__": "folder-salt-open", + "saltstack": "folder-salt-open", + ".saltstack": "folder-salt-open", + "_saltstack": "folder-salt-open", + "__saltstack__": "folder-salt-open", "simulations": "folder-simulations-open", ".simulations": "folder-simulations-open", "_simulations": "folder-simulations-open", @@ -7213,6 +7365,7 @@ "csproj": "visualstudio", "ruleset": "visualstudio", "sln": "visualstudio", + "slnf": "visualstudio", "slnx": "visualstudio", "suo": "visualstudio", "vb": "visualstudio", @@ -7715,6 +7868,7 @@ "tfvars": "terraform", "tfstate": "terraform", "tfbackend": "terraform", + "terraformignore": "terraform", "tofu": "opentofu", "blade.php": "laravel", "inky.php": "laravel", @@ -8162,6 +8316,7 @@ "toc": "toc", "cue": "cue", "lean": "lean", + "sls": "salt", "cljx": "clojure", "clojure": "clojure", "edn": "clojure", @@ -8453,6 +8608,7 @@ ".ruff.toml": "ruff", "uv.toml": "uv", ".uv.toml": "uv", + "uv.lock": "uv", "sconstruct": "scons", "sconscript": "scons", "scsub": "scons", @@ -10150,6 +10306,30 @@ "esbuild.config.ts": "esbuild", "esbuild.config.mts": 
"esbuild", "esbuild.config.cts": "esbuild", + "esbuild.dev.js": "esbuild", + "esbuild.dev.mjs": "esbuild", + "esbuild.dev.cjs": "esbuild", + "esbuild.dev.ts": "esbuild", + "esbuild.dev.mts": "esbuild", + "esbuild.dev.cts": "esbuild", + "esbuild.stage.js": "esbuild", + "esbuild.stage.mjs": "esbuild", + "esbuild.stage.cjs": "esbuild", + "esbuild.stage.ts": "esbuild", + "esbuild.stage.mts": "esbuild", + "esbuild.stage.cts": "esbuild", + "esbuild.prod.js": "esbuild", + "esbuild.prod.mjs": "esbuild", + "esbuild.prod.cjs": "esbuild", + "esbuild.prod.ts": "esbuild", + "esbuild.prod.mts": "esbuild", + "esbuild.prod.cts": "esbuild", + "esbuild.test.js": "esbuild", + "esbuild.test.mjs": "esbuild", + "esbuild.test.cjs": "esbuild", + "esbuild.test.ts": "esbuild", + "esbuild.test.mts": "esbuild", + "esbuild.test.cts": "esbuild", "drizzle.config.ts": "drizzle", "drizzle.config.dev.ts": "drizzle", "drizzle.config.prod.ts": "drizzle", diff --git a/options/fileicon/material-icon-svgs.json b/options/fileicon/material-icon-svgs.json index f5254099ad..6713c4afa9 100644 --- a/options/fileicon/material-icon-svgs.json +++ b/options/fileicon/material-icon-svgs.json @@ -67,7 +67,7 @@ "biome": "", "bitbucket": "", "bithound": "", - "blender": "", + "blender": "", "blink": "", "blink_light": "", "blitz": "", @@ -102,7 +102,7 @@ "circleci_light": "", "citation": "", "clangd": "", - "claude": "", + "claude": "", "cline": "", "clojure": "", "cloudfoundry": "", @@ -151,8 +151,8 @@ "database": "", "deepsource": "", "denizenscript": "", - "deno": "", - "deno_light": "", + "deno": "", + "deno_light": "", "dependabot": "", "dependencies-update": "", "dhall": "", @@ -237,8 +237,8 @@ "folder-bibliography": "", "folder-bicep-open": "", "folder-bicep": "", - "folder-blender-open": "", - "folder-blender": "", + "folder-blender-open": "", + "folder-blender": "", "folder-bloc-open": "", "folder-bloc": "", "folder-bower-open": "", @@ -255,8 +255,8 @@ "folder-circleci": "", "folder-class-open": "", 
"folder-class": "", - "folder-claude-open": "", - "folder-claude": "", + "folder-claude-open": "", + "folder-claude": "", "folder-client-open": "", "folder-client": "", "folder-cline-open": "", @@ -379,6 +379,8 @@ "folder-functions": "", "folder-gamemaker-open": "", "folder-gamemaker": "", + "folder-gemini-ai-open": "", + "folder-gemini-ai": "", "folder-generator-open": "", "folder-generator": "", "folder-gh-workflows-open": "", @@ -495,6 +497,8 @@ "folder-metro": "", "folder-middleware-open": "", "folder-middleware": "", + "folder-migrations-open": "", + "folder-migrations": "", "folder-mjml-open": "", "folder-mjml": "", "folder-mobile-open": "", @@ -604,6 +608,8 @@ "folder-rules": "", "folder-rust-open": "", "folder-rust": "", + "folder-salt-open": "", + "folder-salt": "", "folder-sandbox-open": "", "folder-sandbox": "", "folder-sass-open": "", @@ -1024,6 +1030,7 @@ "ruff": "", "rust": "", "salesforce": "", + "salt": "", "san": "", "sas": "", "sass": "", diff --git a/options/locale/locale_en-US.json b/options/locale/locale_en-US.json index 0fb95606b3..417698544f 100644 --- a/options/locale/locale_en-US.json +++ b/options/locale/locale_en-US.json @@ -977,6 +977,7 @@ "repo.fork.blocked_user": "Cannot fork the repository because you are blocked by the repository owner.", "repo.use_template": "Use this template", "repo.open_with_editor": "Open with %s", + "repo.download_directory_as": "Download directory as %s", "repo.download_zip": "Download ZIP", "repo.download_tar": "Download TAR.GZ", "repo.download_bundle": "Download BUNDLE", @@ -1489,6 +1490,7 @@ "repo.issues.filter_sort.feweststars": "Fewest stars", "repo.issues.filter_sort.mostforks": "Most forks", "repo.issues.filter_sort.fewestforks": "Fewest forks", + "repo.issues.quick_goto": "Go to issue", "repo.issues.action_open": "Open", "repo.issues.action_close": "Close", "repo.issues.action_label": "Label", @@ -1701,6 +1703,7 @@ "repo.issues.review.content.empty": "You need to leave a comment indicating the 
requested change(s).", "repo.issues.review.reject": "requested changes %s", "repo.issues.review.wait": "was requested for review %s", + "repo.issues.review.codeowners_rules": "CODEOWNERS rules", "repo.issues.review.add_review_request": "requested review from %s %s", "repo.issues.review.remove_review_request": "removed review request for %s %s", "repo.issues.review.remove_review_request_self": "declined to review %s", @@ -1736,8 +1739,11 @@ "repo.issues.reference_link": "Reference: %s", "repo.compare.compare_base": "base", "repo.compare.compare_head": "compare", + "repo.compare.title": "Comparing changes", + "repo.compare.description": "Choose two branches or tags to see what’s changed or to start a new pull request.", "repo.pulls.desc": "Enable pull requests and code reviews.", "repo.pulls.new": "New Pull Request", + "repo.pulls.new.description": "Discuss and review the changes in this comparison with others.", "repo.pulls.new.blocked_user": "Cannot create pull request because you are blocked by the repository owner.", "repo.pulls.new.must_collaborator": "You must be a collaborator to create pull request.", "repo.pulls.new.already_existed": "A pull request between these branches already exists", @@ -1747,7 +1753,6 @@ "repo.pulls.allow_edits_from_maintainers": "Allow edits from maintainers", "repo.pulls.allow_edits_from_maintainers_desc": "Users with write access to the base branch can also push to this branch", "repo.pulls.allow_edits_from_maintainers_err": "Updating failed", - "repo.pulls.compare_changes_desc": "Select the branch to merge into and the branch to pull from.", "repo.pulls.has_viewed_file": "Viewed", "repo.pulls.has_changed_since_last_review": "Changed since your last review", "repo.pulls.viewed_files_label": "%[1]d / %[2]d files viewed", @@ -1791,6 +1796,7 @@ "repo.pulls.remove_prefix": "Remove %s prefix", "repo.pulls.data_broken": "This pull request is broken due to missing fork information.", "repo.pulls.files_conflicted": "This pull request has 
changes conflicting with the target branch.", + "repo.pulls.files_conflicted_no_listed_files": "(No conflicting files listed)", "repo.pulls.is_checking": "Checking for merge conflicts…", "repo.pulls.is_ancestor": "This branch is already included in the target branch. There is nothing to merge.", "repo.pulls.is_empty": "The changes on this branch are already on the target branch. This will be an empty commit.", @@ -1845,7 +1851,8 @@ "repo.pulls.status_checking": "Some checks are pending", "repo.pulls.status_checks_success": "All checks were successful", "repo.pulls.status_checks_warning": "Some checks reported warnings", - "repo.pulls.status_checks_failure": "Some checks failed", + "repo.pulls.status_checks_failure_required": "Some required checks failed", + "repo.pulls.status_checks_failure_optional": "Some optional checks failed", "repo.pulls.status_checks_error": "Some checks reported errors", "repo.pulls.status_checks_requested": "Required", "repo.pulls.status_checks_details": "Details", @@ -2540,8 +2547,8 @@ "repo.diff.too_many_files": "Some files were not shown because too many files have changed in this diff", "repo.diff.show_more": "Show More", "repo.diff.load": "Load Diff", - "repo.diff.generated": "generated", - "repo.diff.vendored": "vendored", + "repo.diff.generated": "Generated", + "repo.diff.vendored": "Vendored", "repo.diff.comment.add_line_comment": "Add line comment", "repo.diff.comment.placeholder": "Leave a comment", "repo.diff.comment.add_single_comment": "Add single comment", @@ -2658,7 +2665,7 @@ "repo.branch.new_branch_from": "Create new branch from \"%s\"", "repo.branch.renamed": "Branch %s was renamed to %s.", "repo.branch.rename_default_or_protected_branch_error": "Only admins can rename default or protected branches.", - "repo.branch.rename_protected_branch_failed": "This branch is protected by glob-based protection rules.", + "repo.branch.rename_protected_branch_failed": "Failed to rename branch due to branch protection rules.", 
"repo.branch.commits_divergence_from": "Commit divergence: %[1]d behind and %[2]d ahead of %[3]s", "repo.branch.commits_no_divergence": "The same as branch %[1]s", "repo.tag.create_tag": "Create tag %s", @@ -3279,8 +3286,6 @@ "admin.config.git_gc_args": "GC Arguments", "admin.config.git_migrate_timeout": "Migration Timeout", "admin.config.git_mirror_timeout": "Mirror Update Timeout", - "admin.config.git_clone_timeout": "Clone Operation Timeout", - "admin.config.git_pull_timeout": "Pull Operation Timeout", "admin.config.git_gc_timeout": "GC Operation Timeout", "admin.config.log_config": "Log Configuration", "admin.config.logger_name_fmt": "Logger: %s", @@ -3722,8 +3727,8 @@ "projects.exit_fullscreen": "Exit Fullscreen", "git.filemode.changed_filemode": "%[1]s → %[2]s", "git.filemode.directory": "Directory", - "git.filemode.normal_file": "Normal file", - "git.filemode.executable_file": "Executable file", - "git.filemode.symbolic_link": "Symbolic link", + "git.filemode.normal_file": "Regular", + "git.filemode.executable_file": "Executable", + "git.filemode.symbolic_link": "Symlink", "git.filemode.submodule": "Submodule" } diff --git a/options/locale/locale_fr-FR.json b/options/locale/locale_fr-FR.json index 10424db497..22be98d2ff 100644 --- a/options/locale/locale_fr-FR.json +++ b/options/locale/locale_fr-FR.json @@ -3,6 +3,7 @@ "dashboard": "Tableau de bord", "explore_title": "Explorateur", "help": "Aide", + "logo": "Logo", "sign_in": "Connexion", "sign_in_with_provider": "Se connecter avec %s", "sign_in_or": "ou", @@ -10,9 +11,12 @@ "sign_up": "S'inscrire", "link_account": "Lier un Compte", "register": "S'inscrire", + "version": "Version", "powered_by": "Propulsé par %s", + "page": "Page", "template": "Modèle", "language": "Langue", + "notifications": "Notifications", "active_stopwatch": "Suivi du temps actif", "tracked_time_summary": "Résumé du pointage d’après les filtres de la liste des tickets", "create_new": "Créer…", @@ -28,6 +32,7 @@ "password": "Mot de 
passe", "access_token": "Jeton d’accès", "re_type": "Confirmez le mot de passe", + "captcha": "CAPTCHA", "twofa": "Authentification à deux facteurs", "twofa_scratch": "Code de secours pour l'authentification à deux facteurs", "passcode": "Code d'accès", @@ -63,6 +68,7 @@ "your_starred": "Favoris", "your_settings": "Configuration", "all": "Tous", + "sources": "Sources", "mirrors": "Miroirs", "collaborative": "Collaboratif", "forks": "Bifurcations", @@ -70,6 +76,7 @@ "pull_requests": "Demandes d'ajout", "issues": "Tickets", "milestones": "Jalons", + "ok": "Ok", "cancel": "Annuler", "retry": "Réessayez", "rerun": "Relancer", @@ -82,6 +89,7 @@ "remove_label_str": "Supprimer l’élément « %s »", "edit": "Éditer", "view": "Voir", + "test": "Test", "enabled": "Activé", "disabled": "Désactivé", "locked": "Verrouillée", @@ -114,6 +122,7 @@ "expired": "Expiré", "confirm_delete_artifact": "Êtes-vous sûr de vouloir supprimer l’artefact « %s » ?", "archived": "Archivé", + "concept_system_global": "Global", "concept_user_individual": "Individuel", "concept_code_repository": "Dépôt", "concept_user_organization": "Organisation", @@ -135,6 +144,7 @@ "filter.not_mirror": "Non miroité", "filter.is_template": "Modèle", "filter.not_template": "Pas un modèle", + "filter.public": "Publique", "filter.private": "Privé", "no_results_found": "Aucun résultat trouvé.", "internal_error_skipped": "Une erreur interne est survenue, mais ignorée : %s", @@ -144,7 +154,9 @@ "search.fuzzy_tooltip": "Inclure également les résultats proches de la recherche", "search.words": "Mots", "search.words_tooltip": "Inclure uniquement les résultats qui correspondent exactement aux mots recherchés", + "search.regexp": "Regexp", "search.regexp_tooltip": "Inclure uniquement les résultats qui correspondent à l’expression régulière recherchée", + "search.exact": "Exact", "search.exact_tooltip": "Inclure uniquement les résultats qui correspondent exactement au terme de recherche", "search.repo_kind": "Chercher des 
dépôts…", "search.user_kind": "Chercher des utilisateurs…", @@ -191,6 +203,8 @@ "editor.buttons.switch_to_legacy.tooltip": "Utiliser l’ancien éditeur à la place", "editor.buttons.enable_monospace_font": "Activer la police à chasse fixe", "editor.buttons.disable_monospace_font": "Désactiver la police à chasse fixe", + "filter.string.asc": "A–Z", + "filter.string.desc": "Z–A", "error.occurred": "Une erreur s’est produite", "error.report_message": "Si vous pensez qu’il s’agit d’un bug Gitea, veuillez consulter notre board GitHub ou ouvrir un nouveau ticket si nécessaire.", "error.not_found": "La cible n'a pu être trouvée.", @@ -202,7 +216,9 @@ "startpage.platform_desc": "Gitea tourne partout où Go peut être compilé : Windows, macOS, Linux, ARM, etc. Choisissez votre préféré !", "startpage.lightweight": "Léger", "startpage.lightweight_desc": "Gitea utilise peu de ressources. Il peut même tourner sur un Raspberry Pi très bon marché. Économisez l'énergie de vos serveurs !", + "startpage.license": "Open Source", "startpage.license_desc": "Venez récupérer %[2]s ! Rejoignez-nous en contribuant à rendre ce projet encore meilleur !", + "install.install": "Installation", "install.installing_desc": "Installation en cours, veuillez patienter…", "install.title": "Configuration initiale", "install.docker_helper": "Si vous exécutez Gitea dans Docker, veuillez lire la documentation avant de modifier les paramètres.", @@ -215,6 +231,7 @@ "install.db_name": "Nom de base de données", "install.db_schema": "Schéma", "install.db_schema_helper": "Laisser vide pour la base de données par défaut (\"public\").", + "install.ssl_mode": "SSL", "install.path": "Emplacement", "install.sqlite_helper": "Chemin d'accès pour la base de données SQLite3.
    Entrer un chemin absolu si vous exécutez Gitea en tant que service.", "install.reinstall_error": "Vous essayez d'installer dans une base de données Gitea existante", @@ -344,6 +361,7 @@ "explore.users": "Utilisateurs", "explore.organizations": "Organisations", "explore.go_to": "Atteindre", + "explore.code": "Code", "explore.code_last_indexed_at": "Dernière indexation %s", "explore.relevant_repositories_tooltip": "Les dépôts qui sont des forks ou qui n'ont aucun sujet, aucune icône et aucune description sont cachés.", "explore.relevant_repositories": "Seuls les dépôts pertinents sont affichés, afficher les résultats non filtrés.", @@ -391,6 +409,7 @@ "auth.twofa_scratch_token_incorrect": "Votre code de secours est incorrect.", "auth.twofa_required": "Vous devez configurer l’authentification à deux facteurs pour avoir accès aux dépôts, ou essayer de vous reconnecter.", "auth.login_userpass": "Connexion", + "auth.login_openid": "OpenID", "auth.oauth_signup_tab": "Créer un compte", "auth.oauth_signup_title": "Compléter le nouveau compte", "auth.oauth_signup_submit": "Finaliser la création du compte", @@ -586,6 +605,7 @@ "user.show_more": "Voir plus", "user.starred": "Dépôts favoris", "user.watched": "Dépôts surveillés", + "user.code": "Code", "user.projects": "Projets", "user.overview": "Vue d'ensemble", "user.following": "Abonnements", @@ -617,6 +637,7 @@ "user.block.info_6": "ouvrir ou commenter vos tickets et demandes d’ajouts", "user.block.info_7": "réagir à vos commentaires dans les tickets ou les demandes d’ajout", "user.block.user_to_block": "Utilisateur à bloquer", + "user.block.note": "Note", "user.block.note.title": "Note facultative :", "user.block.note.info": "La note n’est pas visible par l’utilisateur bloqué.", "user.block.note.edit": "Modifier la note", @@ -627,14 +648,17 @@ "settings.appearance": "Apparence", "settings.password": "Mot de passe", "settings.security": "Sécurité", + "settings.avatar": "Avatar", "settings.ssh_gpg_keys": "Clés SSH / 
GPG", "settings.social": "Réseaux Sociaux", + "settings.applications": "Applications", "settings.orgs": "Gérer les organisations", "settings.repos": "Dépôts", "settings.delete": "Supprimer le compte", "settings.twofa": "Authentification à deux facteurs (TOTP)", "settings.account_link": "Comptes liés", "settings.organization": "Organisations", + "settings.uid": "UID", "settings.webauthn": "Authentification à deux facteurs (Clés de sécurité)", "settings.public_profile": "Profil public", "settings.biography_placeholder": "Parlez-nous un peu de vous ! (Vous pouvez utiliser Markdown)", @@ -663,6 +687,7 @@ "settings.hidden_comment_types.ref_tooltip": "Commentaires où ce ticket a été référencé sur un autre ticket, révision, etc.", "settings.hidden_comment_types.issue_ref_tooltip": "Commentaires où l’utilisateur change la branche/étiquette associée au ticket", "settings.comment_type_group_reference": "Référence", + "settings.comment_type_group_label": "Label", "settings.comment_type_group_milestone": "Jalon", "settings.comment_type_group_assignee": "Assigné à", "settings.comment_type_group_title": "Titre", @@ -915,6 +940,7 @@ "settings.email_notifications.actions.desc": "Notification pour les executions de workflows sur les dépôts configurés avec les Actions Gitea.", "settings.email_notifications.actions.failure_only": "Ne notifier que pour les exécutions échouées", "settings.visibility": "Visibilité de l'utilisateur", + "settings.visibility.public": "Publique", "settings.visibility.public_tooltip": "Visible par tout le monde", "settings.visibility.limited": "Limité", "settings.visibility.limited_tooltip": "Visible uniquement pour les utilisateurs authentifiés", @@ -951,11 +977,13 @@ "repo.fork.blocked_user": "Impossible de bifurquer le dépôt car vous êtes bloqué par son propriétaire.", "repo.use_template": "Utiliser ce modèle", "repo.open_with_editor": "Ouvrir avec %s", + "repo.download_directory_as": "Télécharger le répertoire en tant que %s", "repo.download_zip": 
"Télécharger le ZIP", "repo.download_tar": "Télécharger le TAR.GZ", "repo.download_bundle": "Télécharger le BUNDLE", "repo.generate_repo": "Générer un dépôt", "repo.generate_from": "Générer depuis", + "repo.repo_desc": "Description", "repo.repo_desc_helper": "Décrire brièvement votre dépôt", "repo.repo_no_desc": "Aucune description fournie", "repo.repo_lang": "Langue", @@ -1034,11 +1062,14 @@ "repo.desc.template": "Modèle", "repo.desc.internal": "Interne", "repo.desc.archived": "Archivé", + "repo.desc.sha256": "SHA256", "repo.template.items": "Élément du modèle", "repo.template.git_content": "Contenu Git (branche par défaut)", "repo.template.git_hooks": "Déclencheurs Git", "repo.template.git_hooks_tooltip": "Vous ne pouvez actuellement pas modifier ou supprimer les déclencheurs Git ajoutés. Sélectionnez cette option uniquement si vous faites confiance au modèle de dépôt.", + "repo.template.webhooks": "Déclencheurs web", "repo.template.topics": "Sujets", + "repo.template.avatar": "Avatar", "repo.template.issue_labels": "Labels de ticket", "repo.template.one_item": "Vous devez sélectionner au moins un élément du modèle", "repo.template.invalid": "Vous devez sélectionner un modèle de dépôt", @@ -1060,7 +1091,9 @@ "repo.migrate_options_lfs_endpoint.description.local": "Un chemin de serveur local est également pris en charge.", "repo.migrate_options_lfs_endpoint.placeholder": "Si laissé vide, le point d’accès sera déterminé à partir de l’URL de clonage.", "repo.migrate_items": "Éléments à migrer", + "repo.migrate_items_wiki": "Wiki", "repo.migrate_items_milestones": "Jalons", + "repo.migrate_items_labels": "Labels", "repo.migrate_items_issues": "Tickets", "repo.migrate_items_pullrequests": "Demandes d'ajout", "repo.migrate_items_merge_requests": "Demandes de fusion", @@ -1129,17 +1162,21 @@ "repo.empty_message": "Ce dépôt n’a pas de contenu.", "repo.broken_message": "Les données git de ce dépôt ne peuvent pas être lues. 
Contactez l'administrateur de cette instance ou supprimez ce dépôt.", "repo.no_branch": "Ce dépôt n’a aucune branche.", + "repo.code": "Code", "repo.code.desc": "Accéder au code source, fichiers, révisions et branches.", "repo.branch": "Branche", "repo.tree": "Aborescence", "repo.clear_ref": "Effacer la référence actuelle", "repo.filter_branch_and_tag": "Filtrer une branche ou une étiquette", "repo.find_tag": "Rechercher une étiquette", + "repo.branches": "Branches", "repo.tags": "Étiquettes", "repo.issues": "Tickets", "repo.pulls": "Demandes d'ajout", "repo.projects": "Projets", "repo.packages": "Paquets", + "repo.actions": "Actions", + "repo.labels": "Labels", "repo.org_labels_desc": "Les labels d'une organisation peuvent être utilisés avec tous les dépôts de cette organisation.", "repo.org_labels_desc_manage": "gérer", "repo.milestone": "Jalon", @@ -1149,6 +1186,7 @@ "repo.release": "Publications", "repo.releases": "Publications", "repo.tag": "Étiquette", + "repo.git_tag": "Étiquette git", "repo.released_this": "a publié ceci", "repo.tagged_this": "a étiqueté", "repo.file.title": "%s sur %s", @@ -1282,6 +1320,8 @@ "repo.commits.search_branch": "Cette branche", "repo.commits.search_all": "Toutes les branches", "repo.commits.author": "Auteur", + "repo.commits.message": "Message", + "repo.commits.date": "Date", "repo.commits.older": "Précédemment", "repo.commits.newer": "Récemment", "repo.commits.signed_by": "Signé par", @@ -1306,6 +1346,7 @@ "repo.ext_issues.desc": "Lien vers un gestionnaire de tickets externe.", "repo.projects.desc": "Gérer les tickets et les demandes d’ajouts dans les projets.", "repo.projects.description": "Description (facultative)", + "repo.projects.description_placeholder": "Description", "repo.projects.create": "Créer un projet", "repo.projects.title": "Titre", "repo.projects.new": "Nouveau projet", @@ -1350,6 +1391,7 @@ "repo.issues.filter_no_results_placeholder": "Essayez d’ajuster vos filtres de recherche.", "repo.issues.new": "Nouveau 
ticket", "repo.issues.new.title_empty": "Le titre ne peut pas être vide", + "repo.issues.new.labels": "Labels", "repo.issues.new.no_label": "Sans labels", "repo.issues.new.clear_labels": "Effacer les labels", "repo.issues.new.projects": "Projets", @@ -1379,6 +1421,7 @@ "repo.issues.create": "Créer un ticket", "repo.issues.new_label": "Nouveau label", "repo.issues.new_label_placeholder": "Nom du label", + "repo.issues.new_label_desc_placeholder": "Description", "repo.issues.create_label": "Créer un label", "repo.issues.label_templates.title": "Charger un ensemble prédéfini de label", "repo.issues.label_templates.info": "Il n'existe pas encore de label. Créez-en un avec « Nouveau label » ou utilisez un jeu de label prédéfini :", @@ -1408,6 +1451,7 @@ "repo.issues.remove_ref_at": "a supprimé la référence %s %s.", "repo.issues.add_ref_at": "a ajouté la référence %s %s.", "repo.issues.delete_branch_at": "a supprimé la branche %s %s.", + "repo.issues.filter_label": "Label", "repo.issues.filter_label_exclude": "Utilisez Alt + Clic/entrée pour exclure les labels.", "repo.issues.filter_label_no_select": "Toutes les labels", "repo.issues.filter_label_select_no_label": "Aucun label", @@ -1425,6 +1469,7 @@ "repo.issues.filter_poster": "Auteur", "repo.issues.filter_user_placeholder": "Rechercher des utilisateurs", "repo.issues.filter_user_no_select": "Tous les utilisateurs", + "repo.issues.filter_type": "Type", "repo.issues.filter_type.all_issues": "Tous les tickets", "repo.issues.filter_type.all_pull_requests": "Toutes les demandes d’ajout", "repo.issues.filter_type.assigned_to_you": "Qui vous sont assignés", @@ -1447,6 +1492,7 @@ "repo.issues.filter_sort.fewestforks": "Bifurcations (croissant)", "repo.issues.action_open": "Ouvrir", "repo.issues.action_close": "Fermer", + "repo.issues.action_label": "Label", "repo.issues.action_milestone": "Jalon", "repo.issues.action_milestone_no_select": "Aucun jalon", "repo.issues.action_assignee": "Assigné à", @@ -1526,6 +1572,7 @@ 
"repo.issues.label_exclusive_warning": "Tout label d'une portée en conflit sera retiré lors de la modification des labels d’un ticket ou d’une demande d’ajout.", "repo.issues.label_exclusive_order": "Ordre de tri", "repo.issues.label_exclusive_order_tooltip": "Les labels exclusifs partageant la même portée seront triées selon cet ordre numérique.", + "repo.issues.label_count": "%d label(s)", "repo.issues.label_open_issues": "%d tickets ouverts", "repo.issues.label_edit": "Éditer", "repo.issues.label_delete": "Supprimer", @@ -1591,6 +1638,7 @@ "repo.issues.del_time_history": "a supprimé son temps de travail %s.", "repo.issues.add_time_manually": "Temps pointé manuellement", "repo.issues.add_time_hours": "Heures", + "repo.issues.add_time_minutes": "Minutes", "repo.issues.add_time_sum_to_small": "Aucun minuteur n'a été saisi.", "repo.issues.time_spent_total": "Temps passé total", "repo.issues.time_spent_from_all_authors": "Temps passé total : %s", @@ -1654,6 +1702,7 @@ "repo.issues.review.content.empty": "Vous devez laisser un commentaire indiquant le(s) changement(s) demandé(s).", "repo.issues.review.reject": "a requis les changements %s", "repo.issues.review.wait": "a été sollicité pour évaluer cette demande d’ajout %s.", + "repo.issues.review.codeowners_rules": "Règles des CODEOWNERS", "repo.issues.review.add_review_request": "a demandé à %s une évaluation %s.", "repo.issues.review.remove_review_request": "a retiré la demande d’évaluation pour %s %s.", "repo.issues.review.remove_review_request_self": "a décliné son invitation à évaluer %s.", @@ -1685,19 +1734,24 @@ "repo.issues.content_history.created": "a créé", "repo.issues.content_history.delete_from_history": "Supprimer de l’historique", "repo.issues.content_history.delete_from_history_confirm": "Supprimer de l’historique ?", + "repo.issues.content_history.options": "Options", "repo.issues.reference_link": "Référence : %s", + "repo.compare.compare_base": "base", "repo.compare.compare_head": "comparer", + 
"repo.compare.title": "Comparer les changements", + "repo.compare.description": "Choisissez deux branches ou étiquettes git pour voir les différences ou faire une demande d’ajout.", "repo.pulls.desc": "Active les demandes d’ajouts et l’évaluation du code.", "repo.pulls.new": "Nouvelle demande d'ajout", + "repo.pulls.new.description": "Discutez et examinez les changements dans cette comparaison.", "repo.pulls.new.blocked_user": "Impossible de créer une demande d’ajout car vous êtes bloqué par le propriétaire du dépôt.", "repo.pulls.new.must_collaborator": "Vous devez être un collaborateur pour créer une demande d’ajout.", + "repo.pulls.new.already_existed": "Une demande d’ajout entre ces branches existe déjà", "repo.pulls.edit.already_changed": "Impossible d’enregistrer la demande d’ajout. Il semble que le contenu ait été modifié par un autre utilisateur. Veuillez rafraîchir la page et réessayer afin d’éviter d’écraser leurs modifications.", "repo.pulls.view": "Voir la demande d'ajout", "repo.pulls.compare_changes": "Nouvelle demande d’ajout", "repo.pulls.allow_edits_from_maintainers": "Autoriser les modifications des mainteneurs", "repo.pulls.allow_edits_from_maintainers_desc": "Les utilisateurs ayant un accès en écriture à la branche de base peuvent également soumettre sur cette branche", "repo.pulls.allow_edits_from_maintainers_err": "La mise à jour à échoué", - "repo.pulls.compare_changes_desc": "Sélectionnez la branche dans laquelle fusionner et la branche depuis laquelle tirer les modifications.", "repo.pulls.has_viewed_file": "Consulté", "repo.pulls.has_changed_since_last_review": "Modifié depuis votre dernier passage", "repo.pulls.viewed_files_label": "%[1]d / %[2]d fichiers vus", @@ -1795,7 +1849,8 @@ "repo.pulls.status_checking": "Certains contrôles sont en attente", "repo.pulls.status_checks_success": "Tous les contrôles ont réussi", "repo.pulls.status_checks_warning": "Quelques vérifications ont signalé des avertissements", - 
"repo.pulls.status_checks_failure": "Certaines vérifications ont échoué", + "repo.pulls.status_checks_failure_required": "Des vérifications obligatoires ont échoué", + "repo.pulls.status_checks_failure_optional": "Des vérifications optionnelles ont échoué", "repo.pulls.status_checks_error": "Quelques vérifications ont signalé des erreurs", "repo.pulls.status_checks_requested": "Requis", "repo.pulls.status_checks_details": "Détails", @@ -1850,6 +1905,7 @@ "repo.milestones.completeness": "%d%% complété", "repo.milestones.create": "Créer un Jalon", "repo.milestones.title": "Titre", + "repo.milestones.desc": "Description", "repo.milestones.due_date": "Date d'échéance (facultatif)", "repo.milestones.clear": "Effacer", "repo.milestones.invalid_due_date_format": "Le format de la date d'échéance est invalide, il doit être comme suit 'aaaa-mm-jj'.", @@ -1884,11 +1940,14 @@ "repo.signing.wont_sign.not_signed_in": "Vous n'êtes pas connecté.", "repo.ext_wiki": "Accès au wiki externe", "repo.ext_wiki.desc": "Lier un wiki externe.", + "repo.wiki": "Wiki", "repo.wiki.welcome": "Bienvenue sur le Wiki.", "repo.wiki.welcome_desc": "Le wiki vous permet d'écrire ou de partager de la documentation avec vos collaborateurs.", "repo.wiki.desc": "Écrire et partager de la documentation avec vos collaborateurs.", "repo.wiki.create_first_page": "Créer la première page", + "repo.wiki.page": "Page", "repo.wiki.filter_page": "Filtrer la page", + "repo.wiki.new_page": "Page", "repo.wiki.page_title": "Titre de la page", "repo.wiki.page_content": "Contenu de la page", "repo.wiki.default_commit_message": "Écrire une note concernant cette mise à jour (optionnel).", @@ -1903,6 +1962,7 @@ "repo.wiki.delete_page_notice_1": "Supprimer la page de wiki \"%s\" est irréversible. 
Continuer ?", "repo.wiki.page_already_exists": "Une page de wiki avec le même nom existe déjà.", "repo.wiki.reserved_page": "Le nom de page de wiki \"%s\" est réservé.", + "repo.wiki.pages": "Pages", "repo.wiki.last_updated": "Dernière mise à jour: %s", "repo.wiki.page_name_desc": "Entrez un nom pour cette page Wiki. Certains noms spéciaux sont « Home », « _Sidebar » et « _Footer ».", "repo.wiki.original_git_entry_tooltip": "Voir le fichier Git original au lieu d'utiliser un lien convivial.", @@ -1995,6 +2055,7 @@ "repo.settings.collaboration.owner": "Propriétaire", "repo.settings.collaboration.undefined": "Indéfini", "repo.settings.collaboration.per_unit": "Permissions de ressource", + "repo.settings.hooks": "Déclencheurs web", "repo.settings.githooks": "Déclencheurs Git", "repo.settings.basic_settings": "Paramètres de base", "repo.settings.mirror_settings": "Réglages Miroir", @@ -2012,6 +2073,7 @@ "repo.settings.mirror_settings.docs.pulling_remote_title": "Tirer depuis un dépôt distant", "repo.settings.mirror_settings.mirrored_repository": "Dépôt en miroir", "repo.settings.mirror_settings.pushed_repository": "Dépôt sortant", + "repo.settings.mirror_settings.direction": "Sens", "repo.settings.mirror_settings.direction.pull": "Tirer", "repo.settings.mirror_settings.direction.push": "Soumission", "repo.settings.mirror_settings.last_update": "Dernière mise à jour", @@ -2180,6 +2242,7 @@ "repo.settings.payload_url": "URL cible", "repo.settings.http_method": "Méthode HTTP", "repo.settings.content_type": "Type de contenu POST", + "repo.settings.secret": "Secret", "repo.settings.webhook_secret_desc": "Si le serveur webhook supporte l’usage de secrets, vous pouvez indiquer un secret ici en vous basant sur leur documentation.", "repo.settings.slack_username": "Nom d'utilisateur", "repo.settings.slack_icon_url": "URL de l'icône", @@ -2197,6 +2260,7 @@ "repo.settings.event_delete_desc": "Branche ou étiquette supprimée.", "repo.settings.event_fork": "Bifurcation", 
"repo.settings.event_fork_desc": "Dépôt bifurqué.", + "repo.settings.event_wiki": "Wiki", "repo.settings.event_wiki_desc": "Page wiki créée, renommée, modifiée ou supprimée.", "repo.settings.event_statuses": "Statuts", "repo.settings.event_statuses_desc": "Statut de validation mis à jour depuis l’API.", @@ -2262,6 +2326,19 @@ "repo.settings.slack_domain": "Domaine", "repo.settings.slack_channel": "Canal", "repo.settings.add_web_hook_desc": "Intégrez %s dans votre dépôt.", + "repo.settings.web_hook_name_gitea": "Gitea", + "repo.settings.web_hook_name_gogs": "Gogs", + "repo.settings.web_hook_name_slack": "Slack", + "repo.settings.web_hook_name_discord": "Discord", + "repo.settings.web_hook_name_dingtalk": "DingTalk", + "repo.settings.web_hook_name_telegram": "Telegram", + "repo.settings.web_hook_name_matrix": "Matrix", + "repo.settings.web_hook_name_msteams": "Microsoft Teams", + "repo.settings.web_hook_name_feishu_or_larksuite": "Suite Feishu / Lark", + "repo.settings.web_hook_name_feishu": "Feishu", + "repo.settings.web_hook_name_larksuite": "Suite Lark", + "repo.settings.web_hook_name_wechatwork": "WeCom (Wechat Work)", + "repo.settings.web_hook_name_packagist": "Packagist", "repo.settings.packagist_username": "Nom d'utilisateur Packagist", "repo.settings.packagist_api_token": "Jeton API", "repo.settings.packagist_package_url": "URL du paquet Packagist", @@ -2279,6 +2356,7 @@ "repo.settings.deploy_key_deletion": "Supprimer une clé de déploiement", "repo.settings.deploy_key_deletion_desc": "La suppression d’une clé de déploiement révoque son accès à ce dépôt. 
Continuer ?", "repo.settings.deploy_key_deletion_success": "La clé de déploiement a été supprimée.", + "repo.settings.branches": "Branches", "repo.settings.protected_branch": "Protection de branche", "repo.settings.protected_branch.save_rule": "Enregistrer la règle", "repo.settings.protected_branch.delete_rule": "Supprimer la règle", @@ -2406,6 +2484,7 @@ "repo.settings.unarchive.success": "Le dépôt a bien été réhabilité.", "repo.settings.unarchive.error": "Une erreur est survenue en essayant deréhabiliter ce dépôt. Voir le journal pour plus de détails.", "repo.settings.update_avatar_success": "L'avatar du dépôt a été mis à jour.", + "repo.settings.lfs": "LFS", "repo.settings.lfs_filelist": "Fichiers LFS stockés dans ce dépôt", "repo.settings.lfs_no_lfs_files": "Aucun fichier LFS stocké dans ce dépôt", "repo.settings.lfs_findcommits": "Trouver des révisions", @@ -2425,6 +2504,7 @@ "repo.settings.lfs_force_unlock": "Forcer le déverrouillage", "repo.settings.lfs_pointers.found": "%d pointeur(s) trouvés : %d associés, %d non associés (%d manquant dans le magasin)", "repo.settings.lfs_pointers.sha": "SHA du Blob", + "repo.settings.lfs_pointers.oid": "OID", "repo.settings.lfs_pointers.inRepo": "Dans le dépôt", "repo.settings.lfs_pointers.exists": "Existe en magasin", "repo.settings.lfs_pointers.accessible": "Accessible à l'utilisateur", @@ -2438,6 +2518,7 @@ "repo.diff.browse_source": "Parcourir la source", "repo.diff.parent": "Parent", "repo.diff.commit": "révision", + "repo.diff.git-notes": "Notes", "repo.diff.data_not_available": "Contenu de la comparaison indisponible", "repo.diff.options_button": "Option de Diff", "repo.diff.download_patch": "Télécharger le Fichier Patch", @@ -2451,6 +2532,7 @@ "repo.diff.whitespace_ignore_at_eol": "Ignorer les blancs en fin de ligne", "repo.diff.stats_desc": " %d fichiers modifiés avec %d ajouts et %d suppressions", "repo.diff.stats_desc_file": "%d modifications: %d ajouts et %d suppressions", + "repo.diff.bin": "BIN", 
"repo.diff.bin_not_shown": "Fichier binaire non affiché.", "repo.diff.view_file": "Voir le fichier", "repo.diff.file_before": "Avant", @@ -2463,8 +2545,8 @@ "repo.diff.too_many_files": "Certains fichiers ne sont pas affichés car ce diff contient trop de modifications", "repo.diff.show_more": "Voir plus", "repo.diff.load": "Voir la Diff", - "repo.diff.generated": "générée", - "repo.diff.vendored": "externe", + "repo.diff.generated": "Générée", + "repo.diff.vendored": "Externe", "repo.diff.comment.add_line_comment": "Commenter cette ligne", "repo.diff.comment.placeholder": "Laisser un commentaire", "repo.diff.comment.add_single_comment": "Commenter (simple)", @@ -2497,6 +2579,7 @@ "repo.release.new_release": "Nouvelle publication", "repo.release.draft": "Brouillon", "repo.release.prerelease": "Pré-publication", + "repo.release.stable": "Stable", "repo.release.latest": "Dernière", "repo.release.compare": "Comparer", "repo.release.edit": "Éditer", @@ -2537,6 +2620,13 @@ "repo.release.add_tag": "Créer uniquement l'étiquette", "repo.release.releases_for": "Publications pour %s", "repo.release.tags_for": "Étiquettes pour %s", + "repo.release.notes": "Notes de publication", + "repo.release.generate_notes": "Générer des notes de publication", + "repo.release.generate_notes_desc": "Ajoute automatiquement les demandes d’ajouts fusionnées, et ajoute un lien vers la liste des changements pour cette publication.", + "repo.release.previous_tag": "Étiquette précédente", + "repo.release.generate_notes_tag_not_found": "L’étiquette « %s » n’existe pas dans ce dépôt.", + "repo.release.generate_notes_target_not_found": "La cible de la publication « %s » est introuvable.", + "repo.release.generate_notes_missing_tag": "Entrez un nom d’étiquette pour générer les notes de publication.", "repo.branch.name": "Nom de la branche", "repo.branch.already_exists": "Une branche nommée \"%s\" existe déjà.", "repo.branch.delete_head": "Supprimer", @@ -2597,6 +2687,7 @@ 
"graphs.component_loading_info": "Ça prend son temps…", "graphs.component_failed_to_load": "Une erreur inattendue s’est produite.", "graphs.code_frequency.what": "fréquence du code", + "graphs.contributors.what": "contributions", "graphs.recent_commits.what": "révisions récentes", "org.org_name_holder": "Nom de l'organisation", "org.org_full_name_holder": "Nom complet de l'organisation", @@ -2605,11 +2696,14 @@ "org.repo_updated": "Actualisé", "org.members": "Membres", "org.teams": "Équipes", + "org.code": "Code", "org.lower_members": "Membres", "org.lower_repositories": "dépôts", "org.create_new_team": "Nouvelle équipe", "org.create_team": "Créer une équipe", + "org.org_desc": "Description", "org.team_name": "Nom de l'équipe", + "org.team_desc": "Description", "org.team_name_helper": "Le nom d'équipe doit être court et mémorable.", "org.team_desc_helper": "Décrire le but ou le rôle de l’équipe.", "org.team_access_desc": "Accès au dépôt", @@ -2634,6 +2728,7 @@ "org.settings.change_visibility_notices_2": "Les non-membres ne pourront plus accéder aux dépôts de l’organisation si la visibilité devient privée.", "org.settings.change_visibility_success": "La visibilité de l’organisation %s a été modifiée.", "org.settings.visibility_desc": "Changez qui peut voir l’organisation et ses dépôts.", + "org.settings.visibility.public": "Publique", "org.settings.visibility.limited": "Limité (Visible uniquement aux utilisateurs authentifiés)", "org.settings.visibility.limited_shortname": "Limité", "org.settings.visibility.private": "Privé (Visible uniquement aux membres de l’organisation)", @@ -2738,6 +2833,7 @@ "org.worktime.by_repositories": "Par dépôts", "org.worktime.by_milestones": "Par jalons", "org.worktime.by_members": "Par membres", + "admin.maintenance": "Maintenance", "admin.dashboard": "Tableau de bord", "admin.self_check": "Autodiagnostique", "admin.identity_access": "Identité et accès", @@ -2749,6 +2845,7 @@ "admin.integrations": "Intégrations", 
"admin.authentication": "Sources d'authentification", "admin.emails": "Courriels de l’utilisateur", + "admin.config": "Configuration", "admin.config_summary": "Résumé", "admin.config_settings": "Paramètres", "admin.notices": "Informations", @@ -2795,6 +2892,7 @@ "admin.dashboard.git_gc_repos": "Exécuter le ramasse-miette des dépôts", "admin.dashboard.resync_all_sshkeys": "Mettre à jour le fichier « ssh/authorized_keys » avec les clés SSH Gitea.", "admin.dashboard.resync_all_sshprincipals": "Mettre à jour le fichier « .ssh/authorized_principals » avec les principaux de Gitea SSH.", + "admin.dashboard.resync_all_hooks": "Resynchroniser les déclencheurs git de tous les dépôts (pre-receive, update, post-receive, proc-receive …)", "admin.dashboard.reinit_missing_repos": "Réinitialiser tous les dépôts Git manquants pour lesquels un enregistrement existe", "admin.dashboard.sync_external_users": "Synchroniser les données de l’utilisateur externe", "admin.dashboard.cleanup_hook_task_table": "Nettoyer la table hook_task", @@ -2852,6 +2950,7 @@ "admin.users.reserved": "Réservé", "admin.users.bot": "Robot", "admin.users.remote": "Distant", + "admin.users.2fa": "2FA", "admin.users.repos": "Dépôts", "admin.users.created": "Créés", "admin.users.last_login": "Dernière connexion", @@ -2936,6 +3035,8 @@ "admin.packages.owner": "Propriétaire", "admin.packages.creator": "Créateur", "admin.packages.name": "Nom", + "admin.packages.version": "Version", + "admin.packages.type": "Type", "admin.packages.repository": "Dépôt", "admin.packages.size": "Taille", "admin.packages.published": "Publiés", @@ -2950,6 +3051,7 @@ "admin.auths.auth_manage_panel": "Gestion des sources d'authentification", "admin.auths.new": "Ajouter une source d'authentification", "admin.auths.name": "Nom", + "admin.auths.type": "Type", "admin.auths.enabled": "Activé", "admin.auths.syncenabled": "Activer la synchronisation des utilisateurs", "admin.auths.updated": "Mis à jour", @@ -2958,6 +3060,8 @@ 
"admin.auths.security_protocol": "Protocole de sécurité", "admin.auths.domain": "Domaine", "admin.auths.host": "Hôte", + "admin.auths.port": "Port", + "admin.auths.bind_dn": "Lien DN", "admin.auths.bind_password": "Bind mot de passe", "admin.auths.user_base": "Utilisateur Search Base", "admin.auths.user_dn": "Utilisateur DN", @@ -2968,6 +3072,7 @@ "admin.auths.attribute_mail": "Attribut courriel", "admin.auths.attribute_ssh_public_key": "Attribut clé SSH publique", "admin.auths.attribute_avatar": "Attribut de l'avatar", + "admin.auths.ssh_keys_are_verified": "Les clés SSH du LDAP sont considérées comme vérifiées", "admin.auths.attributes_in_bind": "Aller chercher les attributs dans le contexte de liaison DN", "admin.auths.allow_deactivate_all": "Permettre à un résultat de recherche vide de désactiver tous les utilisateurs", "admin.auths.use_paged_search": "Utiliser la recherche paginée", @@ -3086,6 +3191,7 @@ "admin.config.ssh_enabled": "Activé", "admin.config.ssh_start_builtin_server": "Utiliser le serveur incorporé", "admin.config.ssh_domain": "Domaine du serveur SSH", + "admin.config.ssh_port": "Port", "admin.config.ssh_listen_port": "Port d'écoute", "admin.config.ssh_root_path": "Emplacement racine", "admin.config.ssh_minimum_key_size_check": "Vérification de la longueur de clé minimale", @@ -3095,10 +3201,12 @@ "admin.config.lfs_content_path": "Chemin de contenu LFS", "admin.config.lfs_http_auth_expiry": "Expiration de l'authentification HTTP LFS", "admin.config.db_config": "Configuration de la base de données", + "admin.config.db_type": "Type", "admin.config.db_host": "Hôte", "admin.config.db_name": "Nom", "admin.config.db_user": "Nom d'utilisateur", "admin.config.db_schema": "Schéma", + "admin.config.db_ssl_mode": "SSL", "admin.config.db_path": "Emplacement", "admin.config.service_config": "Configuration du service", "admin.config.register_email_confirm": "Exiger la confirmation du courriel lors de l’inscription", @@ -3176,15 +3284,15 @@ 
"admin.config.git_gc_args": "Arguments de GC", "admin.config.git_migrate_timeout": "Délai imparti pour une migration", "admin.config.git_mirror_timeout": "Délai imparti pour mettre à jour le miroir", - "admin.config.git_clone_timeout": "Délai imparti pour l'opération \"Clone\"", - "admin.config.git_pull_timeout": "Délai imparti pour l'opération \"Pull\"", "admin.config.git_gc_timeout": "Délai imparti pour l'opération \"GC\"", "admin.config.log_config": "Configuration du journal", + "admin.config.logger_name_fmt": "Journal : %s", "admin.config.disabled_logger": "Désactivé", "admin.config.access_log_mode": "Mode de journalisation d'accès", "admin.config.access_log_template": "Modèle de journal d'accès", "admin.config.xorm_log_sql": "Activer la journalisation SQL", "admin.config.set_setting_failed": "Impossible de définir le paramètre %s", + "admin.monitor.stats": "Stats", "admin.monitor.cron": "Tâches récurrentes", "admin.monitor.name": "Nom", "admin.monitor.schedule": "Planification", @@ -3193,9 +3301,11 @@ "admin.monitor.execute_times": "Exécutions", "admin.monitor.process": "Processus en cours d'exécution", "admin.monitor.stacktrace": "Piles d'execution", + "admin.monitor.trace": "Trace", "admin.monitor.performance_logs": "Journaux de performance", "admin.monitor.processes_count": "%d processus", "admin.monitor.download_diagnosis_report": "Télécharger le rapport de diagnostic", + "admin.monitor.desc": "Description", "admin.monitor.start": "Heure de démarrage", "admin.monitor.execute_time": "Heure d'Éxécution", "admin.monitor.last_execution_result": "Résultat", @@ -3205,6 +3315,7 @@ "admin.monitor.queues": "Files d'attente", "admin.monitor.queue": "File d'attente : %s", "admin.monitor.queue.name": "Nom", + "admin.monitor.queue.type": "Type", "admin.monitor.queue.exemplar": "Type d'exemple", "admin.monitor.queue.numberworkers": "Nombre de processus", "admin.monitor.queue.activeworkers": "Processus actifs", @@ -3228,8 +3339,10 @@ "admin.notices.inverse_selection": 
"Inverser la sélection", "admin.notices.delete_selected": "Supprimer les éléments sélectionnés", "admin.notices.delete_all": "Supprimer toutes les notifications", + "admin.notices.type": "Type", "admin.notices.type_1": "Dépôt", "admin.notices.type_2": "Tâche", + "admin.notices.desc": "Description", "admin.notices.op": "Opération", "admin.notices.delete_success": "Les informations systèmes ont été supprimées.", "admin.self_check.no_problem_found": "Aucun problème trouvé pour l’instant.", @@ -3274,22 +3387,26 @@ "tool.now": "maintenant", "tool.future": "futur", "tool.1s": "1 seconde", + "tool.1m": "1 minute", "tool.1h": "1 heure", "tool.1d": "1 jour", "tool.1w": "1 semaine", "tool.1mon": "1 mois", "tool.1y": "1 an", "tool.seconds": "%d secondes", + "tool.minutes": "%d minutes", "tool.hours": "%d heures", "tool.days": "%d jours", "tool.weeks": "%d semaines", "tool.months": "%d mois", "tool.years": "%d ans", "tool.raw_seconds": "secondes", + "tool.raw_minutes": "minutes", "dropzone.default_message": "Déposez les fichiers ou cliquez ici pour téléverser.", "dropzone.invalid_input_type": "Vous ne pouvez pas téléverser des fichiers de ce type.", "dropzone.file_too_big": "La taille du fichier ({{filesize}} Mo) dépasse la taille maximale ({{maxFilesize}} Mo).", "dropzone.remove_file": "Supprimer le fichier", + "notification.notifications": "Notifications", "notification.unread": "Non lue(s)", "notification.read": "Lue(s)", "notification.no_unread": "Aucune notification non lue.", @@ -3320,12 +3437,14 @@ "packages.empty.documentation": "Pour plus d'informations sur le registre de paquets, voir la documentation.", "packages.empty.repo": "Avez-vous téléchargé un paquet, mais il n'est pas affiché ici? 
Allez dans les paramètres du paquet et liez le à ce dépôt.", "packages.registry.documentation": "Pour plus d’informations sur le registre %s, voir la documentation.", + "packages.filter.type": "Type", "packages.filter.type.all": "Tous", "packages.filter.no_result": "Votre filtre n'affiche aucun résultat.", "packages.filter.container.tagged": "Balisé", "packages.filter.container.untagged": "Débalisé", "packages.published_by": "%[1]s publié par %[3]s", "packages.published_by_in": "%[1]s publié par %[3]s en %[5]s", + "packages.installation": "Installation", "packages.about": "À propos de ce paquet", "packages.requirements": "Exigences", "packages.dependencies": "Dépendances", @@ -3337,18 +3456,24 @@ "packages.details.documentation_site": "Site de documentation", "packages.details.license": "Licence", "packages.assets": "Ressources", + "packages.versions": "Versions", "packages.versions.view_all": "Voir tout", + "packages.dependency.id": "ID", + "packages.dependency.version": "Version", "packages.search_in_external_registry": "Rechercher dans %s", "packages.alpine.registry": "Configurez ce registre en ajoutant l’URL dans votre fichier /etc/apk/repositories :", "packages.alpine.registry.key": "Téléchargez la clé RSA publique du registre dans le dossier /etc/apk/keys/ pour vérifier la signature de l'index :", "packages.alpine.registry.info": "Choisissez $branch et $repository dans la liste ci-dessous.", "packages.alpine.install": "Pour installer le paquet, exécutez la commande suivante :", "packages.alpine.repository": "Informations sur le Dépôt", + "packages.alpine.repository.branches": "Branches", "packages.alpine.repository.repositories": "Dépôts", + "packages.alpine.repository.architectures": "Architectures", "packages.arch.registry": "Ajouter un serveur avec un dépôt et une architecture liés dans /etc/pacman.conf :", "packages.arch.install": "Synchroniser le paquet avec pacman :", "packages.arch.repository": "Informations sur le Dépôt", 
"packages.arch.repository.repositories": "Dépôts", + "packages.arch.repository.architectures": "Architectures", "packages.cargo.registry": "Configurez ce registre dans le fichier de configuration Cargo (par exemple ~/.cargo/config.toml) :", "packages.cargo.install": "Pour installer le paquet en utilisant Cargo, exécutez la commande suivante :", "packages.chef.registry": "Configurer ce registre dans votre fichier ~/.chef/config.rb :", @@ -3365,9 +3490,11 @@ "packages.container.details.type": "Type d'image", "packages.container.details.platform": "Plateforme", "packages.container.pull": "Tirez l'image depuis un terminal :", + "packages.container.images": "Images", "packages.container.digest": "Empreinte", "packages.container.multi_arch": "SE / Arch", "packages.container.layers": "Calques d'image", + "packages.container.labels": "Labels", "packages.container.labels.key": "Clé", "packages.container.labels.value": "Valeur", "packages.cran.registry": "Configurez ce registre dans le fichier Rprofile.site :", @@ -3376,7 +3503,9 @@ "packages.debian.registry.info": "Choisissez $distribution et $component dans la liste ci-dessous.", "packages.debian.install": "Pour installer le paquet, exécutez la commande suivante :", "packages.debian.repository": "Infos sur le Dépôt", + "packages.debian.repository.distributions": "Distributions", "packages.debian.repository.components": "Composants", + "packages.debian.repository.architectures": "Architectures", "packages.generic.download": "Télécharger le paquet depuis un terminal :", "packages.go.install": "Installer le paquet à partir de la ligne de commande :", "packages.helm.registry": "Configurer ce registre à partir d'un terminal :", @@ -3405,6 +3534,7 @@ "packages.rpm.distros.suse": "sur les distributions basées sur SUSE", "packages.rpm.install": "Pour installer le paquet, exécutez la commande suivante :", "packages.rpm.repository": "Informations sur le Dépôt", + "packages.rpm.repository.architectures": "Architectures", 
"packages.rpm.repository.multiple_groups": "Ce paquet est disponible en plusieurs groupes.", "packages.rubygems.install": "Pour installer le paquet en utilisant gem, exécutez la commande suivante :", "packages.rubygems.install2": "ou ajoutez-le au Gemfile :", @@ -3462,8 +3592,10 @@ "packages.owner.settings.chef.title": "Dépôt Chef", "packages.owner.settings.chef.keypair": "Générer une paire de clés", "packages.owner.settings.chef.keypair.description": "Une paire de clés est nécessaire pour s'authentifier au registre Chef. Si vous avez déjà généré une paire de clés, la génération d'une nouvelle paire de clés supprimera l'ancienne.", + "secrets.secrets": "Secrets", "secrets.description": "Les secrets seront transmis à certaines actions et ne pourront pas être lus autrement.", "secrets.none": "Il n'y a pas encore de secrets.", + "secrets.creation.description": "Description", "secrets.creation.name_placeholder": "Caractères alphanumériques ou tirets bas uniquement, insensibles à la casse, ne peut commencer par GITEA_ ou GITHUB_.", "secrets.creation.value_placeholder": "Entrez n’importe quoi. 
Les blancs cernant seront taillés.", "secrets.creation.description_placeholder": "Décrire brièvement votre dépôt (optionnel).", @@ -3476,6 +3608,7 @@ "secrets.deletion.success": "Le secret a été supprimé.", "secrets.deletion.failed": "Impossible de supprimer le secret.", "secrets.management": "Gestion des Secrets", + "actions.actions": "Actions", "actions.unit.desc": "Gérer les actions", "actions.status.unknown": "Inconnu", "actions.status.waiting": "En attente", @@ -3490,7 +3623,11 @@ "actions.runners.new": "Créer un nouvel exécuteur", "actions.runners.new_notice": "Comment démarrer un exécuteur", "actions.runners.status": "Statut", + "actions.runners.id": "ID", "actions.runners.name": "Nom", + "actions.runners.owner_type": "Type", + "actions.runners.description": "Description", + "actions.runners.labels": "Labels", "actions.runners.last_online": "Dernière fois en ligne", "actions.runners.runner_title": "Exécuteur", "actions.runners.task_list": "Tâches récentes sur cet exécuteur", @@ -3514,6 +3651,7 @@ "actions.runners.status.idle": "Inactif", "actions.runners.status.active": "Actif", "actions.runners.status.offline": "Hors-ligne", + "actions.runners.version": "Version", "actions.runners.reset_registration_token": "Réinitialiser le jeton d'enregistrement", "actions.runners.reset_registration_token_confirm": "Voulez-vous révoquer le jeton actuel et en générer un nouveau ?", "actions.runners.reset_registration_token_success": "Le jeton d’inscription de l’exécuteur a été réinitialisé avec succès", @@ -3554,6 +3692,7 @@ "actions.workflow.has_no_workflow_dispatch": "Le flux de travail %s n’a pas de déclencheur d’événement workflow_dispatch.", "actions.need_approval_desc": "Besoin d’approbation pour exécuter des flux de travail pour une demande d’ajout de bifurcation.", "actions.approve_all_success": "Tous les flux de travail ont été acceptés.", + "actions.variables": "Variables", "actions.variables.management": "Gestion des variables", "actions.variables.creation": 
"Ajouter une variable", "actions.variables.none": "Il n'y a pas encore de variables.", @@ -3584,6 +3723,7 @@ "projects.type-3.display_name": "Projet d’organisation", "projects.enter_fullscreen": "Plein écran", "projects.exit_fullscreen": "Quitter le plein écran", + "git.filemode.changed_filemode": "%[1]s → %[2]s", "git.filemode.directory": "Dossier", "git.filemode.normal_file": "Fichier normal", "git.filemode.executable_file": "Fichier exécutable", diff --git a/options/locale/locale_ga-IE.json b/options/locale/locale_ga-IE.json index 679963630a..18bef7188c 100644 --- a/options/locale/locale_ga-IE.json +++ b/options/locale/locale_ga-IE.json @@ -32,6 +32,7 @@ "password": "Pasfhocal", "access_token": "Comhartha Rochtana", "re_type": "Deimhnigh Pasfhocal", + "captcha": "CAPTCHA", "twofa": "Fíordheimhniú Dhá-Fhachtóir", "twofa_scratch": "Cód Scratch Dhá-Fhachtóra", "passcode": "Paschód", @@ -132,6 +133,7 @@ "confirm_delete_selected": "Deimhnigh chun gach earra roghnaithe a scriosadh?", "name": "Ainm", "value": "Luach", + "readme": "Léigh-mé", "filter_title": "Scagaire", "filter.clear": "Scagaire Soiléir", "filter.is_archived": "Cartlannaithe", @@ -229,6 +231,7 @@ "install.db_name": "Ainm Bunachar Sonraí", "install.db_schema": "Scéim", "install.db_schema_helper": "Fág bán le haghaidh réamhshocraithe bunachar sonraí (\"poiblí\").", + "install.ssl_mode": "SSL", "install.path": "Cosán", "install.sqlite_helper": "Conair comhad don bhunachar sonraí SQLite3. Cuir
    isteach cosán iomlán má reáchtáil tú Gitea mar sheirbhís.", "install.reinstall_error": "Tá tú ag iarraidh a shuiteáil i mbunachar sonraí Gitea atá ann cheana", @@ -406,6 +409,7 @@ "auth.twofa_scratch_token_incorrect": "Tá do chód scratch mícheart.", "auth.twofa_required": "Ní mór duit fíordheimhniú dhá fhachtóir a shocrú chun rochtain a fháil ar stórtha, nó iarracht a dhéanamh logáil isteach arís.", "auth.login_userpass": "Sínigh isteach", + "auth.login_openid": "OpenID", "auth.oauth_signup_tab": "Cláraigh Cuntas Nua", "auth.oauth_signup_title": "Comhlánaigh Cuntas Nua", "auth.oauth_signup_submit": "Cuntas Comhlánaigh", @@ -654,6 +658,7 @@ "settings.twofa": "Fíordheimhniú Dhá Fachtóir (TOTP)", "settings.account_link": "Cuntais Nasctha", "settings.organization": "Eagraíochtaí", + "settings.uid": "UID", "settings.webauthn": "Fíordheimhniú Dhá-Fachtóir (Eochracha Slándála)", "settings.public_profile": "Próifíl Phoiblí", "settings.biography_placeholder": "Inis dúinn beagán fút féin! (Is féidir leat Markdown a úsáid)", @@ -991,6 +996,7 @@ "repo.multiple_licenses": "Ceadúnais Iolracha", "repo.object_format": "Formáid Oibiacht", "repo.object_format_helper": "Formáid oibiacht an stór. Ní féidir é a athrú níos déanaí. 
Is é SHA1 an comhoiriúnacht is fearr.", + "repo.readme": "LÉIGHMÉ", "repo.readme_helper": "Roghnaigh comhad teimpléad README.", "repo.readme_helper_desc": "Seo an áit inar féidir leat cur síos iomlán a scríobh do thionscadal.", "repo.auto_init": "Taisce a thionscnamh (Cuireann sé .gitignore, Ceadúnas agus README)", @@ -1055,6 +1061,7 @@ "repo.desc.template": "Teimpléad", "repo.desc.internal": "Inmheánach", "repo.desc.archived": "Cartlannaithe", + "repo.desc.sha256": "SHA256", "repo.template.items": "Míreanna Teimpléad", "repo.template.git_content": "Ábhar Git (Brainse Réamhshocraithe)", "repo.template.git_hooks": "Crúcanna Git", @@ -1083,6 +1090,7 @@ "repo.migrate_options_lfs_endpoint.description.local": "Tacaítear le cosán freastalaí áitiúil freisin.", "repo.migrate_options_lfs_endpoint.placeholder": "Mura bhfágtar bán é, díorthófar an críochphointe ón URL clónála.", "repo.migrate_items": "Míreanna Imirce", + "repo.migrate_items_wiki": "Vicí", "repo.migrate_items_milestones": "Clocha míle", "repo.migrate_items_labels": "Lipéid", "repo.migrate_items_issues": "Saincheisteanna", @@ -1728,8 +1736,11 @@ "repo.issues.reference_link": "Tagairt: %s", "repo.compare.compare_base": "bonn", "repo.compare.compare_head": "déan comparáid", + "repo.compare.title": "Athruithe a chur i gcomparáid", + "repo.compare.description": "Roghnaigh dhá bhrainse nó clib chun a fheiceáil cad atá athraithe nó chun iarratas tarraingthe nua a thosú.", "repo.pulls.desc": "Cumasaigh iarratais tarraingthe agus athbhreithnithe cód.", "repo.pulls.new": "Iarratas Tarraingthe Nua", + "repo.pulls.new.description": "Pléigh agus athbhreithnigh na hathruithe sa chomparáid seo le daoine eile.", "repo.pulls.new.blocked_user": "Ní féidir iarratas tarraingthe a chruthú toisc go bhfuil úinéir an stórais bac ort.", "repo.pulls.new.must_collaborator": "Caithfidh tú a bheith ina chomhoibritheoir chun iarratas tarraingthe a chruthú.", "repo.pulls.new.already_existed": "Tá iarratas tarraingthe idir na brainsí seo ann 
cheana féin", @@ -1739,7 +1750,6 @@ "repo.pulls.allow_edits_from_maintainers": "Ceadaigh eagarthóirí ó chothabhálaí", "repo.pulls.allow_edits_from_maintainers_desc": "Is féidir le húsáideoirí a bhfuil rochtain scríofa acu ar an mbunbhrainse brú chuig an bhrainse", "repo.pulls.allow_edits_from_maintainers_err": "Theip ar nuashonrú", - "repo.pulls.compare_changes_desc": "Roghnaigh an brainse le cumasc isteach agus an brainse le tarraingt uaidh.", "repo.pulls.has_viewed_file": "Breathnaithe", "repo.pulls.has_changed_since_last_review": "Athraithe ó d'athbhreithniú deire", "repo.pulls.viewed_files_label": "Breathnaíodh ar %[1]d / %[2]d comhaid", @@ -2313,8 +2323,19 @@ "repo.settings.slack_domain": "Fearann", "repo.settings.slack_channel": "Cainéal", "repo.settings.add_web_hook_desc": "Comhtháthaigh %s isteach i do stóras.", + "repo.settings.web_hook_name_gitea": "Gitea", + "repo.settings.web_hook_name_gogs": "Gogs", + "repo.settings.web_hook_name_slack": "Slack", + "repo.settings.web_hook_name_discord": "Discord", + "repo.settings.web_hook_name_dingtalk": "DingTalk", "repo.settings.web_hook_name_telegram": "Teileagram", "repo.settings.web_hook_name_matrix": "Maitrís", + "repo.settings.web_hook_name_msteams": "Microsoft Teams", + "repo.settings.web_hook_name_feishu_or_larksuite": "Feishu / Lark Suite", + "repo.settings.web_hook_name_feishu": "Feishu", + "repo.settings.web_hook_name_larksuite": "Lark Suite", + "repo.settings.web_hook_name_wechatwork": "WeCom (Wechat Work)", + "repo.settings.web_hook_name_packagist": "Packagist", "repo.settings.packagist_username": "Ainm úsáideora Pacagist", "repo.settings.packagist_api_token": "Comhartha API", "repo.settings.packagist_package_url": "URL pacáiste Packagist", @@ -2460,6 +2481,7 @@ "repo.settings.unarchive.success": "Rinneadh an stóras a dhíchartlann go rathúil.", "repo.settings.unarchive.error": "Tharla earráid agus tú ag iarraidh an stóras a dhíchartlannú. 
Féach an logáil le haghaidh tuilleadh sonraí.", "repo.settings.update_avatar_success": "Nuashonraíodh avatar an stóras.", + "repo.settings.lfs": "LFS", "repo.settings.lfs_filelist": "Comhaid LFS a stóráiltear sa stóras seo", "repo.settings.lfs_no_lfs_files": "Níl aon chomhaid LFS stóráilte sa stóras seo", "repo.settings.lfs_findcommits": "Aimsigh gealltanais", @@ -2479,6 +2501,7 @@ "repo.settings.lfs_force_unlock": "Díghlasáil Fórsa", "repo.settings.lfs_pointers.found": "Fuarthas %d pointeoir(í) bloba — %d gaolmhar, %d neamhghaolmhar (%d ar iarraidh ón stóras)", "repo.settings.lfs_pointers.sha": "SHA Blob", + "repo.settings.lfs_pointers.oid": "OID", "repo.settings.lfs_pointers.inRepo": "I Stóras", "repo.settings.lfs_pointers.exists": "Ann sa siopa", "repo.settings.lfs_pointers.accessible": "Inrochtana don Úsáideoir", @@ -2844,6 +2867,7 @@ "admin.dashboard.task.finished": "Tasc: Tá %[1]s tosaithe ag %[2]s críochnaithe", "admin.dashboard.task.unknown": "Tasc anaithnid: %[1]s", "admin.dashboard.cron.started": "Cron tosaithe: %[1]s", + "admin.dashboard.cron.process": "Cron: %[1]s", "admin.dashboard.cron.cancelled": "Cron: %[1]s cealaithe: %[3]s", "admin.dashboard.cron.error": "Earráid i gCron: %s: %[3]s", "admin.dashboard.cron.finished": "Cron: %[1]s críochnaithe", @@ -2923,6 +2947,7 @@ "admin.users.reserved": "In áirithe", "admin.users.bot": "Bota", "admin.users.remote": "Iargúlta", + "admin.users.2fa": "2FA", "admin.users.repos": "Stórais", "admin.users.created": "Cruthaithe", "admin.users.last_login": "Sínigh Isteach Deiridh", @@ -3044,6 +3069,7 @@ "admin.auths.attribute_mail": "Tréith ríomhphoist", "admin.auths.attribute_ssh_public_key": "Tréith Eochair SSH Phoiblí", "admin.auths.attribute_avatar": "Tréith Avatar", + "admin.auths.ssh_keys_are_verified": "Meastar gur fíoraithe iad eochracha SSH in LDAP", "admin.auths.attributes_in_bind": "Faigh tréithe i gComhthéacs Bind DN", "admin.auths.allow_deactivate_all": "Lig do thoradh cuardaigh folamh gach úsáideoir a 
dhíghníomhachtú", "admin.auths.use_paged_search": "Úsáid Cuardach Leathanaigh", @@ -3177,6 +3203,7 @@ "admin.config.db_name": "Ainm", "admin.config.db_user": "Ainm úsáideora", "admin.config.db_schema": "Scéim", + "admin.config.db_ssl_mode": "SSL", "admin.config.db_path": "Cosán", "admin.config.service_config": "Cumraíocht Seirbhíse", "admin.config.register_email_confirm": "Deimhniú Ríomhphost a éileamh chun Clárú", @@ -3430,6 +3457,7 @@ "packages.assets": "Sócmhainní", "packages.versions": "Leaganacha", "packages.versions.view_all": "Féach ar gach", + "packages.dependency.id": "ID", "packages.dependency.version": "Leagan", "packages.search_in_external_registry": "Cuardaigh i %s", "packages.alpine.registry": "Socraigh an clárlann seo tríd an URL a chur i do chomhad /etc/apk/repositories:", @@ -3594,6 +3622,7 @@ "actions.runners.new": "Cruthaigh reathaí nua", "actions.runners.new_notice": "Conas reathaí a thosú", "actions.runners.status": "Stádas", + "actions.runners.id": "ID", "actions.runners.name": "Ainm", "actions.runners.owner_type": "Cineál", "actions.runners.description": "Cur síos", @@ -3693,6 +3722,7 @@ "projects.type-3.display_name": "Tionscadal Eagrúcháin", "projects.enter_fullscreen": "Lánscáileán", "projects.exit_fullscreen": "Scoir Lánscáileáin", + "git.filemode.changed_filemode": "%[1]s → %[2]s", "git.filemode.directory": "Eolaire", "git.filemode.normal_file": "Comhad gnáth", "git.filemode.executable_file": "Comhad infheidhmithe", diff --git a/options/locale/locale_tr-TR.json b/options/locale/locale_tr-TR.json index 4503d13930..c7c85ad1dd 100644 --- a/options/locale/locale_tr-TR.json +++ b/options/locale/locale_tr-TR.json @@ -3,6 +3,7 @@ "dashboard": "Pano", "explore_title": "Keşfet", "help": "Yardım", + "logo": "Logo", "sign_in": "Giriş Yap", "sign_in_with_provider": "%s ile oturum aç", "sign_in_or": "veya", @@ -31,6 +32,7 @@ "password": "Parola", "access_token": "Erişim Kodu", "re_type": "Parolayı Doğrula", + "captcha": "CAPTCHA", "twofa": "İki 
Aşamalı Doğrulama", "twofa_scratch": "İki aşamalı kazınmış kod", "passcode": "Şifre", @@ -152,6 +154,7 @@ "search.fuzzy_tooltip": "Arama terimine benzeyen sonuçları da içer", "search.words": "Kelimeler", "search.words_tooltip": "Sadece arama terimi kelimeleriyle eşleşen sonuçları içer", + "search.regexp": "Regexp", "search.regexp_tooltip": "Sadece regexp arama terimiyle tamamen eşleşen sonuçları içer", "search.exact": "Tam", "search.exact_tooltip": "Sadece arama terimiyle tamamen eşleşen sonuçları içer", @@ -171,7 +174,7 @@ "search.runner_kind": "Çalıştırıcıları ara...", "search.no_results": "Eşleşen sonuç bulunamadı.", "search.issue_kind": "Konuları ara...", - "search.pull_kind": "Değişiklikleri ara...", + "search.pull_kind": "Değişiklik isteklerini ara…", "search.keyword_search_unavailable": "Anahtar kelime ile arama şu an mevcut değil. Lütfen site yöneticinizle iletişime geçin.", "aria.navbar": "Gezinti Çubuğu", "aria.footer": "Alt Bilgi", @@ -184,6 +187,7 @@ "editor.buttons.heading.tooltip": "Başlık ekle", "editor.buttons.bold.tooltip": "Kalın metin ekle", "editor.buttons.italic.tooltip": "Eğik metin ekle", + "editor.buttons.strikethrough.tooltip": "Üstü çizili metin ekle", "editor.buttons.quote.tooltip": "Metni alıntıla", "editor.buttons.code.tooltip": "Kod ekle", "editor.buttons.link.tooltip": "Bağlantı ekle", @@ -227,6 +231,7 @@ "install.db_name": "Veritabanı Adı", "install.db_schema": "Şema", "install.db_schema_helper": "Veritabanı varsayılanı için boş bırakın (\"genel\").", + "install.ssl_mode": "SSL", "install.path": "Yol", "install.sqlite_helper": "SQLite3 veritabanı dosya yolu.
    Gitea'yı servis olarak çalıştırıyorsanız tam yol adını girin.", "install.reinstall_error": "Mevcut bir Gitea veritabanına yüklemeye çalışıyorsunuz", @@ -643,6 +648,7 @@ "settings.appearance": "Görünüm", "settings.password": "Parola", "settings.security": "Güvenlik", + "settings.avatar": "Avatar", "settings.ssh_gpg_keys": "SSH / GPG Anahtarları", "settings.social": "Sosyal Medya Hesapları", "settings.applications": "Uygulamalar", @@ -652,6 +658,7 @@ "settings.twofa": "İki Aşamalı Kimlik Doğrulama (TOTP)", "settings.account_link": "Bağlı Hesaplar", "settings.organization": "Organizasyonlar", + "settings.uid": "UID", "settings.webauthn": "İki-Aşamalı Kimlik Doğrulama (Güvenlik Anahtarları)", "settings.public_profile": "Herkese Açık Profil", "settings.biography_placeholder": "Bize kendiniz hakkında birşeyler söyleyin! (Markdown kullanabilirsiniz)", @@ -970,6 +977,7 @@ "repo.fork.blocked_user": "Depo çatallanamıyor, depo sahibi tarafından engellenmişsiniz.", "repo.use_template": "Bu şablonu kullan", "repo.open_with_editor": "%s ile aç", + "repo.download_directory_as": "Dizini %s olarak indir", "repo.download_zip": "ZIP indir", "repo.download_tar": "TAR.GZ indir", "repo.download_bundle": "BUNDLE indir", @@ -989,6 +997,7 @@ "repo.multiple_licenses": "Çoklu Lisans", "repo.object_format": "Nesne Biçimi", "repo.object_format_helper": "Deponun nesne biçimi. Daha sonra değiştirilemez. 
SHA1 en uyumlu olandır.", + "repo.readme": "README", "repo.readme_helper": "Bir README dosyası şablonu seçin.", "repo.readme_helper_desc": "Projeniz için eksiksiz bir açıklama yazabileceğiniz yer burasıdır.", "repo.auto_init": "Depoyu başlat (.gitignore, Lisans ve README dosyalarını ekler)", @@ -1053,6 +1062,7 @@ "repo.desc.template": "Şablon", "repo.desc.internal": "Dahili", "repo.desc.archived": "Arşivlenmiş", + "repo.desc.sha256": "SHA256", "repo.template.items": "Şablon Öğeleri", "repo.template.git_content": "Git İçeriği (Varsayılan Dal)", "repo.template.git_hooks": "Git İstemcileri", @@ -1081,6 +1091,7 @@ "repo.migrate_options_lfs_endpoint.description.local": "Yerel bir sunucu yolu da destekleniyor.", "repo.migrate_options_lfs_endpoint.placeholder": "Boş bırakılırsa, uç nokta klon URL'sinden türetilecektir", "repo.migrate_items": "Göç Öğeleri", + "repo.migrate_items_wiki": "Viki", "repo.migrate_items_milestones": "Dönüm noktaları", "repo.migrate_items_labels": "Etiketler", "repo.migrate_items_issues": "Konular", @@ -1175,6 +1186,7 @@ "repo.release": "Sürüm", "repo.releases": "Sürüm", "repo.tag": "Etiket", + "repo.git_tag": "Git Etiketi", "repo.released_this": "bu sürümü yayınladı", "repo.tagged_this": "şunu etiketledi", "repo.file.title": "%s dalındaki/etiketindeki %s", @@ -1234,8 +1246,11 @@ "repo.editor.must_be_on_a_branch": "Bu dosyada değişiklik yapmak veya önermek için bir dalda olmalısınız.", "repo.editor.fork_before_edit": "Bu dosyada değişiklik yapmak veya önermek için depoyu çatallamalısınız.", "repo.editor.delete_this_file": "Dosyayı Sil", + "repo.editor.delete_this_directory": "Dizini Sil", "repo.editor.must_have_write_access": "Bu dosyada değişiklikler yapmak veya önermek için yazma erişiminizin olması gerekir.", "repo.editor.file_delete_success": "\"%s\" dosyası silindi.", + "repo.editor.directory_delete_success": "\"%s\" dizini silindi.", + "repo.editor.delete_directory": "'%s' dizinini sil", "repo.editor.name_your_file": "Dosyanızı 
isimlendirin…", "repo.editor.filename_help": "Bölü ('/') işaretiyle ismini yazarak bir dizin ekleyebilirsiniz. Dizini silmek için girdi sahasının başına backspace yazmalısınız.", "repo.editor.or": "veya", @@ -1356,6 +1371,7 @@ "repo.projects.column.new": "Yeni Sütun", "repo.projects.column.set_default": "Varsayılanı Ayarla", "repo.projects.column.set_default_desc": "Bu sütunu kategorize edilmemiş konular ve değişiklik istekleri için varsayılan olarak ayarlayın", + "repo.projects.column.default_column_hint": "Bu projeye eklenen yeni konular bu sütuna eklenecektir", "repo.projects.column.delete": "Sutün Sil", "repo.projects.column.deletion_desc": "Bir proje sütununun silinmesi, ilgili tüm konuları varsayılan sütuna taşır. Devam edilsin mi?", "repo.projects.column.color": "Renk", @@ -1474,6 +1490,7 @@ "repo.issues.filter_sort.feweststars": "En az yıldızlılar", "repo.issues.filter_sort.mostforks": "En çok çatallananlar", "repo.issues.filter_sort.fewestforks": "En az çatallananlar", + "repo.issues.quick_goto": "Konuya Git", "repo.issues.action_open": "Açık", "repo.issues.action_close": "Kapat", "repo.issues.action_label": "Etiket", @@ -1686,6 +1703,7 @@ "repo.issues.review.content.empty": "İstenen değişiklik(ler)i belirten bir yorum bırakmanız gerekir.", "repo.issues.review.reject": "%s değişiklik istedi", "repo.issues.review.wait": "için %s inceleme isteği", + "repo.issues.review.codeowners_rules": "CODEOWNERS kuralları", "repo.issues.review.add_review_request": "%s tarafından %s inceleme istedi", "repo.issues.review.remove_review_request": "%s %s için inceleme isteği kaldırıldı", "repo.issues.review.remove_review_request_self": "%s incelemeyi reddetti", @@ -1721,17 +1739,20 @@ "repo.issues.reference_link": "Referans: %s", "repo.compare.compare_base": "temel", "repo.compare.compare_head": "karşılaştır", + "repo.compare.title": "Değişiklikleri karşılaştırma", + "repo.compare.description": "Değişiklikleri görmek veya yeni bir değişiklik isteği başlatmak için iki dal veya 
etiket seçin.", "repo.pulls.desc": "Değişiklik isteklerini ve kod incelemelerini etkinleştir.", "repo.pulls.new": "Yeni Değişiklik İsteği", + "repo.pulls.new.description": "Bu karşılaştırmadaki değişiklikleri başkalarıyla tartışın ve gözden geçirin.", "repo.pulls.new.blocked_user": "Değişiklik isteği oluşturulamıyor, depo sahibi tarafından engellenmişsiniz.", "repo.pulls.new.must_collaborator": "Değişiklik isteği oluşturmak için bir katkıcı olmalısınız.", + "repo.pulls.new.already_existed": "Bu dallar arasında halihazırda bir değişiklik isteği mevcut", "repo.pulls.edit.already_changed": "Değişiklik isteğine yapılan değişiklikler kaydedilemiyor. İçerik başka kullanıcı tarafından değiştirilmiş gözüküyor. Diğerlerinin değişikliklerinin üzerine yazmamak için lütfen sayfayı yenileyin ve tekrar düzenlemeye çalışın.", "repo.pulls.view": "Değişiklik İsteği Görüntüle", "repo.pulls.compare_changes": "Yeni Değişiklik İsteği", "repo.pulls.allow_edits_from_maintainers": "Bakımcıların düzenlemelerine izin ver", "repo.pulls.allow_edits_from_maintainers_desc": "Ana dala yazma hakkı olan kullanıcılar bu dala da gönderebilirler", "repo.pulls.allow_edits_from_maintainers_err": "Güncelleme başarısız oldu", - "repo.pulls.compare_changes_desc": "Birleştirmek için hedef ve kaynak dalı seçin.", "repo.pulls.has_viewed_file": "Görüldü", "repo.pulls.has_changed_since_last_review": "Son incelemenizden sonra değişti", "repo.pulls.viewed_files_label": "%[1]d / %[2]d dosya görüldü", @@ -1775,6 +1796,7 @@ "repo.pulls.remove_prefix": "%s ön ekini kaldır", "repo.pulls.data_broken": "Bu değişiklik isteği, çatallama bilgilerinin eksik olması nedeniyle bozuldu.", "repo.pulls.files_conflicted": "Bu değişiklik isteğinde, hedef dalla çakışan değişiklikler var.", + "repo.pulls.files_conflicted_no_listed_files": "(Çakışan dosya yok)", "repo.pulls.is_checking": "Birleştirme çakışması denetleniyor…", "repo.pulls.is_ancestor": "Bu dal zaten hedef dalda mevcut. 
Birleştirilecek bir şey yok.", "repo.pulls.is_empty": "Bu daldaki değişiklikler zaten hedef dalda mevcut. Bu boş bir işleme olacaktır.", @@ -1829,12 +1851,16 @@ "repo.pulls.status_checking": "Bazı denetlemeler beklemede", "repo.pulls.status_checks_success": "Tüm denetlemeler başarılı oldu", "repo.pulls.status_checks_warning": "Bazı kontroller uyarılar bildirdi", - "repo.pulls.status_checks_failure": "Bazı kontroller başarısız oldu", + "repo.pulls.status_checks_failure_required": "Bazı gerekli denetlemeler başarısız oldu", + "repo.pulls.status_checks_failure_optional": "Bazı isteğe bağlı denetlemeler başarısız oldu", "repo.pulls.status_checks_error": "Bazı kontroller hatalar bildirdi", "repo.pulls.status_checks_requested": "Gerekli", "repo.pulls.status_checks_details": "Ayrıntılar", "repo.pulls.status_checks_hide_all": "Tüm denetlemeleri gizle", "repo.pulls.status_checks_show_all": "Tüm denetlemeleri göster", + "repo.pulls.status_checks_approve_all": "Tüm iş akışlarını onayla", + "repo.pulls.status_checks_need_approvals": "%d iş akışı onay bekliyor", + "repo.pulls.status_checks_need_approvals_helper": "İş akışı ancak depo sahibinin onayı sonrasında çalışabilir.", "repo.pulls.update_branch": "Dalı birleştirmeyle güncelle", "repo.pulls.update_branch_rebase": "Dalı yeniden yapılandırmayla güncelle", "repo.pulls.update_branch_success": "Dal güncellemesi başarıyla gerçekleştirildi", @@ -1916,6 +1942,7 @@ "repo.signing.wont_sign.not_signed_in": "Oturum açmadınız.", "repo.ext_wiki": "Harici Vikiye Erişim", "repo.ext_wiki.desc": "Harici bir wiki'ye bağlantı.", + "repo.wiki": "Viki", "repo.wiki.welcome": "Wiki'ye Hoşgeldiniz.", "repo.wiki.welcome_desc": "Wiki, katkıcılarla belge yazmanıza ve paylaşmanıza olanak tanır.", "repo.wiki.desc": "Katkıcılarla belgeler yazın ve paylaşın.", @@ -2284,6 +2311,9 @@ "repo.settings.event_package": "Paket", "repo.settings.event_package_desc": "Bir depoda paket oluşturuldu veya silindi.", "repo.settings.branch_filter": "Dal filtresi", + 
"repo.settings.branch_filter_desc_1": "İtme, dal oluşturma ve dal silme olayları için dal (ve referans adı) izin listesi, glob kalıbı olarak belirtilir. Boş veya * ise, tüm dallar ve etiketler için olaylar raporlanır.", + "repo.settings.branch_filter_desc_2": "Tam referans adlarını eşleştirmek için refs/heads/ veya refs/tags/ önekini kullanın.", + "repo.settings.branch_filter_desc_doc": "Sözdizimi için %[2]s belgelerine bakın.", "repo.settings.authorization_header": "Yetkilendirme Başlığı", "repo.settings.authorization_header_desc": "Mevcutsa isteklere yetkilendirme başlığı olarak eklenecektir. Örnekler: %s.", "repo.settings.active": "Etkin", @@ -2298,6 +2328,19 @@ "repo.settings.slack_domain": "Alan Adı", "repo.settings.slack_channel": "Kanal", "repo.settings.add_web_hook_desc": "%s web kancasını deponuza ekleyin.", + "repo.settings.web_hook_name_gitea": "Gitea", + "repo.settings.web_hook_name_gogs": "Gogs", + "repo.settings.web_hook_name_slack": "Slack", + "repo.settings.web_hook_name_discord": "Discord", + "repo.settings.web_hook_name_dingtalk": "DingTalk", + "repo.settings.web_hook_name_telegram": "Telegram", + "repo.settings.web_hook_name_matrix": "Matrix", + "repo.settings.web_hook_name_msteams": "Microsoft Teams", + "repo.settings.web_hook_name_feishu_or_larksuite": "Feishu / Lark Suite", + "repo.settings.web_hook_name_feishu": "Feishu", + "repo.settings.web_hook_name_larksuite": "Lark Suite", + "repo.settings.web_hook_name_wechatwork": "WeCom (Wechat Work)", + "repo.settings.web_hook_name_packagist": "Packagist", "repo.settings.packagist_username": "Packagist kullanıcı adı", "repo.settings.packagist_api_token": "API erişim anahtarı", "repo.settings.packagist_package_url": "Packagist paket URL'si", @@ -2443,6 +2486,7 @@ "repo.settings.unarchive.success": "Depo başarıyla arşivden çıkarıldı.", "repo.settings.unarchive.error": "Depoyu arşivden çıkarmaya çalışırken bir hata oluştu. 
Daha fazla ayrıntı için günlüğe bakın.", "repo.settings.update_avatar_success": "Depo resmi güncellendi.", + "repo.settings.lfs": "LFS", "repo.settings.lfs_filelist": "Bu depoda barındırılan LFS dosyaları", "repo.settings.lfs_no_lfs_files": "Bu depoda barındırılan herhangi bir LFS dosyası yok", "repo.settings.lfs_findcommits": "İşleme bul", @@ -2461,6 +2505,8 @@ "repo.settings.lfs_lock_file_no_exist": "Kilitli dosya varsayılan dalda mevcut değil", "repo.settings.lfs_force_unlock": "Kilidi Açmaya Zorla", "repo.settings.lfs_pointers.found": "Bulunan %d blob işaretçi(leri) - %d ilişkili, %d ilişkilendirilmemiş (%d mağazadan eksik)", + "repo.settings.lfs_pointers.sha": "Blob SHA", + "repo.settings.lfs_pointers.oid": "OID", "repo.settings.lfs_pointers.inRepo": "Depoda", "repo.settings.lfs_pointers.exists": "Mağazada var", "repo.settings.lfs_pointers.accessible": "Kullanıcı tarafından erişilebilir", @@ -2488,6 +2534,7 @@ "repo.diff.whitespace_ignore_at_eol": "Satır sonundaki boşluk değişiklikleri yoksay", "repo.diff.stats_desc": " %d değiştirilmiş dosya ile %d ekleme ve %d silme", "repo.diff.stats_desc_file": "%d değişiklik: %d ekleme ve %d silme", + "repo.diff.bin": "BIN", "repo.diff.bin_not_shown": "İkili dosya gösterilmiyor.", "repo.diff.view_file": "Dosyayı Görüntüle", "repo.diff.file_before": "Önce", @@ -2500,8 +2547,8 @@ "repo.diff.too_many_files": "Bu fark içinde çok fazla dosya değişikliği olduğu için bazı dosyalar gösterilmiyor", "repo.diff.show_more": "Daha Fazla Göster", "repo.diff.load": "Fark Yükle", - "repo.diff.generated": "üretilen", - "repo.diff.vendored": "sağlanmış", + "repo.diff.generated": "Üretilen", + "repo.diff.vendored": "Sağlanmış", "repo.diff.comment.add_line_comment": "Satır yorum ekle", "repo.diff.comment.placeholder": "Yorum Yap", "repo.diff.comment.add_single_comment": "Bir yorum ekle", @@ -2575,6 +2622,13 @@ "repo.release.add_tag": "Yalnızca Etiket Oluştur", "repo.release.releases_for": "%s sürümleri", "repo.release.tags_for": "%s 
etiketleri", + "repo.release.notes": "Sürüm notları", + "repo.release.generate_notes": "Sürüm notlarını oluştur", + "repo.release.generate_notes_desc": "Bu sürüm için birleştirilmiş değişiklik isteklerini ve değişiklik günlüğü bağlantısını otomatik olarak ekleyin.", + "repo.release.previous_tag": "Önceki etiket", + "repo.release.generate_notes_tag_not_found": "Bu depoda \"%s\" etiketi yok.", + "repo.release.generate_notes_target_not_found": "\"%s\" sürüm hedefi bulunamadı.", + "repo.release.generate_notes_missing_tag": "Sürüm notlarını oluşturmak için bir etiket ismi giriniz.", "repo.branch.name": "Dal Adı", "repo.branch.already_exists": "\"%s\" isimli bir dal zaten mevcut.", "repo.branch.delete_head": "Sil", @@ -2611,7 +2665,7 @@ "repo.branch.new_branch_from": "\"%s\" dalından yeni dal oluştur", "repo.branch.renamed": "%s dalının adı %s olarak değiştirildi.", "repo.branch.rename_default_or_protected_branch_error": "Varsayılan veya korunan dalların adını sadece yöneticiler değiştirebilir.", - "repo.branch.rename_protected_branch_failed": "Bu dal glob tabanlı koruma kurallarıyla korunuyor.", + "repo.branch.rename_protected_branch_failed": "Dal koruma kuralları nedeniyle dalın adı değiştirilemedi.", "repo.branch.commits_divergence_from": "İşleme farklılığı: %[3]s dalının %[1]d işleme gerisinde ve %[2]d işleme ilerisinde", "repo.branch.commits_no_divergence": "%[1]s dalı ile aynı", "repo.tag.create_tag": "%s etiketi oluştur", @@ -2818,6 +2872,7 @@ "admin.dashboard.task.finished": "Görev: %[1]s %[2]s tarafından başlatıldı ve bitti", "admin.dashboard.task.unknown": "Bilinmeyen görev: %[1]s", "admin.dashboard.cron.started": "Cron Başlatıldı: %[1]s", + "admin.dashboard.cron.process": "Cron: %[1]s", "admin.dashboard.cron.cancelled": "Cron: %[1]s iptal edildi: %[3]s", "admin.dashboard.cron.error": "Cron Hatası: %s: %[3]s", "admin.dashboard.cron.finished": "Cron: %[1]s bitti", @@ -2839,6 +2894,7 @@ "admin.dashboard.git_gc_repos": "Tüm depolardaki atıkları temizle", 
"admin.dashboard.resync_all_sshkeys": "'.ssh/authority_keys' dosyasını Gitea SSH anahtarlarıyla güncelle", "admin.dashboard.resync_all_sshprincipals": "'.ssh/authorized_principals' dosyasını Gitea SSH sorumlularıyla güncelleyin", + "admin.dashboard.resync_all_hooks": "Tüm depoların git kancalarını yeniden senkronize edin (pre-receive, update, post-receive, proc-receive, ...)", "admin.dashboard.reinit_missing_repos": "Kayıtları bulunanlar için tüm eksik Git depolarını yeniden başlat", "admin.dashboard.sync_external_users": "Harici kullanıcı verisini senkronize et", "admin.dashboard.cleanup_hook_task_table": "Hook_task tablosunu temizle", @@ -2894,6 +2950,7 @@ "admin.users.admin": "Yönetici", "admin.users.restricted": "Kısıtlanmış", "admin.users.reserved": "Rezerve", + "admin.users.bot": "Bot", "admin.users.remote": "Uzak", "admin.users.2fa": "2FD", "admin.users.repos": "Depolar", @@ -3017,6 +3074,7 @@ "admin.auths.attribute_mail": "E-posta Özelliği", "admin.auths.attribute_ssh_public_key": "Açık SSH Anahtarı Özelliği", "admin.auths.attribute_avatar": "Avatar Özelliği", + "admin.auths.ssh_keys_are_verified": "LDAP'taki SSH anahtarları doğrulanmış kabul ediliyor", "admin.auths.attributes_in_bind": "Bağlı DN tabanındaki özellikleri çek", "admin.auths.allow_deactivate_all": "Boş bir arama sonucunun tüm kullanıcıları devre dışı bırakmasına izin ver", "admin.auths.use_paged_search": "Sayfalı Aramayı Kullan", @@ -3150,6 +3208,7 @@ "admin.config.db_name": "İsim", "admin.config.db_user": "Kullanıcı adı", "admin.config.db_schema": "Şema", + "admin.config.db_ssl_mode": "SSL", "admin.config.db_path": "Yol", "admin.config.service_config": "Servis Yapılandırması", "admin.config.register_email_confirm": "Kayıt Olmak İçin E-posta Onayı Gereksin", @@ -3227,8 +3286,6 @@ "admin.config.git_gc_args": "GC Argümanları", "admin.config.git_migrate_timeout": "Göç İşlemi Zaman Aşımı", "admin.config.git_mirror_timeout": "Yansı Güncelleme Zaman Aşımı", - "admin.config.git_clone_timeout": 
"Klonlama İşlemi Zaman Aşımı", - "admin.config.git_pull_timeout": "Çekme İşlemi Zaman Aşımı", "admin.config.git_gc_timeout": "GC İşlemi Zaman Aşımı", "admin.config.log_config": "Log Yapılandırması", "admin.config.logger_name_fmt": "Günlükçü: %s", @@ -3433,6 +3490,7 @@ "packages.conda.registry": "Bu kütüğü .condarc dosyasında bir Conda deposu olarak ayarlayın:", "packages.conda.install": "Conda ile paket kurmak için aşağıdaki komutu çalıştırın:", "packages.container.details.type": "Görüntü Türü", + "packages.container.details.platform": "Platform", "packages.container.pull": "Görüntüyü komut satırını kullanarak çekin:", "packages.container.images": "Görüntüler", "packages.container.digest": "Özet", @@ -3635,6 +3693,7 @@ "actions.workflow.has_workflow_dispatch": "Bu iş akışının workflow_dispatch olay tetikleyicisi var.", "actions.workflow.has_no_workflow_dispatch": "'%s' iş akışının workflow_dispatch olay tetikleyicisi yok.", "actions.need_approval_desc": "Değişiklik isteği çatalında iş akışı çalıştırmak için onay gerekiyor.", + "actions.approve_all_success": "Tüm iş akışı çalıştırmaları başarıyla onaylandı.", "actions.variables": "Değişkenler", "actions.variables.management": "Değişken Yönetimi", "actions.variables.creation": "Değişken Ekle", @@ -3652,12 +3711,21 @@ "actions.variables.update.success": "Değişken düzenlendi.", "actions.logs.always_auto_scroll": "Günlükleri her zaman otomatik kaydır", "actions.logs.always_expand_running": "Çalıştırma günlüklerini her zaman genişlet", + "actions.general": "Genel", + "actions.general.enable_actions": "Eylemleri Etkinleştir", + "actions.general.collaborative_owners_management": "Ortak Sahiplerin Yönetimi", + "actions.general.collaborative_owners_management_help": "Ortak sahip, özel deposu bu deponun eylemlerine ve iş akışlarına erişimi olan bir kullanıcı veya kuruluştur.", + "actions.general.add_collaborative_owner": "Ortak Sahip Ekle", + "actions.general.collaborative_owner_not_exist": "Ortak sahip mevcut değil.", + 
"actions.general.remove_collaborative_owner": "Ortak Sahip Kaldır", + "actions.general.remove_collaborative_owner_desc": "Ortak sahip kaldırıldığında, bu sahibin depoları artık bu depodaki eylemlere erişemez. Devam etmek istiyor musunuz?", "projects.deleted.display_name": "Silinmiş Proje", "projects.type-1.display_name": "Kişisel Proje", "projects.type-2.display_name": "Depo Projesi", "projects.type-3.display_name": "Organizasyon Projesi", "projects.enter_fullscreen": "Tam Ekran", "projects.exit_fullscreen": "Tam Ekrandan Çık", + "git.filemode.changed_filemode": "%[1]s → %[2]s", "git.filemode.directory": "Dizin", "git.filemode.normal_file": "Normal dosya", "git.filemode.executable_file": "Çalıştırılabilir dosya", diff --git a/options/locale/locale_zh-CN.json b/options/locale/locale_zh-CN.json index 2d031d2d58..661096d165 100644 --- a/options/locale/locale_zh-CN.json +++ b/options/locale/locale_zh-CN.json @@ -786,7 +786,7 @@ "settings.gpg_token": "令牌", "settings.gpg_token_help": "您可以使用以下方式生成签名:", "settings.gpg_token_signature": "GPG 增强签名", - "settings.key_signature_gpg_placeholder": "以 '-----BEGIN PGP PUBLIC KEY BLOCK-----' 开头", + "settings.key_signature_gpg_placeholder": "以 '-----BEGIN PGP SIGNATURE-----' 开头", "settings.verify_gpg_key_success": "GPG 密钥「%s」已验证。", "settings.ssh_key_verified": "已验证的密钥", "settings.ssh_key_verified_long": "密钥已通过令牌验证,可用于验证与该用户任何已激活邮箱地址匹配的提交。", @@ -977,6 +977,7 @@ "repo.fork.blocked_user": "无法克隆仓库,因为您被仓库所有者屏蔽。", "repo.use_template": "使用此模板", "repo.open_with_editor": "用 %s 打开", + "repo.download_directory_as": "下载目录为 %s", "repo.download_zip": "下载 ZIP", "repo.download_tar": "下载 TAR.GZ", "repo.download_bundle": "下载 BUNDLE", @@ -1370,7 +1371,7 @@ "repo.projects.column.new": "创建列", "repo.projects.column.set_default": "设为默认", "repo.projects.column.set_default_desc": "设置此列为未分类问题和合并请求的默认值", - "repo.projects.column.default_column_hint": "添加到此项目的新议题将被添加到此列", + "repo.projects.column.default_column_hint": "添加到此项目的新工单将被添加到此列", 
"repo.projects.column.delete": "删除列", "repo.projects.column.deletion_desc": "删除项目列会将所有相关问题移至默认列。是否继续?", "repo.projects.column.color": "颜色", @@ -1489,6 +1490,7 @@ "repo.issues.filter_sort.feweststars": "点赞由少到多", "repo.issues.filter_sort.mostforks": "派生由多到少", "repo.issues.filter_sort.fewestforks": "派生由少到多", + "repo.issues.quick_goto": "前往工单", "repo.issues.action_open": "开启", "repo.issues.action_close": "关闭", "repo.issues.action_label": "标签", @@ -1701,6 +1703,7 @@ "repo.issues.review.content.empty": "您需要留下一个注释,表明需要的更改。", "repo.issues.review.reject": "请求变更 %s", "repo.issues.review.wait": "已请求 %s 评审", + "repo.issues.review.codeowners_rules": "CODEOWNERS 规则", "repo.issues.review.add_review_request": "于 %[2]s 请求 %[1]s 评审", "repo.issues.review.remove_review_request": "取消对 %s 的评审请求 %s", "repo.issues.review.remove_review_request_self": "拒绝评审 %s", @@ -1736,8 +1739,11 @@ "repo.issues.reference_link": "引用:%s", "repo.compare.compare_base": "基准分支", "repo.compare.compare_head": "比较", + "repo.compare.title": "对比变更", + "repo.compare.description": "选择两个分支或标签以查看变更或发起一个新的合并请求。", "repo.pulls.desc": "启用合并请求和代码评审。", "repo.pulls.new": "创建合并请求", + "repo.pulls.new.description": "将此对比中的变更与其他版本一起进行讨论和评审。", "repo.pulls.new.blocked_user": "无法创建合并请求,因为您已被仓库所有者屏蔽。", "repo.pulls.new.must_collaborator": "您必须是仓库的协作者才能创建合并请求。", "repo.pulls.new.already_existed": "这些分支之间的合并请求已存在", @@ -1747,7 +1753,6 @@ "repo.pulls.allow_edits_from_maintainers": "允许维护者编辑", "repo.pulls.allow_edits_from_maintainers_desc": "对基础分支有写入权限的用户也可以推送到此分支", "repo.pulls.allow_edits_from_maintainers_err": "更新失败", - "repo.pulls.compare_changes_desc": "选择合并的目标分支和源分支。", "repo.pulls.has_viewed_file": "已查看", "repo.pulls.has_changed_since_last_review": "自您上次审核以来已更改", "repo.pulls.viewed_files_label": "%[1]d / %[2]d 文件已查看", @@ -1845,7 +1850,8 @@ "repo.pulls.status_checking": "一些检查仍在等待运行", "repo.pulls.status_checks_success": "所有检查均成功", "repo.pulls.status_checks_warning": "一些检查报告了警告", - "repo.pulls.status_checks_failure": "一些检查失败了", + 
"repo.pulls.status_checks_failure_required": "一些必要检查失败", + "repo.pulls.status_checks_failure_optional": "一些可选检查失败", "repo.pulls.status_checks_error": "一些检查报告了错误", "repo.pulls.status_checks_requested": "必须", "repo.pulls.status_checks_details": "详情", @@ -2513,7 +2519,7 @@ "repo.diff.browse_source": "浏览代码", "repo.diff.parent": "父节点", "repo.diff.commit": "当前提交", - "repo.diff.git-notes": "Notes", + "repo.diff.git-notes": "备注", "repo.diff.data_not_available": "比较内容不可用", "repo.diff.options_button": "Diff 选项", "repo.diff.download_patch": "下载 Patch 文件", @@ -2540,7 +2546,7 @@ "repo.diff.too_many_files": "某些文件未显示,因为此 diff 中更改的文件太多", "repo.diff.show_more": "显示更多", "repo.diff.load": "加载差异", - "repo.diff.generated": "自动生成的", + "repo.diff.generated": "自动生成", "repo.diff.vendored": "vendored", "repo.diff.comment.add_line_comment": "添加行内评论", "repo.diff.comment.placeholder": "留下评论", @@ -3067,6 +3073,7 @@ "admin.auths.attribute_mail": "电子邮箱属性", "admin.auths.attribute_ssh_public_key": "SSH公钥属性", "admin.auths.attribute_avatar": "头像属性", + "admin.auths.ssh_keys_are_verified": "LDAP 中的 SSH 密钥被视为已验证", "admin.auths.attributes_in_bind": "从 Bind DN 中拉取属性信息", "admin.auths.allow_deactivate_all": "允许在搜索结果为空时停用所有用户", "admin.auths.use_paged_search": "使用分页搜索", @@ -3239,7 +3246,7 @@ "admin.config.mailer_sendmail_path": "Sendmail 路径", "admin.config.mailer_sendmail_args": "Sendmail 的额外参数", "admin.config.mailer_sendmail_timeout": "Sendmail 超时", - "admin.config.mailer_use_dummy": "Dummy", + "admin.config.mailer_use_dummy": "模拟", "admin.config.test_email_placeholder": "电子邮箱(例如,test@example.com)", "admin.config.send_test_mail": "发送测试邮件", "admin.config.send_test_mail_submit": "发送", @@ -3278,8 +3285,6 @@ "admin.config.git_gc_args": "GC 参数", "admin.config.git_migrate_timeout": "迁移操作超时", "admin.config.git_mirror_timeout": "镜像更新操作超时", - "admin.config.git_clone_timeout": "克隆操作超时", - "admin.config.git_pull_timeout": "拉取操作超时", "admin.config.git_gc_timeout": "GC 操作超时", "admin.config.log_config": "日志配置", 
"admin.config.logger_name_fmt": "%s 日志", @@ -3488,7 +3493,7 @@ "packages.container.pull": "从命令行拉取镜像:", "packages.container.images": "镜像", "packages.container.digest": "摘要", - "packages.container.multi_arch": "OS / Arch", + "packages.container.multi_arch": "OS / 架构", "packages.container.layers": "镜像层", "packages.container.labels": "标签", "packages.container.labels.key": "键", diff --git a/package.json b/package.json index 7954e15046..acac860a39 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "type": "module", - "packageManager": "pnpm@10.26.0", + "packageManager": "pnpm@10.28.1", "engines": { "node": ">= 22.6.0", "pnpm": ">= 10.0.0" @@ -8,20 +8,20 @@ "dependencies": { "@citation-js/core": "0.7.21", "@citation-js/plugin-bibtex": "0.7.21", - "@citation-js/plugin-csl": "0.7.21", - "@citation-js/plugin-software-formats": "0.6.1", + "@citation-js/plugin-csl": "0.7.22", + "@citation-js/plugin-software-formats": "0.6.2", "@github/markdown-toolbar-element": "2.2.3", "@github/paste-markdown": "1.5.3", "@github/relative-time-element": "5.0.0", - "@github/text-expander-element": "2.9.2", + "@github/text-expander-element": "2.9.4", "@mcaptcha/vanilla-glue": "0.1.0-alpha-3", - "@primer/octicons": "19.21.1", + "@primer/octicons": "19.21.2", "@resvg/resvg-wasm": "2.6.2", "@silverwind/vue3-calendar-heatmap": "2.1.1", "@techknowlogick/license-checker-webpack-plugin": "0.3.0", "add-asset-webpack-plugin": "3.1.1", "ansi_up": "6.0.6", - "asciinema-player": "3.13.5", + "asciinema-player": "3.14.0", "chart.js": "4.5.1", "chartjs-adapter-dayjs-4": "1.0.4", "chartjs-plugin-zoom": "2.2.0", @@ -32,18 +32,19 @@ "dayjs": "1.11.19", "dropzone": "6.0.0-beta.2", "easymde": "2.20.0", - "esbuild-loader": "4.4.0", + "esbuild-loader": "4.4.2", "htmx.org": "2.0.8", "idiomorph": "0.7.4", - "jquery": "3.7.1", + "jquery": "4.0.0", + "js-yaml": "4.1.1", "katex": "0.16.27", "mermaid": "11.12.2", - "mini-css-extract-plugin": "2.9.4", + "mini-css-extract-plugin": "2.10.0", "monaco-editor": 
"0.55.1", "monaco-editor-webpack-plugin": "7.1.1", - "online-3d-viewer": "0.17.0", + "online-3d-viewer": "0.18.0", "pdfobject": "2.3.1", - "perfect-debounce": "2.0.0", + "perfect-debounce": "2.1.0", "postcss": "8.5.6", "postcss-loader": "8.2.0", "sortablejs": "1.15.6", @@ -56,65 +57,67 @@ "tributejs": "5.1.3", "uint8-to-base64": "0.2.1", "vanilla-colorful": "0.7.2", - "vue": "3.5.25", + "vue": "3.5.27", "vue-bar-graph": "2.2.0", "vue-chartjs": "5.3.3", "vue-loader": "17.4.2", - "webpack": "5.104.0", + "webpack": "5.104.1", "webpack-cli": "6.0.1", "wrap-ansi": "9.0.2" }, "devDependencies": { - "@eslint-community/eslint-plugin-eslint-comments": "4.5.0", + "@eslint-community/eslint-plugin-eslint-comments": "4.6.0", "@eslint/json": "0.14.0", - "@playwright/test": "1.57.0", - "@stylistic/eslint-plugin": "5.6.1", - "@stylistic/stylelint-plugin": "4.0.0", + "@playwright/test": "1.58.0", + "@stylistic/eslint-plugin": "5.7.1", + "@stylistic/stylelint-plugin": "5.0.1", "@types/codemirror": "5.60.17", "@types/dropzone": "5.7.9", "@types/jquery": "3.5.33", - "@types/katex": "0.16.7", + "@types/js-yaml": "4.0.9", + "@types/katex": "0.16.8", "@types/pdfobject": "2.2.5", "@types/sortablejs": "1.15.9", "@types/swagger-ui-dist": "3.30.6", "@types/throttle-debounce": "5.0.2", "@types/tinycolor2": "1.4.6", "@types/toastify-js": "1.12.4", - "@typescript-eslint/parser": "8.50.0", + "@typescript-eslint/parser": "8.53.1", "@vitejs/plugin-vue": "6.0.3", - "@vitest/eslint-plugin": "1.5.2", + "@vitest/eslint-plugin": "1.6.6", "eslint": "9.39.2", "eslint-import-resolver-typescript": "4.4.4", "eslint-plugin-array-func": "5.1.0", "eslint-plugin-github": "6.0.0", "eslint-plugin-import-x": "4.16.1", - "eslint-plugin-playwright": "2.4.0", - "eslint-plugin-regexp": "2.10.0", + "eslint-plugin-playwright": "2.5.0", + "eslint-plugin-regexp": "3.0.0", "eslint-plugin-sonarjs": "3.0.5", "eslint-plugin-unicorn": "62.0.0", - "eslint-plugin-vue": "10.6.2", + "eslint-plugin-vue": "10.7.0", 
"eslint-plugin-vue-scoped-css": "2.12.0", "eslint-plugin-wc": "3.0.2", - "globals": "16.5.0", - "happy-dom": "20.0.11", + "globals": "17.1.0", + "happy-dom": "20.3.7", "jiti": "2.6.1", + "knip": "5.82.1", "markdownlint-cli": "0.47.0", - "material-icon-theme": "5.29.0", + "material-icon-theme": "5.31.0", "nolyfill": "1.0.44", - "postcss-html": "1.8.0", + "postcss-html": "1.8.1", "spectral-cli-bundle": "1.0.3", - "stylelint": "16.26.1", - "stylelint-config-recommended": "17.0.0", + "stylelint": "17.0.0", + "stylelint-config-recommended": "18.0.0", "stylelint-declaration-block-no-ignored-properties": "2.8.0", "stylelint-declaration-strict-value": "1.10.11", - "stylelint-value-no-unknown-custom-properties": "6.0.1", + "stylelint-value-no-unknown-custom-properties": "6.1.1", "svgo": "4.0.0", "typescript": "5.9.3", - "typescript-eslint": "8.50.0", - "updates": "17.0.7", - "vite-string-plugin": "1.4.9", - "vitest": "4.0.16", - "vue-tsc": "3.1.8" + "typescript-eslint": "8.53.1", + "updates": "17.0.8", + "vite-string-plugin": "1.5.0", + "vitest": "4.0.18", + "vue-tsc": "3.2.3" }, "browserslist": [ "defaults" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 82d899b5fd..4e39cff537 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -33,11 +33,11 @@ importers: specifier: 0.7.21 version: 0.7.21(@citation-js/core@0.7.21) '@citation-js/plugin-csl': - specifier: 0.7.21 - version: 0.7.21(@citation-js/core@0.7.21) + specifier: 0.7.22 + version: 0.7.22(@citation-js/core@0.7.21) '@citation-js/plugin-software-formats': - specifier: 0.6.1 - version: 0.6.1 + specifier: 0.6.2 + version: 0.6.2 '@github/markdown-toolbar-element': specifier: 2.2.3 version: 2.2.3 @@ -48,32 +48,32 @@ importers: specifier: 5.0.0 version: 5.0.0 '@github/text-expander-element': - specifier: 2.9.2 - version: 2.9.2 + specifier: 2.9.4 + version: 2.9.4 '@mcaptcha/vanilla-glue': specifier: 0.1.0-alpha-3 version: 0.1.0-alpha-3 '@primer/octicons': - specifier: 19.21.1 - version: 19.21.1 + specifier: 19.21.2 + version: 
19.21.2 '@resvg/resvg-wasm': specifier: 2.6.2 version: 2.6.2 '@silverwind/vue3-calendar-heatmap': specifier: 2.1.1 - version: 2.1.1(tippy.js@6.3.7)(vue@3.5.25(typescript@5.9.3)) + version: 2.1.1(tippy.js@6.3.7)(vue@3.5.27(typescript@5.9.3)) '@techknowlogick/license-checker-webpack-plugin': specifier: 0.3.0 - version: 0.3.0(webpack@5.104.0) + version: 0.3.0(webpack@5.104.1) add-asset-webpack-plugin: specifier: 3.1.1 - version: 3.1.1(webpack@5.104.0) + version: 3.1.1(webpack@5.104.1) ansi_up: specifier: 6.0.6 version: 6.0.6 asciinema-player: - specifier: 3.13.5 - version: 3.13.5 + specifier: 3.14.0 + version: 3.14.0 chart.js: specifier: 4.5.1 version: 4.5.1 @@ -94,7 +94,7 @@ importers: version: 1.6.2 css-loader: specifier: 7.1.2 - version: 7.1.2(webpack@5.104.0) + version: 7.1.2(webpack@5.104.1) dayjs: specifier: 1.11.19 version: 1.11.19 @@ -105,8 +105,8 @@ importers: specifier: 2.20.0 version: 2.20.0 esbuild-loader: - specifier: 4.4.0 - version: 4.4.0(webpack@5.104.0) + specifier: 4.4.2 + version: 4.4.2(webpack@5.104.1) htmx.org: specifier: 2.0.8 version: 2.0.8 @@ -114,8 +114,11 @@ importers: specifier: 0.7.4 version: 0.7.4 jquery: - specifier: 3.7.1 - version: 3.7.1 + specifier: 4.0.0 + version: 4.0.0 + js-yaml: + specifier: 4.1.1 + version: 4.1.1 katex: specifier: 0.16.27 version: 0.16.27 @@ -123,29 +126,29 @@ importers: specifier: 11.12.2 version: 11.12.2 mini-css-extract-plugin: - specifier: 2.9.4 - version: 2.9.4(webpack@5.104.0) + specifier: 2.10.0 + version: 2.10.0(webpack@5.104.1) monaco-editor: specifier: 0.55.1 version: 0.55.1 monaco-editor-webpack-plugin: specifier: 7.1.1 - version: 7.1.1(monaco-editor@0.55.1)(webpack@5.104.0) + version: 7.1.1(monaco-editor@0.55.1)(webpack@5.104.1) online-3d-viewer: - specifier: 0.17.0 - version: 0.17.0 + specifier: 0.18.0 + version: 0.18.0 pdfobject: specifier: 2.3.1 version: 2.3.1 perfect-debounce: - specifier: 2.0.0 - version: 2.0.0 + specifier: 2.1.0 + version: 2.1.0 postcss: specifier: 8.5.6 version: 8.5.6 
postcss-loader: specifier: 8.2.0 - version: 8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.0) + version: 8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.1) sortablejs: specifier: 1.15.6 version: 1.15.6 @@ -177,42 +180,42 @@ importers: specifier: 0.7.2 version: 0.7.2 vue: - specifier: 3.5.25 - version: 3.5.25(typescript@5.9.3) + specifier: 3.5.27 + version: 3.5.27(typescript@5.9.3) vue-bar-graph: specifier: 2.2.0 version: 2.2.0(typescript@5.9.3) vue-chartjs: specifier: 5.3.3 - version: 5.3.3(chart.js@4.5.1)(vue@3.5.25(typescript@5.9.3)) + version: 5.3.3(chart.js@4.5.1)(vue@3.5.27(typescript@5.9.3)) vue-loader: specifier: 17.4.2 - version: 17.4.2(vue@3.5.25(typescript@5.9.3))(webpack@5.104.0) + version: 17.4.2(vue@3.5.27(typescript@5.9.3))(webpack@5.104.1) webpack: - specifier: 5.104.0 - version: 5.104.0(webpack-cli@6.0.1) + specifier: 5.104.1 + version: 5.104.1(webpack-cli@6.0.1) webpack-cli: specifier: 6.0.1 - version: 6.0.1(webpack@5.104.0) + version: 6.0.1(webpack@5.104.1) wrap-ansi: specifier: 9.0.2 version: 9.0.2 devDependencies: '@eslint-community/eslint-plugin-eslint-comments': - specifier: 4.5.0 - version: 4.5.0(eslint@9.39.2(jiti@2.6.1)) + specifier: 4.6.0 + version: 4.6.0(eslint@9.39.2(jiti@2.6.1)) '@eslint/json': specifier: 0.14.0 version: 0.14.0 '@playwright/test': - specifier: 1.57.0 - version: 1.57.0 + specifier: 1.58.0 + version: 1.58.0 '@stylistic/eslint-plugin': - specifier: 5.6.1 - version: 5.6.1(eslint@9.39.2(jiti@2.6.1)) + specifier: 5.7.1 + version: 5.7.1(eslint@9.39.2(jiti@2.6.1)) '@stylistic/stylelint-plugin': - specifier: 4.0.0 - version: 4.0.0(stylelint@16.26.1(typescript@5.9.3)) + specifier: 5.0.1 + version: 5.0.1(stylelint@17.0.0(typescript@5.9.3)) '@types/codemirror': specifier: 5.60.17 version: 5.60.17 @@ -222,9 +225,12 @@ importers: '@types/jquery': specifier: 3.5.33 version: 3.5.33 + '@types/js-yaml': + specifier: 4.0.9 + version: 4.0.9 '@types/katex': - specifier: 0.16.7 - version: 0.16.7 + specifier: 0.16.8 + version: 
0.16.8 '@types/pdfobject': specifier: 2.2.5 version: 2.2.5 @@ -244,20 +250,20 @@ importers: specifier: 1.12.4 version: 1.12.4 '@typescript-eslint/parser': - specifier: 8.50.0 - version: 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + specifier: 8.53.1 + version: 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) '@vitejs/plugin-vue': specifier: 6.0.3 - version: 6.0.3(vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))(vue@3.5.25(typescript@5.9.3)) + version: 6.0.3(vite@7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))(vue@3.5.27(typescript@5.9.3)) '@vitest/eslint-plugin': - specifier: 1.5.2 - version: 1.5.2(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2)) + specifier: 1.6.6 + version: 1.6.6(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.18(@types/node@25.0.10)(happy-dom@20.3.7)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)) eslint: specifier: 9.39.2 version: 9.39.2(jiti@2.6.1) eslint-import-resolver-typescript: specifier: 4.4.4 - version: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1)) + version: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-array-func: specifier: 5.1.0 version: 5.1.0(eslint@9.39.2(jiti@2.6.1)) @@ -266,13 +272,13 @@ importers: version: 6.0.0(@types/eslint@9.6.1)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-import-x: specifier: 4.16.1 - version: 
4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)) + version: 4.16.1(@typescript-eslint/utils@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-playwright: - specifier: 2.4.0 - version: 2.4.0(eslint@9.39.2(jiti@2.6.1)) + specifier: 2.5.0 + version: 2.5.0(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-regexp: - specifier: 2.10.0 - version: 2.10.0(eslint@9.39.2(jiti@2.6.1)) + specifier: 3.0.0 + version: 3.0.0(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-sonarjs: specifier: 3.0.5 version: 3.0.5(eslint@9.39.2(jiti@2.6.1)) @@ -280,8 +286,8 @@ importers: specifier: 62.0.0 version: 62.0.0(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-vue: - specifier: 10.6.2 - version: 10.6.2(@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@2.6.1)))(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1))) + specifier: 10.7.0 + version: 10.7.0(@stylistic/eslint-plugin@5.7.1(eslint@9.39.2(jiti@2.6.1)))(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1))) eslint-plugin-vue-scoped-css: specifier: 2.12.0 version: 2.12.0(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1))) @@ -289,44 +295,47 @@ importers: specifier: 3.0.2 version: 3.0.2(eslint@9.39.2(jiti@2.6.1)) globals: - specifier: 16.5.0 - version: 16.5.0 + specifier: 17.1.0 + version: 17.1.0 happy-dom: - specifier: 20.0.11 - version: 20.0.11 + specifier: 20.3.7 + version: 20.3.7 jiti: specifier: 2.6.1 version: 2.6.1 + knip: + specifier: 5.82.1 + version: 5.82.1(@types/node@25.0.10)(typescript@5.9.3) markdownlint-cli: specifier: 0.47.0 version: 0.47.0 material-icon-theme: - specifier: 5.29.0 - version: 5.29.0 + specifier: 5.31.0 + version: 5.31.0 
nolyfill: specifier: 1.0.44 version: 1.0.44 postcss-html: - specifier: 1.8.0 - version: 1.8.0 + specifier: 1.8.1 + version: 1.8.1 spectral-cli-bundle: specifier: 1.0.3 version: 1.0.3 stylelint: - specifier: 16.26.1 - version: 16.26.1(typescript@5.9.3) - stylelint-config-recommended: specifier: 17.0.0 - version: 17.0.0(stylelint@16.26.1(typescript@5.9.3)) + version: 17.0.0(typescript@5.9.3) + stylelint-config-recommended: + specifier: 18.0.0 + version: 18.0.0(stylelint@17.0.0(typescript@5.9.3)) stylelint-declaration-block-no-ignored-properties: specifier: 2.8.0 - version: 2.8.0(stylelint@16.26.1(typescript@5.9.3)) + version: 2.8.0(stylelint@17.0.0(typescript@5.9.3)) stylelint-declaration-strict-value: specifier: 1.10.11 - version: 1.10.11(stylelint@16.26.1(typescript@5.9.3)) + version: 1.10.11(stylelint@17.0.0(typescript@5.9.3)) stylelint-value-no-unknown-custom-properties: - specifier: 6.0.1 - version: 6.0.1(stylelint@16.26.1(typescript@5.9.3)) + specifier: 6.1.1 + version: 6.1.1(stylelint@17.0.0(typescript@5.9.3)) svgo: specifier: 4.0.0 version: 4.0.0 @@ -334,20 +343,20 @@ importers: specifier: 5.9.3 version: 5.9.3 typescript-eslint: - specifier: 8.50.0 - version: 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + specifier: 8.53.1 + version: 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) updates: - specifier: 17.0.7 - version: 17.0.7 + specifier: 17.0.8 + version: 17.0.8 vite-string-plugin: - specifier: 1.4.9 - version: 1.4.9 + specifier: 1.5.0 + version: 1.5.0(vite@7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)) vitest: - specifier: 4.0.16 - version: 4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2) + specifier: 4.0.18 + version: 4.0.18(@types/node@25.0.10)(happy-dom@20.3.7)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2) vue-tsc: - specifier: 3.1.8 - version: 3.1.8(typescript@5.9.3) + specifier: 3.2.3 + version: 3.2.3(typescript@5.9.3) packages: @@ -358,8 +367,8 @@ 
packages: '@antfu/install-pkg@1.1.0': resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==} - '@babel/code-frame@7.27.1': - resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} + '@babel/code-frame@7.28.6': + resolution: {integrity: sha512-JYgintcMjRiCvS8mMECzaEn+m3PfoQiyqukOMCCVQtoJGYJw8j/8LBJEiqkHLkfwCcs74E3pbAUFNg7d9VNJ+Q==} engines: {node: '>=6.9.0'} '@babel/helper-string-parser@7.27.1': @@ -370,27 +379,27 @@ packages: resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==} engines: {node: '>=6.9.0'} - '@babel/parser@7.28.5': - resolution: {integrity: sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==} + '@babel/parser@7.28.6': + resolution: {integrity: sha512-TeR9zWR18BvbfPmGbLampPMW+uW1NZnJlRuuHso8i87QZNq2JRF9i6RgxRqtEq+wQGsS19NNTWr2duhnE49mfQ==} engines: {node: '>=6.0.0'} hasBin: true - '@babel/runtime@7.28.4': - resolution: {integrity: sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==} + '@babel/runtime@7.28.6': + resolution: {integrity: sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==} engines: {node: '>=6.9.0'} - '@babel/types@7.28.5': - resolution: {integrity: sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==} + '@babel/types@7.28.6': + resolution: {integrity: sha512-0ZrskXVEHSWIqZM/sQZ4EV3jZJXRkio/WCxaqKZP1g//CEWEPSfeZFcms4XeKBCHU0ZKnIkdJeU/kF+eRp5lBg==} engines: {node: '>=6.9.0'} '@braintree/sanitize-url@7.1.1': resolution: {integrity: sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==} - '@cacheable/memory@2.0.6': - resolution: {integrity: 
sha512-7e8SScMocHxcAb8YhtkbMhGG+EKLRIficb1F5sjvhSYsWTZGxvg4KIDp8kgxnV2PUJ3ddPe6J9QESjKvBWRDkg==} + '@cacheable/memory@2.0.7': + resolution: {integrity: sha512-RbxnxAMf89Tp1dLhXMS7ceft/PGsDl1Ip7T20z5nZ+pwIAsQ1p2izPjVG69oCLv/jfQ7HDPHTWK0c9rcAWXN3A==} - '@cacheable/utils@2.3.2': - resolution: {integrity: sha512-8kGE2P+HjfY8FglaOiW+y8qxcaQAfAhVML+i66XJR3YX5FtyDqn6Txctr3K2FrbxLKixRRYYBWMbuGciOhYNDg==} + '@cacheable/utils@2.3.3': + resolution: {integrity: sha512-JsXDL70gQ+1Vc2W/KUFfkAJzgb4puKwwKehNLuB+HrNKWf91O736kGfxn4KujXCCSuh6mRRL4XEB0PkAFjWS0A==} '@chevrotain/cst-dts-gen@11.0.3': resolution: {integrity: sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==} @@ -425,399 +434,246 @@ packages: peerDependencies: '@citation-js/core': ^0.7.0 - '@citation-js/plugin-cff@0.6.1': - resolution: {integrity: sha512-tLjTgsfzNOdQWGn5mNc2NAaydHnlRucSERoyAXLN7u0BQBfp7j5zwdxCmxcQD/N7hH3fpDKMG+qDzbqpJuKyNA==} + '@citation-js/plugin-cff@0.6.2': + resolution: {integrity: sha512-jvERDFbtQQOBb9s+E8VbRIYsEIb2YEbcLH3yVDxXK0xqBGQDE5m8JZAYUkENy4FmbaD979l0+xJTWAsYN1pV/w==} engines: {node: '>=14.0.0'} - '@citation-js/plugin-csl@0.7.21': - resolution: {integrity: sha512-23ySPYCWDiU1JqhvqTYLn6+C11LkeJxTfyPKKXCK2wan9iF9ODvY7JYNl/OJgtwvu485dKfDpqqptWv6i71Meg==} + '@citation-js/plugin-csl@0.7.22': + resolution: {integrity: sha512-/rGdtbeP3nS4uZDdEbQUHT8PrUcIs0da2t+sWMKYXoOhXQqfw3oJJ7p4tUD+R8lptyIR5Eq20/DFk/kQDdLpYg==} engines: {node: '>=16.0.0'} peerDependencies: '@citation-js/core': ^0.7.0 - '@citation-js/plugin-github@0.6.1': - resolution: {integrity: sha512-1ZeSgQ5AoYsa8n2acVooUeRk76oA8rLszYNBjzj5z6MPa11BZlQJ9O+Gy4tHjlImvsENLbLPx5f8/V1VHXaCfQ==} + '@citation-js/plugin-github@0.6.2': + resolution: {integrity: sha512-NKq/1Ja060o4II1Z4p1+utwpvMsx+XIWdNiFvnJDfR2Z9E1xGETjByPpdobGBsteUTpJPEe9OVfF8Dee/Q7zLQ==} engines: {node: '>=14.0.0'} - '@citation-js/plugin-npm@0.6.1': - resolution: {integrity: 
sha512-rojJA+l/p2KBpDoY+8n0YfNyQO1Aw03fQR5BN+gXD1LNAP1V+8wqvdPsaHnzPsrhrd4ZXDR7ch/Nk0yynPkJ3Q==} + '@citation-js/plugin-npm@0.6.2': + resolution: {integrity: sha512-mbQg/N9HM+gOqHJCdDZEElSW+h/oM94snKCl3llXuZ4MEH3tHraElS6CYRW/vW7s8KUTTHhgE62Q6ua5aRml8g==} engines: {node: '>=14.0.0'} - '@citation-js/plugin-software-formats@0.6.1': - resolution: {integrity: sha512-BDF9rqi56K0hoTgYTVANCFVRSbWKC9V06Uap7oa8SjqCTgnHJAy8t/F3NxsyYPPG+zmRsLW9VNbcIsJOl0eu/w==} + '@citation-js/plugin-software-formats@0.6.2': + resolution: {integrity: sha512-x1IG0LBKglBU6SuiiKfvOtn7g7o7s+YhQhB44o7zrFaKEO8jkyQ5qMKtM5VFdCBL7teLfzZLjpjNkdJXtZ6XZw==} engines: {node: '>=14.0.0'} - '@citation-js/plugin-yaml@0.6.1': - resolution: {integrity: sha512-XEVVks1cJTqRbjy+nmthfw/puR6NwRB3fyJWi1tX13UYXlkhP/h45nsv4zjgLLGekdcMHQvhad9MAYunOftGKA==} + '@citation-js/plugin-yaml@0.6.2': + resolution: {integrity: sha512-qw53Uy2fDekKAzNhv8pkAWpIccIxyKQ3nQuClMgzDPdyeWg34ElIs4bDub9ZZup15fy+X//2gP8k12RJqNo4lA==} engines: {node: '>=14.0.0'} - '@citation-js/plugin-zenodo@0.6.1': - resolution: {integrity: sha512-bUybENHoZqJ6gheUqgkumjI+mu+fA2bg6VoniDmZTb7Qng9iEpi+IWEAR26/vBE0gK0EWrJjczyDW3HCwrhvVw==} + '@citation-js/plugin-zenodo@0.6.2': + resolution: {integrity: sha512-3XQOO3u4WXY/7AWZyQ+9SuBzS8bYTlJ+NF1uCgrZO64g36nK5iIc5YV9cBl2TL2QhHF6S36nvAsXsj5fX9FeHw==} engines: {node: '>=14.0.0'} - '@csstools/css-parser-algorithms@3.0.5': - resolution: {integrity: sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==} - engines: {node: '>=18'} + '@csstools/css-parser-algorithms@4.0.0': + resolution: {integrity: sha512-+B87qS7fIG3L5h3qwJ/IFbjoVoOe/bpOdh9hAjXbvx0o8ImEmUsGXN0inFOnk2ChCFgqkkGFQ+TpM5rbhkKe4w==} + engines: {node: '>=20.19.0'} peerDependencies: - '@csstools/css-tokenizer': ^3.0.4 + '@csstools/css-tokenizer': ^4.0.0 - '@csstools/css-syntax-patches-for-csstree@1.0.21': - resolution: {integrity: 
sha512-plP8N8zKfEZ26figX4Nvajx8DuzfuRpLTqglQ5d0chfnt35Qt3X+m6ASZ+rG0D0kxe/upDVNwSIVJP5n4FuNfw==} + '@csstools/css-syntax-patches-for-csstree@1.0.25': + resolution: {integrity: sha512-g0Kw9W3vjx5BEBAF8c5Fm2NcB/Fs8jJXh85aXqwEXiL+tqtOut07TWgyaGzAAfTM+gKckrrncyeGEZPcaRgm2Q==} engines: {node: '>=18'} - '@csstools/css-tokenizer@3.0.4': - resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} - engines: {node: '>=18'} + '@csstools/css-tokenizer@4.0.0': + resolution: {integrity: sha512-QxULHAm7cNu72w97JUNCBFODFaXpbDg+dP8b/oWFAZ2MTRppA3U00Y2L1HqaS4J6yBqxwa/Y3nMBaxVKbB/NsA==} + engines: {node: '>=20.19.0'} - '@csstools/media-query-list-parser@4.0.3': - resolution: {integrity: sha512-HAYH7d3TLRHDOUQK4mZKf9k9Ph/m8Akstg66ywKR4SFAigjs3yBiUeZtFxywiTm5moZMAp/5W/ZuFnNXXYLuuQ==} - engines: {node: '>=18'} + '@csstools/media-query-list-parser@5.0.0': + resolution: {integrity: sha512-T9lXmZOfnam3eMERPsszjY5NK0jX8RmThmmm99FZ8b7z8yMaFZWKwLWGZuTwdO3ddRY5fy13GmmEYZXB4I98Eg==} + engines: {node: '>=20.19.0'} peerDependencies: - '@csstools/css-parser-algorithms': ^3.0.5 - '@csstools/css-tokenizer': ^3.0.4 + '@csstools/css-parser-algorithms': ^4.0.0 + '@csstools/css-tokenizer': ^4.0.0 - '@csstools/selector-specificity@5.0.0': - resolution: {integrity: sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==} - engines: {node: '>=18'} + '@csstools/selector-resolve-nested@4.0.0': + resolution: {integrity: sha512-9vAPxmp+Dx3wQBIUwc1v7Mdisw1kbbaGqXUM8QLTgWg7SoPGYtXBsMXvsFs/0Bn5yoFhcktzxNZGNaUt0VjgjA==} + engines: {node: '>=20.19.0'} peerDependencies: - postcss-selector-parser: ^7.0.0 + postcss-selector-parser: ^7.1.1 + + '@csstools/selector-specificity@6.0.0': + resolution: {integrity: sha512-4sSgl78OtOXEX/2d++8A83zHNTgwCJMaR24FvsYL7Uf/VS8HZk9PTwR51elTbGqMuwH3szLvvOXEaVnqn0Z3zA==} + engines: {node: '>=20.19.0'} + peerDependencies: + postcss-selector-parser: ^7.1.1 '@discoveryjs/json-ext@0.6.3': 
resolution: {integrity: sha512-4B4OijXeVNOPZlYA2oEwWOTkzyltLao+xbotHQeqN++Rv27Y6s818+n2Qkp8q+Fxhn0t/5lA5X1Mxktud8eayQ==} engines: {node: '>=14.17.0'} - '@dual-bundle/import-meta-resolve@4.2.1': - resolution: {integrity: sha512-id+7YRUgoUX6CgV0DtuhirQWodeeA7Lf4i2x71JS/vtA5pRb/hIGWlw+G6MeXvsM+MXrz0VAydTGElX1rAfgPg==} + '@emnapi/core@1.8.1': + resolution: {integrity: sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==} - '@emnapi/core@1.7.1': - resolution: {integrity: sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg==} - - '@emnapi/runtime@1.7.1': - resolution: {integrity: sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==} + '@emnapi/runtime@1.8.1': + resolution: {integrity: sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==} '@emnapi/wasi-threads@1.1.0': resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} - '@esbuild/aix-ppc64@0.25.12': - resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} - engines: {node: '>=18'} - cpu: [ppc64] - os: [aix] - '@esbuild/aix-ppc64@0.27.2': resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==} engines: {node: '>=18'} cpu: [ppc64] os: [aix] - '@esbuild/android-arm64@0.25.12': - resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [android] - '@esbuild/android-arm64@0.27.2': resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==} engines: {node: '>=18'} cpu: [arm64] os: [android] - '@esbuild/android-arm@0.25.12': - resolution: {integrity: 
sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==} - engines: {node: '>=18'} - cpu: [arm] - os: [android] - '@esbuild/android-arm@0.27.2': resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==} engines: {node: '>=18'} cpu: [arm] os: [android] - '@esbuild/android-x64@0.25.12': - resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==} - engines: {node: '>=18'} - cpu: [x64] - os: [android] - '@esbuild/android-x64@0.27.2': resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==} engines: {node: '>=18'} cpu: [x64] os: [android] - '@esbuild/darwin-arm64@0.25.12': - resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [darwin] - '@esbuild/darwin-arm64@0.27.2': resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==} engines: {node: '>=18'} cpu: [arm64] os: [darwin] - '@esbuild/darwin-x64@0.25.12': - resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==} - engines: {node: '>=18'} - cpu: [x64] - os: [darwin] - '@esbuild/darwin-x64@0.27.2': resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==} engines: {node: '>=18'} cpu: [x64] os: [darwin] - '@esbuild/freebsd-arm64@0.25.12': - resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [freebsd] - '@esbuild/freebsd-arm64@0.27.2': resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==} engines: {node: '>=18'} 
cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-x64@0.25.12': - resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==} - engines: {node: '>=18'} - cpu: [x64] - os: [freebsd] - '@esbuild/freebsd-x64@0.27.2': resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==} engines: {node: '>=18'} cpu: [x64] os: [freebsd] - '@esbuild/linux-arm64@0.25.12': - resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==} - engines: {node: '>=18'} - cpu: [arm64] - os: [linux] - '@esbuild/linux-arm64@0.27.2': resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==} engines: {node: '>=18'} cpu: [arm64] os: [linux] - '@esbuild/linux-arm@0.25.12': - resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==} - engines: {node: '>=18'} - cpu: [arm] - os: [linux] - '@esbuild/linux-arm@0.27.2': resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==} engines: {node: '>=18'} cpu: [arm] os: [linux] - '@esbuild/linux-ia32@0.25.12': - resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==} - engines: {node: '>=18'} - cpu: [ia32] - os: [linux] - '@esbuild/linux-ia32@0.27.2': resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==} engines: {node: '>=18'} cpu: [ia32] os: [linux] - '@esbuild/linux-loong64@0.25.12': - resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==} - engines: {node: '>=18'} - cpu: [loong64] - os: [linux] - '@esbuild/linux-loong64@0.27.2': resolution: {integrity: 
sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==} engines: {node: '>=18'} cpu: [loong64] os: [linux] - '@esbuild/linux-mips64el@0.25.12': - resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==} - engines: {node: '>=18'} - cpu: [mips64el] - os: [linux] - '@esbuild/linux-mips64el@0.27.2': resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==} engines: {node: '>=18'} cpu: [mips64el] os: [linux] - '@esbuild/linux-ppc64@0.25.12': - resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==} - engines: {node: '>=18'} - cpu: [ppc64] - os: [linux] - '@esbuild/linux-ppc64@0.27.2': resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==} engines: {node: '>=18'} cpu: [ppc64] os: [linux] - '@esbuild/linux-riscv64@0.25.12': - resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==} - engines: {node: '>=18'} - cpu: [riscv64] - os: [linux] - '@esbuild/linux-riscv64@0.27.2': resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==} engines: {node: '>=18'} cpu: [riscv64] os: [linux] - '@esbuild/linux-s390x@0.25.12': - resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==} - engines: {node: '>=18'} - cpu: [s390x] - os: [linux] - '@esbuild/linux-s390x@0.27.2': resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==} engines: {node: '>=18'} cpu: [s390x] os: [linux] - '@esbuild/linux-x64@0.25.12': - resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==} - engines: 
{node: '>=18'} - cpu: [x64] - os: [linux] - '@esbuild/linux-x64@0.27.2': resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==} engines: {node: '>=18'} cpu: [x64] os: [linux] - '@esbuild/netbsd-arm64@0.25.12': - resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [netbsd] - '@esbuild/netbsd-arm64@0.27.2': resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==} engines: {node: '>=18'} cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-x64@0.25.12': - resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==} - engines: {node: '>=18'} - cpu: [x64] - os: [netbsd] - '@esbuild/netbsd-x64@0.27.2': resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==} engines: {node: '>=18'} cpu: [x64] os: [netbsd] - '@esbuild/openbsd-arm64@0.25.12': - resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==} - engines: {node: '>=18'} - cpu: [arm64] - os: [openbsd] - '@esbuild/openbsd-arm64@0.27.2': resolution: {integrity: sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-x64@0.25.12': - resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==} - engines: {node: '>=18'} - cpu: [x64] - os: [openbsd] - '@esbuild/openbsd-x64@0.27.2': resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==} engines: {node: '>=18'} cpu: [x64] os: [openbsd] - '@esbuild/openharmony-arm64@0.25.12': - resolution: {integrity: 
sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [openharmony] - '@esbuild/openharmony-arm64@0.27.2': resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==} engines: {node: '>=18'} cpu: [arm64] os: [openharmony] - '@esbuild/sunos-x64@0.25.12': - resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==} - engines: {node: '>=18'} - cpu: [x64] - os: [sunos] - '@esbuild/sunos-x64@0.27.2': resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==} engines: {node: '>=18'} cpu: [x64] os: [sunos] - '@esbuild/win32-arm64@0.25.12': - resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [win32] - '@esbuild/win32-arm64@0.27.2': resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==} engines: {node: '>=18'} cpu: [arm64] os: [win32] - '@esbuild/win32-ia32@0.25.12': - resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==} - engines: {node: '>=18'} - cpu: [ia32] - os: [win32] - '@esbuild/win32-ia32@0.27.2': resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==} engines: {node: '>=18'} cpu: [ia32] os: [win32] - '@esbuild/win32-x64@0.25.12': - resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==} - engines: {node: '>=18'} - cpu: [x64] - os: [win32] - '@esbuild/win32-x64@0.27.2': resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==} engines: {node: '>=18'} cpu: 
[x64] os: [win32] - '@eslint-community/eslint-plugin-eslint-comments@4.5.0': - resolution: {integrity: sha512-MAhuTKlr4y/CE3WYX26raZjy+I/kS2PLKSzvfmDCGrBLTFHOYwqROZdr4XwPgXwX3K9rjzMr4pSmUWGnzsUyMg==} + '@eslint-community/eslint-plugin-eslint-comments@4.6.0': + resolution: {integrity: sha512-2EX2bBQq1ez++xz2o9tEeEQkyvfieWgUFMH4rtJJri2q0Azvhja3hZGXsjPXs31R4fQkZDtWzNDDK2zQn5UE5g==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0 - '@eslint-community/eslint-utils@4.9.0': - resolution: {integrity: sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==} + '@eslint-community/eslint-utils@4.9.1': + resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 @@ -886,8 +742,8 @@ packages: '@github/relative-time-element@5.0.0': resolution: {integrity: sha512-L/2r0DNR/rMbmHWcsdmhtOiy2gESoGOhItNFD4zJ3nZfHl79Dx3N18Vfx/pYr2lruMOdk1cJZb4wEumm+Dxm1w==} - '@github/text-expander-element@2.9.2': - resolution: {integrity: sha512-XY8EUMqM4GAloNxXNA1Py1ny+engWwYntbgsnpstQN4piaTI9rIlfYldyd0nnPXhxjGCVqHPmP6yg17Q0/n9Vg==} + '@github/text-expander-element@2.9.4': + resolution: {integrity: sha512-+zxSlek2r0NrbFmRfymVtYhES9YU033acc/mouXUkN2bs8DaYScPucvBhwg/5d0hsEb2rIykKnkA/2xxWSqCTw==} '@humanfs/core@0.19.1': resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} @@ -939,11 +795,11 @@ packages: '@jridgewell/trace-mapping@0.3.31': resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} - '@keyv/bigmap@1.3.0': - resolution: {integrity: sha512-KT01GjzV6AQD5+IYrcpoYLkCu1Jod3nau1Z7EsEuViO3TZGRacSbO9MfHmbJ1WaOXFtWLxPVj169cn2WNKPkIg==} + '@keyv/bigmap@1.3.1': + resolution: {integrity: 
sha512-WbzE9sdmQtKy8vrNPa9BRnwZh5UF4s1KTmSK0KUVLo3eff5BlQNNWDnFOouNpKfPKDnms9xynJjsMYjMaT/aFQ==} engines: {node: '>= 18'} peerDependencies: - keyv: ^5.5.4 + keyv: ^5.6.0 '@keyv/serialize@1.1.1': resolution: {integrity: sha512-dXn3FZhPv0US+7dtJsIi2R+c7qWYiReoEh5zUntWCf4oSpMNib8FDhSoed6m3QyZdx5hK7iLFkYk3rNxwt8vTA==} @@ -963,6 +819,9 @@ packages: '@napi-rs/wasm-runtime@0.2.12': resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} + '@napi-rs/wasm-runtime@1.1.1': + resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -1038,20 +897,128 @@ packages: resolution: {integrity: sha512-3dsKlf4Ma7o+uxLIg5OI1Tgwfet2pE8WTbPjEGWvOe6CSjMtK0skJnnSVHaEVX4N4mYU81To0qDeZOPqjaUotg==} engines: {node: '>=12.4.0'} + '@oxc-resolver/binding-android-arm-eabi@11.16.4': + resolution: {integrity: sha512-6XUHilmj8D6Ggus+sTBp64x/DUQ7LgC/dvTDdUOt4iMQnDdSep6N1mnvVLIiG+qM5tRnNHravNzBJnUlYwRQoA==} + cpu: [arm] + os: [android] + + '@oxc-resolver/binding-android-arm64@11.16.4': + resolution: {integrity: sha512-5ODwd1F5mdkm6JIg1CNny9yxIrCzrkKpxmqas7Alw23vE0Ot8D4ykqNBW5Z/nIZkXVEo5VDmnm0sMBBIANcpeQ==} + cpu: [arm64] + os: [android] + + '@oxc-resolver/binding-darwin-arm64@11.16.4': + resolution: {integrity: sha512-egwvDK9DMU4Q8F4BG74/n4E22pQ0lT5ukOVB6VXkTj0iG2fnyoStHoFaBnmDseLNRA4r61Mxxz8k940CIaJMDg==} + cpu: [arm64] + os: [darwin] + + '@oxc-resolver/binding-darwin-x64@11.16.4': + resolution: {integrity: sha512-HMkODYrAG4HaFNCpaYzSQFkxeiz2wzl+smXwxeORIQVEo1WAgUrWbvYT/0RNJg/A8z2aGMGK5KWTUr2nX5GiMw==} + cpu: [x64] + os: [darwin] + + '@oxc-resolver/binding-freebsd-x64@11.16.4': + resolution: {integrity: sha512-mkcKhIdSlUqnndD928WAVVFMEr1D5EwHOBGHadypW0PkM0h4pn89ZacQvU7Qs/Z2qquzvbyw8m4Mq3jOYI+4Dw==} + 
cpu: [x64] + os: [freebsd] + + '@oxc-resolver/binding-linux-arm-gnueabihf@11.16.4': + resolution: {integrity: sha512-ZJvzbmXI/cILQVcJL9S2Fp7GLAIY4Yr6mpGb+k6LKLUSEq85yhG+rJ9eWCqgULVIf2BFps/NlmPTa7B7oj8jhQ==} + cpu: [arm] + os: [linux] + + '@oxc-resolver/binding-linux-arm-musleabihf@11.16.4': + resolution: {integrity: sha512-iZUB0W52uB10gBUDAi79eTnzqp1ralikCAjfq7CdokItwZUVJXclNYANnzXmtc0Xr0ox+YsDsG2jGcj875SatA==} + cpu: [arm] + os: [linux] + + '@oxc-resolver/binding-linux-arm64-gnu@11.16.4': + resolution: {integrity: sha512-qNQk0H6q1CnwS9cnvyjk9a+JN8BTbxK7K15Bb5hYfJcKTG1hfloQf6egndKauYOO0wu9ldCMPBrEP1FNIQEhaA==} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@oxc-resolver/binding-linux-arm64-musl@11.16.4': + resolution: {integrity: sha512-wEXSaEaYxGGoVSbw0i2etjDDWcqErKr8xSkTdwATP798efsZmodUAcLYJhN0Nd4W35Oq6qAvFGHpKwFrrhpTrA==} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@oxc-resolver/binding-linux-ppc64-gnu@11.16.4': + resolution: {integrity: sha512-CUFOlpb07DVOFLoYiaTfbSBRPIhNgwc/MtlYeg3p6GJJw+kEm/vzc9lohPSjzF2MLPB5hzsJdk+L/GjrTT3UPw==} + cpu: [ppc64] + os: [linux] + libc: [glibc] + + '@oxc-resolver/binding-linux-riscv64-gnu@11.16.4': + resolution: {integrity: sha512-d8It4AH8cN9ReK1hW6ZO4x3rMT0hB2LYH0RNidGogV9xtnjLRU+Y3MrCeClLyOSGCibmweJJAjnwB7AQ31GEhg==} + cpu: [riscv64] + os: [linux] + libc: [glibc] + + '@oxc-resolver/binding-linux-riscv64-musl@11.16.4': + resolution: {integrity: sha512-d09dOww9iKyEHSxuOQ/Iu2aYswl0j7ExBcyy14D6lJ5ijQSP9FXcJYJsJ3yvzboO/PDEFjvRuF41f8O1skiPVg==} + cpu: [riscv64] + os: [linux] + libc: [musl] + + '@oxc-resolver/binding-linux-s390x-gnu@11.16.4': + resolution: {integrity: sha512-lhjyGmUzTWHduZF3MkdUSEPMRIdExnhsqv8u1upX3A15epVn6YVwv4msFQPJl1x1wszkACPeDHGOtzHsITXGdw==} + cpu: [s390x] + os: [linux] + libc: [glibc] + + '@oxc-resolver/binding-linux-x64-gnu@11.16.4': + resolution: {integrity: sha512-ZtqqiI5rzlrYBm/IMMDIg3zvvVj4WO/90Dg/zX+iA8lWaLN7K5nroXb17MQ4WhI5RqlEAgrnYDXW+hok1D9Kaw==} + cpu: [x64] + os: [linux] + libc: [glibc] + + 
'@oxc-resolver/binding-linux-x64-musl@11.16.4': + resolution: {integrity: sha512-LM424h7aaKcMlqHnQWgTzO+GRNLyjcNnMpqm8SygEtFRVW693XS+XGXYvjORlmJtsyjo84ej1FMb3U2HE5eyjg==} + cpu: [x64] + os: [linux] + libc: [musl] + + '@oxc-resolver/binding-openharmony-arm64@11.16.4': + resolution: {integrity: sha512-8w8U6A5DDWTBv3OUxSD9fNk37liZuEC5jnAc9wQRv9DeYKAXvuUtBfT09aIZ58swaci0q1WS48/CoMVEO6jdCA==} + cpu: [arm64] + os: [openharmony] + + '@oxc-resolver/binding-wasm32-wasi@11.16.4': + resolution: {integrity: sha512-hnjb0mDVQOon6NdfNJ1EmNquonJUjoYkp7UyasjxVa4iiMcApziHP4czzzme6WZbp+vzakhVv2Yi5ACTon3Zlw==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + + '@oxc-resolver/binding-win32-arm64-msvc@11.16.4': + resolution: {integrity: sha512-+i0XtNfSP7cfnh1T8FMrMm4HxTeh0jxKP/VQCLWbjdUxaAQ4damho4gN9lF5dl0tZahtdszXLUboBFNloSJNOQ==} + cpu: [arm64] + os: [win32] + + '@oxc-resolver/binding-win32-ia32-msvc@11.16.4': + resolution: {integrity: sha512-ePW1islJrv3lPnef/iWwrjrSpRH8kLlftdKf2auQNWvYLx6F0xvcnv9d+r/upnVuttoQY9amLnWJf+JnCRksTw==} + cpu: [ia32] + os: [win32] + + '@oxc-resolver/binding-win32-x64-msvc@11.16.4': + resolution: {integrity: sha512-qnjQhjHI4TDL3hkidZyEmQRK43w2NHl6TP5Rnt/0XxYuLdEgx/1yzShhYidyqWzdnhGhSPTM/WVP2mK66XLegA==} + cpu: [x64] + os: [win32] + '@pkgr/core@0.2.9': resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@playwright/test@1.57.0': - resolution: {integrity: sha512-6TyEnHgd6SArQO8UO2OMTxshln3QMWBtPGrOCgs3wVEmQmwyuNtB10IZMfmYDE0riwNR1cu4q+pPcxMVtaG3TA==} + '@playwright/test@1.58.0': + resolution: {integrity: sha512-fWza+Lpbj6SkQKCrU6si4iu+fD2dD3gxNHFhUPxsfXBPhnv3rRSQVd0NtBUT9Z/RhF/boCBcuUaMUSTRTopjZg==} engines: {node: '>=18'} hasBin: true '@popperjs/core@2.11.8': resolution: {integrity: sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==} - '@primer/octicons@19.21.1': - resolution: {integrity: 
sha512-7tgtBkCNcg75YJnckinzvES+uxysYQCe+CHSEnzr3VYgxttzKRvfmrnVogl3aEuHCQP4xhiE9k2lFDhYwGtTzQ==} + '@primer/octicons@19.21.2': + resolution: {integrity: sha512-SRTcpAZtKYh2266VQpAqNkXAQpPvYWk6dxggkHDzwI95NrE5mEzJ7cfFt/bSVbqDffKQ+tm7TEdLHoSjEcggfQ==} '@resvg/resvg-wasm@2.6.2': resolution: {integrity: sha512-FqALmHI8D4o6lk/LRWDnhw95z5eO+eAa6ORjVg09YRR7BkcM6oPHU9uyC0gtQG5vpFLvgpeU4+zEAz2H8APHNw==} @@ -1060,113 +1027,141 @@ packages: '@rolldown/pluginutils@1.0.0-beta.53': resolution: {integrity: sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==} - '@rollup/rollup-android-arm-eabi@4.53.5': - resolution: {integrity: sha512-iDGS/h7D8t7tvZ1t6+WPK04KD0MwzLZrG0se1hzBjSi5fyxlsiggoJHwh18PCFNn7tG43OWb6pdZ6Y+rMlmyNQ==} + '@rollup/rollup-android-arm-eabi@4.56.0': + resolution: {integrity: sha512-LNKIPA5k8PF1+jAFomGe3qN3bbIgJe/IlpDBwuVjrDKrJhVWywgnJvflMt/zkbVNLFtF1+94SljYQS6e99klnw==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.53.5': - resolution: {integrity: sha512-wrSAViWvZHBMMlWk6EJhvg8/rjxzyEhEdgfMMjREHEq11EtJ6IP6yfcCH57YAEca2Oe3FNCE9DSTgU70EIGmVw==} + '@rollup/rollup-android-arm64@4.56.0': + resolution: {integrity: sha512-lfbVUbelYqXlYiU/HApNMJzT1E87UPGvzveGg2h0ktUNlOCxKlWuJ9jtfvs1sKHdwU4fzY7Pl8sAl49/XaEk6Q==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.53.5': - resolution: {integrity: sha512-S87zZPBmRO6u1YXQLwpveZm4JfPpAa6oHBX7/ghSiGH3rz/KDgAu1rKdGutV+WUI6tKDMbaBJomhnT30Y2t4VQ==} + '@rollup/rollup-darwin-arm64@4.56.0': + resolution: {integrity: sha512-EgxD1ocWfhoD6xSOeEEwyE7tDvwTgZc8Bss7wCWe+uc7wO8G34HHCUH+Q6cHqJubxIAnQzAsyUsClt0yFLu06w==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.53.5': - resolution: {integrity: sha512-YTbnsAaHo6VrAczISxgpTva8EkfQus0VPEVJCEaboHtZRIb6h6j0BNxRBOwnDciFTZLDPW5r+ZBmhL/+YpTZgA==} + '@rollup/rollup-darwin-x64@4.56.0': + resolution: {integrity: sha512-1vXe1vcMOssb/hOF8iv52A7feWW2xnu+c8BV4t1F//m9QVLTfNVpEdja5ia762j/UEJe2Z1jAmEqZAK42tVW3g==} cpu: [x64] os: 
[darwin] - '@rollup/rollup-freebsd-arm64@4.53.5': - resolution: {integrity: sha512-1T8eY2J8rKJWzaznV7zedfdhD1BqVs1iqILhmHDq/bqCUZsrMt+j8VCTHhP0vdfbHK3e1IQ7VYx3jlKqwlf+vw==} + '@rollup/rollup-freebsd-arm64@4.56.0': + resolution: {integrity: sha512-bof7fbIlvqsyv/DtaXSck4VYQ9lPtoWNFCB/JY4snlFuJREXfZnm+Ej6yaCHfQvofJDXLDMTVxWscVSuQvVWUQ==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.53.5': - resolution: {integrity: sha512-sHTiuXyBJApxRn+VFMaw1U+Qsz4kcNlxQ742snICYPrY+DDL8/ZbaC4DVIB7vgZmp3jiDaKA0WpBdP0aqPJoBQ==} + '@rollup/rollup-freebsd-x64@4.56.0': + resolution: {integrity: sha512-KNa6lYHloW+7lTEkYGa37fpvPq+NKG/EHKM8+G/g9WDU7ls4sMqbVRV78J6LdNuVaeeK5WB9/9VAFbKxcbXKYg==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.53.5': - resolution: {integrity: sha512-dV3T9MyAf0w8zPVLVBptVlzaXxka6xg1f16VAQmjg+4KMSTWDvhimI/Y6mp8oHwNrmnmVl9XxJ/w/mO4uIQONA==} + '@rollup/rollup-linux-arm-gnueabihf@4.56.0': + resolution: {integrity: sha512-E8jKK87uOvLrrLN28jnAAAChNq5LeCd2mGgZF+fGF5D507WlG/Noct3lP/QzQ6MrqJ5BCKNwI9ipADB6jyiq2A==} cpu: [arm] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-arm-musleabihf@4.53.5': - resolution: {integrity: sha512-wIGYC1x/hyjP+KAu9+ewDI+fi5XSNiUi9Bvg6KGAh2TsNMA3tSEs+Sh6jJ/r4BV/bx/CyWu2ue9kDnIdRyafcQ==} + '@rollup/rollup-linux-arm-musleabihf@4.56.0': + resolution: {integrity: sha512-jQosa5FMYF5Z6prEpTCCmzCXz6eKr/tCBssSmQGEeozA9tkRUty/5Vx06ibaOP9RCrW1Pvb8yp3gvZhHwTDsJw==} cpu: [arm] os: [linux] + libc: [musl] - '@rollup/rollup-linux-arm64-gnu@4.53.5': - resolution: {integrity: sha512-Y+qVA0D9d0y2FRNiG9oM3Hut/DgODZbU9I8pLLPwAsU0tUKZ49cyV1tzmB/qRbSzGvY8lpgGkJuMyuhH7Ma+Vg==} + '@rollup/rollup-linux-arm64-gnu@4.56.0': + resolution: {integrity: sha512-uQVoKkrC1KGEV6udrdVahASIsaF8h7iLG0U0W+Xn14ucFwi6uS539PsAr24IEF9/FoDtzMeeJXJIBo5RkbNWvQ==} cpu: [arm64] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-arm64-musl@4.53.5': - resolution: {integrity: 
sha512-juaC4bEgJsyFVfqhtGLz8mbopaWD+WeSOYr5E16y+1of6KQjc0BpwZLuxkClqY1i8sco+MdyoXPNiCkQou09+g==} + '@rollup/rollup-linux-arm64-musl@4.56.0': + resolution: {integrity: sha512-vLZ1yJKLxhQLFKTs42RwTwa6zkGln+bnXc8ueFGMYmBTLfNu58sl5/eXyxRa2RarTkJbXl8TKPgfS6V5ijNqEA==} cpu: [arm64] os: [linux] + libc: [musl] - '@rollup/rollup-linux-loong64-gnu@4.53.5': - resolution: {integrity: sha512-rIEC0hZ17A42iXtHX+EPJVL/CakHo+tT7W0pbzdAGuWOt2jxDFh7A/lRhsNHBcqL4T36+UiAgwO8pbmn3dE8wA==} + '@rollup/rollup-linux-loong64-gnu@4.56.0': + resolution: {integrity: sha512-FWfHOCub564kSE3xJQLLIC/hbKqHSVxy8vY75/YHHzWvbJL7aYJkdgwD/xGfUlL5UV2SB7otapLrcCj2xnF1dg==} cpu: [loong64] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-ppc64-gnu@4.53.5': - resolution: {integrity: sha512-T7l409NhUE552RcAOcmJHj3xyZ2h7vMWzcwQI0hvn5tqHh3oSoclf9WgTl+0QqffWFG8MEVZZP1/OBglKZx52Q==} + '@rollup/rollup-linux-loong64-musl@4.56.0': + resolution: {integrity: sha512-z1EkujxIh7nbrKL1lmIpqFTc/sr0u8Uk0zK/qIEFldbt6EDKWFk/pxFq3gYj4Bjn3aa9eEhYRlL3H8ZbPT1xvA==} + cpu: [loong64] + os: [linux] + libc: [musl] + + '@rollup/rollup-linux-ppc64-gnu@4.56.0': + resolution: {integrity: sha512-iNFTluqgdoQC7AIE8Q34R3AuPrJGJirj5wMUErxj22deOcY7XwZRaqYmB6ZKFHoVGqRcRd0mqO+845jAibKCkw==} cpu: [ppc64] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-riscv64-gnu@4.53.5': - resolution: {integrity: sha512-7OK5/GhxbnrMcxIFoYfhV/TkknarkYC1hqUw1wU2xUN3TVRLNT5FmBv4KkheSG2xZ6IEbRAhTooTV2+R5Tk0lQ==} + '@rollup/rollup-linux-ppc64-musl@4.56.0': + resolution: {integrity: sha512-MtMeFVlD2LIKjp2sE2xM2slq3Zxf9zwVuw0jemsxvh1QOpHSsSzfNOTH9uYW9i1MXFxUSMmLpeVeUzoNOKBaWg==} + cpu: [ppc64] + os: [linux] + libc: [musl] + + '@rollup/rollup-linux-riscv64-gnu@4.56.0': + resolution: {integrity: sha512-in+v6wiHdzzVhYKXIk5U74dEZHdKN9KH0Q4ANHOTvyXPG41bajYRsy7a8TPKbYPl34hU7PP7hMVHRvv/5aCSew==} cpu: [riscv64] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-riscv64-musl@4.53.5': - resolution: {integrity: 
sha512-GwuDBE/PsXaTa76lO5eLJTyr2k8QkPipAyOrs4V/KJufHCZBJ495VCGJol35grx9xryk4V+2zd3Ri+3v7NPh+w==} + '@rollup/rollup-linux-riscv64-musl@4.56.0': + resolution: {integrity: sha512-yni2raKHB8m9NQpI9fPVwN754mn6dHQSbDTwxdr9SE0ks38DTjLMMBjrwvB5+mXrX+C0npX0CVeCUcvvvD8CNQ==} cpu: [riscv64] os: [linux] + libc: [musl] - '@rollup/rollup-linux-s390x-gnu@4.53.5': - resolution: {integrity: sha512-IAE1Ziyr1qNfnmiQLHBURAD+eh/zH1pIeJjeShleII7Vj8kyEm2PF77o+lf3WTHDpNJcu4IXJxNO0Zluro8bOw==} + '@rollup/rollup-linux-s390x-gnu@4.56.0': + resolution: {integrity: sha512-zhLLJx9nQPu7wezbxt2ut+CI4YlXi68ndEve16tPc/iwoylWS9B3FxpLS2PkmfYgDQtosah07Mj9E0khc3Y+vQ==} cpu: [s390x] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-x64-gnu@4.53.5': - resolution: {integrity: sha512-Pg6E+oP7GvZ4XwgRJBuSXZjcqpIW3yCBhK4BcsANvb47qMvAbCjR6E+1a/U2WXz1JJxp9/4Dno3/iSJLcm5auw==} + '@rollup/rollup-linux-x64-gnu@4.56.0': + resolution: {integrity: sha512-MVC6UDp16ZSH7x4rtuJPAEoE1RwS8N4oK9DLHy3FTEdFoUTCFVzMfJl/BVJ330C+hx8FfprA5Wqx4FhZXkj2Kw==} cpu: [x64] os: [linux] + libc: [glibc] - '@rollup/rollup-linux-x64-musl@4.53.5': - resolution: {integrity: sha512-txGtluxDKTxaMDzUduGP0wdfng24y1rygUMnmlUJ88fzCCULCLn7oE5kb2+tRB+MWq1QDZT6ObT5RrR8HFRKqg==} + '@rollup/rollup-linux-x64-musl@4.56.0': + resolution: {integrity: sha512-ZhGH1eA4Qv0lxaV00azCIS1ChedK0V32952Md3FtnxSqZTBTd6tgil4nZT5cU8B+SIw3PFYkvyR4FKo2oyZIHA==} cpu: [x64] os: [linux] + libc: [musl] - '@rollup/rollup-openharmony-arm64@4.53.5': - resolution: {integrity: sha512-3DFiLPnTxiOQV993fMc+KO8zXHTcIjgaInrqlG8zDp1TlhYl6WgrOHuJkJQ6M8zHEcntSJsUp1XFZSY8C1DYbg==} + '@rollup/rollup-openbsd-x64@4.56.0': + resolution: {integrity: sha512-O16XcmyDeFI9879pEcmtWvD/2nyxR9mF7Gs44lf1vGGx8Vg2DRNx11aVXBEqOQhWb92WN4z7fW/q4+2NYzCbBA==} + cpu: [x64] + os: [openbsd] + + '@rollup/rollup-openharmony-arm64@4.56.0': + resolution: {integrity: sha512-LhN/Reh+7F3RCgQIRbgw8ZMwUwyqJM+8pXNT6IIJAqm2IdKkzpCh/V9EdgOMBKuebIrzswqy4ATlrDgiOwbRcQ==} cpu: [arm64] os: [openharmony] - 
'@rollup/rollup-win32-arm64-msvc@4.53.5': - resolution: {integrity: sha512-nggc/wPpNTgjGg75hu+Q/3i32R00Lq1B6N1DO7MCU340MRKL3WZJMjA9U4K4gzy3dkZPXm9E1Nc81FItBVGRlA==} + '@rollup/rollup-win32-arm64-msvc@4.56.0': + resolution: {integrity: sha512-kbFsOObXp3LBULg1d3JIUQMa9Kv4UitDmpS+k0tinPBz3watcUiV2/LUDMMucA6pZO3WGE27P7DsfaN54l9ing==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.53.5': - resolution: {integrity: sha512-U/54pTbdQpPLBdEzCT6NBCFAfSZMvmjr0twhnD9f4EIvlm9wy3jjQ38yQj1AGznrNO65EWQMgm/QUjuIVrYF9w==} + '@rollup/rollup-win32-ia32-msvc@4.56.0': + resolution: {integrity: sha512-vSSgny54D6P4vf2izbtFm/TcWYedw7f8eBrOiGGecyHyQB9q4Kqentjaj8hToe+995nob/Wv48pDqL5a62EWtg==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.53.5': - resolution: {integrity: sha512-2NqKgZSuLH9SXBBV2dWNRCZmocgSOx8OJSdpRaEcRlIfX8YrKxUT6z0F1NpvDVhOsl190UFTRh2F2WDWWCYp3A==} + '@rollup/rollup-win32-x64-gnu@4.56.0': + resolution: {integrity: sha512-FeCnkPCTHQJFbiGG49KjV5YGW/8b9rrXAM2Mz2kiIoktq2qsJxRD5giEMEOD2lPdgs72upzefaUvS+nc8E3UzQ==} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.53.5': - resolution: {integrity: sha512-JRpZUhCfhZ4keB5v0fe02gQJy05GqboPOaxvjugW04RLSYYoB/9t2lx2u/tMs/Na/1NXfY8QYjgRljRpN+MjTQ==} + '@rollup/rollup-win32-x64-msvc@4.56.0': + resolution: {integrity: sha512-H8AE9Ur/t0+1VXujj90w0HrSOuv0Nq9r1vSZF2t5km20NTfosQsGGUXDaKdQZzwuLts7IyL1fYT4hM95TI9c4g==} cpu: [x64] os: [win32] @@ -1186,6 +1181,10 @@ packages: '@simonwep/pickr@1.9.0': resolution: {integrity: sha512-oEYvv15PyfZzjoAzvXYt3UyNGwzsrpFxLaZKzkOSd0WYBVwLd19iJerePDONxC1iF6+DpcswPdLIM2KzCJuYFg==} + '@sindresorhus/merge-streams@4.0.0': + resolution: {integrity: sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==} + engines: {node: '>=18'} + '@solid-primitives/refs@1.1.2': resolution: {integrity: sha512-K7tf2thy7L+YJjdqXspXOg5xvNEOH8tgEWsp0+1mQk3obHBRD6hEjYZk7p7FlJphSZImS35je3UfmWuD7MhDfg==} peerDependencies: @@ -1204,17 +1203,17 @@ packages: 
'@standard-schema/spec@1.1.0': resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} - '@stylistic/eslint-plugin@5.6.1': - resolution: {integrity: sha512-JCs+MqoXfXrRPGbGmho/zGS/jMcn3ieKl/A8YImqib76C8kjgZwq5uUFzc30lJkMvcchuRn6/v8IApLxli3Jyw==} + '@stylistic/eslint-plugin@5.7.1': + resolution: {integrity: sha512-zjTUwIsEfT+k9BmXwq1QEFYsb4afBlsI1AXFyWQBgggMzwBFOuu92pGrE5OFx90IOjNl+lUbQoTG7f8S0PkOdg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: '>=9.0.0' - '@stylistic/stylelint-plugin@4.0.0': - resolution: {integrity: sha512-CFwt3K4Y/7bygNCLCQ8Sy4Hzgbhxq3BsNW0FIuYxl17HD3ywptm54ocyeiLVRrk5jtz1Zwks7Xr9eiZt8SWHAw==} - engines: {node: ^18.12 || >=20.9} + '@stylistic/stylelint-plugin@5.0.1': + resolution: {integrity: sha512-NaVwCNVZ2LyPA3TnUwvjO9c6P6VUjgRB8UP8SOW+cAOJBVqPPuOIDawsvvtql/LhkuR3JuTdGvr/RM3dUl8l2Q==} + engines: {node: '>=20.19.0'} peerDependencies: - stylelint: ^16.22.0 + stylelint: ^17.0.0 '@swc/helpers@0.2.14': resolution: {integrity: sha512-wpCQMhf5p5GhNg2MmGKXzUNwxe7zRiCsmqYsamez2beP7mKPCSiu+BjZcdN95yYSzO857kr0VfQewmGpS77nqA==} @@ -1305,8 +1304,8 @@ packages: '@types/d3-selection@3.0.11': resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==} - '@types/d3-shape@3.1.7': - resolution: {integrity: sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==} + '@types/d3-shape@3.1.8': + resolution: {integrity: sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==} '@types/d3-time-format@4.0.3': resolution: {integrity: sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==} @@ -1353,14 +1352,17 @@ packages: '@types/jquery@3.5.33': resolution: {integrity: sha512-SeyVJXlCZpEki5F0ghuYe+L+PprQta6nRZqhONt9F13dWBtR/ftoaIbdRQ7cis7womE+X2LKhsDdDtkkDhJS6g==} + '@types/js-yaml@4.0.9': + 
resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} + '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} '@types/json5@0.0.29': resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} - '@types/katex@0.16.7': - resolution: {integrity: sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==} + '@types/katex@0.16.8': + resolution: {integrity: sha512-trgaNyfU+Xh2Tc+ABIb44a5AYUpicB3uwirOioeOkNPPbmgRNtcWyDeeFRzjPZENO9Vq8gvVqfhaaXWLlevVwg==} '@types/marked@4.3.2': resolution: {integrity: sha512-a79Yc3TOk6dGdituy8hmTTJXjOkZ7zsFYV10L337ttq/rec8lRMDBpV7fL3uLx6TgbFCa5DU/h8FmIBQPSbU0w==} @@ -1368,11 +1370,8 @@ packages: '@types/ms@2.1.0': resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} - '@types/node@20.19.27': - resolution: {integrity: sha512-N2clP5pJhB2YnZJ3PIHFk5RkygRX5WO/5f0WC08tp0wd+sv0rsJk3MqWn3CbNmT2J505a5336jaQj4ph1AdMug==} - - '@types/node@25.0.3': - resolution: {integrity: sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==} + '@types/node@25.0.10': + resolution: {integrity: sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg==} '@types/pdfobject@2.2.5': resolution: {integrity: sha512-7gD5tqc/RUDq0PyoLemL0vEHxBYi+zY0WVaFAx/Y0jBsXFgot1vB9No1GhDZGwRGJMCIZbgAb74QG9MTyTNU/g==} @@ -1407,63 +1406,66 @@ packages: '@types/whatwg-mimetype@3.0.2': resolution: {integrity: sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==} - '@typescript-eslint/eslint-plugin@8.50.0': - resolution: {integrity: sha512-O7QnmOXYKVtPrfYzMolrCTfkezCJS9+ljLdKW/+DCvRsc3UAz+sbH6Xcsv7p30+0OwUbeWfUDAQE0vpabZ3QLg==} + '@types/ws@8.18.1': 
+ resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} + + '@typescript-eslint/eslint-plugin@8.53.1': + resolution: {integrity: sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.50.0 + '@typescript-eslint/parser': ^8.53.1 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.50.0': - resolution: {integrity: sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==} + '@typescript-eslint/parser@8.53.1': + resolution: {integrity: sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.50.0': - resolution: {integrity: sha512-Cg/nQcL1BcoTijEWyx4mkVC56r8dj44bFDvBdygifuS20f3OZCHmFbjF34DPSi07kwlFvqfv/xOLnJ5DquxSGQ==} + '@typescript-eslint/project-service@8.53.1': + resolution: {integrity: sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/scope-manager@8.50.0': - resolution: {integrity: sha512-xCwfuCZjhIqy7+HKxBLrDVT5q/iq7XBVBXLn57RTIIpelLtEIZHXAF/Upa3+gaCpeV1NNS5Z9A+ID6jn50VD4A==} + '@typescript-eslint/scope-manager@8.53.1': + resolution: {integrity: sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/tsconfig-utils@8.50.0': - resolution: {integrity: sha512-vxd3G/ybKTSlm31MOA96gqvrRGv9RJ7LGtZCn2Vrc5htA0zCDvcMqUkifcjrWNNKXHUU3WCkYOzzVSFBd0wa2w==} + '@typescript-eslint/tsconfig-utils@8.53.1': + resolution: {integrity: 
sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.50.0': - resolution: {integrity: sha512-7OciHT2lKCewR0mFoBrvZJ4AXTMe/sYOe87289WAViOocEmDjjv8MvIOT2XESuKj9jp8u3SZYUSh89QA4S1kQw==} + '@typescript-eslint/type-utils@8.53.1': + resolution: {integrity: sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/types@8.50.0': - resolution: {integrity: sha512-iX1mgmGrXdANhhITbpp2QQM2fGehBse9LbTf0sidWK6yg/NE+uhV5dfU1g6EYPlcReYmkE9QLPq/2irKAmtS9w==} + '@typescript-eslint/types@8.53.1': + resolution: {integrity: sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/typescript-estree@8.50.0': - resolution: {integrity: sha512-W7SVAGBR/IX7zm1t70Yujpbk+zdPq/u4soeFSknWFdXIFuWsBGBOUu/Tn/I6KHSKvSh91OiMuaSnYp3mtPt5IQ==} + '@typescript-eslint/typescript-estree@8.53.1': + resolution: {integrity: sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.50.0': - resolution: {integrity: sha512-87KgUXET09CRjGCi2Ejxy3PULXna63/bMYv72tCAlDJC3Yqwln0HiFJ3VJMst2+mEtNtZu5oFvX4qJGjKsnAgg==} + '@typescript-eslint/utils@8.53.1': + resolution: {integrity: sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/visitor-keys@8.50.0': - resolution: {integrity: 
sha512-Xzmnb58+Db78gT/CCj/PVCvK+zxbnsw6F+O1oheYszJbBSdEjVhQi3C/Xttzxgi/GLmpvOggRs1RFpiJ8+c34Q==} + '@typescript-eslint/visitor-keys@8.53.1': + resolution: {integrity: sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@unrs/resolver-binding-android-arm-eabi@1.11.1': @@ -1505,41 +1507,49 @@ packages: resolution: {integrity: sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==} cpu: [arm64] os: [linux] + libc: [glibc] '@unrs/resolver-binding-linux-arm64-musl@1.11.1': resolution: {integrity: sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==} cpu: [arm64] os: [linux] + libc: [musl] '@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': resolution: {integrity: sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==} cpu: [ppc64] os: [linux] + libc: [glibc] '@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': resolution: {integrity: sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==} cpu: [riscv64] os: [linux] + libc: [glibc] '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': resolution: {integrity: sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==} cpu: [riscv64] os: [linux] + libc: [musl] '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': resolution: {integrity: sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==} cpu: [s390x] os: [linux] + libc: [glibc] '@unrs/resolver-binding-linux-x64-gnu@1.11.1': resolution: {integrity: sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==} cpu: [x64] os: [linux] + libc: [glibc] '@unrs/resolver-binding-linux-x64-musl@1.11.1': resolution: {integrity: sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==} 
cpu: [x64] os: [linux] + libc: [musl] '@unrs/resolver-binding-wasm32-wasi@1.11.1': resolution: {integrity: sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==} @@ -1568,8 +1578,8 @@ packages: vite: ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 vue: ^3.2.25 - '@vitest/eslint-plugin@1.5.2': - resolution: {integrity: sha512-2t1F2iecXB/b1Ox4U137lhD3chihEE3dRVtu3qMD35tc6UqUjg1VGRJoS1AkFKwpT8zv8OQInzPQO06hrRkeqw==} + '@vitest/eslint-plugin@1.6.6': + resolution: {integrity: sha512-bwgQxQWRtnTVzsUHK824tBmHzjV0iTx3tZaiQIYDjX3SA7TsQS8CuDVqxXrRY3FaOUMgbGavesCxI9MOfFLm7Q==} engines: {node: '>=18'} peerDependencies: eslint: '>=8.57.0' @@ -1581,11 +1591,11 @@ packages: vitest: optional: true - '@vitest/expect@4.0.16': - resolution: {integrity: sha512-eshqULT2It7McaJkQGLkPjPjNph+uevROGuIMJdG3V+0BSR2w9u6J9Lwu+E8cK5TETlfou8GRijhafIMhXsimA==} + '@vitest/expect@4.0.18': + resolution: {integrity: sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==} - '@vitest/mocker@4.0.16': - resolution: {integrity: sha512-yb6k4AZxJTB+q9ycAvsoxGn+j/po0UaPgajllBgt1PzoMAAmJGYFdDk0uCcRcxb3BrME34I6u8gHZTQlkqSZpg==} + '@vitest/mocker@4.0.18': + resolution: {integrity: sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==} peerDependencies: msw: ^2.4.9 vite: ^6.0.0 || ^7.0.0-0 @@ -1595,66 +1605,61 @@ packages: vite: optional: true - '@vitest/pretty-format@4.0.16': - resolution: {integrity: sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==} + '@vitest/pretty-format@4.0.18': + resolution: {integrity: sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==} - '@vitest/runner@4.0.16': - resolution: {integrity: sha512-VWEDm5Wv9xEo80ctjORcTQRJ539EGPB3Pb9ApvVRAY1U/WkHXmmYISqU5E79uCwcW7xYUV38gwZD+RV755fu3Q==} + '@vitest/runner@4.0.18': + resolution: {integrity: 
sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==} - '@vitest/snapshot@4.0.16': - resolution: {integrity: sha512-sf6NcrYhYBsSYefxnry+DR8n3UV4xWZwWxYbCJUt2YdvtqzSPR7VfGrY0zsv090DAbjFZsi7ZaMi1KnSRyK1XA==} + '@vitest/snapshot@4.0.18': + resolution: {integrity: sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==} - '@vitest/spy@4.0.16': - resolution: {integrity: sha512-4jIOWjKP0ZUaEmJm00E0cOBLU+5WE0BpeNr3XN6TEF05ltro6NJqHWxXD0kA8/Zc8Nh23AT8WQxwNG+WeROupw==} + '@vitest/spy@4.0.18': + resolution: {integrity: sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==} - '@vitest/utils@4.0.16': - resolution: {integrity: sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==} + '@vitest/utils@4.0.18': + resolution: {integrity: sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==} - '@volar/language-core@2.4.26': - resolution: {integrity: sha512-hH0SMitMxnB43OZpyF1IFPS9bgb2I3bpCh76m2WEK7BE0A0EzpYsRp0CCH2xNKshr7kacU5TQBLYn4zj7CG60A==} + '@volar/language-core@2.4.27': + resolution: {integrity: sha512-DjmjBWZ4tJKxfNC1F6HyYERNHPYS7L7OPFyCrestykNdUZMFYzI9WTyvwPcaNaHlrEUwESHYsfEw3isInncZxQ==} - '@volar/source-map@2.4.26': - resolution: {integrity: sha512-JJw0Tt/kSFsIRmgTQF4JSt81AUSI1aEye5Zl65EeZ8H35JHnTvFGmpDOBn5iOxd48fyGE+ZvZBp5FcgAy/1Qhw==} + '@volar/source-map@2.4.27': + resolution: {integrity: sha512-ynlcBReMgOZj2i6po+qVswtDUeeBRCTgDurjMGShbm8WYZgJ0PA4RmtebBJ0BCYol1qPv3GQF6jK7C9qoVc7lg==} - '@volar/typescript@2.4.26': - resolution: {integrity: sha512-N87ecLD48Sp6zV9zID/5yuS1+5foj0DfuYGdQ6KHj/IbKvyKv1zNX6VCmnKYwtmHadEO6mFc2EKISiu3RDPAvA==} + '@volar/typescript@2.4.27': + resolution: {integrity: sha512-eWaYCcl/uAPInSK2Lze6IqVWaBu/itVqR5InXcHXFyles4zO++Mglt3oxdgj75BDcv1Knr9Y93nowS8U3wqhxg==} - '@vue/compiler-core@3.5.25': - resolution: {integrity: 
sha512-vay5/oQJdsNHmliWoZfHPoVZZRmnSWhug0BYT34njkYTPqClh3DNWLkZNJBVSjsNMrg0CCrBfoKkjZQPM/QVUw==} + '@vue/compiler-core@3.5.27': + resolution: {integrity: sha512-gnSBQjZA+//qDZen+6a2EdHqJ68Z7uybrMf3SPjEGgG4dicklwDVmMC1AeIHxtLVPT7sn6sH1KOO+tS6gwOUeQ==} - '@vue/compiler-dom@3.5.25': - resolution: {integrity: sha512-4We0OAcMZsKgYoGlMjzYvaoErltdFI2/25wqanuTu+S4gismOTRTBPi4IASOjxWdzIwrYSjnqONfKvuqkXzE2Q==} + '@vue/compiler-dom@3.5.27': + resolution: {integrity: sha512-oAFea8dZgCtVVVTEC7fv3T5CbZW9BxpFzGGxC79xakTr6ooeEqmRuvQydIiDAkglZEAd09LgVf1RoDnL54fu5w==} - '@vue/compiler-sfc@3.5.25': - resolution: {integrity: sha512-PUgKp2rn8fFsI++lF2sO7gwO2d9Yj57Utr5yEsDf3GNaQcowCLKL7sf+LvVFvtJDXUp/03+dC6f2+LCv5aK1ag==} + '@vue/compiler-sfc@3.5.27': + resolution: {integrity: sha512-sHZu9QyDPeDmN/MRoshhggVOWE5WlGFStKFwu8G52swATgSny27hJRWteKDSUUzUH+wp+bmeNbhJnEAel/auUQ==} - '@vue/compiler-ssr@3.5.25': - resolution: {integrity: sha512-ritPSKLBcParnsKYi+GNtbdbrIE1mtuFEJ4U1sWeuOMlIziK5GtOL85t5RhsNy4uWIXPgk+OUdpnXiTdzn8o3A==} + '@vue/compiler-ssr@3.5.27': + resolution: {integrity: sha512-Sj7h+JHt512fV1cTxKlYhg7qxBvack+BGncSpH+8vnN+KN95iPIcqB5rsbblX40XorP+ilO7VIKlkuu3Xq2vjw==} - '@vue/language-core@3.1.8': - resolution: {integrity: sha512-PfwAW7BLopqaJbneChNL6cUOTL3GL+0l8paYP5shhgY5toBNidWnMXWM+qDwL7MC9+zDtzCF2enT8r6VPu64iw==} + '@vue/language-core@3.2.3': + resolution: {integrity: sha512-VpN/GnYDzGLh44AI6i1OB/WsLXo6vwnl0EWHBelGc4TyC0yEq6azwNaed/+Tgr8anFlSdWYnMEkyHJDPe7ii7A==} + + '@vue/reactivity@3.5.27': + resolution: {integrity: sha512-vvorxn2KXfJ0nBEnj4GYshSgsyMNFnIQah/wczXlsNXt+ijhugmW+PpJ2cNPe4V6jpnBcs0MhCODKllWG+nvoQ==} + + '@vue/runtime-core@3.5.27': + resolution: {integrity: sha512-fxVuX/fzgzeMPn/CLQecWeDIFNt3gQVhxM0rW02Tvp/YmZfXQgcTXlakq7IMutuZ/+Ogbn+K0oct9J3JZfyk3A==} + + '@vue/runtime-dom@3.5.27': + resolution: {integrity: sha512-/QnLslQgYqSJ5aUmb5F0z0caZPGHRB8LEAQ1s81vHFM5CBfnun63rxhvE/scVb/j3TbBuoZwkJyiLCkBluMpeg==} + + '@vue/server-renderer@3.5.27': + resolution: {integrity: 
sha512-qOz/5thjeP1vAFc4+BY3Nr6wxyLhpeQgAE/8dDtKo6a6xdk+L4W46HDZgNmLOBUDEkFXV3G7pRiUqxjX0/2zWA==} peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true + vue: 3.5.27 - '@vue/reactivity@3.5.25': - resolution: {integrity: sha512-5xfAypCQepv4Jog1U4zn8cZIcbKKFka3AgWHEFQeK65OW+Ys4XybP6z2kKgws4YB43KGpqp5D/K3go2UPPunLA==} - - '@vue/runtime-core@3.5.25': - resolution: {integrity: sha512-Z751v203YWwYzy460bzsYQISDfPjHTl+6Zzwo/a3CsAf+0ccEjQ8c+0CdX1WsumRTHeywvyUFtW6KvNukT/smA==} - - '@vue/runtime-dom@3.5.25': - resolution: {integrity: sha512-a4WrkYFbb19i9pjkz38zJBg8wa/rboNERq3+hRRb0dHiJh13c+6kAbgqCPfMaJ2gg4weWD3APZswASOfmKwamA==} - - '@vue/server-renderer@3.5.25': - resolution: {integrity: sha512-UJaXR54vMG61i8XNIzTSf2Q7MOqZHpp8+x3XLGtE3+fL+nQd+k7O5+X3D/uWrnQXOdMw5VPih+Uremcw+u1woQ==} - peerDependencies: - vue: 3.5.25 - - '@vue/shared@3.5.25': - resolution: {integrity: sha512-AbOPdQQnAnzs58H2FrrDxYj/TJfmeS2jdfEEhgiKINy+bnOANmVizIEgq1r+C5zsbs6l1CCQxtcj71rwNQ4jWg==} + '@vue/shared@3.5.27': + resolution: {integrity: sha512-dXr/3CgqXsJkZ0n9F3I4elY8wM9jMJpP3pvRG52r6m0tu/MsAFIe6JpXVGeNMd/D9F4hQynWT8Rfuj0bdm9kFQ==} '@webassemblyjs/ast@1.14.1': resolution: {integrity: sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==} @@ -1776,8 +1781,8 @@ packages: ajv@8.17.1: resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==} - alien-signals@3.1.1: - resolution: {integrity: sha512-ogkIWbVrLwKtHY6oOAXaYkAxP+cTH7V5FZ5+Tm4NZFd8VDZ6uNMDrfzqctTZ42eTMCSR3ne3otpcxmqSnFfPYA==} + alien-signals@3.1.2: + resolution: {integrity: sha512-d9dYqZTS90WLiU0I5c6DHj/HcKkF8ZyGN3G5x8wSbslulz70KOxaqCT0hQCo9KOyhVqzqGojvNdJXoTumZOtcw==} ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} @@ -1819,12 +1824,8 @@ packages: resolution: {integrity: 
sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==} engines: {node: '>=0.10.0'} - array-union@2.1.0: - resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} - engines: {node: '>=8'} - - asciinema-player@3.13.5: - resolution: {integrity: sha512-mgpJc9g6I+k4Tz5qVUNd0H+GoYlhiUwvlay6vD6IXiuiWOWhBOjxbvqQ1bcI/HPTrOYxhTyxZuzHIXM36Tw60Q==} + asciinema-player@3.14.0: + resolution: {integrity: sha512-44m3CpNavn8i7DSr/AeeV+rJpHpcqc/OCildCs9FAu5gnXB6XNBdbhfg6mHMG4uU3R1rxFNA3ZRTt8FMhHC48Q==} assertion-error@2.0.1: resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} @@ -1842,8 +1843,8 @@ packages: engines: {node: '>= 4.5.0'} hasBin: true - axe-core@4.11.0: - resolution: {integrity: sha512-ilYanEU8vxxBexpJd8cWM4ElSQq4QctCLKih0TSfjIfCQTeyH/6zVrmIJfLPrKTKJRbiG+cfnZbQIjAlJmF1jQ==} + axe-core@4.11.1: + resolution: {integrity: sha512-BASOg+YwO2C+346x3LZOeoovTIoTrRqEsqMa6fmfAV0P+U9mFr9NsyOEpiYvFjbc64NMrSswhV50WdXzdb/Z5A==} engines: {node: '>=4'} axobject-query@4.1.0: @@ -1853,14 +1854,15 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - balanced-match@2.0.0: - resolution: {integrity: sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==} + balanced-match@3.0.1: + resolution: {integrity: sha512-vjtV3hiLqYDNRoiAv0zC4QaGAMPomEoq83PRmYIofPswwZurCeWR5LByXm7SyoL0Zh5+2z0+HC7jG8gSZJUh0w==} + engines: {node: '>= 16'} base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - baseline-browser-mapping@2.9.9: - resolution: {integrity: sha512-V8fbOCSeOFvlDj7LLChUcqbZrdKD9RU/VR260piF1790vT0mfLSwGc/Qzxv3IqiTukOpNtItePa0HBpMAj7MDg==} + baseline-browser-mapping@2.9.17: + resolution: {integrity: 
sha512-agD0MgJFUP/4nvjqzIB29zRPUuCF7Ge6mEv9s8dHrtYD7QWXRcx75rOADE/d5ah1NI+0vkDl0yorDd5U852IQQ==} hasBin: true big.js@5.2.2: @@ -1906,8 +1908,8 @@ packages: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} - cacheable@2.3.1: - resolution: {integrity: sha512-yr+FSHWn1ZUou5LkULX/S+jhfgfnLbuKQjE40tyEd4fxGZVMbBL5ifno0J0OauykS8UiCSgHi+DV/YD+rjFxFg==} + cacheable@2.3.2: + resolution: {integrity: sha512-w+ZuRNmex9c1TR9RcsxbfTKCjSL0rh1WA5SABbrWprIHeNBdmyQLSYonlDy9gpD+63XT8DgZ/wNh1Smvc9WnJA==} callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} @@ -1917,11 +1919,11 @@ packages: resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} engines: {node: '>= 6'} - caniuse-lite@1.0.30001760: - resolution: {integrity: sha512-7AAMPcueWELt1p3mi13HR/LHH0TJLT11cnwDJEs3xA4+CK/PLKeO9Kl1oru24htkyUKtkGCvAx4ohB0Ttry8Dw==} + caniuse-lite@1.0.30001766: + resolution: {integrity: sha512-4C0lfJ0/YPjJQHagaE9x2Elb69CIqEPZeG0anQt9SIvIoOH4a4uaRl73IavyO+0qZh6MDLH//DrXThEYKHkmYA==} - chai@6.2.1: - resolution: {integrity: sha512-p4Z49OGG5W/WBCPSS/dH3jQ73kD6tiMmUM+bckNK6Jr5JHMG3k9bg/BvKR8lKmtVBKmOiuVaV2ws8s9oSbwysg==} + chai@6.2.2: + resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==} engines: {node: '>=18'} chalk@4.1.2: @@ -2039,8 +2041,8 @@ packages: resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} engines: {node: '>= 12'} - comment-parser@1.4.1: - resolution: {integrity: sha512-buhp5kePrmda3vhc5B9t7pUQXAb2Tnd0qgpkIhPhkHXxJpiPJ11H0ZEU0oBpJ2QztSbzG/ZxMj/CHsYJqRHmyg==} + comment-parser@1.4.5: + resolution: {integrity: sha512-aRDkn3uyIlCFfk5NUA+VdwMmMsh8JGhc4hapfV4yxymHGQ3BVskMQfoXGpCo5IoBuQ9tS5iiVKhCpTcB4pW4qw==} engines: 
{node: '>= 12.0.0'} compare-versions@6.1.1: @@ -2052,8 +2054,8 @@ packages: confbox@0.1.8: resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} - core-js-compat@3.47.0: - resolution: {integrity: sha512-IGfuznZ/n7Kp9+nypamBhvwdwLsW6KC8IOaURw2doAK5e98AG3acVLdh0woOnEqCfUtS+Vu882JE4k/DAm3ItQ==} + core-js-compat@3.48.0: + resolution: {integrity: sha512-OM4cAF3D6VtH/WkLtWvyNC56EZVXsZdU3iqaMG2B4WvYrlqU831pc4UtG5yp0sE9z8Y02wVN7PjW5Zf9Gt0f1Q==} core-js@3.32.2: resolution: {integrity: sha512-pxXSw1mYZPDGvTQqEc5vgIb83jGQKFGYWY76z4a7weZXUolw3G+OvpZqSRcfYOoOVUQJYEPsWeQK8pKEnUtWxQ==} @@ -2196,8 +2198,8 @@ packages: resolution: {integrity: sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==} engines: {node: '>=12'} - d3-format@3.1.0: - resolution: {integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==} + d3-format@3.1.2: + resolution: {integrity: sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==} engines: {node: '>=12'} d3-geo@3.1.1: @@ -2305,8 +2307,8 @@ packages: supports-color: optional: true - decode-named-character-reference@1.2.0: - resolution: {integrity: sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==} + decode-named-character-reference@1.3.0: + resolution: {integrity: sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==} decode-uri-component@0.2.2: resolution: {integrity: sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==} @@ -2336,10 +2338,6 @@ packages: didyoumean@1.2.2: resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==} - dir-glob@3.0.1: - resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} 
- engines: {node: '>=8'} - dlv@1.1.3: resolution: {integrity: sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==} @@ -2375,8 +2373,8 @@ packages: easymde@2.20.0: resolution: {integrity: sha512-V1Z5f92TfR42Na852OWnIZMbM7zotWQYTddNaLYZFVKj7APBbyZ3FYJ27gBw2grMW3R6Qdv9J8n5Ij7XRSIgXQ==} - electron-to-chromium@1.5.267: - resolution: {integrity: sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==} + electron-to-chromium@1.5.278: + resolution: {integrity: sha512-dQ0tM1svDRQOwxnXxm+twlGTjr9Upvt8UFWAgmLsxEzFQxhbti4VwxmMjsDxVC51Zo84swW7FVCXEV+VAkhuPw==} emoji-regex@10.6.0: resolution: {integrity: sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==} @@ -2399,6 +2397,10 @@ packages: resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} engines: {node: '>=0.12'} + entities@7.0.1: + resolution: {integrity: sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==} + engines: {node: '>=0.12'} + env-paths@2.2.1: resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} engines: {node: '>=6'} @@ -2417,16 +2419,11 @@ packages: es-module-lexer@2.0.0: resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==} - esbuild-loader@4.4.0: - resolution: {integrity: sha512-4J+hXTpTtEdzUNLoY8ReqDNJx2NoldfiljRCiKbeYUuZmVaiJeDqFgyAzz8uOopaekwRoCcqBFyEroGQLFVZ1g==} + esbuild-loader@4.4.2: + resolution: {integrity: sha512-8LdoT9sC7fzfvhxhsIAiWhzLJr9yT3ggmckXxsgvM07wgrRxhuT98XhLn3E7VczU5W5AFsPKv9DdWcZIubbWkQ==} peerDependencies: webpack: ^4.40.0 || ^5.0.0 - esbuild@0.25.12: - resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} - engines: {node: '>=18'} - hasBin: true - 
esbuild@0.27.2: resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==} engines: {node: '>=18'} @@ -2568,14 +2565,14 @@ packages: resolution: {integrity: sha512-brcKcxGnISN2CcVhXJ/kEQlNa0MEfGRtwKtWA16SkqXHKitaKIMrfemJKLKX1YqDU5C/5JY3PvZXd5jEW04e0Q==} engines: {node: '>=5.0.0'} - eslint-plugin-playwright@2.4.0: - resolution: {integrity: sha512-MWNXfXlLfwXAjj4Z80PvCCFCXgCYy5OCHan57Z/beGrjkJ3maG1GanuGX8Ck6T6fagplBx2ZdkifxSfByftaTQ==} + eslint-plugin-playwright@2.5.0: + resolution: {integrity: sha512-1ckFw7Abdz+l23wtw5Tg4GTK3Y+MgEQQNjEr7FTJP3wwmIOj8DkbJ6G655aPc09c0Kfn/NoGA4xpMZzeSO4NWw==} engines: {node: '>=16.9.0'} peerDependencies: eslint: '>=8.40.0' - eslint-plugin-prettier@5.5.4: - resolution: {integrity: sha512-swNtI95SToIz05YINMA6Ox5R057IMAmWZ26GqPxusAp1TZzj+IdY9tXNWWD3vkF/wEqydCONcwjTFpxybBqZsg==} + eslint-plugin-prettier@5.5.5: + resolution: {integrity: sha512-hscXkbqUZ2sPithAuLm5MXL+Wph+U7wHngPBv9OMWwlP8iaflyxpjTYZkmdgB4/vPIhemRlBEoLrH7UC1n7aUw==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: '@types/eslint': '>=8.0.0' @@ -2588,11 +2585,11 @@ packages: eslint-config-prettier: optional: true - eslint-plugin-regexp@2.10.0: - resolution: {integrity: sha512-ovzQT8ESVn5oOe5a7gIDPD5v9bCSjIFJu57sVPDqgPRXicQzOnYfFN21WoQBQF18vrhT5o7UMKFwJQVVjyJ0ng==} - engines: {node: ^18 || >=20} + eslint-plugin-regexp@3.0.0: + resolution: {integrity: sha512-iW7hgAV8NOG6E2dz+VeKpq67YLQ9jaajOKYpoOSic2/q8y9BMdXBKkSR9gcMtbqEhNQzdW41E3wWzvhp8ExYwQ==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} peerDependencies: - eslint: '>=8.44.0' + eslint: '>=9.38.0' eslint-plugin-sonarjs@3.0.5: resolution: {integrity: sha512-dI62Ff3zMezUToi161hs2i1HX1ie8Ia2hO0jtNBfdgRBicAG4ydy2WPt0rMTrAe3ZrlqhpAO3w1jcQEdneYoFA==} @@ -2612,8 +2609,8 @@ packages: eslint: '>=5.0.0' vue-eslint-parser: '>=7.1.0' - eslint-plugin-vue@10.6.2: - resolution: {integrity: 
sha512-nA5yUs/B1KmKzvC42fyD0+l9Yd+LtEpVhWRbXuDj0e+ZURcTtyRbMDWUeJmTAh2wC6jC83raS63anNM2YT3NPw==} + eslint-plugin-vue@10.7.0: + resolution: {integrity: sha512-r2XFCK4qlo1sxEoAMIoTTX0PZAdla0JJDt1fmYiworZUX67WeEGqm+JbyAg3M+pGiJ5U6Mp5WQbontXWtIW7TA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: '@stylistic/eslint-plugin': ^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0 @@ -2665,8 +2662,8 @@ packages: resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - esquery@1.6.0: - resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==} + esquery@1.7.0: + resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} engines: {node: '>=0.10'} esrecurse@4.3.0: @@ -2725,8 +2722,11 @@ packages: resolution: {integrity: sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==} engines: {node: '>= 4.9.1'} - fastq@1.19.1: - resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} + fastq@1.20.1: + resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==} + + fd-package-json@2.0.0: + resolution: {integrity: sha512-jKmm9YtsNXN789RS/0mSzOC1NUq9mkVd65vbSSVsKdjGvYXBuE4oWe2QOEoFeRmJg+lPuZxpmrfFclNhoRMneQ==} fdir@6.5.0: resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} @@ -2743,8 +2743,8 @@ packages: fflate@0.8.2: resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==} - file-entry-cache@11.1.1: - resolution: {integrity: sha512-TPVFSDE7q91Dlk1xpFLvFllf8r0HyOMOlnWy7Z2HBku5H3KhIeOGInexrIeg2D64DosVB/JXkrrk6N/7Wriq4A==} + file-entry-cache@11.1.2: + 
resolution: {integrity: sha512-N2WFfK12gmrK1c1GXOqiAJ1tc5YE+R53zvQ+t5P8S5XhnmKYVB5eZEiLNZKDSmoG8wqqbF9EXYBBW/nef19log==} file-entry-cache@8.0.0: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} @@ -2770,8 +2770,8 @@ packages: resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} engines: {node: '>=16'} - flat-cache@6.1.19: - resolution: {integrity: sha512-l/K33newPTZMTGAnnzaiqSl6NnH7Namh8jBNjrgjprWxGmZUuxx/sJNIRaijOh3n7q7ESbhNZC+pvVZMFdeU4A==} + flat-cache@6.1.20: + resolution: {integrity: sha512-AhHYqwvN62NVLp4lObVXGVluiABTHapoB57EyegZVmazN+hhGhLTn3uZbOofoTw4DSDvVCadzzyChXhOAvy8uQ==} flat@5.0.2: resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==} @@ -2780,6 +2780,11 @@ packages: flatted@3.3.3: resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} + formatly@0.3.0: + resolution: {integrity: sha512-9XNj/o4wrRFyhSMJOvsuyMwy8aUfBaZ1VrqHVfohyXf0Sw0e+yfKG+xZaY3arGCOMdwFsqObtzVOc1gU9KiT9w==} + engines: {node: '>=18.3.0'} + hasBin: true + fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} @@ -2834,9 +2839,13 @@ packages: resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==} engines: {node: '>=18'} - globby@11.1.0: - resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} - engines: {node: '>=10'} + globals@17.1.0: + resolution: {integrity: sha512-8HoIcWI5fCvG5NADj4bDav+er9B9JMj2vyL2pI8D0eismKyUvPLTSs+Ln3wqhwcp306i73iyVnEKx3F6T47TGw==} + engines: {node: '>=18'} + + globby@16.1.0: + resolution: {integrity: 
sha512-+A4Hq7m7Ze592k9gZRy4gJ27DrXRNnC1vPjxTt1qQxEY8RxagBkBxivkCwg7FxSTG0iLLEMaUx13oOr0R2/qcQ==} + engines: {node: '>=20'} globjoin@0.1.4: resolution: {integrity: sha512-xYfnw62CKG8nLkZBfWbhWwDw02CHty86jfPcc2cr3ZfeuK9ysoVPPEUxf21bAD/rWAgk52SuBrLJlefNy8mvFg==} @@ -2851,27 +2860,31 @@ packages: resolution: {integrity: sha512-tSQXBXS/MWQOn/RKckawJ61vvsDpCom87JgxiYdGwHdOa0ht0vzUWDlfioofFCRU0L+6NGDt6XzbgoJvZkMeRQ==} engines: {node: '>=0.8.0'} - happy-dom@20.0.11: - resolution: {integrity: sha512-QsCdAUHAmiDeKeaNojb1OHOPF7NjcWPBR7obdu3NwH2a/oyQaLg5d0aaCy/9My6CdPChYF07dvz5chaXBGaD4g==} + happy-dom@20.3.7: + resolution: {integrity: sha512-sb5IzoRl1WJKsUSRe+IloJf3z1iDq5PQ7Yk/ULMsZ5IAQEs9ZL7RsFfiKBXU7nK9QmO+iz0e59EH8r8jexTZ/g==} engines: {node: '>=20.0.0'} has-flag@4.0.0: resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} engines: {node: '>=8'} + has-flag@5.0.1: + resolution: {integrity: sha512-CsNUt5x9LUdx6hnk/E2SZLsDyvfqANZSUq4+D3D8RzDJ2M+HDTIkF60ibS1vHaK55vzgiZw1bEPFG9yH7l33wA==} + engines: {node: '>=12'} + hash-sum@2.0.0: resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==} - hashery@1.3.0: - resolution: {integrity: sha512-fWltioiy5zsSAs9ouEnvhsVJeAXRybGCNNv0lvzpzNOSDbULXRy7ivFWwCCv4I5Am6kSo75hmbsCduOoc2/K4w==} + hashery@1.4.0: + resolution: {integrity: sha512-Wn2i1In6XFxl8Az55kkgnFRiAlIAushzh26PTjL2AKtQcEfXrcLa7Hn5QOWGZEf3LU057P9TwwZjFyxfS1VuvQ==} engines: {node: '>=20'} - hookified@1.14.0: - resolution: {integrity: sha512-pi1ynXIMFx/uIIwpWJ/5CEtOHLGtnUB0WhGeeYT+fKcQ+WCQbm3/rrkAXnpfph++PgepNqPdTC2WTj8A6k6zoQ==} + hookified@1.15.0: + resolution: {integrity: sha512-51w+ZZGt7Zw5q7rM3nC4t3aLn/xvKDETsXqMczndvwyVQhAHfUmUuFBRFcos8Iyebtk7OAE9dL26wFNzZVVOkw==} - html-tags@3.3.1: - resolution: {integrity: sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==} - engines: {node: '>=8'} + html-tags@5.1.0: 
+ resolution: {integrity: sha512-n6l5uca7/y5joxZ3LUePhzmBFUJ+U2YWzhMa8XUTecSeSlQiZdF5XAd/Q3/WUl0VsXgUwWi8I7CNIwdI5WN1SQ==} + engines: {node: '>=20.10'} htmlparser2@8.0.2: resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==} @@ -2912,6 +2925,9 @@ packages: engines: {node: '>=8'} hasBin: true + import-meta-resolve@4.2.0: + resolution: {integrity: sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg==} + imurmurhash@0.1.4: resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} engines: {node: '>=0.8.19'} @@ -2990,6 +3006,10 @@ packages: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} + is-path-inside@4.0.0: + resolution: {integrity: sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==} + engines: {node: '>=12'} + is-plain-object@2.0.4: resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==} engines: {node: '>=0.10.0'} @@ -3023,8 +3043,8 @@ packages: resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} hasBin: true - jquery@3.7.1: - resolution: {integrity: sha512-m4avr8yL8kmFN8psrbFFFmB/If14iN5o9nw/NgnnM+kybDJpRsAynV2BsfpTYrTRysYUdADVD7CkUUizgkpLfg==} + jquery@4.0.0: + resolution: {integrity: sha512-TXCHVR3Lb6TZdtw1l3RTLf8RBWVGexdxL6AC8/e0xZKEpBflBsjh9/8LXw+dkNFuOyW9B7iB3O1sP7hS0Kiacg==} js-levenshtein-esm@2.0.0: resolution: {integrity: sha512-1n4LEPOL4wRXY8rOQcuA7Iuaphe5xCMayvufCzlLAi+hRsnBRDbSS6XPuV58CBVJxj5D9ApFLyjQ7KzFToyHBw==} @@ -3039,9 +3059,9 @@ packages: resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} hasBin: true - 
jsdoc-type-pratt-parser@4.8.0: - resolution: {integrity: sha512-iZ8Bdb84lWRuGHamRXFyML07r21pcwBrLkHEuHgEY5UbCouBwv7ECknDRKzsQIXMiqpPymqtIf8TC/shYKB5rw==} - engines: {node: '>=12.0.0'} + jsdoc-type-pratt-parser@7.1.0: + resolution: {integrity: sha512-SX7q7XyCwzM/MEDCYz0l8GgGbJAACGFII9+WfNYr5SLEKukHWRy2Jk3iWRe7P+lpYJNs7oQ+OSei4JtKGUjd7A==} + engines: {node: '>=20.0.0'} jsesc@3.1.0: resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} @@ -3097,8 +3117,8 @@ packages: keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} - keyv@5.5.5: - resolution: {integrity: sha512-FA5LmZVF1VziNc0bIdCSA1IoSVnDCqE8HJIZZv2/W8YmoAM50+tnUgJR/gQZwEeIMleuIOnRnHA/UaZRNeV4iQ==} + keyv@5.6.0: + resolution: {integrity: sha512-CYDD3SOtsHtyXeEORYRx2qBtpDJFjRTGXUtmNEMGyzYOKj1TE3tycdlho7kA1Ufx9OYWZzg52QFBGALTirzDSw==} khroma@2.1.0: resolution: {integrity: sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==} @@ -3111,6 +3131,14 @@ packages: resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} engines: {node: '>=0.10.0'} + knip@5.82.1: + resolution: {integrity: sha512-1nQk+5AcnkqL40kGQXfouzAEXkTR+eSrgo/8m1d0BMei4eAzFwghoXC4gOKbACgBiCof7hE8wkBVDsEvznf85w==} + engines: {node: '>=18.18.0'} + hasBin: true + peerDependencies: + '@types/node': '>=18' + typescript: '>=5.0.4 <7' + known-css-properties@0.37.0: resolution: {integrity: sha512-JCDrsP4Z1Sb9JwG0aJ8Eo2r7k4Ou5MwmThS/6lcIe1ICyb7UBJKGRIUUdqc2ASdE/42lgz6zFUnzAIhtXnBVrQ==} @@ -3164,8 +3192,8 @@ packages: lodash-es@4.17.21: resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} - lodash-es@4.17.22: - resolution: {integrity: sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==} + 
lodash-es@4.17.23: + resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==} lodash.camelcase@4.3.0: resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} @@ -3188,8 +3216,8 @@ packages: lodash.upperfirst@4.3.1: resolution: {integrity: sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==} - lodash@4.17.21: - resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + lodash@4.17.23: + resolution: {integrity: sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==} magic-string@0.30.21: resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} @@ -3222,12 +3250,12 @@ packages: engines: {node: '>= 12'} hasBin: true - material-icon-theme@5.29.0: - resolution: {integrity: sha512-Kr6D+NgLCWYJjsTjGuIOoKUFG/uomUpLREhyV/9g4qWJMNfm7b1BYYMglRIdQg1IiY7WKqyTws8Ufsad6oFLUA==} + material-icon-theme@5.31.0: + resolution: {integrity: sha512-PPeGSRa+8stQEKvCr2Xym9KIqf2SPwl1chc7cxbK+aY6ORpwOcowtARQEXstZBjQwXTE5GnfE0zg0MFFy+XPzA==} engines: {vscode: ^1.55.0} - mathml-tag-names@2.1.3: - resolution: {integrity: sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg==} + mathml-tag-names@4.0.0: + resolution: {integrity: sha512-aa6AU2Pcx0VP/XWnh8IGL0SYSgQHDT6Ucror2j2mXeFAlN3ahaNs8EZtG1YiticMkSLj3Gt6VPFfZogt7G5iFQ==} mdn-data@2.0.28: resolution: {integrity: sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==} @@ -3238,9 +3266,9 @@ packages: mdurl@2.0.0: resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==} - meow@13.2.0: - resolution: {integrity: 
sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==} - engines: {node: '>=18'} + meow@14.0.0: + resolution: {integrity: sha512-JhC3R1f6dbspVtmF3vKjAWz1EVIvwFrGGPLSdU6rK79xBwHWTuHoLnRX/t1/zHS1Ch1Y2UtIrih7DAHuH9JFJA==} + engines: {node: '>=20'} merge-stream@2.0.0: resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} @@ -3339,8 +3367,8 @@ packages: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} engines: {node: '>= 0.6'} - mini-css-extract-plugin@2.9.4: - resolution: {integrity: sha512-ZWYT7ln73Hptxqxk2DxPU9MmapXRhxkJD6tkSR04dnQxm8BGu2hzgKLugK5yySD97u/8yy7Ma7E76k9ZdvtjkQ==} + mini-css-extract-plugin@2.10.0: + resolution: {integrity: sha512-540P2c5dYnJlyJxTaSloliZexv8rji6rY8FhQN+WF/82iHQfA23j/xtJx97L+mXOML27EqksSek/g4eK7jaL3g==} engines: {node: '>= 12.13.0'} peerDependencies: webpack: ^5.0.0 @@ -3449,13 +3477,16 @@ packages: once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - online-3d-viewer@0.17.0: - resolution: {integrity: sha512-CTymQf5hozDHCqgypWYTmwq6+moVyWSDZdCkSovGklipP1oQy7YCEupLvkmJjex27Sxeeyq2Q9GH3+cxKUwpvg==} + online-3d-viewer@0.18.0: + resolution: {integrity: sha512-y7ZlV/zkakNUyjqcXz6XecA7vXgLEUnaAey9tyx8o6/wcdV64RfjXAQOjGXGY2JOZoDi4Cg1ic9icSWMWAvRQA==} optionator@0.9.4: resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} engines: {node: '>= 0.8.0'} + oxc-resolver@11.16.4: + resolution: {integrity: sha512-nvJr3orFz1wNaBA4neRw7CAn0SsjgVaEw1UHpgO/lzVW12w+nsFnvU/S6vVX3kYyFaZdxZheTExi/fa8R8PrZA==} + p-limit@2.3.0: resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} engines: {node: '>=6'} @@ -3511,18 +3542,14 @@ packages: path-parse@1.0.7: resolution: 
{integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - path-type@4.0.0: - resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} - engines: {node: '>=8'} - pathe@2.0.3: resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} pdfobject@2.3.1: resolution: {integrity: sha512-vluuGiSDmMGpOvWFGiUY4trNB8aGKLDVxIXuuGHjX0kK3bMxCANUVtLivctE7uejLBScWCnbVarKatFVvdwXaQ==} - perfect-debounce@2.0.0: - resolution: {integrity: sha512-fkEH/OBiKrqqI/yIgjR92lMfs2K8105zt/VT6+7eTjNwisrsh47CeIED9z58zI7DfKdH3uHAn25ziRZn3kgAow==} + perfect-debounce@2.1.0: + resolution: {integrity: sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==} picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -3550,13 +3577,13 @@ packages: pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} - playwright-core@1.57.0: - resolution: {integrity: sha512-agTcKlMw/mjBWOnD6kFZttAAGHgi/Nw0CZ2o6JqWSbMlI219lAFLZZCyqByTsvVAJq5XA5H8cA6PrvBRpBWEuQ==} + playwright-core@1.58.0: + resolution: {integrity: sha512-aaoB1RWrdNi3//rOeKuMiS65UCcgOVljU46At6eFcOFPFHWtd2weHRRow6z/n+Lec0Lvu0k9ZPKJSjPugikirw==} engines: {node: '>=18'} hasBin: true - playwright@1.57.0: - resolution: {integrity: sha512-ilYQj1s8sr2ppEJ2YVadYBN0Mb3mdo9J0wQ+UuDhzYqURwSoW4n1Xs5vs7ORwgDGmyEh33tRMeS8KhdkMoLXQw==} + playwright@1.58.0: + resolution: {integrity: sha512-2SVA0sbPktiIY/MCOPX8e86ehA/e+tDNq+e5Y8qjKYti2Z/JG7xnronT/TXTIkKbYGWlCbuucZ6dziEgkoEjQQ==} engines: {node: '>=18'} hasBin: true @@ -3570,8 +3597,8 @@ packages: points-on-path@0.2.1: resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==} - 
postcss-html@1.8.0: - resolution: {integrity: sha512-5mMeb1TgLWoRKxZ0Xh9RZDfwUUIqRrcxO2uXO+Ezl1N5lqpCiSU5Gk6+1kZediBfBHFtPCdopr2UZ2SgUsKcgQ==} + postcss-html@1.8.1: + resolution: {integrity: sha512-OLF6P7qctfAWayOhLpcVnTGqVeJzu2W3WpIYelfz2+JV5oGxfkcEvweN9U4XpeqE0P98dcD9ssusGwlF0TK0uQ==} engines: {node: ^12 || >=14} postcss-import@15.1.0: @@ -3641,9 +3668,6 @@ packages: peerDependencies: postcss: ^8.2.14 - postcss-resolve-nested-selector@0.1.6: - resolution: {integrity: sha512-0sglIs9Wmkzbr8lQwEyIzlDOOC9bGmfVKcJTaxv3vMmd3uo4o4DerC3En0bnmgceeql9BfC8hRkp7cg0fjdVqw==} - postcss-safe-parser@6.0.0: resolution: {integrity: sha512-FARHN8pwH+WiS2OPCxJI8FuRJpTVnn6ZNFiqAM2aeW2LwTHWWmWgIyKC6cUo0L8aeKiF/14MNvnpls6R2PBeMQ==} engines: {node: '>=12.0'} @@ -3685,12 +3709,12 @@ packages: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} - prettier-linter-helpers@1.0.0: - resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} + prettier-linter-helpers@1.0.1: + resolution: {integrity: sha512-SxToR7P8Y2lWmv/kTzVLC1t/GDI2WGjMwNhLLE9qtH8Q13C+aEmuRlzDst4Up4s0Wc8sF2M+J57iB3cMLqftfg==} engines: {node: '>=6.0.0'} - prettier@3.7.4: - resolution: {integrity: sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==} + prettier@3.8.1: + resolution: {integrity: sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==} engines: {node: '>=14'} hasBin: true @@ -3702,8 +3726,8 @@ packages: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} - qified@0.5.3: - resolution: {integrity: sha512-kXuQdQTB6oN3KhI6V4acnBSZx8D2I4xzZvn9+wFLLFCoBNQY/sFnCW6c43OL7pOQ2HvGV4lnWIXNmgfp7cTWhQ==} + qified@0.6.0: + resolution: {integrity: 
sha512-tsSGN1x3h569ZSU1u6diwhltLyfUWDp3YbFHedapTmpBl0B3P6U3+Qptg7xu+v+1io1EwhdPyyRHYbEw0KN2FA==} engines: {node: '>=20'} queue-microtask@1.2.3: @@ -3774,8 +3798,8 @@ packages: robust-predicates@3.0.2: resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} - rollup@4.53.5: - resolution: {integrity: sha512-iTNAbFSlRpcHeeWu73ywU/8KuU/LZmNCSxp6fjQkJBD3ivUb8tpDrXhIxEzA05HlYMEwmtaUnb3RP+YNv162OQ==} + rollup@4.56.0: + resolution: {integrity: sha512-9FwVqlgUHzbXtDg9RCMgodF3Ua4Na6Gau+Sdt9vyCN4RhHfVKX2DCHy3BjMLTDd47ITDhYAnTwGulWTblJSDLg==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -3795,8 +3819,9 @@ packages: sax@1.2.4: resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==} - sax@1.4.3: - resolution: {integrity: sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==} + sax@1.4.4: + resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==} + engines: {node: '>=11.0.0'} schema-utils@4.3.3: resolution: {integrity: sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==} @@ -3823,14 +3848,14 @@ packages: serialize-javascript@6.0.2: resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==} - seroval-plugins@1.3.3: - resolution: {integrity: sha512-16OL3NnUBw8JG1jBLUoZJsLnQq0n5Ua6aHalhJK4fMQkz1lqR7Osz1sA30trBtd9VUDc2NgkuRCn8+/pBwqZ+w==} + seroval-plugins@1.5.0: + resolution: {integrity: sha512-EAHqADIQondwRZIdeW2I636zgsODzoBDwb3PT/+7TLDWyw1Dy/Xv7iGUIEXXav7usHDE9HVhOU61irI3EnyyHA==} engines: {node: '>=10'} peerDependencies: seroval: ^1.0 - seroval@1.3.2: - resolution: {integrity: sha512-RbcPH1n5cfwKrru7v7+zrZvjLurgHhGyso3HTyGtRivGWgYjbOmGuivCQaORNELjNONoK35nj28EoWul9sb1zQ==} + seroval@1.5.0: + resolution: {integrity: 
sha512-OE4cvmJ1uSPrKorFIH9/w/Qwuvi/IMcGbv5RKgcJ/zjA/IohDLU6SVaxFN9FwajbP7nsX0dQqMDes1whk3y+yw==} engines: {node: '>=10'} shallow-clone@3.0.1: @@ -3852,9 +3877,9 @@ packages: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} - slash@3.0.0: - resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} - engines: {node: '>=8'} + slash@5.1.0: + resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==} + engines: {node: '>=14.16'} slice-ansi@4.0.0: resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} @@ -3864,8 +3889,12 @@ packages: resolution: {integrity: sha512-QlaZEqcAH3/RtNyet1IPIYPsEWAaYyXXv1Krsi+1L/QHppjX4Ifm8MQsBISz9vE8cHicIq3clogsheili5vhaQ==} engines: {node: '>= 18'} - solid-js@1.9.10: - resolution: {integrity: sha512-Coz956cos/EPDlhs6+jsdTxKuJDPT7B5SVIWgABwROyxjY7Xbr8wkzD68Et+NxnV7DLJ3nJdAC2r9InuV/4Jew==} + smol-toml@1.6.0: + resolution: {integrity: sha512-4zemZi0HvTnYwLfrpk/CF9LOd9Lt87kAt50GnqhMpyF9U3poDAP2+iukq2bZsO/ufegbYehBkqINbsWxj4l4cw==} + engines: {node: '>= 18'} + + solid-js@1.9.11: + resolution: {integrity: sha512-WEJtcc5mkh/BnHA6Yrg4whlF8g6QwpmXXRg4P2ztPmcKeHHlH4+djYecBLhSpecZY2RRECXYUwIc/C2r3yzQ4Q==} solid-transition-group@0.2.3: resolution: {integrity: sha512-iB72c9N5Kz9ykRqIXl0lQohOau4t0dhel9kjwFvx81UZJbVwaChMuBuyhiZmK24b8aKEK0w3uFM96ZxzcyZGdg==} @@ -3966,14 +3995,18 @@ packages: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} + strip-json-comments@5.0.3: + resolution: {integrity: sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==} + engines: {node: '>=14.16'} + style-search@0.1.0: resolution: {integrity: 
sha512-Dj1Okke1C3uKKwQcetra4jSuk0DqbzbYtXipzFlFMZtowbF1x7BKJwB9AayVMyFARvU8EDrZdcax4At/452cAg==} - stylelint-config-recommended@17.0.0: - resolution: {integrity: sha512-WaMSdEiPfZTSFVoYmJbxorJfA610O0tlYuU2aEwY33UQhSPgFbClrVJYWvy3jGJx+XW37O+LyNLiZOEXhKhJmA==} - engines: {node: '>=18.12.0'} + stylelint-config-recommended@18.0.0: + resolution: {integrity: sha512-mxgT2XY6YZ3HWWe3Di8umG6aBmWmHTblTgu/f10rqFXnyWxjKWwNdjSWkgkwCtxIKnqjSJzvFmPT5yabVIRxZg==} + engines: {node: '>=20.19.0'} peerDependencies: - stylelint: ^16.23.0 + stylelint: ^17.0.0 stylelint-declaration-block-no-ignored-properties@2.8.0: resolution: {integrity: sha512-Ws8Cav7Y+SPN0JsV407LrnNXWOrqGjxShf+37GBtnU/C58Syve9c0+I/xpLcFOosST3ternykn3Lp77f3ITnFw==} @@ -3987,15 +4020,15 @@ packages: peerDependencies: stylelint: '>=7 <=16' - stylelint-value-no-unknown-custom-properties@6.0.1: - resolution: {integrity: sha512-N60PTdaTknB35j6D4FhW0GL2LlBRV++bRpXMMldWMQZ240yFQaoltzlLY4lXXs7Z0J5mNUYZQ/gjyVtU2DhCMA==} + stylelint-value-no-unknown-custom-properties@6.1.1: + resolution: {integrity: sha512-eQ1zidKD5t9zMEaskjGUY4W47lH76qMlmsDSmCAPEwtaGzB4Ls7ORTfysC1D6hamp2zFC+vN1vpQ+GFz3Tw3lw==} engines: {node: '>=18.12.0'} peerDependencies: stylelint: '>=16' - stylelint@16.26.1: - resolution: {integrity: sha512-v20V59/crfc8sVTAtge0mdafI3AdnzQ2KsWe6v523L4OA1bJO02S7MO2oyXDCS6iWb9ckIPnqAFVItqSBQr7jw==} - engines: {node: '>=18.12.0'} + stylelint@17.0.0: + resolution: {integrity: sha512-saMZ2mqdQre4AfouxcbTdpVglDRcROb4MIucKHvgsDb/0IX7ODhcaz+EOIyfxAsm8Zjl/7j4hJj6MgIYYM8Xwg==} + engines: {node: '>=20.19.0'} hasBin: true stylis@4.3.6: @@ -4013,6 +4046,10 @@ packages: superstruct@0.10.13: resolution: {integrity: sha512-W4SitSZ9MOyMPbHreoZVEneSZyPEeNGbdfJo/7FkJyRs/M3wQRFzq+t3S/NBwlrFSWdx1ONLjLb9pB+UKe4IqQ==} + supports-color@10.2.2: + resolution: {integrity: sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==} + engines: {node: '>=18'} + supports-color@7.2.0: resolution: {integrity: 
sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} engines: {node: '>=8'} @@ -4021,9 +4058,9 @@ packages: resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} engines: {node: '>=10'} - supports-hyperlinks@3.2.0: - resolution: {integrity: sha512-zFObLMyZeEwzAoKCyu1B91U79K2t7ApXuQfo8OuxwXLDgcKxuwM+YvcbIhm6QWqz7mHUH1TVytR1PwVVjEuMig==} - engines: {node: '>=14.18'} + supports-hyperlinks@4.4.0: + resolution: {integrity: sha512-UKbpT93hN5Nr9go5UY7bopIB9YQlMz9nm/ct4IXt/irb5YRkn9WaqrOBJGZ5Pwvsd5FQzSVeYlGdXoCAPQZrPg==} + engines: {node: '>=20'} supports-preserve-symlinks-flag@1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} @@ -4050,8 +4087,8 @@ packages: resolution: {integrity: sha512-esiWJ7ixSKGpd9DJPBTC4ckChqdOjIwJfYhVHkcQ2Gnm41323p1TRmEI+esTQ9ppD+b5opps2OTEGTCGX5kF+g==} engines: {node: '>=14'} - synckit@0.11.11: - resolution: {integrity: sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==} + synckit@0.11.12: + resolution: {integrity: sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==} engines: {node: ^14.18.0 || >=16.0.0} table@6.9.0: @@ -4083,8 +4120,8 @@ packages: uglify-js: optional: true - terser@5.44.1: - resolution: {integrity: sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==} + terser@5.46.0: + resolution: {integrity: sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==} engines: {node: '>=10'} hasBin: true @@ -4136,8 +4173,8 @@ packages: tributejs@5.1.3: resolution: {integrity: sha512-B5CXihaVzXw+1UHhNFyAwUTMDk1EfoLP5Tj1VhD9yybZ1I8DZJEv8tZ1l0RJo0t0tk9ZhR8eG5tEsaCvRigmdQ==} - ts-api-utils@2.1.0: - resolution: {integrity: 
sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==} + ts-api-utils@2.4.0: + resolution: {integrity: sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==} engines: {node: '>=18.12'} peerDependencies: typescript: '>=4.8.4' @@ -4159,8 +4196,8 @@ packages: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} - typescript-eslint@8.50.0: - resolution: {integrity: sha512-Q1/6yNUmCpH94fbgMUMg2/BSAr/6U7GBk61kZTv1/asghQOWOjTlp9K8mixS5NcJmm2creY+UFfGeW/+OcA64A==} + typescript-eslint@8.53.1: + resolution: {integrity: sha512-gB+EVQfP5RDElh9ittfXlhZJdjSU4jUSTyE2+ia8CYyNvet4ElfaLlAIqDvQV9JPknKx0jQH1racTYe/4LaLSg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -4177,18 +4214,19 @@ packages: uc.micro@2.1.0: resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} - ufo@1.6.1: - resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} + ufo@1.6.3: + resolution: {integrity: sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==} uint8-to-base64@0.2.1: resolution: {integrity: sha512-uO/84GaoDUfiAxpa8EksjVLE77A9Kc7ZTziN4zRpq4de9yLaLcZn3jx1/sVjyupsywcVX6RKWbqLe7gUNyzH+Q==} - undici-types@6.21.0: - resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} - undici-types@7.16.0: resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} + unicorn-magic@0.4.0: + resolution: {integrity: sha512-wH590V9VNgYH9g3lH9wWjTrUoKsjLF6sGLjhR4sH1LWpLmCOH0Zf7PukhDA8BiS7KHe4oPNkcTHqYkj7SOGUOw==} + engines: {node: '>=20'} + unrs-resolver@1.11.1: resolution: {integrity: 
sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==} @@ -4198,8 +4236,8 @@ packages: peerDependencies: browserslist: '>= 4.21.0' - updates@17.0.7: - resolution: {integrity: sha512-VyFnSuoXC5qxpq2XVM2BaR0sjTXpDGYj6aTCNu92KvxtnpBVh1nPYJsSXKktgRGx4jsGXFjLhPsXndYpv/o8AA==} + updates@17.0.8: + resolution: {integrity: sha512-K5z2Dgni1V2JUllUC0AvIxL1E31rn1ZTU1bvZyTwvxLeAcLmhC0RZyWOrUcJJvlFBxFgxLz5cqTnbFXTEYdIjA==} engines: {node: '>=22'} hasBin: true @@ -4216,11 +4254,13 @@ packages: vanilla-colorful@0.7.2: resolution: {integrity: sha512-z2YZusTFC6KnLERx1cgoIRX2CjPRP0W75N+3CC6gbvdX5Ch47rZkEMGO2Xnf+IEmi3RiFLxS18gayMA27iU7Kg==} - vite-string-plugin@1.4.9: - resolution: {integrity: sha512-mO7PVkMs8+FuTK9ZjBBCRSjabC9cobvUEbN2EjWtGJo6nu35SbW99bYesOh5Ho39ug/KSbT4VwM4GPC26Xk/mQ==} + vite-string-plugin@1.5.0: + resolution: {integrity: sha512-Ne+67igo8iNoM7N1/59EhQxS1QYvFE3xMK2BEzVKj6QdBRxpOIfG8Y7Uvyv7aDWK+HneU9iwqS1nlaKwtC9OTQ==} + peerDependencies: + vite: '*' - vite@7.3.0: - resolution: {integrity: sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==} + vite@7.3.1: + resolution: {integrity: sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: @@ -4259,18 +4299,18 @@ packages: yaml: optional: true - vitest@4.0.16: - resolution: {integrity: sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==} + vitest@4.0.18: + resolution: {integrity: sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==} engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' '@opentelemetry/api': ^1.9.0 '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 - '@vitest/browser-playwright': 4.0.16 - '@vitest/browser-preview': 4.0.16 - '@vitest/browser-webdriverio': 4.0.16 - '@vitest/ui': 4.0.16 
+ '@vitest/browser-playwright': 4.0.18 + '@vitest/browser-preview': 4.0.18 + '@vitest/browser-webdriverio': 4.0.18 + '@vitest/ui': 4.0.18 happy-dom: '*' jsdom: '*' peerDependenciesMeta: @@ -4343,22 +4383,26 @@ packages: vue: optional: true - vue-tsc@3.1.8: - resolution: {integrity: sha512-deKgwx6exIHeZwF601P1ktZKNF0bepaSN4jBU3AsbldPx9gylUc1JDxYppl82yxgkAgaz0Y0LCLOi+cXe9HMYA==} + vue-tsc@3.2.3: + resolution: {integrity: sha512-1RdRB7rQXGFMdpo0aXf9spVzWEPGAk7PEb/ejHQwVrcuQA/HsGiixIc3uBQeqY2YjeEEgvr2ShQewBgcN4c1Cw==} hasBin: true peerDependencies: typescript: '>=5.0.0' - vue@3.5.25: - resolution: {integrity: sha512-YLVdgv2K13WJ6n+kD5owehKtEXwdwXuj2TTyJMsO7pSeKw2bfRNZGjhB7YzrpbMYj5b5QsUebHpOqR3R3ziy/g==} + vue@3.5.27: + resolution: {integrity: sha512-aJ/UtoEyFySPBGarREmN4z6qNKpbEguYHMmXSiOGk69czc+zhs0NF6tEFrY8TZKAl8N/LYAkd4JHVd5E/AsSmw==} peerDependencies: typescript: '*' peerDependenciesMeta: typescript: optional: true - watchpack@2.4.4: - resolution: {integrity: sha512-c5EGNOiyxxV5qmTtAB7rbiXxi1ooX1pQKMLX/MIabJjRA0SJBQOjKF+KSVfHkr9U1cADPon0mRiVe/riyaiDUA==} + walk-up-path@4.0.0: + resolution: {integrity: sha512-3hu+tD8YzSLGuFYtPRb48vdhKMi0KQV5sn+uWr8+7dMEq/2G/dtLrdDinkLjqq5TIbIBjYJ4Ax/n3YiaW7QM8A==} + engines: {node: 20 || >=22} + + watchpack@2.5.1: + resolution: {integrity: sha512-Zn5uXdcFNIA1+1Ei5McRd+iRzfhENPCe7LeABkJtNulSxjma+l7ltNx55BWZkRlwRnpOgHqxnjyaDgJnNXnqzg==} engines: {node: '>=10.13.0'} webidl-conversions@3.0.1: @@ -4389,8 +4433,8 @@ packages: resolution: {integrity: sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==} engines: {node: '>=10.13.0'} - webpack@5.104.0: - resolution: {integrity: sha512-5DeICTX8BVgNp6afSPYXAFjskIgWGlygQH58bcozPOXgo2r/6xx39Y1+cULZ3gTxUYQP88jmwLj2anu4Xaq84g==} + webpack@5.104.1: + resolution: {integrity: sha512-Qphch25abbMNtekmEGJmeRUhLDbe+QfiWTiqpKYkpCOWY64v9eyl+KRRLmqOFA2AvKPpc9DC6+u2n76tQLBoaA==} engines: {node: '>=10.13.0'} hasBin: true peerDependencies: @@ -4438,9 +4482,21 @@ 
packages: wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} - write-file-atomic@5.0.1: - resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + write-file-atomic@7.0.0: + resolution: {integrity: sha512-YnlPC6JqnZl6aO4uRc+dx5PHguiR9S6WeoLtpxNT9wIG+BDya7ZNE1q7KOjVgaA73hKhKLpVPgJ5QA9THQ5BRg==} + engines: {node: ^20.17.0 || >=22.9.0} + + ws@8.19.0: + resolution: {integrity: sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true xml-lexer@0.2.2: resolution: {integrity: sha512-G0i98epIwiUEiKmMcavmVdhtymW+pCAohMRgybyIME9ygfVu8QheIi+YoQh3ngiThsT0SQzJT4R0sKDEv8Ou0w==} @@ -4461,6 +4517,9 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + zod@4.3.6: + resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==} + snapshots: '@alloc/quick-lru@5.2.0': {} @@ -4470,7 +4529,7 @@ snapshots: package-manager-detector: 1.6.0 tinyexec: 1.0.2 - '@babel/code-frame@7.27.1': + '@babel/code-frame@7.28.6': dependencies: '@babel/helper-validator-identifier': 7.28.5 js-tokens: 4.0.0 @@ -4480,30 +4539,30 @@ snapshots: '@babel/helper-validator-identifier@7.28.5': {} - '@babel/parser@7.28.5': + '@babel/parser@7.28.6': dependencies: - '@babel/types': 7.28.5 + '@babel/types': 7.28.6 - '@babel/runtime@7.28.4': {} + '@babel/runtime@7.28.6': {} - '@babel/types@7.28.5': + '@babel/types@7.28.6': dependencies: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.28.5 
'@braintree/sanitize-url@7.1.1': {} - '@cacheable/memory@2.0.6': + '@cacheable/memory@2.0.7': dependencies: - '@cacheable/utils': 2.3.2 - '@keyv/bigmap': 1.3.0(keyv@5.5.5) - hookified: 1.14.0 - keyv: 5.5.5 + '@cacheable/utils': 2.3.3 + '@keyv/bigmap': 1.3.1(keyv@5.6.0) + hookified: 1.15.0 + keyv: 5.6.0 - '@cacheable/utils@2.3.2': + '@cacheable/utils@2.3.3': dependencies: - hashery: 1.3.0 - keyv: 5.5.5 + hashery: 1.4.0 + keyv: 5.6.0 '@chevrotain/cst-dts-gen@11.0.3': dependencies: @@ -4542,72 +4601,74 @@ snapshots: '@citation-js/name': 0.4.2 moo: 0.5.2 - '@citation-js/plugin-cff@0.6.1': + '@citation-js/plugin-cff@0.6.2': dependencies: '@citation-js/date': 0.5.1 - '@citation-js/plugin-yaml': 0.6.1 + '@citation-js/plugin-yaml': 0.6.2 - '@citation-js/plugin-csl@0.7.21(@citation-js/core@0.7.21)': + '@citation-js/plugin-csl@0.7.22(@citation-js/core@0.7.21)': dependencies: '@citation-js/core': 0.7.21 '@citation-js/date': 0.5.1 citeproc: 2.4.63 - '@citation-js/plugin-github@0.6.1': + '@citation-js/plugin-github@0.6.2': dependencies: '@citation-js/date': 0.5.1 '@citation-js/name': 0.4.2 - '@citation-js/plugin-npm@0.6.1': + '@citation-js/plugin-npm@0.6.2': dependencies: '@citation-js/date': 0.5.1 '@citation-js/name': 0.4.2 - '@citation-js/plugin-software-formats@0.6.1': + '@citation-js/plugin-software-formats@0.6.2': dependencies: - '@citation-js/plugin-cff': 0.6.1 - '@citation-js/plugin-github': 0.6.1 - '@citation-js/plugin-npm': 0.6.1 - '@citation-js/plugin-yaml': 0.6.1 - '@citation-js/plugin-zenodo': 0.6.1 + '@citation-js/plugin-cff': 0.6.2 + '@citation-js/plugin-github': 0.6.2 + '@citation-js/plugin-npm': 0.6.2 + '@citation-js/plugin-yaml': 0.6.2 + '@citation-js/plugin-zenodo': 0.6.2 - '@citation-js/plugin-yaml@0.6.1': + '@citation-js/plugin-yaml@0.6.2': dependencies: js-yaml: 4.1.1 - '@citation-js/plugin-zenodo@0.6.1': + '@citation-js/plugin-zenodo@0.6.2': dependencies: '@citation-js/date': 0.5.1 '@citation-js/name': 0.4.2 - 
'@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4)': + '@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0)': dependencies: - '@csstools/css-tokenizer': 3.0.4 + '@csstools/css-tokenizer': 4.0.0 - '@csstools/css-syntax-patches-for-csstree@1.0.21': {} + '@csstools/css-syntax-patches-for-csstree@1.0.25': {} - '@csstools/css-tokenizer@3.0.4': {} + '@csstools/css-tokenizer@4.0.0': {} - '@csstools/media-query-list-parser@4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + '@csstools/media-query-list-parser@5.0.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)': dependencies: - '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) - '@csstools/css-tokenizer': 3.0.4 + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-tokenizer': 4.0.0 - '@csstools/selector-specificity@5.0.0(postcss-selector-parser@7.1.1)': + '@csstools/selector-resolve-nested@4.0.0(postcss-selector-parser@7.1.1)': + dependencies: + postcss-selector-parser: 7.1.1 + + '@csstools/selector-specificity@6.0.0(postcss-selector-parser@7.1.1)': dependencies: postcss-selector-parser: 7.1.1 '@discoveryjs/json-ext@0.6.3': {} - '@dual-bundle/import-meta-resolve@4.2.1': {} - - '@emnapi/core@1.7.1': + '@emnapi/core@1.8.1': dependencies: '@emnapi/wasi-threads': 1.1.0 tslib: 2.8.1 optional: true - '@emnapi/runtime@1.7.1': + '@emnapi/runtime@1.8.1': dependencies: tslib: 2.8.1 optional: true @@ -4617,169 +4678,91 @@ snapshots: tslib: 2.8.1 optional: true - '@esbuild/aix-ppc64@0.25.12': - optional: true - '@esbuild/aix-ppc64@0.27.2': optional: true - '@esbuild/android-arm64@0.25.12': - optional: true - '@esbuild/android-arm64@0.27.2': optional: true - '@esbuild/android-arm@0.25.12': - optional: true - '@esbuild/android-arm@0.27.2': optional: true - '@esbuild/android-x64@0.25.12': - optional: true - 
'@esbuild/android-x64@0.27.2': optional: true - '@esbuild/darwin-arm64@0.25.12': - optional: true - '@esbuild/darwin-arm64@0.27.2': optional: true - '@esbuild/darwin-x64@0.25.12': - optional: true - '@esbuild/darwin-x64@0.27.2': optional: true - '@esbuild/freebsd-arm64@0.25.12': - optional: true - '@esbuild/freebsd-arm64@0.27.2': optional: true - '@esbuild/freebsd-x64@0.25.12': - optional: true - '@esbuild/freebsd-x64@0.27.2': optional: true - '@esbuild/linux-arm64@0.25.12': - optional: true - '@esbuild/linux-arm64@0.27.2': optional: true - '@esbuild/linux-arm@0.25.12': - optional: true - '@esbuild/linux-arm@0.27.2': optional: true - '@esbuild/linux-ia32@0.25.12': - optional: true - '@esbuild/linux-ia32@0.27.2': optional: true - '@esbuild/linux-loong64@0.25.12': - optional: true - '@esbuild/linux-loong64@0.27.2': optional: true - '@esbuild/linux-mips64el@0.25.12': - optional: true - '@esbuild/linux-mips64el@0.27.2': optional: true - '@esbuild/linux-ppc64@0.25.12': - optional: true - '@esbuild/linux-ppc64@0.27.2': optional: true - '@esbuild/linux-riscv64@0.25.12': - optional: true - '@esbuild/linux-riscv64@0.27.2': optional: true - '@esbuild/linux-s390x@0.25.12': - optional: true - '@esbuild/linux-s390x@0.27.2': optional: true - '@esbuild/linux-x64@0.25.12': - optional: true - '@esbuild/linux-x64@0.27.2': optional: true - '@esbuild/netbsd-arm64@0.25.12': - optional: true - '@esbuild/netbsd-arm64@0.27.2': optional: true - '@esbuild/netbsd-x64@0.25.12': - optional: true - '@esbuild/netbsd-x64@0.27.2': optional: true - '@esbuild/openbsd-arm64@0.25.12': - optional: true - '@esbuild/openbsd-arm64@0.27.2': optional: true - '@esbuild/openbsd-x64@0.25.12': - optional: true - '@esbuild/openbsd-x64@0.27.2': optional: true - '@esbuild/openharmony-arm64@0.25.12': - optional: true - '@esbuild/openharmony-arm64@0.27.2': optional: true - '@esbuild/sunos-x64@0.25.12': - optional: true - '@esbuild/sunos-x64@0.27.2': optional: true - '@esbuild/win32-arm64@0.25.12': - optional: true - 
'@esbuild/win32-arm64@0.27.2': optional: true - '@esbuild/win32-ia32@0.25.12': - optional: true - '@esbuild/win32-ia32@0.27.2': optional: true - '@esbuild/win32-x64@0.25.12': - optional: true - '@esbuild/win32-x64@0.27.2': optional: true - '@eslint-community/eslint-plugin-eslint-comments@4.5.0(eslint@9.39.2(jiti@2.6.1))': + '@eslint-community/eslint-plugin-eslint-comments@4.6.0(eslint@9.39.2(jiti@2.6.1))': dependencies: escape-string-regexp: 4.0.0 eslint: 9.39.2(jiti@2.6.1) - ignore: 5.3.2 + ignore: 7.0.5 - '@eslint-community/eslint-utils@4.9.0(eslint@9.39.2(jiti@2.6.1))': + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.2(jiti@2.6.1))': dependencies: eslint: 9.39.2(jiti@2.6.1) eslint-visitor-keys: 3.4.3 @@ -4850,7 +4833,7 @@ snapshots: '@github/relative-time-element@5.0.0': {} - '@github/text-expander-element@2.9.2': + '@github/text-expander-element@2.9.4': dependencies: '@github/combobox-nav': 2.3.1 dom-input-range: 2.0.1 @@ -4901,11 +4884,11 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 - '@keyv/bigmap@1.3.0(keyv@5.5.5)': + '@keyv/bigmap@1.3.1(keyv@5.6.0)': dependencies: - hashery: 1.3.0 - hookified: 1.14.0 - keyv: 5.5.5 + hashery: 1.4.0 + hookified: 1.15.0 + keyv: 5.6.0 '@keyv/serialize@1.1.1': {} @@ -4923,8 +4906,15 @@ snapshots: '@napi-rs/wasm-runtime@0.2.12': dependencies: - '@emnapi/core': 1.7.1 - '@emnapi/runtime': 1.7.1 + '@emnapi/core': 1.8.1 + '@emnapi/runtime': 1.8.1 + '@tybys/wasm-util': 0.10.1 + optional: true + + '@napi-rs/wasm-runtime@1.1.1': + dependencies: + '@emnapi/core': 1.8.1 + '@emnapi/runtime': 1.8.1 '@tybys/wasm-util': 0.10.1 optional: true @@ -4938,7 +4928,7 @@ snapshots: '@nodelib/fs.walk@1.2.8': dependencies: '@nodelib/fs.scandir': 2.1.5 - fastq: 1.19.1 + fastq: 1.20.1 '@nolyfill/array-includes@1.0.44': dependencies: @@ -4992,15 +4982,77 @@ snapshots: dependencies: '@nolyfill/shared': 1.0.44 + '@oxc-resolver/binding-android-arm-eabi@11.16.4': + optional: true + + 
'@oxc-resolver/binding-android-arm64@11.16.4': + optional: true + + '@oxc-resolver/binding-darwin-arm64@11.16.4': + optional: true + + '@oxc-resolver/binding-darwin-x64@11.16.4': + optional: true + + '@oxc-resolver/binding-freebsd-x64@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-arm-gnueabihf@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-arm-musleabihf@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-arm64-gnu@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-arm64-musl@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-ppc64-gnu@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-riscv64-gnu@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-riscv64-musl@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-s390x-gnu@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-x64-gnu@11.16.4': + optional: true + + '@oxc-resolver/binding-linux-x64-musl@11.16.4': + optional: true + + '@oxc-resolver/binding-openharmony-arm64@11.16.4': + optional: true + + '@oxc-resolver/binding-wasm32-wasi@11.16.4': + dependencies: + '@napi-rs/wasm-runtime': 1.1.1 + optional: true + + '@oxc-resolver/binding-win32-arm64-msvc@11.16.4': + optional: true + + '@oxc-resolver/binding-win32-ia32-msvc@11.16.4': + optional: true + + '@oxc-resolver/binding-win32-x64-msvc@11.16.4': + optional: true + '@pkgr/core@0.2.9': {} - '@playwright/test@1.57.0': + '@playwright/test@1.58.0': dependencies: - playwright: 1.57.0 + playwright: 1.58.0 '@popperjs/core@2.11.8': {} - '@primer/octicons@19.21.1': + '@primer/octicons@19.21.2': dependencies: object-assign: 4.1.1 @@ -5008,134 +5060,145 @@ snapshots: '@rolldown/pluginutils@1.0.0-beta.53': {} - '@rollup/rollup-android-arm-eabi@4.53.5': + '@rollup/rollup-android-arm-eabi@4.56.0': optional: true - '@rollup/rollup-android-arm64@4.53.5': + '@rollup/rollup-android-arm64@4.56.0': optional: true - '@rollup/rollup-darwin-arm64@4.53.5': + '@rollup/rollup-darwin-arm64@4.56.0': 
optional: true - '@rollup/rollup-darwin-x64@4.53.5': + '@rollup/rollup-darwin-x64@4.56.0': optional: true - '@rollup/rollup-freebsd-arm64@4.53.5': + '@rollup/rollup-freebsd-arm64@4.56.0': optional: true - '@rollup/rollup-freebsd-x64@4.53.5': + '@rollup/rollup-freebsd-x64@4.56.0': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.53.5': + '@rollup/rollup-linux-arm-gnueabihf@4.56.0': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.53.5': + '@rollup/rollup-linux-arm-musleabihf@4.56.0': optional: true - '@rollup/rollup-linux-arm64-gnu@4.53.5': + '@rollup/rollup-linux-arm64-gnu@4.56.0': optional: true - '@rollup/rollup-linux-arm64-musl@4.53.5': + '@rollup/rollup-linux-arm64-musl@4.56.0': optional: true - '@rollup/rollup-linux-loong64-gnu@4.53.5': + '@rollup/rollup-linux-loong64-gnu@4.56.0': optional: true - '@rollup/rollup-linux-ppc64-gnu@4.53.5': + '@rollup/rollup-linux-loong64-musl@4.56.0': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.53.5': + '@rollup/rollup-linux-ppc64-gnu@4.56.0': optional: true - '@rollup/rollup-linux-riscv64-musl@4.53.5': + '@rollup/rollup-linux-ppc64-musl@4.56.0': optional: true - '@rollup/rollup-linux-s390x-gnu@4.53.5': + '@rollup/rollup-linux-riscv64-gnu@4.56.0': optional: true - '@rollup/rollup-linux-x64-gnu@4.53.5': + '@rollup/rollup-linux-riscv64-musl@4.56.0': optional: true - '@rollup/rollup-linux-x64-musl@4.53.5': + '@rollup/rollup-linux-s390x-gnu@4.56.0': optional: true - '@rollup/rollup-openharmony-arm64@4.53.5': + '@rollup/rollup-linux-x64-gnu@4.56.0': optional: true - '@rollup/rollup-win32-arm64-msvc@4.53.5': + '@rollup/rollup-linux-x64-musl@4.56.0': optional: true - '@rollup/rollup-win32-ia32-msvc@4.53.5': + '@rollup/rollup-openbsd-x64@4.56.0': optional: true - '@rollup/rollup-win32-x64-gnu@4.53.5': + '@rollup/rollup-openharmony-arm64@4.56.0': optional: true - '@rollup/rollup-win32-x64-msvc@4.53.5': + '@rollup/rollup-win32-arm64-msvc@4.56.0': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.56.0': + 
optional: true + + '@rollup/rollup-win32-x64-gnu@4.56.0': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.56.0': optional: true '@rtsao/scc@1.1.0': {} '@scarf/scarf@1.4.0': {} - '@silverwind/vue3-calendar-heatmap@2.1.1(tippy.js@6.3.7)(vue@3.5.25(typescript@5.9.3))': + '@silverwind/vue3-calendar-heatmap@2.1.1(tippy.js@6.3.7)(vue@3.5.27(typescript@5.9.3))': dependencies: tippy.js: 6.3.7 - vue: 3.5.25(typescript@5.9.3) + vue: 3.5.27(typescript@5.9.3) '@simonwep/pickr@1.9.0': dependencies: core-js: 3.32.2 nanopop: 2.3.0 - '@solid-primitives/refs@1.1.2(solid-js@1.9.10)': - dependencies: - '@solid-primitives/utils': 6.3.2(solid-js@1.9.10) - solid-js: 1.9.10 + '@sindresorhus/merge-streams@4.0.0': {} - '@solid-primitives/transition-group@1.1.2(solid-js@1.9.10)': + '@solid-primitives/refs@1.1.2(solid-js@1.9.11)': dependencies: - solid-js: 1.9.10 + '@solid-primitives/utils': 6.3.2(solid-js@1.9.11) + solid-js: 1.9.11 - '@solid-primitives/utils@6.3.2(solid-js@1.9.10)': + '@solid-primitives/transition-group@1.1.2(solid-js@1.9.11)': dependencies: - solid-js: 1.9.10 + solid-js: 1.9.11 + + '@solid-primitives/utils@6.3.2(solid-js@1.9.11)': + dependencies: + solid-js: 1.9.11 '@standard-schema/spec@1.1.0': {} - '@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@2.6.1))': + '@stylistic/eslint-plugin@5.7.1(eslint@9.39.2(jiti@2.6.1))': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) - '@typescript-eslint/types': 8.50.0 + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) + '@typescript-eslint/types': 8.53.1 eslint: 9.39.2(jiti@2.6.1) eslint-visitor-keys: 4.2.1 espree: 10.4.0 estraverse: 5.3.0 picomatch: 4.0.3 - '@stylistic/stylelint-plugin@4.0.0(stylelint@16.26.1(typescript@5.9.3))': + '@stylistic/stylelint-plugin@5.0.1(stylelint@17.0.0(typescript@5.9.3))': dependencies: - '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) - '@csstools/css-tokenizer': 3.0.4 - '@csstools/media-query-list-parser': 
4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-tokenizer': 4.0.0 + '@csstools/media-query-list-parser': 5.0.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0) postcss: 8.5.6 postcss-selector-parser: 7.1.1 postcss-value-parser: 4.2.0 style-search: 0.1.0 - stylelint: 16.26.1(typescript@5.9.3) + stylelint: 17.0.0(typescript@5.9.3) '@swc/helpers@0.2.14': {} - '@techknowlogick/license-checker-webpack-plugin@0.3.0(webpack@5.104.0)': + '@techknowlogick/license-checker-webpack-plugin@0.3.0(webpack@5.104.1)': dependencies: glob: 7.2.3 - lodash: 4.17.21 + lodash: 4.17.23 minimatch: 3.1.2 semver: 6.3.1 spdx-expression-validate: 2.0.0 spdx-satisfies: 5.0.1 superstruct: 0.10.13 - webpack: 5.104.0(webpack-cli@6.0.1) + webpack: 5.104.1(webpack-cli@6.0.1) webpack-sources: 1.4.3 wrap-ansi: 6.2.0 @@ -5218,7 +5281,7 @@ snapshots: '@types/d3-selection@3.0.11': {} - '@types/d3-shape@3.1.7': + '@types/d3-shape@3.1.8': dependencies: '@types/d3-path': 3.1.1 @@ -5263,7 +5326,7 @@ snapshots: '@types/d3-scale': 4.0.9 '@types/d3-scale-chromatic': 3.1.0 '@types/d3-selection': 3.0.11 - '@types/d3-shape': 3.1.7 + '@types/d3-shape': 3.1.8 '@types/d3-time': 3.0.4 '@types/d3-time-format': 4.0.3 '@types/d3-timer': 3.0.2 @@ -5300,21 +5363,19 @@ snapshots: dependencies: '@types/sizzle': 2.3.10 + '@types/js-yaml@4.0.9': {} + '@types/json-schema@7.0.15': {} '@types/json5@0.0.29': {} - '@types/katex@0.16.7': {} + '@types/katex@0.16.8': {} '@types/marked@4.3.2': {} '@types/ms@2.1.0': {} - '@types/node@20.19.27': - dependencies: - undici-types: 6.21.0 - - '@types/node@25.0.3': + '@types/node@25.0.10': dependencies: undici-types: 7.16.0 @@ -5343,95 +5404,99 @@ snapshots: '@types/whatwg-mimetype@3.0.2': {} - 
'@typescript-eslint/eslint-plugin@8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': + '@types/ws@8.18.1': + dependencies: + '@types/node': 25.0.10 + + '@typescript-eslint/eslint-plugin@8.53.1(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/type-utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/parser': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.1 + '@typescript-eslint/type-utils': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.53.1 eslint: 9.39.2(jiti@2.6.1) ignore: 7.0.5 natural-compare: 1.4.0 - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/scope-manager': 8.53.1 + '@typescript-eslint/types': 8.53.1 + '@typescript-eslint/typescript-estree': 8.53.1(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.53.1 debug: 4.4.3 eslint: 9.39.2(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - 
supports-color - '@typescript-eslint/project-service@8.50.0(typescript@5.9.3)': + '@typescript-eslint/project-service@8.53.1(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.50.0(typescript@5.9.3) - '@typescript-eslint/types': 8.50.0 + '@typescript-eslint/tsconfig-utils': 8.53.1(typescript@5.9.3) + '@typescript-eslint/types': 8.53.1 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.50.0': + '@typescript-eslint/scope-manager@8.53.1': dependencies: - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/types': 8.53.1 + '@typescript-eslint/visitor-keys': 8.53.1 - '@typescript-eslint/tsconfig-utils@8.50.0(typescript@5.9.3)': + '@typescript-eslint/tsconfig-utils@8.53.1(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/type-utils@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/types': 8.53.1 + '@typescript-eslint/typescript-estree': 8.53.1(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) debug: 4.4.3 eslint: 9.39.2(jiti@2.6.1) - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/types@8.50.0': {} + '@typescript-eslint/types@8.53.1': {} - '@typescript-eslint/typescript-estree@8.50.0(typescript@5.9.3)': + '@typescript-eslint/typescript-estree@8.53.1(typescript@5.9.3)': dependencies: - '@typescript-eslint/project-service': 8.50.0(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.50.0(typescript@5.9.3) - '@typescript-eslint/types': 8.50.0 
- '@typescript-eslint/visitor-keys': 8.50.0 + '@typescript-eslint/project-service': 8.53.1(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.53.1(typescript@5.9.3) + '@typescript-eslint/types': 8.53.1 + '@typescript-eslint/visitor-keys': 8.53.1 debug: 4.4.3 minimatch: 9.0.5 semver: 7.7.3 tinyglobby: 0.2.15 - ts-api-utils: 2.1.0(typescript@5.9.3) + ts-api-utils: 2.4.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/utils@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/types': 8.50.0 - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) + '@typescript-eslint/scope-manager': 8.53.1 + '@typescript-eslint/types': 8.53.1 + '@typescript-eslint/typescript-estree': 8.53.1(typescript@5.9.3) eslint: 9.39.2(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.50.0': + '@typescript-eslint/visitor-keys@8.53.1': dependencies: - '@typescript-eslint/types': 8.50.0 + '@typescript-eslint/types': 8.53.1 eslint-visitor-keys: 4.2.1 '@unrs/resolver-binding-android-arm-eabi@1.11.1': @@ -5493,139 +5558,137 @@ snapshots: '@unrs/resolver-binding-win32-x64-msvc@1.11.1': optional: true - '@vitejs/plugin-vue@6.0.3(vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))(vue@3.5.25(typescript@5.9.3))': + '@vitejs/plugin-vue@6.0.3(vite@7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))(vue@3.5.27(typescript@5.9.3))': dependencies: '@rolldown/pluginutils': 1.0.0-beta.53 - vite: 7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2) - vue: 3.5.25(typescript@5.9.3) + vite: 
7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2) + vue: 3.5.27(typescript@5.9.3) - '@vitest/eslint-plugin@1.5.2(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))': + '@vitest/eslint-plugin@1.6.6(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.18(@types/node@25.0.10)(happy-dom@20.3.7)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))': dependencies: - '@typescript-eslint/scope-manager': 8.50.0 - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.53.1 + '@typescript-eslint/utils': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) eslint: 9.39.2(jiti@2.6.1) optionalDependencies: typescript: 5.9.3 - vitest: 4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2) + vitest: 4.0.18(@types/node@25.0.10)(happy-dom@20.3.7)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2) transitivePeerDependencies: - supports-color - '@vitest/expect@4.0.16': + '@vitest/expect@4.0.18': dependencies: '@standard-schema/spec': 1.1.0 '@types/chai': 5.2.3 - '@vitest/spy': 4.0.16 - '@vitest/utils': 4.0.16 - chai: 6.2.1 + '@vitest/spy': 4.0.18 + '@vitest/utils': 4.0.18 + chai: 6.2.2 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.16(vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))': + '@vitest/mocker@4.0.18(vite@7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))': dependencies: - '@vitest/spy': 4.0.16 + '@vitest/spy': 4.0.18 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2) + vite: 7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2) - '@vitest/pretty-format@4.0.16': + '@vitest/pretty-format@4.0.18': dependencies: tinyrainbow: 3.0.3 - 
'@vitest/runner@4.0.16': + '@vitest/runner@4.0.18': dependencies: - '@vitest/utils': 4.0.16 + '@vitest/utils': 4.0.18 pathe: 2.0.3 - '@vitest/snapshot@4.0.16': + '@vitest/snapshot@4.0.18': dependencies: - '@vitest/pretty-format': 4.0.16 + '@vitest/pretty-format': 4.0.18 magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/spy@4.0.16': {} + '@vitest/spy@4.0.18': {} - '@vitest/utils@4.0.16': + '@vitest/utils@4.0.18': dependencies: - '@vitest/pretty-format': 4.0.16 + '@vitest/pretty-format': 4.0.18 tinyrainbow: 3.0.3 - '@volar/language-core@2.4.26': + '@volar/language-core@2.4.27': dependencies: - '@volar/source-map': 2.4.26 + '@volar/source-map': 2.4.27 - '@volar/source-map@2.4.26': {} + '@volar/source-map@2.4.27': {} - '@volar/typescript@2.4.26': + '@volar/typescript@2.4.27': dependencies: - '@volar/language-core': 2.4.26 + '@volar/language-core': 2.4.27 path-browserify: 1.0.1 vscode-uri: 3.1.0 - '@vue/compiler-core@3.5.25': + '@vue/compiler-core@3.5.27': dependencies: - '@babel/parser': 7.28.5 - '@vue/shared': 3.5.25 - entities: 4.5.0 + '@babel/parser': 7.28.6 + '@vue/shared': 3.5.27 + entities: 7.0.1 estree-walker: 2.0.2 source-map-js: 1.2.1 - '@vue/compiler-dom@3.5.25': + '@vue/compiler-dom@3.5.27': dependencies: - '@vue/compiler-core': 3.5.25 - '@vue/shared': 3.5.25 + '@vue/compiler-core': 3.5.27 + '@vue/shared': 3.5.27 - '@vue/compiler-sfc@3.5.25': + '@vue/compiler-sfc@3.5.27': dependencies: - '@babel/parser': 7.28.5 - '@vue/compiler-core': 3.5.25 - '@vue/compiler-dom': 3.5.25 - '@vue/compiler-ssr': 3.5.25 - '@vue/shared': 3.5.25 + '@babel/parser': 7.28.6 + '@vue/compiler-core': 3.5.27 + '@vue/compiler-dom': 3.5.27 + '@vue/compiler-ssr': 3.5.27 + '@vue/shared': 3.5.27 estree-walker: 2.0.2 magic-string: 0.30.21 postcss: 8.5.6 source-map-js: 1.2.1 - '@vue/compiler-ssr@3.5.25': + '@vue/compiler-ssr@3.5.27': dependencies: - '@vue/compiler-dom': 3.5.25 - '@vue/shared': 3.5.25 + '@vue/compiler-dom': 3.5.27 + '@vue/shared': 3.5.27 - 
'@vue/language-core@3.1.8(typescript@5.9.3)': + '@vue/language-core@3.2.3': dependencies: - '@volar/language-core': 2.4.26 - '@vue/compiler-dom': 3.5.25 - '@vue/shared': 3.5.25 - alien-signals: 3.1.1 + '@volar/language-core': 2.4.27 + '@vue/compiler-dom': 3.5.27 + '@vue/shared': 3.5.27 + alien-signals: 3.1.2 muggle-string: 0.4.1 path-browserify: 1.0.1 picomatch: 4.0.3 - optionalDependencies: - typescript: 5.9.3 - '@vue/reactivity@3.5.25': + '@vue/reactivity@3.5.27': dependencies: - '@vue/shared': 3.5.25 + '@vue/shared': 3.5.27 - '@vue/runtime-core@3.5.25': + '@vue/runtime-core@3.5.27': dependencies: - '@vue/reactivity': 3.5.25 - '@vue/shared': 3.5.25 + '@vue/reactivity': 3.5.27 + '@vue/shared': 3.5.27 - '@vue/runtime-dom@3.5.25': + '@vue/runtime-dom@3.5.27': dependencies: - '@vue/reactivity': 3.5.25 - '@vue/runtime-core': 3.5.25 - '@vue/shared': 3.5.25 + '@vue/reactivity': 3.5.27 + '@vue/runtime-core': 3.5.27 + '@vue/shared': 3.5.27 csstype: 3.2.3 - '@vue/server-renderer@3.5.25(vue@3.5.25(typescript@5.9.3))': + '@vue/server-renderer@3.5.27(vue@3.5.27(typescript@5.9.3))': dependencies: - '@vue/compiler-ssr': 3.5.25 - '@vue/shared': 3.5.25 - vue: 3.5.25(typescript@5.9.3) + '@vue/compiler-ssr': 3.5.27 + '@vue/shared': 3.5.27 + vue: 3.5.27(typescript@5.9.3) - '@vue/shared@3.5.25': {} + '@vue/shared@3.5.27': {} '@webassemblyjs/ast@1.14.1': dependencies: @@ -5703,20 +5766,20 @@ snapshots: '@webassemblyjs/ast': 1.14.1 '@xtuc/long': 4.2.2 - '@webpack-cli/configtest@3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)': + '@webpack-cli/configtest@3.0.1(webpack-cli@6.0.1)(webpack@5.104.1)': dependencies: - webpack: 5.104.0(webpack-cli@6.0.1) - webpack-cli: 6.0.1(webpack@5.104.0) + webpack: 5.104.1(webpack-cli@6.0.1) + webpack-cli: 6.0.1(webpack@5.104.1) - '@webpack-cli/info@3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)': + '@webpack-cli/info@3.0.1(webpack-cli@6.0.1)(webpack@5.104.1)': dependencies: - webpack: 5.104.0(webpack-cli@6.0.1) - webpack-cli: 6.0.1(webpack@5.104.0) + webpack: 
5.104.1(webpack-cli@6.0.1) + webpack-cli: 6.0.1(webpack@5.104.1) - '@webpack-cli/serve@3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)': + '@webpack-cli/serve@3.0.1(webpack-cli@6.0.1)(webpack@5.104.1)': dependencies: - webpack: 5.104.0(webpack-cli@6.0.1) - webpack-cli: 6.0.1(webpack@5.104.0) + webpack: 5.104.1(webpack-cli@6.0.1) + webpack-cli: 6.0.1(webpack@5.104.1) '@xtuc/ieee754@1.2.0': {} @@ -5732,9 +5795,9 @@ snapshots: acorn@8.15.0: {} - add-asset-webpack-plugin@3.1.1(webpack@5.104.0): + add-asset-webpack-plugin@3.1.1(webpack@5.104.1): optionalDependencies: - webpack: 5.104.0(webpack-cli@6.0.1) + webpack: 5.104.1(webpack-cli@6.0.1) ajv-formats@2.1.1(ajv@8.17.1): optionalDependencies: @@ -5759,7 +5822,7 @@ snapshots: json-schema-traverse: 1.0.0 require-from-string: 2.0.2 - alien-signals@3.1.1: {} + alien-signals@3.1.2: {} ansi-regex@5.0.1: {} @@ -5788,13 +5851,11 @@ snapshots: array-find-index@1.0.2: {} - array-union@2.1.0: {} - - asciinema-player@3.13.5: + asciinema-player@3.14.0: dependencies: - '@babel/runtime': 7.28.4 - solid-js: 1.9.10 - solid-transition-group: 0.2.3(solid-js@1.9.10) + '@babel/runtime': 7.28.6 + solid-js: 1.9.11 + solid-transition-group: 0.2.3(solid-js@1.9.11) assertion-error@2.0.1: {} @@ -5804,17 +5865,17 @@ snapshots: atob@2.1.2: {} - axe-core@4.11.0: {} + axe-core@4.11.1: {} axobject-query@4.1.0: {} balanced-match@1.0.2: {} - balanced-match@2.0.0: {} + balanced-match@3.0.1: {} base64-js@1.5.1: {} - baseline-browser-mapping@2.9.9: {} + baseline-browser-mapping@2.9.17: {} big.js@5.2.2: {} @@ -5837,9 +5898,9 @@ snapshots: browserslist@4.28.1: dependencies: - baseline-browser-mapping: 2.9.9 - caniuse-lite: 1.0.30001760 - electron-to-chromium: 1.5.267 + baseline-browser-mapping: 2.9.17 + caniuse-lite: 1.0.30001766 + electron-to-chromium: 1.5.278 node-releases: 2.0.27 update-browserslist-db: 1.2.3(browserslist@4.28.1) @@ -5856,21 +5917,21 @@ snapshots: bytes@3.1.2: {} - cacheable@2.3.1: + cacheable@2.3.2: dependencies: - '@cacheable/memory': 2.0.6 
- '@cacheable/utils': 2.3.2 - hookified: 1.14.0 - keyv: 5.5.5 - qified: 0.5.3 + '@cacheable/memory': 2.0.7 + '@cacheable/utils': 2.3.3 + hookified: 1.15.0 + keyv: 5.6.0 + qified: 0.6.0 callsites@3.1.0: {} camelcase-css@2.0.1: {} - caniuse-lite@1.0.30001760: {} + caniuse-lite@1.0.30001766: {} - chai@6.2.1: {} + chai@6.2.2: {} chalk@4.1.2: dependencies: @@ -5903,7 +5964,7 @@ snapshots: chevrotain-allstar@0.3.1(chevrotain@11.0.3): dependencies: chevrotain: 11.0.3 - lodash-es: 4.17.22 + lodash-es: 4.17.23 chevrotain@11.0.3: dependencies: @@ -5976,7 +6037,7 @@ snapshots: commander@8.3.0: {} - comment-parser@1.4.1: {} + comment-parser@1.4.5: {} compare-versions@6.1.1: {} @@ -5984,7 +6045,7 @@ snapshots: confbox@0.1.8: {} - core-js-compat@3.47.0: + core-js-compat@3.48.0: dependencies: browserslist: 4.28.1 @@ -6017,7 +6078,7 @@ snapshots: css-functions-list@3.2.3: {} - css-loader@7.1.2(webpack@5.104.0): + css-loader@7.1.2(webpack@5.104.1): dependencies: icss-utils: 5.1.0(postcss@8.5.6) postcss: 8.5.6 @@ -6028,7 +6089,7 @@ snapshots: postcss-value-parser: 4.2.0 semver: 7.7.3 optionalDependencies: - webpack: 5.104.0(webpack-cli@6.0.1) + webpack: 5.104.1(webpack-cli@6.0.1) css-select@5.2.2: dependencies: @@ -6133,7 +6194,7 @@ snapshots: d3-quadtree: 3.0.1 d3-timer: 3.0.1 - d3-format@3.1.0: {} + d3-format@3.1.2: {} d3-geo@3.1.1: dependencies: @@ -6168,7 +6229,7 @@ snapshots: d3-scale@4.0.2: dependencies: d3-array: 3.2.4 - d3-format: 3.1.0 + d3-format: 3.1.2 d3-interpolate: 3.0.1 d3-time: 3.1.0 d3-time-format: 4.1.0 @@ -6225,7 +6286,7 @@ snapshots: d3-ease: 3.0.1 d3-fetch: 3.0.1 d3-force: 3.0.0 - d3-format: 3.1.0 + d3-format: 3.1.2 d3-geo: 3.1.1 d3-hierarchy: 3.1.2 d3-interpolate: 3.0.1 @@ -6246,7 +6307,7 @@ snapshots: dagre-d3-es@7.0.13: dependencies: d3: 7.9.0 - lodash-es: 4.17.22 + lodash-es: 4.17.23 damerau-levenshtein@1.0.8: {} @@ -6260,7 +6321,7 @@ snapshots: dependencies: ms: 2.1.3 - decode-named-character-reference@1.2.0: + decode-named-character-reference@1.3.0: 
dependencies: character-entities: 2.0.2 @@ -6287,10 +6348,6 @@ snapshots: didyoumean@1.2.2: {} - dir-glob@3.0.1: - dependencies: - path-type: 4.0.0 - dlv@1.1.3: {} doctrine@2.1.0: @@ -6338,7 +6395,7 @@ snapshots: codemirror-spell-checker: 1.1.2 marked: 4.3.0 - electron-to-chromium@1.5.267: {} + electron-to-chromium@1.5.278: {} emoji-regex@10.6.0: {} @@ -6355,6 +6412,8 @@ snapshots: entities@4.5.0: {} + entities@7.0.1: {} + env-paths@2.2.1: {} envinfo@7.21.0: {} @@ -6367,43 +6426,14 @@ snapshots: es-module-lexer@2.0.0: {} - esbuild-loader@4.4.0(webpack@5.104.0): + esbuild-loader@4.4.2(webpack@5.104.1): dependencies: - esbuild: 0.25.12 + esbuild: 0.27.2 get-tsconfig: 4.13.0 loader-utils: 2.0.4 - webpack: 5.104.0(webpack-cli@6.0.1) + webpack: 5.104.1(webpack-cli@6.0.1) webpack-sources: 1.4.3 - esbuild@0.25.12: - optionalDependencies: - '@esbuild/aix-ppc64': 0.25.12 - '@esbuild/android-arm': 0.25.12 - '@esbuild/android-arm64': 0.25.12 - '@esbuild/android-x64': 0.25.12 - '@esbuild/darwin-arm64': 0.25.12 - '@esbuild/darwin-x64': 0.25.12 - '@esbuild/freebsd-arm64': 0.25.12 - '@esbuild/freebsd-x64': 0.25.12 - '@esbuild/linux-arm': 0.25.12 - '@esbuild/linux-arm64': 0.25.12 - '@esbuild/linux-ia32': 0.25.12 - '@esbuild/linux-loong64': 0.25.12 - '@esbuild/linux-mips64el': 0.25.12 - '@esbuild/linux-ppc64': 0.25.12 - '@esbuild/linux-riscv64': 0.25.12 - '@esbuild/linux-s390x': 0.25.12 - '@esbuild/linux-x64': 0.25.12 - '@esbuild/netbsd-arm64': 0.25.12 - '@esbuild/netbsd-x64': 0.25.12 - '@esbuild/openbsd-arm64': 0.25.12 - '@esbuild/openbsd-x64': 0.25.12 - '@esbuild/openharmony-arm64': 0.25.12 - '@esbuild/sunos-x64': 0.25.12 - '@esbuild/win32-arm64': 0.25.12 - '@esbuild/win32-ia32': 0.25.12 - '@esbuild/win32-x64': 0.25.12 - esbuild@0.27.2: optionalDependencies: '@esbuild/aix-ppc64': 0.27.2 @@ -6463,7 +6493,7 @@ snapshots: transitivePeerDependencies: - supports-color - 
eslint-import-resolver-typescript@4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1)): + eslint-import-resolver-typescript@4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1)): dependencies: debug: 4.4.3 eslint: 9.39.2(jiti@2.6.1) @@ -6474,19 +6504,19 @@ snapshots: tinyglobby: 0.2.15 unrs-resolver: 1.11.1 optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)) - eslint-plugin-import-x: 4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)) + eslint-plugin-import-x: 4.16.1(@typescript-eslint/utils@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)) transitivePeerDependencies: - supports-color - eslint-module-utils@2.12.1(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)): + eslint-module-utils@2.12.1(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)): dependencies: debug: 3.2.7 optionalDependencies: - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + 
'@typescript-eslint/parser': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) eslint: 9.39.2(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1)) + eslint-import-resolver-typescript: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1)) transitivePeerDependencies: - supports-color @@ -6519,8 +6549,8 @@ snapshots: '@eslint/eslintrc': 3.3.3 '@eslint/js': 9.39.2 '@github/browserslist-config': 1.0.0 - '@typescript-eslint/eslint-plugin': 8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/eslint-plugin': 8.53.1(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/parser': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) aria-query: 5.3.2 eslint: 9.39.2(jiti@2.6.1) eslint-config-prettier: 10.1.8(eslint@9.39.2(jiti@2.6.1)) @@ -6528,17 +6558,17 @@ snapshots: eslint-plugin-eslint-comments: 3.2.0(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-filenames: 1.3.2(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-i18n-text: 1.0.1(eslint@9.39.2(jiti@2.6.1)) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)) + eslint-plugin-import: 
2.32.0(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.39.2(jiti@2.6.1)) eslint-plugin-no-only-tests: 3.3.0 - eslint-plugin-prettier: 5.5.4(@types/eslint@9.6.1)(eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(prettier@3.7.4) + eslint-plugin-prettier: 5.5.5(@types/eslint@9.6.1)(eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(prettier@3.8.1) eslint-rule-documentation: 1.0.23 globals: 16.5.0 jsx-ast-utils: 3.3.5 - prettier: 3.7.4 + prettier: 3.8.1 svg-element-attributes: 1.3.1 typescript: 5.9.3 - typescript-eslint: 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + typescript-eslint: 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) transitivePeerDependencies: - '@types/eslint' - eslint-import-resolver-typescript @@ -6549,10 +6579,10 @@ snapshots: dependencies: eslint: 9.39.2(jiti@2.6.1) - eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)): + eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)): dependencies: - '@typescript-eslint/types': 8.50.0 - comment-parser: 1.4.1 + '@typescript-eslint/types': 8.53.1 + comment-parser: 1.4.5 debug: 4.4.3 eslint: 9.39.2(jiti@2.6.1) eslint-import-context: 0.1.9(unrs-resolver@1.11.1) @@ -6562,12 +6592,12 @@ snapshots: stable-hash-x: 0.2.0 unrs-resolver: 1.11.1 optionalDependencies: - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) eslint-import-resolver-node: 0.3.9 transitivePeerDependencies: - supports-color - 
eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)): + eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)): dependencies: '@rtsao/scc': 1.1.0 array-includes: '@nolyfill/array-includes@1.0.44' @@ -6578,7 +6608,7 @@ snapshots: doctrine: 2.1.0 eslint: 9.39.2(jiti@2.6.1) eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)) hasown: '@nolyfill/hasown@1.0.44' is-core-module: '@nolyfill/is-core-module@1.0.39' is-glob: 4.0.3 @@ -6590,7 +6620,7 @@ snapshots: string.prototype.trimend: '@nolyfill/string.prototype.trimend@1.0.44' tsconfig-paths: 3.15.0 optionalDependencies: - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/parser': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) transitivePeerDependencies: - eslint-import-resolver-typescript - eslint-import-resolver-webpack @@ -6602,7 +6632,7 @@ snapshots: array-includes: '@nolyfill/array-includes@1.0.44' array.prototype.flatmap: '@nolyfill/array.prototype.flatmap@1.0.44' ast-types-flow: 0.0.8 - axe-core: 4.11.0 + axe-core: 4.11.1 axobject-query: 4.1.0 damerau-levenshtein: 1.0.8 emoji-regex: 9.2.2 @@ -6617,28 +6647,28 @@ snapshots: eslint-plugin-no-only-tests@3.3.0: {} - eslint-plugin-playwright@2.4.0(eslint@9.39.2(jiti@2.6.1)): + eslint-plugin-playwright@2.5.0(eslint@9.39.2(jiti@2.6.1)): dependencies: eslint: 9.39.2(jiti@2.6.1) 
globals: 16.5.0 - eslint-plugin-prettier@5.5.4(@types/eslint@9.6.1)(eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(prettier@3.7.4): + eslint-plugin-prettier@5.5.5(@types/eslint@9.6.1)(eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(prettier@3.8.1): dependencies: eslint: 9.39.2(jiti@2.6.1) - prettier: 3.7.4 - prettier-linter-helpers: 1.0.0 - synckit: 0.11.11 + prettier: 3.8.1 + prettier-linter-helpers: 1.0.1 + synckit: 0.11.12 optionalDependencies: '@types/eslint': 9.6.1 eslint-config-prettier: 10.1.8(eslint@9.39.2(jiti@2.6.1)) - eslint-plugin-regexp@2.10.0(eslint@9.39.2(jiti@2.6.1)): + eslint-plugin-regexp@3.0.0(eslint@9.39.2(jiti@2.6.1)): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) '@eslint-community/regexpp': 4.12.2 - comment-parser: 1.4.1 + comment-parser: 1.4.5 eslint: 9.39.2(jiti@2.6.1) - jsdoc-type-pratt-parser: 4.8.0 + jsdoc-type-pratt-parser: 7.1.0 refa: 0.12.1 regexp-ast-analysis: 0.7.1 scslre: 0.3.0 @@ -6660,14 +6690,14 @@ snapshots: eslint-plugin-unicorn@62.0.0(eslint@9.39.2(jiti@2.6.1)): dependencies: '@babel/helper-validator-identifier': 7.28.5 - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) '@eslint/plugin-kit': 0.4.1 change-case: 5.4.4 ci-info: 4.3.1 clean-regexp: 1.0.0 - core-js-compat: 3.47.0 + core-js-compat: 3.48.0 eslint: 9.39.2(jiti@2.6.1) - esquery: 1.6.0 + esquery: 1.7.0 find-up-simple: 1.0.1 globals: 16.5.0 indent-string: 5.0.0 @@ -6681,10 +6711,10 @@ snapshots: eslint-plugin-vue-scoped-css@2.12.0(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1))): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) eslint: 9.39.2(jiti@2.6.1) eslint-compat-utils: 
0.6.5(eslint@9.39.2(jiti@2.6.1)) - lodash: 4.17.21 + lodash: 4.17.23 postcss: 8.5.6 postcss-safe-parser: 6.0.0(postcss@8.5.6) postcss-scss: 4.0.9(postcss@8.5.6) @@ -6694,9 +6724,9 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-vue@10.6.2(@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@2.6.1)))(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1))): + eslint-plugin-vue@10.7.0(@stylistic/eslint-plugin@5.7.1(eslint@9.39.2(jiti@2.6.1)))(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1))): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) eslint: 9.39.2(jiti@2.6.1) natural-compare: 1.4.0 nth-check: 2.1.1 @@ -6705,8 +6735,8 @@ snapshots: vue-eslint-parser: 10.2.0(eslint@9.39.2(jiti@2.6.1)) xml-name-validator: 4.0.0 optionalDependencies: - '@stylistic/eslint-plugin': 5.6.1(eslint@9.39.2(jiti@2.6.1)) - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@stylistic/eslint-plugin': 5.7.1(eslint@9.39.2(jiti@2.6.1)) + '@typescript-eslint/parser': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) eslint-plugin-wc@3.0.2(eslint@9.39.2(jiti@2.6.1)): dependencies: @@ -6732,7 +6762,7 @@ snapshots: eslint@9.39.2(jiti@2.6.1): dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1)) + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1)) '@eslint-community/regexpp': 4.12.2 '@eslint/config-array': 0.21.1 '@eslint/config-helpers': 0.4.2 @@ -6752,7 +6782,7 @@ snapshots: eslint-scope: 8.4.0 eslint-visitor-keys: 4.2.1 espree: 10.4.0 - esquery: 1.6.0 + esquery: 1.7.0 esutils: 2.0.3 fast-deep-equal: 3.1.3 file-entry-cache: 8.0.0 @@ -6777,7 +6807,7 @@ snapshots: acorn-jsx: 
5.3.2(acorn@8.15.0) eslint-visitor-keys: 4.2.1 - esquery@1.6.0: + esquery@1.7.0: dependencies: estraverse: 5.3.0 @@ -6823,10 +6853,14 @@ snapshots: fastest-levenshtein@1.0.16: {} - fastq@1.19.1: + fastq@1.20.1: dependencies: reusify: 1.1.0 + fd-package-json@2.0.0: + dependencies: + walk-up-path: 4.0.0 + fdir@6.5.0(picomatch@4.0.3): optionalDependencies: picomatch: 4.0.3 @@ -6839,9 +6873,9 @@ snapshots: fflate@0.8.2: {} - file-entry-cache@11.1.1: + file-entry-cache@11.1.2: dependencies: - flat-cache: 6.1.19 + flat-cache: 6.1.20 file-entry-cache@8.0.0: dependencies: @@ -6868,16 +6902,20 @@ snapshots: flatted: 3.3.3 keyv: 4.5.4 - flat-cache@6.1.19: + flat-cache@6.1.20: dependencies: - cacheable: 2.3.1 + cacheable: 2.3.2 flatted: 3.3.3 - hookified: 1.14.0 + hookified: 1.15.0 flat@5.0.2: {} flatted@3.3.3: {} + formatly@0.3.0: + dependencies: + fd-package-json: 2.0.0 + fs.realpath@1.0.0: {} fsevents@2.3.2: @@ -6927,14 +6965,16 @@ snapshots: globals@16.5.0: {} - globby@11.1.0: + globals@17.1.0: {} + + globby@16.1.0: dependencies: - array-union: 2.1.0 - dir-glob: 3.0.1 + '@sindresorhus/merge-streams': 4.0.0 fast-glob: 3.3.3 - ignore: 5.3.2 - merge2: 1.4.1 - slash: 3.0.0 + ignore: 7.0.5 + is-path-inside: 4.0.0 + slash: 5.1.0 + unicorn-magic: 0.4.0 globjoin@0.1.4: {} @@ -6944,23 +6984,31 @@ snapshots: hammerjs@2.0.8: {} - happy-dom@20.0.11: + happy-dom@20.3.7: dependencies: - '@types/node': 20.19.27 + '@types/node': 25.0.10 '@types/whatwg-mimetype': 3.0.2 + '@types/ws': 8.18.1 + entities: 4.5.0 whatwg-mimetype: 3.0.0 + ws: 8.19.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate has-flag@4.0.0: {} + has-flag@5.0.1: {} + hash-sum@2.0.0: {} - hashery@1.3.0: + hashery@1.4.0: dependencies: - hookified: 1.14.0 + hookified: 1.15.0 - hookified@1.14.0: {} + hookified@1.15.0: {} - html-tags@3.3.1: {} + html-tags@5.1.0: {} htmlparser2@8.0.2: dependencies: @@ -6997,6 +7045,8 @@ snapshots: pkg-dir: 4.2.0 resolve-cwd: 3.0.0 + import-meta-resolve@4.2.0: {} + 
imurmurhash@0.1.4: {} indent-string@5.0.0: {} @@ -7055,6 +7105,8 @@ snapshots: is-number@7.0.0: {} + is-path-inside@4.0.0: {} + is-plain-object@2.0.4: dependencies: isobject: 3.0.1 @@ -7073,7 +7125,7 @@ snapshots: jest-worker@27.5.1: dependencies: - '@types/node': 25.0.3 + '@types/node': 25.0.10 merge-stream: 2.0.0 supports-color: 8.1.1 @@ -7081,7 +7133,7 @@ snapshots: jiti@2.6.1: {} - jquery@3.7.1: {} + jquery@4.0.0: {} js-levenshtein-esm@2.0.0: {} @@ -7093,7 +7145,7 @@ snapshots: dependencies: argparse: 2.0.1 - jsdoc-type-pratt-parser@4.8.0: {} + jsdoc-type-pratt-parser@7.1.0: {} jsesc@3.1.0: {} @@ -7136,7 +7188,7 @@ snapshots: dependencies: json-buffer: 3.0.1 - keyv@5.5.5: + keyv@5.6.0: dependencies: '@keyv/serialize': 1.1.1 @@ -7148,6 +7200,23 @@ snapshots: kind-of@6.0.3: {} + knip@5.82.1(@types/node@25.0.10)(typescript@5.9.3): + dependencies: + '@nodelib/fs.walk': 1.2.8 + '@types/node': 25.0.10 + fast-glob: 3.3.3 + formatly: 0.3.0 + jiti: 2.6.1 + js-yaml: 4.1.1 + minimist: 1.2.8 + oxc-resolver: 11.16.4 + picocolors: 1.1.1 + picomatch: 4.0.3 + smol-toml: 1.6.0 + strip-json-comments: 5.0.3 + typescript: 5.9.3 + zod: 4.3.6 + known-css-properties@0.37.0: {} langium@3.3.1: @@ -7199,7 +7268,7 @@ snapshots: lodash-es@4.17.21: {} - lodash-es@4.17.22: {} + lodash-es@4.17.23: {} lodash.camelcase@4.3.0: {} @@ -7215,7 +7284,7 @@ snapshots: lodash.upperfirst@4.3.1: {} - lodash@4.17.21: {} + lodash@4.17.23: {} magic-string@0.30.21: dependencies: @@ -7267,14 +7336,14 @@ snapshots: marked@4.3.0: {} - material-icon-theme@5.29.0: + material-icon-theme@5.31.0: dependencies: chroma-js: 3.2.0 events: 3.3.0 fast-deep-equal: 3.1.3 svgson: 5.3.1 - mathml-tag-names@2.1.3: {} + mathml-tag-names@4.0.0: {} mdn-data@2.0.28: {} @@ -7282,7 +7351,7 @@ snapshots: mdurl@2.0.0: {} - meow@13.2.0: {} + meow@14.0.0: {} merge-stream@2.0.0: {} @@ -7304,7 +7373,7 @@ snapshots: dompurify: 3.3.1 katex: 0.16.27 khroma: 2.1.0 - lodash-es: 4.17.22 + lodash-es: 4.17.23 marked: 16.4.2 roughjs: 4.6.6 stylis: 
4.3.6 @@ -7313,7 +7382,7 @@ snapshots: micromark-core-commonmark@2.0.3: dependencies: - decode-named-character-reference: 1.2.0 + decode-named-character-reference: 1.3.0 devlop: 1.1.0 micromark-factory-destination: 2.0.1 micromark-factory-label: 2.0.1 @@ -7368,7 +7437,7 @@ snapshots: micromark-extension-math@3.1.0: dependencies: - '@types/katex': 0.16.7 + '@types/katex': 0.16.8 devlop: 1.1.0 katex: 0.16.27 micromark-factory-space: 2.0.1 @@ -7465,7 +7534,7 @@ snapshots: dependencies: '@types/debug': 4.1.12 debug: 4.4.3 - decode-named-character-reference: 1.2.0 + decode-named-character-reference: 1.3.0 devlop: 1.1.0 micromark-core-commonmark: 2.0.3 micromark-factory-space: 2.0.1 @@ -7494,11 +7563,11 @@ snapshots: dependencies: mime-db: 1.52.0 - mini-css-extract-plugin@2.9.4(webpack@5.104.0): + mini-css-extract-plugin@2.10.0(webpack@5.104.1): dependencies: schema-utils: 4.3.3 tapable: 2.3.0 - webpack: 5.104.0(webpack-cli@6.0.1) + webpack: 5.104.1(webpack-cli@6.0.1) minimatch@10.1.1: dependencies: @@ -7519,13 +7588,13 @@ snapshots: acorn: 8.15.0 pathe: 2.0.3 pkg-types: 1.3.1 - ufo: 1.6.1 + ufo: 1.6.3 - monaco-editor-webpack-plugin@7.1.1(monaco-editor@0.55.1)(webpack@5.104.0): + monaco-editor-webpack-plugin@7.1.1(monaco-editor@0.55.1)(webpack@5.104.1): dependencies: loader-utils: 2.0.4 monaco-editor: 0.55.1 - webpack: 5.104.0(webpack-cli@6.0.1) + webpack: 5.104.1(webpack-cli@6.0.1) monaco-editor@0.55.1: dependencies: @@ -7582,7 +7651,7 @@ snapshots: dependencies: wrappy: 1.0.2 - online-3d-viewer@0.17.0: + online-3d-viewer@0.18.0: dependencies: '@simonwep/pickr': 1.9.0 fflate: 0.8.2 @@ -7597,6 +7666,29 @@ snapshots: type-check: 0.4.0 word-wrap: 1.2.5 + oxc-resolver@11.16.4: + optionalDependencies: + '@oxc-resolver/binding-android-arm-eabi': 11.16.4 + '@oxc-resolver/binding-android-arm64': 11.16.4 + '@oxc-resolver/binding-darwin-arm64': 11.16.4 + '@oxc-resolver/binding-darwin-x64': 11.16.4 + '@oxc-resolver/binding-freebsd-x64': 11.16.4 + 
'@oxc-resolver/binding-linux-arm-gnueabihf': 11.16.4 + '@oxc-resolver/binding-linux-arm-musleabihf': 11.16.4 + '@oxc-resolver/binding-linux-arm64-gnu': 11.16.4 + '@oxc-resolver/binding-linux-arm64-musl': 11.16.4 + '@oxc-resolver/binding-linux-ppc64-gnu': 11.16.4 + '@oxc-resolver/binding-linux-riscv64-gnu': 11.16.4 + '@oxc-resolver/binding-linux-riscv64-musl': 11.16.4 + '@oxc-resolver/binding-linux-s390x-gnu': 11.16.4 + '@oxc-resolver/binding-linux-x64-gnu': 11.16.4 + '@oxc-resolver/binding-linux-x64-musl': 11.16.4 + '@oxc-resolver/binding-openharmony-arm64': 11.16.4 + '@oxc-resolver/binding-wasm32-wasi': 11.16.4 + '@oxc-resolver/binding-win32-arm64-msvc': 11.16.4 + '@oxc-resolver/binding-win32-ia32-msvc': 11.16.4 + '@oxc-resolver/binding-win32-x64-msvc': 11.16.4 + p-limit@2.3.0: dependencies: p-try: 2.2.0 @@ -7626,14 +7718,14 @@ snapshots: '@types/unist': 2.0.11 character-entities-legacy: 3.0.0 character-reference-invalid: 2.0.1 - decode-named-character-reference: 1.2.0 + decode-named-character-reference: 1.3.0 is-alphanumerical: 2.0.1 is-decimal: 2.0.1 is-hexadecimal: 2.0.1 parse-json@5.2.0: dependencies: - '@babel/code-frame': 7.27.1 + '@babel/code-frame': 7.28.6 error-ex: 1.3.4 json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.2.4 @@ -7650,13 +7742,11 @@ snapshots: path-parse@1.0.7: {} - path-type@4.0.0: {} - pathe@2.0.3: {} pdfobject@2.3.1: {} - perfect-debounce@2.0.0: {} + perfect-debounce@2.1.0: {} picocolors@1.1.1: {} @@ -7678,11 +7768,11 @@ snapshots: mlly: 1.8.0 pathe: 2.0.3 - playwright-core@1.57.0: {} + playwright-core@1.58.0: {} - playwright@1.57.0: + playwright@1.58.0: dependencies: - playwright-core: 1.57.0 + playwright-core: 1.58.0 optionalDependencies: fsevents: 2.3.2 @@ -7695,7 +7785,7 @@ snapshots: path-data-parser: 0.1.0 points-on-curve: 0.2.0 - postcss-html@1.8.0: + postcss-html@1.8.1: dependencies: htmlparser2: 8.0.2 js-tokens: 9.0.1 @@ -7721,14 +7811,14 @@ snapshots: optionalDependencies: postcss: 8.5.6 - 
postcss-loader@8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.0): + postcss-loader@8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.1): dependencies: cosmiconfig: 9.0.0(typescript@5.9.3) jiti: 2.6.1 postcss: 8.5.6 semver: 7.7.3 optionalDependencies: - webpack: 5.104.0(webpack-cli@6.0.1) + webpack: 5.104.1(webpack-cli@6.0.1) transitivePeerDependencies: - typescript @@ -7758,8 +7848,6 @@ snapshots: postcss: 8.5.6 postcss-selector-parser: 6.1.2 - postcss-resolve-nested-selector@0.1.6: {} - postcss-safe-parser@6.0.0(postcss@8.5.6): dependencies: postcss: 8.5.6 @@ -7802,19 +7890,19 @@ snapshots: prelude-ls@1.2.1: {} - prettier-linter-helpers@1.0.0: + prettier-linter-helpers@1.0.1: dependencies: fast-diff: 1.3.0 - prettier@3.7.4: {} + prettier@3.8.1: {} punycode.js@2.3.1: {} punycode@2.3.1: {} - qified@0.5.3: + qified@0.6.0: dependencies: - hookified: 1.14.0 + hookified: 1.15.0 queue-microtask@1.2.3: {} @@ -7873,32 +7961,35 @@ snapshots: robust-predicates@3.0.2: {} - rollup@4.53.5: + rollup@4.56.0: dependencies: '@types/estree': 1.0.8 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.53.5 - '@rollup/rollup-android-arm64': 4.53.5 - '@rollup/rollup-darwin-arm64': 4.53.5 - '@rollup/rollup-darwin-x64': 4.53.5 - '@rollup/rollup-freebsd-arm64': 4.53.5 - '@rollup/rollup-freebsd-x64': 4.53.5 - '@rollup/rollup-linux-arm-gnueabihf': 4.53.5 - '@rollup/rollup-linux-arm-musleabihf': 4.53.5 - '@rollup/rollup-linux-arm64-gnu': 4.53.5 - '@rollup/rollup-linux-arm64-musl': 4.53.5 - '@rollup/rollup-linux-loong64-gnu': 4.53.5 - '@rollup/rollup-linux-ppc64-gnu': 4.53.5 - '@rollup/rollup-linux-riscv64-gnu': 4.53.5 - '@rollup/rollup-linux-riscv64-musl': 4.53.5 - '@rollup/rollup-linux-s390x-gnu': 4.53.5 - '@rollup/rollup-linux-x64-gnu': 4.53.5 - '@rollup/rollup-linux-x64-musl': 4.53.5 - '@rollup/rollup-openharmony-arm64': 4.53.5 - '@rollup/rollup-win32-arm64-msvc': 4.53.5 - '@rollup/rollup-win32-ia32-msvc': 4.53.5 - '@rollup/rollup-win32-x64-gnu': 4.53.5 - 
'@rollup/rollup-win32-x64-msvc': 4.53.5 + '@rollup/rollup-android-arm-eabi': 4.56.0 + '@rollup/rollup-android-arm64': 4.56.0 + '@rollup/rollup-darwin-arm64': 4.56.0 + '@rollup/rollup-darwin-x64': 4.56.0 + '@rollup/rollup-freebsd-arm64': 4.56.0 + '@rollup/rollup-freebsd-x64': 4.56.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.56.0 + '@rollup/rollup-linux-arm-musleabihf': 4.56.0 + '@rollup/rollup-linux-arm64-gnu': 4.56.0 + '@rollup/rollup-linux-arm64-musl': 4.56.0 + '@rollup/rollup-linux-loong64-gnu': 4.56.0 + '@rollup/rollup-linux-loong64-musl': 4.56.0 + '@rollup/rollup-linux-ppc64-gnu': 4.56.0 + '@rollup/rollup-linux-ppc64-musl': 4.56.0 + '@rollup/rollup-linux-riscv64-gnu': 4.56.0 + '@rollup/rollup-linux-riscv64-musl': 4.56.0 + '@rollup/rollup-linux-s390x-gnu': 4.56.0 + '@rollup/rollup-linux-x64-gnu': 4.56.0 + '@rollup/rollup-linux-x64-musl': 4.56.0 + '@rollup/rollup-openbsd-x64': 4.56.0 + '@rollup/rollup-openharmony-arm64': 4.56.0 + '@rollup/rollup-win32-arm64-msvc': 4.56.0 + '@rollup/rollup-win32-ia32-msvc': 4.56.0 + '@rollup/rollup-win32-x64-gnu': 4.56.0 + '@rollup/rollup-win32-x64-msvc': 4.56.0 fsevents: 2.3.3 roughjs@4.6.6: @@ -7923,7 +8014,7 @@ snapshots: sax@1.2.4: {} - sax@1.4.3: {} + sax@1.4.4: {} schema-utils@4.3.3: dependencies: @@ -7948,11 +8039,11 @@ snapshots: dependencies: randombytes: 2.1.0 - seroval-plugins@1.3.3(seroval@1.3.2): + seroval-plugins@1.5.0(seroval@1.5.0): dependencies: - seroval: 1.3.2 + seroval: 1.5.0 - seroval@1.3.2: {} + seroval@1.5.0: {} shallow-clone@3.0.1: dependencies: @@ -7968,7 +8059,7 @@ snapshots: signal-exit@4.1.0: {} - slash@3.0.0: {} + slash@5.1.0: {} slice-ansi@4.0.0: dependencies: @@ -7978,17 +8069,19 @@ snapshots: smol-toml@1.5.2: {} - solid-js@1.9.10: + smol-toml@1.6.0: {} + + solid-js@1.9.11: dependencies: csstype: 3.2.3 - seroval: 1.3.2 - seroval-plugins: 1.3.3(seroval@1.3.2) + seroval: 1.5.0 + seroval-plugins: 1.5.0(seroval@1.5.0) - solid-transition-group@0.2.3(solid-js@1.9.10): + 
solid-transition-group@0.2.3(solid-js@1.9.11): dependencies: - '@solid-primitives/refs': 1.1.2(solid-js@1.9.10) - '@solid-primitives/transition-group': 1.1.2(solid-js@1.9.10) - solid-js: 1.9.10 + '@solid-primitives/refs': 1.1.2(solid-js@1.9.11) + '@solid-primitives/transition-group': 1.1.2(solid-js@1.9.11) + solid-js: 1.9.11 sortablejs@1.15.6: {} @@ -8078,35 +8171,37 @@ snapshots: strip-json-comments@3.1.1: {} + strip-json-comments@5.0.3: {} + style-search@0.1.0: {} - stylelint-config-recommended@17.0.0(stylelint@16.26.1(typescript@5.9.3)): + stylelint-config-recommended@18.0.0(stylelint@17.0.0(typescript@5.9.3)): dependencies: - stylelint: 16.26.1(typescript@5.9.3) + stylelint: 17.0.0(typescript@5.9.3) - stylelint-declaration-block-no-ignored-properties@2.8.0(stylelint@16.26.1(typescript@5.9.3)): + stylelint-declaration-block-no-ignored-properties@2.8.0(stylelint@17.0.0(typescript@5.9.3)): dependencies: - stylelint: 16.26.1(typescript@5.9.3) + stylelint: 17.0.0(typescript@5.9.3) - stylelint-declaration-strict-value@1.10.11(stylelint@16.26.1(typescript@5.9.3)): + stylelint-declaration-strict-value@1.10.11(stylelint@17.0.0(typescript@5.9.3)): dependencies: - stylelint: 16.26.1(typescript@5.9.3) + stylelint: 17.0.0(typescript@5.9.3) - stylelint-value-no-unknown-custom-properties@6.0.1(stylelint@16.26.1(typescript@5.9.3)): + stylelint-value-no-unknown-custom-properties@6.1.1(stylelint@17.0.0(typescript@5.9.3)): dependencies: postcss-value-parser: 4.2.0 resolve: 1.22.11 - stylelint: 16.26.1(typescript@5.9.3) + stylelint: 17.0.0(typescript@5.9.3) - stylelint@16.26.1(typescript@5.9.3): + stylelint@17.0.0(typescript@5.9.3): dependencies: - '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) - '@csstools/css-syntax-patches-for-csstree': 1.0.21 - '@csstools/css-tokenizer': 3.0.4 - '@csstools/media-query-list-parser': 4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) - 
'@csstools/selector-specificity': 5.0.0(postcss-selector-parser@7.1.1) - '@dual-bundle/import-meta-resolve': 4.2.1 - balanced-match: 2.0.0 + '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0) + '@csstools/css-syntax-patches-for-csstree': 1.0.25 + '@csstools/css-tokenizer': 4.0.0 + '@csstools/media-query-list-parser': 5.0.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0) + '@csstools/selector-resolve-nested': 4.0.0(postcss-selector-parser@7.1.1) + '@csstools/selector-specificity': 6.0.0(postcss-selector-parser@7.1.1) + balanced-match: 3.0.1 colord: 2.9.3 cosmiconfig: 9.0.0(typescript@5.9.3) css-functions-list: 3.2.3 @@ -8114,31 +8209,30 @@ snapshots: debug: 4.4.3 fast-glob: 3.3.3 fastest-levenshtein: 1.0.16 - file-entry-cache: 11.1.1 + file-entry-cache: 11.1.2 global-modules: 2.0.0 - globby: 11.1.0 + globby: 16.1.0 globjoin: 0.1.4 - html-tags: 3.3.1 + html-tags: 5.1.0 ignore: 7.0.5 + import-meta-resolve: 4.2.0 imurmurhash: 0.1.4 is-plain-object: 5.0.0 known-css-properties: 0.37.0 - mathml-tag-names: 2.1.3 - meow: 13.2.0 + mathml-tag-names: 4.0.0 + meow: 14.0.0 micromatch: 4.0.8 normalize-path: 3.0.0 picocolors: 1.1.1 postcss: 8.5.6 - postcss-resolve-nested-selector: 0.1.6 postcss-safe-parser: 7.0.1(postcss@8.5.6) postcss-selector-parser: 7.1.1 postcss-value-parser: 4.2.0 - resolve-from: 5.0.0 - string-width: 4.2.3 - supports-hyperlinks: 3.2.0 + string-width: 8.1.0 + supports-hyperlinks: 4.4.0 svg-tags: 1.0.0 table: 6.9.0 - write-file-atomic: 5.0.1 + write-file-atomic: 7.0.0 transitivePeerDependencies: - supports-color - typescript @@ -8168,6 +8262,8 @@ snapshots: superstruct@0.10.13: {} + supports-color@10.2.2: {} + supports-color@7.2.0: dependencies: has-flag: 4.0.0 @@ -8176,10 +8272,10 @@ snapshots: dependencies: has-flag: 4.0.0 - supports-hyperlinks@3.2.0: + supports-hyperlinks@4.4.0: dependencies: - has-flag: 4.0.0 - supports-color: 7.2.0 + has-flag: 5.0.1 + supports-color: 10.2.2 
supports-preserve-symlinks-flag@1.0.0: {} @@ -8195,7 +8291,7 @@ snapshots: css-what: 6.2.2 csso: 5.0.5 picocolors: 1.1.1 - sax: 1.4.3 + sax: 1.4.4 svgson@5.3.1: dependencies: @@ -8213,7 +8309,7 @@ snapshots: transitivePeerDependencies: - encoding - synckit@0.11.11: + synckit@0.11.12: dependencies: '@pkgr/core': 0.2.9 @@ -8254,16 +8350,16 @@ snapshots: tapable@2.3.0: {} - terser-webpack-plugin@5.3.16(webpack@5.104.0): + terser-webpack-plugin@5.3.16(webpack@5.104.1): dependencies: '@jridgewell/trace-mapping': 0.3.31 jest-worker: 27.5.1 schema-utils: 4.3.3 serialize-javascript: 6.0.2 - terser: 5.44.1 - webpack: 5.104.0(webpack-cli@6.0.1) + terser: 5.46.0 + webpack: 5.104.1(webpack-cli@6.0.1) - terser@5.44.1: + terser@5.46.0: dependencies: '@jridgewell/source-map': 0.3.11 acorn: 8.15.0 @@ -8309,7 +8405,7 @@ snapshots: tributejs@5.1.3: {} - ts-api-utils@2.1.0(typescript@5.9.3): + ts-api-utils@2.4.0(typescript@5.9.3): dependencies: typescript: 5.9.3 @@ -8331,12 +8427,12 @@ snapshots: dependencies: prelude-ls: 1.2.1 - typescript-eslint@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3): + typescript-eslint@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/eslint-plugin': 8.53.1(@typescript-eslint/parser@8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/parser': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/typescript-estree': 8.53.1(typescript@5.9.3) + '@typescript-eslint/utils': 8.53.1(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3) eslint: 
9.39.2(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: @@ -8348,14 +8444,14 @@ snapshots: uc.micro@2.1.0: {} - ufo@1.6.1: {} + ufo@1.6.3: {} uint8-to-base64@0.2.1: {} - undici-types@6.21.0: {} - undici-types@7.16.0: {} + unicorn-magic@0.4.0: {} + unrs-resolver@1.11.1: dependencies: napi-postinstall: 0.3.4 @@ -8386,7 +8482,7 @@ snapshots: escalade: 3.2.0 picocolors: 1.1.1 - updates@17.0.7: {} + updates@17.0.8: {} uri-js@4.4.1: dependencies: @@ -8398,33 +8494,35 @@ snapshots: vanilla-colorful@0.7.2: {} - vite-string-plugin@1.4.9: {} + vite-string-plugin@1.5.0(vite@7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)): + dependencies: + vite: 7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2) - vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2): + vite@7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2): dependencies: esbuild: 0.27.2 fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 postcss: 8.5.6 - rollup: 4.53.5 + rollup: 4.56.0 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 25.0.3 + '@types/node': 25.0.10 fsevents: 2.3.3 jiti: 2.6.1 stylus: 0.57.0 - terser: 5.44.1 + terser: 5.46.0 yaml: 2.8.2 - vitest@4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2): + vitest@4.0.18(@types/node@25.0.10)(happy-dom@20.3.7)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2): dependencies: - '@vitest/expect': 4.0.16 - '@vitest/mocker': 4.0.16(vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2)) - '@vitest/pretty-format': 4.0.16 - '@vitest/runner': 4.0.16 - '@vitest/snapshot': 4.0.16 - '@vitest/spy': 4.0.16 - '@vitest/utils': 4.0.16 + '@vitest/expect': 4.0.18 + '@vitest/mocker': 4.0.18(vite@7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)) + '@vitest/pretty-format': 4.0.18 + '@vitest/runner': 4.0.18 + '@vitest/snapshot': 4.0.18 + 
'@vitest/spy': 4.0.18 + '@vitest/utils': 4.0.18 es-module-lexer: 1.7.0 expect-type: 1.3.0 magic-string: 0.30.21 @@ -8436,11 +8534,11 @@ snapshots: tinyexec: 1.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 - vite: 7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2) + vite: 7.3.1(@types/node@25.0.10)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: - '@types/node': 25.0.3 - happy-dom: 20.0.11 + '@types/node': 25.0.10 + happy-dom: 20.3.7 transitivePeerDependencies: - jiti - less @@ -8475,14 +8573,14 @@ snapshots: vue-bar-graph@2.2.0(typescript@5.9.3): dependencies: - vue: 3.5.25(typescript@5.9.3) + vue: 3.5.27(typescript@5.9.3) transitivePeerDependencies: - typescript - vue-chartjs@5.3.3(chart.js@4.5.1)(vue@3.5.25(typescript@5.9.3)): + vue-chartjs@5.3.3(chart.js@4.5.1)(vue@3.5.27(typescript@5.9.3)): dependencies: chart.js: 4.5.1 - vue: 3.5.25(typescript@5.9.3) + vue: 3.5.27(typescript@5.9.3) vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1)): dependencies: @@ -8491,49 +8589,51 @@ snapshots: eslint-scope: 8.4.0 eslint-visitor-keys: 4.2.1 espree: 10.4.0 - esquery: 1.6.0 + esquery: 1.7.0 semver: 7.7.3 transitivePeerDependencies: - supports-color - vue-loader@17.4.2(vue@3.5.25(typescript@5.9.3))(webpack@5.104.0): + vue-loader@17.4.2(vue@3.5.27(typescript@5.9.3))(webpack@5.104.1): dependencies: chalk: 4.1.2 hash-sum: 2.0.0 - watchpack: 2.4.4 - webpack: 5.104.0(webpack-cli@6.0.1) + watchpack: 2.5.1 + webpack: 5.104.1(webpack-cli@6.0.1) optionalDependencies: - vue: 3.5.25(typescript@5.9.3) + vue: 3.5.27(typescript@5.9.3) - vue-tsc@3.1.8(typescript@5.9.3): + vue-tsc@3.2.3(typescript@5.9.3): dependencies: - '@volar/typescript': 2.4.26 - '@vue/language-core': 3.1.8(typescript@5.9.3) + '@volar/typescript': 2.4.27 + '@vue/language-core': 3.2.3 typescript: 5.9.3 - vue@3.5.25(typescript@5.9.3): + vue@3.5.27(typescript@5.9.3): dependencies: - '@vue/compiler-dom': 3.5.25 - '@vue/compiler-sfc': 3.5.25 - 
'@vue/runtime-dom': 3.5.25 - '@vue/server-renderer': 3.5.25(vue@3.5.25(typescript@5.9.3)) - '@vue/shared': 3.5.25 + '@vue/compiler-dom': 3.5.27 + '@vue/compiler-sfc': 3.5.27 + '@vue/runtime-dom': 3.5.27 + '@vue/server-renderer': 3.5.27(vue@3.5.27(typescript@5.9.3)) + '@vue/shared': 3.5.27 optionalDependencies: typescript: 5.9.3 - watchpack@2.4.4: + walk-up-path@4.0.0: {} + + watchpack@2.5.1: dependencies: glob-to-regexp: 0.4.1 graceful-fs: 4.2.11 webidl-conversions@3.0.1: {} - webpack-cli@6.0.1(webpack@5.104.0): + webpack-cli@6.0.1(webpack@5.104.1): dependencies: '@discoveryjs/json-ext': 0.6.3 - '@webpack-cli/configtest': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.0) - '@webpack-cli/info': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.0) - '@webpack-cli/serve': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.0) + '@webpack-cli/configtest': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.1) + '@webpack-cli/info': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.1) + '@webpack-cli/serve': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.1) colorette: 2.0.20 commander: 12.1.0 cross-spawn: 7.0.6 @@ -8542,7 +8642,7 @@ snapshots: import-local: 3.2.0 interpret: 3.1.1 rechoir: 0.8.0 - webpack: 5.104.0(webpack-cli@6.0.1) + webpack: 5.104.1(webpack-cli@6.0.1) webpack-merge: 6.0.1 webpack-merge@6.0.1: @@ -8558,7 +8658,7 @@ snapshots: webpack-sources@3.3.3: {} - webpack@5.104.0(webpack-cli@6.0.1): + webpack@5.104.1(webpack-cli@6.0.1): dependencies: '@types/eslint-scope': 3.7.7 '@types/estree': 1.0.8 @@ -8582,11 +8682,11 @@ snapshots: neo-async: 2.6.2 schema-utils: 4.3.3 tapable: 2.3.0 - terser-webpack-plugin: 5.3.16(webpack@5.104.0) - watchpack: 2.4.4 + terser-webpack-plugin: 5.3.16(webpack@5.104.1) + watchpack: 2.5.1 webpack-sources: 3.3.3 optionalDependencies: - webpack-cli: 6.0.1(webpack@5.104.0) + webpack-cli: 6.0.1(webpack@5.104.1) transitivePeerDependencies: - '@swc/core' - esbuild @@ -8630,11 +8730,13 @@ snapshots: wrappy@1.0.2: {} - write-file-atomic@5.0.1: + write-file-atomic@7.0.0: dependencies: imurmurhash: 0.1.4 
signal-exit: 4.1.0 + ws@8.19.0: {} + xml-lexer@0.2.2: dependencies: eventemitter3: 2.0.3 @@ -8649,3 +8751,5 @@ snapshots: yaml@2.8.2: {} yocto-queue@0.1.0: {} + + zod@4.3.6: {} diff --git a/public/assets/img/svg/octicon-logo-github.svg b/public/assets/img/svg/octicon-logo-github.svg index 02d92c9b13..8aae451ae5 100644 --- a/public/assets/img/svg/octicon-logo-github.svg +++ b/public/assets/img/svg/octicon-logo-github.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/public/assets/img/svg/octicon-mark-github.svg b/public/assets/img/svg/octicon-mark-github.svg index 9381053c06..6d6dc40886 100644 --- a/public/assets/img/svg/octicon-mark-github.svg +++ b/public/assets/img/svg/octicon-mark-github.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 3657feb2ce..20a10d1915 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ requires-python = ">=3.10" [dependency-groups] dev = [ "djlint==1.36.4", - "yamllint==1.37.1", + "yamllint==1.38.0", ] [tool.djlint] diff --git a/routers/api/v1/admin/adopt.go b/routers/api/v1/admin/adopt.go index c2efed7490..92711409f0 100644 --- a/routers/api/v1/admin/adopt.go +++ b/routers/api/v1/admin/adopt.go @@ -8,7 +8,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/routers/api/v1/utils" "code.gitea.io/gitea/services/context" repo_service "code.gitea.io/gitea/services/repository" @@ -99,12 +99,12 @@ func AdoptRepository(ctx *context.APIContext) { ctx.APIErrorInternal(err) return } - isDir, err := util.IsDir(repo_model.RepoPath(ctxUser.Name, repoName)) + exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(repo_model.RelativePath(ctxUser.Name, repoName))) if err != nil { ctx.APIErrorInternal(err) return } - if has || !isDir { + if has || !exist { 
ctx.APIErrorNotFound() return } @@ -161,12 +161,12 @@ func DeleteUnadoptedRepository(ctx *context.APIContext) { ctx.APIErrorInternal(err) return } - isDir, err := util.IsDir(repo_model.RepoPath(ctxUser.Name, repoName)) + exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(repo_model.RelativePath(ctxUser.Name, repoName))) if err != nil { ctx.APIErrorInternal(err) return } - if has || !isDir { + if has || !exist { ctx.APIErrorNotFound() return } diff --git a/routers/api/v1/admin/runners.go b/routers/api/v1/admin/runners.go index 736c421229..1e15b24076 100644 --- a/routers/api/v1/admin/runners.go +++ b/routers/api/v1/admin/runners.go @@ -14,7 +14,7 @@ import ( func GetRegistrationToken(ctx *context.APIContext) { // swagger:operation GET /admin/runners/registration-token admin adminGetRunnerRegistrationToken // --- - // summary: Get an global actions runner registration token + // summary: Get a global actions runner registration token // produces: // - application/json // parameters: @@ -29,7 +29,7 @@ func GetRegistrationToken(ctx *context.APIContext) { func CreateRegistrationToken(ctx *context.APIContext) { // swagger:operation POST /admin/actions/runners/registration-token admin adminCreateRunnerRegistrationToken // --- - // summary: Get an global actions runner registration token + // summary: Get a global actions runner registration token // produces: // - application/json // parameters: @@ -57,11 +57,11 @@ func ListRunners(ctx *context.APIContext) { shared.ListRunners(ctx, 0, 0) } -// GetRunner get an global runner +// GetRunner get a global runner func GetRunner(ctx *context.APIContext) { // swagger:operation GET /admin/actions/runners/{runner_id} admin getAdminRunner // --- - // summary: Get an global runner + // summary: Get a global runner // produces: // - application/json // parameters: @@ -80,11 +80,11 @@ func GetRunner(ctx *context.APIContext) { shared.GetRunner(ctx, 0, 0, ctx.PathParamInt64("runner_id")) } -// DeleteRunner delete an 
global runner +// DeleteRunner delete a global runner func DeleteRunner(ctx *context.APIContext) { // swagger:operation DELETE /admin/actions/runners/{runner_id} admin deleteAdminRunner // --- - // summary: Delete an global runner + // summary: Delete a global runner // produces: // - application/json // parameters: diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index fcf9e73057..6d37c67cc4 100644 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -1384,19 +1384,19 @@ func Routes() *web.Router { }) m.Get("/{base}/*", repo.GetPullRequestByBaseHead) }, mustAllowPulls, reqRepoReader(unit.TypeCode), context.ReferencesGitRepo()) - m.Group("/statuses", func() { + m.Group("/statuses", func() { // "/statuses/{sha}" only accepts commit ID m.Combo("/{sha}").Get(repo.GetCommitStatuses). Post(reqToken(), reqRepoWriter(unit.TypeCode), bind(api.CreateStatusOption{}), repo.NewCommitStatus) }, reqRepoReader(unit.TypeCode)) m.Group("/commits", func() { m.Get("", context.ReferencesGitRepo(), repo.GetAllCommits) - m.Group("/{ref}", func() { - m.Get("/status", repo.GetCombinedCommitStatusByRef) - m.Get("/statuses", repo.GetCommitStatusesByRef) - }, context.ReferencesGitRepo()) - m.Group("/{sha}", func() { - m.Get("/pull", repo.GetCommitPullRequest) - }, context.ReferencesGitRepo()) + m.PathGroup("/*", func(g *web.RouterPathGroup) { + // Mis-configured reverse proxy might decode the `%2F` to slash ahead, so we need to support both formats (escaped, unescaped) here. 
+ // It also matches GitHub's behavior + g.MatchPath("GET", "//status", repo.GetCombinedCommitStatusByRef) + g.MatchPath("GET", "//statuses", repo.GetCommitStatusesByRef) + g.MatchPath("GET", "//pull", repo.GetCommitPullRequest) + }) }, reqRepoReader(unit.TypeCode)) m.Group("/git", func() { m.Group("/commits", func() { diff --git a/routers/api/v1/misc/markup_test.go b/routers/api/v1/misc/markup_test.go index 38a1a3be9e..4d61b287ae 100644 --- a/routers/api/v1/misc/markup_test.go +++ b/routers/api/v1/misc/markup_test.go @@ -173,8 +173,8 @@ Here are some links to the most important topics. You can find the full list of Image

    `, http.StatusOK) - testRenderMarkup(t, "file", false, "path/test.unknown", "## Test", "unsupported file to render: \"path/test.unknown\"\n", http.StatusUnprocessableEntity) - testRenderMarkup(t, "unknown", false, "", "## Test", "Unknown mode: unknown\n", http.StatusUnprocessableEntity) + testRenderMarkup(t, "file", false, "path/test.unknown", "## Test", "unable to find a render\n", http.StatusUnprocessableEntity) + testRenderMarkup(t, "unknown", false, "", "## Test", "unsupported render mode: unknown\n", http.StatusUnprocessableEntity) } var simpleCases = []string{ diff --git a/routers/api/v1/repo/branch.go b/routers/api/v1/repo/branch.go index 4624d7e738..9bdc0c76b8 100644 --- a/routers/api/v1/repo/branch.go +++ b/routers/api/v1/repo/branch.go @@ -515,7 +515,7 @@ func RenameBranch(ctx *context.APIContext) { case repo_model.IsErrUserDoesNotHaveAccessToRepo(err): ctx.APIError(http.StatusForbidden, "User must be a repo or site admin to rename default or protected branches.") case errors.Is(err, git_model.ErrBranchIsProtected): - ctx.APIError(http.StatusForbidden, "Branch is protected by glob-based protection rules.") + ctx.APIError(http.StatusForbidden, "Failed to rename branch due to branch protection rules.") default: ctx.APIErrorInternal(err) } diff --git a/routers/api/v1/repo/download.go b/routers/api/v1/repo/download.go index ea5846d343..5ddda525f9 100644 --- a/routers/api/v1/repo/download.go +++ b/routers/api/v1/repo/download.go @@ -8,25 +8,35 @@ import ( "net/http" repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" archiver_service "code.gitea.io/gitea/services/repository/archiver" ) -func serveRepoArchive(ctx *context.APIContext, reqFileName string) { - aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, reqFileName) +func serveRepoArchive(ctx *context.APIContext, reqFileName string, paths []string) { + aReq, err := 
archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, reqFileName, paths) if err != nil { - if errors.Is(err, archiver_service.ErrUnknownArchiveFormat{}) { + if errors.Is(err, util.ErrInvalidArgument) { ctx.APIError(http.StatusBadRequest, err) - } else if errors.Is(err, archiver_service.RepoRefNotFoundError{}) { + } else if errors.Is(err, util.ErrNotExist) { ctx.APIError(http.StatusNotFound, err) } else { ctx.APIErrorInternal(err) } return } - archiver_service.ServeRepoArchive(ctx.Base, aReq) + err = archiver_service.ServeRepoArchive(ctx.Base, aReq) + if err != nil { + if errors.Is(err, util.ErrInvalidArgument) { + ctx.APIError(http.StatusBadRequest, err) + } else { + ctx.APIErrorInternal(err) + } + } } +// DownloadArchive is the GitHub-compatible endpoint to download repository archives +// TODO: The API document is missing: Add github compatible tarball download API endpoints (#32572) func DownloadArchive(ctx *context.APIContext) { var tp repo_model.ArchiveType switch ballType := ctx.PathParam("ball_type"); ballType { @@ -40,5 +50,5 @@ func DownloadArchive(ctx *context.APIContext) { ctx.APIError(http.StatusBadRequest, "Unknown archive type: "+ballType) return } - serveRepoArchive(ctx, ctx.PathParam("*")+"."+tp.String()) + serveRepoArchive(ctx, ctx.PathParam("*")+"."+tp.String(), ctx.FormStrings("path")) } diff --git a/routers/api/v1/repo/file.go b/routers/api/v1/repo/file.go index 27a0827a10..deb68963c2 100644 --- a/routers/api/v1/repo/file.go +++ b/routers/api/v1/repo/file.go @@ -273,13 +273,19 @@ func GetArchive(ctx *context.APIContext) { // description: the git reference for download with attached archive format (e.g. 
master.zip) // type: string // required: true + // - name: path + // in: query + // type: array + // items: + // type: string + // description: subpath of the repository to download + // collectionFormat: multi // responses: // 200: // description: success // "404": // "$ref": "#/responses/notFound" - - serveRepoArchive(ctx, ctx.PathParam("*")) + serveRepoArchive(ctx, ctx.PathParam("*"), ctx.FormStrings("path")) } // GetEditorconfig get editor config of a repository diff --git a/routers/api/v1/repo/issue_stopwatch.go b/routers/api/v1/repo/issue_stopwatch.go index 0f28b9757d..f9fbff091d 100644 --- a/routers/api/v1/repo/issue_stopwatch.go +++ b/routers/api/v1/repo/issue_stopwatch.go @@ -224,7 +224,7 @@ func GetStopwatches(ctx *context.APIContext) { return } - apiSWs, err := convert.ToStopWatches(ctx, sws) + apiSWs, err := convert.ToStopWatches(ctx, ctx.Doer, sws) if err != nil { ctx.APIErrorInternal(err) return diff --git a/routers/api/v1/repo/pull.go b/routers/api/v1/repo/pull.go index 209647e7d7..2f59bef7c7 100644 --- a/routers/api/v1/repo/pull.go +++ b/routers/api/v1/repo/pull.go @@ -25,6 +25,7 @@ import ( "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" @@ -496,6 +497,12 @@ func CreatePullRequest(ctx *context.APIContext) { deadlineUnix = timeutil.TimeStamp(form.Deadline.Unix()) } + unitPullRequest, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypePullRequests) + if err != nil { + ctx.APIErrorInternal(err) + return + } + prIssue := &issues_model.Issue{ RepoID: repo.ID, Title: form.Title, @@ -517,6 +524,8 @@ func CreatePullRequest(ctx *context.APIContext) { Type: issues_model.PullRequestGitea, } + pr.AllowMaintainerEdit = optional.FromPtr(form.AllowMaintainerEdit).ValueOrDefault(unitPullRequest.PullRequestsConfig().DefaultAllowMaintainerEdit) + // 
Get all assignee IDs assigneeIDs, err := issues_model.MakeIDsFromAPIAssigneesToAdd(ctx, form.Assignee, form.Assignees) if err != nil { @@ -1062,19 +1071,11 @@ func MergePullRequest(ctx *context.APIContext) { // parseCompareInfo returns non-nil if it succeeds, it always writes to the context and returns nil if it fails func parseCompareInfo(ctx *context.APIContext, compareParam string) (result *git_service.CompareInfo, closer func()) { baseRepo := ctx.Repo.Repository - compareReq, err := common.ParseCompareRouterParam(compareParam) - switch { - case errors.Is(err, util.ErrInvalidArgument): - ctx.APIError(http.StatusBadRequest, err.Error()) - return nil, nil - case err != nil: - ctx.APIErrorInternal(err) - return nil, nil - } + compareReq := common.ParseCompareRouterParam(compareParam) // remove the check when we support compare with carets - if compareReq.CaretTimes > 0 { - ctx.APIError(http.StatusBadRequest, "Unsupported compare syntax with carets") + if compareReq.BaseOriRefSuffix != "" { + ctx.APIError(http.StatusBadRequest, "Unsupported comparison syntax: ref with suffix") return nil, nil } @@ -1321,7 +1322,7 @@ func CancelScheduledAutoMerge(ctx *context.APIContext) { } if ctx.Doer.ID != autoMerge.DoerID { - allowed, err := access_model.IsUserRepoAdmin(ctx, ctx.Repo.Repository, ctx.Doer) + allowed, err := pull_service.IsUserAllowedToMerge(ctx, pull, ctx.Repo.Permission, ctx.Doer) if err != nil { ctx.APIErrorInternal(err) return @@ -1531,9 +1532,9 @@ func GetPullRequestFiles(ctx *context.APIContext) { var compareInfo *git_service.CompareInfo if pr.HasMerged { - compareInfo, err = git_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, git.RefName(pr.MergeBase), git.RefName(pr.GetGitHeadRefName()), true, false) + compareInfo, err = git_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, git.RefName(pr.MergeBase), git.RefName(pr.GetGitHeadRefName()), false, false) } else { - compareInfo, err = git_service.GetCompareInfo(ctx, 
pr.BaseRepo, pr.BaseRepo, baseGitRepo, git.RefNameFromBranch(pr.BaseBranch), git.RefName(pr.GetGitHeadRefName()), true, false) + compareInfo, err = git_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, git.RefNameFromBranch(pr.BaseBranch), git.RefName(pr.GetGitHeadRefName()), false, false) } if err != nil { ctx.APIErrorInternal(err) diff --git a/routers/api/v1/repo/release_attachment.go b/routers/api/v1/repo/release_attachment.go index 43e97beb27..5f5423fafe 100644 --- a/routers/api/v1/repo/release_attachment.go +++ b/routers/api/v1/repo/release_attachment.go @@ -398,7 +398,6 @@ func DeleteReleaseAttachment(ctx *context.APIContext) { ctx.APIErrorNotFound() return } - // FIXME Should prove the existence of the given repo, but results in unnecessary database requests if err := repo_model.DeleteAttachment(ctx, attach, true); err != nil { ctx.APIErrorInternal(err) diff --git a/routers/common/blockexpensive.go b/routers/common/blockexpensive.go index ebec0a2e5b..fec364351c 100644 --- a/routers/common/blockexpensive.go +++ b/routers/common/blockexpensive.go @@ -24,7 +24,7 @@ func BlockExpensive() func(next http.Handler) http.Handler { ret := determineRequestPriority(reqctx.FromContext(req.Context())) if !ret.SignedIn { if ret.Expensive || ret.LongPolling { - http.Redirect(w, req, setting.AppSubURL+"/user/login", http.StatusSeeOther) + http.Redirect(w, req, middleware.RedirectLinkUserLogin(req), http.StatusSeeOther) return } } diff --git a/routers/common/compare.go b/routers/common/compare.go index fa9e4e668e..5b6fdba4e0 100644 --- a/routers/common/compare.go +++ b/routers/common/compare.go @@ -9,31 +9,28 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/util" ) type CompareRouterReq struct { - BaseOriRef string + BaseOriRef string + BaseOriRefSuffix string + + CompareSeparator string + HeadOwner string HeadRepoName string HeadOriRef string 
- CaretTimes int // ^ times after base ref - DotTimes int } func (cr *CompareRouterReq) DirectComparison() bool { - return cr.DotTimes == 2 || cr.DotTimes == 0 + // FIXME: the design of "DirectComparison" is wrong, it loses the information of `^` + // To correctly handle the comparison, developers should use `ci.CompareSeparator` directly, all "DirectComparison" related code should be rewritten. + return cr.CompareSeparator == ".." } -func parseBase(base string) (string, int) { - parts := strings.SplitN(base, "^", 2) - if len(parts) == 1 { - return base, 0 - } - return parts[0], len(parts[1]) + 1 -} - -func parseHead(head string) (string, string, string) { +func parseHead(head string) (headOwnerName, headRepoName, headRef string) { paths := strings.SplitN(head, ":", 2) if len(paths) == 1 { return "", "", paths[0] @@ -48,6 +45,7 @@ func parseHead(head string) (string, string, string) { // ParseCompareRouterParam Get compare information from the router parameter. // A full compare url is of the form: // +// 0. /{:baseOwner}/{:baseRepoName}/compare // 1. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headBranch} // 2. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}:{:headBranch} // 3. 
/{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}/{:headRepoName}:{:headBranch} @@ -70,45 +68,31 @@ func parseHead(head string) (string, string, string) { // format: ...[:] // base<-head: master...head:feature // same repo: master...feature -func ParseCompareRouterParam(routerParam string) (*CompareRouterReq, error) { +func ParseCompareRouterParam(routerParam string) *CompareRouterReq { if routerParam == "" { - return &CompareRouterReq{}, nil + return &CompareRouterReq{} } - var basePart, headPart string - dotTimes := 3 - parts := strings.Split(routerParam, "...") - if len(parts) > 2 { - return nil, util.NewInvalidArgumentErrorf("invalid compare router: %s", routerParam) - } - if len(parts) != 2 { - parts = strings.Split(routerParam, "..") - if len(parts) == 1 { + sep := "..." + basePart, headPart, ok := strings.Cut(routerParam, sep) + if !ok { + sep = ".." + basePart, headPart, ok = strings.Cut(routerParam, sep) + if !ok { headOwnerName, headRepoName, headRef := parseHead(routerParam) return &CompareRouterReq{ - HeadOriRef: headRef, - HeadOwner: headOwnerName, - HeadRepoName: headRepoName, - DotTimes: dotTimes, - }, nil - } else if len(parts) > 2 { - return nil, util.NewInvalidArgumentErrorf("invalid compare router: %s", routerParam) + HeadOriRef: headRef, + HeadOwner: headOwnerName, + HeadRepoName: headRepoName, + CompareSeparator: "...", + } } - dotTimes = 2 } - basePart, headPart = parts[0], parts[1] - baseRef, caretTimes := parseBase(basePart) - headOwnerName, headRepoName, headRef := parseHead(headPart) - - return &CompareRouterReq{ - BaseOriRef: baseRef, - HeadOriRef: headRef, - HeadOwner: headOwnerName, - HeadRepoName: headRepoName, - CaretTimes: caretTimes, - DotTimes: dotTimes, - }, nil + ci := &CompareRouterReq{CompareSeparator: sep} + ci.BaseOriRef, ci.BaseOriRefSuffix = git.ParseRefSuffix(basePart) + ci.HeadOwner, ci.HeadRepoName, ci.HeadOriRef = parseHead(headPart) + return ci } // maxForkTraverseLevel defines the maximum levels to 
traverse when searching for the head repository. diff --git a/routers/common/compare_test.go b/routers/common/compare_test.go index a55f6607ae..e4e24a03cf 100644 --- a/routers/common/compare_test.go +++ b/routers/common/compare_test.go @@ -6,146 +6,100 @@ package common import ( "testing" - "code.gitea.io/gitea/models/unittest" - "github.com/stretchr/testify/assert" ) func TestCompareRouterReq(t *testing.T) { - unittest.PrepareTestEnv(t) - - kases := []struct { - router string + cases := []struct { + input string CompareRouterReq *CompareRouterReq }{ { - router: "", + input: "", + CompareRouterReq: &CompareRouterReq{}, + }, + { + input: "v1.0...v1.1", CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "", - HeadOriRef: "", - DotTimes: 0, + BaseOriRef: "v1.0", + CompareSeparator: "...", + HeadOriRef: "v1.1", }, }, { - router: "main...develop", + input: "main..develop", CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "main", - HeadOriRef: "develop", - DotTimes: 3, + BaseOriRef: "main", + CompareSeparator: "..", + HeadOriRef: "develop", }, }, { - router: "main..develop", + input: "main^...develop", CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "main", - HeadOriRef: "develop", - DotTimes: 2, + BaseOriRef: "main", + BaseOriRefSuffix: "^", + CompareSeparator: "...", + HeadOriRef: "develop", }, }, { - router: "main^...develop", + input: "main^^^^^...develop", CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "main", - HeadOriRef: "develop", - CaretTimes: 1, - DotTimes: 3, + BaseOriRef: "main", + BaseOriRefSuffix: "^^^^^", + CompareSeparator: "...", + HeadOriRef: "develop", }, }, { - router: "main^^^^^...develop", + input: "develop", CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "main", - HeadOriRef: "develop", - CaretTimes: 5, - DotTimes: 3, + CompareSeparator: "...", + HeadOriRef: "develop", }, }, { - router: "develop", + input: "teabot:feature1", CompareRouterReq: &CompareRouterReq{ - HeadOriRef: "develop", - DotTimes: 3, + CompareSeparator: "...", + 
HeadOwner: "teabot", + HeadOriRef: "feature1", }, }, { - router: "lunny/forked_repo:develop", + input: "lunny/forked_repo:develop", CompareRouterReq: &CompareRouterReq{ - HeadOwner: "lunny", - HeadRepoName: "forked_repo", - HeadOriRef: "develop", - DotTimes: 3, + CompareSeparator: "...", + HeadOwner: "lunny", + HeadRepoName: "forked_repo", + HeadOriRef: "develop", }, }, { - router: "main...lunny/forked_repo:develop", + input: "main...lunny/forked_repo:develop", CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "main", - HeadOwner: "lunny", - HeadRepoName: "forked_repo", - HeadOriRef: "develop", - DotTimes: 3, + BaseOriRef: "main", + CompareSeparator: "...", + HeadOwner: "lunny", + HeadRepoName: "forked_repo", + HeadOriRef: "develop", }, }, { - router: "main...lunny/forked_repo:develop", + input: "main^...lunny/forked_repo:develop", CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "main", - HeadOwner: "lunny", - HeadRepoName: "forked_repo", - HeadOriRef: "develop", - DotTimes: 3, - }, - }, - { - router: "main^...lunny/forked_repo:develop", - CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "main", - HeadOwner: "lunny", - HeadRepoName: "forked_repo", - HeadOriRef: "develop", - DotTimes: 3, - CaretTimes: 1, - }, - }, - { - router: "v1.0...v1.1", - CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "v1.0", - HeadOriRef: "v1.1", - DotTimes: 3, - }, - }, - { - router: "teabot-patch-1...v0.0.1", - CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "teabot-patch-1", - HeadOriRef: "v0.0.1", - DotTimes: 3, - }, - }, - { - router: "teabot:feature1", - CompareRouterReq: &CompareRouterReq{ - HeadOwner: "teabot", - HeadOriRef: "feature1", - DotTimes: 3, - }, - }, - { - router: "8eb19a5ae19abae15c0666d4ab98906139a7f439...283c030497b455ecfa759d4649f9f8b45158742e", - CompareRouterReq: &CompareRouterReq{ - BaseOriRef: "8eb19a5ae19abae15c0666d4ab98906139a7f439", - HeadOriRef: "283c030497b455ecfa759d4649f9f8b45158742e", - DotTimes: 3, + BaseOriRef: "main", + BaseOriRefSuffix: 
"^", + CompareSeparator: "...", + HeadOwner: "lunny", + HeadRepoName: "forked_repo", + HeadOriRef: "develop", }, }, } - for _, kase := range kases { - t.Run(kase.router, func(t *testing.T) { - r, err := ParseCompareRouterParam(kase.router) - assert.NoError(t, err) - assert.Equal(t, kase.CompareRouterReq, r) - }) + for _, c := range cases { + assert.Equal(t, c.CompareRouterReq, ParseCompareRouterParam(c.input), "input: %s", c.input) } } diff --git a/routers/common/errpage.go b/routers/common/errpage.go index 4caef92d14..4d24914bd2 100644 --- a/routers/common/errpage.go +++ b/routers/common/errpage.go @@ -4,8 +4,11 @@ package common import ( + "bytes" "fmt" + "io" "net/http" + "strings" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/httpcache" @@ -19,6 +22,36 @@ import ( const tplStatus500 templates.TplName = "status/500" +func renderServerErrorPage(w http.ResponseWriter, req *http.Request, respCode int, tmpl templates.TplName, ctxData map[string]any, plainMsg string) { + acceptsHTML := false + for _, part := range req.Header["Accept"] { + if strings.Contains(part, "text/html") { + acceptsHTML = true + break + } + } + + httpcache.SetCacheControlInHeader(w.Header(), &httpcache.CacheControlOptions{NoTransform: true}) + w.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions) + + tmplCtx := context.NewTemplateContext(req.Context(), req) + tmplCtx["Locale"] = middleware.Locale(w, req) + + w.WriteHeader(respCode) + + outBuf := &bytes.Buffer{} + if acceptsHTML { + err := templates.PageRenderer().HTML(outBuf, respCode, tmpl, ctxData, tmplCtx) + if err != nil { + _, _ = w.Write([]byte("Internal server error but failed to render error page template, please collect error logs and report to Gitea issue tracker")) + return + } + } else { + outBuf.WriteString(plainMsg) + } + _, _ = io.Copy(w, outBuf) +} + // RenderPanicErrorPage renders a 500 page, and it never panics func RenderPanicErrorPage(w http.ResponseWriter, req *http.Request, err 
any) { combinedErr := fmt.Sprintf("%v\n%s", err, log.Stack(2)) @@ -32,24 +65,14 @@ func RenderPanicErrorPage(w http.ResponseWriter, req *http.Request, err any) { routing.UpdatePanicError(req.Context(), err) - httpcache.SetCacheControlInHeader(w.Header(), &httpcache.CacheControlOptions{NoTransform: true}) - w.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions) - - tmplCtx := context.NewTemplateContext(req.Context(), req) - tmplCtx["Locale"] = middleware.Locale(w, req) + plainMsg := "Internal Server Error" ctxData := middleware.GetContextData(req.Context()) - // This recovery handler could be called without Gitea's web context, so we shouldn't touch that context too much. // Otherwise, the 500-page may cause new panics, eg: cache.GetContextWithData, it makes the developer&users couldn't find the original panic. user, _ := ctxData[middleware.ContextDataKeySignedUser].(*user_model.User) if !setting.IsProd || (user != nil && user.IsAdmin) { - ctxData["ErrorMsg"] = "PANIC: " + combinedErr - } - - err = templates.HTMLRenderer().HTML(w, http.StatusInternalServerError, tplStatus500, ctxData, tmplCtx) - if err != nil { - log.Error("Error occurs again when rendering error page: %v", err) - w.WriteHeader(http.StatusInternalServerError) - _, _ = w.Write([]byte("Internal server error, please collect error logs and report to Gitea issue tracker")) + plainMsg = "PANIC: " + combinedErr + ctxData["ErrorMsg"] = plainMsg } + renderServerErrorPage(w, req, http.StatusInternalServerError, tplStatus500, ctxData, plainMsg) } diff --git a/routers/common/errpage_test.go b/routers/common/errpage_test.go index 33aa6bb339..c50d45c296 100644 --- a/routers/common/errpage_test.go +++ b/routers/common/errpage_test.go @@ -18,19 +18,28 @@ import ( ) func TestRenderPanicErrorPage(t *testing.T) { - w := httptest.NewRecorder() - req := &http.Request{URL: &url.URL{}} - req = req.WithContext(reqctx.NewRequestContextForTest(t.Context())) - RenderPanicErrorPage(w, req, errors.New("fake panic 
error (for test only)")) - respContent := w.Body.String() - assert.Contains(t, respContent, `class="page-content status-page-500"`) - assert.Contains(t, respContent, ``) - assert.Contains(t, respContent, `lang="en-US"`) // make sure the locale work + t.Run("HTML", func(t *testing.T) { + w := httptest.NewRecorder() + req := &http.Request{URL: &url.URL{}, Header: http.Header{"Accept": []string{"text/html"}}} + req = req.WithContext(reqctx.NewRequestContextForTest(t.Context())) + RenderPanicErrorPage(w, req, errors.New("fake panic error (for test only)")) + respContent := w.Body.String() + assert.Contains(t, respContent, `class="page-content status-page-500"`) + assert.Contains(t, respContent, ``) + assert.Contains(t, respContent, `lang="en-US"`) // make sure the locale work - // the 500 page doesn't have normal pages footer, it makes it easier to distinguish a normal page and a failed page. - // especially when a sub-template causes page error, the HTTP response code is still 200, - // the different "footer" is the only way to know whether a page is fully rendered without error. - assert.False(t, test.IsNormalPageCompleted(respContent)) + // the 500 page doesn't have normal pages footer, it makes it easier to distinguish a normal page and a failed page. + // especially when a sub-template causes page error, the HTTP response code is still 200, + // the different "footer" is the only way to know whether a page is fully rendered without error. 
+ assert.False(t, test.IsNormalPageCompleted(respContent)) + }) + t.Run("Plain", func(t *testing.T) { + w := httptest.NewRecorder() + req := &http.Request{URL: &url.URL{}} + req = req.WithContext(reqctx.NewRequestContextForTest(t.Context())) + renderServiceUnavailable(w, req) + assert.Equal(t, "Service Unavailable", w.Body.String()) + }) } func TestMain(m *testing.M) { diff --git a/routers/common/markup.go b/routers/common/markup.go index 00b2dd07c6..e189bcdecf 100644 --- a/routers/common/markup.go +++ b/routers/common/markup.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/models/renderhelper" "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/httplib" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" @@ -31,7 +32,7 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur // and the urlPathContext is "/gitea/owner/repo/src/branch/features/feat-123/doc" if mode == "" || mode == "markdown" { - // raw markdown doesn't need any special handling + // raw Markdown doesn't need any special handling baseLink := urlPathContext if baseLink == "" { baseLink = fmt.Sprintf("%s%s", httplib.GuessCurrentHostURL(ctx), urlPathContext) @@ -39,7 +40,8 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur rctx := renderhelper.NewRenderContextSimpleDocument(ctx, baseLink).WithUseAbsoluteLink(true). 
WithMarkupType(markdown.MarkupName) if err := markdown.RenderRaw(rctx, strings.NewReader(text), ctx.Resp); err != nil { - ctx.HTTPError(http.StatusInternalServerError, err.Error()) + log.Error("RenderMarkupRaw: %v", err) + ctx.HTTPError(http.StatusInternalServerError, "failed to render raw markup") } return } @@ -92,7 +94,7 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur }) rctx = rctx.WithMarkupType("").WithRelativePath(filePath) // render the repo file content by its extension default: - ctx.HTTPError(http.StatusUnprocessableEntity, "Unknown mode: "+mode) + ctx.HTTPError(http.StatusUnprocessableEntity, "unsupported render mode: "+mode) return } rctx = rctx.WithUseAbsoluteLink(true) @@ -100,7 +102,8 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur if errors.Is(err, util.ErrInvalidArgument) { ctx.HTTPError(http.StatusUnprocessableEntity, err.Error()) } else { - ctx.HTTPError(http.StatusInternalServerError, err.Error()) + log.Error("RenderMarkup: %v", err) + ctx.HTTPError(http.StatusInternalServerError, "failed to render markup") } return } diff --git a/routers/common/qos.go b/routers/common/qos.go index 0670ea0b4c..96f23b64fe 100644 --- a/routers/common/qos.go +++ b/routers/common/qos.go @@ -14,7 +14,6 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/web/middleware" - giteacontext "code.gitea.io/gitea/services/context" "github.com/bohde/codel" "github.com/go-chi/chi/v5" @@ -119,27 +118,6 @@ func requestPriority(ctx context.Context) Priority { // renderServiceUnavailable will render an HTTP 503 Service // Unavailable page, providing HTML if the client accepts it. 
func renderServiceUnavailable(w http.ResponseWriter, req *http.Request) { - acceptsHTML := false - for _, part := range req.Header["Accept"] { - if strings.Contains(part, "text/html") { - acceptsHTML = true - break - } - } - - // If the client doesn't accept HTML, then render a plain text response - if !acceptsHTML { - http.Error(w, "503 Service Unavailable", http.StatusServiceUnavailable) - return - } - - tmplCtx := giteacontext.NewTemplateContext(req.Context(), req) - tmplCtx["Locale"] = middleware.Locale(w, req) ctxData := middleware.GetContextData(req.Context()) - err := templates.HTMLRenderer().HTML(w, http.StatusServiceUnavailable, tplStatus503, ctxData, tmplCtx) - if err != nil { - log.Error("Error occurs again when rendering service unavailable page: %v", err) - w.WriteHeader(http.StatusInternalServerError) - _, _ = w.Write([]byte("Internal server error, please collect error logs and report to Gitea issue tracker")) - } + renderServerErrorPage(w, req, http.StatusServiceUnavailable, tplStatus503, ctxData, "Service Unavailable") } diff --git a/routers/common/qos_test.go b/routers/common/qos_test.go index 850a5f51db..17dc9cb30c 100644 --- a/routers/common/qos_test.go +++ b/routers/common/qos_test.go @@ -4,7 +4,6 @@ package common import ( - "net/http" "testing" user_model "code.gitea.io/gitea/models/user" @@ -62,30 +61,3 @@ func TestRequestPriority(t *testing.T) { }) } } - -func TestRenderServiceUnavailable(t *testing.T) { - t.Run("HTML", func(t *testing.T) { - ctx, resp := contexttest.MockContext(t, "") - ctx.Req.Header.Set("Accept", "text/html") - - renderServiceUnavailable(resp, ctx.Req) - assert.Equal(t, http.StatusServiceUnavailable, resp.Code) - assert.Contains(t, resp.Header().Get("Content-Type"), "text/html") - - body := resp.Body.String() - assert.Contains(t, body, `lang="en-US"`) - assert.Contains(t, body, "503 Service Unavailable") - }) - - t.Run("plain", func(t *testing.T) { - ctx, resp := contexttest.MockContext(t, "") - 
ctx.Req.Header.Set("Accept", "text/plain") - - renderServiceUnavailable(resp, ctx.Req) - assert.Equal(t, http.StatusServiceUnavailable, resp.Code) - assert.Contains(t, resp.Header().Get("Content-Type"), "text/plain") - - body := resp.Body.String() - assert.Contains(t, body, "503 Service Unavailable") - }) -} diff --git a/routers/init.go b/routers/init.go index 859b00ebb2..82a5378263 100644 --- a/routers/init.go +++ b/routers/init.go @@ -15,7 +15,6 @@ import ( "code.gitea.io/gitea/modules/eventsource" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git/gitcmd" - "code.gitea.io/gitea/modules/highlight" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/external" @@ -24,7 +23,6 @@ import ( "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/svg" "code.gitea.io/gitea/modules/system" - "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/translation" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" @@ -132,7 +130,6 @@ func InitWebInstalled(ctx context.Context) { mustInit(uinotification.Init) mustInitCtx(ctx, archiver.Init) - highlight.NewContext() external.RegisterRenderers() markup.Init(markup_service.FormalRenderHelperFuncs()) @@ -182,7 +179,6 @@ func InitWebInstalled(ctx context.Context) { // NormalRoutes represents non install routes func NormalRoutes() *web.Router { - _ = templates.HTMLRenderer() r := web.NewRouter() r.Use(common.ProtocolMiddlewares()...) 
diff --git a/routers/install/install.go b/routers/install/install.go index c5acf968bd..399128b6ed 100644 --- a/routers/install/install.go +++ b/routers/install/install.go @@ -24,11 +24,9 @@ import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" - "code.gitea.io/gitea/modules/reqctx" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/timeutil" - "code.gitea.io/gitea/modules/translation" "code.gitea.io/gitea/modules/user" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" @@ -37,8 +35,6 @@ import ( "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/forms" "code.gitea.io/gitea/services/versioned_migration" - - "gitea.com/go-chi/session" ) const ( @@ -55,29 +51,13 @@ func getSupportedDbTypeNames() (dbTypeNames []map[string]string) { return dbTypeNames } -// installContexter prepare for rendering installation page func installContexter() func(next http.Handler) http.Handler { - rnd := templates.HTMLRenderer() - dbTypeNames := getSupportedDbTypeNames() - envConfigKeys := setting.CollectEnvConfigKeys() - return func(next http.Handler) http.Handler { - return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { - base := context.NewBaseContext(resp, req) - ctx := context.NewWebContext(base, rnd, session.GetSession(req)) - ctx.Data.MergeFrom(middleware.CommonTemplateContextData()) - ctx.Data.MergeFrom(reqctx.ContextData{ - "Title": ctx.Locale.Tr("install.install"), - "PageIsInstall": true, - "DbTypeNames": dbTypeNames, - "EnvConfigKeys": envConfigKeys, - "CustomConfFile": setting.CustomConf, - "AllLangs": translation.AllLangs(), - - "PasswordHashAlgorithms": hash.RecommendedHashAlgorithms, - }) - next.ServeHTTP(resp, ctx.Req) - }) - } + return context.ContexterInstallPage(map[string]any{ + "DbTypeNames": getSupportedDbTypeNames(), + "EnvConfigKeys": setting.CollectEnvConfigKeys(), + 
"CustomConfFile": setting.CustomConf, + "PasswordHashAlgorithms": hash.RecommendedHashAlgorithms, + }) } // Install render installation page diff --git a/routers/private/hook_pre_receive.go b/routers/private/hook_pre_receive.go index 88e8b466f1..39955116c4 100644 --- a/routers/private/hook_pre_receive.go +++ b/routers/private/hook_pre_receive.go @@ -191,7 +191,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID string, r // 2. Disallow force pushes to protected branches if oldCommitID != objectFormat.EmptyObjectID().String() { - output, err := gitrepo.RunCmdString(ctx, + output, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-list", "--max-count=1"). AddDynamicArguments(oldCommitID, "^"+newCommitID). diff --git a/routers/private/hook_verification.go b/routers/private/hook_verification.go index 9c357f4b41..fd26ba89e2 100644 --- a/routers/private/hook_verification.go +++ b/routers/private/hook_verification.go @@ -5,9 +5,7 @@ package private import ( "bufio" - "context" "io" - "os" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git/gitcmd" @@ -18,16 +16,6 @@ import ( // This file contains commit verification functions for refs passed across in hooks func verifyCommits(oldCommitID, newCommitID string, repo *git.Repository, env []string) error { - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - log.Error("Unable to create os.Pipe for %s", repo.Path) - return err - } - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - var command *gitcmd.Command objectFormat, _ := repo.GetObjectFormat() if oldCommitID == objectFormat.EmptyObjectID().String() { @@ -39,18 +27,14 @@ func verifyCommits(oldCommitID, newCommitID string, repo *git.Repository, env [] command = gitcmd.NewCommand("rev-list").AddDynamicArguments(oldCommitID + "..." + newCommitID) } // This is safe as force pushes are already forbidden - err = command.WithEnv(env). 
+ stdoutReader, stdoutReaderClose := command.MakeStdoutPipe() + defer stdoutReaderClose() + + err := command.WithEnv(env). WithDir(repo.Path). - WithStdout(stdoutWriter). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() + WithPipelineFunc(func(ctx gitcmd.Context) error { err := readAndVerifyCommitsFromShaReader(stdoutReader, repo, env) - if err != nil { - log.Error("readAndVerifyCommitsFromShaReader failed: %v", err) - cancel() - } - _ = stdoutReader.Close() - return err + return ctx.CancelPipeline(err) }). Run(repo.Ctx) if err != nil && !isErrUnverifiedCommit(err) { @@ -72,34 +56,21 @@ func readAndVerifyCommitsFromShaReader(input io.ReadCloser, repo *git.Repository } func readAndVerifyCommit(sha string, repo *git.Repository, env []string) error { - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - log.Error("Unable to create pipe for %s: %v", repo.Path, err) - return err - } - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - commitID := git.MustIDFromString(sha) + cmd := gitcmd.NewCommand("cat-file", "commit").AddDynamicArguments(sha) + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() - return gitcmd.NewCommand("cat-file", "commit").AddDynamicArguments(sha). - WithEnv(env). + return cmd.WithEnv(env). WithDir(repo.Path). - WithStdout(stdoutWriter). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() + WithPipelineFunc(func(ctx gitcmd.Context) error { commit, err := git.CommitFromReader(repo, commitID, stdoutReader) if err != nil { return err } verification := asymkey_service.ParseCommitWithSignature(ctx, commit) if !verification.Verified { - cancel() - return &errUnverifiedCommit{ - commit.ID.String(), - } + return ctx.CancelPipeline(&errUnverifiedCommit{commit.ID.String()}) } return nil }). 
diff --git a/routers/private/manager.go b/routers/private/manager.go index 00e52d6511..b84919d180 100644 --- a/routers/private/manager.go +++ b/routers/private/manager.go @@ -21,7 +21,7 @@ import ( // ReloadTemplates reloads all the templates func ReloadTemplates(ctx *context.PrivateContext) { - err := templates.ReloadHTMLTemplates() + err := templates.ReloadAllTemplates() if err != nil { ctx.JSON(http.StatusInternalServerError, private.Response{ UserMsg: fmt.Sprintf("Template error: %v", err), diff --git a/routers/web/admin/repos.go b/routers/web/admin/repos.go index 1bc8abb88c..424219815c 100644 --- a/routers/web/admin/repos.go +++ b/routers/web/admin/repos.go @@ -11,10 +11,10 @@ import ( "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" - "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/routers/web/explore" "code.gitea.io/gitea/services/context" repo_service "code.gitea.io/gitea/services/repository" @@ -134,12 +134,12 @@ func AdoptOrDeleteRepository(ctx *context.Context) { ctx.ServerError("IsRepositoryExist", err) return } - isDir, err := util.IsDir(repo_model.RepoPath(ctxUser.Name, repoName)) + exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(repo_model.RelativePath(ctxUser.Name, repoName))) if err != nil { ctx.ServerError("IsDir", err) return } - if has || !isDir { + if has || !exist { // Fallthrough to failure mode } else if action == "adopt" { if _, err := repo_service.AdoptRepository(ctx, ctx.Doer, ctxUser, repo_service.CreateRepoOptions{ diff --git a/routers/web/auth/2fa.go b/routers/web/auth/2fa.go index 1f087a7897..a19c9d7aca 100644 --- a/routers/web/auth/2fa.go +++ b/routers/web/auth/2fa.go @@ -26,7 +26,7 @@ var ( func TwoFactor(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("twofa") - if 
CheckAutoLogin(ctx) { + if performAutoLogin(ctx) { return } @@ -99,7 +99,7 @@ func TwoFactorPost(ctx *context.Context) { func TwoFactorScratch(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("twofa_scratch") - if CheckAutoLogin(ctx) { + if performAutoLogin(ctx) { return } @@ -151,7 +151,7 @@ func TwoFactorScratchPost(ctx *context.Context) { return } - handleSignInFull(ctx, u, remember, false) + handleSignInFull(ctx, u, remember) if ctx.Written() { return } diff --git a/routers/web/auth/auth.go b/routers/web/auth/auth.go index d36fb5bab7..bc0939d92a 100644 --- a/routers/web/auth/auth.go +++ b/routers/web/auth/auth.go @@ -9,6 +9,7 @@ import ( "fmt" "html/template" "net/http" + "net/url" "strings" "code.gitea.io/gitea/models/auth" @@ -126,20 +127,47 @@ func resetLocale(ctx *context.Context, u *user_model.User) error { return nil } -func RedirectAfterLogin(ctx *context.Context) { +func rememberAuthRedirectLink(ctx *context.Context) { redirectTo := ctx.FormString("redirect_to") if redirectTo == "" { - redirectTo = ctx.GetSiteCookie("redirect_to") + if ref, err := url.Parse(ctx.Req.Referer()); err == nil && httplib.IsCurrentGiteaSiteURL(ctx, ctx.Req.Referer()) { + // the request paths starting with "/user/" are either: + // * auth related pages: don't redirect back to them + // * user settings pages: they have "require sign-in" protection already, no "referer redirect" would happen + skipRefererRedirect := strings.HasPrefix(ref.Path, setting.AppSubURL+"/user/") + if !skipRefererRedirect { + redirectTo = ref.RequestURI() + } + } } - middleware.DeleteRedirectToCookie(ctx.Resp) - nextRedirectTo := setting.AppSubURL + string(setting.LandingPageURL) - if setting.LandingPageURL == setting.LandingPageLogin { - nextRedirectTo = setting.AppSubURL + "/" // do not cycle-redirect to the login page + if redirectTo != "" { + middleware.SetRedirectToCookie(ctx.Resp, redirectTo) } - ctx.RedirectToCurrentSite(redirectTo, nextRedirectTo) } -func CheckAutoLogin(ctx *context.Context) 
bool { +func consumeAuthRedirectLink(ctx *context.Context) string { + redirects := []string{ctx.FormString("redirect_to"), middleware.GetRedirectToCookie(ctx.Req)} + middleware.DeleteRedirectToCookie(ctx.Resp) + if setting.LandingPageURL == setting.LandingPageLogin { + redirects = append(redirects, setting.AppSubURL+"/") // do not cycle-redirect to the login page + } else { + redirects = append(redirects, setting.AppSubURL+string(setting.LandingPageURL)) + } + for _, link := range redirects { + if link != "" && httplib.IsCurrentGiteaSiteURL(ctx, link) { + return link + } + } + return setting.AppSubURL + "/" +} + +func redirectAfterAuth(ctx *context.Context) { + ctx.RedirectToCurrentSite(consumeAuthRedirectLink(ctx)) +} + +func performAutoLogin(ctx *context.Context) bool { + rememberAuthRedirectLink(ctx) + isSucceed, err := autoSignIn(ctx) // try to auto-login if err != nil { if errors.Is(err, auth_service.ErrAuthTokenInvalidHash) { @@ -150,13 +178,8 @@ func CheckAutoLogin(ctx *context.Context) bool { return true } - redirectTo := ctx.FormString("redirect_to") - if len(redirectTo) > 0 { - middleware.SetRedirectToCookie(ctx.Resp, redirectTo) - } - if isSucceed { - RedirectAfterLogin(ctx) + redirectAfterAuth(ctx) return true } @@ -181,11 +204,11 @@ func prepareSignInPageData(ctx *context.Context) { // SignIn render sign in page func SignIn(ctx *context.Context) { - if CheckAutoLogin(ctx) { + if performAutoLogin(ctx) { return } if ctx.IsSigned { - RedirectAfterLogin(ctx) + redirectAfterAuth(ctx) return } prepareSignInPageData(ctx) @@ -295,19 +318,19 @@ func SignInPost(ctx *context.Context) { // This handles the final part of the sign-in process of the user. 
func handleSignIn(ctx *context.Context, u *user_model.User, remember bool) { - redirect := handleSignInFull(ctx, u, remember, true) + handleSignInFull(ctx, u, remember) if ctx.Written() { return } - ctx.Redirect(redirect) + redirectAfterAuth(ctx) } -func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRedirect bool) string { +func handleSignInFull(ctx *context.Context, u *user_model.User, remember bool) { if remember { nt, token, err := auth_service.CreateAuthTokenForUserID(ctx, u.ID) if err != nil { ctx.ServerError("CreateAuthTokenForUserID", err) - return setting.AppSubURL + "/" + return } ctx.SetSiteCookie(setting.CookieRememberName, nt.ID+":"+token, setting.LogInRememberDays*timeutil.Day) @@ -316,7 +339,7 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe userHasTwoFactorAuth, err := auth.HasTwoFactorOrWebAuthn(ctx, u.ID) if err != nil { ctx.ServerError("HasTwoFactorOrWebAuthn", err) - return setting.AppSubURL + "/" + return } if err := updateSession(ctx, []string{ @@ -335,7 +358,7 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe session.KeyUserHasTwoFactorAuth: userHasTwoFactorAuth, }); err != nil { ctx.ServerError("RegenerateSession", err) - return setting.AppSubURL + "/" + return } // Language setting of the user overwrites the one previously set @@ -346,7 +369,7 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe } if err := user_service.UpdateUser(ctx, u, opts); err != nil { ctx.ServerError("UpdateUser Language", fmt.Errorf("Error updating user language [user: %d, locale: %s]", u.ID, ctx.Locale.Language())) - return setting.AppSubURL + "/" + return } } @@ -359,21 +382,8 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe // Register last login if err := user_service.UpdateUser(ctx, u, &user_service.UpdateOptions{SetLastLogin: true}); err != nil { ctx.ServerError("UpdateUser", err) - return 
setting.AppSubURL + "/" + return } - - if redirectTo := ctx.GetSiteCookie("redirect_to"); redirectTo != "" && httplib.IsCurrentGiteaSiteURL(ctx, redirectTo) { - middleware.DeleteRedirectToCookie(ctx.Resp) - if obeyRedirect { - ctx.RedirectToCurrentSite(redirectTo) - } - return redirectTo - } - - if obeyRedirect { - ctx.Redirect(setting.AppSubURL + "/") - } - return setting.AppSubURL + "/" } // extractUserNameFromOAuth2 tries to extract a normalized username from the given OAuth2 user. @@ -436,10 +446,7 @@ func SignUp(ctx *context.Context) { // Show Disabled Registration message if DisableRegistration or AllowOnlyExternalRegistration options are true ctx.Data["DisableRegistration"] = setting.Service.DisableRegistration || setting.Service.AllowOnlyExternalRegistration - redirectTo := ctx.FormString("redirect_to") - if len(redirectTo) > 0 { - middleware.SetRedirectToCookie(ctx.Resp, redirectTo) - } + rememberAuthRedirectLink(ctx) ctx.HTML(http.StatusOK, tplSignUp) } @@ -817,13 +824,7 @@ func handleAccountActivation(ctx *context.Context, user *user_model.User) { } ctx.Flash.Success(ctx.Tr("auth.account_activated")) - if redirectTo := ctx.GetSiteCookie("redirect_to"); len(redirectTo) > 0 { - middleware.DeleteRedirectToCookie(ctx.Resp) - ctx.RedirectToCurrentSite(redirectTo) - return - } - - ctx.Redirect(setting.AppSubURL + "/") + redirectAfterAuth(ctx) } // ActivateEmail render the activate email page diff --git a/routers/web/auth/oauth.go b/routers/web/auth/oauth.go index 5eab7ffeb4..b96ea17bc3 100644 --- a/routers/web/auth/oauth.go +++ b/routers/web/auth/oauth.go @@ -21,7 +21,6 @@ import ( "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/session" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/web/middleware" source_service "code.gitea.io/gitea/services/auth/source" "code.gitea.io/gitea/services/auth/source/oauth2" "code.gitea.io/gitea/services/context" @@ -42,10 +41,7 @@ func SignInOAuth(ctx *context.Context) { return } - 
redirectTo := ctx.FormString("redirect_to") - if len(redirectTo) > 0 { - middleware.SetRedirectToCookie(ctx.Resp, redirectTo) - } + rememberAuthRedirectLink(ctx) // try to do a direct callback flow, so we don't authenticate the user again but use the valid accesstoken to get the user user, gothUser, err := oAuth2UserLoginCallback(ctx, authSource, ctx.Req, ctx.Resp) @@ -398,13 +394,7 @@ func handleOAuth2SignIn(ctx *context.Context, authSource *auth.Source, u *user_m return } - if redirectTo := ctx.GetSiteCookie("redirect_to"); len(redirectTo) > 0 { - middleware.DeleteRedirectToCookie(ctx.Resp) - ctx.RedirectToCurrentSite(redirectTo) - return - } - - ctx.Redirect(setting.AppSubURL + "/") + redirectAfterAuth(ctx) return } diff --git a/routers/web/auth/oauth2_provider.go b/routers/web/auth/oauth2_provider.go index 02e1a50285..e09469526e 100644 --- a/routers/web/auth/oauth2_provider.go +++ b/routers/web/auth/oauth2_provider.go @@ -230,8 +230,7 @@ func AuthorizeOAuth(ctx *context.Context) { // pkce support switch form.CodeChallengeMethod { - case "S256": - case "plain": + case "S256", "plain": if err := ctx.Session.Set("CodeChallengeMethod", form.CodeChallengeMethod); err != nil { handleAuthorizeError(ctx, AuthorizeError{ ErrorCode: ErrorCodeServerError, diff --git a/routers/web/auth/openid.go b/routers/web/auth/openid.go index 4ef4c96ccc..948e65366e 100644 --- a/routers/web/auth/openid.go +++ b/routers/web/auth/openid.go @@ -35,7 +35,7 @@ func SignInOpenID(ctx *context.Context) { return } - if CheckAutoLogin(ctx) { + if performAutoLogin(ctx) { return } diff --git a/routers/web/auth/password.go b/routers/web/auth/password.go index 537ad4b994..61c6119470 100644 --- a/routers/web/auth/password.go +++ b/routers/web/auth/password.go @@ -16,7 +16,6 @@ import ( "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/web" - "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/services/context" 
"code.gitea.io/gitea/services/forms" "code.gitea.io/gitea/services/mailer" @@ -236,7 +235,7 @@ func ResetPasswdPost(ctx *context.Context) { return } - handleSignInFull(ctx, u, remember, false) + handleSignInFull(ctx, u, remember) if ctx.Written() { return } @@ -308,11 +307,5 @@ func MustChangePasswordPost(ctx *context.Context) { log.Trace("User updated password: %s", ctx.Doer.Name) - if redirectTo := ctx.GetSiteCookie("redirect_to"); redirectTo != "" { - middleware.DeleteRedirectToCookie(ctx.Resp) - ctx.RedirectToCurrentSite(redirectTo) - return - } - - ctx.Redirect(setting.AppSubURL + "/") + redirectAfterAuth(ctx) } diff --git a/routers/web/auth/webauthn.go b/routers/web/auth/webauthn.go index dacb6be225..cae726b8bf 100644 --- a/routers/web/auth/webauthn.go +++ b/routers/web/auth/webauthn.go @@ -26,7 +26,7 @@ var tplWebAuthn templates.TplName = "user/auth/webauthn" func WebAuthn(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("twofa") - if CheckAutoLogin(ctx) { + if performAutoLogin(ctx) { return } @@ -156,12 +156,8 @@ func WebAuthnPasskeyLogin(ctx *context.Context) { } remember := false // TODO: implement remember me - redirect := handleSignInFull(ctx, user, remember, false) - if redirect == "" { - redirect = setting.AppSubURL + "/" - } - - ctx.JSONRedirect(redirect) + handleSignInFull(ctx, user, remember) + ctx.JSONRedirect(consumeAuthRedirectLink(ctx)) } // WebAuthnLoginAssertion submits a WebAuthn challenge to the browser @@ -274,11 +270,7 @@ func WebAuthnLoginAssertionPost(ctx *context.Context) { } remember := ctx.Session.Get("twofaRemember").(bool) - redirect := handleSignInFull(ctx, user, remember, false) - if redirect == "" { - redirect = setting.AppSubURL + "/" - } + handleSignInFull(ctx, user, remember) _ = ctx.Session.Delete("twofaUid") - - ctx.JSONRedirect(redirect) + ctx.JSONRedirect(consumeAuthRedirectLink(ctx)) } diff --git a/routers/web/devtest/mail_preview.go b/routers/web/devtest/mail_preview.go index d6bade15d7..7b1787d52b 100644 --- 
a/routers/web/devtest/mail_preview.go +++ b/routers/web/devtest/mail_preview.go @@ -8,6 +8,7 @@ import ( "strings" "code.gitea.io/gitea/modules/templates" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/mailer" @@ -34,17 +35,18 @@ func MailPreviewRender(ctx *context.Context) { func prepareMailPreviewRender(ctx *context.Context, tmplName string) { tmplSubject := mailer.LoadedTemplates().SubjectTemplates.Lookup(tmplName) - if tmplSubject == nil { - ctx.Data["RenderMailSubject"] = "default subject" - } else { + // FIXME: MAIL-TEMPLATE-SUBJECT: only "issue" related messages support using subject from templates + subject := "(default subject)" + if tmplSubject != nil { var buf strings.Builder err := tmplSubject.Execute(&buf, nil) if err != nil { - ctx.Data["RenderMailSubject"] = err.Error() + subject = "ERROR: " + err.Error() } else { - ctx.Data["RenderMailSubject"] = buf.String() + subject = util.IfZero(buf.String(), subject) } } + ctx.Data["RenderMailSubject"] = subject ctx.Data["RenderMailTemplateName"] = tmplName } diff --git a/routers/web/feed/profile_test.go b/routers/web/feed/profile_test.go index a0f1509269..bf9492b57b 100644 --- a/routers/web/feed/profile_test.go +++ b/routers/web/feed/profile_test.go @@ -23,7 +23,6 @@ func TestCheckGetOrgFeedsAsOrgMember(t *testing.T) { ctx, resp := contexttest.MockContext(t, "org3.atom") ctx.ContextUser = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}) contexttest.LoadUser(t, ctx, 2) - ctx.IsSigned = true feed.ShowUserFeedAtom(ctx) assert.Contains(t, resp.Body.String(), "") // Should contain 1 private entry }) @@ -31,7 +30,6 @@ func TestCheckGetOrgFeedsAsOrgMember(t *testing.T) { ctx, resp := contexttest.MockContext(t, "org3.atom") ctx.ContextUser = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}) contexttest.LoadUser(t, ctx, 5) - ctx.IsSigned = true feed.ShowUserFeedAtom(ctx) assert.NotContains(t, resp.Body.String(), "") // Should not contain 
any entries }) diff --git a/routers/web/githttp.go b/routers/web/githttp.go index ed3c56b07b..43d318c1a1 100644 --- a/routers/web/githttp.go +++ b/routers/web/githttp.go @@ -10,9 +10,12 @@ import ( ) func addOwnerRepoGitHTTPRouters(m *web.Router) { + // Some users want to use "web-based git client" to access Gitea's repositories, + // so the CORS handler and OPTIONS method are used. m.Group("/{username}/{reponame}", func() { m.Methods("POST,OPTIONS", "/git-upload-pack", repo.ServiceUploadPack) m.Methods("POST,OPTIONS", "/git-receive-pack", repo.ServiceReceivePack) + m.Methods("POST,OPTIONS", "/git-upload-archive", repo.ServiceUploadArchive) m.Methods("GET,OPTIONS", "/info/refs", repo.GetInfoRefs) m.Methods("GET,OPTIONS", "/HEAD", repo.GetTextFile("HEAD")) m.Methods("GET,OPTIONS", "/objects/info/alternates", repo.GetTextFile("objects/info/alternates")) diff --git a/routers/web/org/projects.go b/routers/web/org/projects.go index d524409c41..f4a54db006 100644 --- a/routers/web/org/projects.go +++ b/routers/web/org/projects.go @@ -205,22 +205,24 @@ func ChangeProjectStatus(ctx *context.Context) { } id := ctx.PathParamInt64("id") - if err := project_model.ChangeProjectStatusByRepoIDAndID(ctx, 0, id, toClose); err != nil { - ctx.NotFoundOrServerError("ChangeProjectStatusByRepoIDAndID", project_model.IsErrProjectNotExist, err) - return - } - ctx.JSONRedirect(project_model.ProjectLinkForOrg(ctx.ContextUser, id)) -} - -// DeleteProject delete a project -func DeleteProject(ctx *context.Context) { - p, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id")) + project, err := project_model.GetProjectByIDAndOwner(ctx, id, ctx.ContextUser.ID) if err != nil { ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return } - if p.OwnerID != ctx.ContextUser.ID { - ctx.NotFound(nil) + + if err := project_model.ChangeProjectStatusByRepoIDAndID(ctx, 0, project.ID, toClose); err != nil { + 
ctx.NotFoundOrServerError("ChangeProjectStatusByRepoIDAndID", project_model.IsErrProjectNotExist, err) + return + } + ctx.JSONRedirect(project_model.ProjectLinkForOrg(ctx.ContextUser, project.ID)) +} + +// DeleteProject delete a project +func DeleteProject(ctx *context.Context) { + p, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID) + if err != nil { + ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return } @@ -246,15 +248,11 @@ func RenderEditProject(ctx *context.Context) { return } - p, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id")) + p, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID) if err != nil { ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return } - if p.OwnerID != ctx.ContextUser.ID { - ctx.NotFound(nil) - return - } ctx.Data["projectID"] = p.ID ctx.Data["title"] = p.Title @@ -288,15 +286,11 @@ func EditProjectPost(ctx *context.Context) { return } - p, err := project_model.GetProjectByID(ctx, projectID) + p, err := project_model.GetProjectByIDAndOwner(ctx, projectID, ctx.ContextUser.ID) if err != nil { ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return } - if p.OwnerID != ctx.ContextUser.ID { - ctx.NotFound(nil) - return - } p.Title = form.Title p.Description = form.Content @@ -316,15 +310,12 @@ func EditProjectPost(ctx *context.Context) { // ViewProject renders the project with board view for a project func ViewProject(ctx *context.Context) { - project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id")) + project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID) if err != nil { ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return } - if project.OwnerID != ctx.ContextUser.ID { - ctx.NotFound(nil) - return - } + if err := 
project.LoadOwner(ctx); err != nil { ctx.ServerError("LoadOwner", err) return @@ -455,28 +446,15 @@ func DeleteProjectColumn(ctx *context.Context) { return } - project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id")) + project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID) if err != nil { ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return } - pb, err := project_model.GetColumn(ctx, ctx.PathParamInt64("columnID")) + _, err = project_model.GetColumnByIDAndProjectID(ctx, ctx.PathParamInt64("columnID"), project.ID) if err != nil { - ctx.ServerError("GetProjectColumn", err) - return - } - if pb.ProjectID != ctx.PathParamInt64("id") { - ctx.JSON(http.StatusUnprocessableEntity, map[string]string{ - "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", pb.ID, project.ID), - }) - return - } - - if project.OwnerID != ctx.ContextUser.ID { - ctx.JSON(http.StatusUnprocessableEntity, map[string]string{ - "message": fmt.Sprintf("ProjectColumn[%d] is not in Owner[%d] as expected", pb.ID, ctx.ContextUser.ID), - }) + ctx.NotFoundOrServerError("GetColumnByIDAndProjectID", project_model.IsErrProjectColumnNotExist, err) return } @@ -492,7 +470,7 @@ func DeleteProjectColumn(ctx *context.Context) { func AddColumnToProjectPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.EditProjectColumnForm) - project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id")) + project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID) if err != nil { ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return @@ -520,30 +498,18 @@ func CheckProjectColumnChangePermissions(ctx *context.Context) (*project_model.P return nil, nil } - project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id")) + project, err := project_model.GetProjectByIDAndOwner(ctx, 
ctx.PathParamInt64("id"), ctx.ContextUser.ID) if err != nil { ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return nil, nil } - column, err := project_model.GetColumn(ctx, ctx.PathParamInt64("columnID")) + column, err := project_model.GetColumnByIDAndProjectID(ctx, ctx.PathParamInt64("columnID"), project.ID) if err != nil { - ctx.ServerError("GetProjectColumn", err) - return nil, nil - } - if column.ProjectID != ctx.PathParamInt64("id") { - ctx.JSON(http.StatusUnprocessableEntity, map[string]string{ - "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", column.ID, project.ID), - }) + ctx.NotFoundOrServerError("GetColumnByIDAndProjectID", project_model.IsErrProjectColumnNotExist, err) return nil, nil } - if project.OwnerID != ctx.ContextUser.ID { - ctx.JSON(http.StatusUnprocessableEntity, map[string]string{ - "message": fmt.Sprintf("ProjectColumn[%d] is not in Repository[%d] as expected", column.ID, project.ID), - }) - return nil, nil - } return project, column } @@ -595,24 +561,15 @@ func MoveIssues(ctx *context.Context) { return } - project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id")) + project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID) if err != nil { ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err) return } - if project.OwnerID != ctx.ContextUser.ID { - ctx.NotFound(nil) - return - } - column, err := project_model.GetColumn(ctx, ctx.PathParamInt64("columnID")) + column, err := project_model.GetColumnByIDAndProjectID(ctx, ctx.PathParamInt64("columnID"), project.ID) if err != nil { - ctx.NotFoundOrServerError("GetProjectColumn", project_model.IsErrProjectColumnNotExist, err) - return - } - - if column.ProjectID != project.ID { - ctx.NotFound(nil) + ctx.NotFoundOrServerError("GetColumnByIDAndProjectID", project_model.IsErrProjectColumnNotExist, err) return } diff --git 
a/routers/web/org/projects_test.go b/routers/web/org/projects_test.go index c3a769e621..63bcefb6e2 100644 --- a/routers/web/org/projects_test.go +++ b/routers/web/org/projects_test.go @@ -4,11 +4,14 @@ package org_test import ( + "net/http" "testing" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/web/org" "code.gitea.io/gitea/services/contexttest" + "code.gitea.io/gitea/services/forms" "github.com/stretchr/testify/assert" ) @@ -26,3 +29,30 @@ func TestCheckProjectColumnChangePermissions(t *testing.T) { assert.NotNil(t, column) assert.False(t, ctx.Written()) } + +func TestChangeProjectStatusRejectsForeignProjects(t *testing.T) { + unittest.PrepareTestEnv(t) + // project 4 is owned by user2 not user1 + ctx, _ := contexttest.MockContext(t, "user1/-/projects/4/close") + contexttest.LoadUser(t, ctx, 1) + ctx.ContextUser = ctx.Doer + ctx.SetPathParam("action", "close") + ctx.SetPathParam("id", "4") + + org.ChangeProjectStatus(ctx) + + assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus()) +} + +func TestAddColumnToProjectPostRejectsForeignProjects(t *testing.T) { + unittest.PrepareTestEnv(t) + ctx, _ := contexttest.MockContext(t, "user1/-/projects/4/columns/new") + contexttest.LoadUser(t, ctx, 1) + ctx.ContextUser = ctx.Doer + ctx.SetPathParam("id", "4") + web.SetForm(ctx, &forms.EditProjectColumnForm{Title: "foreign"}) + + org.AddColumnToProjectPost(ctx) + + assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus()) +} diff --git a/routers/web/repo/attachment.go b/routers/web/repo/attachment.go index 54200d8de8..ae52eb2ffa 100644 --- a/routers/web/repo/attachment.go +++ b/routers/web/repo/attachment.go @@ -4,11 +4,12 @@ package repo import ( - "fmt" "net/http" + issues_model "code.gitea.io/gitea/models/issues" access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/httpcache" 
"code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" @@ -40,7 +41,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) { file, header, err := ctx.Req.FormFile("file") if err != nil { - ctx.HTTPError(http.StatusInternalServerError, fmt.Sprintf("FormFile: %v", err)) + ctx.ServerError("FormFile", err) return } defer file.Close() @@ -56,7 +57,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) { ctx.HTTPError(http.StatusBadRequest, err.Error()) return } - ctx.HTTPError(http.StatusInternalServerError, fmt.Sprintf("NewAttachment: %v", err)) + ctx.ServerError("UploadAttachmentGeneralSizeLimit", err) return } @@ -74,13 +75,44 @@ func DeleteAttachment(ctx *context.Context) { ctx.HTTPError(http.StatusBadRequest, err.Error()) return } - if !ctx.IsSigned || (ctx.Doer.ID != attach.UploaderID) { + + if !ctx.IsSigned { ctx.HTTPError(http.StatusForbidden) return } + + if attach.RepoID != ctx.Repo.Repository.ID { + ctx.HTTPError(http.StatusBadRequest, "attachment does not belong to this repository") + return + } + + if ctx.Doer.ID != attach.UploaderID { + if attach.IssueID > 0 { + issue, err := issues_model.GetIssueByID(ctx, attach.IssueID) + if err != nil { + ctx.ServerError("GetIssueByID", err) + return + } + if !ctx.Repo.Permission.CanWriteIssuesOrPulls(issue.IsPull) { + ctx.HTTPError(http.StatusForbidden) + return + } + } else if attach.ReleaseID > 0 { + if !ctx.Repo.Permission.CanWrite(unit.TypeReleases) { + ctx.HTTPError(http.StatusForbidden) + return + } + } else { + if !ctx.Repo.Permission.IsAdmin() && !ctx.Repo.Permission.IsOwner() { + ctx.HTTPError(http.StatusForbidden) + return + } + } + } + err = repo_model.DeleteAttachment(ctx, attach, true) if err != nil { - ctx.HTTPError(http.StatusInternalServerError, fmt.Sprintf("DeleteAttachment: %v", err)) + ctx.ServerError("DeleteAttachment", err) return } ctx.JSON(http.StatusOK, map[string]string{ @@ -100,23 +132,41 @@ func ServeAttachment(ctx 
*context.Context, uuid string) { return } - repository, unitType, err := repo_service.LinkedRepository(ctx, attach) - if err != nil { - ctx.ServerError("LinkedRepository", err) + // prevent visiting attachment from other repository directly + // The check will be ignored before this code merged. + if attach.CreatedUnix > repo_model.LegacyAttachmentMissingRepoIDCutoff && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID != attach.RepoID { + ctx.HTTPError(http.StatusNotFound) return } - if repository == nil { // If not linked + unitType, repoID, err := repo_service.GetAttachmentLinkedTypeAndRepoID(ctx, attach) + if err != nil { + ctx.ServerError("GetAttachmentLinkedTypeAndRepoID", err) + return + } + + if unitType == unit.TypeInvalid { // unlinked attachment can only be accessed by the uploader if !(ctx.IsSigned && attach.UploaderID == ctx.Doer.ID) { // We block if not the uploader ctx.HTTPError(http.StatusNotFound) return } - } else { // If we have the repository we check access - perm, err := access_model.GetUserRepoPermission(ctx, repository, ctx.Doer) - if err != nil { - ctx.HTTPError(http.StatusInternalServerError, "GetUserRepoPermission", err.Error()) - return + } else { // If we have the linked type, we need to check access + var perm access_model.Permission + if ctx.Repo.Repository == nil { + repo, err := repo_model.GetRepositoryByID(ctx, repoID) + if err != nil { + ctx.ServerError("GetRepositoryByID", err) + return + } + perm, err = access_model.GetUserRepoPermission(ctx, repo, ctx.Doer) + if err != nil { + ctx.ServerError("GetUserRepoPermission", err) + return + } + } else { + perm = ctx.Repo.Permission } + if !perm.CanRead(unitType) { ctx.HTTPError(http.StatusNotFound) return diff --git a/routers/web/repo/blame.go b/routers/web/repo/blame.go index ab3aecbbe7..25eb88eefc 100644 --- a/routers/web/repo/blame.go +++ b/routers/web/repo/blame.go @@ -267,7 +267,7 @@ func renderBlame(ctx *context.Context, blameParts []*gitrepo.BlamePart, commitNa bufContent := 
buf.Bytes() bufContent = charset.ToUTF8(bufContent, charset.ConvertOpts{}) - highlighted, lexerName := highlight.Code(path.Base(ctx.Repo.TreePath), language, util.UnsafeBytesToString(bufContent)) + highlighted, lexerName := highlight.RenderCodeSlowGuess(path.Base(ctx.Repo.TreePath), language, util.UnsafeBytesToString(bufContent)) unsafeLines := highlight.UnsafeSplitHighlightedLines(highlighted) for i, br := range rows { var line template.HTML diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go index f702b2de16..27f5651ecb 100644 --- a/routers/web/repo/commit.go +++ b/routers/web/repo/commit.go @@ -279,7 +279,7 @@ func Diff(ctx *context.Context) { diffBlobExcerptData := &gitdiff.DiffBlobExcerptData{ BaseLink: ctx.Repo.RepoLink + "/blob_excerpt", - DiffStyle: ctx.FormString("style"), + DiffStyle: GetDiffViewStyle(ctx), AfterCommitID: commitID, } gitRepo := ctx.Repo.GitRepo diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go index 29a82b5dfc..150a8583c8 100644 --- a/routers/web/repo/compare.go +++ b/routers/web/repo/compare.go @@ -149,9 +149,9 @@ func setCsvCompareContext(ctx *context.Context) { if err != nil { return nil, nil, err } - + var closer io.Closer = reader csvReader, err := csv_module.CreateReaderAndDetermineDelimiter(ctx, charset.ToUTF8WithFallbackReader(reader, charset.ConvertOpts{})) - return csvReader, reader, err + return csvReader, closer, err } baseReader, baseBlobCloser, err := csvReaderFromCommit(markup.NewRenderContext(ctx).WithRelativePath(diffFile.OldName), baseBlob) @@ -196,21 +196,16 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { baseRepo := ctx.Repo.Repository fileOnly := ctx.FormBool("file-only") - compareReq, err := common.ParseCompareRouterParam(ctx.PathParam("*")) - switch { - case errors.Is(err, util.ErrInvalidArgument): - ctx.HTTPError(http.StatusBadRequest, err.Error()) - return nil - case err != nil: - ctx.ServerError("ParseCompareRouterParam", err) - return nil - } + // 1 
Parse compare router param + compareReq := common.ParseCompareRouterParam(ctx.PathParam("*")) + // remove the check when we support compare with carets - if compareReq.CaretTimes > 0 { - ctx.HTTPError(http.StatusBadRequest, "Unsupported compare syntax with carets") + if compareReq.BaseOriRefSuffix != "" { + ctx.HTTPError(http.StatusBadRequest, "Unsupported comparison syntax: ref with suffix") return nil } + // 2 get repository and owner for head headOwner, headRepo, err := common.GetHeadOwnerAndRepo(ctx, baseRepo, compareReq) switch { case errors.Is(err, util.ErrInvalidArgument): @@ -224,45 +219,66 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { return nil } - baseBranch := util.IfZero(compareReq.BaseOriRef, baseRepo.DefaultBranch) - headBranch := util.IfZero(compareReq.HeadOriRef, headRepo.DefaultBranch) isSameRepo := baseRepo.ID == headRepo.ID - ctx.Data["BaseName"] = baseRepo.OwnerName - ctx.Data["BaseBranch"] = baseBranch - ctx.Data["HeadUser"] = headOwner - ctx.Data["HeadBranch"] = headBranch - ctx.Repo.PullRequest.SameRepo = isSameRepo + // 3 permission check + // base repository's code unit read permission check has been done on web.go + permBase := ctx.Repo.Permission - // Check if base branch is valid. 
- baseIsCommit := ctx.Repo.GitRepo.IsCommitExist(baseBranch) - baseIsBranch, _ := git_model.IsBranchExist(ctx, ctx.Repo.Repository.ID, baseBranch) - baseIsTag := gitrepo.IsTagExist(ctx, ctx.Repo.Repository, baseBranch) - - if !baseIsCommit && !baseIsBranch && !baseIsTag { - // Check if baseBranch is short sha commit hash - if baseCommit, _ := ctx.Repo.GitRepo.GetCommit(baseBranch); baseCommit != nil { - baseBranch = baseCommit.ID.String() - ctx.Data["BaseBranch"] = baseBranch - baseIsCommit = true - } else if baseBranch == ctx.Repo.GetObjectFormat().EmptyObjectID().String() { - if isSameRepo { - ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(headBranch)) - } else { - ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(headRepo.FullName()) + ":" + util.PathEscapeSegments(headBranch)) - } + // If we're not merging from the same repo: + if !isSameRepo { + // Assert ctx.Doer has permission to read headRepo's codes + permHead, err := access_model.GetUserRepoPermission(ctx, headRepo, ctx.Doer) + if err != nil { + ctx.ServerError("GetUserRepoPermission", err) return nil - } else { + } + if !permHead.CanRead(unit.TypeCode) { + if log.IsTrace() { + log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v", + ctx.Doer, + headRepo, + permHead) + } ctx.NotFound(nil) return nil } + ctx.Data["CanWriteToHeadRepo"] = permHead.CanWrite(unit.TypeCode) } - ctx.Data["BaseIsCommit"] = baseIsCommit - ctx.Data["BaseIsBranch"] = baseIsBranch - ctx.Data["BaseIsTag"] = baseIsTag - ctx.Data["IsPull"] = true - // Now we have the repository that represents the base + // 4 get base and head refs + baseRefName := util.IfZero(compareReq.BaseOriRef, baseRepo.DefaultBranch) + headRefName := util.IfZero(compareReq.HeadOriRef, headRepo.DefaultBranch) + + baseRef := ctx.Repo.GitRepo.UnstableGuessRefByShortName(baseRefName) + if baseRef == "" { + ctx.NotFound(nil) + return nil + } + var headGitRepo 
*git.Repository + if isSameRepo { + headGitRepo = ctx.Repo.GitRepo + } else { + headGitRepo, err = gitrepo.OpenRepository(ctx, headRepo) + if err != nil { + ctx.ServerError("OpenRepository", err) + return nil + } + defer headGitRepo.Close() + } + headRef := headGitRepo.UnstableGuessRefByShortName(headRefName) + if headRef == "" { + ctx.NotFound(nil) + return nil + } + + ctx.Data["BaseName"] = baseRepo.OwnerName + ctx.Data["BaseBranch"] = baseRef.ShortName() // for legacy templates + ctx.Data["HeadUser"] = headOwner + ctx.Data["HeadBranch"] = headRef.ShortName() // for legacy templates + ctx.Repo.PullRequest.SameRepo = isSameRepo + + ctx.Data["IsPull"] = true // The current base and head repositories and branches may not // actually be the intended branches that the user wants to @@ -331,64 +347,9 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { ctx.Data["PageIsComparePull"] = false } - // 8. Finally open the git repo - var headGitRepo *git.Repository - if isSameRepo { - headGitRepo = ctx.Repo.GitRepo - } else if has { - headGitRepo, err = gitrepo.RepositoryFromRequestContextOrOpen(ctx, headRepo) - if err != nil { - ctx.ServerError("RepositoryFromRequestContextOrOpen", err) - return nil - } - } else { - ctx.NotFound(nil) - return nil - } - ctx.Data["HeadRepo"] = headRepo ctx.Data["BaseCompareRepo"] = ctx.Repo.Repository - // Now we need to assert that the ctx.Doer has permission to read - // the baseRepo's code and pulls - // (NOT headRepo's) - permBase, err := access_model.GetUserRepoPermission(ctx, baseRepo, ctx.Doer) - if err != nil { - ctx.ServerError("GetUserRepoPermission", err) - return nil - } - if !permBase.CanRead(unit.TypeCode) { - if log.IsTrace() { - log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in baseRepo has Permissions: %-+v", - ctx.Doer, - baseRepo, - permBase) - } - ctx.NotFound(nil) - return nil - } - - // If we're not merging from the same repo: - if !isSameRepo { - // Assert ctx.Doer has 
permission to read headRepo's codes - permHead, err := access_model.GetUserRepoPermission(ctx, headRepo, ctx.Doer) - if err != nil { - ctx.ServerError("GetUserRepoPermission", err) - return nil - } - if !permHead.CanRead(unit.TypeCode) { - if log.IsTrace() { - log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v", - ctx.Doer, - headRepo, - permHead) - } - ctx.NotFound(nil) - return nil - } - ctx.Data["CanWriteToHeadRepo"] = permHead.CanWrite(unit.TypeCode) - } - // If we have a rootRepo and it's different from: // 1. the computed base // 2. the computed head @@ -436,28 +397,9 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { } } - // Check if head branch is valid. - headIsCommit := headGitRepo.IsCommitExist(headBranch) - headIsBranch, _ := git_model.IsBranchExist(ctx, headRepo.ID, headBranch) - headIsTag := gitrepo.IsTagExist(ctx, headRepo, headBranch) - if !headIsCommit && !headIsBranch && !headIsTag { - // Check if headBranch is short sha commit hash - if headCommit, _ := headGitRepo.GetCommit(headBranch); headCommit != nil { - headBranch = headCommit.ID.String() - ctx.Data["HeadBranch"] = headBranch - headIsCommit = true - } else { - ctx.NotFound(nil) - return nil - } - } - ctx.Data["HeadIsCommit"] = headIsCommit - ctx.Data["HeadIsBranch"] = headIsBranch - ctx.Data["HeadIsTag"] = headIsTag - // Treat as pull request if both references are branches if ctx.Data["PageIsComparePull"] == nil { - ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch && permBase.CanReadIssuesOrPulls(true) + ctx.Data["PageIsComparePull"] = baseRef.IsBranch() && headRef.IsBranch() && permBase.CanReadIssuesOrPulls(true) } if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) { @@ -471,20 +413,7 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { return nil } - baseBranchRef := git.RefName(baseBranch) - if baseIsBranch { - baseBranchRef = 
git.RefNameFromBranch(baseBranch) - } else if baseIsTag { - baseBranchRef = git.RefNameFromTag(baseBranch) - } - headBranchRef := git.RefName(headBranch) - if headIsBranch { - headBranchRef = git.RefNameFromBranch(headBranch) - } else if headIsTag { - headBranchRef = git.RefNameFromTag(headBranch) - } - - compareInfo, err := git_service.GetCompareInfo(ctx, baseRepo, headRepo, headGitRepo, baseBranchRef, headBranchRef, compareReq.DirectComparison(), fileOnly) + compareInfo, err := git_service.GetCompareInfo(ctx, baseRepo, headRepo, headGitRepo, baseRef, headRef, compareReq.DirectComparison(), fileOnly) if err != nil { ctx.ServerError("GetCompareInfo", err) return nil @@ -517,7 +446,7 @@ func PrepareCompareDiff( ctx.Data["TitleQuery"] = newPrFormTitle ctx.Data["BodyQuery"] = newPrFormBody - if (headCommitID == ci.MergeBase && !ci.DirectComparison) || + if (headCommitID == ci.MergeBase && !ci.DirectComparison()) || headCommitID == ci.BaseCommitID { ctx.Data["IsNothingToCompare"] = true if unit, err := repo.GetUnit(ctx, unit.TypePullRequests); err == nil { @@ -534,7 +463,7 @@ func PrepareCompareDiff( } beforeCommitID := ci.MergeBase - if ci.DirectComparison { + if ci.DirectComparison() { beforeCommitID = ci.BaseCommitID } @@ -555,7 +484,7 @@ func PrepareCompareDiff( MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters, MaxFiles: maxFiles, WhitespaceBehavior: whitespaceBehavior, - DirectComparison: ci.DirectComparison, + DirectComparison: ci.DirectComparison(), }, ctx.FormStrings("files")...) 
if err != nil { ctx.ServerError("GetDiff", err) @@ -570,7 +499,7 @@ func PrepareCompareDiff( ctx.Data["Diff"] = diff ctx.Data["DiffBlobExcerptData"] = &gitdiff.DiffBlobExcerptData{ BaseLink: ci.HeadRepo.Link() + "/blob_excerpt", - DiffStyle: ctx.FormString("style"), + DiffStyle: GetDiffViewStyle(ctx), AfterCommitID: headCommitID, } ctx.Data["DiffNotAvailable"] = diffShortStat.NumFiles == 0 @@ -668,13 +597,7 @@ func CompareDiff(ctx *context.Context) { ctx.Data["PageIsViewCode"] = true ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes - ctx.Data["DirectComparison"] = ci.DirectComparison - ctx.Data["OtherCompareSeparator"] = ".." - ctx.Data["CompareSeparator"] = "..." - if ci.DirectComparison { - ctx.Data["CompareSeparator"] = ".." - ctx.Data["OtherCompareSeparator"] = "..." - } + ctx.Data["CompareInfo"] = ci nothingToCompare := PrepareCompareDiff(ctx, ci, gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string))) if ctx.Written() { @@ -751,11 +674,7 @@ func CompareDiff(ctx *context.Context) { beforeCommitID := ctx.Data["BeforeCommitID"].(string) afterCommitID := ctx.Data["AfterCommitID"].(string) - separator := "..." - if ci.DirectComparison { - separator = ".." 
- } - ctx.Data["Title"] = "Comparing " + base.ShortSha(beforeCommitID) + separator + base.ShortSha(afterCommitID) + ctx.Data["Title"] = "Comparing " + base.ShortSha(beforeCommitID) + ci.CompareSeparator + base.ShortSha(afterCommitID) ctx.Data["IsDiffCompare"] = true @@ -837,7 +756,7 @@ func ExcerptBlob(ctx *context.Context) { diffBlobExcerptData := &gitdiff.DiffBlobExcerptData{ BaseLink: ctx.Repo.RepoLink + "/blob_excerpt", - DiffStyle: ctx.FormString("style"), + DiffStyle: GetDiffViewStyle(ctx), AfterCommitID: commitID, } diff --git a/routers/web/repo/editor_cherry_pick.go b/routers/web/repo/editor_cherry_pick.go index ca0e19517a..605a35b100 100644 --- a/routers/web/repo/editor_cherry_pick.go +++ b/routers/web/repo/editor_cherry_pick.go @@ -67,7 +67,7 @@ func CherryPickPost(ctx *context.Context) { if parsed.form.Revert { err = gitrepo.GetReverseRawDiff(ctx, ctx.Repo.Repository, fromCommitID, buf) } else { - err = git.GetRawDiff(ctx.Repo.GitRepo, fromCommitID, "patch", buf) + err = git.GetRawDiff(ctx.Repo.GitRepo, fromCommitID, git.RawDiffPatch, buf) } if err == nil { opts.Content = buf.String() diff --git a/routers/web/repo/githttp.go b/routers/web/repo/githttp.go index c7b53dcbfb..8b3deb5a03 100644 --- a/routers/web/repo/githttp.go +++ b/routers/web/repo/githttp.go @@ -5,7 +5,6 @@ package repo import ( - "bytes" "compress/gzip" "fmt" "net/http" @@ -30,6 +29,7 @@ import ( repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" repo_service "code.gitea.io/gitea/services/repository" @@ -55,8 +55,9 @@ func CorsHandler() func(next http.Handler) http.Handler { } } -// httpBase implementation git smart HTTP protocol -func httpBase(ctx *context.Context) *serviceHandler { +// httpBase does the common work for git http services, +// including early response, authentication, repository lookup and permission check. 
+func httpBase(ctx *context.Context, optGitService ...string) *serviceHandler { username := ctx.PathParam("username") reponame := strings.TrimSuffix(ctx.PathParam("reponame"), ".git") @@ -65,20 +66,23 @@ func httpBase(ctx *context.Context) *serviceHandler { return nil } + var serviceType string var isPull, receivePack bool - service := ctx.FormString("service") - if service == "git-receive-pack" || - strings.HasSuffix(ctx.Req.URL.Path, "git-receive-pack") { - isPull = false + switch util.OptionalArg(optGitService) { + case "git-receive-pack": + serviceType = ServiceTypeReceivePack receivePack = true - } else if service == "git-upload-pack" || - strings.HasSuffix(ctx.Req.URL.Path, "git-upload-pack") { + case "git-upload-pack": + serviceType = ServiceTypeUploadPack isPull = true - } else if service == "git-upload-archive" || - strings.HasSuffix(ctx.Req.URL.Path, "git-upload-archive") { + case "git-upload-archive": + serviceType = ServiceTypeUploadArchive isPull = true - } else { + case "": isPull = ctx.Req.Method == http.MethodHead || ctx.Req.Method == http.MethodGet + default: // unknown service + ctx.Resp.WriteHeader(http.StatusBadRequest) + return nil } var accessMode perm.AccessMode @@ -188,7 +192,7 @@ func httpBase(ctx *context.Context) *serviceHandler { } if repoExist { - // Because of special ref "refs/for" .. 
, need delay write permission check + // Because of special ref "refs/for" (agit) , need delay write permission check if git.DefaultFeatures().SupportProcReceive { accessMode = perm.AccessModeRead } @@ -277,7 +281,6 @@ func httpBase(ctx *context.Context) *serviceHandler { ctx.PlainText(http.StatusForbidden, "repository wiki is disabled") return nil } - log.Error("Failed to get the wiki unit in %-v Error: %v", repo, err) ctx.ServerError("GetUnit(UnitTypeWiki) for "+repo.FullName(), err) return nil } @@ -285,9 +288,7 @@ func httpBase(ctx *context.Context) *serviceHandler { environ = append(environ, repo_module.EnvRepoID+fmt.Sprintf("=%d", repo.ID)) - ctx.Req.URL.Path = strings.ToLower(ctx.Req.URL.Path) // blue: In case some repo name has upper case name - - return &serviceHandler{repo, isWiki, environ} + return &serviceHandler{serviceType, repo, isWiki, environ} } var ( @@ -330,6 +331,8 @@ func dummyInfoRefs(ctx *context.Context) { } type serviceHandler struct { + serviceType string + repo *repo_model.Repository isWiki bool environ []string @@ -350,7 +353,7 @@ func setHeaderNoCache(ctx *context.Context) { func setHeaderCacheForever(ctx *context.Context) { now := time.Now().Unix() - expires := now + 31536000 + expires := now + 365*86400 // 365 days ctx.Resp.Header().Set("Date", strconv.FormatInt(now, 10)) ctx.Resp.Header().Set("Expires", strconv.FormatInt(expires, 10)) ctx.Resp.Header().Set("Cache-Control", "public, max-age=31536000") @@ -367,7 +370,7 @@ func isSlashRune(r rune) bool { return r == '/' || r == '\\' } func (h *serviceHandler) sendFile(ctx *context.Context, contentType, file string) { if containsParentDirectorySeparator(file) { - log.Error("request file path contains invalid path: %v", file) + log.Debug("request file path contains invalid path: %v", file) ctx.Resp.WriteHeader(http.StatusBadRequest) return } @@ -380,38 +383,45 @@ func (h *serviceHandler) sendFile(ctx *context.Context, contentType, file string // one or more key=value pairs separated by 
colons var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`) -func prepareGitCmdWithAllowedService(service string) (*gitcmd.Command, error) { - if service == ServiceTypeReceivePack { - return gitcmd.NewCommand(ServiceTypeReceivePack), nil +func prepareGitCmdWithAllowedService(service string, allowedServices []string) *gitcmd.Command { + if !slices.Contains(allowedServices, service) { + return nil } - if service == ServiceTypeUploadPack { - return gitcmd.NewCommand(ServiceTypeUploadPack), nil + switch service { + case ServiceTypeReceivePack: + return gitcmd.NewCommand(ServiceTypeReceivePack) + case ServiceTypeUploadPack: + return gitcmd.NewCommand(ServiceTypeUploadPack) + case ServiceTypeUploadArchive: + return gitcmd.NewCommand(ServiceTypeUploadArchive) + default: + return nil } - return nil, fmt.Errorf("service %q is not allowed", service) } -func serviceRPC(ctx *context.Context, h *serviceHandler, service string) { - defer func() { - if err := ctx.Req.Body.Close(); err != nil { - log.Error("serviceRPC: Close: %v", err) - } - }() +func serviceRPC(ctx *context.Context, service string) { + defer ctx.Req.Body.Close() + h := httpBase(ctx, "git-"+service) + if h == nil { + return + } expectedContentType := fmt.Sprintf("application/x-git-%s-request", service) if ctx.Req.Header.Get("Content-Type") != expectedContentType { - log.Error("Content-Type (%q) doesn't match expected: %q", ctx.Req.Header.Get("Content-Type"), expectedContentType) - // FIXME: why it's 401 if the content type is unexpected? - ctx.Resp.WriteHeader(http.StatusUnauthorized) + log.Debug("Content-Type (%q) doesn't match expected: %q", ctx.Req.Header.Get("Content-Type"), expectedContentType) + ctx.Resp.WriteHeader(http.StatusBadRequest) return } - cmd, err := prepareGitCmdWithAllowedService(service) - if err != nil { - log.Error("Failed to prepareGitCmdWithService: %v", err) - // FIXME: why it's 401 if the service type doesn't supported? 
- ctx.Resp.WriteHeader(http.StatusUnauthorized) + cmd := prepareGitCmdWithAllowedService(service, []string{ServiceTypeUploadPack, ServiceTypeReceivePack, ServiceTypeUploadArchive}) + if cmd == nil { + ctx.Resp.WriteHeader(http.StatusBadRequest) return } + // git upload-archive does not have a "--stateless-rpc" option + if service == ServiceTypeUploadPack || service == ServiceTypeReceivePack { + cmd.AddArguments("--stateless-rpc") + } ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-result", service)) @@ -419,10 +429,10 @@ func serviceRPC(ctx *context.Context, h *serviceHandler, service string) { // Handle GZIP. if ctx.Req.Header.Get("Content-Encoding") == "gzip" { + var err error reqBody, err = gzip.NewReader(reqBody) if err != nil { - log.Error("Fail to create gzip reader: %v", err) - ctx.Resp.WriteHeader(http.StatusInternalServerError) + ctx.Resp.WriteHeader(http.StatusBadRequest) return } } @@ -434,49 +444,35 @@ func serviceRPC(ctx *context.Context, h *serviceHandler, service string) { h.environ = append(h.environ, "GIT_PROTOCOL="+protocol) } - var stderr bytes.Buffer - if err := gitrepo.RunCmd(ctx, h.getStorageRepo(), cmd.AddArguments("--stateless-rpc", "."). + if err := gitrepo.RunCmdWithStderr(ctx, h.getStorageRepo(), cmd.AddArguments("."). WithEnv(append(os.Environ(), h.environ...)). - WithStderr(&stderr). - WithStdin(reqBody). - WithStdout(ctx.Resp). - WithUseContextTimeout(true)); err != nil { - if !git.IsErrCanceledOrKilled(err) { - log.Error("Fail to serve RPC(%s) in %s: %v - %s", service, h.getStorageRepo().RelativePath(), err, stderr.String()) + WithStdinCopy(reqBody). 
+ WithStdoutCopy(ctx.Resp), + ); err != nil { + if !gitcmd.IsErrorCanceledOrKilled(err) { + log.Error("Fail to serve RPC(%s) in %s: %v", service, h.getStorageRepo().RelativePath(), err) } - return } } const ( - ServiceTypeUploadPack = "upload-pack" - ServiceTypeReceivePack = "receive-pack" + ServiceTypeUploadPack = "upload-pack" + ServiceTypeReceivePack = "receive-pack" + ServiceTypeUploadArchive = "upload-archive" ) // ServiceUploadPack implements Git Smart HTTP protocol func ServiceUploadPack(ctx *context.Context) { - h := httpBase(ctx) - if h != nil { - serviceRPC(ctx, h, ServiceTypeUploadPack) - } + serviceRPC(ctx, ServiceTypeUploadPack) } // ServiceReceivePack implements Git Smart HTTP protocol func ServiceReceivePack(ctx *context.Context) { - h := httpBase(ctx) - if h != nil { - serviceRPC(ctx, h, ServiceTypeReceivePack) - } + serviceRPC(ctx, ServiceTypeReceivePack) } -func getServiceType(ctx *context.Context) string { - switch ctx.Req.FormValue("service") { - case "git-" + ServiceTypeUploadPack: - return ServiceTypeUploadPack - case "git-" + ServiceTypeReceivePack: - return ServiceTypeReceivePack - } - return "" +func ServiceUploadArchive(ctx *context.Context) { + serviceRPC(ctx, ServiceTypeUploadArchive) } func packetWrite(str string) []byte { @@ -489,36 +485,45 @@ func packetWrite(str string) []byte { // GetInfoRefs implements Git dumb HTTP func GetInfoRefs(ctx *context.Context) { - h := httpBase(ctx) + h := httpBase(ctx, ctx.FormString("service")) // git http protocol: "?service=git-" if h == nil { return } setHeaderNoCache(ctx) - service := getServiceType(ctx) - cmd, err := prepareGitCmdWithAllowedService(service) - if err == nil { - if protocol := ctx.Req.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) { - h.environ = append(h.environ, "GIT_PROTOCOL="+protocol) - } - h.environ = append(os.Environ(), h.environ...) 
- - refs, _, err := gitrepo.RunCmdBytes(ctx, h.getStorageRepo(), cmd.AddArguments("--stateless-rpc", "--advertise-refs", "."). - WithEnv(h.environ)) - if err != nil { - log.Error(fmt.Sprintf("%v - %s", err, string(refs))) - } - - ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-advertisement", service)) - ctx.Resp.WriteHeader(http.StatusOK) - _, _ = ctx.Resp.Write(packetWrite("# service=git-" + service + "\n")) - _, _ = ctx.Resp.Write([]byte("0000")) - _, _ = ctx.Resp.Write(refs) - } else { + if h.serviceType == "" { + // it's said that some legacy git clients will send requests to "/info/refs" without "service" parameter, + // although there should be no such case client in the modern days. TODO: not quite sure why we need this UpdateServerInfo logic if err := gitrepo.UpdateServerInfo(ctx, h.getStorageRepo()); err != nil { - log.Error("Failed to update server info: %v", err) + ctx.ServerError("UpdateServerInfo", err) + return } h.sendFile(ctx, "text/plain; charset=utf-8", "info/refs") + return } + + cmd := prepareGitCmdWithAllowedService(h.serviceType, []string{ServiceTypeUploadPack, ServiceTypeReceivePack}) + if cmd == nil { + ctx.Resp.WriteHeader(http.StatusBadRequest) + return + } + + if protocol := ctx.Req.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) { + h.environ = append(h.environ, "GIT_PROTOCOL="+protocol) + } + h.environ = append(os.Environ(), h.environ...) 
+ + cmd = cmd.AddArguments("--stateless-rpc", "--advertise-refs", ".").WithEnv(h.environ) + refs, _, err := gitrepo.RunCmdBytes(ctx, h.getStorageRepo(), cmd) + if err != nil { + ctx.ServerError("RunGitServiceAdvertiseRefs", err) + return + } + + ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-advertisement", h.serviceType)) + ctx.Resp.WriteHeader(http.StatusOK) + _, _ = ctx.Resp.Write(packetWrite("# service=git-" + h.serviceType + "\n")) + _, _ = ctx.Resp.Write([]byte("0000")) + _, _ = ctx.Resp.Write(refs) } // GetTextFile implements Git dumb HTTP diff --git a/routers/web/repo/issue_view.go b/routers/web/repo/issue_view.go index 803afbffe4..7670660e31 100644 --- a/routers/web/repo/issue_view.go +++ b/routers/web/repo/issue_view.go @@ -7,7 +7,6 @@ import ( "fmt" "math/big" "net/http" - "net/url" "sort" "strconv" @@ -33,6 +32,7 @@ import ( "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/templates/vars" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/modules/web/middleware" asymkey_service "code.gitea.io/gitea/services/asymkey" "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/context/upload" @@ -408,7 +408,7 @@ func ViewIssue(ctx *context.Context) { } ctx.Data["Reference"] = issue.Ref - ctx.Data["SignInLink"] = setting.AppSubURL + "/user/login?redirect_to=" + url.QueryEscape(ctx.Data["Link"].(string)) + ctx.Data["SignInLink"] = middleware.RedirectLinkUserLogin(ctx.Req) ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID) ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) ctx.Data["HasProjectsWritePermission"] = ctx.Repo.CanWrite(unit.TypeProjects) @@ -495,7 +495,7 @@ func preparePullViewSigning(ctx *context.Context, issue *issues_model.Issue) { pull := issue.PullRequest ctx.Data["WillSign"] = false if ctx.Doer != nil { - sign, key, _, err := asymkey_service.SignMerge(ctx, pull, ctx.Doer, ctx.Repo.GitRepo, pull.BaseBranch, 
pull.GetGitHeadRefName()) + sign, key, _, err := asymkey_service.SignMerge(ctx, pull, ctx.Doer, ctx.Repo.GitRepo) ctx.Data["WillSign"] = sign ctx.Data["SigningKeyMergeDisplay"] = asymkey_model.GetDisplaySigningKey(key) if err != nil { diff --git a/routers/web/repo/middlewares.go b/routers/web/repo/middlewares.go index 7518e6feae..c7c9da498b 100644 --- a/routers/web/repo/middlewares.go +++ b/routers/web/repo/middlewares.go @@ -7,8 +7,11 @@ import ( "strconv" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" + "code.gitea.io/gitea/services/gitdiff" user_service "code.gitea.io/gitea/services/user" ) @@ -28,36 +31,24 @@ func SetEditorconfigIfExists(ctx *context.Context) { ctx.Data["Editorconfig"] = ec } +func GetDiffViewStyle(ctx *context.Context) string { + return util.Iif(ctx.Data["IsSplitStyle"] == true, gitdiff.DiffStyleSplit, gitdiff.DiffStyleUnified) +} + // SetDiffViewStyle set diff style as render variable func SetDiffViewStyle(ctx *context.Context) { - queryStyle := ctx.FormString("style") - - if !ctx.IsSigned { - ctx.Data["IsSplitStyle"] = queryStyle == "split" - return + style := ctx.FormString("style") + if ctx.IsSigned { + style = util.IfZero(style, ctx.Doer.DiffViewStyle) + style = util.Iif(style == gitdiff.DiffStyleSplit, gitdiff.DiffStyleSplit, gitdiff.DiffStyleUnified) + if style != ctx.Doer.DiffViewStyle { + err := user_service.UpdateUser(ctx, ctx.Doer, &user_service.UpdateOptions{DiffViewStyle: optional.Some(style)}) + if err != nil { + log.Error("UpdateUser DiffViewStyle: %v", err) + } + } } - - var ( - userStyle = ctx.Doer.DiffViewStyle - style string - ) - - if queryStyle == "unified" || queryStyle == "split" { - style = queryStyle - } else if userStyle == "unified" || userStyle == "split" { - style = userStyle - } else { - style = "unified" - } - ctx.Data["IsSplitStyle"] = style == "split" - - opts := 
&user_service.UpdateOptions{ - DiffViewStyle: optional.Some(style), - } - if err := user_service.UpdateUser(ctx, ctx.Doer, opts); err != nil { - ctx.ServerError("UpdateUser", err) - } } // SetWhitespaceBehavior set whitespace behavior as render variable diff --git a/routers/web/repo/middlewares_test.go b/routers/web/repo/middlewares_test.go new file mode 100644 index 0000000000..c6dc2e4615 --- /dev/null +++ b/routers/web/repo/middlewares_test.go @@ -0,0 +1,59 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package repo + +import ( + "testing" + + "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/services/contexttest" + "code.gitea.io/gitea/services/gitdiff" + + "github.com/stretchr/testify/assert" +) + +func TestDiffViewStyle(t *testing.T) { + unittest.PrepareTestEnv(t) + + t.Run("AnonymousUser", func(t *testing.T) { + ctx, _ := contexttest.MockContext(t, "/any") + SetDiffViewStyle(ctx) + assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx)) + + ctx, _ = contexttest.MockContext(t, "/any?style=split") + SetDiffViewStyle(ctx) + assert.Equal(t, gitdiff.DiffStyleSplit, GetDiffViewStyle(ctx)) + + ctx, _ = contexttest.MockContext(t, "/any") + SetDiffViewStyle(ctx) + assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx)) // at the moment, anonymous users don't have a saved preference + }) + + t.Run("SignedInUser", func(t *testing.T) { + ctx, _ := contexttest.MockContext(t, "/any") + contexttest.LoadUser(t, ctx, 2) + SetDiffViewStyle(ctx) + assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx)) + + ctx, _ = contexttest.MockContext(t, "/any?style=split") + contexttest.LoadUser(t, ctx, 2) + SetDiffViewStyle(ctx) + assert.Equal(t, gitdiff.DiffStyleSplit, GetDiffViewStyle(ctx)) + + ctx, _ = contexttest.MockContext(t, "/any") + contexttest.LoadUser(t, ctx, 2) + SetDiffViewStyle(ctx) + assert.Equal(t, gitdiff.DiffStyleSplit, GetDiffViewStyle(ctx)) + + ctx, _ = contexttest.MockContext(t, 
"/any?style=unified") + contexttest.LoadUser(t, ctx, 2) + SetDiffViewStyle(ctx) + assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx)) + + ctx, _ = contexttest.MockContext(t, "/any") + contexttest.LoadUser(t, ctx, 2) + SetDiffViewStyle(ctx) + assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx)) + }) +} diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go index 57f96e81b6..cff501ad71 100644 --- a/routers/web/repo/pull.go +++ b/routers/web/repo/pull.go @@ -23,6 +23,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/commitstatus" "code.gitea.io/gitea/modules/emoji" "code.gitea.io/gitea/modules/fileicon" "code.gitea.io/gitea/modules/git" @@ -32,8 +33,10 @@ import ( "code.gitea.io/gitea/modules/graceful" issue_template "code.gitea.io/gitea/modules/issue/template" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" + "code.gitea.io/gitea/modules/translation" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/utils" @@ -236,7 +239,7 @@ func GetMergedBaseCommitID(ctx *context.Context, issue *issues_model.Issue) stri } if commitSHA != "" { // Get immediate parent of the first commit in the patch, grab history back - parentCommit, err = gitrepo.RunCmdString(ctx, ctx.Repo.Repository, + parentCommit, _, err = gitrepo.RunCmdString(ctx, ctx.Repo.Repository, gitcmd.NewCommand("rev-list", "-1", "--skip=1").AddDynamicArguments(commitSHA)) if err == nil { parentCommit = strings.TrimSpace(parentCommit) @@ -319,6 +322,26 @@ type pullCommitStatusCheckData struct { RequireApprovalRunCount int // number of workflow runs that require approval CanApprove bool // whether the user can approve workflow runs ApproveLink string // link to approve all checks + RequiredChecksState 
commitstatus.CommitStatusState + LatestCommitStatus *git_model.CommitStatus +} + +func (d *pullCommitStatusCheckData) CommitStatusCheckPrompt(locale translation.Locale) string { + if d.RequiredChecksState.IsPending() || len(d.MissingRequiredChecks) > 0 { + return locale.TrString("repo.pulls.status_checking") + } else if d.RequiredChecksState.IsSuccess() { + if d.LatestCommitStatus != nil && d.LatestCommitStatus.State.IsFailure() { + return locale.TrString("repo.pulls.status_checks_failure_optional") + } + return locale.TrString("repo.pulls.status_checks_success") + } else if d.RequiredChecksState.IsWarning() { + return locale.TrString("repo.pulls.status_checks_warning") + } else if d.RequiredChecksState.IsFailure() { + return locale.TrString("repo.pulls.status_checks_failure_required") + } else if d.RequiredChecksState.IsError() { + return locale.TrString("repo.pulls.status_checks_error") + } + return locale.TrString("repo.pulls.status_checking") } // prepareViewPullInfo show meta information for a pull request preview page @@ -359,6 +382,8 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git_s defer baseGitRepo.Close() } + statusCheckData := &pullCommitStatusCheckData{} + if exist, _ := git_model.IsBranchExist(ctx, pull.BaseRepo.ID, pull.BaseBranch); !exist { ctx.Data["BaseBranchNotExist"] = true ctx.Data["IsPullRequestBroken"] = true @@ -379,9 +404,10 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git_s git_model.CommitStatusesHideActionsURL(ctx, commitStatuses) } + statusCheckData.LatestCommitStatus = git_model.CalcCommitStatus(commitStatuses) if len(commitStatuses) > 0 { ctx.Data["LatestCommitStatuses"] = commitStatuses - ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses) + ctx.Data["LatestCommitStatus"] = statusCheckData.LatestCommitStatus } compareInfo, err := git_service.GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo, @@ -466,10 +492,8 @@ func 
prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git_s return nil } - statusCheckData := &pullCommitStatusCheckData{ - ApproveLink: fmt.Sprintf("%s/actions/approve-all-checks?commit_id=%s", repo.Link(), sha), - } ctx.Data["StatusCheckData"] = statusCheckData + statusCheckData.ApproveLink = fmt.Sprintf("%s/actions/approve-all-checks?commit_id=%s", repo.Link(), sha) commitStatuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll) if err != nil { @@ -494,9 +518,10 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git_s statusCheckData.CanApprove = ctx.Repo.CanWrite(unit.TypeActions) } + statusCheckData.LatestCommitStatus = git_model.CalcCommitStatus(commitStatuses) if len(commitStatuses) > 0 { ctx.Data["LatestCommitStatuses"] = commitStatuses - ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses) + ctx.Data["LatestCommitStatus"] = statusCheckData.LatestCommitStatus } if pb != nil && pb.EnableStatusCheck { @@ -533,7 +558,7 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git_s } return false } - ctx.Data["RequiredStatusCheckState"] = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pb.StatusCheckContexts) + statusCheckData.RequiredChecksState = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pb.StatusCheckContexts) } ctx.Data["HeadBranchMovedOn"] = headBranchSha != sha @@ -864,7 +889,7 @@ func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) { ctx.Data["DiffBlobExcerptData"] = &gitdiff.DiffBlobExcerptData{ BaseLink: ctx.Repo.RepoLink + "/blob_excerpt", PullIssueIndex: pull.Index, - DiffStyle: ctx.FormString("style"), + DiffStyle: GetDiffViewStyle(ctx), AfterCommitID: afterCommitID, } ctx.Data["DiffNotAvailable"] = diffShortStat.NumFiles == 0 @@ -1136,11 +1161,9 @@ func MergePullRequest(ctx *context.Context) { message += "\n\n" + form.MergeMessageField } - deleteBranchAfterMerge, err := 
pull_service.ShouldDeleteBranchAfterMerge(ctx, form.DeleteBranchAfterMerge, ctx.Repo.Repository, pr) - if err != nil { - ctx.ServerError("ShouldDeleteBranchAfterMerge", err) - return - } + // There is always a checkbox on the UI (the DeleteBranchAfterMerge is nil if the checkbox is not checked), + // just use the user's choice, don't use pull_service.ShouldDeleteBranchAfterMerge to decide + deleteBranchAfterMerge := optional.FromPtr(form.DeleteBranchAfterMerge).Value() if form.MergeWhenChecksSucceed { // delete all scheduled auto merges @@ -1266,6 +1289,28 @@ func CancelAutoMergePullRequest(ctx *context.Context) { return } + exist, autoMerge, err := pull_model.GetScheduledMergeByPullID(ctx, issue.PullRequest.ID) + if err != nil { + ctx.ServerError("GetScheduledMergeByPullID", err) + return + } + if !exist { + ctx.NotFound(nil) + return + } + + if ctx.Doer.ID != autoMerge.DoerID { + allowed, err := pull_service.IsUserAllowedToMerge(ctx, issue.PullRequest, ctx.Repo.Permission, ctx.Doer) + if err != nil { + ctx.ServerError("IsUserAllowedToMerge", err) + return + } + if !allowed { + ctx.HTTPError(http.StatusForbidden, "user has no permission to cancel the scheduled auto merge") + return + } + } + if err := automerge.RemoveScheduledAutoMerge(ctx, ctx.Doer, issue.PullRequest); err != nil { if db.IsErrNotExist(err) { ctx.Flash.Error(ctx.Tr("repo.pulls.auto_merge_not_scheduled")) @@ -1391,6 +1436,7 @@ func CompareAndPullRequestPost(ctx *context.Context) { AssigneeIDs: assigneeIDs, Reviewers: validateRet.Reviewers, TeamReviewers: validateRet.TeamReviewers, + ProjectID: projectID, } if err := pull_service.NewPullRequest(ctx, prOpts); err != nil { switch { @@ -1442,15 +1488,6 @@ func CompareAndPullRequestPost(ctx *context.Context) { return } - if projectID > 0 && ctx.Repo.CanWrite(unit.TypeProjects) { - if err := issues_model.IssueAssignOrRemoveProject(ctx, pullIssue, ctx.Doer, projectID, 0); err != nil { - if !errors.Is(err, util.ErrPermissionDenied) { - 
ctx.ServerError("IssueAssignOrRemoveProject", err) - return - } - } - } - log.Trace("Pull request created: %d/%d", repo.ID, pullIssue.ID) ctx.JSONRedirect(pullIssue.Link()) } diff --git a/routers/web/repo/pull_review_test.go b/routers/web/repo/pull_review_test.go index 42223c1d9c..1b28cad5b6 100644 --- a/routers/web/repo/pull_review_test.go +++ b/routers/web/repo/pull_review_test.go @@ -30,7 +30,7 @@ func TestRenderConversation(t *testing.T) { run := func(name string, cb func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder)) { t.Run(name, func(t *testing.T) { - ctx, resp := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()}) + ctx, resp := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()}) contexttest.LoadUser(t, ctx, pr.Issue.PosterID) contexttest.LoadRepo(t, ctx, pr.BaseRepoID) contexttest.LoadGitRepo(t, ctx) diff --git a/routers/web/repo/render.go b/routers/web/repo/render.go index d6447795bf..b1299c7047 100644 --- a/routers/web/repo/render.go +++ b/routers/web/repo/render.go @@ -32,24 +32,18 @@ func RenderFile(ctx *context.Context) { return } - dataRc, err := blob.DataAsync() + blobReader, err := blob.DataAsync() if err != nil { ctx.ServerError("DataAsync", err) return } - defer dataRc.Close() - - if markupType := markup.DetectMarkupTypeByFileName(blob.Name()); markupType == "" { - http.Error(ctx.Resp, "Unsupported file type render", http.StatusBadRequest) - return - } + defer blobReader.Close() rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), CurrentTreePath: path.Dir(ctx.Repo.TreePath), }).WithRelativePath(ctx.Repo.TreePath).WithInStandalonePage(true) - - renderer, err := markup.FindRendererByContext(rctx) + renderer, rendererInput, err := rctx.DetectMarkupRendererByReader(blobReader) if err != nil { http.Error(ctx.Resp, "Unable to find renderer", 
http.StatusBadRequest) return @@ -71,7 +65,7 @@ func RenderFile(ctx *context.Context) { ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'") } - err = markup.RenderWithRenderer(rctx, renderer, dataRc, ctx.Resp) + err = markup.RenderWithRenderer(rctx, renderer, rendererInput, ctx.Resp) if err != nil { log.Error("Failed to render file %q: %v", ctx.Repo.TreePath, err) http.Error(ctx.Resp, "Failed to render file", http.StatusInternalServerError) diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go index 3a0976ffa0..bc2b0264c0 100644 --- a/routers/web/repo/repo.go +++ b/routers/web/repo/repo.go @@ -364,31 +364,39 @@ func RedirectDownload(ctx *context.Context) { // Download an archive of a repository func Download(ctx *context.Context) { - aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("*")) + aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("*"), ctx.FormStrings("path")) if err != nil { - if errors.Is(err, archiver_service.ErrUnknownArchiveFormat{}) { + if errors.Is(err, util.ErrInvalidArgument) { ctx.HTTPError(http.StatusBadRequest, err.Error()) - } else if errors.Is(err, archiver_service.RepoRefNotFoundError{}) { + } else if errors.Is(err, util.ErrNotExist) { ctx.HTTPError(http.StatusNotFound, err.Error()) } else { ctx.ServerError("archiver_service.NewRequest", err) } return } - archiver_service.ServeRepoArchive(ctx.Base, aReq) + err = archiver_service.ServeRepoArchive(ctx.Base, aReq) + if err != nil { + if errors.Is(err, util.ErrInvalidArgument) { + ctx.HTTPError(http.StatusBadRequest, err.Error()) + } else { + ctx.ServerError("archiver_service.ServeRepoArchive", err) + } + } } // InitiateDownload will enqueue an archival request, as needed. It may submit // a request that's already in-progress, but the archiver service will just // kind of drop it on the floor if this is the case. 
func InitiateDownload(ctx *context.Context) { - if setting.Repository.StreamArchives { + paths := ctx.FormStrings("path") + if setting.Repository.StreamArchives || len(paths) > 0 { ctx.JSON(http.StatusOK, map[string]any{ "complete": true, }) return } - aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("*")) + aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("*"), paths) if err != nil { ctx.HTTPError(http.StatusBadRequest, "invalid archive request") return diff --git a/routers/web/repo/setting/lfs.go b/routers/web/repo/setting/lfs.go index c7a19062d2..8a8015035f 100644 --- a/routers/web/repo/setting/lfs.go +++ b/routers/web/repo/setting/lfs.go @@ -407,7 +407,9 @@ func LFSPointerFiles(ctx *context.Context) { err = func() error { pointerChan := make(chan lfs.PointerBlob) errChan := make(chan error, 1) - go lfs.SearchPointerBlobs(ctx, ctx.Repo.GitRepo, pointerChan, errChan) + go func() { + errChan <- lfs.SearchPointerBlobs(ctx, ctx.Repo.GitRepo, pointerChan) + }() numPointers := 0 var numAssociated, numNoExist, numAssociatable int @@ -483,11 +485,6 @@ func LFSPointerFiles(ctx *context.Context) { results = append(results, result) } - err, has := <-errChan - if has { - return err - } - ctx.Data["Pointers"] = results ctx.Data["NumPointers"] = numPointers ctx.Data["NumAssociated"] = numAssociated @@ -495,7 +492,8 @@ func LFSPointerFiles(ctx *context.Context) { ctx.Data["NumNoExist"] = numNoExist ctx.Data["NumNotAssociated"] = numPointers - numAssociated - return nil + err := <-errChan + return err }() if err != nil { ctx.ServerError("LFSPointerFiles", err) diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go index 8e85cc3278..8aeb1a0af8 100644 --- a/routers/web/repo/view.go +++ b/routers/web/repo/view.go @@ -151,12 +151,7 @@ func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool { return true } -func markupRender(ctx *context.Context, renderCtx 
*markup.RenderContext, input io.Reader) (escaped *charset.EscapeStatus, output template.HTML, err error) { - renderer, err := markup.FindRendererByContext(renderCtx) - if err != nil { - return nil, "", err - } - +func markupRenderToHTML(ctx *context.Context, renderCtx *markup.RenderContext, renderer markup.Renderer, input io.Reader) (escaped *charset.EscapeStatus, output template.HTML, err error) { markupRd, markupWr := io.Pipe() defer markupWr.Close() diff --git a/routers/web/repo/view_file.go b/routers/web/repo/view_file.go index 167cd5f927..44bc8543b0 100644 --- a/routers/web/repo/view_file.go +++ b/routers/web/repo/view_file.go @@ -21,9 +21,7 @@ import ( "code.gitea.io/gitea/modules/git/attribute" "code.gitea.io/gitea/modules/highlight" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/typesniffer" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" issue_service "code.gitea.io/gitea/services/issue" @@ -60,14 +58,19 @@ func prepareFileViewLfsAttrs(ctx *context.Context) (*attribute.Attributes, bool) return attrs, true } -func handleFileViewRenderMarkup(ctx *context.Context, filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte, utf8Reader io.Reader) bool { - markupType := markup.DetectMarkupTypeByFileName(filename) - if markupType == "" { - markupType = markup.DetectRendererType(filename, sniffedType, prefetchBuf) - } - if markupType == "" { - return false +func handleFileViewRenderMarkup(ctx *context.Context, prefetchBuf []byte, utf8Reader io.Reader) bool { + rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ + CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), + CurrentTreePath: path.Dir(ctx.Repo.TreePath), + }).WithRelativePath(ctx.Repo.TreePath) + + renderer := rctx.DetectMarkupRenderer(prefetchBuf) + if renderer == nil { + return false // not supported markup } + metas := 
ctx.Repo.Repository.ComposeRepoFileMetas(ctx) + metas["RefTypeNameSubURL"] = ctx.Repo.RefTypeNameSubURL() + rctx.WithMetas(metas) ctx.Data["HasSourceRenderedToggle"] = true @@ -75,19 +78,10 @@ func handleFileViewRenderMarkup(ctx *context.Context, filename string, sniffedTy return false } - ctx.Data["MarkupType"] = markupType - metas := ctx.Repo.Repository.ComposeRepoFileMetas(ctx) - metas["RefTypeNameSubURL"] = ctx.Repo.RefTypeNameSubURL() - rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ - CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), - CurrentTreePath: path.Dir(ctx.Repo.TreePath), - }). - WithMarkupType(markupType). - WithRelativePath(ctx.Repo.TreePath). - WithMetas(metas) + ctx.Data["MarkupType"] = rctx.RenderOptions.MarkupType var err error - ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, utf8Reader) + ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRenderToHTML(ctx, rctx, renderer, utf8Reader) if err != nil { ctx.ServerError("Render", err) return true @@ -95,7 +89,8 @@ func handleFileViewRenderMarkup(ctx *context.Context, filename string, sniffedTy return true } -func handleFileViewRenderSource(ctx *context.Context, filename string, attrs *attribute.Attributes, fInfo *fileInfo, utf8Reader io.Reader) bool { +func handleFileViewRenderSource(ctx *context.Context, attrs *attribute.Attributes, fInfo *fileInfo, utf8Reader io.Reader) bool { + filename := ctx.Repo.TreePath if ctx.FormString("display") == "rendered" || !fInfo.st.IsRepresentableAsText() { return false } @@ -124,11 +119,11 @@ func handleFileViewRenderSource(ctx *context.Context, filename string, attrs *at } language := attrs.GetLanguage().Value() - fileContent, lexerName, err := highlight.File(filename, language, buf) + fileContent, lexerName, err := highlight.RenderFullFile(filename, language, buf) ctx.Data["LexerName"] = lexerName if err != nil { - log.Error("highlight.File failed, fallback to plain 
text: %v", err) - fileContent = highlight.PlainText(buf) + log.Error("highlight.RenderFullFile failed, fallback to plain text: %v", err) + fileContent = highlight.RenderPlainText(buf) } status := &charset.EscapeStatus{} statuses := make([]*charset.EscapeStatus, len(fileContent)) @@ -246,10 +241,10 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) { switch { case fInfo.blobOrLfsSize >= setting.UI.MaxDisplayFileSize: ctx.Data["IsFileTooLarge"] = true - case handleFileViewRenderMarkup(ctx, entry.Name(), fInfo.st, buf, contentReader): + case handleFileViewRenderMarkup(ctx, buf, contentReader): // it also sets ctx.Data["FileContent"] and more ctx.Data["IsMarkup"] = true - case handleFileViewRenderSource(ctx, entry.Name(), attrs, fInfo, contentReader): + case handleFileViewRenderSource(ctx, attrs, fInfo, contentReader): // it also sets ctx.Data["FileContent"] and more ctx.Data["IsDisplayingSource"] = true case handleFileViewRenderImage(ctx, fInfo, buf): diff --git a/routers/web/repo/view_readme.go b/routers/web/repo/view_readme.go index f1fa5732f0..830709422e 100644 --- a/routers/web/repo/view_readme.go +++ b/routers/web/repo/view_readme.go @@ -18,7 +18,6 @@ import ( "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" @@ -190,18 +189,15 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{}) - if markupType := markup.DetectMarkupTypeByFileName(readmeFile.Name()); markupType != "" { + rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ + CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), + CurrentTreePath: path.Dir(readmeFullPath), + }).WithRelativePath(readmeFullPath) + 
renderer := rctx.DetectMarkupRenderer(buf) + if renderer != nil { ctx.Data["IsMarkup"] = true - ctx.Data["MarkupType"] = markupType - - rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ - CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), - CurrentTreePath: path.Dir(readmeFullPath), - }). - WithMarkupType(markupType). - WithRelativePath(readmeFullPath) - - ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, rd) + ctx.Data["MarkupType"] = rctx.RenderOptions.MarkupType + ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRenderToHTML(ctx, rctx, renderer, rd) if err != nil { log.Error("Render failed for %s in %-v: %v Falling back to rendering source", readmeFile.Name(), ctx.Repo.Repository, err) delete(ctx.Data, "IsMarkup") diff --git a/routers/web/repo/wiki.go b/routers/web/repo/wiki.go index 921e17fb6a..5f775efb22 100644 --- a/routers/web/repo/wiki.go +++ b/routers/web/repo/wiki.go @@ -10,7 +10,7 @@ import ( "io" "net/http" "net/url" - "path/filepath" + "path" "strings" "code.gitea.io/gitea/models/renderhelper" @@ -277,12 +277,10 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) { return nil, nil } - if rctx.SidebarTocNode != nil { + if rctx.TocShowInSection == markup.TocShowInSidebar && len(rctx.TocHeadingItems) > 0 { sb := strings.Builder{} - if err = markdown.SpecializedMarkdown(rctx).Renderer().Render(&sb, nil, rctx.SidebarTocNode); err != nil { - log.Error("Failed to render wiki sidebar TOC: %v", err) - } - ctx.Data["WikiSidebarTocHTML"] = templates.SanitizeHTML(sb.String()) + markup.RenderTocHeadingItems(rctx, map[string]string{"open": ""}, &sb) + ctx.Data["WikiSidebarTocHTML"] = template.HTML(sb.String()) } if !isSideBar { @@ -492,9 +490,9 @@ func Wiki(ctx *context.Context) { } wikiPath := entry.Name() - if markup.DetectMarkupTypeByFileName(wikiPath) != markdown.MarkupName { - ext := strings.ToUpper(filepath.Ext(wikiPath)) - 
ctx.Data["FormatWarning"] = ext + " rendering is not supported at the moment. Rendered as Markdown." + detectedRender := markup.DetectRendererTypeByFilename(wikiPath) + if detectedRender == nil || detectedRender.Name() != markdown.MarkupName { + ctx.Data["FormatWarning"] = "File extension " + path.Ext(wikiPath) + " is not supported at the moment. Rendered as Markdown." } // Get last change information. lastCommit, err := wikiGitRepo.GetCommitByPath(wikiPath) diff --git a/routers/web/user/home.go b/routers/web/user/home.go index b53a3daedb..9e77c51d12 100644 --- a/routers/web/user/home.go +++ b/routers/web/user/home.go @@ -660,6 +660,8 @@ func ShowSSHKeys(ctx *context.Context) { } var buf bytes.Buffer + // "authorized_keys" file format: "#" followed by comment line per key + buf.WriteString("# Gitea isn't a key server. The keys are exported as the user uploaded and might not have been fully verified.\n") for i := range keys { buf.WriteString(keys[i].OmitEmail()) buf.WriteString("\n") @@ -695,6 +697,8 @@ func ShowGPGKeys(ctx *context.Context) { var buf bytes.Buffer headers := make(map[string]string) + // https://www.rfc-editor.org/rfc/rfc4880 + headers["Comment"] = "Gitea isn't a key server. The keys are exported as the user uploaded and might not have been fully verified." 
if len(failedEntitiesID) > 0 { // If some key need re-import to be exported headers["Note"] = "The keys with the following IDs couldn't be exported and need to be reuploaded " + strings.Join(failedEntitiesID, ", ") } else if len(entities) == 0 { diff --git a/routers/web/user/home_test.go b/routers/web/user/home_test.go index c5b9e16c1e..5f3646769e 100644 --- a/routers/web/user/home_test.go +++ b/routers/web/user/home_test.go @@ -116,7 +116,7 @@ func TestMilestonesForSpecificRepo(t *testing.T) { } func TestDashboardPagination(t *testing.T) { - ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()}) + ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()}) page := context.NewPagination(10, 3, 1, 3) setting.AppSubURL = "/SubPath" diff --git a/routers/web/user/notification.go b/routers/web/user/notification.go index aaf9d435c0..cf61b0a2f2 100644 --- a/routers/web/user/notification.go +++ b/routers/web/user/notification.go @@ -15,6 +15,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/setting" @@ -128,7 +129,9 @@ func prepareUserNotificationsData(ctx *context.Context) { ctx.Data["Notifications"] = notifications ctx.Data["Link"] = setting.AppSubURL + "/notifications" ctx.Data["SequenceNumber"] = ctx.FormString("sequence-number") + pager.AddParamFromRequest(ctx.Req) + pager.RemoveParam(container.SetOf("div-only", "sequence-number")) ctx.Data["Page"] = pager } diff --git a/routers/web/user/setting/adopt.go b/routers/web/user/setting/adopt.go index 171c1933d4..abf9d8c6db 100644 --- a/routers/web/user/setting/adopt.go +++ b/routers/web/user/setting/adopt.go @@ -4,12 +4,9 @@ package setting import ( - "path/filepath" - repo_model 
"code.gitea.io/gitea/models/repo" - user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" repo_service "code.gitea.io/gitea/services/repository" ) @@ -27,7 +24,6 @@ func AdoptOrDeleteRepository(ctx *context.Context) { action := ctx.FormString("action") ctxUser := ctx.Doer - root := user_model.UserPath(ctxUser.LowerName) // check not a repo has, err := repo_model.IsRepositoryModelExist(ctx, ctxUser, dir) @@ -36,12 +32,12 @@ func AdoptOrDeleteRepository(ctx *context.Context) { return } - isDir, err := util.IsDir(filepath.Join(root, dir+".git")) + exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(repo_model.RelativePath(ctxUser.Name, dir))) if err != nil { ctx.ServerError("IsDir", err) return } - if has || !isDir { + if has || !exist { // Fallthrough to failure mode } else if action == "adopt" && allowAdopt { if _, err := repo_service.AdoptRepository(ctx, ctxUser, ctxUser, repo_service.CreateRepoOptions{ diff --git a/routers/web/user/setting/security/main_test.go b/routers/web/user/setting/security/main_test.go new file mode 100644 index 0000000000..2a27cd6dbf --- /dev/null +++ b/routers/web/user/setting/security/main_test.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package security + +import ( + "testing" + + "code.gitea.io/gitea/models/unittest" +) + +func TestMain(m *testing.M) { + unittest.MainTest(m) +} diff --git a/routers/web/user/setting/security/openid.go b/routers/web/user/setting/security/openid.go index a23a98dd25..78db7650fe 100644 --- a/routers/web/user/setting/security/openid.go +++ b/routers/web/user/setting/security/openid.go @@ -4,12 +4,14 @@ package security import ( + "errors" "net/http" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/auth/openid" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/forms" @@ -116,7 +118,11 @@ func DeleteOpenID(ctx *context.Context) { } if err := user_model.DeleteUserOpenID(ctx, &user_model.UserOpenID{ID: ctx.FormInt64("id"), UID: ctx.Doer.ID}); err != nil { - ctx.ServerError("DeleteUserOpenID", err) + if errors.Is(err, util.ErrNotExist) { + ctx.HTTPError(http.StatusNotFound) + } else { + ctx.ServerError("DeleteUserOpenID", err) + } return } log.Trace("OpenID address deleted: %s", ctx.Doer.Name) @@ -132,8 +138,12 @@ func ToggleOpenIDVisibility(ctx *context.Context) { return } - if err := user_model.ToggleUserOpenIDVisibility(ctx, ctx.FormInt64("id")); err != nil { - ctx.ServerError("ToggleUserOpenIDVisibility", err) + if err := user_model.ToggleUserOpenIDVisibility(ctx, ctx.FormInt64("id"), ctx.Doer); err != nil { + if errors.Is(err, util.ErrNotExist) { + ctx.HTTPError(http.StatusNotFound) + } else { + ctx.ServerError("ToggleUserOpenIDVisibility", err) + } return } diff --git a/routers/web/user/setting/security/openid_test.go b/routers/web/user/setting/security/openid_test.go new file mode 100644 index 0000000000..860639ea1c --- /dev/null +++ b/routers/web/user/setting/security/openid_test.go @@ -0,0 +1,36 @@ +// Copyright 2024 The Gitea Authors. 
All rights reserved. +// SPDX-License-Identifier: MIT + +package security + +import ( + "net/http" + "testing" + + "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/services/contexttest" + + "github.com/stretchr/testify/assert" +) + +func TestDeleteOpenIDReturnsNotFoundForOtherUsersAddress(t *testing.T) { + unittest.PrepareTestEnv(t) + ctx, _ := contexttest.MockContext(t, "POST /user/settings/security") + contexttest.LoadUser(t, ctx, 2) + ctx.SetFormString("id", "1") + + DeleteOpenID(ctx) + + assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus()) +} + +func TestToggleOpenIDVisibilityReturnsNotFoundForOtherUsersAddress(t *testing.T) { + unittest.PrepareTestEnv(t) + ctx, _ := contexttest.MockContext(t, "POST /user/settings/security") + contexttest.LoadUser(t, ctx, 2) + ctx.SetFormString("id", "1") + + ToggleOpenIDVisibility(ctx) + + assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus()) +} diff --git a/routers/web/user/stop_watch.go b/routers/web/user/stop_watch.go index 1d1cc61cc9..4bd8841573 100644 --- a/routers/web/user/stop_watch.go +++ b/routers/web/user/stop_watch.go @@ -29,7 +29,7 @@ func GetStopwatches(ctx *context.Context) { return } - apiSWs, err := convert.ToStopWatches(ctx, sws) + apiSWs, err := convert.ToStopWatches(ctx, ctx.Doer, sws) if err != nil { ctx.HTTPError(http.StatusInternalServerError, err.Error()) return diff --git a/routers/web/web.go b/routers/web/web.go index 4da8cdb581..c37add30d5 100644 --- a/routers/web/web.go +++ b/routers/web/web.go @@ -18,7 +18,6 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/validation" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" @@ -159,9 +158,7 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Cont } ctx.Data["Title"] = ctx.Tr("auth.must_change_password") 
ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/change_password" - if ctx.Req.URL.Path != "/user/events" { - middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI()) - } + middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI()) ctx.Redirect(setting.AppSubURL + "/user/settings/change_password") return } @@ -172,7 +169,7 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Cont } } - // Redirect to dashboard (or alternate location) if user tries to visit any non-login page. + // When a signed-in user visits a page that requires sign-out (e.g.: "/user/login"), redirect to home (or alternate location) if options.SignOutRequired && ctx.IsSigned && ctx.Req.URL.RequestURI() != "/" { ctx.RedirectToCurrentSite(ctx.FormString("redirect_to")) return @@ -187,10 +184,7 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Cont if options.SignInRequired { if !ctx.IsSigned { - if ctx.Req.URL.Path != "/user/events" { - middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI()) - } - ctx.Redirect(setting.AppSubURL + "/user/login") + ctx.Redirect(middleware.RedirectLinkUserLogin(ctx.Req)) return } else if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm { ctx.Data["Title"] = ctx.Tr("auth.active_your_account") @@ -200,12 +194,8 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Cont } // Redirect to log in page if auto-signin info is provided and has not signed in. 
- if !options.SignOutRequired && !ctx.IsSigned && - ctx.GetSiteCookie(setting.CookieRememberName) != "" { - if ctx.Req.URL.Path != "/user/events" { - middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI()) - } - ctx.Redirect(setting.AppSubURL + "/user/login") + if !options.SignOutRequired && !ctx.IsSigned && ctx.GetSiteCookie(setting.CookieRememberName) != "" { + ctx.Redirect(middleware.RedirectLinkUserLogin(ctx.Req)) return } @@ -241,8 +231,6 @@ func Routes() *web.Router { routes.Methods("GET, HEAD", "/apple-touch-icon-precomposed.png", misc.StaticRedirect("/assets/img/apple-touch-icon.png")) routes.Methods("GET, HEAD", "/favicon.ico", misc.StaticRedirect("/assets/img/favicon.png")) - _ = templates.HTMLRenderer() - var mid []any if setting.EnableGzip { diff --git a/services/actions/commit_status.go b/services/actions/commit_status.go index 089dfeb634..7271f58091 100644 --- a/services/actions/commit_status.go +++ b/services/actions/commit_status.go @@ -9,6 +9,7 @@ import ( "fmt" "path" "strconv" + "strings" actions_model "code.gitea.io/gitea/models/actions" "code.gitea.io/gitea/models/db" @@ -129,6 +130,7 @@ func createCommitStatus(ctx context.Context, repo *repo_model.Repository, event, runName = wfs[0].Name } ctxName := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event) + ctxName = strings.TrimSpace(ctxName) // git_model.NewCommitStatus also trims spaces state := toCommitStatus(job.Status) if statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commitID, db.ListOptionsAll); err == nil { for _, v := range statuses { diff --git a/services/actions/job_emitter.go b/services/actions/job_emitter.go index 74a8a127ef..27e540f5cc 100644 --- a/services/actions/job_emitter.go +++ b/services/actions/job_emitter.go @@ -114,7 +114,7 @@ func checkJobsByRunID(ctx context.Context, runID int64) error { } } if runUpdated { - NotifyWorkflowRunStatusUpdateWithReload(ctx, jobs[0]) + NotifyWorkflowRunStatusUpdateWithReload(ctx, js[0]) } } 
return nil diff --git a/services/agit/agit.go b/services/agit/agit.go index 15fc2e8fb5..fa2ddd9baf 100644 --- a/services/agit/agit.go +++ b/services/agit/agit.go @@ -229,7 +229,7 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git. } if !forcePush.Value() { - output, err := gitrepo.RunCmdString(ctx, repo, + output, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-list", "--max-count=1"). AddDynamicArguments(oldCommitID, "^"+opts.NewCommitIDs[i]), ) diff --git a/services/asymkey/sign.go b/services/asymkey/sign.go index d778ff8918..cffefe08ae 100644 --- a/services/asymkey/sign.go +++ b/services/asymkey/sign.go @@ -271,13 +271,22 @@ Loop: } // SignMerge determines if we should sign a PR merge commit to the base repository -func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, gitRepo *git.Repository, baseCommit, headCommit string) (bool, *git.SigningKey, *git.Signature, error) { +func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, gitRepo *git.Repository) (bool, *git.SigningKey, *git.Signature, error) { if err := pr.LoadBaseRepo(ctx); err != nil { log.Error("Unable to get Base Repo for pull request") return false, nil, nil, err } repo := pr.BaseRepo + baseCommit, err := gitRepo.GetCommit(pr.BaseBranch) + if err != nil { + return false, nil, nil, err + } + headCommit, err := gitRepo.GetCommit(pr.GetGitHeadRefName()) + if err != nil { + return false, nil, nil, err + } + signingKey, signer := gitrepo.GetSigningKey(ctx) if signingKey == nil { return false, nil, nil, &ErrWontSign{noKey} @@ -319,38 +328,26 @@ Loop: return false, nil, nil, &ErrWontSign{approved} } case baseSigned: - commit, err := gitRepo.GetCommit(baseCommit) - if err != nil { - return false, nil, nil, err - } - verification := ParseCommitWithSignature(ctx, commit) + verification := ParseCommitWithSignature(ctx, baseCommit) if !verification.Verified { return false, nil, nil, 
&ErrWontSign{baseSigned} } case headSigned: - commit, err := gitRepo.GetCommit(headCommit) - if err != nil { - return false, nil, nil, err - } - verification := ParseCommitWithSignature(ctx, commit) + verification := ParseCommitWithSignature(ctx, headCommit) if !verification.Verified { return false, nil, nil, &ErrWontSign{headSigned} } case commitsSigned: - commit, err := gitRepo.GetCommit(headCommit) - if err != nil { - return false, nil, nil, err - } - verification := ParseCommitWithSignature(ctx, commit) + verification := ParseCommitWithSignature(ctx, headCommit) if !verification.Verified { return false, nil, nil, &ErrWontSign{commitsSigned} } // need to work out merge-base - mergeBaseCommit, _, err := gitRepo.GetMergeBase("", baseCommit, headCommit) + mergeBaseCommit, err := gitrepo.MergeBase(ctx, pr.BaseRepo, baseCommit.ID.String(), headCommit.ID.String()) if err != nil { return false, nil, nil, err } - commitList, err := commit.CommitsBeforeUntil(mergeBaseCommit) + commitList, err := headCommit.CommitsBeforeUntil(mergeBaseCommit) if err != nil { return false, nil, nil, err } diff --git a/services/context/base.go b/services/context/base.go index 8bd66bed09..4baea95ccf 100644 --- a/services/context/base.go +++ b/services/context/base.go @@ -18,6 +18,7 @@ import ( "code.gitea.io/gitea/modules/reqctx" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/middleware" ) @@ -147,10 +148,7 @@ func (b *Base) PlainText(status int, text string) { // Redirect redirects the request func (b *Base) Redirect(location string, status ...int) { - code := http.StatusSeeOther - if len(status) == 1 { - code = status[0] - } + code := util.OptionalArg(status, http.StatusSeeOther) if !httplib.IsRelativeURL(location) { // Some browsers (Safari) have buggy behavior for Cookie + Cache + External Redirection, eg: /my-path => https://other/path diff --git a/services/context/captcha.go 
b/services/context/captcha.go index 9272e7a65a..b4c3a92907 100644 --- a/services/context/captcha.go +++ b/services/context/captcha.go @@ -5,6 +5,7 @@ package context import ( "fmt" + "image/color" "sync" "code.gitea.io/gitea/modules/cache" @@ -29,6 +30,15 @@ func GetImageCaptcha() *captcha.Captcha { imageCaptchaOnce.Do(func() { cpt = captcha.NewCaptcha(captcha.Options{ SubURL: setting.AppSubURL, + // Use a color palette with high contrast colors suitable for both light and dark modes + // These colors provide good visibility and readability in both themes + ColorPalette: color.Palette{ + color.RGBA{R: 234, G: 67, B: 53, A: 255}, // Bright red + color.RGBA{R: 66, G: 133, B: 244, A: 255}, // Medium blue + color.RGBA{R: 52, G: 168, B: 83, A: 255}, // Green + color.RGBA{R: 251, G: 188, B: 5, A: 255}, // Yellow/gold + color.RGBA{R: 171, G: 71, B: 188, A: 255}, // Purple + }, }) cpt.Store = cache.GetCache().ChiCache() }) diff --git a/services/context/context.go b/services/context/context.go index 420b2aefa8..b19941cb8d 100644 --- a/services/context/context.go +++ b/services/context/context.go @@ -17,6 +17,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/httpcache" + "code.gitea.io/gitea/modules/reqctx" "code.gitea.io/gitea/modules/session" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" @@ -137,15 +138,32 @@ func NewWebContext(base *Base, render Render, session session.Store) *Context { return ctx } -// Contexter initializes a classic context for a request. 
-func Contexter() func(next http.Handler) http.Handler { - rnd := templates.HTMLRenderer() +func ContexterInstallPage(data map[string]any) func(next http.Handler) http.Handler { + rnd := templates.PageRenderer() + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + base := NewBaseContext(resp, req) + ctx := NewWebContext(base, rnd, session.GetContextSession(req)) + ctx.Data.MergeFrom(middleware.CommonTemplateContextData()) + ctx.Data.MergeFrom(reqctx.ContextData{ + "Title": ctx.Locale.Tr("install.install"), + "PageIsInstall": true, + "AllLangs": translation.AllLangs(), + }) + ctx.Data.MergeFrom(data) + next.ServeHTTP(resp, ctx.Req) + }) + } +} + +// Contexter initializes a classic context for a request. +func Contexter() func(next http.Handler) http.Handler { + rnd := templates.PageRenderer() return func(next http.Handler) http.Handler { return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { base := NewBaseContext(resp, req) ctx := NewWebContext(base, rnd, session.GetContextSession(req)) ctx.Data.MergeFrom(middleware.CommonTemplateContextData()) - ctx.Data["CurrentURL"] = setting.AppSubURL + req.URL.RequestURI() ctx.Data["Link"] = ctx.Link // PageData is passed by reference, and it will be rendered to `window.config.pageData` in `head.tmpl` for JavaScript modules diff --git a/services/context/package.go b/services/context/package.go index 8b722932b1..0e9210515b 100644 --- a/services/context/package.go +++ b/services/context/package.go @@ -150,7 +150,7 @@ func determineAccessMode(ctx *Base, pkg *Package, doer *user_model.User) (perm.A // PackageContexter initializes a package context for a request. 
func PackageContexter() func(next http.Handler) http.Handler { - renderer := templates.HTMLRenderer() + renderer := templates.PageRenderer() return func(next http.Handler) http.Handler { return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { base := NewBaseContext(resp, req) diff --git a/services/context/pagination.go b/services/context/pagination.go index 2a9805db05..21efab8b12 100644 --- a/services/context/pagination.go +++ b/services/context/pagination.go @@ -8,8 +8,10 @@ import ( "html/template" "net/http" "net/url" + "slices" "strings" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/paginator" ) @@ -49,6 +51,14 @@ func (p *Pagination) AddParamFromRequest(req *http.Request) { p.AddParamFromQuery(req.URL.Query()) } +func (p *Pagination) RemoveParam(keys container.Set[string]) { + p.urlParams = slices.DeleteFunc(p.urlParams, func(s string) bool { + k, _, _ := strings.Cut(s, "=") + k, _ = url.QueryUnescape(k) + return keys.Contains(k) + }) +} + // GetParams returns the configured URL params func (p *Pagination) GetParams() template.URL { return template.URL(strings.Join(p.urlParams, "&")) diff --git a/services/context/pagination_test.go b/services/context/pagination_test.go new file mode 100644 index 0000000000..78359caa09 --- /dev/null +++ b/services/context/pagination_test.go @@ -0,0 +1,35 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package context + +import ( + "net/url" + "testing" + + "code.gitea.io/gitea/modules/container" + + "github.com/stretchr/testify/assert" +) + +func TestPagination(t *testing.T) { + p := NewPagination(1, 1, 1, 1) + params := url.Values{} + params.Add("k1", "11") + params.Add("k1", "12") + params.Add("k", "a") + params.Add("k", "b") + params.Add("k2", "21") + params.Add("k2", "22") + params.Add("foo", "bar") + + p.AddParamFromQuery(params) + v, _ := url.ParseQuery(string(p.GetParams())) + assert.Equal(t, params, v) + + p.RemoveParam(container.SetOf("k", "foo")) + params.Del("k") + params.Del("foo") + v, _ = url.ParseQuery(string(p.GetParams())) + assert.Equal(t, params, v) +} diff --git a/services/contexttest/context_tests.go b/services/contexttest/context_tests.go index 44d9f4a70f..33e632ea4d 100644 --- a/services/contexttest/context_tests.go +++ b/services/contexttest/context_tests.go @@ -143,8 +143,9 @@ func LoadRepoCommit(t *testing.T, ctx gocontext.Context) { gitRepo, err := gitrepo.OpenRepository(ctx, repo.Repository) require.NoError(t, err) - defer gitRepo.Close() - + t.Cleanup(func() { + gitRepo.Close() + }) if repo.RefFullName == "" { repo.RefFullName = git_module.RefNameFromBranch(repo.Repository.DefaultBranch) } @@ -161,8 +162,10 @@ func LoadUser(t *testing.T, ctx gocontext.Context, userID int64) { switch ctx := ctx.(type) { case *context.Context: ctx.Doer = doer + ctx.IsSigned = true case *context.APIContext: ctx.Doer = doer + ctx.IsSigned = true default: assert.FailNow(t, "context is not *context.Context or *context.APIContext") } diff --git a/services/convert/issue.go b/services/convert/issue.go index e26412bcca..b396dd0737 100644 --- a/services/convert/issue.go +++ b/services/convert/issue.go @@ -10,6 +10,7 @@ import ( "strings" issues_model "code.gitea.io/gitea/models/issues" + access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" user_model 
"code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/label" @@ -163,11 +164,12 @@ func ToTrackedTime(ctx context.Context, doer *user_model.User, t *issues_model.T } // ToStopWatches convert Stopwatch list to api.StopWatches -func ToStopWatches(ctx context.Context, sws []*issues_model.Stopwatch) (api.StopWatches, error) { +func ToStopWatches(ctx context.Context, doer *user_model.User, sws []*issues_model.Stopwatch) (api.StopWatches, error) { result := api.StopWatches(make([]api.StopWatch, 0, len(sws))) issueCache := make(map[int64]*issues_model.Issue) repoCache := make(map[int64]*repo_model.Repository) + permCache := make(map[int64]access_model.Permission) var ( issue *issues_model.Issue repo *repo_model.Repository @@ -182,13 +184,30 @@ func ToStopWatches(ctx context.Context, sws []*issues_model.Stopwatch) (api.Stop if err != nil { return nil, err } + issueCache[sw.IssueID] = issue } repo, ok = repoCache[issue.RepoID] if !ok { repo, err = repo_model.GetRepositoryByID(ctx, issue.RepoID) if err != nil { - return nil, err + log.Error("GetRepositoryByID(%d): %v", issue.RepoID, err) + continue } + repoCache[issue.RepoID] = repo + } + + // ADD: Check user permissions + perm, ok := permCache[repo.ID] + if !ok { + perm, err = access_model.GetUserRepoPermission(ctx, repo, doer) + if err != nil { + continue + } + permCache[repo.ID] = perm + } + + if !perm.CanReadIssuesOrPulls(issue.IsPull) { + continue } result = append(result, api.StopWatch{ diff --git a/services/convert/issue_test.go b/services/convert/issue_test.go index 4d780f3f00..a12a69288a 100644 --- a/services/convert/issue_test.go +++ b/services/convert/issue_test.go @@ -8,9 +8,11 @@ import ( "testing" "time" + "code.gitea.io/gitea/models/db" issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" 
"code.gitea.io/gitea/modules/timeutil" @@ -55,3 +57,29 @@ func TestMilestone_APIFormat(t *testing.T) { Deadline: milestone.DeadlineUnix.AsTimePtr(), }, *ToAPIMilestone(milestone)) } + +func TestToStopWatchesRespectsPermissions(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + ctx := t.Context() + publicSW := unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{ID: 1}) + privateIssue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{RepoID: 3}) + privateSW := &issues_model.Stopwatch{IssueID: privateIssue.ID, UserID: 5} + assert.NoError(t, db.Insert(ctx, privateSW)) + assert.NotZero(t, privateSW.ID) + + regularUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}) + adminUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) + + sws := []*issues_model.Stopwatch{publicSW, privateSW} + + visible, err := ToStopWatches(ctx, regularUser, sws) + assert.NoError(t, err) + assert.Len(t, visible, 1) + assert.Equal(t, "repo1", visible[0].RepoName) + + visibleAdmin, err := ToStopWatches(ctx, adminUser, sws) + assert.NoError(t, err) + assert.Len(t, visibleAdmin, 2) + assert.ElementsMatch(t, []string{"repo1", "repo3"}, []string{visibleAdmin[0].RepoName, visibleAdmin[1].RepoName}) +} diff --git a/services/convert/notification.go b/services/convert/notification.go index 69470638be..87166501a6 100644 --- a/services/convert/notification.go +++ b/services/convert/notification.go @@ -8,8 +8,8 @@ import ( "net/url" activities_model "code.gitea.io/gitea/models/activities" - "code.gitea.io/gitea/models/perm" access_model "code.gitea.io/gitea/models/perm/access" + "code.gitea.io/gitea/modules/log" api "code.gitea.io/gitea/modules/structs" ) @@ -25,11 +25,17 @@ func ToNotificationThread(ctx context.Context, n *activities_model.Notification) // since user only get notifications when he has access to use minimal access mode if n.Repository != nil { - result.Repository = ToRepo(ctx, n.Repository, 
access_model.Permission{AccessMode: perm.AccessModeRead}) - - // This permission is not correct and we should not be reporting it - for repository := result.Repository; repository != nil; repository = repository.Parent { - repository.Permissions = nil + perm, err := access_model.GetUserRepoPermission(ctx, n.Repository, n.User) + if err != nil { + log.Error("GetUserRepoPermission failed: %v", err) + return result + } + if perm.HasAnyUnitAccessOrPublicAccess() { // if user has been revoked access to repo, do not show repo info + result.Repository = ToRepo(ctx, n.Repository, perm) + // This permission is not correct and we should not be reporting it + for repository := result.Repository; repository != nil; repository = repository.Parent { + repository.Permissions = nil + } } } diff --git a/services/convert/notification_test.go b/services/convert/notification_test.go new file mode 100644 index 0000000000..718a070819 --- /dev/null +++ b/services/convert/notification_test.go @@ -0,0 +1,57 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package convert + +import ( + "testing" + + activities_model "code.gitea.io/gitea/models/activities" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/timeutil" + + "github.com/stretchr/testify/assert" +) + +func TestToNotificationThreadIncludesRepoForAccessibleUser(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + n := newRepoNotification(t, 1, 4) + thread := ToNotificationThread(t.Context(), n) + + if assert.NotNil(t, thread.Repository) { + assert.Equal(t, n.Repository.FullName(), thread.Repository.FullName) + assert.Nil(t, thread.Repository.Permissions) + } +} + +func TestToNotificationThreadOmitsRepoWhenAccessRevoked(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + n := newRepoNotification(t, 2, 4) + thread := ToNotificationThread(t.Context(), n) + + assert.Nil(t, thread.Repository) +} + +func newRepoNotification(t *testing.T, repoID, userID int64) *activities_model.Notification { + t.Helper() + + ctx := t.Context() + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID}) + assert.NoError(t, repo.LoadOwner(ctx)) + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID}) + + return &activities_model.Notification{ + ID: repoID*1000 + userID, + UserID: user.ID, + RepoID: repo.ID, + Status: activities_model.NotificationStatusUnread, + Source: activities_model.NotificationSourceRepository, + UpdatedUnix: timeutil.TimeStampNow(), + Repository: repo, + User: user, + } +} diff --git a/services/convert/repository.go b/services/convert/repository.go index a364591bb8..da4e59d7a9 100644 --- a/services/convert/repository.go +++ b/services/convert/repository.go @@ -34,7 +34,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR permissionInRepo.SetUnitsWithDefaultAccessMode(repo.Units, 
permissionInRepo.AccessMode) } - // TODO: ideally we should pass "doer" into "ToRepo" to to make CloneLink could generate user-related links + // TODO: ideally we should pass "doer" into "ToRepo" to make CloneLink could generate user-related links // And passing "doer" in will also fix other FIXMEs in this file. cloneLink := repo.CloneLinkGeneral(ctx) // no doer at the moment permission := &api.Permission{ @@ -127,20 +127,10 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR projectsMode = config.ProjectsMode } - hasReleases := false - if _, err := repo.GetUnit(ctx, unit_model.TypeReleases); err == nil { - hasReleases = true - } - - hasPackages := false - if _, err := repo.GetUnit(ctx, unit_model.TypePackages); err == nil { - hasPackages = true - } - - hasActions := false - if _, err := repo.GetUnit(ctx, unit_model.TypeActions); err == nil { - hasActions = true - } + hasCode := repo.UnitEnabled(ctx, unit_model.TypeCode) + hasReleases := repo.UnitEnabled(ctx, unit_model.TypeReleases) + hasPackages := repo.UnitEnabled(ctx, unit_model.TypePackages) + hasActions := repo.UnitEnabled(ctx, unit_model.TypeActions) if err := repo.LoadOwner(ctx); err != nil { return nil @@ -221,6 +211,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR Updated: repo.UpdatedUnix.AsTime(), ArchivedAt: repo.ArchivedUnix.AsTime(), Permissions: permission, + HasCode: hasCode, HasIssues: hasIssues, ExternalTracker: externalTracker, InternalTracker: internalTracker, diff --git a/services/cron/tasks_basic.go b/services/cron/tasks_basic.go index 48380b7b9a..c620959cc1 100644 --- a/services/cron/tasks_basic.go +++ b/services/cron/tasks_basic.go @@ -54,7 +54,7 @@ func registerRepoHealthCheck() { RunAtStart: false, Schedule: "@midnight", }, - Timeout: time.Duration(setting.Git.Timeout.Default) * time.Second, + Timeout: time.Duration(setting.Git.Timeout.GC) * time.Second, Args: []string{}, }, func(ctx context.Context, _ *user_model.User, 
config Config) error { rhcConfig := config.(*RepoHealthCheckConfig) diff --git a/services/doctor/heads.go b/services/doctor/heads.go index bdadfa674c..4d34b18e18 100644 --- a/services/doctor/heads.go +++ b/services/doctor/heads.go @@ -20,10 +20,10 @@ func synchronizeRepoHeads(ctx context.Context, logger log.Logger, autofix bool) numReposUpdated := 0 err := iterateRepositories(ctx, func(repo *repo_model.Repository) error { numRepos++ - _, defaultBranchErr := gitrepo.RunCmdString(ctx, repo, + _, _, defaultBranchErr := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse").AddDashesAndList(repo.DefaultBranch)) - head, headErr := gitrepo.RunCmdString(ctx, repo, + head, _, headErr := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "--short", "HEAD")) // what we expect: default branch is valid, and HEAD points to it diff --git a/services/doctor/mergebase.go b/services/doctor/mergebase.go index 852e37f415..a76ed8afb7 100644 --- a/services/doctor/mergebase.go +++ b/services/doctor/mergebase.go @@ -43,17 +43,17 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro if !pr.HasMerged { var err error - pr.MergeBase, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitHeadRefName())) + pr.MergeBase, _, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitHeadRefName())) if err != nil { var err2 error - pr.MergeBase, err2 = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse").AddDynamicArguments(git.BranchPrefix+pr.BaseBranch)) + pr.MergeBase, _, err2 = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse").AddDynamicArguments(git.BranchPrefix+pr.BaseBranch)) if err2 != nil { logger.Warn("Unable to get merge base for PR ID %d, #%d onto %s in %s/%s. 
Error: %v & %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2) return nil } } } else { - parentsString, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID)) + parentsString, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID)) if err != nil { logger.Warn("Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err) return nil @@ -66,7 +66,7 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro refs := append([]string{}, parents[1:]...) refs = append(refs, pr.GetGitHeadRefName()) cmd := gitcmd.NewCommand("merge-base").AddDashesAndList(refs...) - pr.MergeBase, err = gitrepo.RunCmdString(ctx, repo, cmd) + pr.MergeBase, _, err = gitrepo.RunCmdString(ctx, repo, cmd) if err != nil { logger.Warn("Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s. 
Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err) return nil diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go index 2d33d2b42b..e4545570c8 100644 --- a/services/forms/repo_form.go +++ b/services/forms/repo_form.go @@ -405,13 +405,6 @@ func (f *NewPackagistHookForm) Validate(req *http.Request, errs binding.Errors) return middleware.Validate(errs, ctx.Data, f, ctx.Locale) } -// .___ -// | | ______ ________ __ ____ -// | |/ ___// ___/ | \_/ __ \ -// | |\___ \ \___ \| | /\ ___/ -// |___/____ >____ >____/ \___ > -// \/ \/ \/ - // CreateIssueForm form for creating issue type CreateIssueForm struct { Title string `binding:"Required;MaxSize(255)"` diff --git a/services/git/compare.go b/services/git/compare.go index e4996f357a..6c49fff26a 100644 --- a/services/git/compare.go +++ b/services/git/compare.go @@ -6,14 +6,11 @@ package git import ( "context" "fmt" - "strconv" - "time" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" - "code.gitea.io/gitea/modules/graceful" - logger "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" ) // CompareInfo represents needed information for comparing references. @@ -25,7 +22,7 @@ type CompareInfo struct { HeadGitRepo *git.Repository HeadRef git.RefName HeadCommitID string - DirectComparison bool + CompareSeparator string MergeBase string Commits []*git.Commit NumFiles int @@ -39,77 +36,66 @@ func (ci *CompareInfo) IsSameRef() bool { return ci.IsSameRepository() && ci.BaseRef == ci.HeadRef } +func (ci *CompareInfo) DirectComparison() bool { + // FIXME: the design of "DirectComparison" is wrong, it loses the information of `^` + // To correctly handle the comparison, developers should use `ci.CompareSeparator` directly, all "DirectComparison" related code should be rewritten. + return ci.CompareSeparator == ".." 
+} + // GetCompareInfo generates and returns compare information between base and head branches of repositories. func GetCompareInfo(ctx context.Context, baseRepo, headRepo *repo_model.Repository, headGitRepo *git.Repository, baseRef, headRef git.RefName, directComparison, fileOnly bool) (_ *CompareInfo, err error) { - var ( - remoteBranch string - tmpRemote string - ) - - // We don't need a temporary remote for same repository. - if baseRepo.ID != headRepo.ID { - // Add a temporary remote - tmpRemote = strconv.FormatInt(time.Now().UnixNano(), 10) - if err = gitrepo.GitRemoteAdd(ctx, headRepo, tmpRemote, baseRepo.RepoPath()); err != nil { - return nil, fmt.Errorf("GitRemoteAdd: %w", err) - } - defer func() { - if err := gitrepo.GitRemoteRemove(graceful.GetManager().ShutdownContext(), headRepo, tmpRemote); err != nil { - logger.Error("GetPullRequestInfo: GitRemoteRemove: %v", err) - } - }() - } - compareInfo := &CompareInfo{ BaseRepo: baseRepo, BaseRef: baseRef, HeadRepo: headRepo, HeadGitRepo: headGitRepo, HeadRef: headRef, - DirectComparison: directComparison, + CompareSeparator: util.Iif(directComparison, "..", "..."), } + compareInfo.BaseCommitID, err = gitrepo.GetFullCommitID(ctx, baseRepo, baseRef.String()) + if err != nil { + return nil, err + } compareInfo.HeadCommitID, err = gitrepo.GetFullCommitID(ctx, headRepo, headRef.String()) if err != nil { - compareInfo.HeadCommitID = headRef.String() + return nil, err } - // FIXME: It seems we don't need mergebase if it's a direct comparison? - compareInfo.MergeBase, remoteBranch, err = headGitRepo.GetMergeBase(tmpRemote, baseRef.String(), headRef.String()) - if err == nil { - compareInfo.BaseCommitID, err = gitrepo.GetFullCommitID(ctx, headRepo, remoteBranch) - if err != nil { - compareInfo.BaseCommitID = remoteBranch - } - separator := "..." - baseCommitID := compareInfo.MergeBase - if directComparison { - separator = ".." 
- baseCommitID = compareInfo.BaseCommitID - } - - // We have a common base - therefore we know that ... should work - if !fileOnly { - compareInfo.Commits, err = headGitRepo.ShowPrettyFormatLogToList(ctx, baseCommitID+separator+headRef.String()) - if err != nil { - return nil, fmt.Errorf("ShowPrettyFormatLogToList: %w", err) + // if they are not the same repository, then we need to fetch the base commit into the head repository + // because we will use headGitRepo in the following code + if baseRepo.ID != headRepo.ID { + exist := headGitRepo.IsReferenceExist(compareInfo.BaseCommitID) + if !exist { + if err := gitrepo.FetchRemoteCommit(ctx, headRepo, baseRepo, compareInfo.BaseCommitID); err != nil { + return nil, fmt.Errorf("FetchRemoteCommit: %w", err) } - } else { - compareInfo.Commits = []*git.Commit{} + } + } + + if !directComparison { + compareInfo.MergeBase, err = gitrepo.MergeBase(ctx, headRepo, compareInfo.BaseCommitID, compareInfo.HeadCommitID) + if err != nil { + return nil, fmt.Errorf("MergeBase: %w", err) + } + } else { + compareInfo.MergeBase = compareInfo.BaseCommitID + } + + // We have a common base - therefore we know that ... should work + if !fileOnly { + compareInfo.Commits, err = headGitRepo.ShowPrettyFormatLogToList(ctx, compareInfo.BaseCommitID+compareInfo.CompareSeparator+compareInfo.HeadCommitID) + if err != nil { + return nil, fmt.Errorf("ShowPrettyFormatLogToList: %w", err) } } else { compareInfo.Commits = []*git.Commit{} - compareInfo.MergeBase, err = gitrepo.GetFullCommitID(ctx, headRepo, remoteBranch) - if err != nil { - compareInfo.MergeBase = remoteBranch - } - compareInfo.BaseCommitID = compareInfo.MergeBase } // Count number of changed files. 
// This probably should be removed as we need to use shortstat elsewhere // Now there is git diff --shortstat but this appears to be slower than simply iterating with --nameonly - compareInfo.NumFiles, err = headGitRepo.GetDiffNumChangedFiles(remoteBranch, headRef.String(), directComparison) + compareInfo.NumFiles, err = headGitRepo.GetDiffNumChangedFiles(compareInfo.BaseCommitID, compareInfo.HeadCommitID, directComparison) if err != nil { return nil, err } diff --git a/services/gitdiff/git_diff_tree.go b/services/gitdiff/git_diff_tree.go index 2a3c7c9445..b4f26210be 100644 --- a/services/gitdiff/git_diff_tree.go +++ b/services/gitdiff/git_diff_tree.go @@ -166,16 +166,6 @@ func parseGitDiffTreeLine(line string) (*DiffTreeRecord, error) { return nil, fmt.Errorf("unparsable output for diff-tree --raw: `%s`, expected 5 space delimited values got %d)", line, len(fields)) } - baseMode, err := git.ParseEntryMode(fields[0]) - if err != nil { - return nil, err - } - - headMode, err := git.ParseEntryMode(fields[1]) - if err != nil { - return nil, err - } - baseBlobID := fields[2] headBlobID := fields[3] @@ -201,8 +191,8 @@ func parseGitDiffTreeLine(line string) (*DiffTreeRecord, error) { return &DiffTreeRecord{ Status: status, Score: score, - BaseMode: baseMode, - HeadMode: headMode, + BaseMode: git.ParseEntryMode(fields[0]), + HeadMode: git.ParseEntryMode(fields[1]), BaseBlobID: baseBlobID, HeadBlobID: headBlobID, BasePath: basePath, diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go index 17eb3d4280..3728f50d21 100644 --- a/services/gitdiff/gitdiff.go +++ b/services/gitdiff/gitdiff.go @@ -16,7 +16,6 @@ import ( "path" "sort" "strings" - "time" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" @@ -200,6 +199,11 @@ type DiffBlobExcerptData struct { AfterCommitID string } +const ( + DiffStyleSplit = "split" + DiffStyleUnified = "unified" +) + func (d *DiffLine) RenderBlobExcerptButtons(fileNameHash string, data 
*DiffBlobExcerptData) template.HTML { dataHiddenCommentIDs := strings.Join(base.Int64sToStrings(d.SectionInfo.HiddenCommentIDs), ",") anchor := fmt.Sprintf("diff-%sK%d", fileNameHash, d.SectionInfo.RightIdx) @@ -327,7 +331,7 @@ func (diffSection *DiffSection) getLineContentForRender(lineIdx int, diffLine *D if setting.Git.DisableDiffHighlight { return template.HTML(html.EscapeString(diffLine.Content[1:])) } - h, _ = highlight.Code(diffSection.FileName, fileLanguage, diffLine.Content[1:]) + h, _ = highlight.RenderCodeFast(diffSection.FileName, fileLanguage, diffLine.Content[1:]) return h } @@ -394,20 +398,20 @@ type DiffFile struct { isAmbiguous bool // basic fields (parsed from diff result) - Name string - NameHash string - OldName string - Addition int - Deletion int - Type DiffFileType - Mode string - OldMode string - IsCreated bool - IsDeleted bool - IsBin bool - IsLFSFile bool - IsRenamed bool - IsSubmodule bool + Name string + NameHash string + OldName string + Addition int + Deletion int + Type DiffFileType + EntryMode string + OldEntryMode string + IsCreated bool + IsDeleted bool + IsBin bool + IsLFSFile bool + IsRenamed bool + IsSubmodule bool // basic fields but for render purpose only Sections []*DiffSection IsIncomplete bool @@ -496,21 +500,36 @@ func (diffFile *DiffFile) ShouldBeHidden() bool { return diffFile.IsGenerated || diffFile.IsViewed } -func (diffFile *DiffFile) ModeTranslationKey(mode string) string { - switch mode { - case "040000": - return "git.filemode.directory" - case "100644": - return "git.filemode.normal_file" - case "100755": - return "git.filemode.executable_file" - case "120000": - return "git.filemode.symbolic_link" - case "160000": - return "git.filemode.submodule" - default: - return mode +func (diffFile *DiffFile) TranslateDiffEntryMode(locale translation.Locale) string { + entryModeTr := func(mode string) string { + entryMode := git.ParseEntryMode(mode) + switch { + case entryMode.IsDir(): + return 
locale.TrString("git.filemode.directory") + case entryMode.IsRegular(): + return locale.TrString("git.filemode.normal_file") + case entryMode.IsExecutable(): + return locale.TrString("git.filemode.executable_file") + case entryMode.IsLink(): + return locale.TrString("git.filemode.symbolic_link") + case entryMode.IsSubModule(): + return locale.TrString("git.filemode.submodule") + default: + return mode + } } + + if diffFile.EntryMode != "" && diffFile.OldEntryMode != "" { + oldMode := entryModeTr(diffFile.OldEntryMode) + newMode := entryModeTr(diffFile.EntryMode) + return locale.TrString("git.filemode.changed_filemode", oldMode, newMode) + } + if diffFile.EntryMode != "" { + if entryMode := git.ParseEntryMode(diffFile.EntryMode); !entryMode.IsRegular() { + return entryModeTr(diffFile.EntryMode) + } + } + return "" } type limitByteWriter struct { @@ -690,10 +709,10 @@ parsingLoop: strings.HasPrefix(line, "new mode "): if strings.HasPrefix(line, "old mode ") { - curFile.OldMode = prepareValue(line, "old mode ") + curFile.OldEntryMode = prepareValue(line, "old mode ") } if strings.HasPrefix(line, "new mode ") { - curFile.Mode = prepareValue(line, "new mode ") + curFile.EntryMode = prepareValue(line, "new mode ") } if strings.HasSuffix(line, " 160000\n") { curFile.IsSubmodule, curFile.SubmoduleDiffInfo = true, &SubmoduleDiffInfo{} @@ -728,7 +747,7 @@ parsingLoop: curFile.Type = DiffFileAdd curFile.IsCreated = true if strings.HasPrefix(line, "new file mode ") { - curFile.Mode = prepareValue(line, "new file mode ") + curFile.EntryMode = prepareValue(line, "new file mode ") } if strings.HasSuffix(line, " 160000\n") { curFile.IsSubmodule, curFile.SubmoduleDiffInfo = true, &SubmoduleDiffInfo{} @@ -1244,23 +1263,14 @@ func getDiffBasic(ctx context.Context, gitRepo *git.Repository, opts *DiffOption cmdCtx, cmdCancel := context.WithCancel(ctx) defer cmdCancel() - reader, writer := io.Pipe() - defer func() { - _ = reader.Close() - _ = writer.Close() - }() - + reader, readerClose 
:= cmdDiff.MakeStdoutPipe() + defer readerClose() go func() { - stderr := &bytes.Buffer{} - if err := cmdDiff.WithTimeout(time.Duration(setting.Git.Timeout.Default) * time.Second). + if err := cmdDiff. WithDir(repoPath). - WithStdout(writer). - WithStderr(stderr). - Run(cmdCtx); err != nil && !git.IsErrCanceledOrKilled(err) { - log.Error("error during GetDiff(git diff dir: %s): %v, stderr: %s", repoPath, err, stderr.String()) + RunWithStderr(cmdCtx); err != nil && !gitcmd.IsErrorCanceledOrKilled(err) { + log.Error("error during GetDiff(git diff dir: %s): %v", repoPath, err) } - - _ = writer.Close() }() diff, err := ParsePatch(cmdCtx, opts.MaxLines, opts.MaxLineCharacters, opts.MaxFiles, reader, parsePatchSkipToFile) @@ -1339,7 +1349,7 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit func highlightCodeLines(diffFile *DiffFile, isLeft bool, rawContent []byte) map[int]template.HTML { content := util.UnsafeBytesToString(charset.ToUTF8(rawContent, charset.ConvertOpts{})) - highlightedNewContent, _ := highlight.Code(diffFile.Name, diffFile.Language, content) + highlightedNewContent, _ := highlight.RenderCodeFast(diffFile.Name, diffFile.Language, content) unsafeLines := highlight.UnsafeSplitHighlightedLines(highlightedNewContent) lines := make(map[int]template.HTML, len(unsafeLines)) // only save the highlighted lines we need, but not the whole file, to save memory diff --git a/services/issue/assignee.go b/services/issue/assignee.go index ba9c91e0ed..97b32d5865 100644 --- a/services/issue/assignee.go +++ b/services/issue/assignee.go @@ -70,7 +70,7 @@ func ReviewRequest(ctx context.Context, issue *issues_model.Issue, doer *user_mo } if isAdd { - comment, err = issues_model.AddReviewRequest(ctx, issue, reviewer, doer) + comment, err = issues_model.AddReviewRequest(ctx, issue, reviewer, doer, false) } else { comment, err = issues_model.RemoveReviewRequest(ctx, issue, reviewer, doer) } @@ -224,7 +224,7 @@ func TeamReviewRequest(ctx 
context.Context, issue *issues_model.Issue, doer *use return nil, err } if isAdd { - comment, err = issues_model.AddTeamReviewRequest(ctx, issue, reviewer, doer) + comment, err = issues_model.AddTeamReviewRequest(ctx, issue, reviewer, doer, false) } else { comment, err = issues_model.RemoveTeamReviewRequest(ctx, issue, reviewer, doer) } diff --git a/services/issue/issue.go b/services/issue/issue.go index 85e70d0761..bb208e43a9 100644 --- a/services/issue/issue.go +++ b/services/issue/issue.go @@ -228,7 +228,7 @@ func AddAssigneeIfNotAssigned(ctx context.Context, issue *issues_model.Issue, do return nil, err } if isAssigned { - // nothing to to + // nothing to do return nil, nil } diff --git a/services/issue/pull.go b/services/issue/pull.go index 8ee14c0a4b..2fcf3860d0 100644 --- a/services/issue/pull.go +++ b/services/issue/pull.go @@ -7,35 +7,16 @@ import ( "context" "fmt" "slices" - "time" issues_model "code.gitea.io/gitea/models/issues" org_model "code.gitea.io/gitea/models/organization" - repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" - "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" ) -func getMergeBase(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, pr *issues_model.PullRequest, baseBranch, headBranch string) (string, error) { - // Add a temporary remote - tmpRemote := fmt.Sprintf("mergebase-%d-%d", pr.ID, time.Now().UnixNano()) - if err := gitrepo.GitRemoteAdd(ctx, repo, tmpRemote, gitRepo.Path); err != nil { - return "", fmt.Errorf("GitRemoteAdd: %w", err) - } - defer func() { - if err := gitrepo.GitRemoteRemove(graceful.GetManager().ShutdownContext(), repo, tmpRemote); err != nil { - log.Error("getMergeBase: GitRemoteRemove: %v", err) - } - }() - - mergeBase, _, err := gitRepo.GetMergeBase(tmpRemote, baseBranch, headBranch) - return mergeBase, err -} - 
type ReviewRequestNotifier struct { Comment *issues_model.Comment IsAdd bool @@ -99,11 +80,10 @@ func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullReque } // get the mergebase - mergeBase, err := getMergeBase(ctx, pr.BaseRepo, repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitHeadRefName()) + mergeBase, err := gitrepo.MergeBase(ctx, pr.BaseRepo, git.BranchPrefix+pr.BaseBranch, pr.GetGitHeadRefName()) if err != nil { return nil, err } - // https://github.com/go-gitea/gitea/issues/29763, we need to get the files changed // between the merge base and the head commit but not the base branch and the head commit changedFiles, err := repo.GetFilesChangedBetween(mergeBase, pr.GetGitHeadRefName()) @@ -149,7 +129,7 @@ func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullReque for _, u := range uniqUsers { if u.ID != issue.Poster.ID && !contain(latestReivews, u) { - comment, err := issues_model.AddReviewRequest(ctx, issue, u, issue.Poster) + comment, err := issues_model.AddReviewRequest(ctx, issue, u, issue.Poster, true) if err != nil { log.Warn("Failed add assignee user: %s to PR review: %s#%d, error: %s", u.Name, pr.BaseRepo.Name, pr.ID, err) return nil, err @@ -166,7 +146,7 @@ func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullReque } for _, t := range uniqTeams { - comment, err := issues_model.AddTeamReviewRequest(ctx, issue, t, issue.Poster) + comment, err := issues_model.AddTeamReviewRequest(ctx, issue, t, issue.Poster, true) if err != nil { log.Warn("Failed add assignee team: %s to PR review: %s#%d, error: %s", t.Name, pr.BaseRepo.Name, pr.ID, err) return nil, err diff --git a/services/lfs/locks.go b/services/lfs/locks.go index 5bc3f6b95a..c2279edaf0 100644 --- a/services/lfs/locks.go +++ b/services/lfs/locks.go @@ -90,7 +90,7 @@ func GetListLockHandler(ctx *context.Context) { }) return } - lock, err := git_model.GetLFSLockByID(ctx, v) + lock, err := git_model.GetLFSLockByIDAndRepo(ctx, v, 
repository.ID) if err != nil && !git_model.IsErrLFSLockNotExist(err) { log.Error("Unable to get lock with ID[%s]: Error: %v", v, err) } diff --git a/services/lfs/server.go b/services/lfs/server.go index 3455b4b9bd..4819437bf1 100644 --- a/services/lfs/server.go +++ b/services/lfs/server.go @@ -11,7 +11,6 @@ import ( "errors" "fmt" "io" - "maps" "net/http" "net/url" "regexp" @@ -487,40 +486,32 @@ func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, downloa rep.Error = err } else { rep.Actions = make(map[string]*lfs_module.Link) - - header := make(map[string]string) - - if len(rc.Authorization) > 0 { - header["Authorization"] = rc.Authorization - } - if download { var link *lfs_module.Link if setting.LFS.Storage.ServeDirect() { // If we have a signed url (S3, object storage), redirect to this directly. u, err := storage.LFS.URL(pointer.RelativePath(), pointer.Oid, rc.Method, nil) if u != nil && err == nil { - // Presigned url does not need the Authorization header - // https://github.com/go-gitea/gitea/issues/21525 - delete(header, "Authorization") - link = &lfs_module.Link{Href: u.String(), Header: header} + link = lfs_module.NewLink(u.String()) // Presigned url does not need the Authorization header } } if link == nil { - link = &lfs_module.Link{Href: rc.DownloadLink(pointer), Header: header} + link = lfs_module.NewLink(rc.DownloadLink(pointer)).WithHeader("Authorization", rc.Authorization) } rep.Actions["download"] = link } if upload { - rep.Actions["upload"] = &lfs_module.Link{Href: rc.UploadLink(pointer), Header: header} + // Set Transfer-Encoding header to enable chunked uploads. Required by git-lfs client to do chunked transfer. + // See: https://github.com/git-lfs/git-lfs/blob/main/tq/basic_upload.go#L58-59 + rep.Actions["upload"] = lfs_module.NewLink(rc.UploadLink(pointer)). + WithHeader("Authorization", rc.Authorization). 
+ WithHeader("Transfer-Encoding", "chunked") - verifyHeader := make(map[string]string) - maps.Copy(verifyHeader, header) - - // This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662 - verifyHeader["Accept"] = lfs_module.AcceptHeader - - rep.Actions["verify"] = &lfs_module.Link{Href: rc.VerifyLink(pointer), Header: verifyHeader} + // "Accept" header is the workaround for git-lfs < 2.8.0 (before 2019). + // This workaround could be removed in the future: https://github.com/git-lfs/git-lfs/issues/3662 + rep.Actions["verify"] = lfs_module.NewLink(rc.VerifyLink(pointer)). + WithHeader("Authorization", rc.Authorization). + WithHeader("Accept", lfs_module.AcceptHeader) } } return rep diff --git a/services/mailer/mail.go b/services/mailer/mail.go index d81b6d10af..8f831f89ad 100644 --- a/services/mailer/mail.go +++ b/services/mailer/mail.go @@ -15,7 +15,6 @@ import ( "mime" "regexp" "strings" - "sync/atomic" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -32,12 +31,10 @@ import ( const mailMaxSubjectRunes = 256 // There's no actual limit for subject in RFC 5322 -var loadedTemplates atomic.Pointer[templates.MailTemplates] - var subjectRemoveSpaces = regexp.MustCompile(`[\s]+`) -func LoadedTemplates() *templates.MailTemplates { - return loadedTemplates.Load() +func LoadedTemplates() *templates.MailRender { + return templates.MailRenderer() } // SendTestMail sends a test mail diff --git a/services/mailer/mail_issue_common.go b/services/mailer/mail_issue_common.go index d65e310288..994df6707a 100644 --- a/services/mailer/mail_issue_common.go +++ b/services/mailer/mail_issue_common.go @@ -21,6 +21,7 @@ import ( "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/util" incoming_payload "code.gitea.io/gitea/services/mailer/incoming/payload" sender_service "code.gitea.io/gitea/services/mailer/sender" 
"code.gitea.io/gitea/services/mailer/token" @@ -122,9 +123,7 @@ func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang var mailSubject bytes.Buffer if err := LoadedTemplates().SubjectTemplates.ExecuteTemplate(&mailSubject, tplName, mailMeta); err == nil { subject = sanitizeSubject(mailSubject.String()) - if subject == "" { - subject = fallback - } + subject = util.IfZero(subject, fallback) } else { log.Error("ExecuteTemplate [%s]: %v", tplName+"/subject", err) } @@ -203,7 +202,7 @@ func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang msg.SetHeader("References", references...) msg.SetHeader("List-Unsubscribe", listUnsubscribe...) - for key, value := range generateAdditionalHeadersForIssue(comment, actType, recipient) { + for key, value := range generateAdditionalHeadersForIssue(ctx, comment, actType, recipient) { msg.SetHeader(key, value) } @@ -261,14 +260,14 @@ func actionToTemplate(issue *issues_model.Issue, actionType activities_model.Act } template = "repo/" + typeName + "/" + name - ok := LoadedTemplates().BodyTemplates.Lookup(template) != nil + ok := LoadedTemplates().BodyTemplates.HasTemplate(template) if !ok && typeName != "issue" { template = "repo/issue/" + name - ok = LoadedTemplates().BodyTemplates.Lookup(template) != nil + ok = LoadedTemplates().BodyTemplates.HasTemplate(template) } if !ok { template = "repo/" + typeName + "/default" - ok = LoadedTemplates().BodyTemplates.Lookup(template) != nil + ok = LoadedTemplates().BodyTemplates.HasTemplate(template) } if !ok { template = "repo/issue/default" @@ -303,17 +302,17 @@ func generateMessageIDForIssue(issue *issues_model.Issue, comment *issues_model. 
return fmt.Sprintf("<%s/%s/%d%s@%s>", issue.Repo.FullName(), path, issue.Index, extra, setting.Domain) } -func generateAdditionalHeadersForIssue(ctx *mailComment, reason string, recipient *user_model.User) map[string]string { - repo := ctx.Issue.Repo +func generateAdditionalHeadersForIssue(ctx context.Context, comment *mailComment, reason string, recipient *user_model.User) map[string]string { + repo := comment.Issue.Repo - issueID := strconv.FormatInt(ctx.Issue.Index, 10) + issueID := strconv.FormatInt(comment.Issue.Index, 10) headers := generateMetadataHeaders(repo) - maps.Copy(headers, generateSenderRecipientHeaders(ctx.Doer, recipient)) + maps.Copy(headers, generateSenderRecipientHeaders(comment.Doer, recipient)) maps.Copy(headers, generateReasonHeaders(reason)) headers["X-Gitea-Issue-ID"] = issueID - headers["X-Gitea-Issue-Link"] = ctx.Issue.HTMLURL(context.TODO()) // FIXME: use proper context + headers["X-Gitea-Issue-Link"] = comment.Issue.HTMLURL(ctx) headers["X-GitLab-Issue-IID"] = issueID return headers diff --git a/services/mailer/mail_release.go b/services/mailer/mail_release.go index 248cf0ab90..1f940f33df 100644 --- a/services/mailer/mail_release.go +++ b/services/mailer/mail_release.go @@ -7,9 +7,12 @@ import ( "bytes" "context" "fmt" + "slices" + access_model "code.gitea.io/gitea/models/perm/access" "code.gitea.io/gitea/models/renderhelper" repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup/markdown" @@ -44,6 +47,16 @@ func MailNewRelease(ctx context.Context, rel *repo_model.Release) { return } + if err := rel.LoadRepo(ctx); err != nil { + log.Error("rel.LoadRepo: %v", err) + return + } + + // delete publisher or any users with no permission + recipients = slices.DeleteFunc(recipients, func(u *user_model.User) bool { + return u.ID == rel.PublisherID || !access_model.CheckRepoUnitUser(ctx, rel.Repo, u, 
unit.TypeReleases) + }) + langMap := make(map[string][]*user_model.User) for _, user := range recipients { if user.ID != rel.PublisherID { diff --git a/services/mailer/mail_release_test.go b/services/mailer/mail_release_test.go new file mode 100644 index 0000000000..6fc8587f98 --- /dev/null +++ b/services/mailer/mail_release_test.go @@ -0,0 +1,64 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package mailer + +import ( + "testing" + + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/test" + sender_service "code.gitea.io/gitea/services/mailer/sender" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestMailNewReleaseFiltersUnauthorizedWatchers(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + defer test.MockVariableValue(&setting.MailService)() + defer test.MockVariableValue(&setting.Domain)() + defer test.MockVariableValue(&setting.AppName)() + defer test.MockVariableValue(&setting.AppURL)() + + setting.MailService = &setting.Mailer{ + From: "Gitea", + FromEmail: "noreply@example.com", + } + setting.Domain = "example.com" + setting.AppName = "Gitea" + setting.AppURL = "https://example.com/" + defer mockMailTemplates(string(tplNewReleaseMail), "{{.Subject}}", "

    {{.Release.TagName}}

    ")() + + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}) + require.True(t, repo.IsPrivate) + + admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}) + unauthorized := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}) + + assert.NoError(t, repo_model.WatchRepo(t.Context(), admin, repo, true)) + assert.NoError(t, repo_model.WatchRepo(t.Context(), unauthorized, repo, true)) + + rel := unittest.AssertExistsAndLoadBean(t, &repo_model.Release{ID: 11}) + rel.Repo = nil + rel.Publisher = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: rel.PublisherID}) + + var sent []*sender_service.Message + origSend := SendAsync + SendAsync = func(msgs ...*sender_service.Message) { + sent = append(sent, msgs...) + } + defer func() { + SendAsync = origSend + }() + + MailNewRelease(t.Context(), rel) + + require.Len(t, sent, 1) + assert.Equal(t, admin.EmailTo(), sent[0].To) + assert.NotEqual(t, unauthorized.EmailTo(), sent[0].To) +} diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go index 8a4a589357..caa072725a 100644 --- a/services/mailer/mail_test.go +++ b/services/mailer/mail_test.go @@ -96,11 +96,8 @@ func prepareMailerBase64Test(t *testing.T) (doer *user_model.User, repo *repo_mo return user, repo, issue, att1, att2 } -func prepareMailTemplates(name, subjectTmpl, bodyTmpl string) { - loadedTemplates.Store(&templates.MailTemplates{ - SubjectTemplates: texttmpl.Must(texttmpl.New(name).Parse(subjectTmpl)), - BodyTemplates: template.Must(template.New(name).Parse(bodyTmpl)), - }) +func mockMailTemplates(name, subjectTmpl, bodyTmpl string) func() { + return templates.MailRenderer().MockTemplate(name, subjectTmpl, bodyTmpl) } func TestComposeIssueComment(t *testing.T) { @@ -112,10 +109,8 @@ func TestComposeIssueComment(t *testing.T) { }, }) - setting.IncomingEmail.Enabled = true - defer func() { setting.IncomingEmail.Enabled = false }() - - prepareMailTemplates("repo/issue/comment", subjectTpl, bodyTpl) + 
defer test.MockVariableValue(&setting.IncomingEmail.Enabled, true)() + defer mockMailTemplates("repo/issue/comment", subjectTpl, bodyTpl)() recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}} msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{ @@ -160,7 +155,7 @@ func TestComposeIssueComment(t *testing.T) { func TestMailMentionsComment(t *testing.T) { doer, _, issue, comment := prepareMailerTest(t) comment.Poster = doer - prepareMailTemplates("repo/issue/comment", subjectTpl, bodyTpl) + defer mockMailTemplates("repo/issue/comment", subjectTpl, bodyTpl)() mails := 0 defer test.MockVariableValue(&SendAsync, func(msgs ...*sender_service.Message) { @@ -175,7 +170,7 @@ func TestMailMentionsComment(t *testing.T) { func TestComposeIssueMessage(t *testing.T) { doer, _, issue, _ := prepareMailerTest(t) - prepareMailTemplates("repo/issue/new", subjectTpl, bodyTpl) + defer mockMailTemplates("repo/issue/new", subjectTpl, bodyTpl)() recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}} msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{ Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue, @@ -204,14 +199,10 @@ func TestTemplateSelection(t *testing.T) { doer, repo, issue, comment := prepareMailerTest(t) recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}} - prepareMailTemplates("repo/issue/default", "repo/issue/default/subject", "repo/issue/default/body") - - texttmpl.Must(LoadedTemplates().SubjectTemplates.New("repo/issue/new").Parse("repo/issue/new/subject")) - texttmpl.Must(LoadedTemplates().SubjectTemplates.New("repo/pull/comment").Parse("repo/pull/comment/subject")) - texttmpl.Must(LoadedTemplates().SubjectTemplates.New("repo/issue/close").Parse("")) // Must default to a fallback subject - 
template.Must(LoadedTemplates().BodyTemplates.New("repo/issue/new").Parse("repo/issue/new/body")) - template.Must(LoadedTemplates().BodyTemplates.New("repo/pull/comment").Parse("repo/pull/comment/body")) - template.Must(LoadedTemplates().BodyTemplates.New("repo/issue/close").Parse("repo/issue/close/body")) + defer mockMailTemplates("repo/issue/default", "repo/issue/default/subject", "repo/issue/default/body")() + defer mockMailTemplates("repo/issue/new", "repo/issue/new/subject", "repo/issue/new/body")() + defer mockMailTemplates("repo/pull/comment", "repo/pull/comment/subject", "repo/pull/comment/body")() + defer mockMailTemplates("repo/issue/close", "", "repo/issue/close/body")() // Must default to a fallback subject expect := func(t *testing.T, msg *sender_service.Message, expSubject, expBody string) { subject := msg.ToMessage().GetGenHeader("Subject") @@ -256,7 +247,7 @@ func TestTemplateServices(t *testing.T) { expect := func(t *testing.T, issue *issues_model.Issue, comment *issues_model.Comment, doer *user_model.User, actionType activities_model.ActionType, fromMention bool, tplSubject, tplBody, expSubject, expBody string, ) { - prepareMailTemplates("repo/issue/default", tplSubject, tplBody) + defer mockMailTemplates("repo/issue/default", tplSubject, tplBody)() recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}} msg := testComposeIssueCommentMessage(t, &mailComment{ Issue: issue, Doer: doer, ActionType: actionType, @@ -304,7 +295,7 @@ func TestGenerateAdditionalHeadersForIssue(t *testing.T) { comment := &mailComment{Issue: issue, Doer: doer} recipient := &user_model.User{Name: "test", Email: "test@gitea.com"} - headers := generateAdditionalHeadersForIssue(comment, "dummy-reason", recipient) + headers := generateAdditionalHeadersForIssue(t.Context(), comment, "dummy-reason", recipient) expected := map[string]string{ "List-ID": "user2/repo1 ", @@ -523,7 +514,7 @@ func TestEmbedBase64Images(t *testing.T) { att2ImgBase64 := fmt.Sprintf(``, 
att2Base64) t.Run("ComposeMessage", func(t *testing.T) { - prepareMailTemplates("repo/issue/new", subjectTpl, bodyTpl) + defer mockMailTemplates("repo/issue/new", subjectTpl, bodyTpl)() issue.Content = fmt.Sprintf(`MSG-BEFORE MSG-AFTER`, att1.UUID) require.NoError(t, issues_model.UpdateIssueCols(t.Context(), issue, "content")) diff --git a/services/mailer/mailer.go b/services/mailer/mailer.go index db00aac4f1..05dd5d8588 100644 --- a/services/mailer/mailer.go +++ b/services/mailer/mailer.go @@ -43,7 +43,7 @@ func NewContext(ctx context.Context) { sender = &sender_service.SMTPSender{} } - templates.LoadMailTemplates(ctx, &loadedTemplates) + _ = templates.MailRenderer() mailQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "mail", func(items ...*sender_service.Message) []*sender_service.Message { for _, msg := range items { diff --git a/services/markup/renderhelper_codepreview_test.go b/services/markup/renderhelper_codepreview_test.go index 6665f0d009..c84845e7ea 100644 --- a/services/markup/renderhelper_codepreview_test.go +++ b/services/markup/renderhelper_codepreview_test.go @@ -18,7 +18,7 @@ import ( func TestRenderHelperCodePreview(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()}) + ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()}) htm, err := renderRepoFileCodePreview(ctx, markup.RenderCodePreviewOptions{ FullURL: "http://full", OwnerName: "user2", @@ -46,7 +46,7 @@ func TestRenderHelperCodePreview(t *testing.T) { `, string(htm)) - ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()}) + ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()}) htm, err = renderRepoFileCodePreview(ctx, markup.RenderCodePreviewOptions{ FullURL: "http://full", OwnerName: "user2", 
@@ -70,7 +70,7 @@ func TestRenderHelperCodePreview(t *testing.T) { `, string(htm)) - ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()}) + ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()}) _, err = renderRepoFileCodePreview(ctx, markup.RenderCodePreviewOptions{ FullURL: "http://full", OwnerName: "user15", diff --git a/services/markup/renderhelper_issueicontitle_test.go b/services/markup/renderhelper_issueicontitle_test.go index adce8401e0..25907f4b77 100644 --- a/services/markup/renderhelper_issueicontitle_test.go +++ b/services/markup/renderhelper_issueicontitle_test.go @@ -19,7 +19,7 @@ import ( func TestRenderHelperIssueIconTitle(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()}) + ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()}) ctx.Repo.Repository = unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 1}) htm, err := renderRepoIssueIconTitle(ctx, markup.RenderIssueIconTitleOptions{ LinkHref: "/link", @@ -28,7 +28,7 @@ func TestRenderHelperIssueIconTitle(t *testing.T) { assert.NoError(t, err) assert.Equal(t, `octicon-issue-opened(16/text green) issue1 (#1)`, string(htm)) - ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()}) + ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()}) htm, err = renderRepoIssueIconTitle(ctx, markup.RenderIssueIconTitleOptions{ OwnerName: "user2", RepoName: "repo1", @@ -38,7 +38,7 @@ func TestRenderHelperIssueIconTitle(t *testing.T) { assert.NoError(t, err) assert.Equal(t, `octicon-issue-opened(16/text green) issue1 (user2/repo1#1)`, string(htm)) - ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: 
templates.HTMLRenderer()}) + ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()}) _, err = renderRepoIssueIconTitle(ctx, markup.RenderIssueIconTitleOptions{ OwnerName: "user2", RepoName: "repo2", diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go index 96c2655b3a..6f5b9bb33a 100644 --- a/services/migrations/gitea_uploader.go +++ b/services/migrations/gitea_uploader.go @@ -8,8 +8,6 @@ import ( "context" "fmt" "io" - "os" - "path/filepath" "strconv" "strings" "time" @@ -320,6 +318,7 @@ func (g *GiteaLocalUploader) CreateReleases(ctx context.Context, releases ...*ba } attach := repo_model.Attachment{ UUID: uuid.New().String(), + RepoID: g.repo.ID, Name: asset.Name, DownloadCount: int64(*asset.DownloadCount), Size: int64(*asset.Size), @@ -589,12 +588,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *ba } defer ret.Close() - pullDir := filepath.Join(g.repo.RepoPath(), "pulls") - if err = os.MkdirAll(pullDir, os.ModePerm); err != nil { - return err - } - - f, err := os.Create(filepath.Join(pullDir, fmt.Sprintf("%d.patch", pr.Number))) + f, err := gitrepo.CreateRepoFile(ctx, g.repo, fmt.Sprintf("pulls/%d.patch", pr.Number)) if err != nil { return err } @@ -668,7 +662,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *ba fetchArg = git.BranchPrefix + fetchArg } - _, err = gitrepo.RunCmdString(ctx, g.repo, gitcmd.NewCommand("fetch", "--no-tags").AddDashesAndList(remote, fetchArg)) + _, _, err = gitrepo.RunCmdString(ctx, g.repo, gitcmd.NewCommand("fetch", "--no-tags").AddDashesAndList(remote, fetchArg)) if err != nil { log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err) return head, nil @@ -703,7 +697,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *ba // The SHA is empty log.Warn("Empty reference, no pull head for PR #%d in %s/%s", pr.Number, g.repoOwner, g.repoName) } 
else { - _, err = gitrepo.RunCmdString(ctx, g.repo, gitcmd.NewCommand("rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA)) + _, _, err = gitrepo.RunCmdString(ctx, g.repo, gitcmd.NewCommand("rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA)) if err != nil { // Git update-ref remove bad references with a relative path log.Warn("Deprecated local head %s for PR #%d in %s/%s, removing %s", pr.Head.SHA, pr.Number, g.repoOwner, g.repoName, pr.GetGitHeadRefName()) @@ -739,7 +733,7 @@ func (g *GiteaLocalUploader) newPullRequest(ctx context.Context, pr *base.PullRe if pr.Base.Ref != "" && pr.Head.SHA != "" { // A PR against a tag base does not make sense - therefore pr.Base.Ref must be a branch // TODO: should we be checking for the refs/heads/ prefix on the pr.Base.Ref? (i.e. are these actually branches or refs) - pr.Base.SHA, _, err = g.gitRepo.GetMergeBase("", git.BranchPrefix+pr.Base.Ref, pr.Head.SHA) + pr.Base.SHA, err = gitrepo.MergeBase(ctx, g.repo, git.BranchPrefix+pr.Base.Ref, pr.Head.SHA) if err != nil { log.Error("Cannot determine the merge base for PR #%d in %s/%s. Error: %v", pr.Number, g.repoOwner, g.repoName, err) } @@ -901,21 +895,10 @@ func (g *GiteaLocalUploader) CreateReviews(ctx context.Context, reviews ...*base // SECURITY: The TreePath must be cleaned! 
use relative path comment.TreePath = util.PathJoinRel(comment.TreePath) - var patch string - reader, writer := io.Pipe() - defer func() { - _ = reader.Close() - _ = writer.Close() - }() - go func(comment *base.ReviewComment) { - if err := git.GetRepoRawDiffForFile(g.gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, comment.TreePath, writer); err != nil { - // We should ignore the error since the commit maybe removed when force push to the pull request - log.Warn("GetRepoRawDiffForFile failed when migrating [%s, %s, %s, %s]: %v", g.gitRepo.Path, pr.MergeBase, headCommitID, comment.TreePath, err) - } - _ = writer.Close() - }(comment) - - patch, _ = git.CutDiffAroundLine(reader, int64((&issues_model.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines) + patch, _ := git.GetFileDiffCutAroundLine( + g.gitRepo, pr.MergeBase, headCommitID, comment.TreePath, + int64((&issues_model.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines, + ) if comment.CreatedAt.IsZero() { comment.CreatedAt = review.CreatedAt diff --git a/services/migrations/github.go b/services/migrations/github.go index ae7350c016..ce631dcd42 100644 --- a/services/migrations/github.go +++ b/services/migrations/github.go @@ -329,7 +329,6 @@ func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *gith r.Assets = append(r.Assets, &base.ReleaseAsset{ ID: asset.GetID(), Name: asset.GetName(), - ContentType: asset.ContentType, Size: asset.Size, DownloadCount: asset.DownloadCount, Created: asset.CreatedAt.Time, diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go index 260fa9cd5d..cbf974af2c 100644 --- a/services/migrations/gitlab.go +++ b/services/migrations/gitlab.go @@ -316,12 +316,11 @@ func (g *GitlabDownloader) convertGitlabRelease(ctx context.Context, rel *gitlab httpClient := NewMigrationHTTPClient() - for k, asset := range rel.Assets.Links { + for _, 
asset := range rel.Assets.Links { assetID := asset.ID // Don't optimize this, for closure we need a local variable r.Assets = append(r.Assets, &base.ReleaseAsset{ ID: int64(asset.ID), Name: asset.Name, - ContentType: &rel.Assets.Sources[k].Format, Size: &zero, DownloadCount: &zero, DownloadFunc: func() (io.ReadCloser, error) { diff --git a/services/migrations/main_test.go b/services/migrations/main_test.go index d0ec6a3f8d..581af614f9 100644 --- a/services/migrations/main_test.go +++ b/services/migrations/main_test.go @@ -171,7 +171,6 @@ func assertReactionsEqual(t *testing.T, expected, actual []*base.Reaction) { func assertReleaseAssetEqual(t *testing.T, expected, actual *base.ReleaseAsset) { assert.Equal(t, expected.ID, actual.ID) assert.Equal(t, expected.Name, actual.Name) - assert.Equal(t, expected.ContentType, actual.ContentType) assert.Equal(t, expected.Size, actual.Size) assert.Equal(t, expected.DownloadCount, actual.DownloadCount) assertTimeEqual(t, expected.Created, actual.Created) diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go index f9c40049db..14a226f453 100644 --- a/services/mirror/mirror_pull.go +++ b/services/mirror/mirror_pull.go @@ -193,38 +193,19 @@ func parseRemoteUpdateOutput(output, remoteName string) []*mirrorSyncResult { return results } -func pruneBrokenReferences(ctx context.Context, - m *repo_model.Mirror, - timeout time.Duration, - stdoutBuilder, stderrBuilder *strings.Builder, - isWiki bool, -) error { - wiki := "" - var storageRepo gitrepo.Repository = m.Repo - if isWiki { - wiki = "Wiki " - storageRepo = m.Repo.WikiStorageRepo() - } - - stderrBuilder.Reset() - stdoutBuilder.Reset() - - pruneErr := gitrepo.GitRemotePrune(ctx, storageRepo, m.GetRemoteName(), timeout, stdoutBuilder, stderrBuilder) +func pruneBrokenReferences(ctx context.Context, m *repo_model.Mirror, gitRepo gitrepo.Repository, timeout time.Duration) error { + cmd := gitcmd.NewCommand("remote", 
"prune").AddDynamicArguments(m.GetRemoteName()).WithTimeout(timeout) + stdout, _, pruneErr := gitrepo.RunCmdString(ctx, gitRepo, cmd) if pruneErr != nil { - stdout := stdoutBuilder.String() - stderr := stderrBuilder.String() - - // sanitize the output, since it may contain the remote address, which may - // contain a password - stderrMessage := util.SanitizeCredentialURLs(stderr) + // sanitize the output, since it may contain the remote address, which may contain a password + stderrMessage := util.SanitizeCredentialURLs(pruneErr.Stderr()) stdoutMessage := util.SanitizeCredentialURLs(stdout) - log.Error("Failed to prune mirror repository %s%-v references:\nStdout: %s\nStderr: %s\nErr: %v", wiki, m.Repo, stdoutMessage, stderrMessage, pruneErr) - desc := fmt.Sprintf("Failed to prune mirror repository %s'%s' references: %s", wiki, storageRepo.RelativePath(), stderrMessage) + log.Error("Failed to prune mirror repository %s references:\nStdout: %s\nStderr: %s\nErr: %v", gitRepo.RelativePath(), stdoutMessage, stderrMessage, pruneErr) + desc := fmt.Sprintf("Failed to prune mirror repository %s references: %s", gitRepo.RelativePath(), stderrMessage) if err := system_model.CreateRepositoryNotice(desc); err != nil { log.Error("CreateRepositoryNotice: %v", err) } - // this if will only be reached on a successful prune so try to get the mirror again } return pruneErr } @@ -249,59 +230,46 @@ func checkRecoverableSyncError(stderrMessage string) bool { // runSync returns true if sync finished without error. 
func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bool) { - timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second - log.Trace("SyncMirrors [repo: %-v]: running git remote update...", m.Repo) - // use fetch but not remote update because git fetch support --tags but remote update doesn't - cmd := gitcmd.NewCommand("fetch") - if m.EnablePrune { - cmd.AddArguments("--prune") - } - cmd.AddArguments("--tags").AddDynamicArguments(m.GetRemoteName()) - remoteURL, remoteErr := gitrepo.GitRemoteGetURL(ctx, m.Repo, m.GetRemoteName()) if remoteErr != nil { log.Error("SyncMirrors [repo: %-v]: GetRemoteURL Error %v", m.Repo, remoteErr) return nil, false } - envs := proxy.EnvWithProxy(remoteURL.URL) + timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second - stdoutBuilder := strings.Builder{} - stderrBuilder := strings.Builder{} - if err := gitrepo.RunCmd(ctx, m.Repo, cmd.WithTimeout(timeout). - WithEnv(envs). - WithStdout(&stdoutBuilder). - WithStderr(&stderrBuilder)); err != nil { - stdout := stdoutBuilder.String() - stderr := stderrBuilder.String() + // use fetch but not remote update because git fetch support --tags but remote update doesn't + cmdFetch := func() *gitcmd.Command { + cmd := gitcmd.NewCommand("fetch", "--tags") + if m.EnablePrune { + cmd.AddArguments("--prune") + } + return cmd.AddDynamicArguments(m.GetRemoteName()).WithTimeout(timeout).WithEnv(envs) + } + var err error + var fetchOutput string // it is from fetch's stderr + fetchStdout, fetchStderr, err := gitrepo.RunCmdString(ctx, m.Repo, cmdFetch()) + if err != nil { // sanitize the output, since it may contain the remote address, which may contain a password - stderrMessage := util.SanitizeCredentialURLs(stderr) - stdoutMessage := util.SanitizeCredentialURLs(stdout) + stderrMessage := util.SanitizeCredentialURLs(fetchStderr) + stdoutMessage := util.SanitizeCredentialURLs(fetchStdout) // Now check if the error is a resolve reference due to broken reference - 
if checkRecoverableSyncError(stderr) { + if checkRecoverableSyncError(fetchStderr) { log.Warn("SyncMirrors [repo: %-v]: failed to update mirror repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err) err = nil - // Attempt prune - pruneErr := pruneBrokenReferences(ctx, m, timeout, &stdoutBuilder, &stderrBuilder, false) + pruneErr := pruneBrokenReferences(ctx, m, m.Repo, timeout) if pruneErr == nil { // Successful prune - reattempt mirror - stderrBuilder.Reset() - stdoutBuilder.Reset() - if err = gitrepo.RunCmd(ctx, m.Repo, cmd.WithTimeout(timeout). - WithStdout(&stdoutBuilder). - WithStderr(&stderrBuilder)); err != nil { - stdout := stdoutBuilder.String() - stderr := stderrBuilder.String() - - // sanitize the output, since it may contain the remote address, which may - // contain a password - stderrMessage = util.SanitizeCredentialURLs(stderr) - stdoutMessage = util.SanitizeCredentialURLs(stdout) + fetchStdout, fetchStderr, err = gitrepo.RunCmdString(ctx, m.Repo, cmdFetch()) + if err != nil { + // sanitize the output, since it may contain the remote address, which may contain a password + stderrMessage = util.SanitizeCredentialURLs(fetchStderr) + stdoutMessage = util.SanitizeCredentialURLs(fetchStdout) } } } @@ -310,13 +278,13 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo if err != nil { log.Error("SyncMirrors [repo: %-v]: failed to update mirror repository:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err) desc := fmt.Sprintf("Failed to update mirror repository '%s': %s", m.Repo.RelativePath(), stderrMessage) - if err = system_model.CreateRepositoryNotice(desc); err != nil { + if err := system_model.CreateRepositoryNotice(desc); err != nil { log.Error("CreateRepositoryNotice: %v", err) } return nil, false } } - output := stderrBuilder.String() + fetchOutput = fetchStderr // the result of "git fetch" is in stderr if err := 
gitrepo.WriteCommitGraph(ctx, m.Repo); err != nil { log.Error("SyncMirrors [repo: %-v]: %v", m.Repo, err) @@ -353,16 +321,16 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo log.Error("SyncMirrors [repo: %-v]: failed to update size for mirror repository: %v", m.Repo.FullName(), err) } + cmdRemoteUpdatePrune := func() *gitcmd.Command { + return gitcmd.NewCommand("remote", "update", "--prune"). + AddDynamicArguments(m.GetRemoteName()).WithTimeout(timeout).WithEnv(envs) + } + if repo_service.HasWiki(ctx, m.Repo) { log.Trace("SyncMirrors [repo: %-v Wiki]: running git remote update...", m.Repo) - stderrBuilder.Reset() - stdoutBuilder.Reset() - - if err := gitrepo.GitRemoteUpdatePrune(ctx, m.Repo.WikiStorageRepo(), m.GetRemoteName(), - timeout, &stdoutBuilder, &stderrBuilder); err != nil { - stdout := stdoutBuilder.String() - stderr := stderrBuilder.String() - + // the result of "git remote update" is in stderr + stdout, stderr, err := gitrepo.RunCmdString(ctx, m.Repo.WikiStorageRepo(), cmdRemoteUpdatePrune()) + if err != nil { // sanitize the output, since it may contain the remote address, which may contain a password stderrMessage := util.SanitizeCredentialURLs(stderr) stdoutMessage := util.SanitizeCredentialURLs(stdout) @@ -373,16 +341,11 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo err = nil // Attempt prune - pruneErr := pruneBrokenReferences(ctx, m, timeout, &stdoutBuilder, &stderrBuilder, true) + pruneErr := pruneBrokenReferences(ctx, m, m.Repo.WikiStorageRepo(), timeout) if pruneErr == nil { // Successful prune - reattempt mirror - stderrBuilder.Reset() - stdoutBuilder.Reset() - - if err = gitrepo.GitRemoteUpdatePrune(ctx, m.Repo.WikiStorageRepo(), m.GetRemoteName(), - timeout, &stdoutBuilder, &stderrBuilder); err != nil { - stdout := stdoutBuilder.String() - stderr := stderrBuilder.String() + stdout, stderr, err = gitrepo.RunCmdString(ctx, m.Repo.WikiStorageRepo(), cmdRemoteUpdatePrune()) 
+ if err != nil { stderrMessage = util.SanitizeCredentialURLs(stderr) stdoutMessage = util.SanitizeCredentialURLs(stdout) } @@ -393,7 +356,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo if err != nil { log.Error("SyncMirrors [repo: %-v Wiki]: failed to update mirror repository wiki:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err) desc := fmt.Sprintf("Failed to update mirror repository wiki '%s': %s", m.Repo.WikiStorageRepo().RelativePath(), stderrMessage) - if err = system_model.CreateRepositoryNotice(desc); err != nil { + if err := system_model.CreateRepositoryNotice(desc); err != nil { log.Error("CreateRepositoryNotice: %v", err) } return nil, false @@ -418,7 +381,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo } m.UpdatedUnix = timeutil.TimeStampNow() - return parseRemoteUpdateOutput(output, m.GetRemoteName()), true + return parseRemoteUpdateOutput(fetchOutput, m.GetRemoteName()), true } func getRepoPullMirrorLockKey(repoID int64) string { diff --git a/services/mirror/mirror_push.go b/services/mirror/mirror_push.go index bae189ba87..1acb227ac0 100644 --- a/services/mirror/mirror_push.go +++ b/services/mirror/mirror_push.go @@ -192,7 +192,9 @@ func pushAllLFSObjects(ctx context.Context, gitRepo *git.Repository, lfsClient l pointerChan := make(chan lfs.PointerBlob) errChan := make(chan error, 1) - go lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan, errChan) + go func() { + errChan <- lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan) + }() uploadObjects := func(pointers []lfs.Pointer) error { err := lfsClient.Upload(ctx, pointers, func(p lfs.Pointer, objectError error) (io.ReadCloser, error) { @@ -242,13 +244,12 @@ func pushAllLFSObjects(ctx context.Context, gitRepo *git.Repository, lfsClient l } } - err, has := <-errChan - if has { + err := <-errChan + if err != nil { log.Error("Error enumerating LFS objects for repository: %v", err) - return err } - return 
nil + return err } func syncPushMirrorWithSyncOnCommit(ctx context.Context, repoID int64) { diff --git a/services/org/team_test.go b/services/org/team_test.go index a5e01e7a54..5cb588b7dd 100644 --- a/services/org/team_test.go +++ b/services/org/team_test.go @@ -8,6 +8,7 @@ import ( "strings" "testing" + issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" access_model "code.gitea.io/gitea/models/perm/access" @@ -62,6 +63,36 @@ func TestTeam_RemoveMember(t *testing.T) { assert.True(t, organization.IsErrLastOrgOwner(err)) } +func TestRemoveTeamMemberRemovesSubscriptionsAndStopwatches(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + ctx := t.Context() + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 2}) + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}) + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3}) + issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{RepoID: repo.ID}) + + assert.NoError(t, repo_model.WatchRepo(ctx, user, repo, true)) + assert.NoError(t, issues_model.CreateOrUpdateIssueWatch(ctx, user.ID, issue.ID, true)) + ok, err := issues_model.CreateIssueStopwatch(ctx, user, issue) + assert.NoError(t, err) + assert.True(t, ok) + + assert.NoError(t, RemoveTeamMember(ctx, team, user)) + + watch, err := repo_model.GetWatch(ctx, user.ID, repo.ID) + assert.NoError(t, err) + assert.False(t, repo_model.IsWatchMode(watch.Mode)) + + _, exists, err := issues_model.GetIssueWatch(ctx, user.ID, issue.ID) + assert.NoError(t, err) + assert.False(t, exists) + + hasStopwatch, _, _, err := issues_model.HasUserStopwatch(ctx, user.ID) + assert.NoError(t, err) + assert.False(t, hasStopwatch) +} + func TestNewTeam(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) diff --git a/services/packages/debian/repository.go b/services/packages/debian/repository.go index 34b52b45cf..910f93b034 100644 --- 
a/services/packages/debian/repository.go +++ b/services/packages/debian/repository.go @@ -7,6 +7,7 @@ import ( "bytes" "compress/gzip" "context" + "crypto" "errors" "fmt" "io" @@ -67,7 +68,14 @@ func GetOrCreateKeyPair(ctx context.Context, ownerID int64) (string, string, err } func generateKeypair() (string, string, error) { - e, err := openpgp.NewEntity("", "Debian Registry", "", nil) + // Repository signing keys are long-lived and there is currently no rotation mechanism, choose stronger algorithms + cfg := &packet.Config{ + RSABits: 4096, + DefaultHash: crypto.SHA256, + DefaultCipher: packet.CipherAES256, + } + + e, err := openpgp.NewEntity("", "Automatically generated Debian Registry Key; created "+time.Now().UTC().Format(time.RFC3339), "", cfg) if err != nil { return "", "", err } diff --git a/services/pull/check.go b/services/pull/check.go index 691ce9da9f..8826fca280 100644 --- a/services/pull/check.go +++ b/services/pull/check.go @@ -238,7 +238,7 @@ func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer } defer closer.Close() - sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, gitRepo, pr.BaseBranch, pr.GetGitHeadRefName()) + sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, gitRepo) return sign, err } @@ -246,7 +246,7 @@ func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer // markPullRequestAsMergeable checks if pull request is possible to leaving checking status, // and set to be either conflict or mergeable. 
func markPullRequestAsMergeable(ctx context.Context, pr *issues_model.PullRequest) { - // If the status has not been changed to conflict by testPullRequestTmpRepoBranchMergeable then we are mergeable + // If the status has not been changed to conflict by the conflict checking functions then we are mergeable if pr.Status == issues_model.PullRequestStatusChecking { pr.Status = issues_model.PullRequestStatusMergeable } @@ -287,10 +287,9 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com prHeadRef := pr.GetGitHeadRefName() // Check if the pull request is merged into BaseBranch - if _, err := gitrepo.RunCmdString(ctx, pr.BaseRepo, - gitcmd.NewCommand("merge-base", "--is-ancestor"). - AddDynamicArguments(prHeadRef, pr.BaseBranch)); err != nil { - if strings.Contains(err.Error(), "exit status 1") { + cmd := gitcmd.NewCommand("merge-base", "--is-ancestor").AddDynamicArguments(prHeadRef, pr.BaseBranch) + if err := gitrepo.RunCmdWithStderr(ctx, pr.BaseRepo, cmd); err != nil { + if gitcmd.IsErrorExitCode(err, 1) { // prHeadRef is not an ancestor of the base branch return nil, nil } @@ -315,7 +314,7 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName) // Get the commit from BaseBranch where the pull request got merged - mergeCommit, err := gitrepo.RunCmdString(ctx, pr.BaseRepo, + mergeCommit, _, err := gitrepo.RunCmdString(ctx, pr.BaseRepo, gitcmd.NewCommand("rev-list", "--ancestry-path", "--merges", "--reverse"). 
AddDynamicArguments(prHeadCommitID+".."+pr.BaseBranch)) if err != nil { @@ -443,8 +442,8 @@ func checkPullRequestMergeable(id int64) { return } - if err := testPullRequestBranchMergeable(pr); err != nil { - log.Error("testPullRequestTmpRepoBranchMergeable[%-v]: %v", pr, err) + if err := checkPullRequestBranchMergeable(ctx, pr); err != nil { + log.Error("checkPullRequestBranchMergeable[%-v]: %v", pr, err) pr.Status = issues_model.PullRequestStatusError if err := pr.UpdateCols(ctx, "status"); err != nil { log.Error("update pr [%-v] status to PullRequestStatusError failed: %v", pr, err) diff --git a/services/pull/commit_status.go b/services/pull/commit_status.go index 25860fc1a8..656bcc50af 100644 --- a/services/pull/commit_status.go +++ b/services/pull/commit_status.go @@ -6,6 +6,8 @@ package pull import ( "context" + "errors" + "fmt" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" @@ -14,8 +16,6 @@ import ( "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/glob" "code.gitea.io/gitea/modules/log" - - "github.com/pkg/errors" ) // MergeRequiredContextsCommitStatus returns a commit status state for given required contexts @@ -69,7 +69,7 @@ func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus, func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) (bool, error) { pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch) if err != nil { - return false, errors.Wrap(err, "GetLatestCommitStatus") + return false, fmt.Errorf("GetLatestCommitStatus: %w", err) } if pb == nil || !pb.EnableStatusCheck { return true, nil @@ -86,19 +86,19 @@ func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) ( func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullRequest) (commitstatus.CommitStatusState, error) { // Ensure HeadRepo is loaded if err := pr.LoadHeadRepo(ctx); err != nil { - return "", errors.Wrap(err, 
"LoadHeadRepo") + return "", fmt.Errorf("LoadHeadRepo: %w", err) } // check if all required status checks are successful headGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.HeadRepo) if err != nil { - return "", errors.Wrap(err, "OpenRepository") + return "", fmt.Errorf("OpenRepository: %w", err) } defer closer.Close() if pr.Flow == issues_model.PullRequestFlowGithub { if exist, err := git_model.IsBranchExist(ctx, pr.HeadRepo.ID, pr.HeadBranch); err != nil { - return "", errors.Wrap(err, "IsBranchExist") + return "", fmt.Errorf("IsBranchExist: %w", err) } else if !exist { return "", errors.New("Head branch does not exist, can not merge") } @@ -118,17 +118,17 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR } if err := pr.LoadBaseRepo(ctx); err != nil { - return "", errors.Wrap(err, "LoadBaseRepo") + return "", fmt.Errorf("LoadBaseRepo: %w", err) } commitStatuses, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll) if err != nil { - return "", errors.Wrap(err, "GetLatestCommitStatus") + return "", fmt.Errorf("GetLatestCommitStatus: %w", err) } pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch) if err != nil { - return "", errors.Wrap(err, "LoadProtectedBranch") + return "", fmt.Errorf("LoadProtectedBranch: %w", err) } var requiredContexts []string if pb != nil { diff --git a/services/pull/lfs.go b/services/pull/lfs.go index eb2a08ed8d..094b563b92 100644 --- a/services/pull/lfs.go +++ b/services/pull/lfs.go @@ -7,15 +7,19 @@ package pull import ( "bufio" "context" + "errors" "io" "strconv" - "sync" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/git/pipeline" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" + + "golang.org/x/sync/errgroup" ) // LFSPush pushes lfs objects 
referred to in new commits in the head repository from the base repository @@ -26,81 +30,82 @@ func LFSPush(ctx context.Context, tmpBasePath, mergeHeadSHA, mergeBaseSHA string // ensure only blobs and <=1k size then pass in to git cat-file --batch // to read each sha and check each as a pointer // Then if they are lfs -> add them to the baseRepo - revListReader, revListWriter := io.Pipe() - shasToCheckReader, shasToCheckWriter := io.Pipe() - catFileCheckReader, catFileCheckWriter := io.Pipe() - shasToBatchReader, shasToBatchWriter := io.Pipe() - catFileBatchReader, catFileBatchWriter := io.Pipe() - errChan := make(chan error, 1) - wg := sync.WaitGroup{} - wg.Add(6) - // Create the go-routines in reverse order. + + cmd1RevList, cmd3BathCheck, cmd5BatchContent := gitcmd.NewCommand(), gitcmd.NewCommand(), gitcmd.NewCommand() + cmd1RevListOut, cmd1RevListClose := cmd1RevList.MakeStdoutPipe() + defer cmd1RevListClose() + + cmd3BatchCheckIn, cmd3BatchCheckOut, cmd3BatchCheckClose := cmd3BathCheck.MakeStdinStdoutPipe() + defer cmd3BatchCheckClose() + + cmd5BatchContentIn, cmd5BatchContentOut, cmd5BatchContentClose := cmd5BatchContent.MakeStdinStdoutPipe() + defer cmd5BatchContentClose() + + // Create the go-routines in reverse order (update: the order is not needed any more, the pipes are properly prepared) + wg := &errgroup.Group{} // 6. Take the output of cat-file --batch and check if each file in turn // to see if they're pointers to files in the LFS store associated with // the head repo and add them to the base repo if so - go createLFSMetaObjectsFromCatFileBatch(ctx, catFileBatchReader, &wg, pr) + wg.Go(func() error { + return createLFSMetaObjectsFromCatFileBatch(ctx, cmd5BatchContentOut, pr) + }) // 5. Take the shas of the blobs and batch read them - go pipeline.CatFileBatch(ctx, shasToBatchReader, catFileBatchWriter, &wg, tmpBasePath) + wg.Go(func() error { + return pipeline.CatFileBatch(ctx, cmd5BatchContent, tmpBasePath) + }) // 4. 
From the provided objects restrict to blobs <=1k - go pipeline.BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader, shasToBatchWriter, &wg) + wg.Go(func() error { + return pipeline.BlobsLessThan1024FromCatFileBatchCheck(cmd3BatchCheckOut, cmd5BatchContentIn) + }) // 3. Run batch-check on the objects retrieved from rev-list - go pipeline.CatFileBatchCheck(ctx, shasToCheckReader, catFileCheckWriter, &wg, tmpBasePath) + wg.Go(func() error { + return pipeline.CatFileBatchCheck(ctx, cmd3BathCheck, tmpBasePath) + }) // 2. Check each object retrieved rejecting those without names as they will be commits or trees - go pipeline.BlobsFromRevListObjects(revListReader, shasToCheckWriter, &wg) + wg.Go(func() error { + return pipeline.BlobsFromRevListObjects(cmd1RevListOut, cmd3BatchCheckIn) + }) // 1. Run rev-list objects from mergeHead to mergeBase - go pipeline.RevListObjects(ctx, revListWriter, &wg, tmpBasePath, mergeHeadSHA, mergeBaseSHA, errChan) + wg.Go(func() error { + return pipeline.RevListObjects(ctx, cmd1RevList, tmpBasePath, mergeHeadSHA, mergeBaseSHA) + }) - wg.Wait() - select { - case err, has := <-errChan: - if has { - return err - } - default: - } - return nil + return wg.Wait() } -func createLFSMetaObjectsFromCatFileBatch(ctx context.Context, catFileBatchReader *io.PipeReader, wg *sync.WaitGroup, pr *issues_model.PullRequest) { - defer wg.Done() +func createLFSMetaObjectsFromCatFileBatch(ctx context.Context, catFileBatchReader io.ReadCloser, pr *issues_model.PullRequest) error { defer catFileBatchReader.Close() contentStore := lfs.NewContentStore() - bufferedReader := bufio.NewReader(catFileBatchReader) buf := make([]byte, 1025) for { // File descriptor line: sha _, err := bufferedReader.ReadString(' ') if err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return util.Iif(errors.Is(err, io.EOF), nil, err) } // Throw away the blob if _, err := bufferedReader.ReadString(' '); err != nil { - _ = catFileBatchReader.CloseWithError(err) - break 
+ return err } sizeStr, err := bufferedReader.ReadString('\n') if err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return err } size, err := strconv.Atoi(sizeStr[:len(sizeStr)-1]) if err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return err } pointerBuf := buf[:size+1] if _, err := io.ReadFull(bufferedReader, pointerBuf); err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return err } pointerBuf = pointerBuf[:size] // Now we need to check if the pointerBuf is an LFS pointer @@ -120,15 +125,13 @@ func createLFSMetaObjectsFromCatFileBatch(ctx context.Context, catFileBatchReade log.Warn("During merge of: %d in %-v, there is a pointer to LFS Oid: %s which although present in the LFS store is not associated with the head repo %-v", pr.Index, pr.BaseRepo, pointer.Oid, pr.HeadRepo) continue } - _ = catFileBatchReader.CloseWithError(err) - break + return err } // OK we have a pointer that is associated with the head repo // and is actually a file in the LFS // Therefore it should be associated with the base repo if _, err := git_model.NewLFSMetaObject(ctx, pr.BaseRepoID, pointer); err != nil { - _ = catFileBatchReader.CloseWithError(err) - break + return err } } } diff --git a/services/pull/merge.go b/services/pull/merge.go index 88e30c6832..4925302797 100644 --- a/services/pull/merge.go +++ b/services/pull/merge.go @@ -366,11 +366,11 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use if err != nil { return "", fmt.Errorf("Failed to get full commit id for HEAD: %w", err) } - mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+baseBranch) + mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+tmpRepoBaseBranch) if err != nil { return "", fmt.Errorf("Failed to get full commit id for origin/%s: %w", pr.BaseBranch, err) } - mergeCommitID, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, baseBranch) + mergeCommitID, err := 
git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, tmpRepoBaseBranch) if err != nil { return "", fmt.Errorf("Failed to get full commit id for the new merge: %w", err) } @@ -407,32 +407,30 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use ) mergeCtx.env = append(mergeCtx.env, repo_module.EnvPushTrigger+"="+string(pushTrigger)) - pushCmd := gitcmd.NewCommand("push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch) + pushCmd := gitcmd.NewCommand("push", "origin").AddDynamicArguments(tmpRepoBaseBranch + ":" + git.BranchPrefix + pr.BaseBranch) // Push back to upstream. // This cause an api call to "/api/internal/hook/post-receive/...", // If it's merge, all db transaction and operations should be there but not here to prevent deadlock. - if err := mergeCtx.PrepareGitCmd(pushCmd).Run(ctx); err != nil { - if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") { + if err := mergeCtx.PrepareGitCmd(pushCmd).RunWithStderr(ctx); err != nil { + if strings.Contains(err.Stderr(), "non-fast-forward") { return "", &git.ErrPushOutOfDate{ StdOut: mergeCtx.outbuf.String(), - StdErr: mergeCtx.errbuf.String(), + StdErr: err.Stderr(), Err: err, } - } else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") { + } else if strings.Contains(err.Stderr(), "! 
[remote rejected]") { err := &git.ErrPushRejected{ StdOut: mergeCtx.outbuf.String(), - StdErr: mergeCtx.errbuf.String(), + StdErr: err.Stderr(), Err: err, } err.GenerateMessage() return "", err } - return "", fmt.Errorf("git push: %s", mergeCtx.errbuf.String()) + return "", fmt.Errorf("git push: %s", err.Stderr()) } mergeCtx.outbuf.Reset() - mergeCtx.errbuf.Reset() - return mergeCommitID, nil } @@ -446,9 +444,8 @@ func commitAndSignNoAuthor(ctx *mergeContext, message string) error { } cmdCommit.AddOptionFormat("-S%s", ctx.signKey.KeyID) } - if err := ctx.PrepareGitCmd(cmdCommit).Run(ctx); err != nil { - log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) - return fmt.Errorf("git commit %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + if err := ctx.PrepareGitCmd(cmdCommit).RunWithStderr(ctx); err != nil { + return fmt.Errorf("git commit %v: %w\n%s", ctx.pr, err, ctx.outbuf.String()) } return nil } @@ -507,39 +504,37 @@ func (err ErrMergeDivergingFastForwardOnly) Error() string { } func runMergeCommand(ctx *mergeContext, mergeStyle repo_model.MergeStyle, cmd *gitcmd.Command) error { - if err := ctx.PrepareGitCmd(cmd).Run(ctx); err != nil { + if err := ctx.PrepareGitCmd(cmd).RunWithStderr(ctx); err != nil { // Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil { // We have a merge conflict error - log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) return ErrMergeConflicts{ Style: mergeStyle, StdOut: ctx.outbuf.String(), - StdErr: ctx.errbuf.String(), + StdErr: err.Stderr(), Err: err, } - } else if strings.Contains(ctx.errbuf.String(), "refusing to merge unrelated histories") { - log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, 
ctx.outbuf.String(), ctx.errbuf.String()) + } else if strings.Contains(err.Stderr(), "refusing to merge unrelated histories") { + log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) return ErrMergeUnrelatedHistories{ Style: mergeStyle, StdOut: ctx.outbuf.String(), - StdErr: ctx.errbuf.String(), + StdErr: err.Stderr(), Err: err, } - } else if mergeStyle == repo_model.MergeStyleFastForwardOnly && strings.Contains(ctx.errbuf.String(), "Not possible to fast-forward, aborting") { - log.Debug("MergeDivergingFastForwardOnly %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + } else if mergeStyle == repo_model.MergeStyleFastForwardOnly && strings.Contains(err.Stderr(), "Not possible to fast-forward, aborting") { + log.Debug("MergeDivergingFastForwardOnly %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) return ErrMergeDivergingFastForwardOnly{ StdOut: ctx.outbuf.String(), - StdErr: ctx.errbuf.String(), + StdErr: err.Stderr(), Err: err, } } - log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) - return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) + return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) } ctx.outbuf.Reset() - ctx.errbuf.Reset() - return nil } @@ -722,7 +717,7 @@ func SetMerged(ctx context.Context, pr *issues_model.PullRequest, mergedCommitID return false, fmt.Errorf("ChangeIssueStatus: %w", err) } - // We need to save all of the data used to compute this merge as it may have already been changed by testPullRequestBranchMergeable. FIXME: need to set some state to prevent testPullRequestBranchMergeable from running whilst we are merging. 
+ // We need to save all of the data used to compute this merge as it may have already been changed by checkPullRequestBranchMergeable. FIXME: need to set some state to prevent checkPullRequestBranchMergeable from running whilst we are merging. if cnt, err := db.GetEngine(ctx).Where("id = ?", pr.ID). And("has_merged = ?", false). Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix, conflicted_files"). diff --git a/services/pull/merge_ff_only.go b/services/pull/merge_ff_only.go index 22a560e29c..2ed09a7161 100644 --- a/services/pull/merge_ff_only.go +++ b/services/pull/merge_ff_only.go @@ -11,7 +11,7 @@ import ( // doMergeStyleFastForwardOnly merges the tracking into the current HEAD - which is assumed to be staging branch (equal to the pr.BaseBranch) func doMergeStyleFastForwardOnly(ctx *mergeContext) error { - cmd := gitcmd.NewCommand("merge", "--ff-only").AddDynamicArguments(trackingBranch) + cmd := gitcmd.NewCommand("merge", "--ff-only").AddDynamicArguments(tmpRepoTrackingBranch) if err := runMergeCommand(ctx, repo_model.MergeStyleFastForwardOnly, cmd); err != nil { log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err) return err diff --git a/services/pull/merge_merge.go b/services/pull/merge_merge.go index bc94400f21..2bc1fb9a34 100644 --- a/services/pull/merge_merge.go +++ b/services/pull/merge_merge.go @@ -11,7 +11,7 @@ import ( // doMergeStyleMerge merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch) func doMergeStyleMerge(ctx *mergeContext, message string) error { - cmd := gitcmd.NewCommand("merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch) + cmd := gitcmd.NewCommand("merge", "--no-ff", "--no-commit").AddDynamicArguments(tmpRepoTrackingBranch) if err := runMergeCommand(ctx, repo_model.MergeStyleMerge, cmd); err != nil { log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err) return err diff --git 
a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go index 6f752c351d..1131a23d85 100644 --- a/services/pull/merge_prepare.go +++ b/services/pull/merge_prepare.go @@ -5,7 +5,6 @@ package pull import ( "bufio" - "bytes" "context" "fmt" "io" @@ -19,7 +18,9 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" asymkey_service "code.gitea.io/gitea/services/asymkey" ) @@ -37,12 +38,10 @@ type mergeContext struct { // Do NOT use it with gitcmd.RunStd*() functions, otherwise it will panic func (ctx *mergeContext) PrepareGitCmd(cmd *gitcmd.Command) *gitcmd.Command { ctx.outbuf.Reset() - ctx.errbuf.Reset() return cmd.WithEnv(ctx.env). WithDir(ctx.tmpBasePath). WithParentCallerInfo(). - WithStdout(ctx.outbuf). - WithStderr(ctx.errbuf) + WithStdoutBuffer(ctx.outbuf) } // ErrSHADoesNotMatch represents a "SHADoesNotMatch" kind of error. @@ -77,7 +76,7 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque if expectedHeadCommitID != "" { trackingCommitID, _, err := gitcmd.NewCommand("show-ref", "--hash"). - AddDynamicArguments(git.BranchPrefix + trackingBranch). + AddDynamicArguments(git.BranchPrefix + tmpRepoTrackingBranch). WithEnv(mergeCtx.env). WithDir(mergeCtx.tmpBasePath). 
RunStdString(ctx) @@ -96,7 +95,6 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque } mergeCtx.outbuf.Reset() - mergeCtx.errbuf.Reset() if err := prepareTemporaryRepoForMerge(mergeCtx); err != nil { defer cancel() return nil, nil, err @@ -105,7 +103,7 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque mergeCtx.sig = doer.NewGitSig() mergeCtx.committer = mergeCtx.sig - gitRepo, err := git.OpenRepository(ctx, mergeCtx.tmpBasePath) + gitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo) if err != nil { defer cancel() return nil, nil, fmt.Errorf("failed to open temp git repo for pr[%d]: %w", mergeCtx.pr.ID, err) @@ -113,7 +111,7 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque defer gitRepo.Close() // Determine if we should sign - sign, key, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, gitRepo, "HEAD", trackingBranch) + sign, key, signer, _ := asymkey_service.SignMerge(ctx, pr, doer, gitRepo) if sign { mergeCtx.signKey = key if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { @@ -154,8 +152,8 @@ func prepareTemporaryRepoForMerge(ctx *mergeContext) error { } defer sparseCheckoutListFile.Close() // we will close it earlier but we need to ensure it is closed if there is an error - if err := getDiffTree(ctx, ctx.tmpBasePath, baseBranch, trackingBranch, sparseCheckoutListFile); err != nil { - log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, baseBranch, trackingBranch, err) + if err := getDiffTree(ctx, ctx.tmpBasePath, tmpRepoBaseBranch, tmpRepoTrackingBranch, sparseCheckoutListFile); err != nil { + log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, tmpRepoBaseBranch, tmpRepoTrackingBranch, err) return fmt.Errorf("getDiffTree: %w", err) } @@ -166,13 +164,11 @@ func prepareTemporaryRepoForMerge(ctx *mergeContext) 
error { setConfig := func(key, value string) error { if err := ctx.PrepareGitCmd(gitcmd.NewCommand("config", "--local").AddDynamicArguments(key, value)). - Run(ctx); err != nil { - log.Error("git config [%s -> %q]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String()) - return fmt.Errorf("git config [%s -> %q]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String()) + RunWithStderr(ctx); err != nil { + log.Error("git config [%s -> %q]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), err.Stderr()) + return fmt.Errorf("git config [%s -> %q]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), err.Stderr()) } ctx.outbuf.Reset() - ctx.errbuf.Reset() - return nil } @@ -199,63 +195,34 @@ func prepareTemporaryRepoForMerge(ctx *mergeContext) error { // Read base branch index if err := ctx.PrepareGitCmd(gitcmd.NewCommand("read-tree", "HEAD")). - Run(ctx); err != nil { - log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String()) - return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String()) + RunWithStderr(ctx); err != nil { + log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), err.Stderr()) + return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), err.Stderr()) } ctx.outbuf.Reset() - ctx.errbuf.Reset() - return nil } // getDiffTree returns a string containing all the files that were changed between headBranch and baseBranch // the filenames are escaped so as to fit the format required for .git/info/sparse-checkout func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, out io.Writer) error { - diffOutReader, diffOutWriter, err := os.Pipe() - if err != nil { - log.Error("Unable to create os.Pipe for %s", repoPath) - return err - } - defer func() { - _ = diffOutReader.Close() - _ = diffOutWriter.Close() - }() - - scanNullTerminatedStrings := func(data []byte, 
atEOF bool) (advance int, token []byte, err error) { - if atEOF && len(data) == 0 { - return 0, nil, nil - } - if i := bytes.IndexByte(data, '\x00'); i >= 0 { - return i + 1, data[0:i], nil - } - if atEOF { - return len(data), data, nil - } - return 0, nil, nil - } - - err = gitcmd.NewCommand("diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root"). - AddDynamicArguments(baseBranch, headBranch). + cmd := gitcmd.NewCommand("diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root") + diffOutReader, diffOutReaderClose := cmd.MakeStdoutPipe() + defer diffOutReaderClose() + err := cmd.AddDynamicArguments(baseBranch, headBranch). WithDir(repoPath). - WithStdout(diffOutWriter). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - // Close the writer end of the pipe to begin processing - _ = diffOutWriter.Close() - defer func() { - // Close the reader on return to terminate the git command if necessary - _ = diffOutReader.Close() - }() - + WithPipelineFunc(func(ctx gitcmd.Context) error { // Now scan the output from the command scanner := bufio.NewScanner(diffOutReader) - scanner.Split(scanNullTerminatedStrings) + scanner.Split(util.BufioScannerSplit(0)) for scanner.Scan() { - filepath := scanner.Text() + treePath := scanner.Text() // escape '*', '?', '[', spaces and '!' prefix - filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`) + treePath = escapedSymbols.ReplaceAllString(treePath, `\$1`) // no necessary to escape the first '#' symbol because the first symbol is '/' - fmt.Fprintf(out, "/%s\n", filepath) + if _, err := fmt.Fprintf(out, "/%s\n", treePath); err != nil { + return err + } } return scanner.Err() }). 
@@ -286,16 +253,15 @@ func (err ErrRebaseConflicts) Error() string { // if there is a conflict it will return an ErrRebaseConflicts func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error { // Checkout head branch - if err := ctx.PrepareGitCmd(gitcmd.NewCommand("checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch)). - Run(ctx); err != nil { - return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + if err := ctx.PrepareGitCmd(gitcmd.NewCommand("checkout", "-b").AddDynamicArguments(tmpRepoStagingBranch, tmpRepoTrackingBranch)). + RunWithStderr(ctx); err != nil { + return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) } ctx.outbuf.Reset() - ctx.errbuf.Reset() // Rebase before merging - if err := ctx.PrepareGitCmd(gitcmd.NewCommand("rebase").AddDynamicArguments(baseBranch)). - Run(ctx); err != nil { + if err := ctx.PrepareGitCmd(gitcmd.NewCommand("rebase").AddDynamicArguments(tmpRepoBaseBranch)). 
+ RunWithStderr(ctx); err != nil { // Rebase will leave a REBASE_HEAD file in .git if there is a conflict if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil { var commitSha string @@ -309,7 +275,7 @@ func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) commitShaBytes, readErr := os.ReadFile(failingCommitPath) if readErr != nil { // Abandon this attempt to handle the error - return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) } commitSha = strings.TrimSpace(string(commitShaBytes)) ok = true @@ -318,20 +284,19 @@ func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) } if !ok { log.Error("Unable to determine failing commit sha for failing rebase in temp repo for %-v. 
Cannot cast as ErrRebaseConflicts.", ctx.pr) - return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) } - log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), ctx.errbuf.String()) + log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), err.Stderr()) return ErrRebaseConflicts{ CommitSHA: commitSha, Style: mergeStyle, StdOut: ctx.outbuf.String(), - StdErr: ctx.errbuf.String(), + StdErr: err.Stderr(), Err: err, } } - return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) } ctx.outbuf.Reset() - ctx.errbuf.Reset() return nil } diff --git a/services/pull/merge_rebase.go b/services/pull/merge_rebase.go index 0fa4fd00f6..9dbe67a6c6 100644 --- a/services/pull/merge_rebase.go +++ b/services/pull/merge_rebase.go @@ -43,7 +43,7 @@ func doMergeRebaseFastForward(ctx *mergeContext) error { return fmt.Errorf("Failed to get full commit id for HEAD: %w", err) } - cmd := gitcmd.NewCommand("merge", "--ff-only").AddDynamicArguments(stagingBranch) + cmd := gitcmd.NewCommand("merge", "--ff-only").AddDynamicArguments(tmpRepoStagingBranch) if err := runMergeCommand(ctx, repo_model.MergeStyleRebase, cmd); err != nil { log.Error("Unable to merge staging into base: %v", err) return err @@ -88,7 +88,7 @@ func doMergeRebaseFastForward(ctx *mergeContext) error { // Perform rebase merge with merge commit. 
func doMergeRebaseMergeCommit(ctx *mergeContext, message string) error { - cmd := gitcmd.NewCommand("merge").AddArguments("--no-ff", "--no-commit").AddDynamicArguments(stagingBranch) + cmd := gitcmd.NewCommand("merge").AddArguments("--no-ff", "--no-commit").AddDynamicArguments(tmpRepoStagingBranch) if err := runMergeCommand(ctx, repo_model.MergeStyleRebaseMerge, cmd); err != nil { log.Error("Unable to merge staging into base: %v", err) @@ -109,14 +109,12 @@ func doMergeStyleRebase(ctx *mergeContext, mergeStyle repo_model.MergeStyle, mes } // Checkout base branch again - if err := ctx.PrepareGitCmd(gitcmd.NewCommand("checkout").AddDynamicArguments(baseBranch)). - Run(ctx); err != nil { - log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) - return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) + if err := ctx.PrepareGitCmd(gitcmd.NewCommand("checkout").AddDynamicArguments(tmpRepoBaseBranch)). 
+ RunWithStderr(ctx); err != nil { + log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) + return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) } ctx.outbuf.Reset() - ctx.errbuf.Reset() - if mergeStyle == repo_model.MergeStyleRebase { return doMergeRebaseFastForward(ctx) } diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go index b5f2a4deff..6c101c8e89 100644 --- a/services/pull/merge_squash.go +++ b/services/pull/merge_squash.go @@ -32,9 +32,9 @@ func getAuthorSignatureSquash(ctx *mergeContext) (*git.Signature, error) { } defer gitRepo.Close() - commits, err := gitRepo.CommitsBetweenIDs(trackingBranch, "HEAD") + commits, err := gitRepo.CommitsBetweenIDs(tmpRepoTrackingBranch, "HEAD") if err != nil { - log.Error("%-v Unable to get commits between: %s %s: %v", ctx.pr, "HEAD", trackingBranch, err) + log.Error("%-v Unable to get commits between: %s %s: %v", ctx.pr, "HEAD", tmpRepoTrackingBranch, err) return nil, err } @@ -58,7 +58,7 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error { return fmt.Errorf("getAuthorSignatureSquash: %w", err) } - cmdMerge := gitcmd.NewCommand("merge", "--squash").AddDynamicArguments(trackingBranch) + cmdMerge := gitcmd.NewCommand("merge", "--squash").AddDynamicArguments(tmpRepoTrackingBranch) if err := runMergeCommand(ctx, repo_model.MergeStyleSquash, cmdMerge); err != nil { log.Error("%-v Unable to merge --squash tracking into base: %v", ctx.pr, err) return err @@ -81,11 +81,10 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error { } cmdCommit.AddOptionFormat("-S%s", ctx.signKey.KeyID) } - if err := ctx.PrepareGitCmd(cmdCommit).Run(ctx); err != nil { - log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String()) - return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", 
ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String()) + if err := ctx.PrepareGitCmd(cmdCommit).RunWithStderr(ctx); err != nil { + log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr()) + return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), err.Stderr()) } ctx.outbuf.Reset() - ctx.errbuf.Reset() return nil } diff --git a/services/pull/merge_tree.go b/services/pull/merge_tree.go new file mode 100644 index 0000000000..18d59fabd1 --- /dev/null +++ b/services/pull/merge_tree.go @@ -0,0 +1,144 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package pull + +import ( + "context" + "errors" + "fmt" + + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/gitrepo" + "code.gitea.io/gitea/modules/log" +) + +// checkConflictsMergeTree uses git merge-tree to check for conflicts and if none are found checks if the patch is empty +// return true if there are conflicts otherwise return false +// pr.Status and pr.ConflictedFiles will be updated as necessary +func checkConflictsMergeTree(ctx context.Context, pr *issues_model.PullRequest, baseCommitID string) (bool, error) { + treeHash, conflict, conflictFiles, err := gitrepo.MergeTree(ctx, pr.BaseRepo, baseCommitID, pr.HeadCommitID, pr.MergeBase) + if err != nil { + return false, fmt.Errorf("MergeTree: %w", err) + } + if conflict { + pr.Status = issues_model.PullRequestStatusConflict + // sometimes git merge-tree will detect conflicts but not list any conflicted files + // so that pr.ConflictedFiles will be empty + pr.ConflictedFiles = conflictFiles + + log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), 
pr.ConflictedFiles) + return true, nil + } + + // Detect whether the pull request introduces changes by comparing the merged tree (treeHash) + // against the current base commit (baseCommitID) using `git diff-tree`. The command returns exit code 0 + // if there is no diff between these trees (empty patch) and exit code 1 if there is a diff. + gitErr := gitrepo.RunCmd(ctx, pr.BaseRepo, gitcmd.NewCommand("diff-tree", "-r", "--quiet"). + AddDynamicArguments(treeHash, baseCommitID)) + switch { + case gitErr == nil: + log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID) + pr.Status = issues_model.PullRequestStatusEmpty + case gitcmd.IsErrorExitCode(gitErr, 1): + pr.Status = issues_model.PullRequestStatusMergeable + default: + return false, fmt.Errorf("run diff-tree exit abnormally: %w", gitErr) + } + return false, nil +} + +func checkPullRequestMergeableByMergeTree(ctx context.Context, pr *issues_model.PullRequest) error { + // 1. Get head commit + if err := pr.LoadHeadRepo(ctx); err != nil { + return err + } + headGitRepo, err := gitrepo.OpenRepository(ctx, pr.HeadRepo) + if err != nil { + return fmt.Errorf("OpenRepository: %w", err) + } + defer headGitRepo.Close() + + // 2. Get/open base repository + var baseGitRepo *git.Repository + if pr.IsSameRepo() { + baseGitRepo = headGitRepo + } else { + baseGitRepo, err = gitrepo.OpenRepository(ctx, pr.BaseRepo) + if err != nil { + return fmt.Errorf("OpenRepository: %w", err) + } + defer baseGitRepo.Close() + } + + // 3. 
Get head commit id + if pr.Flow == issues_model.PullRequestFlowGithub { + pr.HeadCommitID, err = headGitRepo.GetRefCommitID(git.BranchPrefix + pr.HeadBranch) + if err != nil { + return fmt.Errorf("GetBranchCommitID: can't find commit ID for head: %w", err) + } + } else { + if pr.ID > 0 { + pr.HeadCommitID, err = baseGitRepo.GetRefCommitID(pr.GetGitHeadRefName()) + if err != nil { + return fmt.Errorf("GetRefCommitID: can't find commit ID for head: %w", err) + } + } else if pr.HeadCommitID == "" { // for new pull request with agit, the head commit id must be provided + return errors.New("head commit ID is empty for pull request Agit flow") + } + } + + // 4. fetch head commit id into the current repository + // it will be checked in 2 weeks by default from git if the pull request created failure. + if !pr.IsSameRepo() { + if !baseGitRepo.IsReferenceExist(pr.HeadCommitID) { + if err := gitrepo.FetchRemoteCommit(ctx, pr.BaseRepo, pr.HeadRepo, pr.HeadCommitID); err != nil { + return fmt.Errorf("FetchRemoteCommit: %w", err) + } + } + } + + // 5. update merge base + baseCommitID, err := baseGitRepo.GetRefCommitID(git.BranchPrefix + pr.BaseBranch) + if err != nil { + return fmt.Errorf("GetBranchCommitID: can't find commit ID for base: %w", err) + } + + pr.MergeBase, err = gitrepo.MergeBase(ctx, pr.BaseRepo, baseCommitID, pr.HeadCommitID) + if err != nil { + // if there is no merge base, then it's empty, still need to allow the pull request to be created + // not quite right (e.g.: why not reset the fields like below), but no interest to do more investigation at the moment + log.Error("MergeBase: unable to find merge base between %s and %s: %v", baseCommitID, pr.HeadCommitID, err) + pr.Status = issues_model.PullRequestStatusEmpty + return nil + } + + // reset conflicted files and changed protected files + pr.ConflictedFiles = nil + pr.ChangedProtectedFiles = nil + + // 6. 
if base == head, then it's an ancestor + if pr.HeadCommitID == pr.MergeBase { + pr.Status = issues_model.PullRequestStatusAncestor + return nil + } + + // 7. Check for conflicts + conflicted, err := checkConflictsMergeTree(ctx, pr, baseCommitID) + if err != nil { + log.Error("checkConflictsMergeTree: %v", err) + pr.Status = issues_model.PullRequestStatusError + return fmt.Errorf("checkConflictsMergeTree: %w", err) + } + if conflicted || pr.Status == issues_model.PullRequestStatusEmpty { + return nil + } + + // 8. Check for protected files changes + if err = checkPullFilesProtection(ctx, pr, baseGitRepo, pr.HeadCommitID); err != nil { + return fmt.Errorf("checkPullFilesProtection: %w", err) + } + return nil +} diff --git a/services/pull/merge_tree_test.go b/services/pull/merge_tree_test.go new file mode 100644 index 0000000000..6fa2cf7022 --- /dev/null +++ b/services/pull/merge_tree_test.go @@ -0,0 +1,154 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package pull + +import ( + "context" + "fmt" + "testing" + + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/git/gitcmd" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testPullRequestMergeCheck(t *testing.T, + targetFunc func(ctx context.Context, pr *issues_model.PullRequest) error, + pr *issues_model.PullRequest, + expectedStatus issues_model.PullRequestStatus, + expectedConflictedFiles []string, + expectedChangedProtectedFiles []string, +) { + assert.NoError(t, pr.LoadIssue(t.Context())) + assert.NoError(t, pr.LoadBaseRepo(t.Context())) + assert.NoError(t, pr.LoadHeadRepo(t.Context())) + pr.Status = issues_model.PullRequestStatusChecking + pr.ConflictedFiles = []string{"unrelated-conflicted-file"} + pr.ChangedProtectedFiles = []string{"unrelated-protected-file"} + pr.MergeBase = "" + pr.HeadCommitID = "" + err := targetFunc(t.Context(), pr) + 
require.NoError(t, err) + assert.Equal(t, expectedStatus, pr.Status) + assert.Equal(t, expectedConflictedFiles, pr.ConflictedFiles) + assert.Equal(t, expectedChangedProtectedFiles, pr.ChangedProtectedFiles) + assert.NotEmpty(t, pr.MergeBase) + assert.NotEmpty(t, pr.HeadCommitID) +} + +func TestPullRequestMergeable(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) + t.Run("NoConflict-MergeTree", func(t *testing.T) { + testPullRequestMergeCheck(t, checkPullRequestMergeableByMergeTree, pr, issues_model.PullRequestStatusMergeable, nil, nil) + }) + t.Run("NoConflict-TmpRepo", func(t *testing.T) { + testPullRequestMergeCheck(t, checkPullRequestMergeableByTmpRepo, pr, issues_model.PullRequestStatusMergeable, nil, nil) + }) + + pr.BaseBranch, pr.HeadBranch = "test-merge-tree-conflict-base", "test-merge-tree-conflict-head" + conflictFiles := createConflictBranches(t, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.HeadBranch) + t.Run("Conflict-MergeTree", func(t *testing.T) { + testPullRequestMergeCheck(t, checkPullRequestMergeableByMergeTree, pr, issues_model.PullRequestStatusConflict, conflictFiles, nil) + }) + t.Run("Conflict-TmpRepo", func(t *testing.T) { + testPullRequestMergeCheck(t, checkPullRequestMergeableByTmpRepo, pr, issues_model.PullRequestStatusConflict, conflictFiles, nil) + }) + + pr.BaseBranch, pr.HeadBranch = "test-merge-tree-empty-base", "test-merge-tree-empty-head" + createEmptyBranches(t, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.HeadBranch) + t.Run("Empty-MergeTree", func(t *testing.T) { + testPullRequestMergeCheck(t, checkPullRequestMergeableByMergeTree, pr, issues_model.PullRequestStatusEmpty, nil, nil) + }) + t.Run("Empty-TmpRepo", func(t *testing.T) { + testPullRequestMergeCheck(t, checkPullRequestMergeableByTmpRepo, pr, issues_model.PullRequestStatusEmpty, nil, nil) + }) +} + +func createConflictBranches(t *testing.T, repoPath, baseBranch, headBranch string) 
[]string { + conflictFile := "conflict.txt" + stdin := fmt.Sprintf( + `reset refs/heads/%[1]s +from refs/heads/master + +commit refs/heads/%[1]s +mark :1 +committer Test 0 +0000 +data 17 +add conflict file +M 100644 inline %[3]s +data 4 +base + +commit refs/heads/%[1]s +mark :2 +committer Test 0 +0000 +data 11 +base change +from :1 +M 100644 inline %[3]s +data 11 +base change + +reset refs/heads/%[2]s +from :1 + +commit refs/heads/%[2]s +mark :3 +committer Test 0 +0000 +data 11 +head change +from :1 +M 100644 inline %[3]s +data 11 +head change +`, baseBranch, headBranch, conflictFile) + err := gitcmd.NewCommand("fast-import").WithDir(repoPath).WithStdinBytes([]byte(stdin)).RunWithStderr(t.Context()) + require.NoError(t, err) + return []string{conflictFile} +} + +func createEmptyBranches(t *testing.T, repoPath, baseBranch, headBranch string) { + emptyFile := "empty.txt" + stdin := fmt.Sprintf(`reset refs/heads/%[1]s +from refs/heads/master + +commit refs/heads/%[1]s +mark :1 +committer Test 0 +0000 +data 14 +add empty file +M 100644 inline %[3]s +data 4 +base + +reset refs/heads/%[2]s +from :1 + +commit refs/heads/%[2]s +mark :2 +committer Test 0 +0000 +data 17 +change empty file +from :1 +M 100644 inline %[3]s +data 6 +change + +commit refs/heads/%[2]s +mark :3 +committer Test 0 +0000 +data 17 +revert empty file +from :2 +M 100644 inline %[3]s +data 4 +base +`, baseBranch, headBranch, emptyFile) + err := gitcmd.NewCommand("fast-import").WithDir(repoPath).WithStdinBytes([]byte(stdin)).RunWithStderr(t.Context()) + require.NoError(t, err) +} diff --git a/services/pull/patch.go b/services/pull/patch.go index d82fe3e225..30f07f8931 100644 --- a/services/pull/patch.go +++ b/services/pull/patch.go @@ -21,7 +21,6 @@ import ( "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/glob" - "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" 
"code.gitea.io/gitea/modules/setting" @@ -67,10 +66,18 @@ var patchErrorSuffices = []string{ ": does not exist in index", } -func testPullRequestBranchMergeable(pr *issues_model.PullRequest) error { - ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("testPullRequestBranchMergeable: %s", pr)) +func checkPullRequestBranchMergeable(ctx context.Context, pr *issues_model.PullRequest) error { + ctx, _, finished := process.GetManager().AddContext(ctx, fmt.Sprintf("checkPullRequestBranchMergeable: %s", pr)) defer finished() + if git.DefaultFeatures().SupportGitMergeTree { + return checkPullRequestMergeableByMergeTree(ctx, pr) + } + + return checkPullRequestMergeableByTmpRepo(ctx, pr) +} + +func checkPullRequestMergeableByTmpRepo(ctx context.Context, pr *issues_model.PullRequest) error { prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr) if err != nil { if !git_model.IsErrBranchNotExist(err) { @@ -80,10 +87,6 @@ func testPullRequestBranchMergeable(pr *issues_model.PullRequest) error { } defer cancel() - return testPullRequestTmpRepoBranchMergeable(ctx, prCtx, pr) -} - -func testPullRequestTmpRepoBranchMergeable(ctx context.Context, prCtx *prTmpRepoContext, pr *issues_model.PullRequest) error { gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath) if err != nil { return fmt.Errorf("OpenRepository: %w", err) @@ -91,16 +94,16 @@ func testPullRequestTmpRepoBranchMergeable(ctx context.Context, prCtx *prTmpRepo defer gitRepo.Close() // 1. 
update merge base - pr.MergeBase, _, err = gitcmd.NewCommand("merge-base", "--", "base", "tracking").WithDir(prCtx.tmpBasePath).RunStdString(ctx) + pr.MergeBase, _, err = gitcmd.NewCommand("merge-base", "--", tmpRepoBaseBranch, tmpRepoTrackingBranch).WithDir(prCtx.tmpBasePath).RunStdString(ctx) if err != nil { var err2 error - pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base") + pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + tmpRepoBaseBranch) if err2 != nil { return fmt.Errorf("GetMergeBase: %v and can't find commit ID for base: %w", err, err2) } } pr.MergeBase = strings.TrimSpace(pr.MergeBase) - if pr.HeadCommitID, err = gitRepo.GetRefCommitID(git.BranchPrefix + "tracking"); err != nil { + if pr.HeadCommitID, err = gitRepo.GetRefCommitID(git.BranchPrefix + tmpRepoTrackingBranch); err != nil { return fmt.Errorf("GetBranchCommitID: can't find commit ID for head: %w", err) } @@ -110,17 +113,19 @@ func testPullRequestTmpRepoBranchMergeable(ctx context.Context, prCtx *prTmpRepo } // 2. Check for conflicts - if conflicts, err := checkConflicts(ctx, pr, gitRepo, prCtx.tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty { + conflicts, err := checkConflictsByTmpRepo(ctx, pr, gitRepo, prCtx.tmpBasePath) + if err != nil { return err } - // 3. Check for protected files changes - if err = checkPullFilesProtection(ctx, pr, gitRepo); err != nil { - return fmt.Errorf("pr.CheckPullFilesProtection(): %v", err) + pr.ChangedProtectedFiles = nil + if conflicts || pr.Status == issues_model.PullRequestStatusEmpty { + return nil } - if len(pr.ChangedProtectedFiles) > 0 { - log.Trace("Found %d protected files changed", len(pr.ChangedProtectedFiles)) + // 3. 
Check for protected files changes + if err = checkPullFilesProtection(ctx, pr, gitRepo, tmpRepoTrackingBranch); err != nil { + return fmt.Errorf("pr.CheckPullFilesProtection(): %w", err) } pr.Status = issues_model.PullRequestStatusMergeable @@ -249,7 +254,7 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo defer cancel() // First we use read-tree to do a simple three-way merge - if _, _, err := gitcmd.NewCommand("read-tree", "-m").AddDynamicArguments(base, ours, theirs).WithDir(gitPath).RunStdString(ctx); err != nil { + if err := gitcmd.NewCommand("read-tree", "-m").AddDynamicArguments(base, ours, theirs).WithDir(gitPath).RunWithStderr(ctx); err != nil { log.Error("Unable to run read-tree -m! Error: %v", err) return false, nil, fmt.Errorf("unable to run read-tree -m! Error: %w", err) } @@ -307,14 +312,14 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo return conflict, conflictedFiles, nil } -func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, tmpBasePath string) (bool, error) { - // 1. checkConflicts resets the conflict status - therefore - reset the conflict status +func checkConflictsByTmpRepo(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, tmpBasePath string) (bool, error) { + // 1. checkConflictsByTmpRepo resets the conflict status - therefore - reset the conflict status pr.ConflictedFiles = nil // 2. 
AttemptThreeWayMerge first - this is much quicker than plain patch to base description := fmt.Sprintf("PR[%d] %s/%s#%d", pr.ID, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, pr.Index) conflict, conflictFiles, err := AttemptThreeWayMerge(ctx, - tmpBasePath, gitRepo, pr.MergeBase, "base", "tracking", description) + tmpBasePath, gitRepo, pr.MergeBase, tmpRepoBaseBranch, tmpRepoTrackingBranch, description) if err != nil { return false, err } @@ -329,7 +334,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * return false, fmt.Errorf("unable to write unconflicted tree: %w\n`git ls-files -u`:\n%s", err, lsfiles) } treeHash = strings.TrimSpace(treeHash) - baseTree, err := gitRepo.GetTree("base") + baseTree, err := gitRepo.GetTree(tmpRepoBaseBranch) if err != nil { return false, err } @@ -379,10 +384,10 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * return false, nil } - log.Trace("PullRequest[%d].testPullRequestTmpRepoBranchMergeable (patchPath): %s", pr.ID, patchPath) + log.Trace("PullRequest[%d].checkPullRequestMergeableByTmpRepo (patchPath): %s", pr.ID, patchPath) // 4. Read the base branch in to the index of the temporary repository - _, _, err = gitcmd.NewCommand("read-tree", "base").WithDir(tmpBasePath).RunStdString(ctx) + _, _, err = gitcmd.NewCommand("read-tree", tmpRepoBaseBranch).WithDir(tmpBasePath).RunStdString(ctx) if err != nil { return false, fmt.Errorf("git read-tree %s: %w", pr.BaseBranch, err) } @@ -413,30 +418,15 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * // in memory - which is very wasteful. // - alternatively we can do the equivalent of: // `git apply --check ... | grep ...` - // meaning we don't store all of the conflicts unnecessarily. 
- stderrReader, stderrWriter, err := os.Pipe() - if err != nil { - log.Error("Unable to open stderr pipe: %v", err) - return false, fmt.Errorf("unable to open stderr pipe: %w", err) - } - defer func() { - _ = stderrReader.Close() - _ = stderrWriter.Close() - }() + // meaning we don't store all the conflicts unnecessarily. + stderrReader, stderrReaderClose := cmdApply.MakeStderrPipe() + defer stderrReaderClose() // 8. Run the check command conflict = false err = cmdApply. WithDir(tmpBasePath). - WithStderr(stderrWriter). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - // Close the writer end of the pipe to begin processing - _ = stderrWriter.Close() - defer func() { - // Close the reader on return to terminate the git command if necessary - _ = stderrReader.Close() - }() - + WithPipelineFunc(func(ctx gitcmd.Context) error { const prefix = "error: patch failed:" const errorPrefix = "error: " const threewayFailed = "Failed to perform three-way merge..." @@ -449,7 +439,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * scanner := bufio.NewScanner(stderrReader) for scanner.Scan() { line := scanner.Text() - log.Trace("PullRequest[%d].testPullRequestTmpRepoBranchMergeable: stderr: %s", pr.ID, line) + log.Trace("PullRequest[%d].checkPullRequestMergeableByTmpRepo: stderr: %s", pr.ID, line) if strings.HasPrefix(line, prefix) { conflict = true filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0]) @@ -474,8 +464,8 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * conflicts.Add(filepath) } } - // only list 10 conflicted files - if len(conflicts) >= 10 { + // only list part of conflicted files + if len(conflicts) >= gitrepo.MaxConflictedDetectFiles { break } } @@ -491,7 +481,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * }). Run(gitRepo.Ctx) - // 9. 
Check if the found conflictedfiles is non-zero, "err" could be non-nil, so we should ignore it if we found conflicts. + // 9. Check if the found conflicted files is non-zero, "err" could be non-nil, so we should ignore it if we found conflicts. // Note: `"err" could be non-nil` is due that if enable 3-way merge, it doesn't return any error on found conflicts. if len(pr.ConflictedFiles) > 0 { if conflict { @@ -585,7 +575,7 @@ func CheckUnprotectedFiles(repo *git.Repository, branchName, oldCommitID, newCom } // checkPullFilesProtection check if pr changed protected files and save results -func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) error { +func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, headRef string) error { if pr.Status == issues_model.PullRequestStatusEmpty { pr.ChangedProtectedFiles = nil return nil @@ -601,9 +591,12 @@ func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, return nil } - pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.HeadBranch, pr.MergeBase, "tracking", pb.GetProtectedFilePatterns(), 10, os.Environ()) + pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.HeadBranch, pr.MergeBase, headRef, pb.GetProtectedFilePatterns(), 10, os.Environ()) if err != nil && !IsErrFilePathProtected(err) { return err } + if len(pr.ChangedProtectedFiles) > 0 { + log.Trace("Found %d protected files changed in PR %s#%d", len(pr.ChangedProtectedFiles), pr.BaseRepo.FullName(), pr.Index) + } return nil } diff --git a/services/pull/patch_unmerged.go b/services/pull/patch_unmerged.go index 0491680313..78a31a8704 100644 --- a/services/pull/patch_unmerged.go +++ b/services/pull/patch_unmerged.go @@ -9,7 +9,6 @@ import ( "context" "fmt" "io" - "os" "strconv" "strings" @@ -60,27 +59,11 @@ func readUnmergedLsFileLines(ctx context.Context, tmpBasePath string, outputChan close(outputChan) }() - 
lsFilesReader, lsFilesWriter, err := os.Pipe() - if err != nil { - log.Error("Unable to open stderr pipe: %v", err) - outputChan <- &lsFileLine{err: fmt.Errorf("unable to open stderr pipe: %w", err)} - return - } - defer func() { - _ = lsFilesWriter.Close() - _ = lsFilesReader.Close() - }() - - stderr := &strings.Builder{} - err = gitcmd.NewCommand("ls-files", "-u", "-z"). - WithDir(tmpBasePath). - WithStdout(lsFilesWriter). - WithStderr(stderr). - WithPipelineFunc(func(_ context.Context, _ context.CancelFunc) error { - _ = lsFilesWriter.Close() - defer func() { - _ = lsFilesReader.Close() - }() + cmd := gitcmd.NewCommand("ls-files", "-u", "-z") + lsFilesReader, lsFilesReaderClose := cmd.MakeStdoutPipe() + defer lsFilesReaderClose() + err := cmd.WithDir(tmpBasePath). + WithPipelineFunc(func(gitcmd.Context) error { bufferedReader := bufio.NewReader(lsFilesReader) for { @@ -113,9 +96,9 @@ func readUnmergedLsFileLines(ctx context.Context, tmpBasePath string, outputChan outputChan <- toemit } }). - Run(ctx) + RunWithStderr(ctx) if err != nil { - outputChan <- &lsFileLine{err: fmt.Errorf("git ls-files -u -z: %w", gitcmd.ConcatenateError(err, stderr.String()))} + outputChan <- &lsFileLine{err: fmt.Errorf("git ls-files -u -z: %w", err)} } } diff --git a/services/pull/pull.go b/services/pull/pull.go index f8f64dd650..285e489078 100644 --- a/services/pull/pull.go +++ b/services/pull/pull.go @@ -4,12 +4,10 @@ package pull import ( - "bytes" "context" "errors" "fmt" "io" - "os" "regexp" "strings" "time" @@ -52,6 +50,7 @@ type NewPullRequestOptions struct { AssigneeIDs []int64 Reviewers []*user_model.User TeamReviewers []*organization.Team + ProjectID int64 } // NewPullRequest creates new pull request with labels for repository. 
@@ -67,11 +66,13 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { // user should be a collaborator or a member of the organization for base repo canCreate := issue.Poster.IsAdmin || pr.Flow == issues_model.PullRequestFlowAGit + canAssignProject := canCreate if !canCreate { canCreate, err := repo_model.IsOwnerMemberCollaborator(ctx, repo, issue.Poster.ID) if err != nil { return err } + canAssignProject = canCreate if !canCreate { // or user should have write permission in the head repo @@ -85,19 +86,11 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { if !perm.CanWrite(unit.TypeCode) { return issues_model.ErrMustCollaborator } + canAssignProject = perm.CanWrite(unit.TypeProjects) } } - prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr) - if err != nil { - if !git_model.IsErrBranchNotExist(err) { - log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err) - } - return err - } - defer cancel() - - if err := testPullRequestTmpRepoBranchMergeable(ctx, prCtx, pr); err != nil { + if err := checkPullRequestBranchMergeable(ctx, pr); err != nil { return err } @@ -117,9 +110,16 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { assigneeCommentMap[assigneeID] = comment } + if opts.ProjectID > 0 && canAssignProject { + if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, issue.Poster, opts.ProjectID, 0); err != nil { + return err + } + } + pr.Issue = issue issue.PullRequest = pr + var err error if pr.Flow == issues_model.PullRequestFlowGithub { err = PushToBaseRepo(ctx, pr) } else { @@ -160,6 +160,9 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { // Request reviews, these should be requested before other notifications because they will add request reviews record // on database permDoer, err := access_model.GetUserRepoPermission(ctx, repo, issue.Poster) + if err != nil { + return err + } for _, reviewer := range opts.Reviewers { if _, err = 
issue_service.ReviewRequest(ctx, pr.Issue, issue.Poster, &permDoer, reviewer, true); err != nil { return err @@ -293,7 +296,7 @@ func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer pr.BaseBranch = targetBranch // Refresh patch - if err := testPullRequestBranchMergeable(pr); err != nil { + if err := checkPullRequestBranchMergeable(ctx, pr); err != nil { return err } @@ -511,40 +514,23 @@ func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest, } defer cancel() - tmpRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath) - if err != nil { - return false, "", fmt.Errorf("OpenRepository: %w", err) - } - defer tmpRepo.Close() - - // Find the merge-base - mergeBase, _, err = tmpRepo.GetMergeBase("", "base", "tracking") + mergeBase, err = gitrepo.MergeBase(ctx, pr.BaseRepo, pr.BaseBranch, pr.GetGitHeadRefName()) if err != nil { return false, "", fmt.Errorf("GetMergeBase: %w", err) } cmd := gitcmd.NewCommand("diff", "--name-only", "-z").AddDynamicArguments(newCommitID, oldCommitID, mergeBase) - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - return false, mergeBase, fmt.Errorf("unable to open pipe for to run diff: %w", err) - } - stderr := new(bytes.Buffer) + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() if err := cmd.WithDir(prCtx.tmpBasePath). - WithStdout(stdoutWriter). - WithStderr(stderr). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() - defer func() { - _ = stdoutReader.Close() - }() + WithPipelineFunc(func(ctx gitcmd.Context) error { return util.IsEmptyReader(stdoutReader) }). 
- Run(ctx); err != nil { - if err == util.ErrNotEmpty { + RunWithStderr(ctx); err != nil { + if errors.Is(err, util.ErrNotEmpty) { return true, mergeBase, nil } - err = gitcmd.ConcatenateError(err, stderr.String()) log.Error("Unable to run diff on %s %s %s in tempRepo for PR[%d]%s/%s...%s/%s: Error: %v", newCommitID, oldCommitID, mergeBase, @@ -1067,7 +1053,7 @@ func GetPullCommits(ctx context.Context, baseGitRepo *git.Repository, doer *user if pull.HasMerged { baseBranch = pull.MergeBase } - compareInfo, err := git_service.GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo, git.RefNameFromBranch(baseBranch), git.RefName(pull.GetGitHeadRefName()), true, false) + compareInfo, err := git_service.GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo, git.RefNameFromBranch(baseBranch), git.RefName(pull.GetGitHeadRefName()), false, false) if err != nil { return nil, "", err } diff --git a/services/pull/review.go b/services/pull/review.go index 9aeeb4c31d..acbb620e92 100644 --- a/services/pull/review.go +++ b/services/pull/review.go @@ -8,8 +8,6 @@ import ( "context" "errors" "fmt" - "io" - "regexp" "strings" "code.gitea.io/gitea/models/db" @@ -17,6 +15,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" @@ -26,7 +25,15 @@ import ( notify_service "code.gitea.io/gitea/services/notify" ) -var notEnoughLines = regexp.MustCompile(`fatal: file .* has only \d+ lines?`) +func isErrBlameNotFoundOrNotEnoughLines(err error) bool { + stdErr, ok := gitcmd.ErrorAsStderr(err) + if !ok { + return false + } + notFound := strings.HasPrefix(stdErr, "fatal: no such path") + notEnoughLines := strings.HasPrefix(stdErr, "fatal: file ") && strings.Contains(stdErr, " has only ") && strings.Contains(stdErr, " lines?") + return notFound || 
notEnoughLines +} // ErrDismissRequestOnClosedPR represents an error when an user tries to dismiss a review associated to a closed or merged PR. type ErrDismissRequestOnClosedPR struct{} @@ -67,7 +74,7 @@ func lineBlame(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Re func checkInvalidation(ctx context.Context, c *issues_model.Comment, repo *repo_model.Repository, gitRepo *git.Repository, branch string) error { // FIXME differentiate between previous and proposed line commit, err := lineBlame(ctx, repo, gitRepo, branch, c.TreePath, uint(c.UnsignedLine())) - if err != nil && (strings.Contains(err.Error(), "fatal: no such path") || notEnoughLines.MatchString(err.Error())) { + if isErrBlameNotFoundOrNotEnoughLines(err) { c.Invalidated = true return issues_model.UpdateCommentInvalidate(ctx, c) } @@ -251,7 +258,7 @@ func createCodeComment(ctx context.Context, doer *user_model.User, repo *repo_mo commit, err := lineBlame(ctx, pr.BaseRepo, gitRepo, head, treePath, uint(line)) if err == nil { commitID = commit.ID.String() - } else if !(strings.Contains(err.Error(), "exit status 128 - fatal: no such path") || notEnoughLines.MatchString(err.Error())) { + } else if !isErrBlameNotFoundOrNotEnoughLines(err) { return nil, fmt.Errorf("LineBlame[%s, %s, %s, %d]: %w", pr.GetGitHeadRefName(), gitRepo.Path, treePath, line, err) } } @@ -266,22 +273,12 @@ func createCodeComment(ctx context.Context, doer *user_model.User, repo *repo_mo if len(commitID) == 0 { commitID = headCommitID } - reader, writer := io.Pipe() - defer func() { - _ = reader.Close() - _ = writer.Close() - }() - go func() { - if err := git.GetRepoRawDiffForFile(gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, treePath, writer); err != nil { - _ = writer.CloseWithError(fmt.Errorf("GetRawDiffForLine[%s, %s, %s, %s]: %w", gitRepo.Path, pr.MergeBase, headCommitID, treePath, err)) - return - } - _ = writer.Close() - }() - patch, err = git.CutDiffAroundLine(reader, int64((&issues_model.Comment{Line: 
line}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines) + patch, err = git.GetFileDiffCutAroundLine( + gitRepo, pr.MergeBase, headCommitID, treePath, + int64((&issues_model.Comment{Line: line}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines, + ) if err != nil { - log.Error("Error whilst generating patch: %v", err) return nil, err } diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go index 113d1cb49e..d0da870241 100644 --- a/services/pull/temp_repo.go +++ b/services/pull/temp_repo.go @@ -5,11 +5,11 @@ package pull import ( + "bytes" "context" "fmt" "os" "path/filepath" - "strings" git_model "code.gitea.io/gitea/models/git" issues_model "code.gitea.io/gitea/models/issues" @@ -23,17 +23,16 @@ import ( // Temporary repos created here use standard branch names to help simplify // merging code const ( - baseBranch = "base" // equivalent to pr.BaseBranch - trackingBranch = "tracking" // equivalent to pr.HeadBranch - stagingBranch = "staging" // this is used for a working branch + tmpRepoBaseBranch = "base" // equivalent to pr.BaseBranch + tmpRepoTrackingBranch = "tracking" // equivalent to pr.HeadBranch + tmpRepoStagingBranch = "staging" // this is used for a working branch ) type prTmpRepoContext struct { context.Context tmpBasePath string pr *issues_model.PullRequest - outbuf *strings.Builder // we keep these around to help reduce needless buffer recreation, - errbuf *strings.Builder // any use should be preceded by a Reset and preferably after use + outbuf *bytes.Buffer // we keep these around to help reduce needless buffer recreation, any use should be preceded by a Reset and preferably after use } // PrepareGitCmd prepares a git command with the correct directory, environment, and output buffers @@ -41,10 +40,7 @@ type prTmpRepoContext struct { // Do NOT use it with gitcmd.RunStd*() functions, otherwise it will panic func (ctx *prTmpRepoContext) PrepareGitCmd(cmd *gitcmd.Command) *gitcmd.Command { ctx.outbuf.Reset() - 
ctx.errbuf.Reset() - return cmd.WithDir(ctx.tmpBasePath). - WithStdout(ctx.outbuf). - WithStderr(ctx.errbuf) + return cmd.WithDir(ctx.tmpBasePath).WithStdoutBuffer(ctx.outbuf) } // createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch @@ -86,8 +82,7 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) Context: ctx, tmpBasePath: tmpBasePath, pr: pr, - outbuf: &strings.Builder{}, - errbuf: &strings.Builder{}, + outbuf: &bytes.Buffer{}, } baseRepoPath := pr.BaseRepo.RepoPath() @@ -100,7 +95,6 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) } remoteRepoName := "head_repo" - baseBranch := "base" fetchArgs := gitcmd.TrustedCmdArgs{"--no-tags"} if git.DefaultFeatures().CheckVersionAtLeast("2.25.0") { @@ -133,25 +127,25 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) } if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath)). - Run(ctx); err != nil { - log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + RunWithStderr(ctx); err != nil { + log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), err.Stderr()) cancel() - return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String()) + return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), err.Stderr()) } if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("fetch", "origin").AddArguments(fetchArgs...). 
- AddDashesAndList(git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+baseBranch, git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+"original_"+baseBranch)). - Run(ctx); err != nil { - log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + AddDashesAndList(git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+tmpRepoBaseBranch, git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+"original_"+tmpRepoBaseBranch)). + RunWithStderr(ctx); err != nil { + log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), err.Stderr()) cancel() - return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), err.Stderr()) } - if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch)). - Run(ctx); err != nil { - log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+tmpRepoBaseBranch)). 
+ RunWithStderr(ctx); err != nil { + log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), err.Stderr()) cancel() - return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), prCtx.errbuf.String()) + return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), err.Stderr()) } if err := addCacheRepo(tmpBasePath, headRepoPath); err != nil { @@ -161,13 +155,12 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) } if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath)). - Run(ctx); err != nil { - log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + RunWithStderr(ctx); err != nil { + log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), err.Stderr()) cancel() - return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String()) + return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), err.Stderr()) } - trackingBranch := "tracking" objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName) // Fetch head branch var headBranch string @@ -178,19 +171,17 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) } else { headBranch = pr.GetGitHeadRefName() } - if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch)). 
- Run(ctx); err != nil { + if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+tmpRepoTrackingBranch)). + RunWithStderr(ctx); err != nil { cancel() if exist, _ := git_model.IsBranchExist(ctx, pr.HeadRepo.ID, pr.HeadBranch); !exist { return nil, nil, git_model.ErrBranchNotExist{ BranchName: pr.HeadBranch, } } - log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String()) - return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String()) + log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), err.Stderr()) + return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), err.Stderr()) } prCtx.outbuf.Reset() - prCtx.errbuf.Reset() - return prCtx, cancel, nil } diff --git a/services/pull/update_rebase.go b/services/pull/update_rebase.go index 6a70c03467..6b90e5d776 100644 --- a/services/pull/update_rebase.go +++ b/services/pull/update_rebase.go @@ -28,7 +28,7 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques defer cancel() // Determine the old merge-base before the rebase - we use this for LFS push later on - oldMergeBase, _, _ := gitcmd.NewCommand("merge-base").AddDashesAndList(baseBranch, trackingBranch). + oldMergeBase, _, _ := gitcmd.NewCommand("merge-base").AddDashesAndList(tmpRepoBaseBranch, tmpRepoTrackingBranch). 
WithDir(mergeCtx.tmpBasePath).RunStdString(ctx) oldMergeBase = strings.TrimSpace(oldMergeBase) @@ -42,11 +42,11 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques // It's questionable about where this should go - either after or before the push // I think in the interests of data safety - failures to push to the lfs should prevent // the push as you can always re-rebase. - if err := LFSPush(ctx, mergeCtx.tmpBasePath, baseBranch, oldMergeBase, &issues_model.PullRequest{ + if err := LFSPush(ctx, mergeCtx.tmpBasePath, tmpRepoBaseBranch, oldMergeBase, &issues_model.PullRequest{ HeadRepoID: pr.BaseRepoID, BaseRepoID: pr.HeadRepoID, }); err != nil { - log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", baseBranch, oldMergeBase, pr, err) + log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", tmpRepoBaseBranch, oldMergeBase, pr, err) return err } } @@ -65,13 +65,12 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques } pushCmd := gitcmd.NewCommand("push", "-f", "head_repo"). - AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch) + AddDynamicArguments(tmpRepoStagingBranch + ":" + git.BranchPrefix + pr.HeadBranch) // Push back to the head repository. // TODO: this cause an api call to "/api/internal/hook/post-receive/...", // that prevents us from doint the whole merge in one db transaction mergeCtx.outbuf.Reset() - mergeCtx.errbuf.Reset() if err := pushCmd. WithEnv(repo_module.FullPushingEnvironment( @@ -83,28 +82,25 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques pr.Index, )). WithDir(mergeCtx.tmpBasePath). - WithStdout(mergeCtx.outbuf). - WithStderr(mergeCtx.errbuf). - Run(ctx); err != nil { - if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") { + WithStdoutBuffer(mergeCtx.outbuf). 
+ RunWithStderr(ctx); err != nil { + if strings.Contains(err.Stderr(), "non-fast-forward") { return &git.ErrPushOutOfDate{ StdOut: mergeCtx.outbuf.String(), - StdErr: mergeCtx.errbuf.String(), + StdErr: err.Stderr(), Err: err, } - } else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") { + } else if strings.Contains(err.Stderr(), "! [remote rejected]") { err := &git.ErrPushRejected{ StdOut: mergeCtx.outbuf.String(), - StdErr: mergeCtx.errbuf.String(), + StdErr: err.Stderr(), Err: err, } err.GenerateMessage() return err } - return fmt.Errorf("git push: %s", mergeCtx.errbuf.String()) + return fmt.Errorf("git push: %s", err.Stderr()) } mergeCtx.outbuf.Reset() - mergeCtx.errbuf.Reset() - return nil } diff --git a/services/release/release.go b/services/release/release.go index a0d3736b44..a482501164 100644 --- a/services/release/release.go +++ b/services/release/release.go @@ -371,7 +371,7 @@ func DeleteReleaseByID(ctx context.Context, repo *repo_model.Repository, rel *re } } - if stdout, err := gitrepo.RunCmdString(ctx, repo, + if stdout, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("tag", "-d").AddDashesAndList(rel.TagName), ); err != nil && !strings.Contains(err.Error(), "not found") { log.Error("DeleteReleaseByID (git tag -d): %d in %v Failed:\nStdout: %s\nError: %v", rel.ID, repo, stdout, err) diff --git a/services/repository/adopt.go b/services/repository/adopt.go index 8d8e59b053..18d70d1bee 100644 --- a/services/repository/adopt.go +++ b/services/repository/adopt.go @@ -23,7 +23,6 @@ import ( "code.gitea.io/gitea/modules/optional" repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" notify_service "code.gitea.io/gitea/services/notify" ) @@ -214,13 +213,13 @@ func DeleteUnadoptedRepository(ctx context.Context, doer, u *user_model.User, re return err } - repoPath := repo_model.RepoPath(u.Name, repoName) - isExist, err := util.IsExist(repoPath) + 
relativePath := repo_model.RelativePath(u.Name, repoName) + exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(relativePath)) if err != nil { - log.Error("Unable to check if %s exists. Error: %v", repoPath, err) + log.Error("Unable to check if %s exists. Error: %v", relativePath, err) return err } - if !isExist { + if !exist { return repo_model.ErrRepoNotExist{ OwnerName: u.Name, Name: repoName, @@ -236,7 +235,7 @@ func DeleteUnadoptedRepository(ctx context.Context, doer, u *user_model.User, re } } - return util.RemoveAll(repoPath) + return gitrepo.DeleteRepository(ctx, repo_model.StorageRepo(relativePath)) } type unadoptedRepositories struct { diff --git a/services/repository/archiver/archiver.go b/services/repository/archiver/archiver.go index b2ca74871c..bfd941ebf6 100644 --- a/services/repository/archiver/archiver.go +++ b/services/repository/archiver/archiver.go @@ -8,7 +8,6 @@ import ( "errors" "fmt" "io" - "net/http" "os" "strings" "time" @@ -16,6 +15,7 @@ import ( "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/httplib" @@ -24,6 +24,7 @@ import ( "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/util" gitea_context "code.gitea.io/gitea/services/context" ) @@ -36,58 +37,31 @@ type ArchiveRequest struct { Repo *repo_model.Repository Type repo_model.ArchiveType CommitID string + Paths []string archiveRefShortName string // the ref short name to download the archive, for example: "master", "v1.0.0", "commit id" } -// ErrUnknownArchiveFormat request archive format is not supported -type ErrUnknownArchiveFormat struct { - RequestNameType string -} - -// Error implements error -func (err ErrUnknownArchiveFormat) Error() string { - return "unknown 
format: " + err.RequestNameType -} - -// Is implements error -func (ErrUnknownArchiveFormat) Is(err error) bool { - _, ok := err.(ErrUnknownArchiveFormat) - return ok -} - -// RepoRefNotFoundError is returned when a requested reference (commit, tag) was not found. -type RepoRefNotFoundError struct { - RefShortName string -} - -// Error implements error. -func (e RepoRefNotFoundError) Error() string { - return "unrecognized repository reference: " + e.RefShortName -} - -func (e RepoRefNotFoundError) Is(err error) bool { - _, ok := err.(RepoRefNotFoundError) - return ok -} - // NewRequest creates an archival request, based on the URI. The // resulting ArchiveRequest is suitable for being passed to Await() // if it's determined that the request still needs to be satisfied. -func NewRequest(repo *repo_model.Repository, gitRepo *git.Repository, archiveRefExt string) (*ArchiveRequest, error) { +func NewRequest(repo *repo_model.Repository, gitRepo *git.Repository, archiveRefExt string, paths []string) (*ArchiveRequest, error) { // here the archiveRefShortName is not a clear ref, it could be a tag, branch or commit id archiveRefShortName, archiveType := repo_model.SplitArchiveNameType(archiveRefExt) if archiveType == repo_model.ArchiveUnknown { - return nil, ErrUnknownArchiveFormat{archiveRefExt} + return nil, util.NewInvalidArgumentErrorf("unknown format: %s", archiveRefExt) + } + if archiveType == repo_model.ArchiveBundle && len(paths) != 0 { + return nil, util.NewInvalidArgumentErrorf("cannot specify paths when requesting a bundle") } // Get corresponding commit. 
commitID, err := gitRepo.ConvertToGitID(archiveRefShortName) if err != nil { - return nil, RepoRefNotFoundError{RefShortName: archiveRefShortName} + return nil, util.NewNotExistErrorf("unrecognized repository reference: %s", archiveRefShortName) } - r := &ArchiveRequest{Repo: repo, archiveRefShortName: archiveRefShortName, Type: archiveType} + r := &ArchiveRequest{Repo: repo, archiveRefShortName: archiveRefShortName, Type: archiveType, Paths: paths} r.CommitID = commitID.String() return r, nil } @@ -159,6 +133,7 @@ func (aReq *ArchiveRequest) Stream(ctx context.Context, w io.Writer) error { w, setting.Repository.PrefixArchiveFiles, aReq.CommitID, + aReq.Paths, ) } @@ -339,7 +314,7 @@ func DeleteRepositoryArchives(ctx context.Context) error { return storage.Clean(storage.RepoArchives) } -func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) { +func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) error { // Add nix format link header so tarballs lock correctly: // https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md ctx.Resp.Header().Add("Link", fmt.Sprintf(`<%s/archive/%s.%s?rev=%s>; rel="immutable"`, @@ -350,20 +325,22 @@ func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) { )) downloadName := archiveReq.Repo.Name + "-" + archiveReq.GetArchiveName() - if setting.Repository.StreamArchives { + if setting.Repository.StreamArchives || len(archiveReq.Paths) > 0 { + // the header must be set before starting streaming even an error would occur, + // because errors may happen in git command and such cases aren't in our control. 
httplib.ServeSetHeaders(ctx.Resp, &httplib.ServeHeaderOptions{Filename: downloadName}) if err := archiveReq.Stream(ctx, ctx.Resp); err != nil && !ctx.Written() { - log.Error("Archive %v streaming failed: %v", archiveReq, err) - ctx.HTTPError(http.StatusInternalServerError) + if gitcmd.StderrHasPrefix(err, "fatal: pathspec") { + return util.NewInvalidArgumentErrorf("path doesn't exist or is invalid") + } + return fmt.Errorf("archive repo %s: failed to stream: %w", archiveReq.Repo.FullName(), err) } - return + return nil } archiver, err := archiveReq.Await(ctx) if err != nil { - log.Error("Archive %v await failed: %v", archiveReq, err) - ctx.HTTPError(http.StatusInternalServerError) - return + return fmt.Errorf("archive repo %s: failed to await: %w", archiveReq.Repo.FullName(), err) } rPath := archiver.RelativePath() @@ -372,15 +349,13 @@ func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) { u, err := storage.RepoArchives.URL(rPath, downloadName, ctx.Req.Method, nil) if u != nil && err == nil { ctx.Redirect(u.String()) - return + return nil } } fr, err := storage.RepoArchives.Open(rPath) if err != nil { - log.Error("Archive %v open file failed: %v", archiveReq, err) - ctx.HTTPError(http.StatusInternalServerError) - return + return fmt.Errorf("archive repo %s: failed to open archive file: %w", archiveReq.Repo.FullName(), err) } defer fr.Close() @@ -388,4 +363,5 @@ func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) { Filename: downloadName, LastModified: archiver.CreatedUnix.AsLocalTime(), }) + return nil } diff --git a/services/repository/archiver/archiver_test.go b/services/repository/archiver/archiver_test.go index ae5232f5a1..6cc1856a9c 100644 --- a/services/repository/archiver/archiver_test.go +++ b/services/repository/archiver/archiver_test.go @@ -8,11 +8,13 @@ import ( "time" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/contexttest" _ 
"code.gitea.io/gitea/models/actions" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestMain(m *testing.M) { @@ -29,47 +31,47 @@ func TestArchive_Basic(t *testing.T) { contexttest.LoadGitRepo(t, ctx) defer ctx.Repo.GitRepo.Close() - bogusReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip") + bogusReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip", nil) assert.NoError(t, err) assert.NotNil(t, bogusReq) assert.Equal(t, firstCommit+".zip", bogusReq.GetArchiveName()) // Check a series of bogus requests. // Step 1, valid commit with a bad extension. - bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".unknown") + bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".unknown", nil) assert.Error(t, err) assert.Nil(t, bogusReq) // Step 2, missing commit. - bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "dbffff.zip") + bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "dbffff.zip", nil) assert.Error(t, err) assert.Nil(t, bogusReq) // Step 3, doesn't look like branch/tag/commit. 
- bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "db.zip") + bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "db.zip", nil) assert.Error(t, err) assert.Nil(t, bogusReq) - bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "master.zip") + bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "master.zip", nil) assert.NoError(t, err) assert.NotNil(t, bogusReq) assert.Equal(t, "master.zip", bogusReq.GetArchiveName()) - bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "test/archive.zip") + bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "test/archive.zip", nil) assert.NoError(t, err) assert.NotNil(t, bogusReq) assert.Equal(t, "test-archive.zip", bogusReq.GetArchiveName()) // Now two valid requests, firstCommit with valid extensions. - zipReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip") + zipReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip", nil) assert.NoError(t, err) assert.NotNil(t, zipReq) - tgzReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".tar.gz") + tgzReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".tar.gz", nil) assert.NoError(t, err) assert.NotNil(t, tgzReq) - secondReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, secondCommit+".bundle") + secondReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, secondCommit+".bundle", nil) assert.NoError(t, err) assert.NotNil(t, secondReq) @@ -89,7 +91,7 @@ func TestArchive_Basic(t *testing.T) { // Sleep two seconds to make sure the queue doesn't change. time.Sleep(2 * time.Second) - zipReq2, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip") + zipReq2, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip", nil) assert.NoError(t, err) // This zipReq should match what's sitting in the queue, as we haven't // let it release yet. 
From the consumer's point of view, this looks like @@ -104,12 +106,12 @@ func TestArchive_Basic(t *testing.T) { // Now we'll submit a request and TimedWaitForCompletion twice, before and // after we release it. We should trigger both the timeout and non-timeout // cases. - timedReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, secondCommit+".tar.gz") + timedReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, secondCommit+".tar.gz", nil) assert.NoError(t, err) assert.NotNil(t, timedReq) doArchive(t.Context(), timedReq) - zipReq2, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip") + zipReq2, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip", nil) assert.NoError(t, err) // Now, we're guaranteed to have released the original zipReq from the queue. // Ensure that we don't get handed back the released entry somehow, but they @@ -124,9 +126,13 @@ func TestArchive_Basic(t *testing.T) { // Ideally, the extension would match what we originally requested. 
assert.NotEqual(t, zipReq.GetArchiveName(), tgzReq.GetArchiveName()) assert.NotEqual(t, zipReq.GetArchiveName(), secondReq.GetArchiveName()) -} -func TestErrUnknownArchiveFormat(t *testing.T) { - err := ErrUnknownArchiveFormat{RequestNameType: "xxx"} - assert.ErrorIs(t, err, ErrUnknownArchiveFormat{}) + t.Run("BadPath", func(t *testing.T) { + badRequest, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".tar.gz", []string{"not-a-path"}) + require.NoError(t, err) + err = ServeRepoArchive(ctx.Base, badRequest) + require.Error(t, err) + assert.ErrorIs(t, err, util.ErrInvalidArgument) + assert.ErrorContains(t, err, "path doesn't exist or is invalid") + }) } diff --git a/services/repository/branch.go b/services/repository/branch.go index 142073eabe..a580208af6 100644 --- a/services/repository/branch.go +++ b/services/repository/branch.go @@ -264,12 +264,12 @@ func checkBranchName(ctx context.Context, repo *repo_model.Repository, name stri return git_model.ErrBranchAlreadyExists{ BranchName: name, } - // If branchRefName like a/b but we want to create a branch named a then we have a conflict + // If branchRefName like "a/b" but we want to create a branch named a then we have a conflict case strings.HasPrefix(branchRefName, name+"/"): return git_model.ErrBranchNameConflict{ BranchName: branchRefName, } - // Conversely if branchRefName like a but we want to create a branch named a/b then we also have a conflict + // Conversely if branchRefName like "a" but we want to create a branch named "a/b" then we also have a conflict case strings.HasPrefix(name, branchRefName+"/"): return git_model.ErrBranchNameConflict{ BranchName: branchRefName, @@ -281,7 +281,6 @@ func checkBranchName(ctx context.Context, repo *repo_model.Repository, name stri } return nil }) - return err } @@ -443,6 +442,15 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, doer *user_m } } + // We also need to check if "to" matches with a protected branch rule. 
+ rule, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, to) + if err != nil { + return "", err + } + if rule != nil && !rule.CanUserPush(ctx, doer) { + return "", git_model.ErrBranchIsProtected + } + if err := git_model.RenameBranch(ctx, repo, from, to, func(ctx context.Context, isDefault bool) error { err2 := gitrepo.RenameBranch(ctx, repo, from, to) if err2 != nil { diff --git a/services/repository/check.go b/services/repository/check.go index 57d627c63d..e521af94e1 100644 --- a/services/repository/check.go +++ b/services/repository/check.go @@ -88,7 +88,7 @@ func GitGcRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Du command := gitcmd.NewCommand("gc").AddArguments(args...) var stdout string var err error - stdout, err = gitrepo.RunCmdString(ctx, repo, command) + stdout, _, err = gitrepo.RunCmdString(ctx, repo, command) if err != nil { log.Error("Repository garbage collection failed for %-v. Stdout: %s\nError: %v", repo, stdout, err) desc := fmt.Sprintf("Repository garbage collection failed for %s. 
Stdout: %s\nError: %v", repo.RelativePath(), stdout, err) diff --git a/services/repository/collaboration.go b/services/repository/collaboration.go index 53b3c2e203..cb56d90ae2 100644 --- a/services/repository/collaboration.go +++ b/services/repository/collaboration.go @@ -120,6 +120,11 @@ func ReconsiderWatches(ctx context.Context, repo *repo_model.Repository, user *u return err } + // Remove all stopwatches a user has running in the repository + if err := issues_model.RemoveStopwatchesByRepoID(ctx, user.ID, repo.ID); err != nil { + return err + } + // Remove all IssueWatches a user has subscribed to in the repository return issues_model.RemoveIssueWatchersByRepoID(ctx, user.ID, repo.ID) } diff --git a/services/repository/collaboration_test.go b/services/repository/collaboration_test.go index 5e33c50366..56d9d72e0a 100644 --- a/services/repository/collaboration_test.go +++ b/services/repository/collaboration_test.go @@ -6,7 +6,10 @@ package repository import ( "testing" + "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/perm" + access_model "code.gitea.io/gitea/models/perm/access" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -32,8 +35,8 @@ func TestRepository_AddCollaborator(t *testing.T) { func TestRepository_DeleteCollaboration(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}) - repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 4}) + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15}) + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 22}) assert.NoError(t, repo.LoadOwner(t.Context())) assert.NoError(t, DeleteCollaboration(t.Context(), repo, user)) @@ -44,3 +47,50 @@ func TestRepository_DeleteCollaboration(t *testing.T) { unittest.CheckConsistencyFor(t, 
&repo_model.Repository{ID: repo.ID}) } + +func TestRepository_DeleteCollaborationRemovesSubscriptionsAndStopwatches(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + ctx := t.Context() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15}) + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 22}) + assert.NoError(t, repo.LoadOwner(ctx)) + assert.NoError(t, repo_model.WatchRepo(ctx, user, repo, true)) + + hasAccess, err := access_model.HasAnyUnitAccess(ctx, user.ID, repo) + assert.NoError(t, err) + assert.True(t, hasAccess) + + issueCount, err := db.GetEngine(ctx).Where("repo_id=?", repo.ID).Count(new(issues_model.Issue)) + assert.NoError(t, err) + tempIssue := &issues_model.Issue{ + RepoID: repo.ID, + Index: issueCount + 1, + PosterID: repo.OwnerID, + Title: "temp issue", + Content: "temp", + } + assert.NoError(t, db.Insert(ctx, tempIssue)) + assert.NoError(t, issues_model.CreateOrUpdateIssueWatch(ctx, user.ID, tempIssue.ID, true)) + ok, err := issues_model.CreateIssueStopwatch(ctx, user, tempIssue) + assert.NoError(t, err) + assert.True(t, ok) + + assert.NoError(t, DeleteCollaboration(ctx, repo, user)) + + hasAccess, err = access_model.HasAnyUnitAccess(ctx, user.ID, repo) + assert.NoError(t, err) + assert.False(t, hasAccess) + + watch, err := repo_model.GetWatch(ctx, user.ID, repo.ID) + assert.NoError(t, err) + assert.False(t, repo_model.IsWatchMode(watch.Mode)) + + _, exists, err := issues_model.GetIssueWatch(ctx, user.ID, tempIssue.ID) + assert.NoError(t, err) + assert.False(t, exists) + + hasStopwatch, _, _, err := issues_model.HasUserStopwatch(ctx, user.ID) + assert.NoError(t, err) + assert.False(t, hasStopwatch) +} diff --git a/services/repository/contributors_graph.go b/services/repository/contributors_graph.go index 2c5c7c604f..c9cc0dcd0b 100644 --- a/services/repository/contributors_graph.go +++ b/services/repository/contributors_graph.go @@ -8,7 +8,6 @@ import ( "context" "errors" "fmt" - "os" 
"strconv" "strings" "sync" @@ -117,26 +116,17 @@ func getExtendedCommitStats(repo *git.Repository, revision string /*, limit int if err != nil { return nil, err } - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - return nil, err - } - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() gitCmd := gitcmd.NewCommand("log", "--shortstat", "--no-merges", "--pretty=format:---%n%aN%n%aE%n%as", "--reverse") // AddOptionFormat("--max-count=%d", limit) gitCmd.AddDynamicArguments(baseCommit.ID.String()) + stdoutReader, stdoutReaderClose := gitCmd.MakeStdoutPipe() + defer stdoutReaderClose() + var extendedCommitStats []*ExtendedCommitStats - stderr := new(strings.Builder) err = gitCmd.WithDir(repo.Path). - WithStdout(stdoutWriter). - WithStderr(stderr). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() + WithPipelineFunc(func(ctx gitcmd.Context) error { scanner := bufio.NewScanner(stdoutReader) for scanner.Scan() { @@ -188,12 +178,11 @@ func getExtendedCommitStats(repo *git.Repository, revision string /*, limit int } extendedCommitStats = append(extendedCommitStats, res) } - _ = stdoutReader.Close() return nil }). 
- Run(repo.Ctx) + RunWithStderr(repo.Ctx) if err != nil { - return nil, fmt.Errorf("Failed to get ContributorsCommitStats for repository.\nError: %w\nStderr: %s", err, stderr) + return nil, fmt.Errorf("ContributorsCommitStats: %w", err) } return extendedCommitStats, nil diff --git a/services/repository/create.go b/services/repository/create.go index 7439fc8f08..bfac83419d 100644 --- a/services/repository/create.go +++ b/services/repository/create.go @@ -315,7 +315,7 @@ func CreateRepositoryDirectly(ctx context.Context, doer, owner *user_model.User, licenses = append(licenses, opts.License) var stdout string - stdout, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse", "HEAD")) + stdout, _, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse", "HEAD")) if err != nil { log.Error("CreateRepository(git rev-parse HEAD) in %v: Stdout: %s\nError: %v", repo, stdout, err) return nil, fmt.Errorf("CreateRepository(git rev-parse HEAD): %w", err) @@ -476,7 +476,7 @@ func updateGitRepoAfterCreate(ctx context.Context, repo *repo_model.Repository) return fmt.Errorf("checkDaemonExportOK: %w", err) } - if stdout, err := gitrepo.RunCmdString(ctx, repo, + if stdout, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("update-server-info")); err != nil { log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) return fmt.Errorf("CreateRepository(git update-server-info): %w", err) diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go index ee567ecd37..10f923f2e1 100644 --- a/services/repository/files/patch.go +++ b/services/repository/files/patch.go @@ -164,20 +164,15 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user } } - stdout := &strings.Builder{} - stderr := &strings.Builder{} - cmdApply := gitcmd.NewCommand("apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary") if 
git.DefaultFeatures().CheckVersionAtLeast("2.32") { cmdApply.AddArguments("-3") } if err := cmdApply.WithDir(t.basePath). - WithStdout(stdout). - WithStderr(stderr). - WithStdin(strings.NewReader(opts.Content)). - Run(ctx); err != nil { - return nil, fmt.Errorf("Error: Stdout: %s\nStderr: %s\nErr: %w", stdout.String(), stderr.String(), err) + WithStdinBytes([]byte(opts.Content)). + RunWithStderr(ctx); err != nil { + return nil, fmt.Errorf("git apply error: %w", err) } // Now write the tree diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go index b605236c03..63f4f06d25 100644 --- a/services/repository/files/temp_repo.go +++ b/services/repository/files/temp_repo.go @@ -98,7 +98,7 @@ func (t *TemporaryUploadRepository) Init(ctx context.Context, objectFormatName s // SetDefaultIndex sets the git index to our HEAD func (t *TemporaryUploadRepository) SetDefaultIndex(ctx context.Context) error { - if _, _, err := gitcmd.NewCommand("read-tree", "HEAD").WithDir(t.basePath).RunStdString(ctx); err != nil { + if err := gitcmd.NewCommand("read-tree", "HEAD").WithDir(t.basePath).RunWithStderr(ctx); err != nil { return fmt.Errorf("SetDefaultIndex: %w", err) } return nil @@ -106,7 +106,7 @@ func (t *TemporaryUploadRepository) SetDefaultIndex(ctx context.Context) error { // RefreshIndex looks at the current index and checks to see if merges or updates are needed by checking stat() information. 
func (t *TemporaryUploadRepository) RefreshIndex(ctx context.Context) error { - if _, _, err := gitcmd.NewCommand("update-index", "--refresh").WithDir(t.basePath).RunStdString(ctx); err != nil { + if err := gitcmd.NewCommand("update-index", "--refresh").WithDir(t.basePath).RunWithStderr(ctx); err != nil { return fmt.Errorf("RefreshIndex: %w", err) } return nil @@ -115,16 +115,11 @@ func (t *TemporaryUploadRepository) RefreshIndex(ctx context.Context) error { // LsFiles checks if the given filename arguments are in the index func (t *TemporaryUploadRepository) LsFiles(ctx context.Context, filenames ...string) ([]string, error) { stdOut := new(bytes.Buffer) - stdErr := new(bytes.Buffer) - if err := gitcmd.NewCommand("ls-files", "-z").AddDashesAndList(filenames...). WithDir(t.basePath). - WithStdout(stdOut). - WithStderr(stdErr). - Run(ctx); err != nil { - log.Error("Unable to run git ls-files for temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String()) - err = fmt.Errorf("Unable to run git ls-files for temporary repo of: %s Error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String()) - return nil, err + WithStdoutBuffer(stdOut). + RunWithStderr(ctx); err != nil { + return nil, fmt.Errorf("unable to run git ls-files for temporary repo of: %s, error: %w", t.repo.FullName(), err) } fileList := make([]string, 0, len(filenames)) @@ -149,8 +144,6 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, fi if err != nil { return fmt.Errorf("unable to get object format for temporary repo: %q, error: %w", t.repo.FullName(), err) } - stdOut := new(bytes.Buffer) - stdErr := new(bytes.Buffer) stdIn := new(bytes.Buffer) for _, file := range filenames { if file != "" { @@ -162,11 +155,9 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, fi if err := gitcmd.NewCommand("update-index", "--remove", "-z", "--index-info"). 
WithDir(t.basePath). - WithStdout(stdOut). - WithStderr(stdErr). - WithStdin(stdIn). - Run(ctx); err != nil { - return fmt.Errorf("unable to update-index for temporary repo: %q, error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String()) + WithStdinBytes(stdIn.Bytes()). + RunWithStderr(ctx); err != nil { + return fmt.Errorf("unable to update-index for temporary repo: %q, error: %w", t.repo.FullName(), err) } return nil } @@ -174,16 +165,12 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, fi // HashObjectAndWrite writes the provided content to the object db and returns its hash func (t *TemporaryUploadRepository) HashObjectAndWrite(ctx context.Context, content io.Reader) (string, error) { stdOut := new(bytes.Buffer) - stdErr := new(bytes.Buffer) - if err := gitcmd.NewCommand("hash-object", "-w", "--stdin"). WithDir(t.basePath). - WithStdout(stdOut). - WithStderr(stdErr). - WithStdin(content). - Run(ctx); err != nil { - log.Error("Unable to hash-object to temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String()) - return "", fmt.Errorf("Unable to hash-object to temporary repo: %s Error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String()) + WithStdoutBuffer(stdOut). + WithStdinCopy(content). + RunWithStderr(ctx); err != nil { + return "", fmt.Errorf("unable to hash-object to temporary repo: %s, error: %w", t.repo.FullName(), err) } return strings.TrimSpace(stdOut.String()), nil @@ -191,17 +178,15 @@ func (t *TemporaryUploadRepository) HashObjectAndWrite(ctx context.Context, cont // AddObjectToIndex adds the provided object hash to the index with the provided mode and path func (t *TemporaryUploadRepository) AddObjectToIndex(ctx context.Context, mode, objectHash, objectPath string) error { - if _, _, err := gitcmd.NewCommand("update-index", "--add", "--replace", "--cacheinfo"). 
- AddDynamicArguments(mode, objectHash, objectPath).WithDir(t.basePath).RunStdString(ctx); err != nil { - stderr := err.Error() - if matched, _ := regexp.MatchString(".*Invalid path '.*", stderr); matched { + if err := gitcmd.NewCommand("update-index", "--add", "--replace", "--cacheinfo"). + AddDynamicArguments(mode, objectHash, objectPath).WithDir(t.basePath).RunWithStderr(ctx); err != nil { + if matched, _ := regexp.MatchString(".*Invalid path '.*", err.Stderr()); matched { return ErrFilePathInvalid{ Message: objectPath, Path: objectPath, } } - log.Error("Unable to add object to index: %s %s %s in temporary repo %s(%s) Error: %v", mode, objectHash, objectPath, t.repo.FullName(), t.basePath, err) - return fmt.Errorf("Unable to add object to index at %s in temporary repo %s Error: %w", objectPath, t.repo.FullName(), err) + return fmt.Errorf("unable to add object to index at %s in temporary repo %s, error: %w", objectPath, t.repo.FullName(), err) } return nil } @@ -342,18 +327,13 @@ func (t *TemporaryUploadRepository) CommitTree(ctx context.Context, opts *Commit ) stdout := new(bytes.Buffer) - stderr := new(bytes.Buffer) if err := cmdCommitTree. WithEnv(env). WithDir(t.basePath). - WithStdout(stdout). - WithStderr(stderr). - WithStdin(messageBytes). - Run(ctx); err != nil { - log.Error("Unable to commit-tree in temporary repo: %s (%s) Error: %v\nStdout: %s\nStderr: %s", - t.repo.FullName(), t.basePath, err, stdout, stderr) - return "", fmt.Errorf("Unable to commit-tree in temporary repo: %s Error: %w\nStdout: %s\nStderr: %s", - t.repo.FullName(), err, stdout, stderr) + WithStdoutBuffer(stdout). + WithStdinBytes(messageBytes.Bytes()). 
+ RunWithStderr(ctx); err != nil { + return "", fmt.Errorf("unable to commit-tree in temporary repo: %s Error: %w", t.repo.FullName(), err) } return strings.TrimSpace(stdout.String()), nil } @@ -382,36 +362,24 @@ func (t *TemporaryUploadRepository) Push(ctx context.Context, doer *user_model.U // DiffIndex returns a Diff of the current index to the head func (t *TemporaryUploadRepository) DiffIndex(ctx context.Context) (*gitdiff.Diff, error) { - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - return nil, fmt.Errorf("unable to open stdout pipe: %w", err) - } - defer func() { - _ = stdoutReader.Close() - _ = stdoutWriter.Close() - }() - stderr := new(bytes.Buffer) var diff *gitdiff.Diff - err = gitcmd.NewCommand("diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD"). - WithTimeout(30 * time.Second). + cmd := gitcmd.NewCommand("diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD") + stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe() + defer stdoutReaderClose() + + err := cmd.WithTimeout(30 * time.Second). WithDir(t.basePath). - WithStdout(stdoutWriter). - WithStderr(stderr). - WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() - defer cancel() + WithPipelineFunc(func(ctx gitcmd.Context) error { var diffErr error diff, diffErr = gitdiff.ParsePatch(ctx, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, stdoutReader, "") - _ = stdoutReader.Close() if diffErr != nil { // if the diffErr is not nil, it will be returned as the error of "Run()" return fmt.Errorf("ParsePatch: %w", diffErr) } return nil }). - Run(ctx) - if err != nil && !git.IsErrCanceledOrKilled(err) { - log.Error("Unable to diff-index in temporary repo %s (%s). 
Error: %v\nStderr: %s", t.repo.FullName(), t.basePath, err, stderr) + RunWithStderr(ctx) + if err != nil && !gitcmd.IsErrorCanceledOrKilled(err) { return nil, fmt.Errorf("unable to run diff-index pipeline in temporary repo: %w", err) } diff --git a/services/repository/generate.go b/services/repository/generate.go index b2913cd110..bc37bc7bfe 100644 --- a/services/repository/generate.go +++ b/services/repository/generate.go @@ -21,7 +21,6 @@ import ( git_model "code.gitea.io/gitea/models/git" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/glob" "code.gitea.io/gitea/modules/log" @@ -216,19 +215,6 @@ func processGiteaTemplateFile(ctx context.Context, tmpDir string, templateRepo, } func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *repo_model.Repository, tmpDir string) error { - commitTimeStr := time.Now().Format(time.RFC3339) - authorSig := repo.Owner.NewGitSig() - - // Because this may call hooks we should pass in the environment - env := append(os.Environ(), - "GIT_AUTHOR_NAME="+authorSig.Name, - "GIT_AUTHOR_EMAIL="+authorSig.Email, - "GIT_AUTHOR_DATE="+commitTimeStr, - "GIT_COMMITTER_NAME="+authorSig.Name, - "GIT_COMMITTER_EMAIL="+authorSig.Email, - "GIT_COMMITTER_DATE="+commitTimeStr, - ) - // Clone to temporary path and do the init commit. if err := gitrepo.CloneRepoToLocal(ctx, templateRepo, tmpDir, git.CloneRepoOptions{ Depth: 1, @@ -264,15 +250,6 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r return err } - if stdout, _, err := gitcmd.NewCommand("remote", "add", "origin"). - AddDynamicArguments(repo.RepoPath()). - WithDir(tmpDir). - WithEnv(env). 
- RunStdString(ctx); err != nil { - log.Error("Unable to add %v as remote origin to temporary repo to %s: stdout %s\nError: %v", repo, tmpDir, stdout, err) - return fmt.Errorf("git remote add: %w", err) - } - if err = git.AddTemplateSubmoduleIndexes(ctx, tmpDir, submodules); err != nil { return fmt.Errorf("failed to add submodules: %v", err) } diff --git a/services/repository/gitgraph/graph.go b/services/repository/gitgraph/graph.go index f89d9a095a..8d9bec47f8 100644 --- a/services/repository/gitgraph/graph.go +++ b/services/repository/gitgraph/graph.go @@ -6,9 +6,6 @@ package gitgraph import ( "bufio" "bytes" - "context" - "os" - "strings" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git/gitcmd" @@ -45,22 +42,14 @@ func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bo } graph := NewGraph() - stderr := new(strings.Builder) - stdoutReader, stdoutWriter, err := os.Pipe() - if err != nil { - return nil, err - } commitsToSkip := setting.UI.GraphMaxCommitNum * (page - 1) - scanner := bufio.NewScanner(stdoutReader) - + stdoutReader, stdoutReaderClose := graphCmd.MakeStdoutPipe() + defer stdoutReaderClose() if err := graphCmd. WithDir(r.Path). - WithStdout(stdoutWriter). - WithStderr(stderr). 
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error { - _ = stdoutWriter.Close() - defer stdoutReader.Close() + WithPipelineFunc(func(ctx gitcmd.Context) error { + scanner := bufio.NewScanner(stdoutReader) parser := &Parser{} parser.firstInUse = -1 parser.maxAllowedColors = maxAllowedColors @@ -92,8 +81,7 @@ func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bo line := scanner.Bytes() if bytes.IndexByte(line, '*') >= 0 { if err := parser.AddLineToGraph(graph, row, line); err != nil { - cancel() - return err + return ctx.CancelPipeline(err) } break } @@ -104,13 +92,12 @@ func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bo row++ line := scanner.Bytes() if err := parser.AddLineToGraph(graph, row, line); err != nil { - cancel() - return err + return ctx.CancelPipeline(err) } } return scanner.Err() }). - Run(r.Ctx); err != nil { + RunWithStderr(r.Ctx); err != nil { return graph, err } return graph, nil diff --git a/services/repository/init.go b/services/repository/init.go index 51cc113d63..6aeb5ec644 100644 --- a/services/repository/init.go +++ b/services/repository/init.go @@ -11,7 +11,9 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/log" repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" @@ -71,12 +73,12 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi defaultBranch = setting.Repository.DefaultBranch } - if stdout, _, err := gitcmd.NewCommand("push", "origin"). - AddDynamicArguments("HEAD:" + defaultBranch). - WithDir(tmpPath). - WithEnv(repo_module.InternalPushingEnvironment(u, repo)). 
- RunStdString(ctx); err != nil { - log.Error("Failed to push back to HEAD: Stdout: %s\nError: %v", stdout, err) + if err := gitrepo.PushFromLocal(ctx, tmpPath, repo, git.PushOptions{ + LocalRefName: "HEAD", + Branch: defaultBranch, + Env: repo_module.InternalPushingEnvironment(u, repo), + }); err != nil { + log.Error("Failed to push back to HEAD Error: %v", err) return fmt.Errorf("git push: %w", err) } diff --git a/services/repository/migrate.go b/services/repository/migrate.go index 8f515326ad..bc46c5e09b 100644 --- a/services/repository/migrate.go +++ b/services/repository/migrate.go @@ -225,7 +225,7 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, // this is necessary for sync local tags from remote configName := fmt.Sprintf("remote.%s.fetch", mirrorModel.GetRemoteName()) - if stdout, err := gitrepo.RunCmdString(ctx, repo, + if stdout, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("config"). AddOptionValues("--add", configName, `+refs/tags/*:refs/tags/*`)); err != nil { log.Error("MigrateRepositoryGitData(git config --add +refs/tags/*:refs/tags/*) in %v: Stdout: %s\nError: %v", repo, stdout, err) diff --git a/services/repository/repository.go b/services/repository/repository.go index 4d07cb0e38..ae64f0116a 100644 --- a/services/repository/repository.go +++ b/services/repository/repository.go @@ -194,6 +194,10 @@ func MakeRepoPrivate(ctx context.Context, repo *repo_model.Repository) (err erro return err } + if err = repo_model.ClearRepoWatches(ctx, repo.ID); err != nil { + return err + } + // Create/Remove git-daemon-export-ok for git-daemon... 
if err := CheckDaemonExportOK(ctx, repo); err != nil { return err @@ -217,28 +221,28 @@ func MakeRepoPrivate(ctx context.Context, repo *repo_model.Repository) (err erro }) } -// LinkedRepository returns the linked repo if any -func LinkedRepository(ctx context.Context, a *repo_model.Attachment) (*repo_model.Repository, unit.Type, error) { +// GetAttachmentLinkedTypeAndRepoID returns the linked type and repository id of attachment if any +func GetAttachmentLinkedTypeAndRepoID(ctx context.Context, a *repo_model.Attachment) (unit.Type, int64, error) { if a.IssueID != 0 { iss, err := issues_model.GetIssueByID(ctx, a.IssueID) if err != nil { - return nil, unit.TypeIssues, err + return unit.TypeIssues, 0, err } - repo, err := repo_model.GetRepositoryByID(ctx, iss.RepoID) unitType := unit.TypeIssues if iss.IsPull { unitType = unit.TypePullRequests } - return repo, unitType, err - } else if a.ReleaseID != 0 { + return unitType, iss.RepoID, nil + } + + if a.ReleaseID != 0 { rel, err := repo_model.GetReleaseByID(ctx, a.ReleaseID) if err != nil { - return nil, unit.TypeReleases, err + return unit.TypeReleases, 0, err } - repo, err := repo_model.GetRepositoryByID(ctx, rel.RepoID) - return repo, unit.TypeReleases, err + return unit.TypeReleases, rel.RepoID, nil } - return nil, -1, nil + return unit.TypeInvalid, 0, nil } // CheckDaemonExportOK creates/removes git-daemon-export-ok for git-daemon... 
diff --git a/services/repository/repository_test.go b/services/repository/repository_test.go index 5673a4a161..2d860f1b9d 100644 --- a/services/repository/repository_test.go +++ b/services/repository/repository_test.go @@ -13,31 +13,30 @@ import ( "code.gitea.io/gitea/models/unittest" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestLinkedRepository(t *testing.T) { +func TestAttachLinkedTypeAndRepoID(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) testCases := []struct { name string attachID int64 - expectedRepo *repo_model.Repository expectedUnitType unit.Type + expectedRepoID int64 }{ - {"LinkedIssue", 1, &repo_model.Repository{ID: 1}, unit.TypeIssues}, - {"LinkedComment", 3, &repo_model.Repository{ID: 1}, unit.TypePullRequests}, - {"LinkedRelease", 9, &repo_model.Repository{ID: 1}, unit.TypeReleases}, - {"Notlinked", 10, nil, -1}, + {"LinkedIssue", 1, unit.TypeIssues, 1}, + {"LinkedComment", 3, unit.TypePullRequests, 1}, + {"LinkedRelease", 9, unit.TypeReleases, 1}, + {"Notlinked", 10, unit.TypeInvalid, 0}, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { attach, err := repo_model.GetAttachmentByID(t.Context(), tc.attachID) assert.NoError(t, err) - repo, unitType, err := LinkedRepository(t.Context(), attach) + unitType, repoID, err := GetAttachmentLinkedTypeAndRepoID(t.Context(), attach) assert.NoError(t, err) - if tc.expectedRepo != nil { - assert.Equal(t, tc.expectedRepo.ID, repo.ID) - } assert.Equal(t, tc.expectedUnitType, unitType) + assert.Equal(t, tc.expectedRepoID, repoID) }) } } @@ -70,3 +69,24 @@ func TestRepository_HasWiki(t *testing.T) { repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}) assert.False(t, HasWiki(t.Context(), repo2)) } + +func TestMakeRepoPrivateClearsWatches(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}) + repo.IsPrivate = false + + 
watchers, err := repo_model.GetRepoWatchersIDs(t.Context(), repo.ID) + require.NoError(t, err) + require.NotEmpty(t, watchers) + + assert.NoError(t, MakeRepoPrivate(t.Context(), repo)) + + watchers, err = repo_model.GetRepoWatchersIDs(t.Context(), repo.ID) + assert.NoError(t, err) + assert.Empty(t, watchers) + + updatedRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo.ID}) + assert.True(t, updatedRepo.IsPrivate) + assert.Zero(t, updatedRepo.NumWatches) +} diff --git a/services/repository/transfer.go b/services/repository/transfer.go index af477fc7f1..a601ee6f16 100644 --- a/services/repository/transfer.go +++ b/services/repository/transfer.go @@ -6,7 +6,6 @@ package repository import ( "context" "fmt" - "os" "strings" "code.gitea.io/gitea/models/db" @@ -291,12 +290,8 @@ func transferOwnership(ctx context.Context, doer *user_model.User, newOwnerName } // Rename remote repository to new path and delete local copy. - dir := user_model.UserPath(newOwner.Name) - if err := os.MkdirAll(dir, os.ModePerm); err != nil { - return fmt.Errorf("Failed to create dir %s: %w", dir, err) - } - - if err := util.Rename(repo_model.RepoPath(oldOwner.Name, repo.Name), repo_model.RepoPath(newOwner.Name, repo.Name)); err != nil { + oldRelativePath, newRelativePath := repo_model.RelativePath(oldOwner.Name, repo.Name), repo_model.RelativePath(newOwner.Name, repo.Name) + if err := gitrepo.RenameRepository(ctx, repo_model.StorageRepo(oldRelativePath), repo_model.StorageRepo(newRelativePath)); err != nil { return fmt.Errorf("rename repository directory: %w", err) } repoRenamed = true diff --git a/services/user/user.go b/services/user/user.go index 8e42fa3ccd..9b8bcf83c0 100644 --- a/services/user/user.go +++ b/services/user/user.go @@ -239,6 +239,11 @@ func DeleteUser(ctx context.Context, u *user_model.User, purge bool) error { if err := deleteUser(ctx, u, purge); err != nil { return fmt.Errorf("DeleteUser: %w", err) } + + // Finally delete any unlinked attachments, 
this will also delete the attached files + if err := deleteUserUnlinkedAttachments(ctx, u); err != nil { + return fmt.Errorf("deleteUserUnlinkedAttachments: %w", err) + } return nil }); err != nil { return err @@ -269,6 +274,19 @@ func DeleteUser(ctx context.Context, u *user_model.User, purge bool) error { return nil } +func deleteUserUnlinkedAttachments(ctx context.Context, u *user_model.User) error { + attachments, err := repo_model.GetUnlinkedAttachmentsByUserID(ctx, u.ID) + if err != nil { + return fmt.Errorf("GetUnlinkedAttachmentsByUserID: %w", err) + } + for _, attach := range attachments { + if err := repo_model.DeleteAttachment(ctx, attach, true); err != nil { + return fmt.Errorf("DeleteAttachment ID[%d]: %w", attach.ID, err) + } + } + return nil +} + // DeleteInactiveUsers deletes all inactive users and their email addresses. func DeleteInactiveUsers(ctx context.Context, olderThan time.Duration) error { inactiveUsers, err := user_model.GetInactiveUsers(ctx, olderThan) diff --git a/services/user/user_test.go b/services/user/user_test.go index 25e8ee7b2f..4d8d448dcd 100644 --- a/services/user/user_test.go +++ b/services/user/user_test.go @@ -63,6 +63,24 @@ func TestDeleteUser(t *testing.T) { assert.Error(t, DeleteUser(t.Context(), org, false)) } +func TestDeleteUserUnlinkedAttachments(t *testing.T) { + t.Run("DeleteExisting", func(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 8}) + unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: 10}) + + assert.NoError(t, deleteUserUnlinkedAttachments(t.Context(), user)) + unittest.AssertNotExistsBean(t, &repo_model.Attachment{ID: 10}) + }) + + t.Run("NoUnlinkedAttachments", func(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + + assert.NoError(t, deleteUserUnlinkedAttachments(t.Context(), user)) + }) +} + func TestPurgeUser(t 
*testing.T) { test := func(userID int64) { assert.NoError(t, unittest.PrepareTestDatabase()) diff --git a/services/webtheme/webtheme.go b/services/webtheme/webtheme.go index 57d63f4e07..8091c25713 100644 --- a/services/webtheme/webtheme.go +++ b/services/webtheme/webtheme.go @@ -140,7 +140,7 @@ func initThemes() { setting.LogStartupProblem(1, log.ERROR, "Default theme %q is not available, please correct the '[ui].DEFAULT_THEME' setting in the config file", setting.UI.DefaultTheme) } }() - cssFiles, err := public.AssetFS().ListFiles("/assets/css") + cssFiles, err := public.AssetFS().ListFiles("assets/css") if err != nil { log.Error("Failed to list themes: %v", err) availableThemes = []*ThemeMetaInfo{defaultThemeMetaInfoByInternalName(setting.UI.DefaultTheme)} diff --git a/services/wiki/wiki.go b/services/wiki/wiki.go index f4115038cb..1f1e564006 100644 --- a/services/wiki/wiki.go +++ b/services/wiki/wiki.go @@ -170,7 +170,7 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model // FIXME: The wiki doesn't have lfs support at present - if this changes need to check attributes here - objectHash, err := gitRepo.HashObject(strings.NewReader(content)) + objectHash, err := gitRepo.HashObjectBytes([]byte(content)) if err != nil { log.Error("HashObject failed: %v", err) return err diff --git a/stylelint.config.ts b/stylelint.config.js similarity index 98% rename from stylelint.config.ts rename to stylelint.config.js index 8a5b87e17c..cb3fb5b9c9 100644 --- a/stylelint.config.ts +++ b/stylelint.config.js @@ -1,5 +1,5 @@ +// TODO: Move to .ts after https://github.com/stylelint/stylelint/issues/8893 is fixed import {fileURLToPath} from 'node:url'; -import type {Config} from 'stylelint'; const cssVarFiles = [ fileURLToPath(new URL('web_src/css/base.css', import.meta.url)), @@ -146,4 +146,4 @@ export default { 'shorthand-property-no-redundant-values': true, 'value-no-vendor-prefix': [true, {ignoreValues: ['box', 'inline-box']}], }, -} satisfies Config; 
+}; diff --git a/templates/admin/config.tmpl b/templates/admin/config.tmpl index 57631fd9c6..728746713c 100644 --- a/templates/admin/config.tmpl +++ b/templates/admin/config.tmpl @@ -307,10 +307,6 @@
    {{.Git.Timeout.Migrate}} {{ctx.Locale.Tr "tool.raw_seconds"}}
    {{ctx.Locale.Tr "admin.config.git_mirror_timeout"}}
    {{.Git.Timeout.Mirror}} {{ctx.Locale.Tr "tool.raw_seconds"}}
    -
    {{ctx.Locale.Tr "admin.config.git_clone_timeout"}}
    -
    {{.Git.Timeout.Clone}} {{ctx.Locale.Tr "tool.raw_seconds"}}
    -
    {{ctx.Locale.Tr "admin.config.git_pull_timeout"}}
    -
    {{.Git.Timeout.Pull}} {{ctx.Locale.Tr "tool.raw_seconds"}}
    {{ctx.Locale.Tr "admin.config.git_gc_timeout"}}
    {{.Git.Timeout.GC}} {{ctx.Locale.Tr "tool.raw_seconds"}}
    diff --git a/templates/base/footer_content.tmpl b/templates/base/footer_content.tmpl index df437badf6..66c9d718ea 100644 --- a/templates/base/footer_content.tmpl +++ b/templates/base/footer_content.tmpl @@ -1,7 +1,7 @@