diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 0b20da7c16..4f82a5d8c6 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -13,7 +13,7 @@
"ghcr.io/devcontainers/features/git-lfs:1.2.5": {},
"ghcr.io/jsburckhardt/devcontainer-features/uv:1": {},
"ghcr.io/devcontainers/features/python:1": {
- "version": "3.13"
+ "version": "3.14"
},
"ghcr.io/warrenbuckley/codespace-features/sqlite:1": {}
},
diff --git a/.github/labeler.yml b/.github/labeler.yml
index c940afef0c..68a0f30fd6 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -46,7 +46,7 @@ modifies/internal:
- ".gitpod.yml"
- ".markdownlint.yaml"
- ".spectral.yaml"
- - "stylelint.config.ts"
+ - "stylelint.config.*"
- ".yamllint.yaml"
- ".github/**"
- ".gitea/**"
@@ -84,9 +84,9 @@ docs-update-needed:
topic/code-linting:
- changed-files:
- any-glob-to-any-file:
- - ".eslintrc.cjs"
- ".golangci.yml"
- ".markdownlint.yaml"
- ".spectral.yaml"
- ".yamllint.yaml"
- - "stylelint.config.ts"
+ - "eslint*.config.*"
+ - "stylelint.config.*"
diff --git a/.github/workflows/cron-flake-updater.yml b/.github/workflows/cron-flake-updater.yml
new file mode 100644
index 0000000000..105802e558
--- /dev/null
+++ b/.github/workflows/cron-flake-updater.yml
@@ -0,0 +1,22 @@
+name: cron-flake-updater
+
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00
+
+jobs:
+ nix-flake-update:
+ permissions:
+ contents: write
+ issues: write
+ pull-requests: write
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+ - uses: DeterminateSystems/determinate-nix-action@v3
+ - uses: DeterminateSystems/update-flake-lock@main
+ with:
+ pr-title: "Update Nix flake"
+ pr-labels: |
+ dependencies
diff --git a/.github/workflows/cron-licenses.yml b/.github/workflows/cron-licenses.yml
index c5dd70a1f8..ee1c3e0c75 100644
--- a/.github/workflows/cron-licenses.yml
+++ b/.github/workflows/cron-licenses.yml
@@ -20,7 +20,7 @@ jobs:
- run: make generate-gitignore
timeout-minutes: 40
- name: push translations to repo
- uses: appleboy/git-push-action@v1.0.0
+ uses: appleboy/git-push-action@v1.2.0
with:
author_email: "teabot@gitea.io"
author_name: GiteaBot
diff --git a/.github/workflows/cron-translations.yml b/.github/workflows/cron-translations.yml
index d87ba8b20d..56a30fb5ba 100644
--- a/.github/workflows/cron-translations.yml
+++ b/.github/workflows/cron-translations.yml
@@ -29,7 +29,7 @@ jobs:
- name: update locales
run: ./build/update-locales.sh
- name: push translations to repo
- uses: appleboy/git-push-action@v1.0.0
+ uses: appleboy/git-push-action@v1.2.0
with:
author_email: "teabot@gitea.io"
author_name: GiteaBot
diff --git a/.github/workflows/files-changed.yml b/.github/workflows/files-changed.yml
index 7b58186cd0..332e9e0d6f 100644
--- a/.github/workflows/files-changed.yml
+++ b/.github/workflows/files-changed.yml
@@ -85,6 +85,7 @@ jobs:
- "uv.lock"
docker:
+ - ".github/workflows/pull-docker-dryrun.yml"
- "Dockerfile"
- "Dockerfile.rootless"
- "docker/**"
diff --git a/.github/workflows/pull-compliance.yml b/.github/workflows/pull-compliance.yml
index 99ff95e9c6..fb81622bd6 100644
--- a/.github/workflows/pull-compliance.yml
+++ b/.github/workflows/pull-compliance.yml
@@ -39,7 +39,7 @@ jobs:
steps:
- uses: actions/checkout@v6
- uses: astral-sh/setup-uv@v7
- - run: uv python install 3.12
+ - run: uv python install 3.14
- uses: pnpm/action-setup@v4
- uses: actions/setup-node@v6
with:
@@ -59,7 +59,7 @@ jobs:
steps:
- uses: actions/checkout@v6
- uses: astral-sh/setup-uv@v7
- - run: uv python install 3.12
+ - run: uv python install 3.14
- run: make deps-py
- run: make lint-yaml
diff --git a/.github/workflows/pull-db-tests.yml b/.github/workflows/pull-db-tests.yml
index 66f48d5af8..d168c2ecc5 100644
--- a/.github/workflows/pull-db-tests.yml
+++ b/.github/workflows/pull-db-tests.yml
@@ -63,7 +63,6 @@ jobs:
RACE_ENABLED: true
TEST_TAGS: gogit
TEST_LDAP: 1
- USE_REPO_TEST_DIR: 1
test-sqlite:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
@@ -90,7 +89,6 @@ jobs:
TAGS: bindata gogit sqlite sqlite_unlock_notify
RACE_ENABLED: true
TEST_TAGS: gogit sqlite sqlite_unlock_notify
- USE_REPO_TEST_DIR: 1
test-unit:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
@@ -206,7 +204,6 @@ jobs:
env:
TAGS: bindata
RACE_ENABLED: true
- USE_REPO_TEST_DIR: 1
TEST_INDEXER_CODE_ES_URL: "http://elastic:changeme@elasticsearch:9200"
test-mssql:
@@ -246,4 +243,3 @@ jobs:
timeout-minutes: 50
env:
TAGS: bindata
- USE_REPO_TEST_DIR: 1
diff --git a/.github/workflows/pull-docker-dryrun.yml b/.github/workflows/pull-docker-dryrun.yml
index 2a4d675abc..405521c354 100644
--- a/.github/workflows/pull-docker-dryrun.yml
+++ b/.github/workflows/pull-docker-dryrun.yml
@@ -14,24 +14,25 @@ jobs:
contents: read
container:
- if: needs.files-changed.outputs.docker == 'true' || needs.files-changed.outputs.actions == 'true'
+ if: needs.files-changed.outputs.docker == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@v6
+ - uses: docker/setup-qemu-action@v3
- uses: docker/setup-buildx-action@v3
- name: Build regular container image
uses: docker/build-push-action@v6
with:
context: .
+ platforms: linux/amd64,linux/arm64,linux/riscv64
push: false
- tags: gitea/gitea:linux-amd64
- name: Build rootless container image
uses: docker/build-push-action@v6
with:
context: .
push: false
+ platforms: linux/amd64,linux/arm64,linux/riscv64
file: Dockerfile.rootless
- tags: gitea/gitea:linux-amd64
diff --git a/.gitignore b/.gitignore
index 11af4543bd..cead4853ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -89,7 +89,6 @@ cpu.out
/vendor
/VERSION
/.air
-/.go-licenses
# Files and folders that were previously generated
/public/assets/img/webpack
@@ -121,8 +120,6 @@ prime/
/.goosehints
/.windsurfrules
/.github/copilot-instructions.md
-/AGENT.md
-/CLAUDE.md
/llms.txt
# Ignore worktrees when working on multiple branches
diff --git a/.golangci.yml b/.golangci.yml
index e9b9a03c43..2b85c89fdc 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -18,6 +18,7 @@ linters:
- mirror
- modernize
- nakedret
+ - nilnil
- nolintlint
- perfsprint
- revive
@@ -48,6 +49,10 @@ linters:
desc: do not use the ini package, use gitea's config system instead
- pkg: gitea.com/go-chi/cache
desc: do not use the go-chi cache package, use gitea's cache system
+ - pkg: github.com/pkg/errors
+ desc: use builtin errors package instead
+ - pkg: github.com/go-ap/errors
+ desc: use builtin errors package instead
nolintlint:
allow-unused: false
require-explanation: true
@@ -62,35 +67,24 @@ linters:
revive:
severity: error
rules:
- - name: atomic
- - name: bare-return
- name: blank-imports
- name: constant-logical-expr
- name: context-as-argument
- name: context-keys-type
- name: dot-imports
- - name: duplicated-imports
- name: empty-lines
- - name: error-naming
- name: error-return
- name: error-strings
- - name: errorf
- name: exported
- name: identical-branches
- name: if-return
- name: increment-decrement
- - name: indent-error-flow
- name: modifies-value-receiver
- name: package-comments
- - name: range
- - name: receiver-naming
- name: redefines-builtin-id
- - name: string-of-int
- name: superfluous-else
- name: time-naming
- - name: unconditional-recursion
- name: unexported-return
- - name: unreachable-code
- name: var-declaration
- name: var-naming
arguments:
@@ -128,16 +122,12 @@ linters:
- linters:
- dupl
- errcheck
- - gocyclo
- - gosec
- staticcheck
- unparam
path: _test\.go
- linters:
- dupl
- errcheck
- - gocyclo
- - gosec
path: models/migrations/v
- linters:
- forbidigo
@@ -149,7 +139,6 @@ linters:
- gocritic
text: (?i)`ID' should not be capitalized
- linters:
- - deadcode
- unused
text: (?i)swagger
- linters:
diff --git a/AGENTS.md b/AGENTS.md
new file mode 100644
index 0000000000..402a9d6945
--- /dev/null
+++ b/AGENTS.md
@@ -0,0 +1,10 @@
+# Instructions for agents
+
+- Use `make help` to find available development targets
+- Before committing `.go` changes, run `make fmt` to format, and run `make lint-go` to lint
+- Before committing `.ts` changes, run `make lint-js` to lint
+- Before committing `go.mod` changes, run `make tidy`
+- Before committing new `.go` files, add the current year into the copyright header
+- Before committing any files, remove all trailing whitespace from source code lines
+- Never force-push to pull request branches
+- Always start issue and pull request comments with an authorship attribution
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e3374e14c3..f0d93452ae 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,39 @@ This changelog goes through the changes that have been made in each release
without substantial changes to our git log; to see the highlights of what has
been added to each release, please refer to the [blog](https://blog.gitea.com).
-## [1.25.3](https://github.com/go-gitea/gitea/releases/tag/1.25.3) - 2025-12-17
+## [1.25.4](https://github.com/go-gitea/gitea/releases/tag/v1.25.4) - 2026-01-15
+
+* SECURITY
+ * Release attachments must belong to the intended repo (#36347) (#36375)
+ * Fix permission check on org project operations (#36318) (#36373)
+ * Clean watches when make a repository private and check permission when send release emails (#36319) (#36370)
+ * Add more check for stopwatch read or list (#36340) (#36368)
+ * Fix openid setting check (#36346) (#36361)
+ * Fix cancel auto merge bug (#36341) (#36356)
+ * Fix delete attachment check (#36320) (#36355)
+ * LFS locks must belong to the intended repo (#36344) (#36349)
+ * Fix bug on notification read (#36339) (#36387)
+* ENHANCEMENTS
+ * Add more routes to the "expensive" list (#36290)
+ * Make "commit statuses" API accept slashes in "ref" (#36264) (#36275)
+* BUGFIXES
+ * Fix git http service handling (#36396)
+ * Fix markdown newline handling during IME composition (#36421) (#36424)
+ * Fix missing repository id when migrating release attachments (#36389)
+ * Fix bug when compare in the pull request (#36363) (#36372)
+ * Fix incorrect text content detection (#36364) (#36369)
+ * Fill missing `has_code` in repository api (#36338) (#36359)
+ * Fix notifications pagination query parameters (#36351) (#36358)
+ * Fix some trivial problems (#36336) (#36337)
+ * Prevent panic when GitLab release has more links than sources (#36295) (#36305)
+ * Fix stats bug when syncing release (#36285) (#36294)
+ * Always honor user's choice for "delete branch after merge" (#36281) (#36286)
+ * Use the requested host for LFS links (#36242) (#36258)
+ * Fix panic when get editor config file (#36241) (#36247)
+ * Fix regression in writing authorized principals (#36213) (#36218)
+ * Fix WebAuthn error checking (#36219) (#36235)
+
+## [1.25.3](https://github.com/go-gitea/gitea/releases/tag/v1.25.3) - 2025-12-17
* SECURITY
* Bump toolchain to go1.25.5, misc fixes (#36082)
@@ -31,7 +63,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
* Fix error handling in mailer and wiki services (#36041) (#36053)
* Fix bugs when comparing and creating pull request (#36166) (#36144)
-## [1.25.2](https://github.com/go-gitea/gitea/releases/tag/1.25.2) - 2025-11-23
+## [1.25.2](https://github.com/go-gitea/gitea/releases/tag/v1.25.2) - 2025-11-23
* SECURITY
* Upgrade golang.org/x/crypto to 0.45.0 (#35985) (#35988)
@@ -418,7 +450,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
* Hide href attribute of a tag if there is no target_url (#34556) (#34684)
* Fix tag target (#34781) #34783
-## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/1.24.0) - 2025-05-26
+## [1.24.0](https://github.com/go-gitea/gitea/releases/tag/v1.24.0) - 2025-05-26
* BREAKING
* Make Gitea always use its internal config, ignore `/etc/gitconfig` (#33076)
@@ -788,7 +820,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
* Bump x/net (#32896) (#32900)
* Only activity tab needs heatmap data loading (#34652)
-## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/1.23.8) - 2025-05-11
+## [1.23.8](https://github.com/go-gitea/gitea/releases/tag/v1.23.8) - 2025-05-11
* SECURITY
* Fix a bug when uploading file via lfs ssh command (#34408) (#34411)
@@ -815,7 +847,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
* Bump go version in go.mod (#34160)
* remove hardcoded 'code' string in clone_panel.tmpl (#34153) (#34158)
-## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/1.23.7) - 2025-04-07
+## [1.23.7](https://github.com/go-gitea/gitea/releases/tag/v1.23.7) - 2025-04-07
* Enhancements
* Add a config option to block "expensive" pages for anonymous users (#34024) (#34071)
@@ -913,7 +945,7 @@ been added to each release, please refer to the [blog](https://blog.gitea.com).
* BUGFIXES
* Fix a bug caused by status webhook template #33512
-## [1.23.2](https://github.com/go-gitea/gitea/releases/tag/1.23.2) - 2025-02-04
+## [1.23.2](https://github.com/go-gitea/gitea/releases/tag/v1.23.2) - 2025-02-04
* BREAKING
* Add tests for webhook and fix some webhook bugs (#33396) (#33442)
@@ -3443,7 +3475,7 @@ Key highlights of this release encompass significant changes categorized under `
* Improve decryption failure message (#24573) (#24575)
* Makefile: Use portable !, not GNUish -not, with find(1). (#24565) (#24572)
-## [1.19.3](https://github.com/go-gitea/gitea/releases/tag/1.19.3) - 2023-05-03
+## [1.19.3](https://github.com/go-gitea/gitea/releases/tag/v1.19.3) - 2023-05-03
* SECURITY
* Use golang 1.20.4 to fix CVE-2023-24539, CVE-2023-24540, and CVE-2023-29400
@@ -3456,7 +3488,7 @@ Key highlights of this release encompass significant changes categorized under `
* Fix incorrect CurrentUser check for docker rootless (#24435)
* Getting the tag list does not require being signed in (#24413) (#24416)
-## [1.19.2](https://github.com/go-gitea/gitea/releases/tag/1.19.2) - 2023-04-26
+## [1.19.2](https://github.com/go-gitea/gitea/releases/tag/v1.19.2) - 2023-04-26
* SECURITY
* Require repo scope for PATs for private repos and basic authentication (#24362) (#24364)
@@ -3955,7 +3987,7 @@ Key highlights of this release encompass significant changes categorized under `
* Display attachments of review comment when comment content is blank (#23035) (#23046)
* Return empty url for submodule tree entries (#23043) (#23048)
-## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/1.18.4) - 2023-02-20
+## [1.18.4](https://github.com/go-gitea/gitea/releases/tag/v1.18.4) - 2023-02-20
* SECURITY
* Provide the ability to set password hash algorithm parameters (#22942) (#22943)
@@ -4382,7 +4414,7 @@ Key highlights of this release encompass significant changes categorized under `
* Fix the mode of custom dir to 0700 in docker-rootless (#20861) (#20867)
* Fix UI mis-align for PR commit history (#20845) (#20859)
-## [1.17.1](https://github.com/go-gitea/gitea/releases/tag/1.17.1) - 2022-08-17
+## [1.17.1](https://github.com/go-gitea/gitea/releases/tag/v1.17.1) - 2022-08-17
* SECURITY
* Correctly escape within tribute.js (#20831) (#20832)
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 0000000000..43c994c2d3
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1 @@
+@AGENTS.md
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 9d696bf6b1..c64d91a7eb 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -80,7 +80,7 @@ The more detailed and specific you are, the faster we can fix the issue. \
It is really helpful if you can reproduce your problem on a site running on the latest commits, i.e. , as perhaps your problem has already been fixed on a current version. \
Please follow the guidelines described in [How to Report Bugs Effectively](http://www.chiark.greenend.org.uk/~sgtatham/bugs.html) for your report.
-Please be kind, remember that Gitea comes at no cost to you, and you're getting free help.
+Please be kind—remember that Gitea comes at no cost to you, and you're getting free help.
### Types of issues
@@ -183,7 +183,7 @@ Here's how to run the test suite:
## Translation
All translation work happens on [Crowdin](https://translate.gitea.com).
-The only translation that is maintained in this repository is [the English translation](https://github.com/go-gitea/gitea/blob/main/options/locale/locale_en-US.ini).
+The only translation that is maintained in this repository is [the English translation](https://github.com/go-gitea/gitea/blob/main/options/locale/locale_en-US.json).
It is synced regularly with Crowdin. \
Other locales on main branch **should not** be updated manually as they will be overwritten with each sync. \
Once a language has reached a **satisfactory percentage** of translated keys (~25%), it will be synced back into this repo and included in the next released version.
diff --git a/Dockerfile b/Dockerfile
index 7cee0f32d3..f71b13e8f3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,13 @@
# syntax=docker/dockerfile:1
-# Build stage
-FROM docker.io/library/golang:1.25-alpine3.22 AS build-env
+# Build frontend on the native platform to avoid QEMU-related issues with esbuild/webpack
+FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.26-alpine3.23 AS frontend-build
+RUN apk --no-cache add build-base git nodejs pnpm
+WORKDIR /src
+COPY --exclude=.git/ . .
+RUN --mount=type=cache,target=/root/.local/share/pnpm/store make frontend
+
+# Build backend for each target platform
+FROM docker.io/library/golang:1.26-alpine3.23 AS build-env
ARG GOPROXY=direct
@@ -12,22 +19,19 @@ ARG CGO_EXTRA_CFLAGS
# Build deps
RUN apk --no-cache add \
build-base \
- git \
- nodejs \
- pnpm
+ git
WORKDIR ${GOPATH}/src/code.gitea.io/gitea
-# Use COPY but not "mount" because some directories like "node_modules" contain platform-depended contents and these directories need to be ignored.
-# ".git" directory will be mounted later separately for getting version data.
-# TODO: in the future, maybe we can pre-build the frontend assets on one platform and share them for different platforms, the benefit is that it won't be affected by webpack plugin compatibility problems, then the working directory can be fully mounted and the COPY is not needed.
+# Use COPY instead of bind mount as read-only one breaks makefile state tracking and read-write one needs binary to be moved as it's discarded.
+# ".git" directory is mounted separately later only for version data extraction.
COPY --exclude=.git/ . .
+COPY --from=frontend-build /src/public/assets public/assets
# Build gitea, .git mount is required for version data
RUN --mount=type=cache,target=/go/pkg/mod \
--mount=type=cache,target="/root/.cache/go-build" \
- --mount=type=cache,target=/root/.local/share/pnpm/store \
--mount=type=bind,source=".git/",target=".git/" \
- make
+ make backend
COPY docker/root /tmp/local
@@ -39,7 +43,7 @@ RUN chmod 755 /tmp/local/usr/bin/entrypoint \
/tmp/local/etc/s6/.s6-svscan/* \
/go/src/code.gitea.io/gitea/gitea
-FROM docker.io/library/alpine:3.22 AS gitea
+FROM docker.io/library/alpine:3.23 AS gitea
EXPOSE 22 3000
@@ -76,5 +80,6 @@ ENV GITEA_CUSTOM=/data/gitea
VOLUME ["/data"]
+# HINT: HEALTH-CHECK-ENDPOINT: don't use HEALTHCHECK, search this hint keyword for more information
ENTRYPOINT ["/usr/bin/entrypoint"]
CMD ["/usr/bin/s6-svscan", "/etc/s6"]
diff --git a/Dockerfile.rootless b/Dockerfile.rootless
index 8a6fa587e9..bc210132c5 100644
--- a/Dockerfile.rootless
+++ b/Dockerfile.rootless
@@ -1,6 +1,13 @@
# syntax=docker/dockerfile:1
-# Build stage
-FROM docker.io/library/golang:1.25-alpine3.22 AS build-env
+# Build frontend on the native platform to avoid QEMU-related issues with esbuild/webpack
+FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.26-alpine3.23 AS frontend-build
+RUN apk --no-cache add build-base git nodejs pnpm
+WORKDIR /src
+COPY --exclude=.git/ . .
+RUN --mount=type=cache,target=/root/.local/share/pnpm/store make frontend
+
+# Build backend for each target platform
+FROM docker.io/library/golang:1.26-alpine3.23 AS build-env
ARG GOPROXY=direct
@@ -12,20 +19,18 @@ ARG CGO_EXTRA_CFLAGS
# Build deps
RUN apk --no-cache add \
build-base \
- git \
- nodejs \
- pnpm
+ git
WORKDIR ${GOPATH}/src/code.gitea.io/gitea
# See the comments in Dockerfile
COPY --exclude=.git/ . .
+COPY --from=frontend-build /src/public/assets public/assets
# Build gitea, .git mount is required for version data
RUN --mount=type=cache,target=/go/pkg/mod \
--mount=type=cache,target="/root/.cache/go-build" \
- --mount=type=cache,target=/root/.local/share/pnpm/store \
--mount=type=bind,source=".git/",target=".git/" \
- make
+ make backend
COPY docker/rootless /tmp/local
@@ -33,7 +38,7 @@ COPY docker/rootless /tmp/local
RUN chmod 755 /tmp/local/usr/local/bin/* \
/go/src/code.gitea.io/gitea/gitea
-FROM docker.io/library/alpine:3.22 AS gitea-rootless
+FROM docker.io/library/alpine:3.23 AS gitea-rootless
EXPOSE 2222 3000
@@ -77,5 +82,6 @@ ENV HOME="/var/lib/gitea/git"
VOLUME ["/var/lib/gitea", "/etc/gitea"]
WORKDIR /var/lib/gitea
+# HINT: HEALTH-CHECK-ENDPOINT: don't use HEALTHCHECK, search this hint keyword for more information
ENTRYPOINT ["/usr/bin/dumb-init", "--", "/usr/local/bin/docker-entrypoint.sh"]
CMD []
diff --git a/Makefile b/Makefile
index ee75906b7f..3c7582dd57 100644
--- a/Makefile
+++ b/Makefile
@@ -1,22 +1,5 @@
-ifeq ($(USE_REPO_TEST_DIR),1)
-
-# This rule replaces the whole Makefile when we're trying to use /tmp repository temporary files
-location = $(CURDIR)/$(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST))
-self := $(location)
-
-%:
- @tmpdir=`mktemp --tmpdir -d` ; \
- echo Using temporary directory $$tmpdir for test repositories ; \
- USE_REPO_TEST_DIR= $(MAKE) -f $(self) --no-print-directory REPO_TEST_DIR=$$tmpdir/ $@ ; \
- STATUS=$$? ; rm -r "$$tmpdir" ; exit $$STATUS
-
-else
-
-# This is the "normal" part of the Makefile
-
DIST := dist
DIST_DIRS := $(DIST)/binaries $(DIST)/release
-IMPORT := code.gitea.io/gitea
# By default use go's 1.25 experimental json v2 library when building
# TODO: remove when no longer experimental
@@ -32,14 +15,13 @@ XGO_VERSION := go-1.25.x
AIR_PACKAGE ?= github.com/air-verse/air@v1
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.9.2
-GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.7.2
+GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.9.0
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.15
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.7.0
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.33.1
XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
-GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1
-ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1.7.9
+ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1.7.10
DOCKER_IMAGE ?= gitea/gitea
DOCKER_TAG ?= latest
@@ -84,7 +66,6 @@ endif
EXTRA_GOFLAGS ?=
-MAKE_VERSION := $(shell "$(MAKE)" -v | cat | head -n 1)
MAKE_EVIDENCE_DIR := .make_evidence
GOTESTFLAGS ?=
@@ -100,7 +81,7 @@ GITHUB_REF_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
# Enable typescript support in Node.js before 22.18
# TODO: Remove this once we can raise the minimum Node.js version to 22.18 (alpine >= 3.23)
-NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell node -v 2>/dev/null | cut -c2- | tr '.' ' '))
+NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell node -v 2>/dev/null | cut -c2- | sed 's/-.*//' | tr '.' ' '))
ifeq ($(shell test "$(NODE_VERSION)" -lt "022018000"; echo $$?),0)
NODE_VARS := NODE_OPTIONS="--experimental-strip-types"
else
@@ -130,7 +111,7 @@ ifeq ($(VERSION),main)
VERSION := main-nightly
endif
-LDFLAGS := $(LDFLAGS) -X "main.MakeVersion=$(MAKE_VERSION)" -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)"
+LDFLAGS := $(LDFLAGS) -X "main.Version=$(GITEA_VERSION)" -X "main.Tags=$(TAGS)"
LINUX_ARCHS ?= linux/amd64,linux/386,linux/arm-5,linux/arm-6,linux/arm64,linux/riscv64
@@ -150,7 +131,6 @@ SVG_DEST_DIR := public/assets/img/svg
AIR_TMP_DIR := .air
-GO_LICENSE_TMP_DIR := .go-licenses
GO_LICENSE_FILE := assets/go-licenses.json
TAGS ?=
@@ -159,7 +139,7 @@ TAGS_EVIDENCE := $(MAKE_EVIDENCE_DIR)/tags
TEST_TAGS ?= $(TAGS_SPLIT) sqlite sqlite_unlock_notify
-TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(DIST) $(MAKE_EVIDENCE_DIR) $(AIR_TMP_DIR) $(GO_LICENSE_TMP_DIR)
+TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(DIST) $(MAKE_EVIDENCE_DIR) $(AIR_TMP_DIR)
GO_DIRS := build cmd models modules routers services tests tools
WEB_DIRS := web_src/js web_src/css
@@ -211,16 +191,6 @@ help: Makefile ## print Makefile help information.
@printf " \033[36m%-46s\033[0m %s\n" "test[#TestSpecificName]" "run unit test"
@printf " \033[36m%-46s\033[0m %s\n" "test-sqlite[#TestSpecificName]" "run integration test for sqlite"
-.PHONY: go-check
-go-check:
- $(eval MIN_GO_VERSION_STR := $(shell grep -Eo '^go\s+[0-9]+\.[0-9]+' go.mod | cut -d' ' -f2))
- $(eval MIN_GO_VERSION := $(shell printf "%03d%03d" $(shell echo '$(MIN_GO_VERSION_STR)' | tr '.' ' ')))
- $(eval GO_VERSION := $(shell printf "%03d%03d" $(shell $(GO) version | grep -Eo '[0-9]+\.[0-9]+' | tr '.' ' ');))
- @if [ "$(GO_VERSION)" -lt "$(MIN_GO_VERSION)" ]; then \
- echo "Gitea requires Go $(MIN_GO_VERSION_STR) or greater to build. You can get it at https://go.dev/dl/"; \
- exit 1; \
- fi
-
.PHONY: git-check
git-check:
@if git lfs >/dev/null 2>&1 ; then : ; else \
@@ -228,20 +198,6 @@ git-check:
exit 1; \
fi
-.PHONY: node-check
-node-check:
- $(eval MIN_NODE_VERSION_STR := $(shell grep -Eo '"node":.*[0-9.]+"' package.json | sed -n 's/.*[^0-9.]\([0-9.]*\)"/\1/p'))
- $(eval MIN_NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell echo '$(MIN_NODE_VERSION_STR)' | tr '.' ' ')))
- $(eval PNPM_MISSING := $(shell hash pnpm > /dev/null 2>&1 || echo 1))
- @if [ "$(NODE_VERSION)" -lt "$(MIN_NODE_VERSION)" ]; then \
- echo "Gitea requires Node.js $(MIN_NODE_VERSION_STR) or greater to build. You can get it at https://nodejs.org/en/download/"; \
- exit 1; \
- fi
- @if [ "$(PNPM_MISSING)" = "1" ]; then \
- echo "Gitea requires pnpm to build. You can install it at https://pnpm.io/installation"; \
- exit 1; \
- fi
-
.PHONY: clean-all
clean-all: clean ## delete backend, frontend and integration files
rm -rf $(WEBPACK_DEST_ENTRIES) node_modules
@@ -253,7 +209,7 @@ clean: ## delete backend and integration files
e2e*.test \
tests/integration/gitea-integration-* \
tests/integration/indexers-* \
- tests/mysql.ini tests/pgsql.ini tests/mssql.ini man/ \
+ tests/sqlite.ini tests/mysql.ini tests/pgsql.ini tests/mssql.ini man/ \
tests/e2e/gitea-e2e-*/ \
tests/e2e/indexers-*/ \
tests/e2e/reports/ tests/e2e/test-artifacts/ tests/e2e/test-snapshots/
@@ -338,12 +294,12 @@ lint-backend: lint-go lint-go-gitea-vet lint-editorconfig ## lint backend files
lint-backend-fix: lint-go-fix lint-go-gitea-vet lint-editorconfig ## lint backend files and fix issues
.PHONY: lint-js
-lint-js: node_modules ## lint js files
+lint-js: node_modules ## lint js and ts files
$(NODE_VARS) pnpm exec eslint --color --max-warnings=0 $(ESLINT_FILES)
$(NODE_VARS) pnpm exec vue-tsc
.PHONY: lint-js-fix
-lint-js-fix: node_modules ## lint js files and fix issues
+lint-js-fix: node_modules ## lint js and ts files and fix issues
$(NODE_VARS) pnpm exec eslint --color --max-warnings=0 $(ESLINT_FILES) --fix
$(NODE_VARS) pnpm exec vue-tsc
@@ -426,12 +382,12 @@ watch: ## watch everything and continuously rebuild
@bash tools/watch.sh
.PHONY: watch-frontend
-watch-frontend: node-check node_modules ## watch frontend files and continuously rebuild
+watch-frontend: node_modules ## watch frontend files and continuously rebuild
@rm -rf $(WEBPACK_DEST_ENTRIES)
NODE_ENV=development $(NODE_VARS) pnpm exec webpack --watch --progress --disable-interpret
.PHONY: watch-backend
-watch-backend: go-check ## watch backend files and continuously rebuild
+watch-backend: ## watch backend files and continuously rebuild
GITEA_RUN_MODE=dev $(GO) run $(AIR_PACKAGE) -c .air.toml
.PHONY: test
@@ -497,25 +453,20 @@ tidy-check: tidy
go-licenses: $(GO_LICENSE_FILE) ## regenerate go licenses
$(GO_LICENSE_FILE): go.mod go.sum
- @rm -rf $(GO_LICENSE_FILE)
- $(GO) install $(GO_LICENSES_PACKAGE)
- -GOOS=linux CGO_ENABLED=1 go-licenses save . --force --save_path=$(GO_LICENSE_TMP_DIR) 2>/dev/null
- $(GO) run build/generate-go-licenses.go $(GO_LICENSE_TMP_DIR) $(GO_LICENSE_FILE)
- @rm -rf $(GO_LICENSE_TMP_DIR)
+ GO=$(GO) $(GO) run build/generate-go-licenses.go $(GO_LICENSE_FILE)
generate-ini-sqlite:
- sed -e 's|{{REPO_TEST_DIR}}|${REPO_TEST_DIR}|g' \
+ sed -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-sqlite|g' \
-e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \
- -e 's|{{TEST_TYPE}}|$(or $(TEST_TYPE),integration)|g' \
tests/sqlite.ini.tmpl > tests/sqlite.ini
.PHONY: test-sqlite
test-sqlite: integrations.sqlite.test generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./integrations.sqlite.test
+ GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test
.PHONY: test-sqlite\#%
test-sqlite\#%: integrations.sqlite.test generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.run $(subst .,/,$*)
+ GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.run $(subst .,/,$*)
.PHONY: test-sqlite-migration
test-sqlite-migration: migrations.sqlite.test migrations.individual.sqlite.test
@@ -525,18 +476,17 @@ generate-ini-mysql:
-e 's|{{TEST_MYSQL_DBNAME}}|${TEST_MYSQL_DBNAME}|g' \
-e 's|{{TEST_MYSQL_USERNAME}}|${TEST_MYSQL_USERNAME}|g' \
-e 's|{{TEST_MYSQL_PASSWORD}}|${TEST_MYSQL_PASSWORD}|g' \
- -e 's|{{REPO_TEST_DIR}}|${REPO_TEST_DIR}|g' \
+ -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-mysql|g' \
-e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \
- -e 's|{{TEST_TYPE}}|$(or $(TEST_TYPE),integration)|g' \
tests/mysql.ini.tmpl > tests/mysql.ini
.PHONY: test-mysql
test-mysql: integrations.mysql.test generate-ini-mysql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./integrations.mysql.test
+ GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test
.PHONY: test-mysql\#%
test-mysql\#%: integrations.mysql.test generate-ini-mysql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./integrations.mysql.test -test.run $(subst .,/,$*)
+ GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test -test.run $(subst .,/,$*)
.PHONY: test-mysql-migration
test-mysql-migration: migrations.mysql.test migrations.individual.mysql.test
@@ -548,18 +498,17 @@ generate-ini-pgsql:
-e 's|{{TEST_PGSQL_PASSWORD}}|${TEST_PGSQL_PASSWORD}|g' \
-e 's|{{TEST_PGSQL_SCHEMA}}|${TEST_PGSQL_SCHEMA}|g' \
-e 's|{{TEST_MINIO_ENDPOINT}}|${TEST_MINIO_ENDPOINT}|g' \
- -e 's|{{REPO_TEST_DIR}}|${REPO_TEST_DIR}|g' \
+ -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-pgsql|g' \
-e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \
- -e 's|{{TEST_TYPE}}|$(or $(TEST_TYPE),integration)|g' \
tests/pgsql.ini.tmpl > tests/pgsql.ini
.PHONY: test-pgsql
test-pgsql: integrations.pgsql.test generate-ini-pgsql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini ./integrations.pgsql.test
+ GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test
.PHONY: test-pgsql\#%
test-pgsql\#%: integrations.pgsql.test generate-ini-pgsql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini ./integrations.pgsql.test -test.run $(subst .,/,$*)
+ GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test -test.run $(subst .,/,$*)
.PHONY: test-pgsql-migration
test-pgsql-migration: migrations.pgsql.test migrations.individual.pgsql.test
@@ -569,18 +518,17 @@ generate-ini-mssql:
-e 's|{{TEST_MSSQL_DBNAME}}|${TEST_MSSQL_DBNAME}|g' \
-e 's|{{TEST_MSSQL_USERNAME}}|${TEST_MSSQL_USERNAME}|g' \
-e 's|{{TEST_MSSQL_PASSWORD}}|${TEST_MSSQL_PASSWORD}|g' \
- -e 's|{{REPO_TEST_DIR}}|${REPO_TEST_DIR}|g' \
+ -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-mssql|g' \
-e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \
- -e 's|{{TEST_TYPE}}|$(or $(TEST_TYPE),integration)|g' \
tests/mssql.ini.tmpl > tests/mssql.ini
.PHONY: test-mssql
test-mssql: integrations.mssql.test generate-ini-mssql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./integrations.mssql.test
+ GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test
.PHONY: test-mssql\#%
test-mssql\#%: integrations.mssql.test generate-ini-mssql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./integrations.mssql.test -test.run $(subst .,/,$*)
+ GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test -test.run $(subst .,/,$*)
.PHONY: test-mssql-migration
test-mssql-migration: migrations.mssql.test migrations.individual.mssql.test
@@ -599,59 +547,59 @@ test-e2e: test-e2e-sqlite
.PHONY: test-e2e-sqlite
test-e2e-sqlite: playwright e2e.sqlite.test generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./e2e.sqlite.test
+ GITEA_TEST_CONF=tests/sqlite.ini ./e2e.sqlite.test
.PHONY: test-e2e-sqlite\#%
test-e2e-sqlite\#%: playwright e2e.sqlite.test generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./e2e.sqlite.test -test.run TestE2e/$*
+ GITEA_TEST_CONF=tests/sqlite.ini ./e2e.sqlite.test -test.run TestE2e/$*
.PHONY: test-e2e-mysql
test-e2e-mysql: playwright e2e.mysql.test generate-ini-mysql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./e2e.mysql.test
+ GITEA_TEST_CONF=tests/mysql.ini ./e2e.mysql.test
.PHONY: test-e2e-mysql\#%
test-e2e-mysql\#%: playwright e2e.mysql.test generate-ini-mysql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./e2e.mysql.test -test.run TestE2e/$*
+ GITEA_TEST_CONF=tests/mysql.ini ./e2e.mysql.test -test.run TestE2e/$*
.PHONY: test-e2e-pgsql
test-e2e-pgsql: playwright e2e.pgsql.test generate-ini-pgsql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini ./e2e.pgsql.test
+ GITEA_TEST_CONF=tests/pgsql.ini ./e2e.pgsql.test
.PHONY: test-e2e-pgsql\#%
test-e2e-pgsql\#%: playwright e2e.pgsql.test generate-ini-pgsql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini ./e2e.pgsql.test -test.run TestE2e/$*
+ GITEA_TEST_CONF=tests/pgsql.ini ./e2e.pgsql.test -test.run TestE2e/$*
.PHONY: test-e2e-mssql
test-e2e-mssql: playwright e2e.mssql.test generate-ini-mssql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./e2e.mssql.test
+ GITEA_TEST_CONF=tests/mssql.ini ./e2e.mssql.test
.PHONY: test-e2e-mssql\#%
test-e2e-mssql\#%: playwright e2e.mssql.test generate-ini-mssql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./e2e.mssql.test -test.run TestE2e/$*
+ GITEA_TEST_CONF=tests/mssql.ini ./e2e.mssql.test -test.run TestE2e/$*
.PHONY: bench-sqlite
bench-sqlite: integrations.sqlite.test generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
+ GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
.PHONY: bench-mysql
bench-mysql: integrations.mysql.test generate-ini-mysql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./integrations.mysql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
+ GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
.PHONY: bench-mssql
bench-mssql: integrations.mssql.test generate-ini-mssql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./integrations.mssql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
+ GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
.PHONY: bench-pgsql
bench-pgsql: integrations.pgsql.test generate-ini-pgsql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini ./integrations.pgsql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
+ GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
.PHONY: integration-test-coverage
integration-test-coverage: integrations.cover.test generate-ini-mysql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./integrations.cover.test -test.coverprofile=integration.coverage.out
+ GITEA_TEST_CONF=tests/mysql.ini ./integrations.cover.test -test.coverprofile=integration.coverage.out
.PHONY: integration-test-coverage-sqlite
integration-test-coverage-sqlite: integrations.cover.sqlite.test generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./integrations.cover.sqlite.test -test.coverprofile=integration.coverage.out
+ GITEA_TEST_CONF=tests/sqlite.ini ./integrations.cover.sqlite.test -test.coverprofile=integration.coverage.out
integrations.mysql.test: git-check $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.mysql.test
@@ -674,54 +622,54 @@ integrations.cover.sqlite.test: git-check $(GO_SOURCES)
.PHONY: migrations.mysql.test
migrations.mysql.test: $(GO_SOURCES) generate-ini-mysql
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.mysql.test
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini ./migrations.mysql.test
+ GITEA_TEST_CONF=tests/mysql.ini ./migrations.mysql.test
.PHONY: migrations.pgsql.test
migrations.pgsql.test: $(GO_SOURCES) generate-ini-pgsql
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.pgsql.test
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini ./migrations.pgsql.test
+ GITEA_TEST_CONF=tests/pgsql.ini ./migrations.pgsql.test
.PHONY: migrations.mssql.test
migrations.mssql.test: $(GO_SOURCES) generate-ini-mssql
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.mssql.test
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini ./migrations.mssql.test
+ GITEA_TEST_CONF=tests/mssql.ini ./migrations.mssql.test
.PHONY: migrations.sqlite.test
migrations.sqlite.test: $(GO_SOURCES) generate-ini-sqlite
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.sqlite.test -tags '$(TEST_TAGS)'
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini ./migrations.sqlite.test
+ GITEA_TEST_CONF=tests/sqlite.ini ./migrations.sqlite.test
.PHONY: migrations.individual.mysql.test
-migrations.individual.mysql.test: $(GO_SOURCES)
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
+migrations.individual.mysql.test: $(GO_SOURCES) generate-ini-mysql
+ GITEA_TEST_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: migrations.individual.sqlite.test\#%
migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
+ GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
.PHONY: migrations.individual.pgsql.test
-migrations.individual.pgsql.test: $(GO_SOURCES)
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
+migrations.individual.pgsql.test: $(GO_SOURCES) generate-ini-pgsql
+ GITEA_TEST_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: migrations.individual.pgsql.test\#%
migrations.individual.pgsql.test\#%: $(GO_SOURCES) generate-ini-pgsql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
+ GITEA_TEST_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
.PHONY: migrations.individual.mssql.test
migrations.individual.mssql.test: $(GO_SOURCES) generate-ini-mssql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
+ GITEA_TEST_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: migrations.individual.mssql.test\#%
migrations.individual.mssql.test\#%: $(GO_SOURCES) generate-ini-mssql
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
+ GITEA_TEST_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
.PHONY: migrations.individual.sqlite.test
migrations.individual.sqlite.test: $(GO_SOURCES) generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
+ GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: migrations.individual.sqlite.test\#%
migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite
- GITEA_ROOT="$(CURDIR)" GITEA_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
+ GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
e2e.mysql.test: $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/e2e -o e2e.mysql.test
@@ -749,7 +697,7 @@ build: frontend backend ## build everything
frontend: $(WEBPACK_DEST) ## build frontend files
.PHONY: backend
-backend: go-check generate-backend $(EXECUTABLE) ## build backend files
+backend: generate-backend $(EXECUTABLE) ## build backend files
# We generate the backend before the frontend in case we in future we want to generate things in the frontend from generated files in backend
.PHONY: generate
@@ -765,11 +713,11 @@ generate-go: $(TAGS_PREREQ)
.PHONY: security-check
security-check:
- GOEXPERIMENT= go run $(GOVULNCHECK_PACKAGE) -show color ./...
+ GOEXPERIMENT= go run $(GOVULNCHECK_PACKAGE) -show color ./... || true
$(EXECUTABLE): $(GO_SOURCES) $(TAGS_PREREQ)
ifneq ($(and $(STATIC),$(findstring pam,$(TAGS))),)
- $(error pam support set via TAGS doesn't support static builds)
+ $(error pam support set via TAGS does not support static builds)
endif
CGO_ENABLED="$(CGO_ENABLED)" CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@
@@ -843,7 +791,6 @@ deps-tools: ## install tool dependencies
$(GO) install $(MISSPELL_PACKAGE) & \
$(GO) install $(SWAGGER_PACKAGE) & \
$(GO) install $(XGO_PACKAGE) & \
- $(GO) install $(GO_LICENSES_PACKAGE) & \
$(GO) install $(GOVULNCHECK_PACKAGE) & \
$(GO) install $(ACTIONLINT_PACKAGE) & \
wait
@@ -860,7 +807,7 @@ node_modules: pnpm-lock.yaml
update: update-js update-py ## update js and py dependencies
.PHONY: update-js
-update-js: node-check | node_modules ## update js dependencies
+update-js: node_modules ## update js dependencies
$(NODE_VARS) pnpm exec updates -u -f package.json
rm -rf node_modules pnpm-lock.yaml
$(NODE_VARS) pnpm install
@@ -869,7 +816,7 @@ update-js: node-check | node_modules ## update js dependencies
@touch node_modules
.PHONY: update-py
-update-py: node-check | node_modules ## update py dependencies
+update-py: node_modules ## update py dependencies
$(NODE_VARS) pnpm exec updates -u -f pyproject.toml
rm -rf .venv uv.lock
uv sync
@@ -879,14 +826,14 @@ update-py: node-check | node_modules ## update py dependencies
webpack: $(WEBPACK_DEST) ## build webpack files
$(WEBPACK_DEST): $(WEBPACK_SOURCES) $(WEBPACK_CONFIGS) pnpm-lock.yaml
- @$(MAKE) -s node-check node_modules
+ @$(MAKE) -s node_modules
@rm -rf $(WEBPACK_DEST_ENTRIES)
@echo "Running webpack..."
@BROWSERSLIST_IGNORE_OLD_DATA=true $(NODE_VARS) pnpm exec webpack --disable-interpret
@touch $(WEBPACK_DEST)
.PHONY: svg
-svg: node-check | node_modules ## build svg files
+svg: node_modules ## build svg files
rm -rf $(SVG_DEST_DIR)
node tools/generate-svg.ts
@@ -932,9 +879,6 @@ docker:
docker build --disable-content-trust=false -t $(DOCKER_REF) .
# support also build args docker build --build-arg GITEA_VERSION=v1.2.3 --build-arg TAGS="bindata sqlite sqlite_unlock_notify" .
-# This endif closes the if at the top of the file
-endif
-
# Disable parallel execution because it would break some targets that don't
# specify exact dependencies like 'backend' which does currently not depend
# on 'frontend' to enable Node.js-less builds from source tarballs.
diff --git a/README.zh-cn.md b/README.zh-cn.md
index f34b25b945..8d9531e8e4 100644
--- a/README.zh-cn.md
+++ b/README.zh-cn.md
@@ -46,7 +46,7 @@
`build` 目标分为两个子目标:
- `make backend` 需要 [Go Stable](https://go.dev/dl/),所需版本在 [go.mod](/go.mod) 中定义。
-- `make frontend` 需要 [Node.js LTS](https://nodejs.org/en/download/) 或更高版本。
+- `make frontend` 需要 [Node.js LTS](https://nodejs.org/en/download/) 或更高版本以及 [pnpm](https://pnpm.io/installation)。
需要互联网连接来下载 go 和 npm 模块。从包含预构建前端文件的官方源代码压缩包构建时,不会触发 `frontend` 目标,因此可以在没有 Node.js 的情况下构建。
diff --git a/README.zh-tw.md b/README.zh-tw.md
index 9de3f85dd5..875d31e28a 100644
--- a/README.zh-tw.md
+++ b/README.zh-tw.md
@@ -46,7 +46,7 @@
`build` 目標分為兩個子目標:
- `make backend` 需要 [Go Stable](https://go.dev/dl/),所需版本在 [go.mod](/go.mod) 中定義。
-- `make frontend` 需要 [Node.js LTS](https://nodejs.org/en/download/) 或更高版本。
+- `make frontend` 需要 [Node.js LTS](https://nodejs.org/en/download/) 或更高版本以及 [pnpm](https://pnpm.io/installation)。
需要互聯網連接來下載 go 和 npm 模塊。從包含預構建前端文件的官方源代碼壓縮包構建時,不會觸發 `frontend` 目標,因此可以在沒有 Node.js 的情況下構建。
diff --git a/assets/emoji.json b/assets/emoji.json
index 28244caa65..2d6329e44f 100644
--- a/assets/emoji.json
+++ b/assets/emoji.json
@@ -1 +1,11483 @@
-[{"emoji":"👍","aliases":["+1","thumbsup"]},{"emoji":"👎","aliases":["-1","thumbsdown"]},{"emoji":"💯","aliases":["100"]},{"emoji":"🔢","aliases":["1234"]},{"emoji":"🥇","aliases":["1st_place_medal"]},{"emoji":"🥈","aliases":["2nd_place_medal"]},{"emoji":"🥉","aliases":["3rd_place_medal"]},{"emoji":"🎱","aliases":["8ball"]},{"emoji":"🅰️","aliases":["a"]},{"emoji":"🆎","aliases":["ab"]},{"emoji":"🧮","aliases":["abacus"]},{"emoji":"🔤","aliases":["abc"]},{"emoji":"🔡","aliases":["abcd"]},{"emoji":"🉑","aliases":["accept"]},{"emoji":"🪗","aliases":["accordion"]},{"emoji":"🩹","aliases":["adhesive_bandage"]},{"emoji":"🧑","aliases":["adult"]},{"emoji":"🚡","aliases":["aerial_tramway"]},{"emoji":"🇦🇫","aliases":["afghanistan"]},{"emoji":"✈️","aliases":["airplane"]},{"emoji":"🇦🇽","aliases":["aland_islands"]},{"emoji":"⏰","aliases":["alarm_clock"]},{"emoji":"🇦🇱","aliases":["albania"]},{"emoji":"⚗️","aliases":["alembic"]},{"emoji":"🇩🇿","aliases":["algeria"]},{"emoji":"👽","aliases":["alien"]},{"emoji":"🚑","aliases":["ambulance"]},{"emoji":"🇦🇸","aliases":["american_samoa"]},{"emoji":"🏺","aliases":["amphora"]},{"emoji":"🫀","aliases":["anatomical_heart"]},{"emoji":"⚓","aliases":["anchor"]},{"emoji":"🇦🇩","aliases":["andorra"]},{"emoji":"👼","aliases":["angel"]},{"emoji":"💢","aliases":["anger"]},{"emoji":"🇦🇴","aliases":["angola"]},{"emoji":"😠","aliases":["angry"]},{"emoji":"🇦🇮","aliases":["anguilla"]},{"emoji":"😧","aliases":["anguished"]},{"emoji":"🐜","aliases":["ant"]},{"emoji":"🇦🇶","aliases":["antarctica"]},{"emoji":"🇦🇬","aliases":["antigua_barbuda"]},{"emoji":"🍎","aliases":["apple"]},{"emoji":"♒","aliases":["aquarius"]},{"emoji":"🇦🇷","aliases":["argentina"]},{"emoji":"♈","aliases":["aries"]},{"emoji":"🇦🇲","aliases":["armenia"]},{"emoji":"◀️","aliases":["arrow_backward"]},{"emoji":"⏬","aliases":["arrow_double_down"]},{"emoji":"⏫","aliases":["arrow_double_up"]},{"emoji":"⬇️","aliases":["arrow_down"]},{"emoji":"🔽","aliases":["arrow_down_small"]},{"emoji":"▶️","aliases":["arrow_forward"]},{"emoji"
:"⤵️","aliases":["arrow_heading_down"]},{"emoji":"⤴️","aliases":["arrow_heading_up"]},{"emoji":"⬅️","aliases":["arrow_left"]},{"emoji":"↙️","aliases":["arrow_lower_left"]},{"emoji":"↘️","aliases":["arrow_lower_right"]},{"emoji":"➡️","aliases":["arrow_right"]},{"emoji":"↪️","aliases":["arrow_right_hook"]},{"emoji":"⬆️","aliases":["arrow_up"]},{"emoji":"↕️","aliases":["arrow_up_down"]},{"emoji":"🔼","aliases":["arrow_up_small"]},{"emoji":"↖️","aliases":["arrow_upper_left"]},{"emoji":"↗️","aliases":["arrow_upper_right"]},{"emoji":"🔃","aliases":["arrows_clockwise"]},{"emoji":"🔄","aliases":["arrows_counterclockwise"]},{"emoji":"🎨","aliases":["art"]},{"emoji":"🚛","aliases":["articulated_lorry"]},{"emoji":"🛰️","aliases":["artificial_satellite"]},{"emoji":"🧑🎨","aliases":["artist"]},{"emoji":"🇦🇼","aliases":["aruba"]},{"emoji":"🇦🇨","aliases":["ascension_island"]},{"emoji":"*️⃣","aliases":["asterisk"]},{"emoji":"😲","aliases":["astonished"]},{"emoji":"🧑🚀","aliases":["astronaut"]},{"emoji":"👟","aliases":["athletic_shoe"]},{"emoji":"🏧","aliases":["atm"]},{"emoji":"⚛️","aliases":["atom_symbol"]},{"emoji":"🇦🇺","aliases":["australia"]},{"emoji":"🇦🇹","aliases":["austria"]},{"emoji":"🛺","aliases":["auto_rickshaw"]},{"emoji":"🥑","aliases":["avocado"]},{"emoji":"🪓","aliases":["axe"]},{"emoji":"🇦🇿","aliases":["azerbaijan"]},{"emoji":"🅱️","aliases":["b"]},{"emoji":"👶","aliases":["baby"]},{"emoji":"🍼","aliases":["baby_bottle"]},{"emoji":"🐤","aliases":["baby_chick"]},{"emoji":"🚼","aliases":["baby_symbol"]},{"emoji":"🔙","aliases":["back"]},{"emoji":"🥓","aliases":["bacon"]},{"emoji":"🦡","aliases":["badger"]},{"emoji":"🏸","aliases":["badminton"]},{"emoji":"🥯","aliases":["bagel"]},{"emoji":"🛄","aliases":["baggage_claim"]},{"emoji":"🥖","aliases":["baguette_bread"]},{"emoji":"🇧🇸","aliases":["bahamas"]},{"emoji":"🇧🇭","aliases":["bahrain"]},{"emoji":"⚖️","aliases":["balance_scale"]},{"emoji":"👨🦲","aliases":["bald_man"]},{"emoji":"👩🦲","aliases":["bald_woman"]},{"emoji":"🩰","aliases":["ballet_shoes"]}
,{"emoji":"🎈","aliases":["balloon"]},{"emoji":"🗳️","aliases":["ballot_box"]},{"emoji":"☑️","aliases":["ballot_box_with_check"]},{"emoji":"🎍","aliases":["bamboo"]},{"emoji":"🍌","aliases":["banana"]},{"emoji":"‼️","aliases":["bangbang"]},{"emoji":"🇧🇩","aliases":["bangladesh"]},{"emoji":"🪕","aliases":["banjo"]},{"emoji":"🏦","aliases":["bank"]},{"emoji":"📊","aliases":["bar_chart"]},{"emoji":"🇧🇧","aliases":["barbados"]},{"emoji":"💈","aliases":["barber"]},{"emoji":"⚾","aliases":["baseball"]},{"emoji":"🧺","aliases":["basket"]},{"emoji":"🏀","aliases":["basketball"]},{"emoji":"🦇","aliases":["bat"]},{"emoji":"🛀","aliases":["bath"]},{"emoji":"🛁","aliases":["bathtub"]},{"emoji":"🔋","aliases":["battery"]},{"emoji":"🏖️","aliases":["beach_umbrella"]},{"emoji":"🫘","aliases":["beans"]},{"emoji":"🐻","aliases":["bear"]},{"emoji":"🧔","aliases":["bearded_person"]},{"emoji":"🦫","aliases":["beaver"]},{"emoji":"🛏️","aliases":["bed"]},{"emoji":"🐝","aliases":["bee","honeybee"]},{"emoji":"🍺","aliases":["beer"]},{"emoji":"🍻","aliases":["beers"]},{"emoji":"🪲","aliases":["beetle"]},{"emoji":"🔰","aliases":["beginner"]},{"emoji":"🇧🇾","aliases":["belarus"]},{"emoji":"🇧🇪","aliases":["belgium"]},{"emoji":"🇧🇿","aliases":["belize"]},{"emoji":"🔔","aliases":["bell"]},{"emoji":"🫑","aliases":["bell_pepper"]},{"emoji":"🛎️","aliases":["bellhop_bell"]},{"emoji":"🇧🇯","aliases":["benin"]},{"emoji":"🍱","aliases":["bento"]},{"emoji":"🇧🇲","aliases":["bermuda"]},{"emoji":"🧃","aliases":["beverage_box"]},{"emoji":"🇧🇹","aliases":["bhutan"]},{"emoji":"🚴","aliases":["bicyclist"]},{"emoji":"🚲","aliases":["bike"]},{"emoji":"🚴♂️","aliases":["biking_man"]},{"emoji":"🚴♀️","aliases":["biking_woman"]},{"emoji":"👙","aliases":["bikini"]},{"emoji":"🧢","aliases":["billed_cap"]},{"emoji":"☣️","aliases":["biohazard"]},{"emoji":"🐦","aliases":["bird"]},{"emoji":"🎂","aliases":["birthday"]},{"emoji":"🦬","aliases":["bison"]},{"emoji":"🫦","aliases":["biting_lip"]},{"emoji":"🐦⬛","aliases":["black_bird"]},{"emoji":"🐈⬛","aliases":["black_cat
"]},{"emoji":"⚫","aliases":["black_circle"]},{"emoji":"🏴","aliases":["black_flag"]},{"emoji":"🖤","aliases":["black_heart"]},{"emoji":"🃏","aliases":["black_joker"]},{"emoji":"⬛","aliases":["black_large_square"]},{"emoji":"◾","aliases":["black_medium_small_square"]},{"emoji":"◼️","aliases":["black_medium_square"]},{"emoji":"✒️","aliases":["black_nib"]},{"emoji":"▪️","aliases":["black_small_square"]},{"emoji":"🔲","aliases":["black_square_button"]},{"emoji":"👱♂️","aliases":["blond_haired_man"]},{"emoji":"👱","aliases":["blond_haired_person"]},{"emoji":"👱♀️","aliases":["blond_haired_woman","blonde_woman"]},{"emoji":"🌼","aliases":["blossom"]},{"emoji":"🐡","aliases":["blowfish"]},{"emoji":"📘","aliases":["blue_book"]},{"emoji":"🚙","aliases":["blue_car"]},{"emoji":"💙","aliases":["blue_heart"]},{"emoji":"🟦","aliases":["blue_square"]},{"emoji":"🫐","aliases":["blueberries"]},{"emoji":"😊","aliases":["blush"]},{"emoji":"🐗","aliases":["boar"]},{"emoji":"⛵","aliases":["boat","sailboat"]},{"emoji":"🇧🇴","aliases":["bolivia"]},{"emoji":"💣","aliases":["bomb"]},{"emoji":"🦴","aliases":["bone"]},{"emoji":"📖","aliases":["book","open_book"]},{"emoji":"🔖","aliases":["bookmark"]},{"emoji":"📑","aliases":["bookmark_tabs"]},{"emoji":"📚","aliases":["books"]},{"emoji":"💥","aliases":["boom","collision"]},{"emoji":"🪃","aliases":["boomerang"]},{"emoji":"👢","aliases":["boot"]},{"emoji":"🇧🇦","aliases":["bosnia_herzegovina"]},{"emoji":"🇧🇼","aliases":["botswana"]},{"emoji":"⛹️♂️","aliases":["bouncing_ball_man","basketball_man"]},{"emoji":"⛹️","aliases":["bouncing_ball_person"]},{"emoji":"⛹️♀️","aliases":["bouncing_ball_woman","basketball_woman"]},{"emoji":"💐","aliases":["bouquet"]},{"emoji":"🇧🇻","aliases":["bouvet_island"]},{"emoji":"🙇","aliases":["bow"]},{"emoji":"🏹","aliases":["bow_and_arrow"]},{"emoji":"🙇♂️","aliases":["bowing_man"]},{"emoji":"🙇♀️","aliases":["bowing_woman"]},{"emoji":"🥣","aliases":["bowl_with_spoon"]},{"emoji":"🎳","aliases":["bowling"]},{"emoji":"🥊","aliases":["boxing_glove"]},{"emoji
":"👦","aliases":["boy"]},{"emoji":"🧠","aliases":["brain"]},{"emoji":"🇧🇷","aliases":["brazil"]},{"emoji":"🍞","aliases":["bread"]},{"emoji":"🤱","aliases":["breast_feeding"]},{"emoji":"🧱","aliases":["bricks"]},{"emoji":"🌉","aliases":["bridge_at_night"]},{"emoji":"💼","aliases":["briefcase"]},{"emoji":"🇮🇴","aliases":["british_indian_ocean_territory"]},{"emoji":"🇻🇬","aliases":["british_virgin_islands"]},{"emoji":"🥦","aliases":["broccoli"]},{"emoji":"💔","aliases":["broken_heart"]},{"emoji":"🧹","aliases":["broom"]},{"emoji":"🟤","aliases":["brown_circle"]},{"emoji":"🤎","aliases":["brown_heart"]},{"emoji":"🟫","aliases":["brown_square"]},{"emoji":"🇧🇳","aliases":["brunei"]},{"emoji":"🧋","aliases":["bubble_tea"]},{"emoji":"🫧","aliases":["bubbles"]},{"emoji":"🪣","aliases":["bucket"]},{"emoji":"🐛","aliases":["bug"]},{"emoji":"🏗️","aliases":["building_construction"]},{"emoji":"💡","aliases":["bulb"]},{"emoji":"🇧🇬","aliases":["bulgaria"]},{"emoji":"🚅","aliases":["bullettrain_front"]},{"emoji":"🚄","aliases":["bullettrain_side"]},{"emoji":"🇧🇫","aliases":["burkina_faso"]},{"emoji":"🌯","aliases":["burrito"]},{"emoji":"🇧🇮","aliases":["burundi"]},{"emoji":"🚌","aliases":["bus"]},{"emoji":"🕴️","aliases":["business_suit_levitating"]},{"emoji":"🚏","aliases":["busstop"]},{"emoji":"👤","aliases":["bust_in_silhouette"]},{"emoji":"👥","aliases":["busts_in_silhouette"]},{"emoji":"🧈","aliases":["butter"]},{"emoji":"🦋","aliases":["butterfly"]},{"emoji":"🌵","aliases":["cactus"]},{"emoji":"🍰","aliases":["cake"]},{"emoji":"📆","aliases":["calendar"]},{"emoji":"🤙","aliases":["call_me_hand"]},{"emoji":"📲","aliases":["calling"]},{"emoji":"🇰🇭","aliases":["cambodia"]},{"emoji":"🐫","aliases":["camel"]},{"emoji":"📷","aliases":["camera"]},{"emoji":"📸","aliases":["camera_flash"]},{"emoji":"🇨🇲","aliases":["cameroon"]},{"emoji":"🏕️","aliases":["camping"]},{"emoji":"🇨🇦","aliases":["canada"]},{"emoji":"🇮🇨","aliases":["canary_islands"]},{"emoji":"♋","aliases":["cancer"]},{"emoji":"🕯️","aliases":["candle"]},{"emoji":"🍬",
"aliases":["candy"]},{"emoji":"🥫","aliases":["canned_food"]},{"emoji":"🛶","aliases":["canoe"]},{"emoji":"🇨🇻","aliases":["cape_verde"]},{"emoji":"🔠","aliases":["capital_abcd"]},{"emoji":"♑","aliases":["capricorn"]},{"emoji":"🚗","aliases":["car","red_car"]},{"emoji":"🗃️","aliases":["card_file_box"]},{"emoji":"📇","aliases":["card_index"]},{"emoji":"🗂️","aliases":["card_index_dividers"]},{"emoji":"🇧🇶","aliases":["caribbean_netherlands"]},{"emoji":"🎠","aliases":["carousel_horse"]},{"emoji":"🪚","aliases":["carpentry_saw"]},{"emoji":"🥕","aliases":["carrot"]},{"emoji":"🤸","aliases":["cartwheeling"]},{"emoji":"🐱","aliases":["cat"]},{"emoji":"🐈","aliases":["cat2"]},{"emoji":"🇰🇾","aliases":["cayman_islands"]},{"emoji":"💿","aliases":["cd"]},{"emoji":"🇨🇫","aliases":["central_african_republic"]},{"emoji":"🇪🇦","aliases":["ceuta_melilla"]},{"emoji":"🇹🇩","aliases":["chad"]},{"emoji":"⛓️","aliases":["chains"]},{"emoji":"🪑","aliases":["chair"]},{"emoji":"🍾","aliases":["champagne"]},{"emoji":"💹","aliases":["chart"]},{"emoji":"📉","aliases":["chart_with_downwards_trend"]},{"emoji":"📈","aliases":["chart_with_upwards_trend"]},{"emoji":"🏁","aliases":["checkered_flag"]},{"emoji":"🧀","aliases":["cheese"]},{"emoji":"🍒","aliases":["cherries"]},{"emoji":"🌸","aliases":["cherry_blossom"]},{"emoji":"♟️","aliases":["chess_pawn"]},{"emoji":"🌰","aliases":["chestnut"]},{"emoji":"🐔","aliases":["chicken"]},{"emoji":"🧒","aliases":["child"]},{"emoji":"🚸","aliases":["children_crossing"]},{"emoji":"🇨🇱","aliases":["chile"]},{"emoji":"🐿️","aliases":["chipmunk"]},{"emoji":"🍫","aliases":["chocolate_bar"]},{"emoji":"🥢","aliases":["chopsticks"]},{"emoji":"🇨🇽","aliases":["christmas_island"]},{"emoji":"🎄","aliases":["christmas_tree"]},{"emoji":"⛪","aliases":["church"]},{"emoji":"🎦","aliases":["cinema"]},{"emoji":"🎪","aliases":["circus_tent"]},{"emoji":"🌇","aliases":["city_sunrise"]},{"emoji":"🌆","aliases":["city_sunset"]},{"emoji":"🏙️","aliases":["cityscape"]},{"emoji":"🆑","aliases":["cl"]},{"emoji":"🗜️","aliases":[
"clamp"]},{"emoji":"👏","aliases":["clap"]},{"emoji":"🎬","aliases":["clapper"]},{"emoji":"🏛️","aliases":["classical_building"]},{"emoji":"🧗","aliases":["climbing"]},{"emoji":"🧗♂️","aliases":["climbing_man"]},{"emoji":"🧗♀️","aliases":["climbing_woman"]},{"emoji":"🥂","aliases":["clinking_glasses"]},{"emoji":"📋","aliases":["clipboard"]},{"emoji":"🇨🇵","aliases":["clipperton_island"]},{"emoji":"🕐","aliases":["clock1"]},{"emoji":"🕙","aliases":["clock10"]},{"emoji":"🕥","aliases":["clock1030"]},{"emoji":"🕚","aliases":["clock11"]},{"emoji":"🕦","aliases":["clock1130"]},{"emoji":"🕛","aliases":["clock12"]},{"emoji":"🕧","aliases":["clock1230"]},{"emoji":"🕜","aliases":["clock130"]},{"emoji":"🕑","aliases":["clock2"]},{"emoji":"🕝","aliases":["clock230"]},{"emoji":"🕒","aliases":["clock3"]},{"emoji":"🕞","aliases":["clock330"]},{"emoji":"🕓","aliases":["clock4"]},{"emoji":"🕟","aliases":["clock430"]},{"emoji":"🕔","aliases":["clock5"]},{"emoji":"🕠","aliases":["clock530"]},{"emoji":"🕕","aliases":["clock6"]},{"emoji":"🕡","aliases":["clock630"]},{"emoji":"🕖","aliases":["clock7"]},{"emoji":"🕢","aliases":["clock730"]},{"emoji":"🕗","aliases":["clock8"]},{"emoji":"🕣","aliases":["clock830"]},{"emoji":"🕘","aliases":["clock9"]},{"emoji":"🕤","aliases":["clock930"]},{"emoji":"📕","aliases":["closed_book"]},{"emoji":"🔐","aliases":["closed_lock_with_key"]},{"emoji":"🌂","aliases":["closed_umbrella"]},{"emoji":"☁️","aliases":["cloud"]},{"emoji":"🌩️","aliases":["cloud_with_lightning"]},{"emoji":"⛈️","aliases":["cloud_with_lightning_and_rain"]},{"emoji":"🌧️","aliases":["cloud_with_rain"]},{"emoji":"🌨️","aliases":["cloud_with_snow"]},{"emoji":"🤡","aliases":["clown_face"]},{"emoji":"♣️","aliases":["clubs"]},{"emoji":"🇨🇳","aliases":["cn"]},{"emoji":"🧥","aliases":["coat"]},{"emoji":"🪳","aliases":["cockroach"]},{"emoji":"🍸","aliases":["cocktail"]},{"emoji":"🥥","aliases":["coconut"]},{"emoji":"🇨🇨","aliases":["cocos_islands"]},{"emoji":"☕","aliases":["coffee"]},{"emoji":"⚰️","aliases":["coffin"]},{"emoji":"🪙","ali
ases":["coin"]},{"emoji":"🥶","aliases":["cold_face"]},{"emoji":"😰","aliases":["cold_sweat"]},{"emoji":"🇨🇴","aliases":["colombia"]},{"emoji":"☄️","aliases":["comet"]},{"emoji":"🇰🇲","aliases":["comoros"]},{"emoji":"🧭","aliases":["compass"]},{"emoji":"💻","aliases":["computer"]},{"emoji":"🖱️","aliases":["computer_mouse"]},{"emoji":"🎊","aliases":["confetti_ball"]},{"emoji":"😖","aliases":["confounded"]},{"emoji":"😕","aliases":["confused"]},{"emoji":"🇨🇬","aliases":["congo_brazzaville"]},{"emoji":"🇨🇩","aliases":["congo_kinshasa"]},{"emoji":"㊗️","aliases":["congratulations"]},{"emoji":"🚧","aliases":["construction"]},{"emoji":"👷","aliases":["construction_worker"]},{"emoji":"👷♂️","aliases":["construction_worker_man"]},{"emoji":"👷♀️","aliases":["construction_worker_woman"]},{"emoji":"🎛️","aliases":["control_knobs"]},{"emoji":"🏪","aliases":["convenience_store"]},{"emoji":"🧑🍳","aliases":["cook"]},{"emoji":"🇨🇰","aliases":["cook_islands"]},{"emoji":"🍪","aliases":["cookie"]},{"emoji":"🆒","aliases":["cool"]},{"emoji":"©️","aliases":["copyright"]},{"emoji":"🪸","aliases":["coral"]},{"emoji":"🌽","aliases":["corn"]},{"emoji":"🇨🇷","aliases":["costa_rica"]},{"emoji":"🇨🇮","aliases":["cote_divoire"]},{"emoji":"🛋️","aliases":["couch_and_lamp"]},{"emoji":"👫","aliases":["couple"]},{"emoji":"💑","aliases":["couple_with_heart"]},{"emoji":"👨❤️👨","aliases":["couple_with_heart_man_man"]},{"emoji":"👩❤️👨","aliases":["couple_with_heart_woman_man"]},{"emoji":"👩❤️👩","aliases":["couple_with_heart_woman_woman"]},{"emoji":"💏","aliases":["couplekiss"]},{"emoji":"👨❤️💋👨","aliases":["couplekiss_man_man"]},{"emoji":"👩❤️💋👨","aliases":["couplekiss_man_woman"]},{"emoji":"👩❤️💋👩","aliases":["couplekiss_woman_woman"]},{"emoji":"🐮","aliases":["cow"]},{"emoji":"🐄","aliases":["cow2"]},{"emoji":"🤠","aliases":["cowboy_hat_face"]},{"emoji":"🦀","aliases":["crab"]},{"emoji":"🖍️","aliases":["crayon"]},{"emoji":"💳","aliases":["credit_card"]},{"emoji":"🌙","aliases":["crescent_moon"]},{"emoji":"🦗","aliases":["cricket"]},{"emoji":"
🏏","aliases":["cricket_game"]},{"emoji":"🇭🇷","aliases":["croatia"]},{"emoji":"🐊","aliases":["crocodile"]},{"emoji":"🥐","aliases":["croissant"]},{"emoji":"🤞","aliases":["crossed_fingers"]},{"emoji":"🎌","aliases":["crossed_flags"]},{"emoji":"⚔️","aliases":["crossed_swords"]},{"emoji":"👑","aliases":["crown"]},{"emoji":"🩼","aliases":["crutch"]},{"emoji":"😢","aliases":["cry"]},{"emoji":"😿","aliases":["crying_cat_face"]},{"emoji":"🔮","aliases":["crystal_ball"]},{"emoji":"🇨🇺","aliases":["cuba"]},{"emoji":"🥒","aliases":["cucumber"]},{"emoji":"🥤","aliases":["cup_with_straw"]},{"emoji":"🧁","aliases":["cupcake"]},{"emoji":"💘","aliases":["cupid"]},{"emoji":"🇨🇼","aliases":["curacao"]},{"emoji":"🥌","aliases":["curling_stone"]},{"emoji":"👨🦱","aliases":["curly_haired_man"]},{"emoji":"👩🦱","aliases":["curly_haired_woman"]},{"emoji":"➰","aliases":["curly_loop"]},{"emoji":"💱","aliases":["currency_exchange"]},{"emoji":"🍛","aliases":["curry"]},{"emoji":"🤬","aliases":["cursing_face"]},{"emoji":"🍮","aliases":["custard"]},{"emoji":"🛃","aliases":["customs"]},{"emoji":"🥩","aliases":["cut_of_meat"]},{"emoji":"🌀","aliases":["cyclone"]},{"emoji":"🇨🇾","aliases":["cyprus"]},{"emoji":"🇨🇿","aliases":["czech_republic"]},{"emoji":"🗡️","aliases":["dagger"]},{"emoji":"👯","aliases":["dancers"]},{"emoji":"👯♂️","aliases":["dancing_men"]},{"emoji":"👯♀️","aliases":["dancing_women"]},{"emoji":"🍡","aliases":["dango"]},{"emoji":"🕶️","aliases":["dark_sunglasses"]},{"emoji":"🎯","aliases":["dart"]},{"emoji":"💨","aliases":["dash"]},{"emoji":"📅","aliases":["date"]},{"emoji":"🇩🇪","aliases":["de"]},{"emoji":"🧏♂️","aliases":["deaf_man"]},{"emoji":"🧏","aliases":["deaf_person"]},{"emoji":"🧏♀️","aliases":["deaf_woman"]},{"emoji":"🌳","aliases":["deciduous_tree"]},{"emoji":"🦌","aliases":["deer"]},{"emoji":"🇩🇰","aliases":["denmark"]},{"emoji":"🏬","aliases":["department_store"]},{"emoji":"🏚️","aliases":["derelict_house"]},{"emoji":"🏜️","aliases":["desert"]},{"emoji":"🏝️","aliases":["desert_island"]},{"emoji":"🖥️","aliases":["
desktop_computer"]},{"emoji":"🕵️","aliases":["detective"]},{"emoji":"💠","aliases":["diamond_shape_with_a_dot_inside"]},{"emoji":"♦️","aliases":["diamonds"]},{"emoji":"🇩🇬","aliases":["diego_garcia"]},{"emoji":"😞","aliases":["disappointed"]},{"emoji":"😥","aliases":["disappointed_relieved"]},{"emoji":"🥸","aliases":["disguised_face"]},{"emoji":"🤿","aliases":["diving_mask"]},{"emoji":"🪔","aliases":["diya_lamp"]},{"emoji":"💫","aliases":["dizzy"]},{"emoji":"😵","aliases":["dizzy_face"]},{"emoji":"🇩🇯","aliases":["djibouti"]},{"emoji":"🧬","aliases":["dna"]},{"emoji":"🚯","aliases":["do_not_litter"]},{"emoji":"🦤","aliases":["dodo"]},{"emoji":"🐶","aliases":["dog"]},{"emoji":"🐕","aliases":["dog2"]},{"emoji":"💵","aliases":["dollar"]},{"emoji":"🎎","aliases":["dolls"]},{"emoji":"🐬","aliases":["dolphin","flipper"]},{"emoji":"🇩🇲","aliases":["dominica"]},{"emoji":"🇩🇴","aliases":["dominican_republic"]},{"emoji":"🫏","aliases":["donkey"]},{"emoji":"🚪","aliases":["door"]},{"emoji":"🫥","aliases":["dotted_line_face"]},{"emoji":"🍩","aliases":["doughnut"]},{"emoji":"🕊️","aliases":["dove"]},{"emoji":"🐉","aliases":["dragon"]},{"emoji":"🐲","aliases":["dragon_face"]},{"emoji":"👗","aliases":["dress"]},{"emoji":"🐪","aliases":["dromedary_camel"]},{"emoji":"🤤","aliases":["drooling_face"]},{"emoji":"🩸","aliases":["drop_of_blood"]},{"emoji":"💧","aliases":["droplet"]},{"emoji":"🥁","aliases":["drum"]},{"emoji":"🦆","aliases":["duck"]},{"emoji":"🥟","aliases":["dumpling"]},{"emoji":"📀","aliases":["dvd"]},{"emoji":"🦅","aliases":["eagle"]},{"emoji":"👂","aliases":["ear"]},{"emoji":"🌾","aliases":["ear_of_rice"]},{"emoji":"🦻","aliases":["ear_with_hearing_aid"]},{"emoji":"🌍","aliases":["earth_africa"]},{"emoji":"🌎","aliases":["earth_americas"]},{"emoji":"🌏","aliases":["earth_asia"]},{"emoji":"🇪🇨","aliases":["ecuador"]},{"emoji":"🥚","aliases":["egg"]},{"emoji":"🍆","aliases":["eggplant"]},{"emoji":"🇪🇬","aliases":["egypt"]},{"emoji":"8️⃣","aliases":["eight"]},{"emoji":"✴️","aliases":["eight_pointed_black_star"]},{"em
oji":"✳️","aliases":["eight_spoked_asterisk"]},{"emoji":"⏏️","aliases":["eject_button"]},{"emoji":"🇸🇻","aliases":["el_salvador"]},{"emoji":"🔌","aliases":["electric_plug"]},{"emoji":"🐘","aliases":["elephant"]},{"emoji":"🛗","aliases":["elevator"]},{"emoji":"🧝","aliases":["elf"]},{"emoji":"🧝♂️","aliases":["elf_man"]},{"emoji":"🧝♀️","aliases":["elf_woman"]},{"emoji":"📧","aliases":["email","e-mail"]},{"emoji":"🪹","aliases":["empty_nest"]},{"emoji":"🔚","aliases":["end"]},{"emoji":"🏴","aliases":["england"]},{"emoji":"✉️","aliases":["envelope"]},{"emoji":"📩","aliases":["envelope_with_arrow"]},{"emoji":"🇬🇶","aliases":["equatorial_guinea"]},{"emoji":"🇪🇷","aliases":["eritrea"]},{"emoji":"🇪🇸","aliases":["es"]},{"emoji":"🇪🇪","aliases":["estonia"]},{"emoji":"🇪🇹","aliases":["ethiopia"]},{"emoji":"🇪🇺","aliases":["eu","european_union"]},{"emoji":"💶","aliases":["euro"]},{"emoji":"🏰","aliases":["european_castle"]},{"emoji":"🏤","aliases":["european_post_office"]},{"emoji":"🌲","aliases":["evergreen_tree"]},{"emoji":"❗","aliases":["exclamation","heavy_exclamation_mark"]},{"emoji":"🤯","aliases":["exploding_head"]},{"emoji":"😑","aliases":["expressionless"]},{"emoji":"👁️","aliases":["eye"]},{"emoji":"👁️🗨️","aliases":["eye_speech_bubble"]},{"emoji":"👓","aliases":["eyeglasses"]},{"emoji":"👀","aliases":["eyes"]},{"emoji":"😮💨","aliases":["face_exhaling"]},{"emoji":"🥹","aliases":["face_holding_back_tears"]},{"emoji":"😶🌫️","aliases":["face_in_clouds"]},{"emoji":"🫤","aliases":["face_with_diagonal_mouth"]},{"emoji":"🤕","aliases":["face_with_head_bandage"]},{"emoji":"🫢","aliases":["face_with_open_eyes_and_hand_over_mouth"]},{"emoji":"🫣","aliases":["face_with_peeking_eye"]},{"emoji":"😵💫","aliases":["face_with_spiral_eyes"]},{"emoji":"🤒","aliases":["face_with_thermometer"]},{"emoji":"🤦","aliases":["facepalm"]},{"emoji":"🏭","aliases":["factory"]},{"emoji":"🧑🏭","aliases":["factory_worker"]},{"emoji":"🧚","aliases":["fairy"]},{"emoji":"🧚♂️","aliases":["fairy_man"]},{"emoji":"🧚♀️","aliases":["fairy_woman"]
},{"emoji":"🧆","aliases":["falafel"]},{"emoji":"🇫🇰","aliases":["falkland_islands"]},{"emoji":"🍂","aliases":["fallen_leaf"]},{"emoji":"👪","aliases":["family"]},{"emoji":"👨👦","aliases":["family_man_boy"]},{"emoji":"👨👦👦","aliases":["family_man_boy_boy"]},{"emoji":"👨👧","aliases":["family_man_girl"]},{"emoji":"👨👧👦","aliases":["family_man_girl_boy"]},{"emoji":"👨👧👧","aliases":["family_man_girl_girl"]},{"emoji":"👨👨👦","aliases":["family_man_man_boy"]},{"emoji":"👨👨👦👦","aliases":["family_man_man_boy_boy"]},{"emoji":"👨👨👧","aliases":["family_man_man_girl"]},{"emoji":"👨👨👧👦","aliases":["family_man_man_girl_boy"]},{"emoji":"👨👨👧👧","aliases":["family_man_man_girl_girl"]},{"emoji":"👨👩👦","aliases":["family_man_woman_boy"]},{"emoji":"👨👩👦👦","aliases":["family_man_woman_boy_boy"]},{"emoji":"👨👩👧","aliases":["family_man_woman_girl"]},{"emoji":"👨👩👧👦","aliases":["family_man_woman_girl_boy"]},{"emoji":"👨👩👧👧","aliases":["family_man_woman_girl_girl"]},{"emoji":"👩👦","aliases":["family_woman_boy"]},{"emoji":"👩👦👦","aliases":["family_woman_boy_boy"]},{"emoji":"👩👧","aliases":["family_woman_girl"]},{"emoji":"👩👧👦","aliases":["family_woman_girl_boy"]},{"emoji":"👩👧👧","aliases":["family_woman_girl_girl"]},{"emoji":"👩👩👦","aliases":["family_woman_woman_boy"]},{"emoji":"👩👩👦👦","aliases":["family_woman_woman_boy_boy"]},{"emoji":"👩👩👧","aliases":["family_woman_woman_girl"]},{"emoji":"👩👩👧👦","aliases":["family_woman_woman_girl_boy"]},{"emoji":"👩👩👧👧","aliases":["family_woman_woman_girl_girl"]},{"emoji":"🧑🌾","aliases":["farmer"]},{"emoji":"🇫🇴","aliases":["faroe_islands"]},{"emoji":"⏩","aliases":["fast_forward"]},{"emoji":"📠","aliases":["fax"]},{"emoji":"😨","aliases":["fearful"]},{"emoji":"🪶","aliases":["feather"]},{"emoji":"🐾","aliases":["feet","paw_prints"]},{"emoji":"🕵️♀️","aliases":["female_detective"]},{"emoji":"♀️","aliases":["female_sign"]},{"emoji":"🎡","aliases":["ferris_wheel"]},{"emoji":"⛴️","aliases":["ferry"]},{"emoji":"🏑","aliases":["field_hockey"]},{"emoji":"🇫🇯","aliases":["fiji"]},{"emoji":"🗄️","aliase
s":["file_cabinet"]},{"emoji":"📁","aliases":["file_folder"]},{"emoji":"📽️","aliases":["film_projector"]},{"emoji":"🎞️","aliases":["film_strip"]},{"emoji":"🇫🇮","aliases":["finland"]},{"emoji":"🔥","aliases":["fire"]},{"emoji":"🚒","aliases":["fire_engine"]},{"emoji":"🧯","aliases":["fire_extinguisher"]},{"emoji":"🧨","aliases":["firecracker"]},{"emoji":"🧑🚒","aliases":["firefighter"]},{"emoji":"🎆","aliases":["fireworks"]},{"emoji":"🌓","aliases":["first_quarter_moon"]},{"emoji":"🌛","aliases":["first_quarter_moon_with_face"]},{"emoji":"🐟","aliases":["fish"]},{"emoji":"🍥","aliases":["fish_cake"]},{"emoji":"🎣","aliases":["fishing_pole_and_fish"]},{"emoji":"🤛","aliases":["fist_left"]},{"emoji":"👊","aliases":["fist_oncoming","facepunch","punch"]},{"emoji":"✊","aliases":["fist_raised","fist"]},{"emoji":"🤜","aliases":["fist_right"]},{"emoji":"5️⃣","aliases":["five"]},{"emoji":"🎏","aliases":["flags"]},{"emoji":"🦩","aliases":["flamingo"]},{"emoji":"🔦","aliases":["flashlight"]},{"emoji":"🥿","aliases":["flat_shoe"]},{"emoji":"🫓","aliases":["flatbread"]},{"emoji":"⚜️","aliases":["fleur_de_lis"]},{"emoji":"🛬","aliases":["flight_arrival"]},{"emoji":"🛫","aliases":["flight_departure"]},{"emoji":"💾","aliases":["floppy_disk"]},{"emoji":"🎴","aliases":["flower_playing_cards"]},{"emoji":"😳","aliases":["flushed"]},{"emoji":"🪈","aliases":["flute"]},{"emoji":"🪰","aliases":["fly"]},{"emoji":"🥏","aliases":["flying_disc"]},{"emoji":"🛸","aliases":["flying_saucer"]},{"emoji":"🌫️","aliases":["fog"]},{"emoji":"🌁","aliases":["foggy"]},{"emoji":"🪭","aliases":["folding_hand_fan"]},{"emoji":"🫕","aliases":["fondue"]},{"emoji":"🦶","aliases":["foot"]},{"emoji":"🏈","aliases":["football"]},{"emoji":"👣","aliases":["footprints"]},{"emoji":"🍴","aliases":["fork_and_knife"]},{"emoji":"🥠","aliases":["fortune_cookie"]},{"emoji":"⛲","aliases":["fountain"]},{"emoji":"🖋️","aliases":["fountain_pen"]},{"emoji":"4️⃣","aliases":["four"]},{"emoji":"🍀","aliases":["four_leaf_clover"]},{"emoji":"🦊","aliases":["fox_face"]},{"emoji
":"🇫🇷","aliases":["fr"]},{"emoji":"🖼️","aliases":["framed_picture"]},{"emoji":"🆓","aliases":["free"]},{"emoji":"🇬🇫","aliases":["french_guiana"]},{"emoji":"🇵🇫","aliases":["french_polynesia"]},{"emoji":"🇹🇫","aliases":["french_southern_territories"]},{"emoji":"🍳","aliases":["fried_egg"]},{"emoji":"🍤","aliases":["fried_shrimp"]},{"emoji":"🍟","aliases":["fries"]},{"emoji":"🐸","aliases":["frog"]},{"emoji":"😦","aliases":["frowning"]},{"emoji":"☹️","aliases":["frowning_face"]},{"emoji":"🙍♂️","aliases":["frowning_man"]},{"emoji":"🙍","aliases":["frowning_person"]},{"emoji":"🙍♀️","aliases":["frowning_woman"]},{"emoji":"⛽","aliases":["fuelpump"]},{"emoji":"🌕","aliases":["full_moon"]},{"emoji":"🌝","aliases":["full_moon_with_face"]},{"emoji":"⚱️","aliases":["funeral_urn"]},{"emoji":"🇬🇦","aliases":["gabon"]},{"emoji":"🇬🇲","aliases":["gambia"]},{"emoji":"🎲","aliases":["game_die"]},{"emoji":"🧄","aliases":["garlic"]},{"emoji":"🇬🇧","aliases":["gb","uk"]},{"emoji":"⚙️","aliases":["gear"]},{"emoji":"💎","aliases":["gem"]},{"emoji":"♊","aliases":["gemini"]},{"emoji":"🧞","aliases":["genie"]},{"emoji":"🧞♂️","aliases":["genie_man"]},{"emoji":"🧞♀️","aliases":["genie_woman"]},{"emoji":"🇬🇪","aliases":["georgia"]},{"emoji":"🇬🇭","aliases":["ghana"]},{"emoji":"👻","aliases":["ghost"]},{"emoji":"🇬🇮","aliases":["gibraltar"]},{"emoji":"🎁","aliases":["gift"]},{"emoji":"💝","aliases":["gift_heart"]},{"emoji":"🫚","aliases":["ginger_root"]},{"emoji":"🦒","aliases":["giraffe"]},{"emoji":"👧","aliases":["girl"]},{"emoji":"🌐","aliases":["globe_with_meridians"]},{"emoji":"🧤","aliases":["gloves"]},{"emoji":"🥅","aliases":["goal_net"]},{"emoji":"🐐","aliases":["goat"]},{"emoji":"🥽","aliases":["goggles"]},{"emoji":"⛳","aliases":["golf"]},{"emoji":"🏌️","aliases":["golfing"]},{"emoji":"🏌️♂️","aliases":["golfing_man"]},{"emoji":"🏌️♀️","aliases":["golfing_woman"]},{"emoji":"🪿","aliases":["goose"]},{"emoji":"🦍","aliases":["gorilla"]},{"emoji":"🍇","aliases":["grapes"]},{"emoji":"🇬🇷","aliases":["greece"]},{"emoji":"🍏","alia
ses":["green_apple"]},{"emoji":"📗","aliases":["green_book"]},{"emoji":"🟢","aliases":["green_circle"]},{"emoji":"💚","aliases":["green_heart"]},{"emoji":"🥗","aliases":["green_salad"]},{"emoji":"🟩","aliases":["green_square"]},{"emoji":"🇬🇱","aliases":["greenland"]},{"emoji":"🇬🇩","aliases":["grenada"]},{"emoji":"❕","aliases":["grey_exclamation"]},{"emoji":"🩶","aliases":["grey_heart"]},{"emoji":"❔","aliases":["grey_question"]},{"emoji":"😬","aliases":["grimacing"]},{"emoji":"😁","aliases":["grin"]},{"emoji":"😀","aliases":["grinning"]},{"emoji":"🇬🇵","aliases":["guadeloupe"]},{"emoji":"🇬🇺","aliases":["guam"]},{"emoji":"💂","aliases":["guard"]},{"emoji":"💂♂️","aliases":["guardsman"]},{"emoji":"💂♀️","aliases":["guardswoman"]},{"emoji":"🇬🇹","aliases":["guatemala"]},{"emoji":"🇬🇬","aliases":["guernsey"]},{"emoji":"🦮","aliases":["guide_dog"]},{"emoji":"🇬🇳","aliases":["guinea"]},{"emoji":"🇬🇼","aliases":["guinea_bissau"]},{"emoji":"🎸","aliases":["guitar"]},{"emoji":"🔫","aliases":["gun"]},{"emoji":"🇬🇾","aliases":["guyana"]},{"emoji":"🪮","aliases":["hair_pick"]},{"emoji":"💇","aliases":["haircut"]},{"emoji":"💇♂️","aliases":["haircut_man"]},{"emoji":"💇♀️","aliases":["haircut_woman"]},{"emoji":"🇭🇹","aliases":["haiti"]},{"emoji":"🍔","aliases":["hamburger"]},{"emoji":"🔨","aliases":["hammer"]},{"emoji":"⚒️","aliases":["hammer_and_pick"]},{"emoji":"🛠️","aliases":["hammer_and_wrench"]},{"emoji":"🪬","aliases":["hamsa"]},{"emoji":"🐹","aliases":["hamster"]},{"emoji":"✋","aliases":["hand","raised_hand"]},{"emoji":"🤭","aliases":["hand_over_mouth"]},{"emoji":"🫰","aliases":["hand_with_index_finger_and_thumb_crossed"]},{"emoji":"👜","aliases":["handbag"]},{"emoji":"🤾","aliases":["handball_person"]},{"emoji":"🤝","aliases":["handshake"]},{"emoji":"💩","aliases":["hankey","poop","shit"]},{"emoji":"#️⃣","aliases":["hash"]},{"emoji":"🐥","aliases":["hatched_chick"]},{"emoji":"🐣","aliases":["hatching_chick"]},{"emoji":"🎧","aliases":["headphones"]},{"emoji":"🪦","aliases":["headstone"]},{"emoji":"🧑⚕️","aliases":[
"health_worker"]},{"emoji":"🙉","aliases":["hear_no_evil"]},{"emoji":"🇭🇲","aliases":["heard_mcdonald_islands"]},{"emoji":"❤️","aliases":["heart"]},{"emoji":"💟","aliases":["heart_decoration"]},{"emoji":"😍","aliases":["heart_eyes"]},{"emoji":"😻","aliases":["heart_eyes_cat"]},{"emoji":"🫶","aliases":["heart_hands"]},{"emoji":"❤️🔥","aliases":["heart_on_fire"]},{"emoji":"💓","aliases":["heartbeat"]},{"emoji":"💗","aliases":["heartpulse"]},{"emoji":"♥️","aliases":["hearts"]},{"emoji":"✔️","aliases":["heavy_check_mark"]},{"emoji":"➗","aliases":["heavy_division_sign"]},{"emoji":"💲","aliases":["heavy_dollar_sign"]},{"emoji":"🟰","aliases":["heavy_equals_sign"]},{"emoji":"❣️","aliases":["heavy_heart_exclamation"]},{"emoji":"➖","aliases":["heavy_minus_sign"]},{"emoji":"✖️","aliases":["heavy_multiplication_x"]},{"emoji":"➕","aliases":["heavy_plus_sign"]},{"emoji":"🦔","aliases":["hedgehog"]},{"emoji":"🚁","aliases":["helicopter"]},{"emoji":"🌿","aliases":["herb"]},{"emoji":"🌺","aliases":["hibiscus"]},{"emoji":"🔆","aliases":["high_brightness"]},{"emoji":"👠","aliases":["high_heel"]},{"emoji":"🥾","aliases":["hiking_boot"]},{"emoji":"🛕","aliases":["hindu_temple"]},{"emoji":"🦛","aliases":["hippopotamus"]},{"emoji":"🔪","aliases":["hocho","knife"]},{"emoji":"🕳️","aliases":["hole"]},{"emoji":"🇭🇳","aliases":["honduras"]},{"emoji":"🍯","aliases":["honey_pot"]},{"emoji":"🇭🇰","aliases":["hong_kong"]},{"emoji":"🪝","aliases":["hook"]},{"emoji":"🐴","aliases":["horse"]},{"emoji":"🏇","aliases":["horse_racing"]},{"emoji":"🏥","aliases":["hospital"]},{"emoji":"🥵","aliases":["hot_face"]},{"emoji":"🌶️","aliases":["hot_pepper"]},{"emoji":"🌭","aliases":["hotdog"]},{"emoji":"🏨","aliases":["hotel"]},{"emoji":"♨️","aliases":["hotsprings"]},{"emoji":"⌛","aliases":["hourglass"]},{"emoji":"⏳","aliases":["hourglass_flowing_sand"]},{"emoji":"🏠","aliases":["house"]},{"emoji":"🏡","aliases":["house_with_garden"]},{"emoji":"🏘️","aliases":["houses"]},{"emoji":"🤗","aliases":["hugs"]},{"emoji":"🇭🇺","aliases":["hungary"]},{"e
moji":"😯","aliases":["hushed"]},{"emoji":"🛖","aliases":["hut"]},{"emoji":"🪻","aliases":["hyacinth"]},{"emoji":"🍨","aliases":["ice_cream"]},{"emoji":"🧊","aliases":["ice_cube"]},{"emoji":"🏒","aliases":["ice_hockey"]},{"emoji":"⛸️","aliases":["ice_skate"]},{"emoji":"🍦","aliases":["icecream"]},{"emoji":"🇮🇸","aliases":["iceland"]},{"emoji":"🆔","aliases":["id"]},{"emoji":"🪪","aliases":["identification_card"]},{"emoji":"🉐","aliases":["ideograph_advantage"]},{"emoji":"👿","aliases":["imp"]},{"emoji":"📥","aliases":["inbox_tray"]},{"emoji":"📨","aliases":["incoming_envelope"]},{"emoji":"🫵","aliases":["index_pointing_at_the_viewer"]},{"emoji":"🇮🇳","aliases":["india"]},{"emoji":"🇮🇩","aliases":["indonesia"]},{"emoji":"♾️","aliases":["infinity"]},{"emoji":"ℹ️","aliases":["information_source"]},{"emoji":"😇","aliases":["innocent"]},{"emoji":"⁉️","aliases":["interrobang"]},{"emoji":"📱","aliases":["iphone"]},{"emoji":"🇮🇷","aliases":["iran"]},{"emoji":"🇮🇶","aliases":["iraq"]},{"emoji":"🇮🇪","aliases":["ireland"]},{"emoji":"🇮🇲","aliases":["isle_of_man"]},{"emoji":"🇮🇱","aliases":["israel"]},{"emoji":"🇮🇹","aliases":["it"]},{"emoji":"🏮","aliases":["izakaya_lantern","lantern"]},{"emoji":"🎃","aliases":["jack_o_lantern"]},{"emoji":"🇯🇲","aliases":["jamaica"]},{"emoji":"🗾","aliases":["japan"]},{"emoji":"🏯","aliases":["japanese_castle"]},{"emoji":"👺","aliases":["japanese_goblin"]},{"emoji":"👹","aliases":["japanese_ogre"]},{"emoji":"🫙","aliases":["jar"]},{"emoji":"👖","aliases":["jeans"]},{"emoji":"🪼","aliases":["jellyfish"]},{"emoji":"🇯🇪","aliases":["jersey"]},{"emoji":"🧩","aliases":["jigsaw"]},{"emoji":"🇯🇴","aliases":["jordan"]},{"emoji":"😂","aliases":["joy"]},{"emoji":"😹","aliases":["joy_cat"]},{"emoji":"🕹️","aliases":["joystick"]},{"emoji":"🇯🇵","aliases":["jp"]},{"emoji":"🧑⚖️","aliases":["judge"]},{"emoji":"🤹","aliases":["juggling_person"]},{"emoji":"🕋","aliases":["kaaba"]},{"emoji":"🦘","aliases":["kangaroo"]},{"emoji":"🇰🇿","aliases":["kazakhstan"]},{"emoji":"🇰🇪","aliases":["kenya"]},{"emoji":"🔑
","aliases":["key"]},{"emoji":"⌨️","aliases":["keyboard"]},{"emoji":"🔟","aliases":["keycap_ten"]},{"emoji":"🪯","aliases":["khanda"]},{"emoji":"🛴","aliases":["kick_scooter"]},{"emoji":"👘","aliases":["kimono"]},{"emoji":"🇰🇮","aliases":["kiribati"]},{"emoji":"💋","aliases":["kiss"]},{"emoji":"😗","aliases":["kissing"]},{"emoji":"😽","aliases":["kissing_cat"]},{"emoji":"😚","aliases":["kissing_closed_eyes"]},{"emoji":"😘","aliases":["kissing_heart"]},{"emoji":"😙","aliases":["kissing_smiling_eyes"]},{"emoji":"🪁","aliases":["kite"]},{"emoji":"🥝","aliases":["kiwi_fruit"]},{"emoji":"🧎♂️","aliases":["kneeling_man"]},{"emoji":"🧎","aliases":["kneeling_person"]},{"emoji":"🧎♀️","aliases":["kneeling_woman"]},{"emoji":"🪢","aliases":["knot"]},{"emoji":"🐨","aliases":["koala"]},{"emoji":"🈁","aliases":["koko"]},{"emoji":"🇽🇰","aliases":["kosovo"]},{"emoji":"🇰🇷","aliases":["kr"]},{"emoji":"🇰🇼","aliases":["kuwait"]},{"emoji":"🇰🇬","aliases":["kyrgyzstan"]},{"emoji":"🥼","aliases":["lab_coat"]},{"emoji":"🏷️","aliases":["label"]},{"emoji":"🥍","aliases":["lacrosse"]},{"emoji":"🪜","aliases":["ladder"]},{"emoji":"🐞","aliases":["lady_beetle"]},{"emoji":"🇱🇦","aliases":["laos"]},{"emoji":"🔵","aliases":["large_blue_circle"]},{"emoji":"🔷","aliases":["large_blue_diamond"]},{"emoji":"🔶","aliases":["large_orange_diamond"]},{"emoji":"🌗","aliases":["last_quarter_moon"]},{"emoji":"🌜","aliases":["last_quarter_moon_with_face"]},{"emoji":"✝️","aliases":["latin_cross"]},{"emoji":"🇱🇻","aliases":["latvia"]},{"emoji":"😆","aliases":["laughing","satisfied","laugh"]},{"emoji":"🥬","aliases":["leafy_green"]},{"emoji":"🍃","aliases":["leaves"]},{"emoji":"🇱🇧","aliases":["lebanon"]},{"emoji":"📒","aliases":["ledger"]},{"emoji":"🛅","aliases":["left_luggage"]},{"emoji":"↔️","aliases":["left_right_arrow"]},{"emoji":"🗨️","aliases":["left_speech_bubble"]},{"emoji":"↩️","aliases":["leftwards_arrow_with_hook"]},{"emoji":"🫲","aliases":["leftwards_hand"]},{"emoji":"🫷","aliases":["leftwards_pushing_hand"]},{"emoji":"🦵","aliases":["leg"]
},{"emoji":"🍋","aliases":["lemon"]},{"emoji":"♌","aliases":["leo"]},{"emoji":"🐆","aliases":["leopard"]},{"emoji":"🇱🇸","aliases":["lesotho"]},{"emoji":"🎚️","aliases":["level_slider"]},{"emoji":"🇱🇷","aliases":["liberia"]},{"emoji":"♎","aliases":["libra"]},{"emoji":"🇱🇾","aliases":["libya"]},{"emoji":"🇱🇮","aliases":["liechtenstein"]},{"emoji":"🩵","aliases":["light_blue_heart"]},{"emoji":"🚈","aliases":["light_rail"]},{"emoji":"🔗","aliases":["link"]},{"emoji":"🦁","aliases":["lion"]},{"emoji":"👄","aliases":["lips"]},{"emoji":"💄","aliases":["lipstick"]},{"emoji":"🇱🇹","aliases":["lithuania"]},{"emoji":"🦎","aliases":["lizard"]},{"emoji":"🦙","aliases":["llama"]},{"emoji":"🦞","aliases":["lobster"]},{"emoji":"🔒","aliases":["lock"]},{"emoji":"🔏","aliases":["lock_with_ink_pen"]},{"emoji":"🍭","aliases":["lollipop"]},{"emoji":"🪘","aliases":["long_drum"]},{"emoji":"➿","aliases":["loop"]},{"emoji":"🧴","aliases":["lotion_bottle"]},{"emoji":"🪷","aliases":["lotus"]},{"emoji":"🧘","aliases":["lotus_position"]},{"emoji":"🧘♂️","aliases":["lotus_position_man"]},{"emoji":"🧘♀️","aliases":["lotus_position_woman"]},{"emoji":"🔊","aliases":["loud_sound"]},{"emoji":"📢","aliases":["loudspeaker"]},{"emoji":"🏩","aliases":["love_hotel"]},{"emoji":"💌","aliases":["love_letter"]},{"emoji":"🤟","aliases":["love_you_gesture"]},{"emoji":"🪫","aliases":["low_battery"]},{"emoji":"🔅","aliases":["low_brightness"]},{"emoji":"🧳","aliases":["luggage"]},{"emoji":"🫁","aliases":["lungs"]},{"emoji":"🇱🇺","aliases":["luxembourg"]},{"emoji":"🤥","aliases":["lying_face"]},{"emoji":"Ⓜ️","aliases":["m"]},{"emoji":"🇲🇴","aliases":["macau"]},{"emoji":"🇲🇰","aliases":["macedonia"]},{"emoji":"🇲🇬","aliases":["madagascar"]},{"emoji":"🔍","aliases":["mag"]},{"emoji":"🔎","aliases":["mag_right"]},{"emoji":"🧙","aliases":["mage"]},{"emoji":"🧙♂️","aliases":["mage_man"]},{"emoji":"🧙♀️","aliases":["mage_woman"]},{"emoji":"🪄","aliases":["magic_wand"]},{"emoji":"🧲","aliases":["magnet"]},{"emoji":"🀄","aliases":["mahjong"]},{"emoji":"📫","aliases":["
mailbox"]},{"emoji":"📪","aliases":["mailbox_closed"]},{"emoji":"📬","aliases":["mailbox_with_mail"]},{"emoji":"📭","aliases":["mailbox_with_no_mail"]},{"emoji":"🇲🇼","aliases":["malawi"]},{"emoji":"🇲🇾","aliases":["malaysia"]},{"emoji":"🇲🇻","aliases":["maldives"]},{"emoji":"🕵️♂️","aliases":["male_detective"]},{"emoji":"♂️","aliases":["male_sign"]},{"emoji":"🇲🇱","aliases":["mali"]},{"emoji":"🇲🇹","aliases":["malta"]},{"emoji":"🦣","aliases":["mammoth"]},{"emoji":"👨","aliases":["man"]},{"emoji":"👨🎨","aliases":["man_artist"]},{"emoji":"👨🚀","aliases":["man_astronaut"]},{"emoji":"🧔♂️","aliases":["man_beard"]},{"emoji":"🤸♂️","aliases":["man_cartwheeling"]},{"emoji":"👨🍳","aliases":["man_cook"]},{"emoji":"🕺","aliases":["man_dancing"]},{"emoji":"🤦♂️","aliases":["man_facepalming"]},{"emoji":"👨🏭","aliases":["man_factory_worker"]},{"emoji":"👨🌾","aliases":["man_farmer"]},{"emoji":"👨🍼","aliases":["man_feeding_baby"]},{"emoji":"👨🚒","aliases":["man_firefighter"]},{"emoji":"👨⚕️","aliases":["man_health_worker"]},{"emoji":"👨🦽","aliases":["man_in_manual_wheelchair"]},{"emoji":"👨🦼","aliases":["man_in_motorized_wheelchair"]},{"emoji":"🤵♂️","aliases":["man_in_tuxedo"]},{"emoji":"👨⚖️","aliases":["man_judge"]},{"emoji":"🤹♂️","aliases":["man_juggling"]},{"emoji":"👨🔧","aliases":["man_mechanic"]},{"emoji":"👨💼","aliases":["man_office_worker"]},{"emoji":"👨✈️","aliases":["man_pilot"]},{"emoji":"🤾♂️","aliases":["man_playing_handball"]},{"emoji":"🤽♂️","aliases":["man_playing_water_polo"]},{"emoji":"👨🔬","aliases":["man_scientist"]},{"emoji":"🤷♂️","aliases":["man_shrugging"]},{"emoji":"👨🎤","aliases":["man_singer"]},{"emoji":"👨🎓","aliases":["man_student"]},{"emoji":"👨🏫","aliases":["man_teacher"]},{"emoji":"👨💻","aliases":["man_technologist"]},{"emoji":"👲","aliases":["man_with_gua_pi_mao"]},{"emoji":"👨🦯","aliases":["man_with_probing_cane"]},{"emoji":"👳♂️","aliases":["man_with_turban"]},{"emoji":"👰♂️","aliases":["man_with_veil"]},{"emoji":"🥭","aliases":["mango"]},{"emoji":"👞","aliases":["mans_shoe","shoe"]},{"
emoji":"🕰️","aliases":["mantelpiece_clock"]},{"emoji":"🦽","aliases":["manual_wheelchair"]},{"emoji":"🍁","aliases":["maple_leaf"]},{"emoji":"🪇","aliases":["maracas"]},{"emoji":"🇲🇭","aliases":["marshall_islands"]},{"emoji":"🥋","aliases":["martial_arts_uniform"]},{"emoji":"🇲🇶","aliases":["martinique"]},{"emoji":"😷","aliases":["mask"]},{"emoji":"💆","aliases":["massage"]},{"emoji":"💆♂️","aliases":["massage_man"]},{"emoji":"💆♀️","aliases":["massage_woman"]},{"emoji":"🧉","aliases":["mate"]},{"emoji":"🇲🇷","aliases":["mauritania"]},{"emoji":"🇲🇺","aliases":["mauritius"]},{"emoji":"🇾🇹","aliases":["mayotte"]},{"emoji":"🍖","aliases":["meat_on_bone"]},{"emoji":"🧑🔧","aliases":["mechanic"]},{"emoji":"🦾","aliases":["mechanical_arm"]},{"emoji":"🦿","aliases":["mechanical_leg"]},{"emoji":"🎖️","aliases":["medal_military"]},{"emoji":"🏅","aliases":["medal_sports"]},{"emoji":"⚕️","aliases":["medical_symbol"]},{"emoji":"📣","aliases":["mega"]},{"emoji":"🍈","aliases":["melon"]},{"emoji":"🫠","aliases":["melting_face"]},{"emoji":"📝","aliases":["memo","pencil"]},{"emoji":"🤼♂️","aliases":["men_wrestling"]},{"emoji":"❤️🩹","aliases":["mending_heart"]},{"emoji":"🕎","aliases":["menorah"]},{"emoji":"🚹","aliases":["mens"]},{"emoji":"🧜♀️","aliases":["mermaid"]},{"emoji":"🧜♂️","aliases":["merman"]},{"emoji":"🧜","aliases":["merperson"]},{"emoji":"🤘","aliases":["metal"]},{"emoji":"🚇","aliases":["metro"]},{"emoji":"🇲🇽","aliases":["mexico"]},{"emoji":"🦠","aliases":["microbe"]},{"emoji":"🇫🇲","aliases":["micronesia"]},{"emoji":"🎤","aliases":["microphone"]},{"emoji":"🔬","aliases":["microscope"]},{"emoji":"🖕","aliases":["middle_finger","fu"]},{"emoji":"🪖","aliases":["military_helmet"]},{"emoji":"🥛","aliases":["milk_glass"]},{"emoji":"🌌","aliases":["milky_way"]},{"emoji":"🚐","aliases":["minibus"]},{"emoji":"💽","aliases":["minidisc"]},{"emoji":"🪞","aliases":["mirror"]},{"emoji":"🪩","aliases":["mirror_ball"]},{"emoji":"📴","aliases":["mobile_phone_off"]},{"emoji":"🇲🇩","aliases":["moldova"]},{"emoji":"🇲🇨","aliases":[
"monaco"]},{"emoji":"🤑","aliases":["money_mouth_face"]},{"emoji":"💸","aliases":["money_with_wings"]},{"emoji":"💰","aliases":["moneybag"]},{"emoji":"🇲🇳","aliases":["mongolia"]},{"emoji":"🐒","aliases":["monkey"]},{"emoji":"🐵","aliases":["monkey_face"]},{"emoji":"🧐","aliases":["monocle_face"]},{"emoji":"🚝","aliases":["monorail"]},{"emoji":"🇲🇪","aliases":["montenegro"]},{"emoji":"🇲🇸","aliases":["montserrat"]},{"emoji":"🌔","aliases":["moon","waxing_gibbous_moon"]},{"emoji":"🥮","aliases":["moon_cake"]},{"emoji":"🫎","aliases":["moose"]},{"emoji":"🇲🇦","aliases":["morocco"]},{"emoji":"🎓","aliases":["mortar_board"]},{"emoji":"🕌","aliases":["mosque"]},{"emoji":"🦟","aliases":["mosquito"]},{"emoji":"🛥️","aliases":["motor_boat"]},{"emoji":"🛵","aliases":["motor_scooter"]},{"emoji":"🏍️","aliases":["motorcycle"]},{"emoji":"🦼","aliases":["motorized_wheelchair"]},{"emoji":"🛣️","aliases":["motorway"]},{"emoji":"🗻","aliases":["mount_fuji"]},{"emoji":"⛰️","aliases":["mountain"]},{"emoji":"🚵","aliases":["mountain_bicyclist"]},{"emoji":"🚵♂️","aliases":["mountain_biking_man"]},{"emoji":"🚵♀️","aliases":["mountain_biking_woman"]},{"emoji":"🚠","aliases":["mountain_cableway"]},{"emoji":"🚞","aliases":["mountain_railway"]},{"emoji":"🏔️","aliases":["mountain_snow"]},{"emoji":"🐭","aliases":["mouse"]},{"emoji":"🐁","aliases":["mouse2"]},{"emoji":"🪤","aliases":["mouse_trap"]},{"emoji":"🎥","aliases":["movie_camera"]},{"emoji":"🗿","aliases":["moyai"]},{"emoji":"🇲🇿","aliases":["mozambique"]},{"emoji":"🤶","aliases":["mrs_claus"]},{"emoji":"💪","aliases":["muscle"]},{"emoji":"🍄","aliases":["mushroom"]},{"emoji":"🎹","aliases":["musical_keyboard"]},{"emoji":"🎵","aliases":["musical_note"]},{"emoji":"🎼","aliases":["musical_score"]},{"emoji":"🔇","aliases":["mute"]},{"emoji":"🧑🎄","aliases":["mx_claus"]},{"emoji":"🇲🇲","aliases":["myanmar"]},{"emoji":"💅","aliases":["nail_care"]},{"emoji":"📛","aliases":["name_badge"]},{"emoji":"🇳🇦","aliases":["namibia"]},{"emoji":"🏞️","aliases":["national_park"]},{"emoji":"🇳🇷","alia
ses":["nauru"]},{"emoji":"🤢","aliases":["nauseated_face"]},{"emoji":"🧿","aliases":["nazar_amulet"]},{"emoji":"👔","aliases":["necktie"]},{"emoji":"❎","aliases":["negative_squared_cross_mark"]},{"emoji":"🇳🇵","aliases":["nepal"]},{"emoji":"🤓","aliases":["nerd_face"]},{"emoji":"🪺","aliases":["nest_with_eggs"]},{"emoji":"🪆","aliases":["nesting_dolls"]},{"emoji":"🇳🇱","aliases":["netherlands"]},{"emoji":"😐","aliases":["neutral_face"]},{"emoji":"🆕","aliases":["new"]},{"emoji":"🇳🇨","aliases":["new_caledonia"]},{"emoji":"🌑","aliases":["new_moon"]},{"emoji":"🌚","aliases":["new_moon_with_face"]},{"emoji":"🇳🇿","aliases":["new_zealand"]},{"emoji":"📰","aliases":["newspaper"]},{"emoji":"🗞️","aliases":["newspaper_roll"]},{"emoji":"⏭️","aliases":["next_track_button"]},{"emoji":"🆖","aliases":["ng"]},{"emoji":"🇳🇮","aliases":["nicaragua"]},{"emoji":"🇳🇪","aliases":["niger"]},{"emoji":"🇳🇬","aliases":["nigeria"]},{"emoji":"🌃","aliases":["night_with_stars"]},{"emoji":"9️⃣","aliases":["nine"]},{"emoji":"🥷","aliases":["ninja"]},{"emoji":"🇳🇺","aliases":["niue"]},{"emoji":"🔕","aliases":["no_bell"]},{"emoji":"🚳","aliases":["no_bicycles"]},{"emoji":"⛔","aliases":["no_entry"]},{"emoji":"🚫","aliases":["no_entry_sign"]},{"emoji":"🙅","aliases":["no_good"]},{"emoji":"🙅♂️","aliases":["no_good_man","ng_man"]},{"emoji":"🙅♀️","aliases":["no_good_woman","ng_woman"]},{"emoji":"📵","aliases":["no_mobile_phones"]},{"emoji":"😶","aliases":["no_mouth"]},{"emoji":"🚷","aliases":["no_pedestrians"]},{"emoji":"🚭","aliases":["no_smoking"]},{"emoji":"🚱","aliases":["non-potable_water"]},{"emoji":"🇳🇫","aliases":["norfolk_island"]},{"emoji":"🇰🇵","aliases":["north_korea"]},{"emoji":"🇲🇵","aliases":["northern_mariana_islands"]},{"emoji":"🇳🇴","aliases":["norway"]},{"emoji":"👃","aliases":["nose"]},{"emoji":"📓","aliases":["notebook"]},{"emoji":"📔","aliases":["notebook_with_decorative_cover"]},{"emoji":"🎶","aliases":["notes"]},{"emoji":"🔩","aliases":["nut_and_bolt"]},{"emoji":"⭕","aliases":["o"]},{"emoji":"🅾️","aliases":["o2"]},{
"emoji":"🌊","aliases":["ocean"]},{"emoji":"🐙","aliases":["octopus"]},{"emoji":"🍢","aliases":["oden"]},{"emoji":"🏢","aliases":["office"]},{"emoji":"🧑💼","aliases":["office_worker"]},{"emoji":"🛢️","aliases":["oil_drum"]},{"emoji":"🆗","aliases":["ok"]},{"emoji":"👌","aliases":["ok_hand"]},{"emoji":"🙆♂️","aliases":["ok_man"]},{"emoji":"🙆","aliases":["ok_person"]},{"emoji":"🙆♀️","aliases":["ok_woman"]},{"emoji":"🗝️","aliases":["old_key"]},{"emoji":"🧓","aliases":["older_adult"]},{"emoji":"👴","aliases":["older_man"]},{"emoji":"👵","aliases":["older_woman"]},{"emoji":"🫒","aliases":["olive"]},{"emoji":"🕉️","aliases":["om"]},{"emoji":"🇴🇲","aliases":["oman"]},{"emoji":"🔛","aliases":["on"]},{"emoji":"🚘","aliases":["oncoming_automobile"]},{"emoji":"🚍","aliases":["oncoming_bus"]},{"emoji":"🚔","aliases":["oncoming_police_car"]},{"emoji":"🚖","aliases":["oncoming_taxi"]},{"emoji":"1️⃣","aliases":["one"]},{"emoji":"🩱","aliases":["one_piece_swimsuit"]},{"emoji":"🧅","aliases":["onion"]},{"emoji":"📂","aliases":["open_file_folder"]},{"emoji":"👐","aliases":["open_hands"]},{"emoji":"😮","aliases":["open_mouth"]},{"emoji":"☂️","aliases":["open_umbrella"]},{"emoji":"⛎","aliases":["ophiuchus"]},{"emoji":"📙","aliases":["orange_book"]},{"emoji":"🟠","aliases":["orange_circle"]},{"emoji":"🧡","aliases":["orange_heart"]},{"emoji":"🟧","aliases":["orange_square"]},{"emoji":"🦧","aliases":["orangutan"]},{"emoji":"☦️","aliases":["orthodox_cross"]},{"emoji":"🦦","aliases":["otter"]},{"emoji":"📤","aliases":["outbox_tray"]},{"emoji":"🦉","aliases":["owl"]},{"emoji":"🐂","aliases":["ox"]},{"emoji":"🦪","aliases":["oyster"]},{"emoji":"📦","aliases":["package"]},{"emoji":"📄","aliases":["page_facing_up"]},{"emoji":"📃","aliases":["page_with_curl"]},{"emoji":"📟","aliases":["pager"]},{"emoji":"🖌️","aliases":["paintbrush"]},{"emoji":"🇵🇰","aliases":["pakistan"]},{"emoji":"🇵🇼","aliases":["palau"]},{"emoji":"🇵🇸","aliases":["palestinian_territories"]},{"emoji":"🫳","aliases":["palm_down_hand"]},{"emoji":"🌴","aliases":["palm_tre
e"]},{"emoji":"🫴","aliases":["palm_up_hand"]},{"emoji":"🤲","aliases":["palms_up_together"]},{"emoji":"🇵🇦","aliases":["panama"]},{"emoji":"🥞","aliases":["pancakes"]},{"emoji":"🐼","aliases":["panda_face"]},{"emoji":"📎","aliases":["paperclip"]},{"emoji":"🖇️","aliases":["paperclips"]},{"emoji":"🇵🇬","aliases":["papua_new_guinea"]},{"emoji":"🪂","aliases":["parachute"]},{"emoji":"🇵🇾","aliases":["paraguay"]},{"emoji":"⛱️","aliases":["parasol_on_ground"]},{"emoji":"🅿️","aliases":["parking"]},{"emoji":"🦜","aliases":["parrot"]},{"emoji":"〽️","aliases":["part_alternation_mark"]},{"emoji":"⛅","aliases":["partly_sunny"]},{"emoji":"🥳","aliases":["partying_face"]},{"emoji":"🛳️","aliases":["passenger_ship"]},{"emoji":"🛂","aliases":["passport_control"]},{"emoji":"⏸️","aliases":["pause_button"]},{"emoji":"🫛","aliases":["pea_pod"]},{"emoji":"☮️","aliases":["peace_symbol"]},{"emoji":"🍑","aliases":["peach"]},{"emoji":"🦚","aliases":["peacock"]},{"emoji":"🥜","aliases":["peanuts"]},{"emoji":"🍐","aliases":["pear"]},{"emoji":"🖊️","aliases":["pen"]},{"emoji":"✏️","aliases":["pencil2"]},{"emoji":"🐧","aliases":["penguin"]},{"emoji":"😔","aliases":["pensive"]},{"emoji":"🧑🤝🧑","aliases":["people_holding_hands"]},{"emoji":"🫂","aliases":["people_hugging"]},{"emoji":"🎭","aliases":["performing_arts"]},{"emoji":"😣","aliases":["persevere"]},{"emoji":"🧑🦲","aliases":["person_bald"]},{"emoji":"🧑🦱","aliases":["person_curly_hair"]},{"emoji":"🧑🍼","aliases":["person_feeding_baby"]},{"emoji":"🤺","aliases":["person_fencing"]},{"emoji":"🧑🦽","aliases":["person_in_manual_wheelchair"]},{"emoji":"🧑🦼","aliases":["person_in_motorized_wheelchair"]},{"emoji":"🤵","aliases":["person_in_tuxedo"]},{"emoji":"🧑🦰","aliases":["person_red_hair"]},{"emoji":"🧑🦳","aliases":["person_white_hair"]},{"emoji":"🫅","aliases":["person_with_crown"]},{"emoji":"🧑🦯","aliases":["person_with_probing_cane"]},{"emoji":"👳","aliases":["person_with_turban"]},{"emoji":"👰","aliases":["person_with_veil"]},{"emoji":"🇵🇪","aliases":["peru"]},{"emoji":"🧫","ali
ases":["petri_dish"]},{"emoji":"🇵🇭","aliases":["philippines"]},{"emoji":"☎️","aliases":["phone","telephone"]},{"emoji":"⛏️","aliases":["pick"]},{"emoji":"🛻","aliases":["pickup_truck"]},{"emoji":"🥧","aliases":["pie"]},{"emoji":"🐷","aliases":["pig"]},{"emoji":"🐖","aliases":["pig2"]},{"emoji":"🐽","aliases":["pig_nose"]},{"emoji":"💊","aliases":["pill"]},{"emoji":"🧑✈️","aliases":["pilot"]},{"emoji":"🪅","aliases":["pinata"]},{"emoji":"🤌","aliases":["pinched_fingers"]},{"emoji":"🤏","aliases":["pinching_hand"]},{"emoji":"🍍","aliases":["pineapple"]},{"emoji":"🏓","aliases":["ping_pong"]},{"emoji":"🩷","aliases":["pink_heart"]},{"emoji":"🏴☠️","aliases":["pirate_flag"]},{"emoji":"♓","aliases":["pisces"]},{"emoji":"🇵🇳","aliases":["pitcairn_islands"]},{"emoji":"🍕","aliases":["pizza"]},{"emoji":"🪧","aliases":["placard"]},{"emoji":"🛐","aliases":["place_of_worship"]},{"emoji":"🍽️","aliases":["plate_with_cutlery"]},{"emoji":"⏯️","aliases":["play_or_pause_button"]},{"emoji":"🛝","aliases":["playground_slide"]},{"emoji":"🥺","aliases":["pleading_face"]},{"emoji":"🪠","aliases":["plunger"]},{"emoji":"👇","aliases":["point_down"]},{"emoji":"👈","aliases":["point_left"]},{"emoji":"👉","aliases":["point_right"]},{"emoji":"☝️","aliases":["point_up"]},{"emoji":"👆","aliases":["point_up_2"]},{"emoji":"🇵🇱","aliases":["poland"]},{"emoji":"🐻❄️","aliases":["polar_bear"]},{"emoji":"🚓","aliases":["police_car"]},{"emoji":"👮","aliases":["police_officer","cop"]},{"emoji":"👮♂️","aliases":["policeman"]},{"emoji":"👮♀️","aliases":["policewoman"]},{"emoji":"🐩","aliases":["poodle"]},{"emoji":"🍿","aliases":["popcorn"]},{"emoji":"🇵🇹","aliases":["portugal"]},{"emoji":"🏣","aliases":["post_office"]},{"emoji":"📯","aliases":["postal_horn"]},{"emoji":"📮","aliases":["postbox"]},{"emoji":"🚰","aliases":["potable_water"]},{"emoji":"🥔","aliases":["potato"]},{"emoji":"🪴","aliases":["potted_plant"]},{"emoji":"👝","aliases":["pouch"]},{"emoji":"🍗","aliases":["poultry_leg"]},{"emoji":"💷","aliases":["pound"]},{"emoji":"🫗","aliases":[
"pouring_liquid"]},{"emoji":"😾","aliases":["pouting_cat"]},{"emoji":"🙎","aliases":["pouting_face"]},{"emoji":"🙎♂️","aliases":["pouting_man"]},{"emoji":"🙎♀️","aliases":["pouting_woman"]},{"emoji":"🙏","aliases":["pray"]},{"emoji":"📿","aliases":["prayer_beads"]},{"emoji":"🫃","aliases":["pregnant_man"]},{"emoji":"🫄","aliases":["pregnant_person"]},{"emoji":"🤰","aliases":["pregnant_woman"]},{"emoji":"🥨","aliases":["pretzel"]},{"emoji":"⏮️","aliases":["previous_track_button"]},{"emoji":"🤴","aliases":["prince"]},{"emoji":"👸","aliases":["princess"]},{"emoji":"🖨️","aliases":["printer"]},{"emoji":"🦯","aliases":["probing_cane"]},{"emoji":"🇵🇷","aliases":["puerto_rico"]},{"emoji":"🟣","aliases":["purple_circle"]},{"emoji":"💜","aliases":["purple_heart"]},{"emoji":"🟪","aliases":["purple_square"]},{"emoji":"👛","aliases":["purse"]},{"emoji":"📌","aliases":["pushpin"]},{"emoji":"🚮","aliases":["put_litter_in_its_place"]},{"emoji":"🇶🇦","aliases":["qatar"]},{"emoji":"❓","aliases":["question"]},{"emoji":"🐰","aliases":["rabbit"]},{"emoji":"🐇","aliases":["rabbit2"]},{"emoji":"🦝","aliases":["raccoon"]},{"emoji":"🐎","aliases":["racehorse"]},{"emoji":"🏎️","aliases":["racing_car"]},{"emoji":"📻","aliases":["radio"]},{"emoji":"🔘","aliases":["radio_button"]},{"emoji":"☢️","aliases":["radioactive"]},{"emoji":"😡","aliases":["rage","pout"]},{"emoji":"🚃","aliases":["railway_car"]},{"emoji":"🛤️","aliases":["railway_track"]},{"emoji":"🌈","aliases":["rainbow"]},{"emoji":"🏳️🌈","aliases":["rainbow_flag"]},{"emoji":"🤚","aliases":["raised_back_of_hand"]},{"emoji":"🤨","aliases":["raised_eyebrow"]},{"emoji":"🖐️","aliases":["raised_hand_with_fingers_splayed"]},{"emoji":"🙌","aliases":["raised_hands"]},{"emoji":"🙋","aliases":["raising_hand"]},{"emoji":"🙋♂️","aliases":["raising_hand_man"]},{"emoji":"🙋♀️","aliases":["raising_hand_woman"]},{"emoji":"🐏","aliases":["ram"]},{"emoji":"🍜","aliases":["ramen"]},{"emoji":"🐀","aliases":["rat"]},{"emoji":"🪒","aliases":["razor"]},{"emoji":"🧾","aliases":["receipt"]},{"emoji":"⏺️"
,"aliases":["record_button"]},{"emoji":"♻️","aliases":["recycle"]},{"emoji":"🔴","aliases":["red_circle"]},{"emoji":"🧧","aliases":["red_envelope"]},{"emoji":"👨🦰","aliases":["red_haired_man"]},{"emoji":"👩🦰","aliases":["red_haired_woman"]},{"emoji":"🟥","aliases":["red_square"]},{"emoji":"®️","aliases":["registered"]},{"emoji":"☺️","aliases":["relaxed"]},{"emoji":"😌","aliases":["relieved"]},{"emoji":"🎗️","aliases":["reminder_ribbon"]},{"emoji":"🔁","aliases":["repeat"]},{"emoji":"🔂","aliases":["repeat_one"]},{"emoji":"⛑️","aliases":["rescue_worker_helmet"]},{"emoji":"🚻","aliases":["restroom"]},{"emoji":"🇷🇪","aliases":["reunion"]},{"emoji":"💞","aliases":["revolving_hearts"]},{"emoji":"⏪","aliases":["rewind"]},{"emoji":"🦏","aliases":["rhinoceros"]},{"emoji":"🎀","aliases":["ribbon"]},{"emoji":"🍚","aliases":["rice"]},{"emoji":"🍙","aliases":["rice_ball"]},{"emoji":"🍘","aliases":["rice_cracker"]},{"emoji":"🎑","aliases":["rice_scene"]},{"emoji":"🗯️","aliases":["right_anger_bubble"]},{"emoji":"🫱","aliases":["rightwards_hand"]},{"emoji":"🫸","aliases":["rightwards_pushing_hand"]},{"emoji":"💍","aliases":["ring"]},{"emoji":"🛟","aliases":["ring_buoy"]},{"emoji":"🪐","aliases":["ringed_planet"]},{"emoji":"🤖","aliases":["robot"]},{"emoji":"🪨","aliases":["rock"]},{"emoji":"🚀","aliases":["rocket"]},{"emoji":"🤣","aliases":["rofl"]},{"emoji":"🙄","aliases":["roll_eyes"]},{"emoji":"🧻","aliases":["roll_of_paper"]},{"emoji":"🎢","aliases":["roller_coaster"]},{"emoji":"🛼","aliases":["roller_skate"]},{"emoji":"🇷🇴","aliases":["romania"]},{"emoji":"🐓","aliases":["rooster"]},{"emoji":"🌹","aliases":["rose"]},{"emoji":"🏵️","aliases":["rosette"]},{"emoji":"🚨","aliases":["rotating_light"]},{"emoji":"📍","aliases":["round_pushpin"]},{"emoji":"🚣","aliases":["rowboat"]},{"emoji":"🚣♂️","aliases":["rowing_man"]},{"emoji":"🚣♀️","aliases":["rowing_woman"]},{"emoji":"🇷🇺","aliases":["ru"]},{"emoji":"🏉","aliases":["rugby_football"]},{"emoji":"🏃","aliases":["runner","running"]},{"emoji":"🏃♂️","aliases":["running_man
"]},{"emoji":"🎽","aliases":["running_shirt_with_sash"]},{"emoji":"🏃♀️","aliases":["running_woman"]},{"emoji":"🇷🇼","aliases":["rwanda"]},{"emoji":"🈂️","aliases":["sa"]},{"emoji":"🧷","aliases":["safety_pin"]},{"emoji":"🦺","aliases":["safety_vest"]},{"emoji":"♐","aliases":["sagittarius"]},{"emoji":"🍶","aliases":["sake"]},{"emoji":"🧂","aliases":["salt"]},{"emoji":"🫡","aliases":["saluting_face"]},{"emoji":"🇼🇸","aliases":["samoa"]},{"emoji":"🇸🇲","aliases":["san_marino"]},{"emoji":"👡","aliases":["sandal"]},{"emoji":"🥪","aliases":["sandwich"]},{"emoji":"🎅","aliases":["santa"]},{"emoji":"🇸🇹","aliases":["sao_tome_principe"]},{"emoji":"🥻","aliases":["sari"]},{"emoji":"📡","aliases":["satellite"]},{"emoji":"🇸🇦","aliases":["saudi_arabia"]},{"emoji":"🧖♂️","aliases":["sauna_man"]},{"emoji":"🧖","aliases":["sauna_person"]},{"emoji":"🧖♀️","aliases":["sauna_woman"]},{"emoji":"🦕","aliases":["sauropod"]},{"emoji":"🎷","aliases":["saxophone"]},{"emoji":"🧣","aliases":["scarf"]},{"emoji":"🏫","aliases":["school"]},{"emoji":"🎒","aliases":["school_satchel"]},{"emoji":"🧑🔬","aliases":["scientist"]},{"emoji":"✂️","aliases":["scissors"]},{"emoji":"🦂","aliases":["scorpion"]},{"emoji":"♏","aliases":["scorpius"]},{"emoji":"🏴","aliases":["scotland"]},{"emoji":"😱","aliases":["scream"]},{"emoji":"🙀","aliases":["scream_cat"]},{"emoji":"🪛","aliases":["screwdriver"]},{"emoji":"📜","aliases":["scroll"]},{"emoji":"🦭","aliases":["seal"]},{"emoji":"💺","aliases":["seat"]},{"emoji":"㊙️","aliases":["secret"]},{"emoji":"🙈","aliases":["see_no_evil"]},{"emoji":"🌱","aliases":["seedling"]},{"emoji":"🤳","aliases":["selfie"]},{"emoji":"🇸🇳","aliases":["senegal"]},{"emoji":"🇷🇸","aliases":["serbia"]},{"emoji":"🐕🦺","aliases":["service_dog"]},{"emoji":"7️⃣","aliases":["seven"]},{"emoji":"🪡","aliases":["sewing_needle"]},{"emoji":"🇸🇨","aliases":["seychelles"]},{"emoji":"🫨","aliases":["shaking_face"]},{"emoji":"🥘","aliases":["shallow_pan_of_food"]},{"emoji":"☘️","aliases":["shamrock"]},{"emoji":"🦈","aliases":["shark"]},{"emoji":"
🍧","aliases":["shaved_ice"]},{"emoji":"🐑","aliases":["sheep"]},{"emoji":"🐚","aliases":["shell"]},{"emoji":"🛡️","aliases":["shield"]},{"emoji":"⛩️","aliases":["shinto_shrine"]},{"emoji":"🚢","aliases":["ship"]},{"emoji":"👕","aliases":["shirt","tshirt"]},{"emoji":"🛍️","aliases":["shopping"]},{"emoji":"🛒","aliases":["shopping_cart"]},{"emoji":"🩳","aliases":["shorts"]},{"emoji":"🚿","aliases":["shower"]},{"emoji":"🦐","aliases":["shrimp"]},{"emoji":"🤷","aliases":["shrug"]},{"emoji":"🤫","aliases":["shushing_face"]},{"emoji":"🇸🇱","aliases":["sierra_leone"]},{"emoji":"📶","aliases":["signal_strength"]},{"emoji":"🇸🇬","aliases":["singapore"]},{"emoji":"🧑🎤","aliases":["singer"]},{"emoji":"🇸🇽","aliases":["sint_maarten"]},{"emoji":"6️⃣","aliases":["six"]},{"emoji":"🔯","aliases":["six_pointed_star"]},{"emoji":"🛹","aliases":["skateboard"]},{"emoji":"🎿","aliases":["ski"]},{"emoji":"⛷️","aliases":["skier"]},{"emoji":"💀","aliases":["skull"]},{"emoji":"☠️","aliases":["skull_and_crossbones"]},{"emoji":"🦨","aliases":["skunk"]},{"emoji":"🛷","aliases":["sled"]},{"emoji":"😴","aliases":["sleeping"]},{"emoji":"🛌","aliases":["sleeping_bed"]},{"emoji":"😪","aliases":["sleepy"]},{"emoji":"🙁","aliases":["slightly_frowning_face"]},{"emoji":"🙂","aliases":["slightly_smiling_face"]},{"emoji":"🎰","aliases":["slot_machine"]},{"emoji":"🦥","aliases":["sloth"]},{"emoji":"🇸🇰","aliases":["slovakia"]},{"emoji":"🇸🇮","aliases":["slovenia"]},{"emoji":"🛩️","aliases":["small_airplane"]},{"emoji":"🔹","aliases":["small_blue_diamond"]},{"emoji":"🔸","aliases":["small_orange_diamond"]},{"emoji":"🔺","aliases":["small_red_triangle"]},{"emoji":"🔻","aliases":["small_red_triangle_down"]},{"emoji":"😄","aliases":["smile"]},{"emoji":"😸","aliases":["smile_cat"]},{"emoji":"😃","aliases":["smiley"]},{"emoji":"😺","aliases":["smiley_cat"]},{"emoji":"🥲","aliases":["smiling_face_with_tear"]},{"emoji":"🥰","aliases":["smiling_face_with_three_hearts"]},{"emoji":"😈","aliases":["smiling_imp"]},{"emoji":"😏","aliases":["smirk"]},{"emoji":"😼","
aliases":["smirk_cat"]},{"emoji":"🚬","aliases":["smoking"]},{"emoji":"🐌","aliases":["snail"]},{"emoji":"🐍","aliases":["snake"]},{"emoji":"🤧","aliases":["sneezing_face"]},{"emoji":"🏂","aliases":["snowboarder"]},{"emoji":"❄️","aliases":["snowflake"]},{"emoji":"⛄","aliases":["snowman"]},{"emoji":"☃️","aliases":["snowman_with_snow"]},{"emoji":"🧼","aliases":["soap"]},{"emoji":"😭","aliases":["sob"]},{"emoji":"⚽","aliases":["soccer"]},{"emoji":"🧦","aliases":["socks"]},{"emoji":"🥎","aliases":["softball"]},{"emoji":"🇸🇧","aliases":["solomon_islands"]},{"emoji":"🇸🇴","aliases":["somalia"]},{"emoji":"🔜","aliases":["soon"]},{"emoji":"🆘","aliases":["sos"]},{"emoji":"🔉","aliases":["sound"]},{"emoji":"🇿🇦","aliases":["south_africa"]},{"emoji":"🇬🇸","aliases":["south_georgia_south_sandwich_islands"]},{"emoji":"🇸🇸","aliases":["south_sudan"]},{"emoji":"👾","aliases":["space_invader"]},{"emoji":"♠️","aliases":["spades"]},{"emoji":"🍝","aliases":["spaghetti"]},{"emoji":"❇️","aliases":["sparkle"]},{"emoji":"🎇","aliases":["sparkler"]},{"emoji":"✨","aliases":["sparkles"]},{"emoji":"💖","aliases":["sparkling_heart"]},{"emoji":"🙊","aliases":["speak_no_evil"]},{"emoji":"🔈","aliases":["speaker"]},{"emoji":"🗣️","aliases":["speaking_head"]},{"emoji":"💬","aliases":["speech_balloon"]},{"emoji":"🚤","aliases":["speedboat"]},{"emoji":"🕷️","aliases":["spider"]},{"emoji":"🕸️","aliases":["spider_web"]},{"emoji":"🗓️","aliases":["spiral_calendar"]},{"emoji":"🗒️","aliases":["spiral_notepad"]},{"emoji":"🧽","aliases":["sponge"]},{"emoji":"🥄","aliases":["spoon"]},{"emoji":"🦑","aliases":["squid"]},{"emoji":"🇱🇰","aliases":["sri_lanka"]},{"emoji":"🇧🇱","aliases":["st_barthelemy"]},{"emoji":"🇸🇭","aliases":["st_helena"]},{"emoji":"🇰🇳","aliases":["st_kitts_nevis"]},{"emoji":"🇱🇨","aliases":["st_lucia"]},{"emoji":"🇲🇫","aliases":["st_martin"]},{"emoji":"🇵🇲","aliases":["st_pierre_miquelon"]},{"emoji":"🇻🇨","aliases":["st_vincent_grenadines"]},{"emoji":"🏟️","aliases":["stadium"]},{"emoji":"🧍♂️","aliases":["standing_man"]},{"emo
ji":"🧍","aliases":["standing_person"]},{"emoji":"🧍♀️","aliases":["standing_woman"]},{"emoji":"⭐","aliases":["star"]},{"emoji":"🌟","aliases":["star2"]},{"emoji":"☪️","aliases":["star_and_crescent"]},{"emoji":"✡️","aliases":["star_of_david"]},{"emoji":"🤩","aliases":["star_struck"]},{"emoji":"🌠","aliases":["stars"]},{"emoji":"🚉","aliases":["station"]},{"emoji":"🗽","aliases":["statue_of_liberty"]},{"emoji":"🚂","aliases":["steam_locomotive"]},{"emoji":"🩺","aliases":["stethoscope"]},{"emoji":"🍲","aliases":["stew"]},{"emoji":"⏹️","aliases":["stop_button"]},{"emoji":"🛑","aliases":["stop_sign"]},{"emoji":"⏱️","aliases":["stopwatch"]},{"emoji":"📏","aliases":["straight_ruler"]},{"emoji":"🍓","aliases":["strawberry"]},{"emoji":"😛","aliases":["stuck_out_tongue"]},{"emoji":"😝","aliases":["stuck_out_tongue_closed_eyes"]},{"emoji":"😜","aliases":["stuck_out_tongue_winking_eye"]},{"emoji":"🧑🎓","aliases":["student"]},{"emoji":"🎙️","aliases":["studio_microphone"]},{"emoji":"🥙","aliases":["stuffed_flatbread"]},{"emoji":"🇸🇩","aliases":["sudan"]},{"emoji":"🌥️","aliases":["sun_behind_large_cloud"]},{"emoji":"🌦️","aliases":["sun_behind_rain_cloud"]},{"emoji":"🌤️","aliases":["sun_behind_small_cloud"]},{"emoji":"🌞","aliases":["sun_with_face"]},{"emoji":"🌻","aliases":["sunflower"]},{"emoji":"😎","aliases":["sunglasses"]},{"emoji":"☀️","aliases":["sunny"]},{"emoji":"🌅","aliases":["sunrise"]},{"emoji":"🌄","aliases":["sunrise_over_mountains"]},{"emoji":"🦸","aliases":["superhero"]},{"emoji":"🦸♂️","aliases":["superhero_man"]},{"emoji":"🦸♀️","aliases":["superhero_woman"]},{"emoji":"🦹","aliases":["supervillain"]},{"emoji":"🦹♂️","aliases":["supervillain_man"]},{"emoji":"🦹♀️","aliases":["supervillain_woman"]},{"emoji":"🏄","aliases":["surfer"]},{"emoji":"🏄♂️","aliases":["surfing_man"]},{"emoji":"🏄♀️","aliases":["surfing_woman"]},{"emoji":"🇸🇷","aliases":["suriname"]},{"emoji":"🍣","aliases":["sushi"]},{"emoji":"🚟","aliases":["suspension_railway"]},{"emoji":"🇸🇯","aliases":["svalbard_jan_mayen"]},{"emoji":"🦢"
,"aliases":["swan"]},{"emoji":"🇸🇿","aliases":["swaziland"]},{"emoji":"😓","aliases":["sweat"]},{"emoji":"💦","aliases":["sweat_drops"]},{"emoji":"😅","aliases":["sweat_smile"]},{"emoji":"🇸🇪","aliases":["sweden"]},{"emoji":"🍠","aliases":["sweet_potato"]},{"emoji":"🩲","aliases":["swim_brief"]},{"emoji":"🏊","aliases":["swimmer"]},{"emoji":"🏊♂️","aliases":["swimming_man"]},{"emoji":"🏊♀️","aliases":["swimming_woman"]},{"emoji":"🇨🇭","aliases":["switzerland"]},{"emoji":"🔣","aliases":["symbols"]},{"emoji":"🕍","aliases":["synagogue"]},{"emoji":"🇸🇾","aliases":["syria"]},{"emoji":"💉","aliases":["syringe"]},{"emoji":"🦖","aliases":["t-rex"]},{"emoji":"🌮","aliases":["taco"]},{"emoji":"🎉","aliases":["tada","hooray"]},{"emoji":"🇹🇼","aliases":["taiwan"]},{"emoji":"🇹🇯","aliases":["tajikistan"]},{"emoji":"🥡","aliases":["takeout_box"]},{"emoji":"🫔","aliases":["tamale"]},{"emoji":"🎋","aliases":["tanabata_tree"]},{"emoji":"🍊","aliases":["tangerine","orange","mandarin"]},{"emoji":"🇹🇿","aliases":["tanzania"]},{"emoji":"♉","aliases":["taurus"]},{"emoji":"🚕","aliases":["taxi"]},{"emoji":"🍵","aliases":["tea"]},{"emoji":"🧑🏫","aliases":["teacher"]},{"emoji":"🫖","aliases":["teapot"]},{"emoji":"🧑💻","aliases":["technologist"]},{"emoji":"🧸","aliases":["teddy_bear"]},{"emoji":"📞","aliases":["telephone_receiver"]},{"emoji":"🔭","aliases":["telescope"]},{"emoji":"🎾","aliases":["tennis"]},{"emoji":"⛺","aliases":["tent"]},{"emoji":"🧪","aliases":["test_tube"]},{"emoji":"🇹🇭","aliases":["thailand"]},{"emoji":"🌡️","aliases":["thermometer"]},{"emoji":"🤔","aliases":["thinking"]},{"emoji":"🩴","aliases":["thong_sandal"]},{"emoji":"💭","aliases":["thought_balloon"]},{"emoji":"🧵","aliases":["thread"]},{"emoji":"3️⃣","aliases":["three"]},{"emoji":"🎫","aliases":["ticket"]},{"emoji":"🎟️","aliases":["tickets"]},{"emoji":"🐯","aliases":["tiger"]},{"emoji":"🐅","aliases":["tiger2"]},{"emoji":"⏲️","aliases":["timer_clock"]},{"emoji":"🇹🇱","aliases":["timor_leste"]},{"emoji":"💁♂️","aliases":["tipping_hand_man","sassy_man"]},{"em
oji":"💁","aliases":["tipping_hand_person","information_desk_person"]},{"emoji":"💁♀️","aliases":["tipping_hand_woman","sassy_woman"]},{"emoji":"😫","aliases":["tired_face"]},{"emoji":"™️","aliases":["tm"]},{"emoji":"🇹🇬","aliases":["togo"]},{"emoji":"🚽","aliases":["toilet"]},{"emoji":"🇹🇰","aliases":["tokelau"]},{"emoji":"🗼","aliases":["tokyo_tower"]},{"emoji":"🍅","aliases":["tomato"]},{"emoji":"🇹🇴","aliases":["tonga"]},{"emoji":"👅","aliases":["tongue"]},{"emoji":"🧰","aliases":["toolbox"]},{"emoji":"🦷","aliases":["tooth"]},{"emoji":"🪥","aliases":["toothbrush"]},{"emoji":"🔝","aliases":["top"]},{"emoji":"🎩","aliases":["tophat"]},{"emoji":"🌪️","aliases":["tornado"]},{"emoji":"🇹🇷","aliases":["tr"]},{"emoji":"🖲️","aliases":["trackball"]},{"emoji":"🚜","aliases":["tractor"]},{"emoji":"🚥","aliases":["traffic_light"]},{"emoji":"🚋","aliases":["train"]},{"emoji":"🚆","aliases":["train2"]},{"emoji":"🚊","aliases":["tram"]},{"emoji":"🏳️⚧️","aliases":["transgender_flag"]},{"emoji":"⚧️","aliases":["transgender_symbol"]},{"emoji":"🚩","aliases":["triangular_flag_on_post"]},{"emoji":"📐","aliases":["triangular_ruler"]},{"emoji":"🔱","aliases":["trident"]},{"emoji":"🇹🇹","aliases":["trinidad_tobago"]},{"emoji":"🇹🇦","aliases":["tristan_da_cunha"]},{"emoji":"😤","aliases":["triumph"]},{"emoji":"🧌","aliases":["troll"]},{"emoji":"🚎","aliases":["trolleybus"]},{"emoji":"🏆","aliases":["trophy"]},{"emoji":"🍹","aliases":["tropical_drink"]},{"emoji":"🐠","aliases":["tropical_fish"]},{"emoji":"🚚","aliases":["truck"]},{"emoji":"🎺","aliases":["trumpet"]},{"emoji":"🌷","aliases":["tulip"]},{"emoji":"🥃","aliases":["tumbler_glass"]},{"emoji":"🇹🇳","aliases":["tunisia"]},{"emoji":"🦃","aliases":["turkey"]},{"emoji":"🇹🇲","aliases":["turkmenistan"]},{"emoji":"🇹🇨","aliases":["turks_caicos_islands"]},{"emoji":"🐢","aliases":["turtle"]},{"emoji":"🇹🇻","aliases":["tuvalu"]},{"emoji":"📺","aliases":["tv"]},{"emoji":"🔀","aliases":["twisted_rightwards_arrows"]},{"emoji":"2️⃣","aliases":["two"]},{"emoji":"💕","aliases":["two_hea
rts"]},{"emoji":"👬","aliases":["two_men_holding_hands"]},{"emoji":"👭","aliases":["two_women_holding_hands"]},{"emoji":"🈹","aliases":["u5272"]},{"emoji":"🈴","aliases":["u5408"]},{"emoji":"🈺","aliases":["u55b6"]},{"emoji":"🈯","aliases":["u6307"]},{"emoji":"🈷️","aliases":["u6708"]},{"emoji":"🈶","aliases":["u6709"]},{"emoji":"🈵","aliases":["u6e80"]},{"emoji":"🈚","aliases":["u7121"]},{"emoji":"🈸","aliases":["u7533"]},{"emoji":"🈲","aliases":["u7981"]},{"emoji":"🈳","aliases":["u7a7a"]},{"emoji":"🇺🇬","aliases":["uganda"]},{"emoji":"🇺🇦","aliases":["ukraine"]},{"emoji":"☔","aliases":["umbrella"]},{"emoji":"😒","aliases":["unamused"]},{"emoji":"🔞","aliases":["underage"]},{"emoji":"🦄","aliases":["unicorn"]},{"emoji":"🇦🇪","aliases":["united_arab_emirates"]},{"emoji":"🇺🇳","aliases":["united_nations"]},{"emoji":"🔓","aliases":["unlock"]},{"emoji":"🆙","aliases":["up"]},{"emoji":"🙃","aliases":["upside_down_face"]},{"emoji":"🇺🇾","aliases":["uruguay"]},{"emoji":"🇺🇸","aliases":["us"]},{"emoji":"🇺🇲","aliases":["us_outlying_islands"]},{"emoji":"🇻🇮","aliases":["us_virgin_islands"]},{"emoji":"🇺🇿","aliases":["uzbekistan"]},{"emoji":"✌️","aliases":["v"]},{"emoji":"🧛","aliases":["vampire"]},{"emoji":"🧛♂️","aliases":["vampire_man"]},{"emoji":"🧛♀️","aliases":["vampire_woman"]},{"emoji":"🇻🇺","aliases":["vanuatu"]},{"emoji":"🇻🇦","aliases":["vatican_city"]},{"emoji":"🇻🇪","aliases":["venezuela"]},{"emoji":"🚦","aliases":["vertical_traffic_light"]},{"emoji":"📼","aliases":["vhs"]},{"emoji":"📳","aliases":["vibration_mode"]},{"emoji":"📹","aliases":["video_camera"]},{"emoji":"🎮","aliases":["video_game"]},{"emoji":"🇻🇳","aliases":["vietnam"]},{"emoji":"🎻","aliases":["violin"]},{"emoji":"♍","aliases":["virgo"]},{"emoji":"🌋","aliases":["volcano"]},{"emoji":"🏐","aliases":["volleyball"]},{"emoji":"🤮","aliases":["vomiting_face"]},{"emoji":"🆚","aliases":["vs"]},{"emoji":"🖖","aliases":["vulcan_salute"]},{"emoji":"🧇","aliases":["waffle"]},{"emoji":"🏴","aliases":["wales"]},{"emoji":"🚶","aliases":["walking"]},{"emoji"
:"🚶♂️","aliases":["walking_man"]},{"emoji":"🚶♀️","aliases":["walking_woman"]},{"emoji":"🇼🇫","aliases":["wallis_futuna"]},{"emoji":"🌘","aliases":["waning_crescent_moon"]},{"emoji":"🌖","aliases":["waning_gibbous_moon"]},{"emoji":"⚠️","aliases":["warning"]},{"emoji":"🗑️","aliases":["wastebasket"]},{"emoji":"⌚","aliases":["watch"]},{"emoji":"🐃","aliases":["water_buffalo"]},{"emoji":"🤽","aliases":["water_polo"]},{"emoji":"🍉","aliases":["watermelon"]},{"emoji":"👋","aliases":["wave"]},{"emoji":"〰️","aliases":["wavy_dash"]},{"emoji":"🌒","aliases":["waxing_crescent_moon"]},{"emoji":"🚾","aliases":["wc"]},{"emoji":"😩","aliases":["weary"]},{"emoji":"💒","aliases":["wedding"]},{"emoji":"🏋️","aliases":["weight_lifting"]},{"emoji":"🏋️♂️","aliases":["weight_lifting_man"]},{"emoji":"🏋️♀️","aliases":["weight_lifting_woman"]},{"emoji":"🇪🇭","aliases":["western_sahara"]},{"emoji":"🐳","aliases":["whale"]},{"emoji":"🐋","aliases":["whale2"]},{"emoji":"🛞","aliases":["wheel"]},{"emoji":"☸️","aliases":["wheel_of_dharma"]},{"emoji":"♿","aliases":["wheelchair"]},{"emoji":"✅","aliases":["white_check_mark"]},{"emoji":"⚪","aliases":["white_circle"]},{"emoji":"🏳️","aliases":["white_flag"]},{"emoji":"💮","aliases":["white_flower"]},{"emoji":"👨🦳","aliases":["white_haired_man"]},{"emoji":"👩🦳","aliases":["white_haired_woman"]},{"emoji":"🤍","aliases":["white_heart"]},{"emoji":"⬜","aliases":["white_large_square"]},{"emoji":"◽","aliases":["white_medium_small_square"]},{"emoji":"◻️","aliases":["white_medium_square"]},{"emoji":"▫️","aliases":["white_small_square"]},{"emoji":"🔳","aliases":["white_square_button"]},{"emoji":"🥀","aliases":["wilted_flower"]},{"emoji":"🎐","aliases":["wind_chime"]},{"emoji":"🌬️","aliases":["wind_face"]},{"emoji":"🪟","aliases":["window"]},{"emoji":"🍷","aliases":["wine_glass"]},{"emoji":"🪽","aliases":["wing"]},{"emoji":"😉","aliases":["wink"]},{"emoji":"🛜","aliases":["wireless"]},{"emoji":"🐺","aliases":["wolf"]},{"emoji":"👩","aliases":["woman"]},{"emoji":"👩🎨","aliases":["woman_artist"]
},{"emoji":"👩🚀","aliases":["woman_astronaut"]},{"emoji":"🧔♀️","aliases":["woman_beard"]},{"emoji":"🤸♀️","aliases":["woman_cartwheeling"]},{"emoji":"👩🍳","aliases":["woman_cook"]},{"emoji":"💃","aliases":["woman_dancing","dancer"]},{"emoji":"🤦♀️","aliases":["woman_facepalming"]},{"emoji":"👩🏭","aliases":["woman_factory_worker"]},{"emoji":"👩🌾","aliases":["woman_farmer"]},{"emoji":"👩🍼","aliases":["woman_feeding_baby"]},{"emoji":"👩🚒","aliases":["woman_firefighter"]},{"emoji":"👩⚕️","aliases":["woman_health_worker"]},{"emoji":"👩🦽","aliases":["woman_in_manual_wheelchair"]},{"emoji":"👩🦼","aliases":["woman_in_motorized_wheelchair"]},{"emoji":"🤵♀️","aliases":["woman_in_tuxedo"]},{"emoji":"👩⚖️","aliases":["woman_judge"]},{"emoji":"🤹♀️","aliases":["woman_juggling"]},{"emoji":"👩🔧","aliases":["woman_mechanic"]},{"emoji":"👩💼","aliases":["woman_office_worker"]},{"emoji":"👩✈️","aliases":["woman_pilot"]},{"emoji":"🤾♀️","aliases":["woman_playing_handball"]},{"emoji":"🤽♀️","aliases":["woman_playing_water_polo"]},{"emoji":"👩🔬","aliases":["woman_scientist"]},{"emoji":"🤷♀️","aliases":["woman_shrugging"]},{"emoji":"👩🎤","aliases":["woman_singer"]},{"emoji":"👩🎓","aliases":["woman_student"]},{"emoji":"👩🏫","aliases":["woman_teacher"]},{"emoji":"👩💻","aliases":["woman_technologist"]},{"emoji":"🧕","aliases":["woman_with_headscarf"]},{"emoji":"👩🦯","aliases":["woman_with_probing_cane"]},{"emoji":"👳♀️","aliases":["woman_with_turban"]},{"emoji":"👰♀️","aliases":["woman_with_veil","bride_with_veil"]},{"emoji":"👚","aliases":["womans_clothes"]},{"emoji":"👒","aliases":["womans_hat"]},{"emoji":"🤼♀️","aliases":["women_wrestling"]},{"emoji":"🚺","aliases":["womens"]},{"emoji":"🪵","aliases":["wood"]},{"emoji":"🥴","aliases":["woozy_face"]},{"emoji":"🗺️","aliases":["world_map"]},{"emoji":"🪱","aliases":["worm"]},{"emoji":"😟","aliases":["worried"]},{"emoji":"🔧","aliases":["wrench"]},{"emoji":"🤼","aliases":["wrestling"]},{"emoji":"✍️","aliases":["writing_hand"]},{"emoji":"❌","aliases":["x"]},{"emoji":"🩻","aliases":["x
_ray"]},{"emoji":"🧶","aliases":["yarn"]},{"emoji":"🥱","aliases":["yawning_face"]},{"emoji":"🟡","aliases":["yellow_circle"]},{"emoji":"💛","aliases":["yellow_heart"]},{"emoji":"🟨","aliases":["yellow_square"]},{"emoji":"🇾🇪","aliases":["yemen"]},{"emoji":"💴","aliases":["yen"]},{"emoji":"☯️","aliases":["yin_yang"]},{"emoji":"🪀","aliases":["yo_yo"]},{"emoji":"😋","aliases":["yum"]},{"emoji":"🇿🇲","aliases":["zambia"]},{"emoji":"🤪","aliases":["zany_face"]},{"emoji":"⚡","aliases":["zap"]},{"emoji":"🦓","aliases":["zebra"]},{"emoji":"0️⃣","aliases":["zero"]},{"emoji":"🇿🇼","aliases":["zimbabwe"]},{"emoji":"🤐","aliases":["zipper_mouth_face"]},{"emoji":"🧟","aliases":["zombie"]},{"emoji":"🧟♂️","aliases":["zombie_man"]},{"emoji":"🧟♀️","aliases":["zombie_woman"]},{"emoji":"💤","aliases":["zzz"]}]
\ No newline at end of file
+[
+ {
+ "emoji": "👍",
+ "aliases": [
+ "+1",
+ "thumbsup"
+ ]
+ },
+ {
+ "emoji": "👎",
+ "aliases": [
+ "-1",
+ "thumbsdown"
+ ]
+ },
+ {
+ "emoji": "💯",
+ "aliases": [
+ "100"
+ ]
+ },
+ {
+ "emoji": "🔢",
+ "aliases": [
+ "1234"
+ ]
+ },
+ {
+ "emoji": "🥇",
+ "aliases": [
+ "1st_place_medal"
+ ]
+ },
+ {
+ "emoji": "🥈",
+ "aliases": [
+ "2nd_place_medal"
+ ]
+ },
+ {
+ "emoji": "🥉",
+ "aliases": [
+ "3rd_place_medal"
+ ]
+ },
+ {
+ "emoji": "🎱",
+ "aliases": [
+ "8ball"
+ ]
+ },
+ {
+ "emoji": "🅰️",
+ "aliases": [
+ "a"
+ ]
+ },
+ {
+ "emoji": "🆎",
+ "aliases": [
+ "ab"
+ ]
+ },
+ {
+ "emoji": "🧮",
+ "aliases": [
+ "abacus"
+ ]
+ },
+ {
+ "emoji": "🔤",
+ "aliases": [
+ "abc"
+ ]
+ },
+ {
+ "emoji": "🔡",
+ "aliases": [
+ "abcd"
+ ]
+ },
+ {
+ "emoji": "🉑",
+ "aliases": [
+ "accept"
+ ]
+ },
+ {
+ "emoji": "🪗",
+ "aliases": [
+ "accordion"
+ ]
+ },
+ {
+ "emoji": "🩹",
+ "aliases": [
+ "adhesive_bandage"
+ ]
+ },
+ {
+ "emoji": "🧑",
+ "aliases": [
+ "adult"
+ ]
+ },
+ {
+ "emoji": "🚡",
+ "aliases": [
+ "aerial_tramway"
+ ]
+ },
+ {
+ "emoji": "🇦🇫",
+ "aliases": [
+ "afghanistan"
+ ]
+ },
+ {
+ "emoji": "✈️",
+ "aliases": [
+ "airplane"
+ ]
+ },
+ {
+ "emoji": "🇦🇽",
+ "aliases": [
+ "aland_islands"
+ ]
+ },
+ {
+ "emoji": "⏰",
+ "aliases": [
+ "alarm_clock"
+ ]
+ },
+ {
+ "emoji": "🇦🇱",
+ "aliases": [
+ "albania"
+ ]
+ },
+ {
+ "emoji": "⚗️",
+ "aliases": [
+ "alembic"
+ ]
+ },
+ {
+ "emoji": "🇩🇿",
+ "aliases": [
+ "algeria"
+ ]
+ },
+ {
+ "emoji": "👽",
+ "aliases": [
+ "alien"
+ ]
+ },
+ {
+ "emoji": "🚑",
+ "aliases": [
+ "ambulance"
+ ]
+ },
+ {
+ "emoji": "🇦🇸",
+ "aliases": [
+ "american_samoa"
+ ]
+ },
+ {
+ "emoji": "🏺",
+ "aliases": [
+ "amphora"
+ ]
+ },
+ {
+ "emoji": "🫀",
+ "aliases": [
+ "anatomical_heart"
+ ]
+ },
+ {
+ "emoji": "⚓",
+ "aliases": [
+ "anchor"
+ ]
+ },
+ {
+ "emoji": "🇦🇩",
+ "aliases": [
+ "andorra"
+ ]
+ },
+ {
+ "emoji": "👼",
+ "aliases": [
+ "angel"
+ ]
+ },
+ {
+ "emoji": "💢",
+ "aliases": [
+ "anger"
+ ]
+ },
+ {
+ "emoji": "🇦🇴",
+ "aliases": [
+ "angola"
+ ]
+ },
+ {
+ "emoji": "😠",
+ "aliases": [
+ "angry"
+ ]
+ },
+ {
+ "emoji": "🇦🇮",
+ "aliases": [
+ "anguilla"
+ ]
+ },
+ {
+ "emoji": "😧",
+ "aliases": [
+ "anguished"
+ ]
+ },
+ {
+ "emoji": "🐜",
+ "aliases": [
+ "ant"
+ ]
+ },
+ {
+ "emoji": "🇦🇶",
+ "aliases": [
+ "antarctica"
+ ]
+ },
+ {
+ "emoji": "🇦🇬",
+ "aliases": [
+ "antigua_barbuda"
+ ]
+ },
+ {
+ "emoji": "🍎",
+ "aliases": [
+ "apple"
+ ]
+ },
+ {
+ "emoji": "♒",
+ "aliases": [
+ "aquarius"
+ ]
+ },
+ {
+ "emoji": "🇦🇷",
+ "aliases": [
+ "argentina"
+ ]
+ },
+ {
+ "emoji": "♈",
+ "aliases": [
+ "aries"
+ ]
+ },
+ {
+ "emoji": "🇦🇲",
+ "aliases": [
+ "armenia"
+ ]
+ },
+ {
+ "emoji": "◀️",
+ "aliases": [
+ "arrow_backward"
+ ]
+ },
+ {
+ "emoji": "⏬",
+ "aliases": [
+ "arrow_double_down"
+ ]
+ },
+ {
+ "emoji": "⏫",
+ "aliases": [
+ "arrow_double_up"
+ ]
+ },
+ {
+ "emoji": "⬇️",
+ "aliases": [
+ "arrow_down"
+ ]
+ },
+ {
+ "emoji": "🔽",
+ "aliases": [
+ "arrow_down_small"
+ ]
+ },
+ {
+ "emoji": "▶️",
+ "aliases": [
+ "arrow_forward"
+ ]
+ },
+ {
+ "emoji": "⤵️",
+ "aliases": [
+ "arrow_heading_down"
+ ]
+ },
+ {
+ "emoji": "⤴️",
+ "aliases": [
+ "arrow_heading_up"
+ ]
+ },
+ {
+ "emoji": "⬅️",
+ "aliases": [
+ "arrow_left"
+ ]
+ },
+ {
+ "emoji": "↙️",
+ "aliases": [
+ "arrow_lower_left"
+ ]
+ },
+ {
+ "emoji": "↘️",
+ "aliases": [
+ "arrow_lower_right"
+ ]
+ },
+ {
+ "emoji": "➡️",
+ "aliases": [
+ "arrow_right"
+ ]
+ },
+ {
+ "emoji": "↪️",
+ "aliases": [
+ "arrow_right_hook"
+ ]
+ },
+ {
+ "emoji": "⬆️",
+ "aliases": [
+ "arrow_up"
+ ]
+ },
+ {
+ "emoji": "↕️",
+ "aliases": [
+ "arrow_up_down"
+ ]
+ },
+ {
+ "emoji": "🔼",
+ "aliases": [
+ "arrow_up_small"
+ ]
+ },
+ {
+ "emoji": "↖️",
+ "aliases": [
+ "arrow_upper_left"
+ ]
+ },
+ {
+ "emoji": "↗️",
+ "aliases": [
+ "arrow_upper_right"
+ ]
+ },
+ {
+ "emoji": "🔃",
+ "aliases": [
+ "arrows_clockwise"
+ ]
+ },
+ {
+ "emoji": "🔄",
+ "aliases": [
+ "arrows_counterclockwise"
+ ]
+ },
+ {
+ "emoji": "🎨",
+ "aliases": [
+ "art"
+ ]
+ },
+ {
+ "emoji": "🚛",
+ "aliases": [
+ "articulated_lorry"
+ ]
+ },
+ {
+ "emoji": "🛰️",
+ "aliases": [
+ "artificial_satellite"
+ ]
+ },
+ {
+ "emoji": "🧑🎨",
+ "aliases": [
+ "artist"
+ ]
+ },
+ {
+ "emoji": "🇦🇼",
+ "aliases": [
+ "aruba"
+ ]
+ },
+ {
+ "emoji": "🇦🇨",
+ "aliases": [
+ "ascension_island"
+ ]
+ },
+ {
+ "emoji": "*️⃣",
+ "aliases": [
+ "asterisk"
+ ]
+ },
+ {
+ "emoji": "😲",
+ "aliases": [
+ "astonished"
+ ]
+ },
+ {
+ "emoji": "🧑🚀",
+ "aliases": [
+ "astronaut"
+ ]
+ },
+ {
+ "emoji": "👟",
+ "aliases": [
+ "athletic_shoe"
+ ]
+ },
+ {
+ "emoji": "🏧",
+ "aliases": [
+ "atm"
+ ]
+ },
+ {
+ "emoji": "⚛️",
+ "aliases": [
+ "atom_symbol"
+ ]
+ },
+ {
+ "emoji": "🇦🇺",
+ "aliases": [
+ "australia"
+ ]
+ },
+ {
+ "emoji": "🇦🇹",
+ "aliases": [
+ "austria"
+ ]
+ },
+ {
+ "emoji": "🛺",
+ "aliases": [
+ "auto_rickshaw"
+ ]
+ },
+ {
+ "emoji": "🥑",
+ "aliases": [
+ "avocado"
+ ]
+ },
+ {
+ "emoji": "🪓",
+ "aliases": [
+ "axe"
+ ]
+ },
+ {
+ "emoji": "🇦🇿",
+ "aliases": [
+ "azerbaijan"
+ ]
+ },
+ {
+ "emoji": "🅱️",
+ "aliases": [
+ "b"
+ ]
+ },
+ {
+ "emoji": "👶",
+ "aliases": [
+ "baby"
+ ]
+ },
+ {
+ "emoji": "🍼",
+ "aliases": [
+ "baby_bottle"
+ ]
+ },
+ {
+ "emoji": "🐤",
+ "aliases": [
+ "baby_chick"
+ ]
+ },
+ {
+ "emoji": "🚼",
+ "aliases": [
+ "baby_symbol"
+ ]
+ },
+ {
+ "emoji": "🔙",
+ "aliases": [
+ "back"
+ ]
+ },
+ {
+ "emoji": "🥓",
+ "aliases": [
+ "bacon"
+ ]
+ },
+ {
+ "emoji": "🦡",
+ "aliases": [
+ "badger"
+ ]
+ },
+ {
+ "emoji": "🏸",
+ "aliases": [
+ "badminton"
+ ]
+ },
+ {
+ "emoji": "🥯",
+ "aliases": [
+ "bagel"
+ ]
+ },
+ {
+ "emoji": "🛄",
+ "aliases": [
+ "baggage_claim"
+ ]
+ },
+ {
+ "emoji": "🥖",
+ "aliases": [
+ "baguette_bread"
+ ]
+ },
+ {
+ "emoji": "🇧🇸",
+ "aliases": [
+ "bahamas"
+ ]
+ },
+ {
+ "emoji": "🇧🇭",
+ "aliases": [
+ "bahrain"
+ ]
+ },
+ {
+ "emoji": "⚖️",
+ "aliases": [
+ "balance_scale"
+ ]
+ },
+ {
+ "emoji": "👨🦲",
+ "aliases": [
+ "bald_man"
+ ]
+ },
+ {
+ "emoji": "👩🦲",
+ "aliases": [
+ "bald_woman"
+ ]
+ },
+ {
+ "emoji": "🩰",
+ "aliases": [
+ "ballet_shoes"
+ ]
+ },
+ {
+ "emoji": "🎈",
+ "aliases": [
+ "balloon"
+ ]
+ },
+ {
+ "emoji": "🗳️",
+ "aliases": [
+ "ballot_box"
+ ]
+ },
+ {
+ "emoji": "☑️",
+ "aliases": [
+ "ballot_box_with_check"
+ ]
+ },
+ {
+ "emoji": "🎍",
+ "aliases": [
+ "bamboo"
+ ]
+ },
+ {
+ "emoji": "🍌",
+ "aliases": [
+ "banana"
+ ]
+ },
+ {
+ "emoji": "‼️",
+ "aliases": [
+ "bangbang"
+ ]
+ },
+ {
+ "emoji": "🇧🇩",
+ "aliases": [
+ "bangladesh"
+ ]
+ },
+ {
+ "emoji": "🪕",
+ "aliases": [
+ "banjo"
+ ]
+ },
+ {
+ "emoji": "🏦",
+ "aliases": [
+ "bank"
+ ]
+ },
+ {
+ "emoji": "📊",
+ "aliases": [
+ "bar_chart"
+ ]
+ },
+ {
+ "emoji": "🇧🇧",
+ "aliases": [
+ "barbados"
+ ]
+ },
+ {
+ "emoji": "💈",
+ "aliases": [
+ "barber"
+ ]
+ },
+ {
+ "emoji": "⚾",
+ "aliases": [
+ "baseball"
+ ]
+ },
+ {
+ "emoji": "🧺",
+ "aliases": [
+ "basket"
+ ]
+ },
+ {
+ "emoji": "🏀",
+ "aliases": [
+ "basketball"
+ ]
+ },
+ {
+ "emoji": "🦇",
+ "aliases": [
+ "bat"
+ ]
+ },
+ {
+ "emoji": "🛀",
+ "aliases": [
+ "bath"
+ ]
+ },
+ {
+ "emoji": "🛁",
+ "aliases": [
+ "bathtub"
+ ]
+ },
+ {
+ "emoji": "🔋",
+ "aliases": [
+ "battery"
+ ]
+ },
+ {
+ "emoji": "🏖️",
+ "aliases": [
+ "beach_umbrella"
+ ]
+ },
+ {
+ "emoji": "🫘",
+ "aliases": [
+ "beans"
+ ]
+ },
+ {
+ "emoji": "🐻",
+ "aliases": [
+ "bear"
+ ]
+ },
+ {
+ "emoji": "🧔",
+ "aliases": [
+ "bearded_person"
+ ]
+ },
+ {
+ "emoji": "🦫",
+ "aliases": [
+ "beaver"
+ ]
+ },
+ {
+ "emoji": "🛏️",
+ "aliases": [
+ "bed"
+ ]
+ },
+ {
+ "emoji": "🐝",
+ "aliases": [
+ "bee",
+ "honeybee"
+ ]
+ },
+ {
+ "emoji": "🍺",
+ "aliases": [
+ "beer"
+ ]
+ },
+ {
+ "emoji": "🍻",
+ "aliases": [
+ "beers"
+ ]
+ },
+ {
+ "emoji": "🪲",
+ "aliases": [
+ "beetle"
+ ]
+ },
+ {
+ "emoji": "🔰",
+ "aliases": [
+ "beginner"
+ ]
+ },
+ {
+ "emoji": "🇧🇾",
+ "aliases": [
+ "belarus"
+ ]
+ },
+ {
+ "emoji": "🇧🇪",
+ "aliases": [
+ "belgium"
+ ]
+ },
+ {
+ "emoji": "🇧🇿",
+ "aliases": [
+ "belize"
+ ]
+ },
+ {
+ "emoji": "🔔",
+ "aliases": [
+ "bell"
+ ]
+ },
+ {
+ "emoji": "🫑",
+ "aliases": [
+ "bell_pepper"
+ ]
+ },
+ {
+ "emoji": "🛎️",
+ "aliases": [
+ "bellhop_bell"
+ ]
+ },
+ {
+ "emoji": "🇧🇯",
+ "aliases": [
+ "benin"
+ ]
+ },
+ {
+ "emoji": "🍱",
+ "aliases": [
+ "bento"
+ ]
+ },
+ {
+ "emoji": "🇧🇲",
+ "aliases": [
+ "bermuda"
+ ]
+ },
+ {
+ "emoji": "🧃",
+ "aliases": [
+ "beverage_box"
+ ]
+ },
+ {
+ "emoji": "🇧🇹",
+ "aliases": [
+ "bhutan"
+ ]
+ },
+ {
+ "emoji": "🚴",
+ "aliases": [
+ "bicyclist"
+ ]
+ },
+ {
+ "emoji": "🚲",
+ "aliases": [
+ "bike"
+ ]
+ },
+ {
+ "emoji": "🚴♂️",
+ "aliases": [
+ "biking_man"
+ ]
+ },
+ {
+ "emoji": "🚴♀️",
+ "aliases": [
+ "biking_woman"
+ ]
+ },
+ {
+ "emoji": "👙",
+ "aliases": [
+ "bikini"
+ ]
+ },
+ {
+ "emoji": "🧢",
+ "aliases": [
+ "billed_cap"
+ ]
+ },
+ {
+ "emoji": "☣️",
+ "aliases": [
+ "biohazard"
+ ]
+ },
+ {
+ "emoji": "🐦",
+ "aliases": [
+ "bird"
+ ]
+ },
+ {
+ "emoji": "🎂",
+ "aliases": [
+ "birthday"
+ ]
+ },
+ {
+ "emoji": "🦬",
+ "aliases": [
+ "bison"
+ ]
+ },
+ {
+ "emoji": "🫦",
+ "aliases": [
+ "biting_lip"
+ ]
+ },
+ {
+ "emoji": "🐦⬛",
+ "aliases": [
+ "black_bird"
+ ]
+ },
+ {
+ "emoji": "🐈⬛",
+ "aliases": [
+ "black_cat"
+ ]
+ },
+ {
+ "emoji": "⚫",
+ "aliases": [
+ "black_circle"
+ ]
+ },
+ {
+ "emoji": "🏴",
+ "aliases": [
+ "black_flag"
+ ]
+ },
+ {
+ "emoji": "🖤",
+ "aliases": [
+ "black_heart"
+ ]
+ },
+ {
+ "emoji": "🃏",
+ "aliases": [
+ "black_joker"
+ ]
+ },
+ {
+ "emoji": "⬛",
+ "aliases": [
+ "black_large_square"
+ ]
+ },
+ {
+ "emoji": "◾",
+ "aliases": [
+ "black_medium_small_square"
+ ]
+ },
+ {
+ "emoji": "◼️",
+ "aliases": [
+ "black_medium_square"
+ ]
+ },
+ {
+ "emoji": "✒️",
+ "aliases": [
+ "black_nib"
+ ]
+ },
+ {
+ "emoji": "▪️",
+ "aliases": [
+ "black_small_square"
+ ]
+ },
+ {
+ "emoji": "🔲",
+ "aliases": [
+ "black_square_button"
+ ]
+ },
+ {
+ "emoji": "👱♂️",
+ "aliases": [
+ "blond_haired_man"
+ ]
+ },
+ {
+ "emoji": "👱",
+ "aliases": [
+ "blond_haired_person"
+ ]
+ },
+ {
+ "emoji": "👱♀️",
+ "aliases": [
+ "blond_haired_woman",
+ "blonde_woman"
+ ]
+ },
+ {
+ "emoji": "🌼",
+ "aliases": [
+ "blossom"
+ ]
+ },
+ {
+ "emoji": "🐡",
+ "aliases": [
+ "blowfish"
+ ]
+ },
+ {
+ "emoji": "📘",
+ "aliases": [
+ "blue_book"
+ ]
+ },
+ {
+ "emoji": "🚙",
+ "aliases": [
+ "blue_car"
+ ]
+ },
+ {
+ "emoji": "💙",
+ "aliases": [
+ "blue_heart"
+ ]
+ },
+ {
+ "emoji": "🟦",
+ "aliases": [
+ "blue_square"
+ ]
+ },
+ {
+ "emoji": "🫐",
+ "aliases": [
+ "blueberries"
+ ]
+ },
+ {
+ "emoji": "😊",
+ "aliases": [
+ "blush"
+ ]
+ },
+ {
+ "emoji": "🐗",
+ "aliases": [
+ "boar"
+ ]
+ },
+ {
+ "emoji": "⛵",
+ "aliases": [
+ "boat",
+ "sailboat"
+ ]
+ },
+ {
+ "emoji": "🇧🇴",
+ "aliases": [
+ "bolivia"
+ ]
+ },
+ {
+ "emoji": "💣",
+ "aliases": [
+ "bomb"
+ ]
+ },
+ {
+ "emoji": "🦴",
+ "aliases": [
+ "bone"
+ ]
+ },
+ {
+ "emoji": "📖",
+ "aliases": [
+ "book",
+ "open_book"
+ ]
+ },
+ {
+ "emoji": "🔖",
+ "aliases": [
+ "bookmark"
+ ]
+ },
+ {
+ "emoji": "📑",
+ "aliases": [
+ "bookmark_tabs"
+ ]
+ },
+ {
+ "emoji": "📚",
+ "aliases": [
+ "books"
+ ]
+ },
+ {
+ "emoji": "💥",
+ "aliases": [
+ "boom",
+ "collision"
+ ]
+ },
+ {
+ "emoji": "🪃",
+ "aliases": [
+ "boomerang"
+ ]
+ },
+ {
+ "emoji": "👢",
+ "aliases": [
+ "boot"
+ ]
+ },
+ {
+ "emoji": "🇧🇦",
+ "aliases": [
+ "bosnia_herzegovina"
+ ]
+ },
+ {
+ "emoji": "🇧🇼",
+ "aliases": [
+ "botswana"
+ ]
+ },
+ {
+ "emoji": "⛹️♂️",
+ "aliases": [
+ "bouncing_ball_man",
+ "basketball_man"
+ ]
+ },
+ {
+ "emoji": "⛹️",
+ "aliases": [
+ "bouncing_ball_person"
+ ]
+ },
+ {
+ "emoji": "⛹️♀️",
+ "aliases": [
+ "bouncing_ball_woman",
+ "basketball_woman"
+ ]
+ },
+ {
+ "emoji": "💐",
+ "aliases": [
+ "bouquet"
+ ]
+ },
+ {
+ "emoji": "🇧🇻",
+ "aliases": [
+ "bouvet_island"
+ ]
+ },
+ {
+ "emoji": "🙇",
+ "aliases": [
+ "bow"
+ ]
+ },
+ {
+ "emoji": "🏹",
+ "aliases": [
+ "bow_and_arrow"
+ ]
+ },
+ {
+ "emoji": "🙇♂️",
+ "aliases": [
+ "bowing_man"
+ ]
+ },
+ {
+ "emoji": "🙇♀️",
+ "aliases": [
+ "bowing_woman"
+ ]
+ },
+ {
+ "emoji": "🥣",
+ "aliases": [
+ "bowl_with_spoon"
+ ]
+ },
+ {
+ "emoji": "🎳",
+ "aliases": [
+ "bowling"
+ ]
+ },
+ {
+ "emoji": "🥊",
+ "aliases": [
+ "boxing_glove"
+ ]
+ },
+ {
+ "emoji": "👦",
+ "aliases": [
+ "boy"
+ ]
+ },
+ {
+ "emoji": "🧠",
+ "aliases": [
+ "brain"
+ ]
+ },
+ {
+ "emoji": "🇧🇷",
+ "aliases": [
+ "brazil"
+ ]
+ },
+ {
+ "emoji": "🍞",
+ "aliases": [
+ "bread"
+ ]
+ },
+ {
+ "emoji": "🤱",
+ "aliases": [
+ "breast_feeding"
+ ]
+ },
+ {
+ "emoji": "🧱",
+ "aliases": [
+ "bricks"
+ ]
+ },
+ {
+ "emoji": "🌉",
+ "aliases": [
+ "bridge_at_night"
+ ]
+ },
+ {
+ "emoji": "💼",
+ "aliases": [
+ "briefcase"
+ ]
+ },
+ {
+ "emoji": "🇮🇴",
+ "aliases": [
+ "british_indian_ocean_territory"
+ ]
+ },
+ {
+ "emoji": "🇻🇬",
+ "aliases": [
+ "british_virgin_islands"
+ ]
+ },
+ {
+ "emoji": "🥦",
+ "aliases": [
+ "broccoli"
+ ]
+ },
+ {
+ "emoji": "⛓️💥",
+ "aliases": [
+ "broken_chain"
+ ]
+ },
+ {
+ "emoji": "💔",
+ "aliases": [
+ "broken_heart"
+ ]
+ },
+ {
+ "emoji": "🧹",
+ "aliases": [
+ "broom"
+ ]
+ },
+ {
+ "emoji": "🟤",
+ "aliases": [
+ "brown_circle"
+ ]
+ },
+ {
+ "emoji": "🤎",
+ "aliases": [
+ "brown_heart"
+ ]
+ },
+ {
+ "emoji": "🍄🟫",
+ "aliases": [
+ "brown_mushroom"
+ ]
+ },
+ {
+ "emoji": "🟫",
+ "aliases": [
+ "brown_square"
+ ]
+ },
+ {
+ "emoji": "🇧🇳",
+ "aliases": [
+ "brunei"
+ ]
+ },
+ {
+ "emoji": "🧋",
+ "aliases": [
+ "bubble_tea"
+ ]
+ },
+ {
+ "emoji": "🫧",
+ "aliases": [
+ "bubbles"
+ ]
+ },
+ {
+ "emoji": "🪣",
+ "aliases": [
+ "bucket"
+ ]
+ },
+ {
+ "emoji": "🐛",
+ "aliases": [
+ "bug"
+ ]
+ },
+ {
+ "emoji": "🏗️",
+ "aliases": [
+ "building_construction"
+ ]
+ },
+ {
+ "emoji": "💡",
+ "aliases": [
+ "bulb"
+ ]
+ },
+ {
+ "emoji": "🇧🇬",
+ "aliases": [
+ "bulgaria"
+ ]
+ },
+ {
+ "emoji": "🚅",
+ "aliases": [
+ "bullettrain_front"
+ ]
+ },
+ {
+ "emoji": "🚄",
+ "aliases": [
+ "bullettrain_side"
+ ]
+ },
+ {
+ "emoji": "🇧🇫",
+ "aliases": [
+ "burkina_faso"
+ ]
+ },
+ {
+ "emoji": "🌯",
+ "aliases": [
+ "burrito"
+ ]
+ },
+ {
+ "emoji": "🇧🇮",
+ "aliases": [
+ "burundi"
+ ]
+ },
+ {
+ "emoji": "🚌",
+ "aliases": [
+ "bus"
+ ]
+ },
+ {
+ "emoji": "🕴️",
+ "aliases": [
+ "business_suit_levitating"
+ ]
+ },
+ {
+ "emoji": "🚏",
+ "aliases": [
+ "busstop"
+ ]
+ },
+ {
+ "emoji": "👤",
+ "aliases": [
+ "bust_in_silhouette"
+ ]
+ },
+ {
+ "emoji": "👥",
+ "aliases": [
+ "busts_in_silhouette"
+ ]
+ },
+ {
+ "emoji": "🧈",
+ "aliases": [
+ "butter"
+ ]
+ },
+ {
+ "emoji": "🦋",
+ "aliases": [
+ "butterfly"
+ ]
+ },
+ {
+ "emoji": "🌵",
+ "aliases": [
+ "cactus"
+ ]
+ },
+ {
+ "emoji": "🍰",
+ "aliases": [
+ "cake"
+ ]
+ },
+ {
+ "emoji": "📆",
+ "aliases": [
+ "calendar"
+ ]
+ },
+ {
+ "emoji": "🤙",
+ "aliases": [
+ "call_me_hand"
+ ]
+ },
+ {
+ "emoji": "📲",
+ "aliases": [
+ "calling"
+ ]
+ },
+ {
+ "emoji": "🇰🇭",
+ "aliases": [
+ "cambodia"
+ ]
+ },
+ {
+ "emoji": "🐫",
+ "aliases": [
+ "camel"
+ ]
+ },
+ {
+ "emoji": "📷",
+ "aliases": [
+ "camera"
+ ]
+ },
+ {
+ "emoji": "📸",
+ "aliases": [
+ "camera_flash"
+ ]
+ },
+ {
+ "emoji": "🇨🇲",
+ "aliases": [
+ "cameroon"
+ ]
+ },
+ {
+ "emoji": "🏕️",
+ "aliases": [
+ "camping"
+ ]
+ },
+ {
+ "emoji": "🇨🇦",
+ "aliases": [
+ "canada"
+ ]
+ },
+ {
+ "emoji": "🇮🇨",
+ "aliases": [
+ "canary_islands"
+ ]
+ },
+ {
+ "emoji": "♋",
+ "aliases": [
+ "cancer"
+ ]
+ },
+ {
+ "emoji": "🕯️",
+ "aliases": [
+ "candle"
+ ]
+ },
+ {
+ "emoji": "🍬",
+ "aliases": [
+ "candy"
+ ]
+ },
+ {
+ "emoji": "🥫",
+ "aliases": [
+ "canned_food"
+ ]
+ },
+ {
+ "emoji": "🛶",
+ "aliases": [
+ "canoe"
+ ]
+ },
+ {
+ "emoji": "🇨🇻",
+ "aliases": [
+ "cape_verde"
+ ]
+ },
+ {
+ "emoji": "🔠",
+ "aliases": [
+ "capital_abcd"
+ ]
+ },
+ {
+ "emoji": "♑",
+ "aliases": [
+ "capricorn"
+ ]
+ },
+ {
+ "emoji": "🚗",
+ "aliases": [
+ "car",
+ "red_car"
+ ]
+ },
+ {
+ "emoji": "🗃️",
+ "aliases": [
+ "card_file_box"
+ ]
+ },
+ {
+ "emoji": "📇",
+ "aliases": [
+ "card_index"
+ ]
+ },
+ {
+ "emoji": "🗂️",
+ "aliases": [
+ "card_index_dividers"
+ ]
+ },
+ {
+ "emoji": "🇧🇶",
+ "aliases": [
+ "caribbean_netherlands"
+ ]
+ },
+ {
+ "emoji": "🎠",
+ "aliases": [
+ "carousel_horse"
+ ]
+ },
+ {
+ "emoji": "🪚",
+ "aliases": [
+ "carpentry_saw"
+ ]
+ },
+ {
+ "emoji": "🥕",
+ "aliases": [
+ "carrot"
+ ]
+ },
+ {
+ "emoji": "🤸",
+ "aliases": [
+ "cartwheeling"
+ ]
+ },
+ {
+ "emoji": "🐱",
+ "aliases": [
+ "cat"
+ ]
+ },
+ {
+ "emoji": "🐈",
+ "aliases": [
+ "cat2"
+ ]
+ },
+ {
+ "emoji": "🇰🇾",
+ "aliases": [
+ "cayman_islands"
+ ]
+ },
+ {
+ "emoji": "💿",
+ "aliases": [
+ "cd"
+ ]
+ },
+ {
+ "emoji": "🇨🇫",
+ "aliases": [
+ "central_african_republic"
+ ]
+ },
+ {
+ "emoji": "🇪🇦",
+ "aliases": [
+ "ceuta_melilla"
+ ]
+ },
+ {
+ "emoji": "🇹🇩",
+ "aliases": [
+ "chad"
+ ]
+ },
+ {
+ "emoji": "⛓️",
+ "aliases": [
+ "chains"
+ ]
+ },
+ {
+ "emoji": "🪑",
+ "aliases": [
+ "chair"
+ ]
+ },
+ {
+ "emoji": "🍾",
+ "aliases": [
+ "champagne"
+ ]
+ },
+ {
+ "emoji": "💹",
+ "aliases": [
+ "chart"
+ ]
+ },
+ {
+ "emoji": "📉",
+ "aliases": [
+ "chart_with_downwards_trend"
+ ]
+ },
+ {
+ "emoji": "📈",
+ "aliases": [
+ "chart_with_upwards_trend"
+ ]
+ },
+ {
+ "emoji": "🏁",
+ "aliases": [
+ "checkered_flag"
+ ]
+ },
+ {
+ "emoji": "🧀",
+ "aliases": [
+ "cheese"
+ ]
+ },
+ {
+ "emoji": "🍒",
+ "aliases": [
+ "cherries"
+ ]
+ },
+ {
+ "emoji": "🌸",
+ "aliases": [
+ "cherry_blossom"
+ ]
+ },
+ {
+ "emoji": "♟️",
+ "aliases": [
+ "chess_pawn"
+ ]
+ },
+ {
+ "emoji": "🌰",
+ "aliases": [
+ "chestnut"
+ ]
+ },
+ {
+ "emoji": "🐔",
+ "aliases": [
+ "chicken"
+ ]
+ },
+ {
+ "emoji": "🧒",
+ "aliases": [
+ "child"
+ ]
+ },
+ {
+ "emoji": "🚸",
+ "aliases": [
+ "children_crossing"
+ ]
+ },
+ {
+ "emoji": "🇨🇱",
+ "aliases": [
+ "chile"
+ ]
+ },
+ {
+ "emoji": "🐿️",
+ "aliases": [
+ "chipmunk"
+ ]
+ },
+ {
+ "emoji": "🍫",
+ "aliases": [
+ "chocolate_bar"
+ ]
+ },
+ {
+ "emoji": "🥢",
+ "aliases": [
+ "chopsticks"
+ ]
+ },
+ {
+ "emoji": "🇨🇽",
+ "aliases": [
+ "christmas_island"
+ ]
+ },
+ {
+ "emoji": "🎄",
+ "aliases": [
+ "christmas_tree"
+ ]
+ },
+ {
+ "emoji": "⛪",
+ "aliases": [
+ "church"
+ ]
+ },
+ {
+ "emoji": "🎦",
+ "aliases": [
+ "cinema"
+ ]
+ },
+ {
+ "emoji": "🎪",
+ "aliases": [
+ "circus_tent"
+ ]
+ },
+ {
+ "emoji": "🌇",
+ "aliases": [
+ "city_sunrise"
+ ]
+ },
+ {
+ "emoji": "🌆",
+ "aliases": [
+ "city_sunset"
+ ]
+ },
+ {
+ "emoji": "🏙️",
+ "aliases": [
+ "cityscape"
+ ]
+ },
+ {
+ "emoji": "🆑",
+ "aliases": [
+ "cl"
+ ]
+ },
+ {
+ "emoji": "🗜️",
+ "aliases": [
+ "clamp"
+ ]
+ },
+ {
+ "emoji": "👏",
+ "aliases": [
+ "clap"
+ ]
+ },
+ {
+ "emoji": "🎬",
+ "aliases": [
+ "clapper"
+ ]
+ },
+ {
+ "emoji": "🏛️",
+ "aliases": [
+ "classical_building"
+ ]
+ },
+ {
+ "emoji": "🧗",
+ "aliases": [
+ "climbing"
+ ]
+ },
+ {
+ "emoji": "🧗♂️",
+ "aliases": [
+ "climbing_man"
+ ]
+ },
+ {
+ "emoji": "🧗♀️",
+ "aliases": [
+ "climbing_woman"
+ ]
+ },
+ {
+ "emoji": "🥂",
+ "aliases": [
+ "clinking_glasses"
+ ]
+ },
+ {
+ "emoji": "📋",
+ "aliases": [
+ "clipboard"
+ ]
+ },
+ {
+ "emoji": "🇨🇵",
+ "aliases": [
+ "clipperton_island"
+ ]
+ },
+ {
+ "emoji": "🕐",
+ "aliases": [
+ "clock1"
+ ]
+ },
+ {
+ "emoji": "🕙",
+ "aliases": [
+ "clock10"
+ ]
+ },
+ {
+ "emoji": "🕥",
+ "aliases": [
+ "clock1030"
+ ]
+ },
+ {
+ "emoji": "🕚",
+ "aliases": [
+ "clock11"
+ ]
+ },
+ {
+ "emoji": "🕦",
+ "aliases": [
+ "clock1130"
+ ]
+ },
+ {
+ "emoji": "🕛",
+ "aliases": [
+ "clock12"
+ ]
+ },
+ {
+ "emoji": "🕧",
+ "aliases": [
+ "clock1230"
+ ]
+ },
+ {
+ "emoji": "🕜",
+ "aliases": [
+ "clock130"
+ ]
+ },
+ {
+ "emoji": "🕑",
+ "aliases": [
+ "clock2"
+ ]
+ },
+ {
+ "emoji": "🕝",
+ "aliases": [
+ "clock230"
+ ]
+ },
+ {
+ "emoji": "🕒",
+ "aliases": [
+ "clock3"
+ ]
+ },
+ {
+ "emoji": "🕞",
+ "aliases": [
+ "clock330"
+ ]
+ },
+ {
+ "emoji": "🕓",
+ "aliases": [
+ "clock4"
+ ]
+ },
+ {
+ "emoji": "🕟",
+ "aliases": [
+ "clock430"
+ ]
+ },
+ {
+ "emoji": "🕔",
+ "aliases": [
+ "clock5"
+ ]
+ },
+ {
+ "emoji": "🕠",
+ "aliases": [
+ "clock530"
+ ]
+ },
+ {
+ "emoji": "🕕",
+ "aliases": [
+ "clock6"
+ ]
+ },
+ {
+ "emoji": "🕡",
+ "aliases": [
+ "clock630"
+ ]
+ },
+ {
+ "emoji": "🕖",
+ "aliases": [
+ "clock7"
+ ]
+ },
+ {
+ "emoji": "🕢",
+ "aliases": [
+ "clock730"
+ ]
+ },
+ {
+ "emoji": "🕗",
+ "aliases": [
+ "clock8"
+ ]
+ },
+ {
+ "emoji": "🕣",
+ "aliases": [
+ "clock830"
+ ]
+ },
+ {
+ "emoji": "🕘",
+ "aliases": [
+ "clock9"
+ ]
+ },
+ {
+ "emoji": "🕤",
+ "aliases": [
+ "clock930"
+ ]
+ },
+ {
+ "emoji": "📕",
+ "aliases": [
+ "closed_book"
+ ]
+ },
+ {
+ "emoji": "🔐",
+ "aliases": [
+ "closed_lock_with_key"
+ ]
+ },
+ {
+ "emoji": "🌂",
+ "aliases": [
+ "closed_umbrella"
+ ]
+ },
+ {
+ "emoji": "☁️",
+ "aliases": [
+ "cloud"
+ ]
+ },
+ {
+ "emoji": "🌩️",
+ "aliases": [
+ "cloud_with_lightning"
+ ]
+ },
+ {
+ "emoji": "⛈️",
+ "aliases": [
+ "cloud_with_lightning_and_rain"
+ ]
+ },
+ {
+ "emoji": "🌧️",
+ "aliases": [
+ "cloud_with_rain"
+ ]
+ },
+ {
+ "emoji": "🌨️",
+ "aliases": [
+ "cloud_with_snow"
+ ]
+ },
+ {
+ "emoji": "🤡",
+ "aliases": [
+ "clown_face"
+ ]
+ },
+ {
+ "emoji": "♣️",
+ "aliases": [
+ "clubs"
+ ]
+ },
+ {
+ "emoji": "🇨🇳",
+ "aliases": [
+ "cn"
+ ]
+ },
+ {
+ "emoji": "🧥",
+ "aliases": [
+ "coat"
+ ]
+ },
+ {
+ "emoji": "🪳",
+ "aliases": [
+ "cockroach"
+ ]
+ },
+ {
+ "emoji": "🍸",
+ "aliases": [
+ "cocktail"
+ ]
+ },
+ {
+ "emoji": "🥥",
+ "aliases": [
+ "coconut"
+ ]
+ },
+ {
+ "emoji": "🇨🇨",
+ "aliases": [
+ "cocos_islands"
+ ]
+ },
+ {
+ "emoji": "☕",
+ "aliases": [
+ "coffee"
+ ]
+ },
+ {
+ "emoji": "⚰️",
+ "aliases": [
+ "coffin"
+ ]
+ },
+ {
+ "emoji": "🪙",
+ "aliases": [
+ "coin"
+ ]
+ },
+ {
+ "emoji": "🥶",
+ "aliases": [
+ "cold_face"
+ ]
+ },
+ {
+ "emoji": "😰",
+ "aliases": [
+ "cold_sweat"
+ ]
+ },
+ {
+ "emoji": "🇨🇴",
+ "aliases": [
+ "colombia"
+ ]
+ },
+ {
+ "emoji": "☄️",
+ "aliases": [
+ "comet"
+ ]
+ },
+ {
+ "emoji": "🇰🇲",
+ "aliases": [
+ "comoros"
+ ]
+ },
+ {
+ "emoji": "🧭",
+ "aliases": [
+ "compass"
+ ]
+ },
+ {
+ "emoji": "💻",
+ "aliases": [
+ "computer"
+ ]
+ },
+ {
+ "emoji": "🖱️",
+ "aliases": [
+ "computer_mouse"
+ ]
+ },
+ {
+ "emoji": "🎊",
+ "aliases": [
+ "confetti_ball"
+ ]
+ },
+ {
+ "emoji": "😖",
+ "aliases": [
+ "confounded"
+ ]
+ },
+ {
+ "emoji": "😕",
+ "aliases": [
+ "confused"
+ ]
+ },
+ {
+ "emoji": "🇨🇬",
+ "aliases": [
+ "congo_brazzaville"
+ ]
+ },
+ {
+ "emoji": "🇨🇩",
+ "aliases": [
+ "congo_kinshasa"
+ ]
+ },
+ {
+ "emoji": "㊗️",
+ "aliases": [
+ "congratulations"
+ ]
+ },
+ {
+ "emoji": "🚧",
+ "aliases": [
+ "construction"
+ ]
+ },
+ {
+ "emoji": "👷",
+ "aliases": [
+ "construction_worker"
+ ]
+ },
+ {
+ "emoji": "👷♂️",
+ "aliases": [
+ "construction_worker_man"
+ ]
+ },
+ {
+ "emoji": "👷♀️",
+ "aliases": [
+ "construction_worker_woman"
+ ]
+ },
+ {
+ "emoji": "🎛️",
+ "aliases": [
+ "control_knobs"
+ ]
+ },
+ {
+ "emoji": "🏪",
+ "aliases": [
+ "convenience_store"
+ ]
+ },
+ {
+ "emoji": "🧑🍳",
+ "aliases": [
+ "cook"
+ ]
+ },
+ {
+ "emoji": "🇨🇰",
+ "aliases": [
+ "cook_islands"
+ ]
+ },
+ {
+ "emoji": "🍪",
+ "aliases": [
+ "cookie"
+ ]
+ },
+ {
+ "emoji": "🆒",
+ "aliases": [
+ "cool"
+ ]
+ },
+ {
+ "emoji": "©️",
+ "aliases": [
+ "copyright"
+ ]
+ },
+ {
+ "emoji": "🪸",
+ "aliases": [
+ "coral"
+ ]
+ },
+ {
+ "emoji": "🌽",
+ "aliases": [
+ "corn"
+ ]
+ },
+ {
+ "emoji": "🇨🇷",
+ "aliases": [
+ "costa_rica"
+ ]
+ },
+ {
+ "emoji": "🇨🇮",
+ "aliases": [
+ "cote_divoire"
+ ]
+ },
+ {
+ "emoji": "🛋️",
+ "aliases": [
+ "couch_and_lamp"
+ ]
+ },
+ {
+ "emoji": "👫",
+ "aliases": [
+ "couple"
+ ]
+ },
+ {
+ "emoji": "💑",
+ "aliases": [
+ "couple_with_heart"
+ ]
+ },
+ {
+ "emoji": "👨❤️👨",
+ "aliases": [
+ "couple_with_heart_man_man"
+ ]
+ },
+ {
+ "emoji": "👩❤️👨",
+ "aliases": [
+ "couple_with_heart_woman_man"
+ ]
+ },
+ {
+ "emoji": "👩❤️👩",
+ "aliases": [
+ "couple_with_heart_woman_woman"
+ ]
+ },
+ {
+ "emoji": "💏",
+ "aliases": [
+ "couplekiss"
+ ]
+ },
+ {
+ "emoji": "👨❤️💋👨",
+ "aliases": [
+ "couplekiss_man_man"
+ ]
+ },
+ {
+ "emoji": "👩❤️💋👨",
+ "aliases": [
+ "couplekiss_man_woman"
+ ]
+ },
+ {
+ "emoji": "👩❤️💋👩",
+ "aliases": [
+ "couplekiss_woman_woman"
+ ]
+ },
+ {
+ "emoji": "🐮",
+ "aliases": [
+ "cow"
+ ]
+ },
+ {
+ "emoji": "🐄",
+ "aliases": [
+ "cow2"
+ ]
+ },
+ {
+ "emoji": "🤠",
+ "aliases": [
+ "cowboy_hat_face"
+ ]
+ },
+ {
+ "emoji": "🦀",
+ "aliases": [
+ "crab"
+ ]
+ },
+ {
+ "emoji": "🖍️",
+ "aliases": [
+ "crayon"
+ ]
+ },
+ {
+ "emoji": "💳",
+ "aliases": [
+ "credit_card"
+ ]
+ },
+ {
+ "emoji": "🌙",
+ "aliases": [
+ "crescent_moon"
+ ]
+ },
+ {
+ "emoji": "🦗",
+ "aliases": [
+ "cricket"
+ ]
+ },
+ {
+ "emoji": "🏏",
+ "aliases": [
+ "cricket_game"
+ ]
+ },
+ {
+ "emoji": "🇭🇷",
+ "aliases": [
+ "croatia"
+ ]
+ },
+ {
+ "emoji": "🐊",
+ "aliases": [
+ "crocodile"
+ ]
+ },
+ {
+ "emoji": "🥐",
+ "aliases": [
+ "croissant"
+ ]
+ },
+ {
+ "emoji": "🤞",
+ "aliases": [
+ "crossed_fingers"
+ ]
+ },
+ {
+ "emoji": "🎌",
+ "aliases": [
+ "crossed_flags"
+ ]
+ },
+ {
+ "emoji": "⚔️",
+ "aliases": [
+ "crossed_swords"
+ ]
+ },
+ {
+ "emoji": "👑",
+ "aliases": [
+ "crown"
+ ]
+ },
+ {
+ "emoji": "🩼",
+ "aliases": [
+ "crutch"
+ ]
+ },
+ {
+ "emoji": "😢",
+ "aliases": [
+ "cry"
+ ]
+ },
+ {
+ "emoji": "😿",
+ "aliases": [
+ "crying_cat_face"
+ ]
+ },
+ {
+ "emoji": "🔮",
+ "aliases": [
+ "crystal_ball"
+ ]
+ },
+ {
+ "emoji": "🇨🇺",
+ "aliases": [
+ "cuba"
+ ]
+ },
+ {
+ "emoji": "🥒",
+ "aliases": [
+ "cucumber"
+ ]
+ },
+ {
+ "emoji": "🥤",
+ "aliases": [
+ "cup_with_straw"
+ ]
+ },
+ {
+ "emoji": "🧁",
+ "aliases": [
+ "cupcake"
+ ]
+ },
+ {
+ "emoji": "💘",
+ "aliases": [
+ "cupid"
+ ]
+ },
+ {
+ "emoji": "🇨🇼",
+ "aliases": [
+ "curacao"
+ ]
+ },
+ {
+ "emoji": "🥌",
+ "aliases": [
+ "curling_stone"
+ ]
+ },
+ {
+ "emoji": "👨🦱",
+ "aliases": [
+ "curly_haired_man"
+ ]
+ },
+ {
+ "emoji": "👩🦱",
+ "aliases": [
+ "curly_haired_woman"
+ ]
+ },
+ {
+ "emoji": "➰",
+ "aliases": [
+ "curly_loop"
+ ]
+ },
+ {
+ "emoji": "💱",
+ "aliases": [
+ "currency_exchange"
+ ]
+ },
+ {
+ "emoji": "🍛",
+ "aliases": [
+ "curry"
+ ]
+ },
+ {
+ "emoji": "🤬",
+ "aliases": [
+ "cursing_face"
+ ]
+ },
+ {
+ "emoji": "🍮",
+ "aliases": [
+ "custard"
+ ]
+ },
+ {
+ "emoji": "🛃",
+ "aliases": [
+ "customs"
+ ]
+ },
+ {
+ "emoji": "🥩",
+ "aliases": [
+ "cut_of_meat"
+ ]
+ },
+ {
+ "emoji": "🌀",
+ "aliases": [
+ "cyclone"
+ ]
+ },
+ {
+ "emoji": "🇨🇾",
+ "aliases": [
+ "cyprus"
+ ]
+ },
+ {
+ "emoji": "🇨🇿",
+ "aliases": [
+ "czech_republic"
+ ]
+ },
+ {
+ "emoji": "🗡️",
+ "aliases": [
+ "dagger"
+ ]
+ },
+ {
+ "emoji": "👯",
+ "aliases": [
+ "dancers"
+ ]
+ },
+ {
+ "emoji": "👯♂️",
+ "aliases": [
+ "dancing_men"
+ ]
+ },
+ {
+ "emoji": "👯♀️",
+ "aliases": [
+ "dancing_women"
+ ]
+ },
+ {
+ "emoji": "🍡",
+ "aliases": [
+ "dango"
+ ]
+ },
+ {
+ "emoji": "🕶️",
+ "aliases": [
+ "dark_sunglasses"
+ ]
+ },
+ {
+ "emoji": "🎯",
+ "aliases": [
+ "dart"
+ ]
+ },
+ {
+ "emoji": "💨",
+ "aliases": [
+ "dash"
+ ]
+ },
+ {
+ "emoji": "📅",
+ "aliases": [
+ "date"
+ ]
+ },
+ {
+ "emoji": "🇩🇪",
+ "aliases": [
+ "de"
+ ]
+ },
+ {
+ "emoji": "🧏♂️",
+ "aliases": [
+ "deaf_man"
+ ]
+ },
+ {
+ "emoji": "🧏",
+ "aliases": [
+ "deaf_person"
+ ]
+ },
+ {
+ "emoji": "🧏♀️",
+ "aliases": [
+ "deaf_woman"
+ ]
+ },
+ {
+ "emoji": "🌳",
+ "aliases": [
+ "deciduous_tree"
+ ]
+ },
+ {
+ "emoji": "🦌",
+ "aliases": [
+ "deer"
+ ]
+ },
+ {
+ "emoji": "🇩🇰",
+ "aliases": [
+ "denmark"
+ ]
+ },
+ {
+ "emoji": "🏬",
+ "aliases": [
+ "department_store"
+ ]
+ },
+ {
+ "emoji": "🏚️",
+ "aliases": [
+ "derelict_house"
+ ]
+ },
+ {
+ "emoji": "🏜️",
+ "aliases": [
+ "desert"
+ ]
+ },
+ {
+ "emoji": "🏝️",
+ "aliases": [
+ "desert_island"
+ ]
+ },
+ {
+ "emoji": "🖥️",
+ "aliases": [
+ "desktop_computer"
+ ]
+ },
+ {
+ "emoji": "🕵️",
+ "aliases": [
+ "detective"
+ ]
+ },
+ {
+ "emoji": "💠",
+ "aliases": [
+ "diamond_shape_with_a_dot_inside"
+ ]
+ },
+ {
+ "emoji": "♦️",
+ "aliases": [
+ "diamonds"
+ ]
+ },
+ {
+ "emoji": "🇩🇬",
+ "aliases": [
+ "diego_garcia"
+ ]
+ },
+ {
+ "emoji": "😞",
+ "aliases": [
+ "disappointed"
+ ]
+ },
+ {
+ "emoji": "😥",
+ "aliases": [
+ "disappointed_relieved"
+ ]
+ },
+ {
+ "emoji": "🥸",
+ "aliases": [
+ "disguised_face"
+ ]
+ },
+ {
+ "emoji": "🤿",
+ "aliases": [
+ "diving_mask"
+ ]
+ },
+ {
+ "emoji": "🪔",
+ "aliases": [
+ "diya_lamp"
+ ]
+ },
+ {
+ "emoji": "💫",
+ "aliases": [
+ "dizzy"
+ ]
+ },
+ {
+ "emoji": "😵",
+ "aliases": [
+ "dizzy_face"
+ ]
+ },
+ {
+ "emoji": "🇩🇯",
+ "aliases": [
+ "djibouti"
+ ]
+ },
+ {
+ "emoji": "🧬",
+ "aliases": [
+ "dna"
+ ]
+ },
+ {
+ "emoji": "🚯",
+ "aliases": [
+ "do_not_litter"
+ ]
+ },
+ {
+ "emoji": "🦤",
+ "aliases": [
+ "dodo"
+ ]
+ },
+ {
+ "emoji": "🐶",
+ "aliases": [
+ "dog"
+ ]
+ },
+ {
+ "emoji": "🐕",
+ "aliases": [
+ "dog2"
+ ]
+ },
+ {
+ "emoji": "💵",
+ "aliases": [
+ "dollar"
+ ]
+ },
+ {
+ "emoji": "🎎",
+ "aliases": [
+ "dolls"
+ ]
+ },
+ {
+ "emoji": "🐬",
+ "aliases": [
+ "dolphin",
+ "flipper"
+ ]
+ },
+ {
+ "emoji": "🇩🇲",
+ "aliases": [
+ "dominica"
+ ]
+ },
+ {
+ "emoji": "🇩🇴",
+ "aliases": [
+ "dominican_republic"
+ ]
+ },
+ {
+ "emoji": "🫏",
+ "aliases": [
+ "donkey"
+ ]
+ },
+ {
+ "emoji": "🚪",
+ "aliases": [
+ "door"
+ ]
+ },
+ {
+ "emoji": "🫥",
+ "aliases": [
+ "dotted_line_face"
+ ]
+ },
+ {
+ "emoji": "🍩",
+ "aliases": [
+ "doughnut"
+ ]
+ },
+ {
+ "emoji": "🕊️",
+ "aliases": [
+ "dove"
+ ]
+ },
+ {
+ "emoji": "🐉",
+ "aliases": [
+ "dragon"
+ ]
+ },
+ {
+ "emoji": "🐲",
+ "aliases": [
+ "dragon_face"
+ ]
+ },
+ {
+ "emoji": "👗",
+ "aliases": [
+ "dress"
+ ]
+ },
+ {
+ "emoji": "🐪",
+ "aliases": [
+ "dromedary_camel"
+ ]
+ },
+ {
+ "emoji": "🤤",
+ "aliases": [
+ "drooling_face"
+ ]
+ },
+ {
+ "emoji": "🩸",
+ "aliases": [
+ "drop_of_blood"
+ ]
+ },
+ {
+ "emoji": "💧",
+ "aliases": [
+ "droplet"
+ ]
+ },
+ {
+ "emoji": "🥁",
+ "aliases": [
+ "drum"
+ ]
+ },
+ {
+ "emoji": "🦆",
+ "aliases": [
+ "duck"
+ ]
+ },
+ {
+ "emoji": "🥟",
+ "aliases": [
+ "dumpling"
+ ]
+ },
+ {
+ "emoji": "📀",
+ "aliases": [
+ "dvd"
+ ]
+ },
+ {
+ "emoji": "🦅",
+ "aliases": [
+ "eagle"
+ ]
+ },
+ {
+ "emoji": "👂",
+ "aliases": [
+ "ear"
+ ]
+ },
+ {
+ "emoji": "🌾",
+ "aliases": [
+ "ear_of_rice"
+ ]
+ },
+ {
+ "emoji": "🦻",
+ "aliases": [
+ "ear_with_hearing_aid"
+ ]
+ },
+ {
+ "emoji": "🌍",
+ "aliases": [
+ "earth_africa"
+ ]
+ },
+ {
+ "emoji": "🌎",
+ "aliases": [
+ "earth_americas"
+ ]
+ },
+ {
+ "emoji": "🌏",
+ "aliases": [
+ "earth_asia"
+ ]
+ },
+ {
+ "emoji": "🇪🇨",
+ "aliases": [
+ "ecuador"
+ ]
+ },
+ {
+ "emoji": "🥚",
+ "aliases": [
+ "egg"
+ ]
+ },
+ {
+ "emoji": "🍆",
+ "aliases": [
+ "eggplant"
+ ]
+ },
+ {
+ "emoji": "🇪🇬",
+ "aliases": [
+ "egypt"
+ ]
+ },
+ {
+ "emoji": "8️⃣",
+ "aliases": [
+ "eight"
+ ]
+ },
+ {
+ "emoji": "✴️",
+ "aliases": [
+ "eight_pointed_black_star"
+ ]
+ },
+ {
+ "emoji": "✳️",
+ "aliases": [
+ "eight_spoked_asterisk"
+ ]
+ },
+ {
+ "emoji": "⏏️",
+ "aliases": [
+ "eject_button"
+ ]
+ },
+ {
+ "emoji": "🇸🇻",
+ "aliases": [
+ "el_salvador"
+ ]
+ },
+ {
+ "emoji": "🔌",
+ "aliases": [
+ "electric_plug"
+ ]
+ },
+ {
+ "emoji": "🐘",
+ "aliases": [
+ "elephant"
+ ]
+ },
+ {
+ "emoji": "🛗",
+ "aliases": [
+ "elevator"
+ ]
+ },
+ {
+ "emoji": "🧝",
+ "aliases": [
+ "elf"
+ ]
+ },
+ {
+ "emoji": "🧝♂️",
+ "aliases": [
+ "elf_man"
+ ]
+ },
+ {
+ "emoji": "🧝♀️",
+ "aliases": [
+ "elf_woman"
+ ]
+ },
+ {
+ "emoji": "📧",
+ "aliases": [
+ "email",
+ "e-mail"
+ ]
+ },
+ {
+ "emoji": "🪹",
+ "aliases": [
+ "empty_nest"
+ ]
+ },
+ {
+ "emoji": "🔚",
+ "aliases": [
+ "end"
+ ]
+ },
+ {
+ "emoji": "🏴",
+ "aliases": [
+ "england"
+ ]
+ },
+ {
+ "emoji": "✉️",
+ "aliases": [
+ "envelope"
+ ]
+ },
+ {
+ "emoji": "📩",
+ "aliases": [
+ "envelope_with_arrow"
+ ]
+ },
+ {
+ "emoji": "🇬🇶",
+ "aliases": [
+ "equatorial_guinea"
+ ]
+ },
+ {
+ "emoji": "🇪🇷",
+ "aliases": [
+ "eritrea"
+ ]
+ },
+ {
+ "emoji": "🇪🇸",
+ "aliases": [
+ "es"
+ ]
+ },
+ {
+ "emoji": "🇪🇪",
+ "aliases": [
+ "estonia"
+ ]
+ },
+ {
+ "emoji": "🇪🇹",
+ "aliases": [
+ "ethiopia"
+ ]
+ },
+ {
+ "emoji": "🇪🇺",
+ "aliases": [
+ "eu",
+ "european_union"
+ ]
+ },
+ {
+ "emoji": "💶",
+ "aliases": [
+ "euro"
+ ]
+ },
+ {
+ "emoji": "🏰",
+ "aliases": [
+ "european_castle"
+ ]
+ },
+ {
+ "emoji": "🏤",
+ "aliases": [
+ "european_post_office"
+ ]
+ },
+ {
+ "emoji": "🌲",
+ "aliases": [
+ "evergreen_tree"
+ ]
+ },
+ {
+ "emoji": "❗",
+ "aliases": [
+ "exclamation",
+ "heavy_exclamation_mark"
+ ]
+ },
+ {
+ "emoji": "🤯",
+ "aliases": [
+ "exploding_head"
+ ]
+ },
+ {
+ "emoji": "😑",
+ "aliases": [
+ "expressionless"
+ ]
+ },
+ {
+ "emoji": "👁️",
+ "aliases": [
+ "eye"
+ ]
+ },
+ {
+ "emoji": "👁️🗨️",
+ "aliases": [
+ "eye_speech_bubble"
+ ]
+ },
+ {
+ "emoji": "👓",
+ "aliases": [
+ "eyeglasses"
+ ]
+ },
+ {
+ "emoji": "👀",
+ "aliases": [
+ "eyes"
+ ]
+ },
+ {
+ "emoji": "😮💨",
+ "aliases": [
+ "face_exhaling"
+ ]
+ },
+ {
+ "emoji": "🥹",
+ "aliases": [
+ "face_holding_back_tears"
+ ]
+ },
+ {
+ "emoji": "😶🌫️",
+ "aliases": [
+ "face_in_clouds"
+ ]
+ },
+ {
+        "emoji": "🫩",
+ "aliases": [
+ "face_with_bags_under_eyes"
+ ]
+ },
+ {
+ "emoji": "🫤",
+ "aliases": [
+ "face_with_diagonal_mouth"
+ ]
+ },
+ {
+ "emoji": "🤕",
+ "aliases": [
+ "face_with_head_bandage"
+ ]
+ },
+ {
+ "emoji": "🫢",
+ "aliases": [
+ "face_with_open_eyes_and_hand_over_mouth"
+ ]
+ },
+ {
+ "emoji": "🫣",
+ "aliases": [
+ "face_with_peeking_eye"
+ ]
+ },
+ {
+ "emoji": "😵💫",
+ "aliases": [
+ "face_with_spiral_eyes"
+ ]
+ },
+ {
+ "emoji": "🤒",
+ "aliases": [
+ "face_with_thermometer"
+ ]
+ },
+ {
+ "emoji": "🤦",
+ "aliases": [
+ "facepalm"
+ ]
+ },
+ {
+ "emoji": "🏭",
+ "aliases": [
+ "factory"
+ ]
+ },
+ {
+ "emoji": "🧑🏭",
+ "aliases": [
+ "factory_worker"
+ ]
+ },
+ {
+ "emoji": "🧚",
+ "aliases": [
+ "fairy"
+ ]
+ },
+ {
+ "emoji": "🧚♂️",
+ "aliases": [
+ "fairy_man"
+ ]
+ },
+ {
+ "emoji": "🧚♀️",
+ "aliases": [
+ "fairy_woman"
+ ]
+ },
+ {
+ "emoji": "🧆",
+ "aliases": [
+ "falafel"
+ ]
+ },
+ {
+ "emoji": "🇫🇰",
+ "aliases": [
+ "falkland_islands"
+ ]
+ },
+ {
+ "emoji": "🍂",
+ "aliases": [
+ "fallen_leaf"
+ ]
+ },
+ {
+ "emoji": "👪",
+ "aliases": [
+ "family"
+ ]
+ },
+ {
+ "emoji": "🧑🧑🧒",
+ "aliases": [
+ "family_adult_adult_child"
+ ]
+ },
+ {
+ "emoji": "🧑🧑🧒🧒",
+ "aliases": [
+ "family_adult_adult_child_child"
+ ]
+ },
+ {
+ "emoji": "🧑🧒",
+ "aliases": [
+ "family_adult_child"
+ ]
+ },
+ {
+ "emoji": "🧑🧒🧒",
+ "aliases": [
+ "family_adult_child_child"
+ ]
+ },
+ {
+ "emoji": "👨👦",
+ "aliases": [
+ "family_man_boy"
+ ]
+ },
+ {
+ "emoji": "👨👦👦",
+ "aliases": [
+ "family_man_boy_boy"
+ ]
+ },
+ {
+ "emoji": "👨👧",
+ "aliases": [
+ "family_man_girl"
+ ]
+ },
+ {
+ "emoji": "👨👧👦",
+ "aliases": [
+ "family_man_girl_boy"
+ ]
+ },
+ {
+ "emoji": "👨👧👧",
+ "aliases": [
+ "family_man_girl_girl"
+ ]
+ },
+ {
+ "emoji": "👨👨👦",
+ "aliases": [
+ "family_man_man_boy"
+ ]
+ },
+ {
+ "emoji": "👨👨👦👦",
+ "aliases": [
+ "family_man_man_boy_boy"
+ ]
+ },
+ {
+ "emoji": "👨👨👧",
+ "aliases": [
+ "family_man_man_girl"
+ ]
+ },
+ {
+ "emoji": "👨👨👧👦",
+ "aliases": [
+ "family_man_man_girl_boy"
+ ]
+ },
+ {
+ "emoji": "👨👨👧👧",
+ "aliases": [
+ "family_man_man_girl_girl"
+ ]
+ },
+ {
+ "emoji": "👨👩👦",
+ "aliases": [
+ "family_man_woman_boy"
+ ]
+ },
+ {
+ "emoji": "👨👩👦👦",
+ "aliases": [
+ "family_man_woman_boy_boy"
+ ]
+ },
+ {
+ "emoji": "👨👩👧",
+ "aliases": [
+ "family_man_woman_girl"
+ ]
+ },
+ {
+ "emoji": "👨👩👧👦",
+ "aliases": [
+ "family_man_woman_girl_boy"
+ ]
+ },
+ {
+ "emoji": "👨👩👧👧",
+ "aliases": [
+ "family_man_woman_girl_girl"
+ ]
+ },
+ {
+ "emoji": "👩👦",
+ "aliases": [
+ "family_woman_boy"
+ ]
+ },
+ {
+ "emoji": "👩👦👦",
+ "aliases": [
+ "family_woman_boy_boy"
+ ]
+ },
+ {
+ "emoji": "👩👧",
+ "aliases": [
+ "family_woman_girl"
+ ]
+ },
+ {
+ "emoji": "👩👧👦",
+ "aliases": [
+ "family_woman_girl_boy"
+ ]
+ },
+ {
+ "emoji": "👩👧👧",
+ "aliases": [
+ "family_woman_girl_girl"
+ ]
+ },
+ {
+ "emoji": "👩👩👦",
+ "aliases": [
+ "family_woman_woman_boy"
+ ]
+ },
+ {
+ "emoji": "👩👩👦👦",
+ "aliases": [
+ "family_woman_woman_boy_boy"
+ ]
+ },
+ {
+ "emoji": "👩👩👧",
+ "aliases": [
+ "family_woman_woman_girl"
+ ]
+ },
+ {
+ "emoji": "👩👩👧👦",
+ "aliases": [
+ "family_woman_woman_girl_boy"
+ ]
+ },
+ {
+ "emoji": "👩👩👧👧",
+ "aliases": [
+ "family_woman_woman_girl_girl"
+ ]
+ },
+ {
+ "emoji": "🧑🌾",
+ "aliases": [
+ "farmer"
+ ]
+ },
+ {
+ "emoji": "🇫🇴",
+ "aliases": [
+ "faroe_islands"
+ ]
+ },
+ {
+ "emoji": "⏩",
+ "aliases": [
+ "fast_forward"
+ ]
+ },
+ {
+ "emoji": "📠",
+ "aliases": [
+ "fax"
+ ]
+ },
+ {
+ "emoji": "😨",
+ "aliases": [
+ "fearful"
+ ]
+ },
+ {
+ "emoji": "🪶",
+ "aliases": [
+ "feather"
+ ]
+ },
+ {
+ "emoji": "🐾",
+ "aliases": [
+ "feet",
+ "paw_prints"
+ ]
+ },
+ {
+ "emoji": "🕵️♀️",
+ "aliases": [
+ "female_detective"
+ ]
+ },
+ {
+ "emoji": "♀️",
+ "aliases": [
+ "female_sign"
+ ]
+ },
+ {
+ "emoji": "🎡",
+ "aliases": [
+ "ferris_wheel"
+ ]
+ },
+ {
+ "emoji": "⛴️",
+ "aliases": [
+ "ferry"
+ ]
+ },
+ {
+ "emoji": "🏑",
+ "aliases": [
+ "field_hockey"
+ ]
+ },
+ {
+ "emoji": "🇫🇯",
+ "aliases": [
+ "fiji"
+ ]
+ },
+ {
+ "emoji": "🗄️",
+ "aliases": [
+ "file_cabinet"
+ ]
+ },
+ {
+ "emoji": "📁",
+ "aliases": [
+ "file_folder"
+ ]
+ },
+ {
+ "emoji": "📽️",
+ "aliases": [
+ "film_projector"
+ ]
+ },
+ {
+ "emoji": "🎞️",
+ "aliases": [
+ "film_strip"
+ ]
+ },
+ {
+        "emoji": "🫆",
+ "aliases": [
+ "fingerprint"
+ ]
+ },
+ {
+ "emoji": "🇫🇮",
+ "aliases": [
+ "finland"
+ ]
+ },
+ {
+ "emoji": "🔥",
+ "aliases": [
+ "fire"
+ ]
+ },
+ {
+ "emoji": "🚒",
+ "aliases": [
+ "fire_engine"
+ ]
+ },
+ {
+ "emoji": "🧯",
+ "aliases": [
+ "fire_extinguisher"
+ ]
+ },
+ {
+ "emoji": "🧨",
+ "aliases": [
+ "firecracker"
+ ]
+ },
+ {
+ "emoji": "🧑🚒",
+ "aliases": [
+ "firefighter"
+ ]
+ },
+ {
+ "emoji": "🎆",
+ "aliases": [
+ "fireworks"
+ ]
+ },
+ {
+ "emoji": "🌓",
+ "aliases": [
+ "first_quarter_moon"
+ ]
+ },
+ {
+ "emoji": "🌛",
+ "aliases": [
+ "first_quarter_moon_with_face"
+ ]
+ },
+ {
+ "emoji": "🐟",
+ "aliases": [
+ "fish"
+ ]
+ },
+ {
+ "emoji": "🍥",
+ "aliases": [
+ "fish_cake"
+ ]
+ },
+ {
+ "emoji": "🎣",
+ "aliases": [
+ "fishing_pole_and_fish"
+ ]
+ },
+ {
+ "emoji": "🤛",
+ "aliases": [
+ "fist_left"
+ ]
+ },
+ {
+ "emoji": "👊",
+ "aliases": [
+ "fist_oncoming",
+ "facepunch",
+ "punch"
+ ]
+ },
+ {
+ "emoji": "✊",
+ "aliases": [
+ "fist_raised",
+ "fist"
+ ]
+ },
+ {
+ "emoji": "🤜",
+ "aliases": [
+ "fist_right"
+ ]
+ },
+ {
+ "emoji": "5️⃣",
+ "aliases": [
+ "five"
+ ]
+ },
+ {
+ "emoji": "🇨🇶",
+ "aliases": [
+ "flag_sark"
+ ]
+ },
+ {
+ "emoji": "🎏",
+ "aliases": [
+ "flags"
+ ]
+ },
+ {
+ "emoji": "🦩",
+ "aliases": [
+ "flamingo"
+ ]
+ },
+ {
+ "emoji": "🔦",
+ "aliases": [
+ "flashlight"
+ ]
+ },
+ {
+ "emoji": "🥿",
+ "aliases": [
+ "flat_shoe"
+ ]
+ },
+ {
+ "emoji": "🫓",
+ "aliases": [
+ "flatbread"
+ ]
+ },
+ {
+ "emoji": "⚜️",
+ "aliases": [
+ "fleur_de_lis"
+ ]
+ },
+ {
+ "emoji": "🛬",
+ "aliases": [
+ "flight_arrival"
+ ]
+ },
+ {
+ "emoji": "🛫",
+ "aliases": [
+ "flight_departure"
+ ]
+ },
+ {
+ "emoji": "💾",
+ "aliases": [
+ "floppy_disk"
+ ]
+ },
+ {
+ "emoji": "🎴",
+ "aliases": [
+ "flower_playing_cards"
+ ]
+ },
+ {
+ "emoji": "😳",
+ "aliases": [
+ "flushed"
+ ]
+ },
+ {
+ "emoji": "🪈",
+ "aliases": [
+ "flute"
+ ]
+ },
+ {
+ "emoji": "🪰",
+ "aliases": [
+ "fly"
+ ]
+ },
+ {
+ "emoji": "🥏",
+ "aliases": [
+ "flying_disc"
+ ]
+ },
+ {
+ "emoji": "🛸",
+ "aliases": [
+ "flying_saucer"
+ ]
+ },
+ {
+ "emoji": "🌫️",
+ "aliases": [
+ "fog"
+ ]
+ },
+ {
+ "emoji": "🌁",
+ "aliases": [
+ "foggy"
+ ]
+ },
+ {
+ "emoji": "🪭",
+ "aliases": [
+ "folding_hand_fan"
+ ]
+ },
+ {
+ "emoji": "🫕",
+ "aliases": [
+ "fondue"
+ ]
+ },
+ {
+ "emoji": "🦶",
+ "aliases": [
+ "foot"
+ ]
+ },
+ {
+ "emoji": "🏈",
+ "aliases": [
+ "football"
+ ]
+ },
+ {
+ "emoji": "👣",
+ "aliases": [
+ "footprints"
+ ]
+ },
+ {
+ "emoji": "🍴",
+ "aliases": [
+ "fork_and_knife"
+ ]
+ },
+ {
+ "emoji": "🥠",
+ "aliases": [
+ "fortune_cookie"
+ ]
+ },
+ {
+ "emoji": "⛲",
+ "aliases": [
+ "fountain"
+ ]
+ },
+ {
+ "emoji": "🖋️",
+ "aliases": [
+ "fountain_pen"
+ ]
+ },
+ {
+ "emoji": "4️⃣",
+ "aliases": [
+ "four"
+ ]
+ },
+ {
+ "emoji": "🍀",
+ "aliases": [
+ "four_leaf_clover"
+ ]
+ },
+ {
+ "emoji": "🦊",
+ "aliases": [
+ "fox_face"
+ ]
+ },
+ {
+ "emoji": "🇫🇷",
+ "aliases": [
+ "fr"
+ ]
+ },
+ {
+ "emoji": "🖼️",
+ "aliases": [
+ "framed_picture"
+ ]
+ },
+ {
+ "emoji": "🆓",
+ "aliases": [
+ "free"
+ ]
+ },
+ {
+ "emoji": "🇬🇫",
+ "aliases": [
+ "french_guiana"
+ ]
+ },
+ {
+ "emoji": "🇵🇫",
+ "aliases": [
+ "french_polynesia"
+ ]
+ },
+ {
+ "emoji": "🇹🇫",
+ "aliases": [
+ "french_southern_territories"
+ ]
+ },
+ {
+ "emoji": "🍳",
+ "aliases": [
+ "fried_egg"
+ ]
+ },
+ {
+ "emoji": "🍤",
+ "aliases": [
+ "fried_shrimp"
+ ]
+ },
+ {
+ "emoji": "🍟",
+ "aliases": [
+ "fries"
+ ]
+ },
+ {
+ "emoji": "🐸",
+ "aliases": [
+ "frog"
+ ]
+ },
+ {
+ "emoji": "😦",
+ "aliases": [
+ "frowning"
+ ]
+ },
+ {
+ "emoji": "☹️",
+ "aliases": [
+ "frowning_face"
+ ]
+ },
+ {
+ "emoji": "🙍♂️",
+ "aliases": [
+ "frowning_man"
+ ]
+ },
+ {
+ "emoji": "🙍",
+ "aliases": [
+ "frowning_person"
+ ]
+ },
+ {
+ "emoji": "🙍♀️",
+ "aliases": [
+ "frowning_woman"
+ ]
+ },
+ {
+ "emoji": "⛽",
+ "aliases": [
+ "fuelpump"
+ ]
+ },
+ {
+ "emoji": "🌕",
+ "aliases": [
+ "full_moon"
+ ]
+ },
+ {
+ "emoji": "🌝",
+ "aliases": [
+ "full_moon_with_face"
+ ]
+ },
+ {
+ "emoji": "⚱️",
+ "aliases": [
+ "funeral_urn"
+ ]
+ },
+ {
+ "emoji": "🇬🇦",
+ "aliases": [
+ "gabon"
+ ]
+ },
+ {
+ "emoji": "🇬🇲",
+ "aliases": [
+ "gambia"
+ ]
+ },
+ {
+ "emoji": "🎲",
+ "aliases": [
+ "game_die"
+ ]
+ },
+ {
+ "emoji": "🧄",
+ "aliases": [
+ "garlic"
+ ]
+ },
+ {
+ "emoji": "🇬🇧",
+ "aliases": [
+ "gb",
+ "uk"
+ ]
+ },
+ {
+ "emoji": "⚙️",
+ "aliases": [
+ "gear"
+ ]
+ },
+ {
+ "emoji": "💎",
+ "aliases": [
+ "gem"
+ ]
+ },
+ {
+ "emoji": "♊",
+ "aliases": [
+ "gemini"
+ ]
+ },
+ {
+ "emoji": "🧞",
+ "aliases": [
+ "genie"
+ ]
+ },
+ {
+ "emoji": "🧞♂️",
+ "aliases": [
+ "genie_man"
+ ]
+ },
+ {
+ "emoji": "🧞♀️",
+ "aliases": [
+ "genie_woman"
+ ]
+ },
+ {
+ "emoji": "🇬🇪",
+ "aliases": [
+ "georgia"
+ ]
+ },
+ {
+ "emoji": "🇬🇭",
+ "aliases": [
+ "ghana"
+ ]
+ },
+ {
+ "emoji": "👻",
+ "aliases": [
+ "ghost"
+ ]
+ },
+ {
+ "emoji": "🇬🇮",
+ "aliases": [
+ "gibraltar"
+ ]
+ },
+ {
+ "emoji": "🎁",
+ "aliases": [
+ "gift"
+ ]
+ },
+ {
+ "emoji": "💝",
+ "aliases": [
+ "gift_heart"
+ ]
+ },
+ {
+ "emoji": "🫚",
+ "aliases": [
+ "ginger_root"
+ ]
+ },
+ {
+ "emoji": "🦒",
+ "aliases": [
+ "giraffe"
+ ]
+ },
+ {
+ "emoji": "👧",
+ "aliases": [
+ "girl"
+ ]
+ },
+ {
+ "emoji": "🌐",
+ "aliases": [
+ "globe_with_meridians"
+ ]
+ },
+ {
+ "emoji": "🧤",
+ "aliases": [
+ "gloves"
+ ]
+ },
+ {
+ "emoji": "🥅",
+ "aliases": [
+ "goal_net"
+ ]
+ },
+ {
+ "emoji": "🐐",
+ "aliases": [
+ "goat"
+ ]
+ },
+ {
+ "emoji": "🥽",
+ "aliases": [
+ "goggles"
+ ]
+ },
+ {
+ "emoji": "⛳",
+ "aliases": [
+ "golf"
+ ]
+ },
+ {
+ "emoji": "🏌️",
+ "aliases": [
+ "golfing"
+ ]
+ },
+ {
+ "emoji": "🏌️♂️",
+ "aliases": [
+ "golfing_man"
+ ]
+ },
+ {
+ "emoji": "🏌️♀️",
+ "aliases": [
+ "golfing_woman"
+ ]
+ },
+ {
+ "emoji": "🪿",
+ "aliases": [
+ "goose"
+ ]
+ },
+ {
+ "emoji": "🦍",
+ "aliases": [
+ "gorilla"
+ ]
+ },
+ {
+ "emoji": "🍇",
+ "aliases": [
+ "grapes"
+ ]
+ },
+ {
+ "emoji": "🇬🇷",
+ "aliases": [
+ "greece"
+ ]
+ },
+ {
+ "emoji": "🍏",
+ "aliases": [
+ "green_apple"
+ ]
+ },
+ {
+ "emoji": "📗",
+ "aliases": [
+ "green_book"
+ ]
+ },
+ {
+ "emoji": "🟢",
+ "aliases": [
+ "green_circle"
+ ]
+ },
+ {
+ "emoji": "💚",
+ "aliases": [
+ "green_heart"
+ ]
+ },
+ {
+ "emoji": "🥗",
+ "aliases": [
+ "green_salad"
+ ]
+ },
+ {
+ "emoji": "🟩",
+ "aliases": [
+ "green_square"
+ ]
+ },
+ {
+ "emoji": "🇬🇱",
+ "aliases": [
+ "greenland"
+ ]
+ },
+ {
+ "emoji": "🇬🇩",
+ "aliases": [
+ "grenada"
+ ]
+ },
+ {
+ "emoji": "❕",
+ "aliases": [
+ "grey_exclamation"
+ ]
+ },
+ {
+ "emoji": "🩶",
+ "aliases": [
+ "grey_heart"
+ ]
+ },
+ {
+ "emoji": "❔",
+ "aliases": [
+ "grey_question"
+ ]
+ },
+ {
+ "emoji": "😬",
+ "aliases": [
+ "grimacing"
+ ]
+ },
+ {
+ "emoji": "😁",
+ "aliases": [
+ "grin"
+ ]
+ },
+ {
+ "emoji": "😀",
+ "aliases": [
+ "grinning"
+ ]
+ },
+ {
+ "emoji": "🇬🇵",
+ "aliases": [
+ "guadeloupe"
+ ]
+ },
+ {
+ "emoji": "🇬🇺",
+ "aliases": [
+ "guam"
+ ]
+ },
+ {
+ "emoji": "💂",
+ "aliases": [
+ "guard"
+ ]
+ },
+ {
+ "emoji": "💂♂️",
+ "aliases": [
+ "guardsman"
+ ]
+ },
+ {
+ "emoji": "💂♀️",
+ "aliases": [
+ "guardswoman"
+ ]
+ },
+ {
+ "emoji": "🇬🇹",
+ "aliases": [
+ "guatemala"
+ ]
+ },
+ {
+ "emoji": "🇬🇬",
+ "aliases": [
+ "guernsey"
+ ]
+ },
+ {
+ "emoji": "🦮",
+ "aliases": [
+ "guide_dog"
+ ]
+ },
+ {
+ "emoji": "🇬🇳",
+ "aliases": [
+ "guinea"
+ ]
+ },
+ {
+ "emoji": "🇬🇼",
+ "aliases": [
+ "guinea_bissau"
+ ]
+ },
+ {
+ "emoji": "🎸",
+ "aliases": [
+ "guitar"
+ ]
+ },
+ {
+ "emoji": "🔫",
+ "aliases": [
+ "gun"
+ ]
+ },
+ {
+ "emoji": "🇬🇾",
+ "aliases": [
+ "guyana"
+ ]
+ },
+ {
+ "emoji": "🪮",
+ "aliases": [
+ "hair_pick"
+ ]
+ },
+ {
+ "emoji": "💇",
+ "aliases": [
+ "haircut"
+ ]
+ },
+ {
+ "emoji": "💇♂️",
+ "aliases": [
+ "haircut_man"
+ ]
+ },
+ {
+ "emoji": "💇♀️",
+ "aliases": [
+ "haircut_woman"
+ ]
+ },
+ {
+ "emoji": "🇭🇹",
+ "aliases": [
+ "haiti"
+ ]
+ },
+ {
+ "emoji": "🍔",
+ "aliases": [
+ "hamburger"
+ ]
+ },
+ {
+ "emoji": "🔨",
+ "aliases": [
+ "hammer"
+ ]
+ },
+ {
+ "emoji": "⚒️",
+ "aliases": [
+ "hammer_and_pick"
+ ]
+ },
+ {
+ "emoji": "🛠️",
+ "aliases": [
+ "hammer_and_wrench"
+ ]
+ },
+ {
+ "emoji": "🪬",
+ "aliases": [
+ "hamsa"
+ ]
+ },
+ {
+ "emoji": "🐹",
+ "aliases": [
+ "hamster"
+ ]
+ },
+ {
+ "emoji": "✋",
+ "aliases": [
+ "hand",
+ "raised_hand"
+ ]
+ },
+ {
+ "emoji": "🤭",
+ "aliases": [
+ "hand_over_mouth"
+ ]
+ },
+ {
+ "emoji": "🫰",
+ "aliases": [
+ "hand_with_index_finger_and_thumb_crossed"
+ ]
+ },
+ {
+ "emoji": "👜",
+ "aliases": [
+ "handbag"
+ ]
+ },
+ {
+ "emoji": "🤾",
+ "aliases": [
+ "handball_person"
+ ]
+ },
+ {
+ "emoji": "🤝",
+ "aliases": [
+ "handshake"
+ ]
+ },
+ {
+ "emoji": "💩",
+ "aliases": [
+ "hankey",
+ "poop",
+ "shit"
+ ]
+ },
+ {
+        "emoji": "🪉",
+ "aliases": [
+ "harp"
+ ]
+ },
+ {
+ "emoji": "#️⃣",
+ "aliases": [
+ "hash"
+ ]
+ },
+ {
+ "emoji": "🐥",
+ "aliases": [
+ "hatched_chick"
+ ]
+ },
+ {
+ "emoji": "🐣",
+ "aliases": [
+ "hatching_chick"
+ ]
+ },
+ {
+ "emoji": "🙂↔️",
+ "aliases": [
+ "head_shaking_horizontally"
+ ]
+ },
+ {
+ "emoji": "🙂↕️",
+ "aliases": [
+ "head_shaking_vertically"
+ ]
+ },
+ {
+ "emoji": "🎧",
+ "aliases": [
+ "headphones"
+ ]
+ },
+ {
+ "emoji": "🪦",
+ "aliases": [
+ "headstone"
+ ]
+ },
+ {
+ "emoji": "🧑⚕️",
+ "aliases": [
+ "health_worker"
+ ]
+ },
+ {
+ "emoji": "🙉",
+ "aliases": [
+ "hear_no_evil"
+ ]
+ },
+ {
+ "emoji": "🇭🇲",
+ "aliases": [
+ "heard_mcdonald_islands"
+ ]
+ },
+ {
+ "emoji": "❤️",
+ "aliases": [
+ "heart"
+ ]
+ },
+ {
+ "emoji": "💟",
+ "aliases": [
+ "heart_decoration"
+ ]
+ },
+ {
+ "emoji": "😍",
+ "aliases": [
+ "heart_eyes"
+ ]
+ },
+ {
+ "emoji": "😻",
+ "aliases": [
+ "heart_eyes_cat"
+ ]
+ },
+ {
+ "emoji": "🫶",
+ "aliases": [
+ "heart_hands"
+ ]
+ },
+ {
+ "emoji": "❤️🔥",
+ "aliases": [
+ "heart_on_fire"
+ ]
+ },
+ {
+ "emoji": "💓",
+ "aliases": [
+ "heartbeat"
+ ]
+ },
+ {
+ "emoji": "💗",
+ "aliases": [
+ "heartpulse"
+ ]
+ },
+ {
+ "emoji": "♥️",
+ "aliases": [
+ "hearts"
+ ]
+ },
+ {
+ "emoji": "✔️",
+ "aliases": [
+ "heavy_check_mark"
+ ]
+ },
+ {
+ "emoji": "➗",
+ "aliases": [
+ "heavy_division_sign"
+ ]
+ },
+ {
+ "emoji": "💲",
+ "aliases": [
+ "heavy_dollar_sign"
+ ]
+ },
+ {
+ "emoji": "🟰",
+ "aliases": [
+ "heavy_equals_sign"
+ ]
+ },
+ {
+ "emoji": "❣️",
+ "aliases": [
+ "heavy_heart_exclamation"
+ ]
+ },
+ {
+ "emoji": "➖",
+ "aliases": [
+ "heavy_minus_sign"
+ ]
+ },
+ {
+ "emoji": "✖️",
+ "aliases": [
+ "heavy_multiplication_x"
+ ]
+ },
+ {
+ "emoji": "➕",
+ "aliases": [
+ "heavy_plus_sign"
+ ]
+ },
+ {
+ "emoji": "🦔",
+ "aliases": [
+ "hedgehog"
+ ]
+ },
+ {
+ "emoji": "🚁",
+ "aliases": [
+ "helicopter"
+ ]
+ },
+ {
+ "emoji": "🌿",
+ "aliases": [
+ "herb"
+ ]
+ },
+ {
+ "emoji": "🌺",
+ "aliases": [
+ "hibiscus"
+ ]
+ },
+ {
+ "emoji": "🔆",
+ "aliases": [
+ "high_brightness"
+ ]
+ },
+ {
+ "emoji": "👠",
+ "aliases": [
+ "high_heel"
+ ]
+ },
+ {
+ "emoji": "🥾",
+ "aliases": [
+ "hiking_boot"
+ ]
+ },
+ {
+ "emoji": "🛕",
+ "aliases": [
+ "hindu_temple"
+ ]
+ },
+ {
+ "emoji": "🦛",
+ "aliases": [
+ "hippopotamus"
+ ]
+ },
+ {
+ "emoji": "🔪",
+ "aliases": [
+ "hocho",
+ "knife"
+ ]
+ },
+ {
+ "emoji": "🕳️",
+ "aliases": [
+ "hole"
+ ]
+ },
+ {
+ "emoji": "🇭🇳",
+ "aliases": [
+ "honduras"
+ ]
+ },
+ {
+ "emoji": "🍯",
+ "aliases": [
+ "honey_pot"
+ ]
+ },
+ {
+ "emoji": "🇭🇰",
+ "aliases": [
+ "hong_kong"
+ ]
+ },
+ {
+ "emoji": "🪝",
+ "aliases": [
+ "hook"
+ ]
+ },
+ {
+ "emoji": "🐴",
+ "aliases": [
+ "horse"
+ ]
+ },
+ {
+ "emoji": "🏇",
+ "aliases": [
+ "horse_racing"
+ ]
+ },
+ {
+ "emoji": "🏥",
+ "aliases": [
+ "hospital"
+ ]
+ },
+ {
+ "emoji": "🥵",
+ "aliases": [
+ "hot_face"
+ ]
+ },
+ {
+ "emoji": "🌶️",
+ "aliases": [
+ "hot_pepper"
+ ]
+ },
+ {
+ "emoji": "🌭",
+ "aliases": [
+ "hotdog"
+ ]
+ },
+ {
+ "emoji": "🏨",
+ "aliases": [
+ "hotel"
+ ]
+ },
+ {
+ "emoji": "♨️",
+ "aliases": [
+ "hotsprings"
+ ]
+ },
+ {
+ "emoji": "⌛",
+ "aliases": [
+ "hourglass"
+ ]
+ },
+ {
+ "emoji": "⏳",
+ "aliases": [
+ "hourglass_flowing_sand"
+ ]
+ },
+ {
+ "emoji": "🏠",
+ "aliases": [
+ "house"
+ ]
+ },
+ {
+ "emoji": "🏡",
+ "aliases": [
+ "house_with_garden"
+ ]
+ },
+ {
+ "emoji": "🏘️",
+ "aliases": [
+ "houses"
+ ]
+ },
+ {
+ "emoji": "🤗",
+ "aliases": [
+ "hugs"
+ ]
+ },
+ {
+ "emoji": "🇭🇺",
+ "aliases": [
+ "hungary"
+ ]
+ },
+ {
+ "emoji": "😯",
+ "aliases": [
+ "hushed"
+ ]
+ },
+ {
+ "emoji": "🛖",
+ "aliases": [
+ "hut"
+ ]
+ },
+ {
+ "emoji": "🪻",
+ "aliases": [
+ "hyacinth"
+ ]
+ },
+ {
+ "emoji": "🍨",
+ "aliases": [
+ "ice_cream"
+ ]
+ },
+ {
+ "emoji": "🧊",
+ "aliases": [
+ "ice_cube"
+ ]
+ },
+ {
+ "emoji": "🏒",
+ "aliases": [
+ "ice_hockey"
+ ]
+ },
+ {
+ "emoji": "⛸️",
+ "aliases": [
+ "ice_skate"
+ ]
+ },
+ {
+ "emoji": "🍦",
+ "aliases": [
+ "icecream"
+ ]
+ },
+ {
+ "emoji": "🇮🇸",
+ "aliases": [
+ "iceland"
+ ]
+ },
+ {
+ "emoji": "🆔",
+ "aliases": [
+ "id"
+ ]
+ },
+ {
+ "emoji": "🪪",
+ "aliases": [
+ "identification_card"
+ ]
+ },
+ {
+ "emoji": "🉐",
+ "aliases": [
+ "ideograph_advantage"
+ ]
+ },
+ {
+ "emoji": "👿",
+ "aliases": [
+ "imp"
+ ]
+ },
+ {
+ "emoji": "📥",
+ "aliases": [
+ "inbox_tray"
+ ]
+ },
+ {
+ "emoji": "📨",
+ "aliases": [
+ "incoming_envelope"
+ ]
+ },
+ {
+ "emoji": "🫵",
+ "aliases": [
+ "index_pointing_at_the_viewer"
+ ]
+ },
+ {
+ "emoji": "🇮🇳",
+ "aliases": [
+ "india"
+ ]
+ },
+ {
+ "emoji": "🇮🇩",
+ "aliases": [
+ "indonesia"
+ ]
+ },
+ {
+ "emoji": "♾️",
+ "aliases": [
+ "infinity"
+ ]
+ },
+ {
+ "emoji": "ℹ️",
+ "aliases": [
+ "information_source"
+ ]
+ },
+ {
+ "emoji": "😇",
+ "aliases": [
+ "innocent"
+ ]
+ },
+ {
+ "emoji": "⁉️",
+ "aliases": [
+ "interrobang"
+ ]
+ },
+ {
+ "emoji": "📱",
+ "aliases": [
+ "iphone"
+ ]
+ },
+ {
+ "emoji": "🇮🇷",
+ "aliases": [
+ "iran"
+ ]
+ },
+ {
+ "emoji": "🇮🇶",
+ "aliases": [
+ "iraq"
+ ]
+ },
+ {
+ "emoji": "🇮🇪",
+ "aliases": [
+ "ireland"
+ ]
+ },
+ {
+ "emoji": "🇮🇲",
+ "aliases": [
+ "isle_of_man"
+ ]
+ },
+ {
+ "emoji": "🇮🇱",
+ "aliases": [
+ "israel"
+ ]
+ },
+ {
+ "emoji": "🇮🇹",
+ "aliases": [
+ "it"
+ ]
+ },
+ {
+ "emoji": "🏮",
+ "aliases": [
+ "izakaya_lantern",
+ "lantern"
+ ]
+ },
+ {
+ "emoji": "🎃",
+ "aliases": [
+ "jack_o_lantern"
+ ]
+ },
+ {
+ "emoji": "🇯🇲",
+ "aliases": [
+ "jamaica"
+ ]
+ },
+ {
+ "emoji": "🗾",
+ "aliases": [
+ "japan"
+ ]
+ },
+ {
+ "emoji": "🏯",
+ "aliases": [
+ "japanese_castle"
+ ]
+ },
+ {
+ "emoji": "👺",
+ "aliases": [
+ "japanese_goblin"
+ ]
+ },
+ {
+ "emoji": "👹",
+ "aliases": [
+ "japanese_ogre"
+ ]
+ },
+ {
+ "emoji": "🫙",
+ "aliases": [
+ "jar"
+ ]
+ },
+ {
+ "emoji": "👖",
+ "aliases": [
+ "jeans"
+ ]
+ },
+ {
+ "emoji": "🪼",
+ "aliases": [
+ "jellyfish"
+ ]
+ },
+ {
+ "emoji": "🇯🇪",
+ "aliases": [
+ "jersey"
+ ]
+ },
+ {
+ "emoji": "🧩",
+ "aliases": [
+ "jigsaw"
+ ]
+ },
+ {
+ "emoji": "🇯🇴",
+ "aliases": [
+ "jordan"
+ ]
+ },
+ {
+ "emoji": "😂",
+ "aliases": [
+ "joy"
+ ]
+ },
+ {
+ "emoji": "😹",
+ "aliases": [
+ "joy_cat"
+ ]
+ },
+ {
+ "emoji": "🕹️",
+ "aliases": [
+ "joystick"
+ ]
+ },
+ {
+ "emoji": "🇯🇵",
+ "aliases": [
+ "jp"
+ ]
+ },
+ {
+ "emoji": "🧑⚖️",
+ "aliases": [
+ "judge"
+ ]
+ },
+ {
+ "emoji": "🤹",
+ "aliases": [
+ "juggling_person"
+ ]
+ },
+ {
+ "emoji": "🕋",
+ "aliases": [
+ "kaaba"
+ ]
+ },
+ {
+ "emoji": "🦘",
+ "aliases": [
+ "kangaroo"
+ ]
+ },
+ {
+ "emoji": "🇰🇿",
+ "aliases": [
+ "kazakhstan"
+ ]
+ },
+ {
+ "emoji": "🇰🇪",
+ "aliases": [
+ "kenya"
+ ]
+ },
+ {
+ "emoji": "🔑",
+ "aliases": [
+ "key"
+ ]
+ },
+ {
+ "emoji": "⌨️",
+ "aliases": [
+ "keyboard"
+ ]
+ },
+ {
+ "emoji": "🔟",
+ "aliases": [
+ "keycap_ten"
+ ]
+ },
+ {
+ "emoji": "🪯",
+ "aliases": [
+ "khanda"
+ ]
+ },
+ {
+ "emoji": "🛴",
+ "aliases": [
+ "kick_scooter"
+ ]
+ },
+ {
+ "emoji": "👘",
+ "aliases": [
+ "kimono"
+ ]
+ },
+ {
+ "emoji": "🇰🇮",
+ "aliases": [
+ "kiribati"
+ ]
+ },
+ {
+ "emoji": "💋",
+ "aliases": [
+ "kiss"
+ ]
+ },
+ {
+ "emoji": "😗",
+ "aliases": [
+ "kissing"
+ ]
+ },
+ {
+ "emoji": "😽",
+ "aliases": [
+ "kissing_cat"
+ ]
+ },
+ {
+ "emoji": "😚",
+ "aliases": [
+ "kissing_closed_eyes"
+ ]
+ },
+ {
+ "emoji": "😘",
+ "aliases": [
+ "kissing_heart"
+ ]
+ },
+ {
+ "emoji": "😙",
+ "aliases": [
+ "kissing_smiling_eyes"
+ ]
+ },
+ {
+ "emoji": "🪁",
+ "aliases": [
+ "kite"
+ ]
+ },
+ {
+ "emoji": "🥝",
+ "aliases": [
+ "kiwi_fruit"
+ ]
+ },
+ {
+ "emoji": "🧎♂️",
+ "aliases": [
+ "kneeling_man"
+ ]
+ },
+ {
+ "emoji": "🧎",
+ "aliases": [
+ "kneeling_person"
+ ]
+ },
+ {
+ "emoji": "🧎♀️",
+ "aliases": [
+ "kneeling_woman"
+ ]
+ },
+ {
+ "emoji": "🪢",
+ "aliases": [
+ "knot"
+ ]
+ },
+ {
+ "emoji": "🐨",
+ "aliases": [
+ "koala"
+ ]
+ },
+ {
+ "emoji": "🈁",
+ "aliases": [
+ "koko"
+ ]
+ },
+ {
+ "emoji": "🇽🇰",
+ "aliases": [
+ "kosovo"
+ ]
+ },
+ {
+ "emoji": "🇰🇷",
+ "aliases": [
+ "kr"
+ ]
+ },
+ {
+ "emoji": "🇰🇼",
+ "aliases": [
+ "kuwait"
+ ]
+ },
+ {
+ "emoji": "🇰🇬",
+ "aliases": [
+ "kyrgyzstan"
+ ]
+ },
+ {
+ "emoji": "🥼",
+ "aliases": [
+ "lab_coat"
+ ]
+ },
+ {
+ "emoji": "🏷️",
+ "aliases": [
+ "label"
+ ]
+ },
+ {
+ "emoji": "🥍",
+ "aliases": [
+ "lacrosse"
+ ]
+ },
+ {
+ "emoji": "🪜",
+ "aliases": [
+ "ladder"
+ ]
+ },
+ {
+ "emoji": "🐞",
+ "aliases": [
+ "lady_beetle"
+ ]
+ },
+ {
+ "emoji": "🇱🇦",
+ "aliases": [
+ "laos"
+ ]
+ },
+ {
+ "emoji": "🔵",
+ "aliases": [
+ "large_blue_circle"
+ ]
+ },
+ {
+ "emoji": "🔷",
+ "aliases": [
+ "large_blue_diamond"
+ ]
+ },
+ {
+ "emoji": "🔶",
+ "aliases": [
+ "large_orange_diamond"
+ ]
+ },
+ {
+ "emoji": "🌗",
+ "aliases": [
+ "last_quarter_moon"
+ ]
+ },
+ {
+ "emoji": "🌜",
+ "aliases": [
+ "last_quarter_moon_with_face"
+ ]
+ },
+ {
+ "emoji": "✝️",
+ "aliases": [
+ "latin_cross"
+ ]
+ },
+ {
+ "emoji": "🇱🇻",
+ "aliases": [
+ "latvia"
+ ]
+ },
+ {
+ "emoji": "😆",
+ "aliases": [
+ "laughing",
+ "satisfied",
+ "laugh"
+ ]
+ },
+ {
+      "emoji": "🪾",
+ "aliases": [
+ "leafless_tree"
+ ]
+ },
+ {
+ "emoji": "🥬",
+ "aliases": [
+ "leafy_green"
+ ]
+ },
+ {
+ "emoji": "🍃",
+ "aliases": [
+ "leaves"
+ ]
+ },
+ {
+ "emoji": "🇱🇧",
+ "aliases": [
+ "lebanon"
+ ]
+ },
+ {
+ "emoji": "📒",
+ "aliases": [
+ "ledger"
+ ]
+ },
+ {
+ "emoji": "🛅",
+ "aliases": [
+ "left_luggage"
+ ]
+ },
+ {
+ "emoji": "↔️",
+ "aliases": [
+ "left_right_arrow"
+ ]
+ },
+ {
+ "emoji": "🗨️",
+ "aliases": [
+ "left_speech_bubble"
+ ]
+ },
+ {
+ "emoji": "↩️",
+ "aliases": [
+ "leftwards_arrow_with_hook"
+ ]
+ },
+ {
+ "emoji": "🫲",
+ "aliases": [
+ "leftwards_hand"
+ ]
+ },
+ {
+ "emoji": "🫷",
+ "aliases": [
+ "leftwards_pushing_hand"
+ ]
+ },
+ {
+ "emoji": "🦵",
+ "aliases": [
+ "leg"
+ ]
+ },
+ {
+ "emoji": "🍋",
+ "aliases": [
+ "lemon"
+ ]
+ },
+ {
+ "emoji": "♌",
+ "aliases": [
+ "leo"
+ ]
+ },
+ {
+ "emoji": "🐆",
+ "aliases": [
+ "leopard"
+ ]
+ },
+ {
+ "emoji": "🇱🇸",
+ "aliases": [
+ "lesotho"
+ ]
+ },
+ {
+ "emoji": "🎚️",
+ "aliases": [
+ "level_slider"
+ ]
+ },
+ {
+ "emoji": "🇱🇷",
+ "aliases": [
+ "liberia"
+ ]
+ },
+ {
+ "emoji": "♎",
+ "aliases": [
+ "libra"
+ ]
+ },
+ {
+ "emoji": "🇱🇾",
+ "aliases": [
+ "libya"
+ ]
+ },
+ {
+ "emoji": "🇱🇮",
+ "aliases": [
+ "liechtenstein"
+ ]
+ },
+ {
+ "emoji": "🩵",
+ "aliases": [
+ "light_blue_heart"
+ ]
+ },
+ {
+ "emoji": "🚈",
+ "aliases": [
+ "light_rail"
+ ]
+ },
+ {
+ "emoji": "🍋🟩",
+ "aliases": [
+ "lime"
+ ]
+ },
+ {
+ "emoji": "🔗",
+ "aliases": [
+ "link"
+ ]
+ },
+ {
+ "emoji": "🦁",
+ "aliases": [
+ "lion"
+ ]
+ },
+ {
+ "emoji": "👄",
+ "aliases": [
+ "lips"
+ ]
+ },
+ {
+ "emoji": "💄",
+ "aliases": [
+ "lipstick"
+ ]
+ },
+ {
+ "emoji": "🇱🇹",
+ "aliases": [
+ "lithuania"
+ ]
+ },
+ {
+ "emoji": "🦎",
+ "aliases": [
+ "lizard"
+ ]
+ },
+ {
+ "emoji": "🦙",
+ "aliases": [
+ "llama"
+ ]
+ },
+ {
+ "emoji": "🦞",
+ "aliases": [
+ "lobster"
+ ]
+ },
+ {
+ "emoji": "🔒",
+ "aliases": [
+ "lock"
+ ]
+ },
+ {
+ "emoji": "🔏",
+ "aliases": [
+ "lock_with_ink_pen"
+ ]
+ },
+ {
+ "emoji": "🍭",
+ "aliases": [
+ "lollipop"
+ ]
+ },
+ {
+ "emoji": "🪘",
+ "aliases": [
+ "long_drum"
+ ]
+ },
+ {
+ "emoji": "➿",
+ "aliases": [
+ "loop"
+ ]
+ },
+ {
+ "emoji": "🧴",
+ "aliases": [
+ "lotion_bottle"
+ ]
+ },
+ {
+ "emoji": "🪷",
+ "aliases": [
+ "lotus"
+ ]
+ },
+ {
+ "emoji": "🧘",
+ "aliases": [
+ "lotus_position"
+ ]
+ },
+ {
+ "emoji": "🧘♂️",
+ "aliases": [
+ "lotus_position_man"
+ ]
+ },
+ {
+ "emoji": "🧘♀️",
+ "aliases": [
+ "lotus_position_woman"
+ ]
+ },
+ {
+ "emoji": "🔊",
+ "aliases": [
+ "loud_sound"
+ ]
+ },
+ {
+ "emoji": "📢",
+ "aliases": [
+ "loudspeaker"
+ ]
+ },
+ {
+ "emoji": "🏩",
+ "aliases": [
+ "love_hotel"
+ ]
+ },
+ {
+ "emoji": "💌",
+ "aliases": [
+ "love_letter"
+ ]
+ },
+ {
+ "emoji": "🤟",
+ "aliases": [
+ "love_you_gesture"
+ ]
+ },
+ {
+ "emoji": "🪫",
+ "aliases": [
+ "low_battery"
+ ]
+ },
+ {
+ "emoji": "🔅",
+ "aliases": [
+ "low_brightness"
+ ]
+ },
+ {
+ "emoji": "🧳",
+ "aliases": [
+ "luggage"
+ ]
+ },
+ {
+ "emoji": "🫁",
+ "aliases": [
+ "lungs"
+ ]
+ },
+ {
+ "emoji": "🇱🇺",
+ "aliases": [
+ "luxembourg"
+ ]
+ },
+ {
+ "emoji": "🤥",
+ "aliases": [
+ "lying_face"
+ ]
+ },
+ {
+ "emoji": "Ⓜ️",
+ "aliases": [
+ "m"
+ ]
+ },
+ {
+ "emoji": "🇲🇴",
+ "aliases": [
+ "macau"
+ ]
+ },
+ {
+ "emoji": "🇲🇰",
+ "aliases": [
+ "macedonia"
+ ]
+ },
+ {
+ "emoji": "🇲🇬",
+ "aliases": [
+ "madagascar"
+ ]
+ },
+ {
+ "emoji": "🔍",
+ "aliases": [
+ "mag"
+ ]
+ },
+ {
+ "emoji": "🔎",
+ "aliases": [
+ "mag_right"
+ ]
+ },
+ {
+ "emoji": "🧙",
+ "aliases": [
+ "mage"
+ ]
+ },
+ {
+ "emoji": "🧙♂️",
+ "aliases": [
+ "mage_man"
+ ]
+ },
+ {
+ "emoji": "🧙♀️",
+ "aliases": [
+ "mage_woman"
+ ]
+ },
+ {
+ "emoji": "🪄",
+ "aliases": [
+ "magic_wand"
+ ]
+ },
+ {
+ "emoji": "🧲",
+ "aliases": [
+ "magnet"
+ ]
+ },
+ {
+ "emoji": "🀄",
+ "aliases": [
+ "mahjong"
+ ]
+ },
+ {
+ "emoji": "📫",
+ "aliases": [
+ "mailbox"
+ ]
+ },
+ {
+ "emoji": "📪",
+ "aliases": [
+ "mailbox_closed"
+ ]
+ },
+ {
+ "emoji": "📬",
+ "aliases": [
+ "mailbox_with_mail"
+ ]
+ },
+ {
+ "emoji": "📭",
+ "aliases": [
+ "mailbox_with_no_mail"
+ ]
+ },
+ {
+ "emoji": "🇲🇼",
+ "aliases": [
+ "malawi"
+ ]
+ },
+ {
+ "emoji": "🇲🇾",
+ "aliases": [
+ "malaysia"
+ ]
+ },
+ {
+ "emoji": "🇲🇻",
+ "aliases": [
+ "maldives"
+ ]
+ },
+ {
+ "emoji": "🕵️♂️",
+ "aliases": [
+ "male_detective"
+ ]
+ },
+ {
+ "emoji": "♂️",
+ "aliases": [
+ "male_sign"
+ ]
+ },
+ {
+ "emoji": "🇲🇱",
+ "aliases": [
+ "mali"
+ ]
+ },
+ {
+ "emoji": "🇲🇹",
+ "aliases": [
+ "malta"
+ ]
+ },
+ {
+ "emoji": "🦣",
+ "aliases": [
+ "mammoth"
+ ]
+ },
+ {
+ "emoji": "👨",
+ "aliases": [
+ "man"
+ ]
+ },
+ {
+ "emoji": "👨🎨",
+ "aliases": [
+ "man_artist"
+ ]
+ },
+ {
+ "emoji": "👨🚀",
+ "aliases": [
+ "man_astronaut"
+ ]
+ },
+ {
+ "emoji": "🧔♂️",
+ "aliases": [
+ "man_beard"
+ ]
+ },
+ {
+ "emoji": "🤸♂️",
+ "aliases": [
+ "man_cartwheeling"
+ ]
+ },
+ {
+ "emoji": "👨🍳",
+ "aliases": [
+ "man_cook"
+ ]
+ },
+ {
+ "emoji": "🕺",
+ "aliases": [
+ "man_dancing"
+ ]
+ },
+ {
+ "emoji": "🤦♂️",
+ "aliases": [
+ "man_facepalming"
+ ]
+ },
+ {
+ "emoji": "👨🏭",
+ "aliases": [
+ "man_factory_worker"
+ ]
+ },
+ {
+ "emoji": "👨🌾",
+ "aliases": [
+ "man_farmer"
+ ]
+ },
+ {
+ "emoji": "👨🍼",
+ "aliases": [
+ "man_feeding_baby"
+ ]
+ },
+ {
+ "emoji": "👨🚒",
+ "aliases": [
+ "man_firefighter"
+ ]
+ },
+ {
+ "emoji": "👨⚕️",
+ "aliases": [
+ "man_health_worker"
+ ]
+ },
+ {
+ "emoji": "👨🦽",
+ "aliases": [
+ "man_in_manual_wheelchair"
+ ]
+ },
+ {
+ "emoji": "👨🦽➡️",
+ "aliases": [
+ "man_in_manual_wheelchair_facing_right"
+ ]
+ },
+ {
+ "emoji": "👨🦼",
+ "aliases": [
+ "man_in_motorized_wheelchair"
+ ]
+ },
+ {
+ "emoji": "👨🦼➡️",
+ "aliases": [
+ "man_in_motorized_wheelchair_facing_right"
+ ]
+ },
+ {
+ "emoji": "🤵♂️",
+ "aliases": [
+ "man_in_tuxedo"
+ ]
+ },
+ {
+ "emoji": "👨⚖️",
+ "aliases": [
+ "man_judge"
+ ]
+ },
+ {
+ "emoji": "🤹♂️",
+ "aliases": [
+ "man_juggling"
+ ]
+ },
+ {
+ "emoji": "🧎♂️➡️",
+ "aliases": [
+ "man_kneeling_facing_right"
+ ]
+ },
+ {
+ "emoji": "👨🔧",
+ "aliases": [
+ "man_mechanic"
+ ]
+ },
+ {
+ "emoji": "👨💼",
+ "aliases": [
+ "man_office_worker"
+ ]
+ },
+ {
+ "emoji": "👨✈️",
+ "aliases": [
+ "man_pilot"
+ ]
+ },
+ {
+ "emoji": "🤾♂️",
+ "aliases": [
+ "man_playing_handball"
+ ]
+ },
+ {
+ "emoji": "🤽♂️",
+ "aliases": [
+ "man_playing_water_polo"
+ ]
+ },
+ {
+ "emoji": "🏃♂️➡️",
+ "aliases": [
+ "man_running_facing_right"
+ ]
+ },
+ {
+ "emoji": "👨🔬",
+ "aliases": [
+ "man_scientist"
+ ]
+ },
+ {
+ "emoji": "🤷♂️",
+ "aliases": [
+ "man_shrugging"
+ ]
+ },
+ {
+ "emoji": "👨🎤",
+ "aliases": [
+ "man_singer"
+ ]
+ },
+ {
+ "emoji": "👨🎓",
+ "aliases": [
+ "man_student"
+ ]
+ },
+ {
+ "emoji": "👨🏫",
+ "aliases": [
+ "man_teacher"
+ ]
+ },
+ {
+ "emoji": "👨💻",
+ "aliases": [
+ "man_technologist"
+ ]
+ },
+ {
+ "emoji": "🚶♂️➡️",
+ "aliases": [
+ "man_walking_facing_right"
+ ]
+ },
+ {
+ "emoji": "👲",
+ "aliases": [
+ "man_with_gua_pi_mao"
+ ]
+ },
+ {
+ "emoji": "👨🦯",
+ "aliases": [
+ "man_with_probing_cane"
+ ]
+ },
+ {
+ "emoji": "👳♂️",
+ "aliases": [
+ "man_with_turban"
+ ]
+ },
+ {
+ "emoji": "👰♂️",
+ "aliases": [
+ "man_with_veil"
+ ]
+ },
+ {
+ "emoji": "👨🦯➡️",
+ "aliases": [
+ "man_with_white_cane_facing_right"
+ ]
+ },
+ {
+ "emoji": "🥭",
+ "aliases": [
+ "mango"
+ ]
+ },
+ {
+ "emoji": "👞",
+ "aliases": [
+ "mans_shoe",
+ "shoe"
+ ]
+ },
+ {
+ "emoji": "🕰️",
+ "aliases": [
+ "mantelpiece_clock"
+ ]
+ },
+ {
+ "emoji": "🦽",
+ "aliases": [
+ "manual_wheelchair"
+ ]
+ },
+ {
+ "emoji": "🍁",
+ "aliases": [
+ "maple_leaf"
+ ]
+ },
+ {
+ "emoji": "🪇",
+ "aliases": [
+ "maracas"
+ ]
+ },
+ {
+ "emoji": "🇲🇭",
+ "aliases": [
+ "marshall_islands"
+ ]
+ },
+ {
+ "emoji": "🥋",
+ "aliases": [
+ "martial_arts_uniform"
+ ]
+ },
+ {
+ "emoji": "🇲🇶",
+ "aliases": [
+ "martinique"
+ ]
+ },
+ {
+ "emoji": "😷",
+ "aliases": [
+ "mask"
+ ]
+ },
+ {
+ "emoji": "💆",
+ "aliases": [
+ "massage"
+ ]
+ },
+ {
+ "emoji": "💆♂️",
+ "aliases": [
+ "massage_man"
+ ]
+ },
+ {
+ "emoji": "💆♀️",
+ "aliases": [
+ "massage_woman"
+ ]
+ },
+ {
+ "emoji": "🧉",
+ "aliases": [
+ "mate"
+ ]
+ },
+ {
+ "emoji": "🇲🇷",
+ "aliases": [
+ "mauritania"
+ ]
+ },
+ {
+ "emoji": "🇲🇺",
+ "aliases": [
+ "mauritius"
+ ]
+ },
+ {
+ "emoji": "🇾🇹",
+ "aliases": [
+ "mayotte"
+ ]
+ },
+ {
+ "emoji": "🍖",
+ "aliases": [
+ "meat_on_bone"
+ ]
+ },
+ {
+ "emoji": "🧑🔧",
+ "aliases": [
+ "mechanic"
+ ]
+ },
+ {
+ "emoji": "🦾",
+ "aliases": [
+ "mechanical_arm"
+ ]
+ },
+ {
+ "emoji": "🦿",
+ "aliases": [
+ "mechanical_leg"
+ ]
+ },
+ {
+ "emoji": "🎖️",
+ "aliases": [
+ "medal_military"
+ ]
+ },
+ {
+ "emoji": "🏅",
+ "aliases": [
+ "medal_sports"
+ ]
+ },
+ {
+ "emoji": "⚕️",
+ "aliases": [
+ "medical_symbol"
+ ]
+ },
+ {
+ "emoji": "📣",
+ "aliases": [
+ "mega"
+ ]
+ },
+ {
+ "emoji": "🍈",
+ "aliases": [
+ "melon"
+ ]
+ },
+ {
+ "emoji": "🫠",
+ "aliases": [
+ "melting_face"
+ ]
+ },
+ {
+ "emoji": "📝",
+ "aliases": [
+ "memo",
+ "pencil"
+ ]
+ },
+ {
+ "emoji": "🤼♂️",
+ "aliases": [
+ "men_wrestling"
+ ]
+ },
+ {
+ "emoji": "❤️🩹",
+ "aliases": [
+ "mending_heart"
+ ]
+ },
+ {
+ "emoji": "🕎",
+ "aliases": [
+ "menorah"
+ ]
+ },
+ {
+ "emoji": "🚹",
+ "aliases": [
+ "mens"
+ ]
+ },
+ {
+ "emoji": "🧜♀️",
+ "aliases": [
+ "mermaid"
+ ]
+ },
+ {
+ "emoji": "🧜♂️",
+ "aliases": [
+ "merman"
+ ]
+ },
+ {
+ "emoji": "🧜",
+ "aliases": [
+ "merperson"
+ ]
+ },
+ {
+ "emoji": "🤘",
+ "aliases": [
+ "metal"
+ ]
+ },
+ {
+ "emoji": "🚇",
+ "aliases": [
+ "metro"
+ ]
+ },
+ {
+ "emoji": "🇲🇽",
+ "aliases": [
+ "mexico"
+ ]
+ },
+ {
+ "emoji": "🦠",
+ "aliases": [
+ "microbe"
+ ]
+ },
+ {
+ "emoji": "🇫🇲",
+ "aliases": [
+ "micronesia"
+ ]
+ },
+ {
+ "emoji": "🎤",
+ "aliases": [
+ "microphone"
+ ]
+ },
+ {
+ "emoji": "🔬",
+ "aliases": [
+ "microscope"
+ ]
+ },
+ {
+ "emoji": "🖕",
+ "aliases": [
+ "middle_finger",
+ "fu"
+ ]
+ },
+ {
+ "emoji": "🪖",
+ "aliases": [
+ "military_helmet"
+ ]
+ },
+ {
+ "emoji": "🥛",
+ "aliases": [
+ "milk_glass"
+ ]
+ },
+ {
+ "emoji": "🌌",
+ "aliases": [
+ "milky_way"
+ ]
+ },
+ {
+ "emoji": "🚐",
+ "aliases": [
+ "minibus"
+ ]
+ },
+ {
+ "emoji": "💽",
+ "aliases": [
+ "minidisc"
+ ]
+ },
+ {
+ "emoji": "🪞",
+ "aliases": [
+ "mirror"
+ ]
+ },
+ {
+ "emoji": "🪩",
+ "aliases": [
+ "mirror_ball"
+ ]
+ },
+ {
+ "emoji": "📴",
+ "aliases": [
+ "mobile_phone_off"
+ ]
+ },
+ {
+ "emoji": "🇲🇩",
+ "aliases": [
+ "moldova"
+ ]
+ },
+ {
+ "emoji": "🇲🇨",
+ "aliases": [
+ "monaco"
+ ]
+ },
+ {
+ "emoji": "🤑",
+ "aliases": [
+ "money_mouth_face"
+ ]
+ },
+ {
+ "emoji": "💸",
+ "aliases": [
+ "money_with_wings"
+ ]
+ },
+ {
+ "emoji": "💰",
+ "aliases": [
+ "moneybag"
+ ]
+ },
+ {
+ "emoji": "🇲🇳",
+ "aliases": [
+ "mongolia"
+ ]
+ },
+ {
+ "emoji": "🐒",
+ "aliases": [
+ "monkey"
+ ]
+ },
+ {
+ "emoji": "🐵",
+ "aliases": [
+ "monkey_face"
+ ]
+ },
+ {
+ "emoji": "🧐",
+ "aliases": [
+ "monocle_face"
+ ]
+ },
+ {
+ "emoji": "🚝",
+ "aliases": [
+ "monorail"
+ ]
+ },
+ {
+ "emoji": "🇲🇪",
+ "aliases": [
+ "montenegro"
+ ]
+ },
+ {
+ "emoji": "🇲🇸",
+ "aliases": [
+ "montserrat"
+ ]
+ },
+ {
+ "emoji": "🌔",
+ "aliases": [
+ "moon",
+ "waxing_gibbous_moon"
+ ]
+ },
+ {
+ "emoji": "🥮",
+ "aliases": [
+ "moon_cake"
+ ]
+ },
+ {
+ "emoji": "🫎",
+ "aliases": [
+ "moose"
+ ]
+ },
+ {
+ "emoji": "🇲🇦",
+ "aliases": [
+ "morocco"
+ ]
+ },
+ {
+ "emoji": "🎓",
+ "aliases": [
+ "mortar_board"
+ ]
+ },
+ {
+ "emoji": "🕌",
+ "aliases": [
+ "mosque"
+ ]
+ },
+ {
+ "emoji": "🦟",
+ "aliases": [
+ "mosquito"
+ ]
+ },
+ {
+ "emoji": "🛥️",
+ "aliases": [
+ "motor_boat"
+ ]
+ },
+ {
+ "emoji": "🛵",
+ "aliases": [
+ "motor_scooter"
+ ]
+ },
+ {
+ "emoji": "🏍️",
+ "aliases": [
+ "motorcycle"
+ ]
+ },
+ {
+ "emoji": "🦼",
+ "aliases": [
+ "motorized_wheelchair"
+ ]
+ },
+ {
+ "emoji": "🛣️",
+ "aliases": [
+ "motorway"
+ ]
+ },
+ {
+ "emoji": "🗻",
+ "aliases": [
+ "mount_fuji"
+ ]
+ },
+ {
+ "emoji": "⛰️",
+ "aliases": [
+ "mountain"
+ ]
+ },
+ {
+ "emoji": "🚵",
+ "aliases": [
+ "mountain_bicyclist"
+ ]
+ },
+ {
+ "emoji": "🚵♂️",
+ "aliases": [
+ "mountain_biking_man"
+ ]
+ },
+ {
+ "emoji": "🚵♀️",
+ "aliases": [
+ "mountain_biking_woman"
+ ]
+ },
+ {
+ "emoji": "🚠",
+ "aliases": [
+ "mountain_cableway"
+ ]
+ },
+ {
+ "emoji": "🚞",
+ "aliases": [
+ "mountain_railway"
+ ]
+ },
+ {
+ "emoji": "🏔️",
+ "aliases": [
+ "mountain_snow"
+ ]
+ },
+ {
+ "emoji": "🐭",
+ "aliases": [
+ "mouse"
+ ]
+ },
+ {
+ "emoji": "🐁",
+ "aliases": [
+ "mouse2"
+ ]
+ },
+ {
+ "emoji": "🪤",
+ "aliases": [
+ "mouse_trap"
+ ]
+ },
+ {
+ "emoji": "🎥",
+ "aliases": [
+ "movie_camera"
+ ]
+ },
+ {
+ "emoji": "🗿",
+ "aliases": [
+ "moyai"
+ ]
+ },
+ {
+ "emoji": "🇲🇿",
+ "aliases": [
+ "mozambique"
+ ]
+ },
+ {
+ "emoji": "🤶",
+ "aliases": [
+ "mrs_claus"
+ ]
+ },
+ {
+ "emoji": "💪",
+ "aliases": [
+ "muscle"
+ ]
+ },
+ {
+ "emoji": "🍄",
+ "aliases": [
+ "mushroom"
+ ]
+ },
+ {
+ "emoji": "🎹",
+ "aliases": [
+ "musical_keyboard"
+ ]
+ },
+ {
+ "emoji": "🎵",
+ "aliases": [
+ "musical_note"
+ ]
+ },
+ {
+ "emoji": "🎼",
+ "aliases": [
+ "musical_score"
+ ]
+ },
+ {
+ "emoji": "🔇",
+ "aliases": [
+ "mute"
+ ]
+ },
+ {
+ "emoji": "🧑🎄",
+ "aliases": [
+ "mx_claus"
+ ]
+ },
+ {
+ "emoji": "🇲🇲",
+ "aliases": [
+ "myanmar"
+ ]
+ },
+ {
+ "emoji": "💅",
+ "aliases": [
+ "nail_care"
+ ]
+ },
+ {
+ "emoji": "📛",
+ "aliases": [
+ "name_badge"
+ ]
+ },
+ {
+ "emoji": "🇳🇦",
+ "aliases": [
+ "namibia"
+ ]
+ },
+ {
+ "emoji": "🏞️",
+ "aliases": [
+ "national_park"
+ ]
+ },
+ {
+ "emoji": "🇳🇷",
+ "aliases": [
+ "nauru"
+ ]
+ },
+ {
+ "emoji": "🤢",
+ "aliases": [
+ "nauseated_face"
+ ]
+ },
+ {
+ "emoji": "🧿",
+ "aliases": [
+ "nazar_amulet"
+ ]
+ },
+ {
+ "emoji": "👔",
+ "aliases": [
+ "necktie"
+ ]
+ },
+ {
+ "emoji": "❎",
+ "aliases": [
+ "negative_squared_cross_mark"
+ ]
+ },
+ {
+ "emoji": "🇳🇵",
+ "aliases": [
+ "nepal"
+ ]
+ },
+ {
+ "emoji": "🤓",
+ "aliases": [
+ "nerd_face"
+ ]
+ },
+ {
+ "emoji": "🪺",
+ "aliases": [
+ "nest_with_eggs"
+ ]
+ },
+ {
+ "emoji": "🪆",
+ "aliases": [
+ "nesting_dolls"
+ ]
+ },
+ {
+ "emoji": "🇳🇱",
+ "aliases": [
+ "netherlands"
+ ]
+ },
+ {
+ "emoji": "😐",
+ "aliases": [
+ "neutral_face"
+ ]
+ },
+ {
+ "emoji": "🆕",
+ "aliases": [
+ "new"
+ ]
+ },
+ {
+ "emoji": "🇳🇨",
+ "aliases": [
+ "new_caledonia"
+ ]
+ },
+ {
+ "emoji": "🌑",
+ "aliases": [
+ "new_moon"
+ ]
+ },
+ {
+ "emoji": "🌚",
+ "aliases": [
+ "new_moon_with_face"
+ ]
+ },
+ {
+ "emoji": "🇳🇿",
+ "aliases": [
+ "new_zealand"
+ ]
+ },
+ {
+ "emoji": "📰",
+ "aliases": [
+ "newspaper"
+ ]
+ },
+ {
+ "emoji": "🗞️",
+ "aliases": [
+ "newspaper_roll"
+ ]
+ },
+ {
+ "emoji": "⏭️",
+ "aliases": [
+ "next_track_button"
+ ]
+ },
+ {
+ "emoji": "🆖",
+ "aliases": [
+ "ng"
+ ]
+ },
+ {
+ "emoji": "🇳🇮",
+ "aliases": [
+ "nicaragua"
+ ]
+ },
+ {
+ "emoji": "🇳🇪",
+ "aliases": [
+ "niger"
+ ]
+ },
+ {
+ "emoji": "🇳🇬",
+ "aliases": [
+ "nigeria"
+ ]
+ },
+ {
+ "emoji": "🌃",
+ "aliases": [
+ "night_with_stars"
+ ]
+ },
+ {
+ "emoji": "9️⃣",
+ "aliases": [
+ "nine"
+ ]
+ },
+ {
+ "emoji": "🥷",
+ "aliases": [
+ "ninja"
+ ]
+ },
+ {
+ "emoji": "🇳🇺",
+ "aliases": [
+ "niue"
+ ]
+ },
+ {
+ "emoji": "🔕",
+ "aliases": [
+ "no_bell"
+ ]
+ },
+ {
+ "emoji": "🚳",
+ "aliases": [
+ "no_bicycles"
+ ]
+ },
+ {
+ "emoji": "⛔",
+ "aliases": [
+ "no_entry"
+ ]
+ },
+ {
+ "emoji": "🚫",
+ "aliases": [
+ "no_entry_sign"
+ ]
+ },
+ {
+ "emoji": "🙅",
+ "aliases": [
+ "no_good"
+ ]
+ },
+ {
+ "emoji": "🙅♂️",
+ "aliases": [
+ "no_good_man",
+ "ng_man"
+ ]
+ },
+ {
+ "emoji": "🙅♀️",
+ "aliases": [
+ "no_good_woman",
+ "ng_woman"
+ ]
+ },
+ {
+ "emoji": "📵",
+ "aliases": [
+ "no_mobile_phones"
+ ]
+ },
+ {
+ "emoji": "😶",
+ "aliases": [
+ "no_mouth"
+ ]
+ },
+ {
+ "emoji": "🚷",
+ "aliases": [
+ "no_pedestrians"
+ ]
+ },
+ {
+ "emoji": "🚭",
+ "aliases": [
+ "no_smoking"
+ ]
+ },
+ {
+ "emoji": "🚱",
+ "aliases": [
+ "non-potable_water"
+ ]
+ },
+ {
+ "emoji": "🇳🇫",
+ "aliases": [
+ "norfolk_island"
+ ]
+ },
+ {
+ "emoji": "🇰🇵",
+ "aliases": [
+ "north_korea"
+ ]
+ },
+ {
+ "emoji": "🇲🇵",
+ "aliases": [
+ "northern_mariana_islands"
+ ]
+ },
+ {
+ "emoji": "🇳🇴",
+ "aliases": [
+ "norway"
+ ]
+ },
+ {
+ "emoji": "👃",
+ "aliases": [
+ "nose"
+ ]
+ },
+ {
+ "emoji": "📓",
+ "aliases": [
+ "notebook"
+ ]
+ },
+ {
+ "emoji": "📔",
+ "aliases": [
+ "notebook_with_decorative_cover"
+ ]
+ },
+ {
+ "emoji": "🎶",
+ "aliases": [
+ "notes"
+ ]
+ },
+ {
+ "emoji": "🔩",
+ "aliases": [
+ "nut_and_bolt"
+ ]
+ },
+ {
+ "emoji": "⭕",
+ "aliases": [
+ "o"
+ ]
+ },
+ {
+ "emoji": "🅾️",
+ "aliases": [
+ "o2"
+ ]
+ },
+ {
+ "emoji": "🌊",
+ "aliases": [
+ "ocean"
+ ]
+ },
+ {
+ "emoji": "🐙",
+ "aliases": [
+ "octopus"
+ ]
+ },
+ {
+ "emoji": "🍢",
+ "aliases": [
+ "oden"
+ ]
+ },
+ {
+ "emoji": "🏢",
+ "aliases": [
+ "office"
+ ]
+ },
+ {
+ "emoji": "🧑💼",
+ "aliases": [
+ "office_worker"
+ ]
+ },
+ {
+ "emoji": "🛢️",
+ "aliases": [
+ "oil_drum"
+ ]
+ },
+ {
+ "emoji": "🆗",
+ "aliases": [
+ "ok"
+ ]
+ },
+ {
+ "emoji": "👌",
+ "aliases": [
+ "ok_hand"
+ ]
+ },
+ {
+ "emoji": "🙆♂️",
+ "aliases": [
+ "ok_man"
+ ]
+ },
+ {
+ "emoji": "🙆",
+ "aliases": [
+ "ok_person"
+ ]
+ },
+ {
+ "emoji": "🙆♀️",
+ "aliases": [
+ "ok_woman"
+ ]
+ },
+ {
+ "emoji": "🗝️",
+ "aliases": [
+ "old_key"
+ ]
+ },
+ {
+ "emoji": "🧓",
+ "aliases": [
+ "older_adult"
+ ]
+ },
+ {
+ "emoji": "👴",
+ "aliases": [
+ "older_man"
+ ]
+ },
+ {
+ "emoji": "👵",
+ "aliases": [
+ "older_woman"
+ ]
+ },
+ {
+ "emoji": "🫒",
+ "aliases": [
+ "olive"
+ ]
+ },
+ {
+ "emoji": "🕉️",
+ "aliases": [
+ "om"
+ ]
+ },
+ {
+ "emoji": "🇴🇲",
+ "aliases": [
+ "oman"
+ ]
+ },
+ {
+ "emoji": "🔛",
+ "aliases": [
+ "on"
+ ]
+ },
+ {
+ "emoji": "🚘",
+ "aliases": [
+ "oncoming_automobile"
+ ]
+ },
+ {
+ "emoji": "🚍",
+ "aliases": [
+ "oncoming_bus"
+ ]
+ },
+ {
+ "emoji": "🚔",
+ "aliases": [
+ "oncoming_police_car"
+ ]
+ },
+ {
+ "emoji": "🚖",
+ "aliases": [
+ "oncoming_taxi"
+ ]
+ },
+ {
+ "emoji": "1️⃣",
+ "aliases": [
+ "one"
+ ]
+ },
+ {
+ "emoji": "🩱",
+ "aliases": [
+ "one_piece_swimsuit"
+ ]
+ },
+ {
+ "emoji": "🧅",
+ "aliases": [
+ "onion"
+ ]
+ },
+ {
+ "emoji": "📂",
+ "aliases": [
+ "open_file_folder"
+ ]
+ },
+ {
+ "emoji": "👐",
+ "aliases": [
+ "open_hands"
+ ]
+ },
+ {
+ "emoji": "😮",
+ "aliases": [
+ "open_mouth"
+ ]
+ },
+ {
+ "emoji": "☂️",
+ "aliases": [
+ "open_umbrella"
+ ]
+ },
+ {
+ "emoji": "⛎",
+ "aliases": [
+ "ophiuchus"
+ ]
+ },
+ {
+ "emoji": "📙",
+ "aliases": [
+ "orange_book"
+ ]
+ },
+ {
+ "emoji": "🟠",
+ "aliases": [
+ "orange_circle"
+ ]
+ },
+ {
+ "emoji": "🧡",
+ "aliases": [
+ "orange_heart"
+ ]
+ },
+ {
+ "emoji": "🟧",
+ "aliases": [
+ "orange_square"
+ ]
+ },
+ {
+ "emoji": "🦧",
+ "aliases": [
+ "orangutan"
+ ]
+ },
+ {
+ "emoji": "☦️",
+ "aliases": [
+ "orthodox_cross"
+ ]
+ },
+ {
+ "emoji": "🦦",
+ "aliases": [
+ "otter"
+ ]
+ },
+ {
+ "emoji": "📤",
+ "aliases": [
+ "outbox_tray"
+ ]
+ },
+ {
+ "emoji": "🦉",
+ "aliases": [
+ "owl"
+ ]
+ },
+ {
+ "emoji": "🐂",
+ "aliases": [
+ "ox"
+ ]
+ },
+ {
+ "emoji": "🦪",
+ "aliases": [
+ "oyster"
+ ]
+ },
+ {
+ "emoji": "📦",
+ "aliases": [
+ "package"
+ ]
+ },
+ {
+ "emoji": "📄",
+ "aliases": [
+ "page_facing_up"
+ ]
+ },
+ {
+ "emoji": "📃",
+ "aliases": [
+ "page_with_curl"
+ ]
+ },
+ {
+ "emoji": "📟",
+ "aliases": [
+ "pager"
+ ]
+ },
+ {
+ "emoji": "🖌️",
+ "aliases": [
+ "paintbrush"
+ ]
+ },
+ {
+ "emoji": "🇵🇰",
+ "aliases": [
+ "pakistan"
+ ]
+ },
+ {
+ "emoji": "🇵🇼",
+ "aliases": [
+ "palau"
+ ]
+ },
+ {
+ "emoji": "🇵🇸",
+ "aliases": [
+ "palestinian_territories"
+ ]
+ },
+ {
+ "emoji": "🫳",
+ "aliases": [
+ "palm_down_hand"
+ ]
+ },
+ {
+ "emoji": "🌴",
+ "aliases": [
+ "palm_tree"
+ ]
+ },
+ {
+ "emoji": "🫴",
+ "aliases": [
+ "palm_up_hand"
+ ]
+ },
+ {
+ "emoji": "🤲",
+ "aliases": [
+ "palms_up_together"
+ ]
+ },
+ {
+ "emoji": "🇵🇦",
+ "aliases": [
+ "panama"
+ ]
+ },
+ {
+ "emoji": "🥞",
+ "aliases": [
+ "pancakes"
+ ]
+ },
+ {
+ "emoji": "🐼",
+ "aliases": [
+ "panda_face"
+ ]
+ },
+ {
+ "emoji": "📎",
+ "aliases": [
+ "paperclip"
+ ]
+ },
+ {
+ "emoji": "🖇️",
+ "aliases": [
+ "paperclips"
+ ]
+ },
+ {
+ "emoji": "🇵🇬",
+ "aliases": [
+ "papua_new_guinea"
+ ]
+ },
+ {
+ "emoji": "🪂",
+ "aliases": [
+ "parachute"
+ ]
+ },
+ {
+ "emoji": "🇵🇾",
+ "aliases": [
+ "paraguay"
+ ]
+ },
+ {
+ "emoji": "⛱️",
+ "aliases": [
+ "parasol_on_ground"
+ ]
+ },
+ {
+ "emoji": "🅿️",
+ "aliases": [
+ "parking"
+ ]
+ },
+ {
+ "emoji": "🦜",
+ "aliases": [
+ "parrot"
+ ]
+ },
+ {
+ "emoji": "〽️",
+ "aliases": [
+ "part_alternation_mark"
+ ]
+ },
+ {
+ "emoji": "⛅",
+ "aliases": [
+ "partly_sunny"
+ ]
+ },
+ {
+ "emoji": "🥳",
+ "aliases": [
+ "partying_face"
+ ]
+ },
+ {
+ "emoji": "🛳️",
+ "aliases": [
+ "passenger_ship"
+ ]
+ },
+ {
+ "emoji": "🛂",
+ "aliases": [
+ "passport_control"
+ ]
+ },
+ {
+ "emoji": "⏸️",
+ "aliases": [
+ "pause_button"
+ ]
+ },
+ {
+ "emoji": "🫛",
+ "aliases": [
+ "pea_pod"
+ ]
+ },
+ {
+ "emoji": "☮️",
+ "aliases": [
+ "peace_symbol"
+ ]
+ },
+ {
+ "emoji": "🍑",
+ "aliases": [
+ "peach"
+ ]
+ },
+ {
+ "emoji": "🦚",
+ "aliases": [
+ "peacock"
+ ]
+ },
+ {
+ "emoji": "🥜",
+ "aliases": [
+ "peanuts"
+ ]
+ },
+ {
+ "emoji": "🍐",
+ "aliases": [
+ "pear"
+ ]
+ },
+ {
+ "emoji": "🖊️",
+ "aliases": [
+ "pen"
+ ]
+ },
+ {
+ "emoji": "✏️",
+ "aliases": [
+ "pencil2"
+ ]
+ },
+ {
+ "emoji": "🐧",
+ "aliases": [
+ "penguin"
+ ]
+ },
+ {
+ "emoji": "😔",
+ "aliases": [
+ "pensive"
+ ]
+ },
+ {
+ "emoji": "🧑🤝🧑",
+ "aliases": [
+ "people_holding_hands"
+ ]
+ },
+ {
+ "emoji": "🫂",
+ "aliases": [
+ "people_hugging"
+ ]
+ },
+ {
+ "emoji": "🎭",
+ "aliases": [
+ "performing_arts"
+ ]
+ },
+ {
+ "emoji": "😣",
+ "aliases": [
+ "persevere"
+ ]
+ },
+ {
+ "emoji": "🧑🦲",
+ "aliases": [
+ "person_bald"
+ ]
+ },
+ {
+ "emoji": "🧑🦱",
+ "aliases": [
+ "person_curly_hair"
+ ]
+ },
+ {
+ "emoji": "🧑🍼",
+ "aliases": [
+ "person_feeding_baby"
+ ]
+ },
+ {
+ "emoji": "🤺",
+ "aliases": [
+ "person_fencing"
+ ]
+ },
+ {
+ "emoji": "🧑🦽",
+ "aliases": [
+ "person_in_manual_wheelchair"
+ ]
+ },
+ {
+ "emoji": "🧑🦽➡️",
+ "aliases": [
+ "person_in_manual_wheelchair_facing_right"
+ ]
+ },
+ {
+ "emoji": "🧑🦼",
+ "aliases": [
+ "person_in_motorized_wheelchair"
+ ]
+ },
+ {
+ "emoji": "🧑🦼➡️",
+ "aliases": [
+ "person_in_motorized_wheelchair_facing_right"
+ ]
+ },
+ {
+ "emoji": "🤵",
+ "aliases": [
+ "person_in_tuxedo"
+ ]
+ },
+ {
+ "emoji": "🧎➡️",
+ "aliases": [
+ "person_kneeling_facing_right"
+ ]
+ },
+ {
+ "emoji": "🧑🦰",
+ "aliases": [
+ "person_red_hair"
+ ]
+ },
+ {
+ "emoji": "🏃➡️",
+ "aliases": [
+ "person_running_facing_right"
+ ]
+ },
+ {
+ "emoji": "🚶➡️",
+ "aliases": [
+ "person_walking_facing_right"
+ ]
+ },
+ {
+ "emoji": "🧑🦳",
+ "aliases": [
+ "person_white_hair"
+ ]
+ },
+ {
+ "emoji": "🫅",
+ "aliases": [
+ "person_with_crown"
+ ]
+ },
+ {
+ "emoji": "🧑🦯",
+ "aliases": [
+ "person_with_probing_cane"
+ ]
+ },
+ {
+ "emoji": "👳",
+ "aliases": [
+ "person_with_turban"
+ ]
+ },
+ {
+ "emoji": "👰",
+ "aliases": [
+ "person_with_veil"
+ ]
+ },
+ {
+ "emoji": "🧑🦯➡️",
+ "aliases": [
+ "person_with_white_cane_facing_right"
+ ]
+ },
+ {
+ "emoji": "🇵🇪",
+ "aliases": [
+ "peru"
+ ]
+ },
+ {
+ "emoji": "🧫",
+ "aliases": [
+ "petri_dish"
+ ]
+ },
+ {
+ "emoji": "🇵🇭",
+ "aliases": [
+ "philippines"
+ ]
+ },
+ {
+ "emoji": "🐦🔥",
+ "aliases": [
+ "phoenix"
+ ]
+ },
+ {
+ "emoji": "☎️",
+ "aliases": [
+ "phone",
+ "telephone"
+ ]
+ },
+ {
+ "emoji": "⛏️",
+ "aliases": [
+ "pick"
+ ]
+ },
+ {
+ "emoji": "🛻",
+ "aliases": [
+ "pickup_truck"
+ ]
+ },
+ {
+ "emoji": "🥧",
+ "aliases": [
+ "pie"
+ ]
+ },
+ {
+ "emoji": "🐷",
+ "aliases": [
+ "pig"
+ ]
+ },
+ {
+ "emoji": "🐖",
+ "aliases": [
+ "pig2"
+ ]
+ },
+ {
+ "emoji": "🐽",
+ "aliases": [
+ "pig_nose"
+ ]
+ },
+ {
+ "emoji": "💊",
+ "aliases": [
+ "pill"
+ ]
+ },
+ {
+ "emoji": "🧑✈️",
+ "aliases": [
+ "pilot"
+ ]
+ },
+ {
+ "emoji": "🪅",
+ "aliases": [
+ "pinata"
+ ]
+ },
+ {
+ "emoji": "🤌",
+ "aliases": [
+ "pinched_fingers"
+ ]
+ },
+ {
+ "emoji": "🤏",
+ "aliases": [
+ "pinching_hand"
+ ]
+ },
+ {
+ "emoji": "🍍",
+ "aliases": [
+ "pineapple"
+ ]
+ },
+ {
+ "emoji": "🏓",
+ "aliases": [
+ "ping_pong"
+ ]
+ },
+ {
+ "emoji": "🩷",
+ "aliases": [
+ "pink_heart"
+ ]
+ },
+ {
+ "emoji": "🏴☠️",
+ "aliases": [
+ "pirate_flag"
+ ]
+ },
+ {
+ "emoji": "♓",
+ "aliases": [
+ "pisces"
+ ]
+ },
+ {
+ "emoji": "🇵🇳",
+ "aliases": [
+ "pitcairn_islands"
+ ]
+ },
+ {
+ "emoji": "🍕",
+ "aliases": [
+ "pizza"
+ ]
+ },
+ {
+ "emoji": "🪧",
+ "aliases": [
+ "placard"
+ ]
+ },
+ {
+ "emoji": "🛐",
+ "aliases": [
+ "place_of_worship"
+ ]
+ },
+ {
+ "emoji": "🍽️",
+ "aliases": [
+ "plate_with_cutlery"
+ ]
+ },
+ {
+ "emoji": "⏯️",
+ "aliases": [
+ "play_or_pause_button"
+ ]
+ },
+ {
+ "emoji": "🛝",
+ "aliases": [
+ "playground_slide"
+ ]
+ },
+ {
+ "emoji": "🥺",
+ "aliases": [
+ "pleading_face"
+ ]
+ },
+ {
+ "emoji": "🪠",
+ "aliases": [
+ "plunger"
+ ]
+ },
+ {
+ "emoji": "👇",
+ "aliases": [
+ "point_down"
+ ]
+ },
+ {
+ "emoji": "👈",
+ "aliases": [
+ "point_left"
+ ]
+ },
+ {
+ "emoji": "👉",
+ "aliases": [
+ "point_right"
+ ]
+ },
+ {
+ "emoji": "☝️",
+ "aliases": [
+ "point_up"
+ ]
+ },
+ {
+ "emoji": "👆",
+ "aliases": [
+ "point_up_2"
+ ]
+ },
+ {
+ "emoji": "🇵🇱",
+ "aliases": [
+ "poland"
+ ]
+ },
+ {
+ "emoji": "🐻❄️",
+ "aliases": [
+ "polar_bear"
+ ]
+ },
+ {
+ "emoji": "🚓",
+ "aliases": [
+ "police_car"
+ ]
+ },
+ {
+ "emoji": "👮",
+ "aliases": [
+ "police_officer",
+ "cop"
+ ]
+ },
+ {
+ "emoji": "👮♂️",
+ "aliases": [
+ "policeman"
+ ]
+ },
+ {
+ "emoji": "👮♀️",
+ "aliases": [
+ "policewoman"
+ ]
+ },
+ {
+ "emoji": "🐩",
+ "aliases": [
+ "poodle"
+ ]
+ },
+ {
+ "emoji": "🍿",
+ "aliases": [
+ "popcorn"
+ ]
+ },
+ {
+ "emoji": "🇵🇹",
+ "aliases": [
+ "portugal"
+ ]
+ },
+ {
+ "emoji": "🏣",
+ "aliases": [
+ "post_office"
+ ]
+ },
+ {
+ "emoji": "📯",
+ "aliases": [
+ "postal_horn"
+ ]
+ },
+ {
+ "emoji": "📮",
+ "aliases": [
+ "postbox"
+ ]
+ },
+ {
+ "emoji": "🚰",
+ "aliases": [
+ "potable_water"
+ ]
+ },
+ {
+ "emoji": "🥔",
+ "aliases": [
+ "potato"
+ ]
+ },
+ {
+ "emoji": "🪴",
+ "aliases": [
+ "potted_plant"
+ ]
+ },
+ {
+ "emoji": "👝",
+ "aliases": [
+ "pouch"
+ ]
+ },
+ {
+ "emoji": "🍗",
+ "aliases": [
+ "poultry_leg"
+ ]
+ },
+ {
+ "emoji": "💷",
+ "aliases": [
+ "pound"
+ ]
+ },
+ {
+ "emoji": "🫗",
+ "aliases": [
+ "pouring_liquid"
+ ]
+ },
+ {
+ "emoji": "😾",
+ "aliases": [
+ "pouting_cat"
+ ]
+ },
+ {
+ "emoji": "🙎",
+ "aliases": [
+ "pouting_face"
+ ]
+ },
+ {
+ "emoji": "🙎♂️",
+ "aliases": [
+ "pouting_man"
+ ]
+ },
+ {
+ "emoji": "🙎♀️",
+ "aliases": [
+ "pouting_woman"
+ ]
+ },
+ {
+ "emoji": "🙏",
+ "aliases": [
+ "pray"
+ ]
+ },
+ {
+ "emoji": "📿",
+ "aliases": [
+ "prayer_beads"
+ ]
+ },
+ {
+ "emoji": "🫃",
+ "aliases": [
+ "pregnant_man"
+ ]
+ },
+ {
+ "emoji": "🫄",
+ "aliases": [
+ "pregnant_person"
+ ]
+ },
+ {
+ "emoji": "🤰",
+ "aliases": [
+ "pregnant_woman"
+ ]
+ },
+ {
+ "emoji": "🥨",
+ "aliases": [
+ "pretzel"
+ ]
+ },
+ {
+ "emoji": "⏮️",
+ "aliases": [
+ "previous_track_button"
+ ]
+ },
+ {
+ "emoji": "🤴",
+ "aliases": [
+ "prince"
+ ]
+ },
+ {
+ "emoji": "👸",
+ "aliases": [
+ "princess"
+ ]
+ },
+ {
+ "emoji": "🖨️",
+ "aliases": [
+ "printer"
+ ]
+ },
+ {
+ "emoji": "🦯",
+ "aliases": [
+ "probing_cane"
+ ]
+ },
+ {
+ "emoji": "🇵🇷",
+ "aliases": [
+ "puerto_rico"
+ ]
+ },
+ {
+ "emoji": "🟣",
+ "aliases": [
+ "purple_circle"
+ ]
+ },
+ {
+ "emoji": "💜",
+ "aliases": [
+ "purple_heart"
+ ]
+ },
+ {
+ "emoji": "🟪",
+ "aliases": [
+ "purple_square"
+ ]
+ },
+ {
+ "emoji": "👛",
+ "aliases": [
+ "purse"
+ ]
+ },
+ {
+ "emoji": "📌",
+ "aliases": [
+ "pushpin"
+ ]
+ },
+ {
+ "emoji": "🚮",
+ "aliases": [
+ "put_litter_in_its_place"
+ ]
+ },
+ {
+ "emoji": "🇶🇦",
+ "aliases": [
+ "qatar"
+ ]
+ },
+ {
+ "emoji": "❓",
+ "aliases": [
+ "question"
+ ]
+ },
+ {
+ "emoji": "🐰",
+ "aliases": [
+ "rabbit"
+ ]
+ },
+ {
+ "emoji": "🐇",
+ "aliases": [
+ "rabbit2"
+ ]
+ },
+ {
+ "emoji": "🦝",
+ "aliases": [
+ "raccoon"
+ ]
+ },
+ {
+ "emoji": "🐎",
+ "aliases": [
+ "racehorse"
+ ]
+ },
+ {
+ "emoji": "🏎️",
+ "aliases": [
+ "racing_car"
+ ]
+ },
+ {
+ "emoji": "📻",
+ "aliases": [
+ "radio"
+ ]
+ },
+ {
+ "emoji": "🔘",
+ "aliases": [
+ "radio_button"
+ ]
+ },
+ {
+ "emoji": "☢️",
+ "aliases": [
+ "radioactive"
+ ]
+ },
+ {
+ "emoji": "😡",
+ "aliases": [
+ "rage",
+ "pout"
+ ]
+ },
+ {
+ "emoji": "🚃",
+ "aliases": [
+ "railway_car"
+ ]
+ },
+ {
+ "emoji": "🛤️",
+ "aliases": [
+ "railway_track"
+ ]
+ },
+ {
+ "emoji": "🌈",
+ "aliases": [
+ "rainbow"
+ ]
+ },
+ {
+ "emoji": "🏳️🌈",
+ "aliases": [
+ "rainbow_flag"
+ ]
+ },
+ {
+ "emoji": "🤚",
+ "aliases": [
+ "raised_back_of_hand"
+ ]
+ },
+ {
+ "emoji": "🤨",
+ "aliases": [
+ "raised_eyebrow"
+ ]
+ },
+ {
+ "emoji": "🖐️",
+ "aliases": [
+ "raised_hand_with_fingers_splayed"
+ ]
+ },
+ {
+ "emoji": "🙌",
+ "aliases": [
+ "raised_hands"
+ ]
+ },
+ {
+ "emoji": "🙋",
+ "aliases": [
+ "raising_hand"
+ ]
+ },
+ {
+ "emoji": "🙋♂️",
+ "aliases": [
+ "raising_hand_man"
+ ]
+ },
+ {
+ "emoji": "🙋♀️",
+ "aliases": [
+ "raising_hand_woman"
+ ]
+ },
+ {
+ "emoji": "🐏",
+ "aliases": [
+ "ram"
+ ]
+ },
+ {
+ "emoji": "🍜",
+ "aliases": [
+ "ramen"
+ ]
+ },
+ {
+ "emoji": "🐀",
+ "aliases": [
+ "rat"
+ ]
+ },
+ {
+ "emoji": "🪒",
+ "aliases": [
+ "razor"
+ ]
+ },
+ {
+ "emoji": "🧾",
+ "aliases": [
+ "receipt"
+ ]
+ },
+ {
+ "emoji": "⏺️",
+ "aliases": [
+ "record_button"
+ ]
+ },
+ {
+ "emoji": "♻️",
+ "aliases": [
+ "recycle"
+ ]
+ },
+ {
+ "emoji": "🔴",
+ "aliases": [
+ "red_circle"
+ ]
+ },
+ {
+ "emoji": "🧧",
+ "aliases": [
+ "red_envelope"
+ ]
+ },
+ {
+ "emoji": "👨🦰",
+ "aliases": [
+ "red_haired_man"
+ ]
+ },
+ {
+ "emoji": "👩🦰",
+ "aliases": [
+ "red_haired_woman"
+ ]
+ },
+ {
+ "emoji": "🟥",
+ "aliases": [
+ "red_square"
+ ]
+ },
+ {
+ "emoji": "®️",
+ "aliases": [
+ "registered"
+ ]
+ },
+ {
+ "emoji": "☺️",
+ "aliases": [
+ "relaxed"
+ ]
+ },
+ {
+ "emoji": "😌",
+ "aliases": [
+ "relieved"
+ ]
+ },
+ {
+ "emoji": "🎗️",
+ "aliases": [
+ "reminder_ribbon"
+ ]
+ },
+ {
+ "emoji": "🔁",
+ "aliases": [
+ "repeat"
+ ]
+ },
+ {
+ "emoji": "🔂",
+ "aliases": [
+ "repeat_one"
+ ]
+ },
+ {
+ "emoji": "⛑️",
+ "aliases": [
+ "rescue_worker_helmet"
+ ]
+ },
+ {
+ "emoji": "🚻",
+ "aliases": [
+ "restroom"
+ ]
+ },
+ {
+ "emoji": "🇷🇪",
+ "aliases": [
+ "reunion"
+ ]
+ },
+ {
+ "emoji": "💞",
+ "aliases": [
+ "revolving_hearts"
+ ]
+ },
+ {
+ "emoji": "⏪",
+ "aliases": [
+ "rewind"
+ ]
+ },
+ {
+ "emoji": "🦏",
+ "aliases": [
+ "rhinoceros"
+ ]
+ },
+ {
+ "emoji": "🎀",
+ "aliases": [
+ "ribbon"
+ ]
+ },
+ {
+ "emoji": "🍚",
+ "aliases": [
+ "rice"
+ ]
+ },
+ {
+ "emoji": "🍙",
+ "aliases": [
+ "rice_ball"
+ ]
+ },
+ {
+ "emoji": "🍘",
+ "aliases": [
+ "rice_cracker"
+ ]
+ },
+ {
+ "emoji": "🎑",
+ "aliases": [
+ "rice_scene"
+ ]
+ },
+ {
+ "emoji": "🗯️",
+ "aliases": [
+ "right_anger_bubble"
+ ]
+ },
+ {
+ "emoji": "🫱",
+ "aliases": [
+ "rightwards_hand"
+ ]
+ },
+ {
+ "emoji": "🫸",
+ "aliases": [
+ "rightwards_pushing_hand"
+ ]
+ },
+ {
+ "emoji": "💍",
+ "aliases": [
+ "ring"
+ ]
+ },
+ {
+ "emoji": "🛟",
+ "aliases": [
+ "ring_buoy"
+ ]
+ },
+ {
+ "emoji": "🪐",
+ "aliases": [
+ "ringed_planet"
+ ]
+ },
+ {
+ "emoji": "🤖",
+ "aliases": [
+ "robot"
+ ]
+ },
+ {
+ "emoji": "🪨",
+ "aliases": [
+ "rock"
+ ]
+ },
+ {
+ "emoji": "🚀",
+ "aliases": [
+ "rocket"
+ ]
+ },
+ {
+ "emoji": "🤣",
+ "aliases": [
+ "rofl"
+ ]
+ },
+ {
+ "emoji": "🙄",
+ "aliases": [
+ "roll_eyes"
+ ]
+ },
+ {
+ "emoji": "🧻",
+ "aliases": [
+ "roll_of_paper"
+ ]
+ },
+ {
+ "emoji": "🎢",
+ "aliases": [
+ "roller_coaster"
+ ]
+ },
+ {
+ "emoji": "🛼",
+ "aliases": [
+ "roller_skate"
+ ]
+ },
+ {
+ "emoji": "🇷🇴",
+ "aliases": [
+ "romania"
+ ]
+ },
+ {
+ "emoji": "🐓",
+ "aliases": [
+ "rooster"
+ ]
+ },
+ {
+        "emoji": "🫜",
+ "aliases": [
+ "root_vegetable"
+ ]
+ },
+ {
+ "emoji": "🌹",
+ "aliases": [
+ "rose"
+ ]
+ },
+ {
+ "emoji": "🏵️",
+ "aliases": [
+ "rosette"
+ ]
+ },
+ {
+ "emoji": "🚨",
+ "aliases": [
+ "rotating_light"
+ ]
+ },
+ {
+ "emoji": "📍",
+ "aliases": [
+ "round_pushpin"
+ ]
+ },
+ {
+ "emoji": "🚣",
+ "aliases": [
+ "rowboat"
+ ]
+ },
+ {
+ "emoji": "🚣♂️",
+ "aliases": [
+ "rowing_man"
+ ]
+ },
+ {
+ "emoji": "🚣♀️",
+ "aliases": [
+ "rowing_woman"
+ ]
+ },
+ {
+ "emoji": "🇷🇺",
+ "aliases": [
+ "ru"
+ ]
+ },
+ {
+ "emoji": "🏉",
+ "aliases": [
+ "rugby_football"
+ ]
+ },
+ {
+ "emoji": "🏃",
+ "aliases": [
+ "runner",
+ "running"
+ ]
+ },
+ {
+ "emoji": "🏃♂️",
+ "aliases": [
+ "running_man"
+ ]
+ },
+ {
+ "emoji": "🎽",
+ "aliases": [
+ "running_shirt_with_sash"
+ ]
+ },
+ {
+ "emoji": "🏃♀️",
+ "aliases": [
+ "running_woman"
+ ]
+ },
+ {
+ "emoji": "🇷🇼",
+ "aliases": [
+ "rwanda"
+ ]
+ },
+ {
+ "emoji": "🈂️",
+ "aliases": [
+ "sa"
+ ]
+ },
+ {
+ "emoji": "🧷",
+ "aliases": [
+ "safety_pin"
+ ]
+ },
+ {
+ "emoji": "🦺",
+ "aliases": [
+ "safety_vest"
+ ]
+ },
+ {
+ "emoji": "♐",
+ "aliases": [
+ "sagittarius"
+ ]
+ },
+ {
+ "emoji": "🍶",
+ "aliases": [
+ "sake"
+ ]
+ },
+ {
+ "emoji": "🧂",
+ "aliases": [
+ "salt"
+ ]
+ },
+ {
+ "emoji": "🫡",
+ "aliases": [
+ "saluting_face"
+ ]
+ },
+ {
+ "emoji": "🇼🇸",
+ "aliases": [
+ "samoa"
+ ]
+ },
+ {
+ "emoji": "🇸🇲",
+ "aliases": [
+ "san_marino"
+ ]
+ },
+ {
+ "emoji": "👡",
+ "aliases": [
+ "sandal"
+ ]
+ },
+ {
+ "emoji": "🥪",
+ "aliases": [
+ "sandwich"
+ ]
+ },
+ {
+ "emoji": "🎅",
+ "aliases": [
+ "santa"
+ ]
+ },
+ {
+ "emoji": "🇸🇹",
+ "aliases": [
+ "sao_tome_principe"
+ ]
+ },
+ {
+ "emoji": "🥻",
+ "aliases": [
+ "sari"
+ ]
+ },
+ {
+ "emoji": "📡",
+ "aliases": [
+ "satellite"
+ ]
+ },
+ {
+ "emoji": "🇸🇦",
+ "aliases": [
+ "saudi_arabia"
+ ]
+ },
+ {
+ "emoji": "🧖♂️",
+ "aliases": [
+ "sauna_man"
+ ]
+ },
+ {
+ "emoji": "🧖",
+ "aliases": [
+ "sauna_person"
+ ]
+ },
+ {
+ "emoji": "🧖♀️",
+ "aliases": [
+ "sauna_woman"
+ ]
+ },
+ {
+ "emoji": "🦕",
+ "aliases": [
+ "sauropod"
+ ]
+ },
+ {
+ "emoji": "🎷",
+ "aliases": [
+ "saxophone"
+ ]
+ },
+ {
+ "emoji": "🧣",
+ "aliases": [
+ "scarf"
+ ]
+ },
+ {
+ "emoji": "🏫",
+ "aliases": [
+ "school"
+ ]
+ },
+ {
+ "emoji": "🎒",
+ "aliases": [
+ "school_satchel"
+ ]
+ },
+ {
+ "emoji": "🧑🔬",
+ "aliases": [
+ "scientist"
+ ]
+ },
+ {
+ "emoji": "✂️",
+ "aliases": [
+ "scissors"
+ ]
+ },
+ {
+ "emoji": "🦂",
+ "aliases": [
+ "scorpion"
+ ]
+ },
+ {
+ "emoji": "♏",
+ "aliases": [
+ "scorpius"
+ ]
+ },
+ {
+ "emoji": "🏴",
+ "aliases": [
+ "scotland"
+ ]
+ },
+ {
+ "emoji": "😱",
+ "aliases": [
+ "scream"
+ ]
+ },
+ {
+ "emoji": "🙀",
+ "aliases": [
+ "scream_cat"
+ ]
+ },
+ {
+ "emoji": "🪛",
+ "aliases": [
+ "screwdriver"
+ ]
+ },
+ {
+ "emoji": "📜",
+ "aliases": [
+ "scroll"
+ ]
+ },
+ {
+ "emoji": "🦭",
+ "aliases": [
+ "seal"
+ ]
+ },
+ {
+ "emoji": "💺",
+ "aliases": [
+ "seat"
+ ]
+ },
+ {
+ "emoji": "㊙️",
+ "aliases": [
+ "secret"
+ ]
+ },
+ {
+ "emoji": "🙈",
+ "aliases": [
+ "see_no_evil"
+ ]
+ },
+ {
+ "emoji": "🌱",
+ "aliases": [
+ "seedling"
+ ]
+ },
+ {
+ "emoji": "🤳",
+ "aliases": [
+ "selfie"
+ ]
+ },
+ {
+ "emoji": "🇸🇳",
+ "aliases": [
+ "senegal"
+ ]
+ },
+ {
+ "emoji": "🇷🇸",
+ "aliases": [
+ "serbia"
+ ]
+ },
+ {
+ "emoji": "🐕🦺",
+ "aliases": [
+ "service_dog"
+ ]
+ },
+ {
+ "emoji": "7️⃣",
+ "aliases": [
+ "seven"
+ ]
+ },
+ {
+ "emoji": "🪡",
+ "aliases": [
+ "sewing_needle"
+ ]
+ },
+ {
+ "emoji": "🇸🇨",
+ "aliases": [
+ "seychelles"
+ ]
+ },
+ {
+ "emoji": "🫨",
+ "aliases": [
+ "shaking_face"
+ ]
+ },
+ {
+ "emoji": "🥘",
+ "aliases": [
+ "shallow_pan_of_food"
+ ]
+ },
+ {
+ "emoji": "☘️",
+ "aliases": [
+ "shamrock"
+ ]
+ },
+ {
+ "emoji": "🦈",
+ "aliases": [
+ "shark"
+ ]
+ },
+ {
+ "emoji": "🍧",
+ "aliases": [
+ "shaved_ice"
+ ]
+ },
+ {
+ "emoji": "🐑",
+ "aliases": [
+ "sheep"
+ ]
+ },
+ {
+ "emoji": "🐚",
+ "aliases": [
+ "shell"
+ ]
+ },
+ {
+ "emoji": "🛡️",
+ "aliases": [
+ "shield"
+ ]
+ },
+ {
+ "emoji": "⛩️",
+ "aliases": [
+ "shinto_shrine"
+ ]
+ },
+ {
+ "emoji": "🚢",
+ "aliases": [
+ "ship"
+ ]
+ },
+ {
+ "emoji": "👕",
+ "aliases": [
+ "shirt",
+ "tshirt"
+ ]
+ },
+ {
+ "emoji": "🛍️",
+ "aliases": [
+ "shopping"
+ ]
+ },
+ {
+ "emoji": "🛒",
+ "aliases": [
+ "shopping_cart"
+ ]
+ },
+ {
+ "emoji": "🩳",
+ "aliases": [
+ "shorts"
+ ]
+ },
+ {
+        "emoji": "🪏",
+ "aliases": [
+ "shovel"
+ ]
+ },
+ {
+ "emoji": "🚿",
+ "aliases": [
+ "shower"
+ ]
+ },
+ {
+ "emoji": "🦐",
+ "aliases": [
+ "shrimp"
+ ]
+ },
+ {
+ "emoji": "🤷",
+ "aliases": [
+ "shrug"
+ ]
+ },
+ {
+ "emoji": "🤫",
+ "aliases": [
+ "shushing_face"
+ ]
+ },
+ {
+ "emoji": "🇸🇱",
+ "aliases": [
+ "sierra_leone"
+ ]
+ },
+ {
+ "emoji": "📶",
+ "aliases": [
+ "signal_strength"
+ ]
+ },
+ {
+ "emoji": "🇸🇬",
+ "aliases": [
+ "singapore"
+ ]
+ },
+ {
+ "emoji": "🧑🎤",
+ "aliases": [
+ "singer"
+ ]
+ },
+ {
+ "emoji": "🇸🇽",
+ "aliases": [
+ "sint_maarten"
+ ]
+ },
+ {
+ "emoji": "6️⃣",
+ "aliases": [
+ "six"
+ ]
+ },
+ {
+ "emoji": "🔯",
+ "aliases": [
+ "six_pointed_star"
+ ]
+ },
+ {
+ "emoji": "🛹",
+ "aliases": [
+ "skateboard"
+ ]
+ },
+ {
+ "emoji": "🎿",
+ "aliases": [
+ "ski"
+ ]
+ },
+ {
+ "emoji": "⛷️",
+ "aliases": [
+ "skier"
+ ]
+ },
+ {
+ "emoji": "💀",
+ "aliases": [
+ "skull"
+ ]
+ },
+ {
+ "emoji": "☠️",
+ "aliases": [
+ "skull_and_crossbones"
+ ]
+ },
+ {
+ "emoji": "🦨",
+ "aliases": [
+ "skunk"
+ ]
+ },
+ {
+ "emoji": "🛷",
+ "aliases": [
+ "sled"
+ ]
+ },
+ {
+ "emoji": "😴",
+ "aliases": [
+ "sleeping"
+ ]
+ },
+ {
+ "emoji": "🛌",
+ "aliases": [
+ "sleeping_bed"
+ ]
+ },
+ {
+ "emoji": "😪",
+ "aliases": [
+ "sleepy"
+ ]
+ },
+ {
+ "emoji": "🙁",
+ "aliases": [
+ "slightly_frowning_face"
+ ]
+ },
+ {
+ "emoji": "🙂",
+ "aliases": [
+ "slightly_smiling_face"
+ ]
+ },
+ {
+ "emoji": "🎰",
+ "aliases": [
+ "slot_machine"
+ ]
+ },
+ {
+ "emoji": "🦥",
+ "aliases": [
+ "sloth"
+ ]
+ },
+ {
+ "emoji": "🇸🇰",
+ "aliases": [
+ "slovakia"
+ ]
+ },
+ {
+ "emoji": "🇸🇮",
+ "aliases": [
+ "slovenia"
+ ]
+ },
+ {
+ "emoji": "🛩️",
+ "aliases": [
+ "small_airplane"
+ ]
+ },
+ {
+ "emoji": "🔹",
+ "aliases": [
+ "small_blue_diamond"
+ ]
+ },
+ {
+ "emoji": "🔸",
+ "aliases": [
+ "small_orange_diamond"
+ ]
+ },
+ {
+ "emoji": "🔺",
+ "aliases": [
+ "small_red_triangle"
+ ]
+ },
+ {
+ "emoji": "🔻",
+ "aliases": [
+ "small_red_triangle_down"
+ ]
+ },
+ {
+ "emoji": "😄",
+ "aliases": [
+ "smile"
+ ]
+ },
+ {
+ "emoji": "😸",
+ "aliases": [
+ "smile_cat"
+ ]
+ },
+ {
+ "emoji": "😃",
+ "aliases": [
+ "smiley"
+ ]
+ },
+ {
+ "emoji": "😺",
+ "aliases": [
+ "smiley_cat"
+ ]
+ },
+ {
+ "emoji": "🥲",
+ "aliases": [
+ "smiling_face_with_tear"
+ ]
+ },
+ {
+ "emoji": "🥰",
+ "aliases": [
+ "smiling_face_with_three_hearts"
+ ]
+ },
+ {
+ "emoji": "😈",
+ "aliases": [
+ "smiling_imp"
+ ]
+ },
+ {
+ "emoji": "😏",
+ "aliases": [
+ "smirk"
+ ]
+ },
+ {
+ "emoji": "😼",
+ "aliases": [
+ "smirk_cat"
+ ]
+ },
+ {
+ "emoji": "🚬",
+ "aliases": [
+ "smoking"
+ ]
+ },
+ {
+ "emoji": "🐌",
+ "aliases": [
+ "snail"
+ ]
+ },
+ {
+ "emoji": "🐍",
+ "aliases": [
+ "snake"
+ ]
+ },
+ {
+ "emoji": "🤧",
+ "aliases": [
+ "sneezing_face"
+ ]
+ },
+ {
+ "emoji": "🏂",
+ "aliases": [
+ "snowboarder"
+ ]
+ },
+ {
+ "emoji": "❄️",
+ "aliases": [
+ "snowflake"
+ ]
+ },
+ {
+ "emoji": "⛄",
+ "aliases": [
+ "snowman"
+ ]
+ },
+ {
+ "emoji": "☃️",
+ "aliases": [
+ "snowman_with_snow"
+ ]
+ },
+ {
+ "emoji": "🧼",
+ "aliases": [
+ "soap"
+ ]
+ },
+ {
+ "emoji": "😭",
+ "aliases": [
+ "sob"
+ ]
+ },
+ {
+ "emoji": "⚽",
+ "aliases": [
+ "soccer"
+ ]
+ },
+ {
+ "emoji": "🧦",
+ "aliases": [
+ "socks"
+ ]
+ },
+ {
+ "emoji": "🥎",
+ "aliases": [
+ "softball"
+ ]
+ },
+ {
+ "emoji": "🇸🇧",
+ "aliases": [
+ "solomon_islands"
+ ]
+ },
+ {
+ "emoji": "🇸🇴",
+ "aliases": [
+ "somalia"
+ ]
+ },
+ {
+ "emoji": "🔜",
+ "aliases": [
+ "soon"
+ ]
+ },
+ {
+ "emoji": "🆘",
+ "aliases": [
+ "sos"
+ ]
+ },
+ {
+ "emoji": "🔉",
+ "aliases": [
+ "sound"
+ ]
+ },
+ {
+ "emoji": "🇿🇦",
+ "aliases": [
+ "south_africa"
+ ]
+ },
+ {
+ "emoji": "🇬🇸",
+ "aliases": [
+ "south_georgia_south_sandwich_islands"
+ ]
+ },
+ {
+ "emoji": "🇸🇸",
+ "aliases": [
+ "south_sudan"
+ ]
+ },
+ {
+ "emoji": "👾",
+ "aliases": [
+ "space_invader"
+ ]
+ },
+ {
+ "emoji": "♠️",
+ "aliases": [
+ "spades"
+ ]
+ },
+ {
+ "emoji": "🍝",
+ "aliases": [
+ "spaghetti"
+ ]
+ },
+ {
+ "emoji": "❇️",
+ "aliases": [
+ "sparkle"
+ ]
+ },
+ {
+ "emoji": "🎇",
+ "aliases": [
+ "sparkler"
+ ]
+ },
+ {
+ "emoji": "✨",
+ "aliases": [
+ "sparkles"
+ ]
+ },
+ {
+ "emoji": "💖",
+ "aliases": [
+ "sparkling_heart"
+ ]
+ },
+ {
+ "emoji": "🙊",
+ "aliases": [
+ "speak_no_evil"
+ ]
+ },
+ {
+ "emoji": "🔈",
+ "aliases": [
+ "speaker"
+ ]
+ },
+ {
+ "emoji": "🗣️",
+ "aliases": [
+ "speaking_head"
+ ]
+ },
+ {
+ "emoji": "💬",
+ "aliases": [
+ "speech_balloon"
+ ]
+ },
+ {
+ "emoji": "🚤",
+ "aliases": [
+ "speedboat"
+ ]
+ },
+ {
+ "emoji": "🕷️",
+ "aliases": [
+ "spider"
+ ]
+ },
+ {
+ "emoji": "🕸️",
+ "aliases": [
+ "spider_web"
+ ]
+ },
+ {
+ "emoji": "🗓️",
+ "aliases": [
+ "spiral_calendar"
+ ]
+ },
+ {
+ "emoji": "🗒️",
+ "aliases": [
+ "spiral_notepad"
+ ]
+ },
+ {
+        "emoji": "🫟",
+ "aliases": [
+ "splatter"
+ ]
+ },
+ {
+ "emoji": "🧽",
+ "aliases": [
+ "sponge"
+ ]
+ },
+ {
+ "emoji": "🥄",
+ "aliases": [
+ "spoon"
+ ]
+ },
+ {
+ "emoji": "🦑",
+ "aliases": [
+ "squid"
+ ]
+ },
+ {
+ "emoji": "🇱🇰",
+ "aliases": [
+ "sri_lanka"
+ ]
+ },
+ {
+ "emoji": "🇧🇱",
+ "aliases": [
+ "st_barthelemy"
+ ]
+ },
+ {
+ "emoji": "🇸🇭",
+ "aliases": [
+ "st_helena"
+ ]
+ },
+ {
+ "emoji": "🇰🇳",
+ "aliases": [
+ "st_kitts_nevis"
+ ]
+ },
+ {
+ "emoji": "🇱🇨",
+ "aliases": [
+ "st_lucia"
+ ]
+ },
+ {
+ "emoji": "🇲🇫",
+ "aliases": [
+ "st_martin"
+ ]
+ },
+ {
+ "emoji": "🇵🇲",
+ "aliases": [
+ "st_pierre_miquelon"
+ ]
+ },
+ {
+ "emoji": "🇻🇨",
+ "aliases": [
+ "st_vincent_grenadines"
+ ]
+ },
+ {
+ "emoji": "🏟️",
+ "aliases": [
+ "stadium"
+ ]
+ },
+ {
+ "emoji": "🧍♂️",
+ "aliases": [
+ "standing_man"
+ ]
+ },
+ {
+ "emoji": "🧍",
+ "aliases": [
+ "standing_person"
+ ]
+ },
+ {
+ "emoji": "🧍♀️",
+ "aliases": [
+ "standing_woman"
+ ]
+ },
+ {
+ "emoji": "⭐",
+ "aliases": [
+ "star"
+ ]
+ },
+ {
+ "emoji": "🌟",
+ "aliases": [
+ "star2"
+ ]
+ },
+ {
+ "emoji": "☪️",
+ "aliases": [
+ "star_and_crescent"
+ ]
+ },
+ {
+ "emoji": "✡️",
+ "aliases": [
+ "star_of_david"
+ ]
+ },
+ {
+ "emoji": "🤩",
+ "aliases": [
+ "star_struck"
+ ]
+ },
+ {
+ "emoji": "🌠",
+ "aliases": [
+ "stars"
+ ]
+ },
+ {
+ "emoji": "🚉",
+ "aliases": [
+ "station"
+ ]
+ },
+ {
+ "emoji": "🗽",
+ "aliases": [
+ "statue_of_liberty"
+ ]
+ },
+ {
+ "emoji": "🚂",
+ "aliases": [
+ "steam_locomotive"
+ ]
+ },
+ {
+ "emoji": "🩺",
+ "aliases": [
+ "stethoscope"
+ ]
+ },
+ {
+ "emoji": "🍲",
+ "aliases": [
+ "stew"
+ ]
+ },
+ {
+ "emoji": "⏹️",
+ "aliases": [
+ "stop_button"
+ ]
+ },
+ {
+ "emoji": "🛑",
+ "aliases": [
+ "stop_sign"
+ ]
+ },
+ {
+ "emoji": "⏱️",
+ "aliases": [
+ "stopwatch"
+ ]
+ },
+ {
+ "emoji": "📏",
+ "aliases": [
+ "straight_ruler"
+ ]
+ },
+ {
+ "emoji": "🍓",
+ "aliases": [
+ "strawberry"
+ ]
+ },
+ {
+ "emoji": "😛",
+ "aliases": [
+ "stuck_out_tongue"
+ ]
+ },
+ {
+ "emoji": "😝",
+ "aliases": [
+ "stuck_out_tongue_closed_eyes"
+ ]
+ },
+ {
+ "emoji": "😜",
+ "aliases": [
+ "stuck_out_tongue_winking_eye"
+ ]
+ },
+ {
+ "emoji": "🧑🎓",
+ "aliases": [
+ "student"
+ ]
+ },
+ {
+ "emoji": "🎙️",
+ "aliases": [
+ "studio_microphone"
+ ]
+ },
+ {
+ "emoji": "🥙",
+ "aliases": [
+ "stuffed_flatbread"
+ ]
+ },
+ {
+ "emoji": "🇸🇩",
+ "aliases": [
+ "sudan"
+ ]
+ },
+ {
+ "emoji": "🌥️",
+ "aliases": [
+ "sun_behind_large_cloud"
+ ]
+ },
+ {
+ "emoji": "🌦️",
+ "aliases": [
+ "sun_behind_rain_cloud"
+ ]
+ },
+ {
+ "emoji": "🌤️",
+ "aliases": [
+ "sun_behind_small_cloud"
+ ]
+ },
+ {
+ "emoji": "🌞",
+ "aliases": [
+ "sun_with_face"
+ ]
+ },
+ {
+ "emoji": "🌻",
+ "aliases": [
+ "sunflower"
+ ]
+ },
+ {
+ "emoji": "😎",
+ "aliases": [
+ "sunglasses"
+ ]
+ },
+ {
+ "emoji": "☀️",
+ "aliases": [
+ "sunny"
+ ]
+ },
+ {
+ "emoji": "🌅",
+ "aliases": [
+ "sunrise"
+ ]
+ },
+ {
+ "emoji": "🌄",
+ "aliases": [
+ "sunrise_over_mountains"
+ ]
+ },
+ {
+ "emoji": "🦸",
+ "aliases": [
+ "superhero"
+ ]
+ },
+ {
+ "emoji": "🦸♂️",
+ "aliases": [
+ "superhero_man"
+ ]
+ },
+ {
+ "emoji": "🦸♀️",
+ "aliases": [
+ "superhero_woman"
+ ]
+ },
+ {
+ "emoji": "🦹",
+ "aliases": [
+ "supervillain"
+ ]
+ },
+ {
+ "emoji": "🦹♂️",
+ "aliases": [
+ "supervillain_man"
+ ]
+ },
+ {
+ "emoji": "🦹♀️",
+ "aliases": [
+ "supervillain_woman"
+ ]
+ },
+ {
+ "emoji": "🏄",
+ "aliases": [
+ "surfer"
+ ]
+ },
+ {
+ "emoji": "🏄♂️",
+ "aliases": [
+ "surfing_man"
+ ]
+ },
+ {
+ "emoji": "🏄♀️",
+ "aliases": [
+ "surfing_woman"
+ ]
+ },
+ {
+ "emoji": "🇸🇷",
+ "aliases": [
+ "suriname"
+ ]
+ },
+ {
+ "emoji": "🍣",
+ "aliases": [
+ "sushi"
+ ]
+ },
+ {
+ "emoji": "🚟",
+ "aliases": [
+ "suspension_railway"
+ ]
+ },
+ {
+ "emoji": "🇸🇯",
+ "aliases": [
+ "svalbard_jan_mayen"
+ ]
+ },
+ {
+ "emoji": "🦢",
+ "aliases": [
+ "swan"
+ ]
+ },
+ {
+ "emoji": "🇸🇿",
+ "aliases": [
+ "swaziland"
+ ]
+ },
+ {
+ "emoji": "😓",
+ "aliases": [
+ "sweat"
+ ]
+ },
+ {
+ "emoji": "💦",
+ "aliases": [
+ "sweat_drops"
+ ]
+ },
+ {
+ "emoji": "😅",
+ "aliases": [
+ "sweat_smile"
+ ]
+ },
+ {
+ "emoji": "🇸🇪",
+ "aliases": [
+ "sweden"
+ ]
+ },
+ {
+ "emoji": "🍠",
+ "aliases": [
+ "sweet_potato"
+ ]
+ },
+ {
+ "emoji": "🩲",
+ "aliases": [
+ "swim_brief"
+ ]
+ },
+ {
+ "emoji": "🏊",
+ "aliases": [
+ "swimmer"
+ ]
+ },
+ {
+ "emoji": "🏊♂️",
+ "aliases": [
+ "swimming_man"
+ ]
+ },
+ {
+ "emoji": "🏊♀️",
+ "aliases": [
+ "swimming_woman"
+ ]
+ },
+ {
+ "emoji": "🇨🇭",
+ "aliases": [
+ "switzerland"
+ ]
+ },
+ {
+ "emoji": "🔣",
+ "aliases": [
+ "symbols"
+ ]
+ },
+ {
+ "emoji": "🕍",
+ "aliases": [
+ "synagogue"
+ ]
+ },
+ {
+ "emoji": "🇸🇾",
+ "aliases": [
+ "syria"
+ ]
+ },
+ {
+ "emoji": "💉",
+ "aliases": [
+ "syringe"
+ ]
+ },
+ {
+ "emoji": "🦖",
+ "aliases": [
+ "t-rex"
+ ]
+ },
+ {
+ "emoji": "🌮",
+ "aliases": [
+ "taco"
+ ]
+ },
+ {
+ "emoji": "🎉",
+ "aliases": [
+ "tada",
+ "hooray"
+ ]
+ },
+ {
+ "emoji": "🇹🇼",
+ "aliases": [
+ "taiwan"
+ ]
+ },
+ {
+ "emoji": "🇹🇯",
+ "aliases": [
+ "tajikistan"
+ ]
+ },
+ {
+ "emoji": "🥡",
+ "aliases": [
+ "takeout_box"
+ ]
+ },
+ {
+ "emoji": "🫔",
+ "aliases": [
+ "tamale"
+ ]
+ },
+ {
+ "emoji": "🎋",
+ "aliases": [
+ "tanabata_tree"
+ ]
+ },
+ {
+ "emoji": "🍊",
+ "aliases": [
+ "tangerine",
+ "orange",
+ "mandarin"
+ ]
+ },
+ {
+ "emoji": "🇹🇿",
+ "aliases": [
+ "tanzania"
+ ]
+ },
+ {
+ "emoji": "♉",
+ "aliases": [
+ "taurus"
+ ]
+ },
+ {
+ "emoji": "🚕",
+ "aliases": [
+ "taxi"
+ ]
+ },
+ {
+ "emoji": "🍵",
+ "aliases": [
+ "tea"
+ ]
+ },
+ {
+ "emoji": "🧑🏫",
+ "aliases": [
+ "teacher"
+ ]
+ },
+ {
+ "emoji": "🫖",
+ "aliases": [
+ "teapot"
+ ]
+ },
+ {
+ "emoji": "🧑💻",
+ "aliases": [
+ "technologist"
+ ]
+ },
+ {
+ "emoji": "🧸",
+ "aliases": [
+ "teddy_bear"
+ ]
+ },
+ {
+ "emoji": "📞",
+ "aliases": [
+ "telephone_receiver"
+ ]
+ },
+ {
+ "emoji": "🔭",
+ "aliases": [
+ "telescope"
+ ]
+ },
+ {
+ "emoji": "🎾",
+ "aliases": [
+ "tennis"
+ ]
+ },
+ {
+ "emoji": "⛺",
+ "aliases": [
+ "tent"
+ ]
+ },
+ {
+ "emoji": "🧪",
+ "aliases": [
+ "test_tube"
+ ]
+ },
+ {
+ "emoji": "🇹🇭",
+ "aliases": [
+ "thailand"
+ ]
+ },
+ {
+ "emoji": "🌡️",
+ "aliases": [
+ "thermometer"
+ ]
+ },
+ {
+ "emoji": "🤔",
+ "aliases": [
+ "thinking"
+ ]
+ },
+ {
+ "emoji": "🩴",
+ "aliases": [
+ "thong_sandal"
+ ]
+ },
+ {
+ "emoji": "💭",
+ "aliases": [
+ "thought_balloon"
+ ]
+ },
+ {
+ "emoji": "🧵",
+ "aliases": [
+ "thread"
+ ]
+ },
+ {
+ "emoji": "3️⃣",
+ "aliases": [
+ "three"
+ ]
+ },
+ {
+ "emoji": "🎫",
+ "aliases": [
+ "ticket"
+ ]
+ },
+ {
+ "emoji": "🎟️",
+ "aliases": [
+ "tickets"
+ ]
+ },
+ {
+ "emoji": "🐯",
+ "aliases": [
+ "tiger"
+ ]
+ },
+ {
+ "emoji": "🐅",
+ "aliases": [
+ "tiger2"
+ ]
+ },
+ {
+ "emoji": "⏲️",
+ "aliases": [
+ "timer_clock"
+ ]
+ },
+ {
+ "emoji": "🇹🇱",
+ "aliases": [
+ "timor_leste"
+ ]
+ },
+ {
+ "emoji": "💁♂️",
+ "aliases": [
+ "tipping_hand_man",
+ "sassy_man"
+ ]
+ },
+ {
+ "emoji": "💁",
+ "aliases": [
+ "tipping_hand_person",
+ "information_desk_person"
+ ]
+ },
+ {
+ "emoji": "💁♀️",
+ "aliases": [
+ "tipping_hand_woman",
+ "sassy_woman"
+ ]
+ },
+ {
+ "emoji": "😫",
+ "aliases": [
+ "tired_face"
+ ]
+ },
+ {
+ "emoji": "™️",
+ "aliases": [
+ "tm"
+ ]
+ },
+ {
+ "emoji": "🇹🇬",
+ "aliases": [
+ "togo"
+ ]
+ },
+ {
+ "emoji": "🚽",
+ "aliases": [
+ "toilet"
+ ]
+ },
+ {
+ "emoji": "🇹🇰",
+ "aliases": [
+ "tokelau"
+ ]
+ },
+ {
+ "emoji": "🗼",
+ "aliases": [
+ "tokyo_tower"
+ ]
+ },
+ {
+ "emoji": "🍅",
+ "aliases": [
+ "tomato"
+ ]
+ },
+ {
+ "emoji": "🇹🇴",
+ "aliases": [
+ "tonga"
+ ]
+ },
+ {
+ "emoji": "👅",
+ "aliases": [
+ "tongue"
+ ]
+ },
+ {
+ "emoji": "🧰",
+ "aliases": [
+ "toolbox"
+ ]
+ },
+ {
+ "emoji": "🦷",
+ "aliases": [
+ "tooth"
+ ]
+ },
+ {
+ "emoji": "🪥",
+ "aliases": [
+ "toothbrush"
+ ]
+ },
+ {
+ "emoji": "🔝",
+ "aliases": [
+ "top"
+ ]
+ },
+ {
+ "emoji": "🎩",
+ "aliases": [
+ "tophat"
+ ]
+ },
+ {
+ "emoji": "🌪️",
+ "aliases": [
+ "tornado"
+ ]
+ },
+ {
+ "emoji": "🇹🇷",
+ "aliases": [
+ "tr"
+ ]
+ },
+ {
+ "emoji": "🖲️",
+ "aliases": [
+ "trackball"
+ ]
+ },
+ {
+ "emoji": "🚜",
+ "aliases": [
+ "tractor"
+ ]
+ },
+ {
+ "emoji": "🚥",
+ "aliases": [
+ "traffic_light"
+ ]
+ },
+ {
+ "emoji": "🚋",
+ "aliases": [
+ "train"
+ ]
+ },
+ {
+ "emoji": "🚆",
+ "aliases": [
+ "train2"
+ ]
+ },
+ {
+ "emoji": "🚊",
+ "aliases": [
+ "tram"
+ ]
+ },
+ {
+ "emoji": "🏳️⚧️",
+ "aliases": [
+ "transgender_flag"
+ ]
+ },
+ {
+ "emoji": "⚧️",
+ "aliases": [
+ "transgender_symbol"
+ ]
+ },
+ {
+ "emoji": "🚩",
+ "aliases": [
+ "triangular_flag_on_post"
+ ]
+ },
+ {
+ "emoji": "📐",
+ "aliases": [
+ "triangular_ruler"
+ ]
+ },
+ {
+ "emoji": "🔱",
+ "aliases": [
+ "trident"
+ ]
+ },
+ {
+ "emoji": "🇹🇹",
+ "aliases": [
+ "trinidad_tobago"
+ ]
+ },
+ {
+ "emoji": "🇹🇦",
+ "aliases": [
+ "tristan_da_cunha"
+ ]
+ },
+ {
+ "emoji": "😤",
+ "aliases": [
+ "triumph"
+ ]
+ },
+ {
+ "emoji": "🧌",
+ "aliases": [
+ "troll"
+ ]
+ },
+ {
+ "emoji": "🚎",
+ "aliases": [
+ "trolleybus"
+ ]
+ },
+ {
+ "emoji": "🏆",
+ "aliases": [
+ "trophy"
+ ]
+ },
+ {
+ "emoji": "🍹",
+ "aliases": [
+ "tropical_drink"
+ ]
+ },
+ {
+ "emoji": "🐠",
+ "aliases": [
+ "tropical_fish"
+ ]
+ },
+ {
+ "emoji": "🚚",
+ "aliases": [
+ "truck"
+ ]
+ },
+ {
+ "emoji": "🎺",
+ "aliases": [
+ "trumpet"
+ ]
+ },
+ {
+ "emoji": "🌷",
+ "aliases": [
+ "tulip"
+ ]
+ },
+ {
+ "emoji": "🥃",
+ "aliases": [
+ "tumbler_glass"
+ ]
+ },
+ {
+ "emoji": "🇹🇳",
+ "aliases": [
+ "tunisia"
+ ]
+ },
+ {
+ "emoji": "🦃",
+ "aliases": [
+ "turkey"
+ ]
+ },
+ {
+ "emoji": "🇹🇲",
+ "aliases": [
+ "turkmenistan"
+ ]
+ },
+ {
+ "emoji": "🇹🇨",
+ "aliases": [
+ "turks_caicos_islands"
+ ]
+ },
+ {
+ "emoji": "🐢",
+ "aliases": [
+ "turtle"
+ ]
+ },
+ {
+ "emoji": "🇹🇻",
+ "aliases": [
+ "tuvalu"
+ ]
+ },
+ {
+ "emoji": "📺",
+ "aliases": [
+ "tv"
+ ]
+ },
+ {
+ "emoji": "🔀",
+ "aliases": [
+ "twisted_rightwards_arrows"
+ ]
+ },
+ {
+ "emoji": "2️⃣",
+ "aliases": [
+ "two"
+ ]
+ },
+ {
+ "emoji": "💕",
+ "aliases": [
+ "two_hearts"
+ ]
+ },
+ {
+ "emoji": "👬",
+ "aliases": [
+ "two_men_holding_hands"
+ ]
+ },
+ {
+ "emoji": "👭",
+ "aliases": [
+ "two_women_holding_hands"
+ ]
+ },
+ {
+ "emoji": "🈹",
+ "aliases": [
+ "u5272"
+ ]
+ },
+ {
+ "emoji": "🈴",
+ "aliases": [
+ "u5408"
+ ]
+ },
+ {
+ "emoji": "🈺",
+ "aliases": [
+ "u55b6"
+ ]
+ },
+ {
+ "emoji": "🈯",
+ "aliases": [
+ "u6307"
+ ]
+ },
+ {
+ "emoji": "🈷️",
+ "aliases": [
+ "u6708"
+ ]
+ },
+ {
+ "emoji": "🈶",
+ "aliases": [
+ "u6709"
+ ]
+ },
+ {
+ "emoji": "🈵",
+ "aliases": [
+ "u6e80"
+ ]
+ },
+ {
+ "emoji": "🈚",
+ "aliases": [
+ "u7121"
+ ]
+ },
+ {
+ "emoji": "🈸",
+ "aliases": [
+ "u7533"
+ ]
+ },
+ {
+ "emoji": "🈲",
+ "aliases": [
+ "u7981"
+ ]
+ },
+ {
+ "emoji": "🈳",
+ "aliases": [
+ "u7a7a"
+ ]
+ },
+ {
+ "emoji": "🇺🇬",
+ "aliases": [
+ "uganda"
+ ]
+ },
+ {
+ "emoji": "🇺🇦",
+ "aliases": [
+ "ukraine"
+ ]
+ },
+ {
+ "emoji": "☔",
+ "aliases": [
+ "umbrella"
+ ]
+ },
+ {
+ "emoji": "😒",
+ "aliases": [
+ "unamused"
+ ]
+ },
+ {
+ "emoji": "🔞",
+ "aliases": [
+ "underage"
+ ]
+ },
+ {
+ "emoji": "🦄",
+ "aliases": [
+ "unicorn"
+ ]
+ },
+ {
+ "emoji": "🇦🇪",
+ "aliases": [
+ "united_arab_emirates"
+ ]
+ },
+ {
+ "emoji": "🇺🇳",
+ "aliases": [
+ "united_nations"
+ ]
+ },
+ {
+ "emoji": "🔓",
+ "aliases": [
+ "unlock"
+ ]
+ },
+ {
+ "emoji": "🆙",
+ "aliases": [
+ "up"
+ ]
+ },
+ {
+ "emoji": "🙃",
+ "aliases": [
+ "upside_down_face"
+ ]
+ },
+ {
+ "emoji": "🇺🇾",
+ "aliases": [
+ "uruguay"
+ ]
+ },
+ {
+ "emoji": "🇺🇸",
+ "aliases": [
+ "us"
+ ]
+ },
+ {
+ "emoji": "🇺🇲",
+ "aliases": [
+ "us_outlying_islands"
+ ]
+ },
+ {
+ "emoji": "🇻🇮",
+ "aliases": [
+ "us_virgin_islands"
+ ]
+ },
+ {
+ "emoji": "🇺🇿",
+ "aliases": [
+ "uzbekistan"
+ ]
+ },
+ {
+ "emoji": "✌️",
+ "aliases": [
+ "v"
+ ]
+ },
+ {
+ "emoji": "🧛",
+ "aliases": [
+ "vampire"
+ ]
+ },
+ {
+ "emoji": "🧛♂️",
+ "aliases": [
+ "vampire_man"
+ ]
+ },
+ {
+ "emoji": "🧛♀️",
+ "aliases": [
+ "vampire_woman"
+ ]
+ },
+ {
+ "emoji": "🇻🇺",
+ "aliases": [
+ "vanuatu"
+ ]
+ },
+ {
+ "emoji": "🇻🇦",
+ "aliases": [
+ "vatican_city"
+ ]
+ },
+ {
+ "emoji": "🇻🇪",
+ "aliases": [
+ "venezuela"
+ ]
+ },
+ {
+ "emoji": "🚦",
+ "aliases": [
+ "vertical_traffic_light"
+ ]
+ },
+ {
+ "emoji": "📼",
+ "aliases": [
+ "vhs"
+ ]
+ },
+ {
+ "emoji": "📳",
+ "aliases": [
+ "vibration_mode"
+ ]
+ },
+ {
+ "emoji": "📹",
+ "aliases": [
+ "video_camera"
+ ]
+ },
+ {
+ "emoji": "🎮",
+ "aliases": [
+ "video_game"
+ ]
+ },
+ {
+ "emoji": "🇻🇳",
+ "aliases": [
+ "vietnam"
+ ]
+ },
+ {
+ "emoji": "🎻",
+ "aliases": [
+ "violin"
+ ]
+ },
+ {
+ "emoji": "♍",
+ "aliases": [
+ "virgo"
+ ]
+ },
+ {
+ "emoji": "🌋",
+ "aliases": [
+ "volcano"
+ ]
+ },
+ {
+ "emoji": "🏐",
+ "aliases": [
+ "volleyball"
+ ]
+ },
+ {
+ "emoji": "🤮",
+ "aliases": [
+ "vomiting_face"
+ ]
+ },
+ {
+ "emoji": "🆚",
+ "aliases": [
+ "vs"
+ ]
+ },
+ {
+ "emoji": "🖖",
+ "aliases": [
+ "vulcan_salute"
+ ]
+ },
+ {
+ "emoji": "🧇",
+ "aliases": [
+ "waffle"
+ ]
+ },
+ {
+ "emoji": "🏴",
+ "aliases": [
+ "wales"
+ ]
+ },
+ {
+ "emoji": "🚶",
+ "aliases": [
+ "walking"
+ ]
+ },
+ {
+ "emoji": "🚶♂️",
+ "aliases": [
+ "walking_man"
+ ]
+ },
+ {
+ "emoji": "🚶♀️",
+ "aliases": [
+ "walking_woman"
+ ]
+ },
+ {
+ "emoji": "🇼🇫",
+ "aliases": [
+ "wallis_futuna"
+ ]
+ },
+ {
+ "emoji": "🌘",
+ "aliases": [
+ "waning_crescent_moon"
+ ]
+ },
+ {
+ "emoji": "🌖",
+ "aliases": [
+ "waning_gibbous_moon"
+ ]
+ },
+ {
+ "emoji": "⚠️",
+ "aliases": [
+ "warning"
+ ]
+ },
+ {
+ "emoji": "🗑️",
+ "aliases": [
+ "wastebasket"
+ ]
+ },
+ {
+ "emoji": "⌚",
+ "aliases": [
+ "watch"
+ ]
+ },
+ {
+ "emoji": "🐃",
+ "aliases": [
+ "water_buffalo"
+ ]
+ },
+ {
+ "emoji": "🤽",
+ "aliases": [
+ "water_polo"
+ ]
+ },
+ {
+ "emoji": "🍉",
+ "aliases": [
+ "watermelon"
+ ]
+ },
+ {
+ "emoji": "👋",
+ "aliases": [
+ "wave"
+ ]
+ },
+ {
+ "emoji": "〰️",
+ "aliases": [
+ "wavy_dash"
+ ]
+ },
+ {
+ "emoji": "🌒",
+ "aliases": [
+ "waxing_crescent_moon"
+ ]
+ },
+ {
+ "emoji": "🚾",
+ "aliases": [
+ "wc"
+ ]
+ },
+ {
+ "emoji": "😩",
+ "aliases": [
+ "weary"
+ ]
+ },
+ {
+ "emoji": "💒",
+ "aliases": [
+ "wedding"
+ ]
+ },
+ {
+ "emoji": "🏋️",
+ "aliases": [
+ "weight_lifting"
+ ]
+ },
+ {
+ "emoji": "🏋️♂️",
+ "aliases": [
+ "weight_lifting_man"
+ ]
+ },
+ {
+ "emoji": "🏋️♀️",
+ "aliases": [
+ "weight_lifting_woman"
+ ]
+ },
+ {
+ "emoji": "🇪🇭",
+ "aliases": [
+ "western_sahara"
+ ]
+ },
+ {
+ "emoji": "🐳",
+ "aliases": [
+ "whale"
+ ]
+ },
+ {
+ "emoji": "🐋",
+ "aliases": [
+ "whale2"
+ ]
+ },
+ {
+ "emoji": "🛞",
+ "aliases": [
+ "wheel"
+ ]
+ },
+ {
+ "emoji": "☸️",
+ "aliases": [
+ "wheel_of_dharma"
+ ]
+ },
+ {
+ "emoji": "♿",
+ "aliases": [
+ "wheelchair"
+ ]
+ },
+ {
+ "emoji": "✅",
+ "aliases": [
+ "white_check_mark"
+ ]
+ },
+ {
+ "emoji": "⚪",
+ "aliases": [
+ "white_circle"
+ ]
+ },
+ {
+ "emoji": "🏳️",
+ "aliases": [
+ "white_flag"
+ ]
+ },
+ {
+ "emoji": "💮",
+ "aliases": [
+ "white_flower"
+ ]
+ },
+ {
+ "emoji": "👨🦳",
+ "aliases": [
+ "white_haired_man"
+ ]
+ },
+ {
+ "emoji": "👩🦳",
+ "aliases": [
+ "white_haired_woman"
+ ]
+ },
+ {
+ "emoji": "🤍",
+ "aliases": [
+ "white_heart"
+ ]
+ },
+ {
+ "emoji": "⬜",
+ "aliases": [
+ "white_large_square"
+ ]
+ },
+ {
+ "emoji": "◽",
+ "aliases": [
+ "white_medium_small_square"
+ ]
+ },
+ {
+ "emoji": "◻️",
+ "aliases": [
+ "white_medium_square"
+ ]
+ },
+ {
+ "emoji": "▫️",
+ "aliases": [
+ "white_small_square"
+ ]
+ },
+ {
+ "emoji": "🔳",
+ "aliases": [
+ "white_square_button"
+ ]
+ },
+ {
+ "emoji": "🥀",
+ "aliases": [
+ "wilted_flower"
+ ]
+ },
+ {
+ "emoji": "🎐",
+ "aliases": [
+ "wind_chime"
+ ]
+ },
+ {
+ "emoji": "🌬️",
+ "aliases": [
+ "wind_face"
+ ]
+ },
+ {
+ "emoji": "🪟",
+ "aliases": [
+ "window"
+ ]
+ },
+ {
+ "emoji": "🍷",
+ "aliases": [
+ "wine_glass"
+ ]
+ },
+ {
+ "emoji": "🪽",
+ "aliases": [
+ "wing"
+ ]
+ },
+ {
+ "emoji": "😉",
+ "aliases": [
+ "wink"
+ ]
+ },
+ {
+ "emoji": "🛜",
+ "aliases": [
+ "wireless"
+ ]
+ },
+ {
+ "emoji": "🐺",
+ "aliases": [
+ "wolf"
+ ]
+ },
+ {
+ "emoji": "👩",
+ "aliases": [
+ "woman"
+ ]
+ },
+ {
+ "emoji": "👩🎨",
+ "aliases": [
+ "woman_artist"
+ ]
+ },
+ {
+ "emoji": "👩🚀",
+ "aliases": [
+ "woman_astronaut"
+ ]
+ },
+ {
+ "emoji": "🧔♀️",
+ "aliases": [
+ "woman_beard"
+ ]
+ },
+ {
+ "emoji": "🤸♀️",
+ "aliases": [
+ "woman_cartwheeling"
+ ]
+ },
+ {
+ "emoji": "👩🍳",
+ "aliases": [
+ "woman_cook"
+ ]
+ },
+ {
+ "emoji": "💃",
+ "aliases": [
+ "woman_dancing",
+ "dancer"
+ ]
+ },
+ {
+ "emoji": "🤦♀️",
+ "aliases": [
+ "woman_facepalming"
+ ]
+ },
+ {
+ "emoji": "👩🏭",
+ "aliases": [
+ "woman_factory_worker"
+ ]
+ },
+ {
+ "emoji": "👩🌾",
+ "aliases": [
+ "woman_farmer"
+ ]
+ },
+ {
+ "emoji": "👩🍼",
+ "aliases": [
+ "woman_feeding_baby"
+ ]
+ },
+ {
+ "emoji": "👩🚒",
+ "aliases": [
+ "woman_firefighter"
+ ]
+ },
+ {
+ "emoji": "👩⚕️",
+ "aliases": [
+ "woman_health_worker"
+ ]
+ },
+ {
+ "emoji": "👩🦽",
+ "aliases": [
+ "woman_in_manual_wheelchair"
+ ]
+ },
+ {
+ "emoji": "👩🦽➡️",
+ "aliases": [
+ "woman_in_manual_wheelchair_facing_right"
+ ]
+ },
+ {
+ "emoji": "👩🦼",
+ "aliases": [
+ "woman_in_motorized_wheelchair"
+ ]
+ },
+ {
+ "emoji": "👩🦼➡️",
+ "aliases": [
+ "woman_in_motorized_wheelchair_facing_right"
+ ]
+ },
+ {
+ "emoji": "🤵♀️",
+ "aliases": [
+ "woman_in_tuxedo"
+ ]
+ },
+ {
+ "emoji": "👩⚖️",
+ "aliases": [
+ "woman_judge"
+ ]
+ },
+ {
+ "emoji": "🤹♀️",
+ "aliases": [
+ "woman_juggling"
+ ]
+ },
+ {
+ "emoji": "🧎♀️➡️",
+ "aliases": [
+ "woman_kneeling_facing_right"
+ ]
+ },
+ {
+ "emoji": "👩🔧",
+ "aliases": [
+ "woman_mechanic"
+ ]
+ },
+ {
+ "emoji": "👩💼",
+ "aliases": [
+ "woman_office_worker"
+ ]
+ },
+ {
+ "emoji": "👩✈️",
+ "aliases": [
+ "woman_pilot"
+ ]
+ },
+ {
+ "emoji": "🤾♀️",
+ "aliases": [
+ "woman_playing_handball"
+ ]
+ },
+ {
+ "emoji": "🤽♀️",
+ "aliases": [
+ "woman_playing_water_polo"
+ ]
+ },
+ {
+ "emoji": "🏃♀️➡️",
+ "aliases": [
+ "woman_running_facing_right"
+ ]
+ },
+ {
+ "emoji": "👩🔬",
+ "aliases": [
+ "woman_scientist"
+ ]
+ },
+ {
+ "emoji": "🤷♀️",
+ "aliases": [
+ "woman_shrugging"
+ ]
+ },
+ {
+ "emoji": "👩🎤",
+ "aliases": [
+ "woman_singer"
+ ]
+ },
+ {
+ "emoji": "👩🎓",
+ "aliases": [
+ "woman_student"
+ ]
+ },
+ {
+ "emoji": "👩🏫",
+ "aliases": [
+ "woman_teacher"
+ ]
+ },
+ {
+ "emoji": "👩💻",
+ "aliases": [
+ "woman_technologist"
+ ]
+ },
+ {
+ "emoji": "🚶♀️➡️",
+ "aliases": [
+ "woman_walking_facing_right"
+ ]
+ },
+ {
+ "emoji": "🧕",
+ "aliases": [
+ "woman_with_headscarf"
+ ]
+ },
+ {
+ "emoji": "👩🦯",
+ "aliases": [
+ "woman_with_probing_cane"
+ ]
+ },
+ {
+ "emoji": "👳♀️",
+ "aliases": [
+ "woman_with_turban"
+ ]
+ },
+ {
+ "emoji": "👰♀️",
+ "aliases": [
+ "woman_with_veil",
+ "bride_with_veil"
+ ]
+ },
+ {
+ "emoji": "👩🦯➡️",
+ "aliases": [
+ "woman_with_white_cane_facing_right"
+ ]
+ },
+ {
+ "emoji": "👚",
+ "aliases": [
+ "womans_clothes"
+ ]
+ },
+ {
+ "emoji": "👒",
+ "aliases": [
+ "womans_hat"
+ ]
+ },
+ {
+ "emoji": "🤼♀️",
+ "aliases": [
+ "women_wrestling"
+ ]
+ },
+ {
+ "emoji": "🚺",
+ "aliases": [
+ "womens"
+ ]
+ },
+ {
+ "emoji": "🪵",
+ "aliases": [
+ "wood"
+ ]
+ },
+ {
+ "emoji": "🥴",
+ "aliases": [
+ "woozy_face"
+ ]
+ },
+ {
+ "emoji": "🗺️",
+ "aliases": [
+ "world_map"
+ ]
+ },
+ {
+ "emoji": "🪱",
+ "aliases": [
+ "worm"
+ ]
+ },
+ {
+ "emoji": "😟",
+ "aliases": [
+ "worried"
+ ]
+ },
+ {
+ "emoji": "🔧",
+ "aliases": [
+ "wrench"
+ ]
+ },
+ {
+ "emoji": "🤼",
+ "aliases": [
+ "wrestling"
+ ]
+ },
+ {
+ "emoji": "✍️",
+ "aliases": [
+ "writing_hand"
+ ]
+ },
+ {
+ "emoji": "❌",
+ "aliases": [
+ "x"
+ ]
+ },
+ {
+ "emoji": "🩻",
+ "aliases": [
+ "x_ray"
+ ]
+ },
+ {
+ "emoji": "🧶",
+ "aliases": [
+ "yarn"
+ ]
+ },
+ {
+ "emoji": "🥱",
+ "aliases": [
+ "yawning_face"
+ ]
+ },
+ {
+ "emoji": "🟡",
+ "aliases": [
+ "yellow_circle"
+ ]
+ },
+ {
+ "emoji": "💛",
+ "aliases": [
+ "yellow_heart"
+ ]
+ },
+ {
+ "emoji": "🟨",
+ "aliases": [
+ "yellow_square"
+ ]
+ },
+ {
+ "emoji": "🇾🇪",
+ "aliases": [
+ "yemen"
+ ]
+ },
+ {
+ "emoji": "💴",
+ "aliases": [
+ "yen"
+ ]
+ },
+ {
+ "emoji": "☯️",
+ "aliases": [
+ "yin_yang"
+ ]
+ },
+ {
+ "emoji": "🪀",
+ "aliases": [
+ "yo_yo"
+ ]
+ },
+ {
+ "emoji": "😋",
+ "aliases": [
+ "yum"
+ ]
+ },
+ {
+ "emoji": "🇿🇲",
+ "aliases": [
+ "zambia"
+ ]
+ },
+ {
+ "emoji": "🤪",
+ "aliases": [
+ "zany_face"
+ ]
+ },
+ {
+ "emoji": "⚡",
+ "aliases": [
+ "zap"
+ ]
+ },
+ {
+ "emoji": "🦓",
+ "aliases": [
+ "zebra"
+ ]
+ },
+ {
+ "emoji": "0️⃣",
+ "aliases": [
+ "zero"
+ ]
+ },
+ {
+ "emoji": "🇿🇼",
+ "aliases": [
+ "zimbabwe"
+ ]
+ },
+ {
+ "emoji": "🤐",
+ "aliases": [
+ "zipper_mouth_face"
+ ]
+ },
+ {
+ "emoji": "🧟",
+ "aliases": [
+ "zombie"
+ ]
+ },
+ {
+ "emoji": "🧟♂️",
+ "aliases": [
+ "zombie_man"
+ ]
+ },
+ {
+ "emoji": "🧟♀️",
+ "aliases": [
+ "zombie_woman"
+ ]
+ },
+ {
+ "emoji": "💤",
+ "aliases": [
+ "zzz"
+ ]
+ }
+]
diff --git a/assets/go-licenses.json b/assets/go-licenses.json
index b105757683..58f45b980c 100644
--- a/assets/go-licenses.json
+++ b/assets/go-licenses.json
@@ -27,7 +27,7 @@
{
"name": "connectrpc.com/connect",
"path": "connectrpc.com/connect/LICENSE",
- "licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright 2021-2024 The Connect Authors\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
+ "licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright 2021-2025 The Connect Authors\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
"name": "dario.cat/mergo",
@@ -114,6 +114,11 @@
"path": "github.com/ProtonMail/go-crypto/LICENSE",
"licenseText": "Copyright (c) 2009 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
+ {
+ "name": "github.com/PuerkitoBio/goquery",
+ "path": "github.com/PuerkitoBio/goquery/LICENSE",
+ "licenseText": "Copyright (c) 2012-2021, Martin Angers \u0026 Contributors\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\n\n* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.\n\n* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.\n\n* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
+ },
{
"name": "github.com/RoaringBitmap/roaring/v2",
"path": "github.com/RoaringBitmap/roaring/v2/LICENSE",
@@ -139,6 +144,11 @@
"path": "github.com/andybalholm/brotli/LICENSE",
"licenseText": "Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n"
},
+ {
+ "name": "github.com/andybalholm/cascadia",
+ "path": "github.com/andybalholm/cascadia/LICENSE",
+ "licenseText": "Copyright (c) 2011 Andy Balholm. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
+ },
{
"name": "github.com/anmitsu/go-shlex",
"path": "github.com/anmitsu/go-shlex/LICENSE",
@@ -190,8 +200,8 @@
"licenseText": "The MIT License (MIT)\n\nCopyright (c) 2015 Aymerick JEHANNE\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n"
},
{
- "name": "github.com/beorn7/perks/quantile",
- "path": "github.com/beorn7/perks/quantile/LICENSE",
+ "name": "github.com/beorn7/perks",
+ "path": "github.com/beorn7/perks/LICENSE",
"licenseText": "Copyright (C) 2013 Blake Mizerany\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\nNONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\nLIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\nWITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
},
{
@@ -325,8 +335,8 @@
"licenseText": "The MIT License (MIT)\n\nCopyright (c) 2014 Florian Sundermann\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
{
- "name": "github.com/bradfitz/gomemcache/memcache",
- "path": "github.com/bradfitz/gomemcache/memcache/LICENSE",
+ "name": "github.com/bradfitz/gomemcache",
+ "path": "github.com/bradfitz/gomemcache/LICENSE",
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
@@ -355,8 +365,8 @@
"licenseText": "Copyright (c) 2016 Caleb Spare\n\nMIT License\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\nNONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\nLIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\nWITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
},
{
- "name": "github.com/charmbracelet/git-lfs-transfer/transfer",
- "path": "github.com/charmbracelet/git-lfs-transfer/transfer/LICENSE",
+ "name": "github.com/charmbracelet/git-lfs-transfer",
+ "path": "github.com/charmbracelet/git-lfs-transfer/LICENSE",
"licenseText": "MIT License\n\nCopyright (c) 2022-2023 Charmbracelet, Inc\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
{
@@ -385,8 +395,8 @@
"licenseText": "Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"{}\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright {yyyy} {name of copyright owner}\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n"
},
{
- "name": "github.com/cpuguy83/go-md2man/v2/md2man",
- "path": "github.com/cpuguy83/go-md2man/v2/md2man/LICENSE.md",
+ "name": "github.com/cpuguy83/go-md2man/v2",
+ "path": "github.com/cpuguy83/go-md2man/v2/LICENSE.md",
"licenseText": "The MIT License (MIT)\n\nCopyright (c) 2014 Brian Goff\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
{
@@ -395,8 +405,8 @@
"licenseText": "Copyright (C) 2014-2015 Docker Inc \u0026 Go Authors. All rights reserved.\nCopyright (C) 2017-2024 SUSE LLC. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "github.com/davecgh/go-spew/spew",
- "path": "github.com/davecgh/go-spew/spew/LICENSE",
+ "name": "github.com/davecgh/go-spew",
+ "path": "github.com/davecgh/go-spew/LICENSE",
"licenseText": "ISC License\n\nCopyright (c) 2012-2016 Dave Collins \u003cdave@davec.name\u003e\n\nPermission to use, copy, modify, and/or distribute this software for any\npurpose with or without fee is hereby granted, provided that the above\ncopyright notice and this permission notice appear in all copies.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\nWITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\nMERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\nANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\nWHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\nACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\nOR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n"
},
{
@@ -409,16 +419,6 @@
"path": "github.com/dimiro1/reply/LICENSE",
"licenseText": "MIT License\n\nCopyright (c) Discourse\nCopyright (c) Claudemiro\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
- {
- "name": "github.com/djherbis/buffer",
- "path": "github.com/djherbis/buffer/LICENSE.txt",
- "licenseText": "The MIT License (MIT)\n\nCopyright (c) 2015 Dustin H\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
- },
- {
- "name": "github.com/djherbis/nio/v3",
- "path": "github.com/djherbis/nio/v3/LICENSE.txt",
- "licenseText": "The MIT License (MIT)\n\nCopyright (c) 2015 Dustin H\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
- },
{
"name": "github.com/dlclark/regexp2",
"path": "github.com/dlclark/regexp2/LICENSE",
@@ -574,6 +574,11 @@
"path": "github.com/go-webauthn/webauthn/LICENSE",
"licenseText": "Copyright (c) 2017 Duo Security, Inc. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions\nare met:\n\n1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n3. Neither the name of the copyright holder nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS\nIS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,\nTHE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\nPURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR\nCONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\nEXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\nPROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\nPROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\nLIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\nNEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\nCopyright (c) 2021-2022 github.com/go-webauthn/webauthn authors.\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the\nfollowing conditions are met:\n\n1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following\n disclaimer.\n\n2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following\n disclaimer in the documentation and/or other materials provided with the distribution.\n\n3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products\n derived from this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,\nINCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\nWHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\nTHIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
},
+ {
+ "name": "github.com/go-webauthn/x",
+ "path": "github.com/go-webauthn/x/LICENSE",
+ "licenseText": "Copyright (c) 2021-2023 github.com/go-webauthn authors.\n\nRedistribution and use in source and binary forms, with or without modification, are permitted provided that the\nfollowing conditions are met:\n\n1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following\n disclaimer.\n\n2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following\n disclaimer in the documentation and/or other materials provided with the distribution.\n\n3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products\n derived from this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,\nINCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\nWHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\nTHIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
+ },
{
"name": "github.com/go-webauthn/x/revoke",
"path": "github.com/go-webauthn/x/revoke/LICENSE",
@@ -594,11 +599,6 @@
"path": "github.com/gogs/go-gogs-client/LICENSE",
"licenseText": "The MIT License (MIT)\n\nCopyright (c) 2014 Go Git Service\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n"
},
- {
- "name": "github.com/golang-jwt/jwt/v4",
- "path": "github.com/golang-jwt/jwt/v4/LICENSE",
- "licenseText": "Copyright (c) 2012 Dave Grijalva\nCopyright (c) 2021 golang-jwt maintainers\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n"
- },
{
"name": "github.com/golang-jwt/jwt/v5",
"path": "github.com/golang-jwt/jwt/v5/LICENSE",
@@ -615,15 +615,10 @@
"licenseText": "Copyright (c) 2017 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "github.com/golang/groupcache/lru",
- "path": "github.com/golang/groupcache/lru/LICENSE",
+ "name": "github.com/golang/groupcache",
+ "path": "github.com/golang/groupcache/LICENSE",
"licenseText": "Apache License\nVersion 2.0, January 2004\nhttp://www.apache.org/licenses/\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n1. Definitions.\n\n\"License\" shall mean the terms and conditions for use, reproduction, and\ndistribution as defined by Sections 1 through 9 of this document.\n\n\"Licensor\" shall mean the copyright owner or entity authorized by the copyright\nowner that is granting the License.\n\n\"Legal Entity\" shall mean the union of the acting entity and all other entities\nthat control, are controlled by, or are under common control with that entity.\nFor the purposes of this definition, \"control\" means (i) the power, direct or\nindirect, to cause the direction or management of such entity, whether by\ncontract or otherwise, or (ii) ownership of fifty percent (50%) or more of the\noutstanding shares, or (iii) beneficial ownership of such entity.\n\n\"You\" (or \"Your\") shall mean an individual or Legal Entity exercising\npermissions granted by this License.\n\n\"Source\" form shall mean the preferred form for making modifications, including\nbut not limited to software source code, documentation source, and configuration\nfiles.\n\n\"Object\" form shall mean any form resulting from mechanical transformation or\ntranslation of a Source form, including but not limited to compiled object code,\ngenerated documentation, and conversions to other media types.\n\n\"Work\" shall mean the work of authorship, whether in Source or Object form, made\navailable under the License, as indicated by a copyright notice that is included\nin or attached to the work (an example is provided in the Appendix below).\n\n\"Derivative Works\" shall mean any work, whether in Source or Object form, that\nis based on (or derived from) the Work and for which the editorial revisions,\nannotations, elaborations, or other modifications represent, as a whole, an\noriginal work of authorship. 
For the purposes of this License, Derivative Works\nshall not include works that remain separable from, or merely link (or bind by\nname) to the interfaces of, the Work and Derivative Works thereof.\n\n\"Contribution\" shall mean any work of authorship, including the original version\nof the Work and any modifications or additions to that Work or Derivative Works\nthereof, that is intentionally submitted to Licensor for inclusion in the Work\nby the copyright owner or by an individual or Legal Entity authorized to submit\non behalf of the copyright owner. For the purposes of this definition,\n\"submitted\" means any form of electronic, verbal, or written communication sent\nto the Licensor or its representatives, including but not limited to\ncommunication on electronic mailing lists, source code control systems, and\nissue tracking systems that are managed by, or on behalf of, the Licensor for\nthe purpose of discussing and improving the Work, but excluding communication\nthat is conspicuously marked or otherwise designated in writing by the copyright\nowner as \"Not a Contribution.\"\n\n\"Contributor\" shall mean Licensor and any individual or Legal Entity on behalf\nof whom a Contribution has been received by Licensor and subsequently\nincorporated within the Work.\n\n2. Grant of Copyright License.\n\nSubject to the terms and conditions of this License, each Contributor hereby\ngrants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,\nirrevocable copyright license to reproduce, prepare Derivative Works of,\npublicly display, publicly perform, sublicense, and distribute the Work and such\nDerivative Works in Source or Object form.\n\n3. 
Grant of Patent License.\n\nSubject to the terms and conditions of this License, each Contributor hereby\ngrants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,\nirrevocable (except as stated in this section) patent license to make, have\nmade, use, offer to sell, sell, import, and otherwise transfer the Work, where\nsuch license applies only to those patent claims licensable by such Contributor\nthat are necessarily infringed by their Contribution(s) alone or by combination\nof their Contribution(s) with the Work to which such Contribution(s) was\nsubmitted. If You institute patent litigation against any entity (including a\ncross-claim or counterclaim in a lawsuit) alleging that the Work or a\nContribution incorporated within the Work constitutes direct or contributory\npatent infringement, then any patent licenses granted to You under this License\nfor that Work shall terminate as of the date such litigation is filed.\n\n4. Redistribution.\n\nYou may reproduce and distribute copies of the Work or Derivative Works thereof\nin any medium, with or without modifications, and in Source or Object form,\nprovided that You meet the following conditions:\n\nYou must give any other recipients of the Work or Derivative Works a copy of\nthis License; and\nYou must cause any modified files to carry prominent notices stating that You\nchanged the files; and\nYou must retain, in the Source form of any Derivative Works that You distribute,\nall copyright, patent, trademark, and attribution notices from the Source form\nof the Work, excluding those notices that do not pertain to any part of the\nDerivative Works; and\nIf the Work includes a \"NOTICE\" text file as part of its distribution, then any\nDerivative Works that You distribute must include a readable copy of the\nattribution notices contained within such NOTICE file, excluding those notices\nthat do not pertain to any part of the Derivative Works, in at least one of the\nfollowing places: within a 
NOTICE text file distributed as part of the\nDerivative Works; within the Source form or documentation, if provided along\nwith the Derivative Works; or, within a display generated by the Derivative\nWorks, if and wherever such third-party notices normally appear. The contents of\nthe NOTICE file are for informational purposes only and do not modify the\nLicense. You may add Your own attribution notices within Derivative Works that\nYou distribute, alongside or as an addendum to the NOTICE text from the Work,\nprovided that such additional attribution notices cannot be construed as\nmodifying the License.\nYou may add Your own copyright statement to Your modifications and may provide\nadditional or different license terms and conditions for use, reproduction, or\ndistribution of Your modifications, or for any such Derivative Works as a whole,\nprovided Your use, reproduction, and distribution of the Work otherwise complies\nwith the conditions stated in this License.\n\n5. Submission of Contributions.\n\nUnless You explicitly state otherwise, any Contribution intentionally submitted\nfor inclusion in the Work by You to the Licensor shall be under the terms and\nconditions of this License, without any additional terms or conditions.\nNotwithstanding the above, nothing herein shall supersede or modify the terms of\nany separate license agreement you may have executed with Licensor regarding\nsuch Contributions.\n\n6. Trademarks.\n\nThis License does not grant permission to use the trade names, trademarks,\nservice marks, or product names of the Licensor, except as required for\nreasonable and customary use in describing the origin of the Work and\nreproducing the content of the NOTICE file.\n\n7. 
Disclaimer of Warranty.\n\nUnless required by applicable law or agreed to in writing, Licensor provides the\nWork (and each Contributor provides its Contributions) on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,\nincluding, without limitation, any warranties or conditions of TITLE,\nNON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are\nsolely responsible for determining the appropriateness of using or\nredistributing the Work and assume any risks associated with Your exercise of\npermissions under this License.\n\n8. Limitation of Liability.\n\nIn no event and under no legal theory, whether in tort (including negligence),\ncontract, or otherwise, unless required by applicable law (such as deliberate\nand grossly negligent acts) or agreed to in writing, shall any Contributor be\nliable to You for damages, including any direct, indirect, special, incidental,\nor consequential damages of any character arising as a result of this License or\nout of the use or inability to use the Work (including but not limited to\ndamages for loss of goodwill, work stoppage, computer failure or malfunction, or\nany and all other commercial damages or losses), even if such Contributor has\nbeen advised of the possibility of such damages.\n\n9. Accepting Warranty or Additional Liability.\n\nWhile redistributing the Work or Derivative Works thereof, You may choose to\noffer, and charge a fee for, acceptance of support, warranty, indemnity, or\nother liability obligations and/or rights consistent with this License. 
However,\nin accepting such obligations, You may act only on Your own behalf and on Your\nsole responsibility, not on behalf of any other Contributor, and only if You\nagree to indemnify, defend, and hold each Contributor harmless for any liability\nincurred by, or claims asserted against, such Contributor by reason of your\naccepting any such warranty or additional liability.\n\nEND OF TERMS AND CONDITIONS\n\nAPPENDIX: How to apply the Apache License to your work\n\nTo apply the Apache License to your work, attach the following boilerplate\nnotice, with the fields enclosed by brackets \"[]\" replaced with your own\nidentifying information. (Don't include the brackets!) The text should be\nenclosed in the appropriate comment syntax for the file format. We also\nrecommend that a file or class name and description of purpose be included on\nthe same \"printed page\" as the copyright notice for easier identification within\nthird-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
- {
- "name": "github.com/golang/protobuf/proto",
- "path": "github.com/golang/protobuf/proto/LICENSE",
- "licenseText": "Copyright 2010 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n"
- },
{
"name": "github.com/golang/snappy",
"path": "github.com/golang/snappy/LICENSE",
@@ -635,18 +630,18 @@
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
- "name": "github.com/google/flatbuffers/go",
- "path": "github.com/google/flatbuffers/go/LICENSE",
+ "name": "github.com/google/flatbuffers",
+ "path": "github.com/google/flatbuffers/LICENSE",
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
- "name": "github.com/google/go-github/v74/github",
- "path": "github.com/google/go-github/v74/github/LICENSE",
+ "name": "github.com/google/go-github/v74",
+ "path": "github.com/google/go-github/v74/LICENSE",
"licenseText": "Copyright (c) 2013 The go-github AUTHORS. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "github.com/google/go-querystring/query",
- "path": "github.com/google/go-querystring/query/LICENSE",
+ "name": "github.com/google/go-querystring",
+ "path": "github.com/google/go-querystring/LICENSE",
"licenseText": "Copyright (c) 2013 Google. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
@@ -660,8 +655,8 @@
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
- "name": "github.com/google/pprof/profile",
- "path": "github.com/google/pprof/profile/LICENSE",
+ "name": "github.com/google/pprof",
+ "path": "github.com/google/pprof/LICENSE",
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
@@ -670,8 +665,8 @@
"licenseText": "Copyright (c) 2009,2014 Google Inc. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "github.com/gorilla/css/scanner",
- "path": "github.com/gorilla/css/scanner/LICENSE",
+ "name": "github.com/gorilla/css",
+ "path": "github.com/gorilla/css/LICENSE",
"licenseText": "Copyright (c) 2023 The Gorilla Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n\t * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n\t * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n\t * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n"
},
{
@@ -717,7 +712,7 @@
{
"name": "github.com/hashicorp/go-version",
"path": "github.com/hashicorp/go-version/LICENSE",
- "licenseText": "Mozilla Public License, version 2.0\n\n1. Definitions\n\n1.1. “Contributor”\n\n means each individual or legal entity that creates, contributes to the\n creation of, or owns Covered Software.\n\n1.2. “Contributor Version”\n\n means the combination of the Contributions of others (if any) used by a\n Contributor and that particular Contributor’s Contribution.\n\n1.3. “Contribution”\n\n means Covered Software of a particular Contributor.\n\n1.4. “Covered Software”\n\n means Source Code Form to which the initial Contributor has attached the\n notice in Exhibit A, the Executable Form of such Source Code Form, and\n Modifications of such Source Code Form, in each case including portions\n thereof.\n\n1.5. “Incompatible With Secondary Licenses”\n means\n\n a. that the initial Contributor has attached the notice described in\n Exhibit B to the Covered Software; or\n\n b. that the Covered Software was made available under the terms of version\n 1.1 or earlier of the License, but not also under the terms of a\n Secondary License.\n\n1.6. “Executable Form”\n\n means any form of the work other than Source Code Form.\n\n1.7. “Larger Work”\n\n means a work that combines Covered Software with other material, in a separate\n file or files, that is not Covered Software.\n\n1.8. “License”\n\n means this document.\n\n1.9. “Licensable”\n\n means having the right to grant, to the maximum extent possible, whether at the\n time of the initial grant or subsequently, any and all of the rights conveyed by\n this License.\n\n1.10. “Modifications”\n\n means any of the following:\n\n a. any file in Source Code Form that results from an addition to, deletion\n from, or modification of the contents of Covered Software; or\n\n b. any new file in Source Code Form that contains any Covered Software.\n\n1.11. 
“Patent Claims” of a Contributor\n\n means any patent claim(s), including without limitation, method, process,\n and apparatus claims, in any patent Licensable by such Contributor that\n would be infringed, but for the grant of the License, by the making,\n using, selling, offering for sale, having made, import, or transfer of\n either its Contributions or its Contributor Version.\n\n1.12. “Secondary License”\n\n means either the GNU General Public License, Version 2.0, the GNU Lesser\n General Public License, Version 2.1, the GNU Affero General Public\n License, Version 3.0, or any later versions of those licenses.\n\n1.13. “Source Code Form”\n\n means the form of the work preferred for making modifications.\n\n1.14. “You” (or “Your”)\n\n means an individual or a legal entity exercising rights under this\n License. For legal entities, “You” includes any entity that controls, is\n controlled by, or is under common control with You. For purposes of this\n definition, “control” means (a) the power, direct or indirect, to cause\n the direction or management of such entity, whether by contract or\n otherwise, or (b) ownership of more than fifty percent (50%) of the\n outstanding shares or beneficial ownership of such entity.\n\n\n2. License Grants and Conditions\n\n2.1. Grants\n\n Each Contributor hereby grants You a world-wide, royalty-free,\n non-exclusive license:\n\n a. under intellectual property rights (other than patent or trademark)\n Licensable by such Contributor to use, reproduce, make available,\n modify, display, perform, distribute, and otherwise exploit its\n Contributions, either on an unmodified basis, with Modifications, or as\n part of a Larger Work; and\n\n b. under Patent Claims of such Contributor to make, use, sell, offer for\n sale, have made, import, and otherwise transfer either its Contributions\n or its Contributor Version.\n\n2.2. 
Effective Date\n\n The licenses granted in Section 2.1 with respect to any Contribution become\n effective for each Contribution on the date the Contributor first distributes\n such Contribution.\n\n2.3. Limitations on Grant Scope\n\n The licenses granted in this Section 2 are the only rights granted under this\n License. No additional rights or licenses will be implied from the distribution\n or licensing of Covered Software under this License. Notwithstanding Section\n 2.1(b) above, no patent license is granted by a Contributor:\n\n a. for any code that a Contributor has removed from Covered Software; or\n\n b. for infringements caused by: (i) Your and any other third party’s\n modifications of Covered Software, or (ii) the combination of its\n Contributions with other software (except as part of its Contributor\n Version); or\n\n c. under Patent Claims infringed by Covered Software in the absence of its\n Contributions.\n\n This License does not grant any rights in the trademarks, service marks, or\n logos of any Contributor (except as may be necessary to comply with the\n notice requirements in Section 3.4).\n\n2.4. Subsequent Licenses\n\n No Contributor makes additional grants as a result of Your choice to\n distribute the Covered Software under a subsequent version of this License\n (see Section 10.2) or under the terms of a Secondary License (if permitted\n under the terms of Section 3.3).\n\n2.5. Representation\n\n Each Contributor represents that the Contributor believes its Contributions\n are its original creation(s) or it has sufficient rights to grant the\n rights to its Contributions conveyed by this License.\n\n2.6. Fair Use\n\n This License is not intended to limit any rights You have under applicable\n copyright doctrines of fair use, fair dealing, or other equivalents.\n\n2.7. Conditions\n\n Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in\n Section 2.1.\n\n\n3. Responsibilities\n\n3.1. 
Distribution of Source Form\n\n All distribution of Covered Software in Source Code Form, including any\n Modifications that You create or to which You contribute, must be under the\n terms of this License. You must inform recipients that the Source Code Form\n of the Covered Software is governed by the terms of this License, and how\n they can obtain a copy of this License. You may not attempt to alter or\n restrict the recipients’ rights in the Source Code Form.\n\n3.2. Distribution of Executable Form\n\n If You distribute Covered Software in Executable Form then:\n\n a. such Covered Software must also be made available in Source Code Form,\n as described in Section 3.1, and You must inform recipients of the\n Executable Form how they can obtain a copy of such Source Code Form by\n reasonable means in a timely manner, at a charge no more than the cost\n of distribution to the recipient; and\n\n b. You may distribute such Executable Form under the terms of this License,\n or sublicense it under different terms, provided that the license for\n the Executable Form does not attempt to limit or alter the recipients’\n rights in the Source Code Form under this License.\n\n3.3. Distribution of a Larger Work\n\n You may create and distribute a Larger Work under terms of Your choice,\n provided that You also comply with the requirements of this License for the\n Covered Software. If the Larger Work is a combination of Covered Software\n with a work governed by one or more Secondary Licenses, and the Covered\n Software is not Incompatible With Secondary Licenses, this License permits\n You to additionally distribute such Covered Software under the terms of\n such Secondary License(s), so that the recipient of the Larger Work may, at\n their option, further distribute the Covered Software under the terms of\n either this License or such Secondary License(s).\n\n3.4. 
Notices\n\n You may not remove or alter the substance of any license notices (including\n copyright notices, patent notices, disclaimers of warranty, or limitations\n of liability) contained within the Source Code Form of the Covered\n Software, except that You may alter any license notices to the extent\n required to remedy known factual inaccuracies.\n\n3.5. Application of Additional Terms\n\n You may choose to offer, and to charge a fee for, warranty, support,\n indemnity or liability obligations to one or more recipients of Covered\n Software. However, You may do so only on Your own behalf, and not on behalf\n of any Contributor. You must make it absolutely clear that any such\n warranty, support, indemnity, or liability obligation is offered by You\n alone, and You hereby agree to indemnify every Contributor for any\n liability incurred by such Contributor as a result of warranty, support,\n indemnity or liability terms You offer. You may include additional\n disclaimers of warranty and limitations of liability specific to any\n jurisdiction.\n\n4. Inability to Comply Due to Statute or Regulation\n\n If it is impossible for You to comply with any of the terms of this License\n with respect to some or all of the Covered Software due to statute, judicial\n order, or regulation then You must: (a) comply with the terms of this License\n to the maximum extent possible; and (b) describe the limitations and the code\n they affect. Such description must be placed in a text file included with all\n distributions of the Covered Software under this License. Except to the\n extent prohibited by statute or regulation, such description must be\n sufficiently detailed for a recipient of ordinary skill to be able to\n understand it.\n\n5. Termination\n\n5.1. The rights granted under this License will terminate automatically if You\n fail to comply with any of its terms. 
However, if You become compliant,\n then the rights granted under this License from a particular Contributor\n are reinstated (a) provisionally, unless and until such Contributor\n explicitly and finally terminates Your grants, and (b) on an ongoing basis,\n if such Contributor fails to notify You of the non-compliance by some\n reasonable means prior to 60 days after You have come back into compliance.\n Moreover, Your grants from a particular Contributor are reinstated on an\n ongoing basis if such Contributor notifies You of the non-compliance by\n some reasonable means, this is the first time You have received notice of\n non-compliance with this License from such Contributor, and You become\n compliant prior to 30 days after Your receipt of the notice.\n\n5.2. If You initiate litigation against any entity by asserting a patent\n infringement claim (excluding declaratory judgment actions, counter-claims,\n and cross-claims) alleging that a Contributor Version directly or\n indirectly infringes any patent, then the rights granted to You by any and\n all Contributors for the Covered Software under Section 2.1 of this License\n shall terminate.\n\n5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user\n license agreements (excluding distributors and resellers) which have been\n validly granted by You or Your distributors under this License prior to\n termination shall survive termination.\n\n6. Disclaimer of Warranty\n\n Covered Software is provided under this License on an “as is” basis, without\n warranty of any kind, either expressed, implied, or statutory, including,\n without limitation, warranties that the Covered Software is free of defects,\n merchantable, fit for a particular purpose or non-infringing. 
The entire\n risk as to the quality and performance of the Covered Software is with You.\n Should any Covered Software prove defective in any respect, You (not any\n Contributor) assume the cost of any necessary servicing, repair, or\n correction. This disclaimer of warranty constitutes an essential part of this\n License. No use of any Covered Software is authorized under this License\n except under this disclaimer.\n\n7. Limitation of Liability\n\n Under no circumstances and under no legal theory, whether tort (including\n negligence), contract, or otherwise, shall any Contributor, or anyone who\n distributes Covered Software as permitted above, be liable to You for any\n direct, indirect, special, incidental, or consequential damages of any\n character including, without limitation, damages for lost profits, loss of\n goodwill, work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses, even if such party shall have been\n informed of the possibility of such damages. This limitation of liability\n shall not apply to liability for death or personal injury resulting from such\n party’s negligence to the extent applicable law prohibits such limitation.\n Some jurisdictions do not allow the exclusion or limitation of incidental or\n consequential damages, so this exclusion and limitation may not apply to You.\n\n8. Litigation\n\n Any litigation relating to this License may be brought only in the courts of\n a jurisdiction where the defendant maintains its principal place of business\n and such litigation shall be governed by laws of that jurisdiction, without\n reference to its conflict-of-law provisions. Nothing in this Section shall\n prevent a party’s ability to bring cross-claims or counter-claims.\n\n9. Miscellaneous\n\n This License represents the complete agreement concerning the subject matter\n hereof. 
If any provision of this License is held to be unenforceable, such\n provision shall be reformed only to the extent necessary to make it\n enforceable. Any law or regulation which provides that the language of a\n contract shall be construed against the drafter shall not be used to construe\n this License against a Contributor.\n\n\n10. Versions of the License\n\n10.1. New Versions\n\n Mozilla Foundation is the license steward. Except as provided in Section\n 10.3, no one other than the license steward has the right to modify or\n publish new versions of this License. Each version will be given a\n distinguishing version number.\n\n10.2. Effect of New Versions\n\n You may distribute the Covered Software under the terms of the version of\n the License under which You originally received the Covered Software, or\n under the terms of any subsequent version published by the license\n steward.\n\n10.3. Modified Versions\n\n If you create software not governed by this License, and you want to\n create a new license for such software, you may create and use a modified\n version of this License if you rename the license and remove any\n references to the name of the license steward (except to note that such\n modified license differs from this License).\n\n10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses\n If You choose to distribute Source Code Form that is Incompatible With\n Secondary Licenses under the terms of this version of the License, the\n notice described in Exhibit B of this License must be attached.\n\nExhibit A - Source Code Form License Notice\n\n This Source Code Form is subject to the\n terms of the Mozilla Public License, v.\n 2.0. 
If a copy of the MPL was not\n distributed with this file, You can\n obtain one at\n http://mozilla.org/MPL/2.0/.\n\nIf it is not possible or desirable to put the notice in a particular file, then\nYou may include the notice in a location (such as a LICENSE file in a relevant\ndirectory) where a recipient would be likely to look for such a notice.\n\nYou may add additional accurate notices of copyright ownership.\n\nExhibit B - “Incompatible With Secondary Licenses” Notice\n\n This Source Code Form is “Incompatible\n With Secondary Licenses”, as defined by\n the Mozilla Public License, v. 2.0.\n\n"
+ "licenseText": "Copyright IBM Corp. 2014, 2025\n\nMozilla Public License, version 2.0\n\n1. Definitions\n\n1.1. “Contributor”\n\n means each individual or legal entity that creates, contributes to the\n creation of, or owns Covered Software.\n\n1.2. “Contributor Version”\n\n means the combination of the Contributions of others (if any) used by a\n Contributor and that particular Contributor’s Contribution.\n\n1.3. “Contribution”\n\n means Covered Software of a particular Contributor.\n\n1.4. “Covered Software”\n\n means Source Code Form to which the initial Contributor has attached the\n notice in Exhibit A, the Executable Form of such Source Code Form, and\n Modifications of such Source Code Form, in each case including portions\n thereof.\n\n1.5. “Incompatible With Secondary Licenses”\n means\n\n a. that the initial Contributor has attached the notice described in\n Exhibit B to the Covered Software; or\n\n b. that the Covered Software was made available under the terms of version\n 1.1 or earlier of the License, but not also under the terms of a\n Secondary License.\n\n1.6. “Executable Form”\n\n means any form of the work other than Source Code Form.\n\n1.7. “Larger Work”\n\n means a work that combines Covered Software with other material, in a separate\n file or files, that is not Covered Software.\n\n1.8. “License”\n\n means this document.\n\n1.9. “Licensable”\n\n means having the right to grant, to the maximum extent possible, whether at the\n time of the initial grant or subsequently, any and all of the rights conveyed by\n this License.\n\n1.10. “Modifications”\n\n means any of the following:\n\n a. any file in Source Code Form that results from an addition to, deletion\n from, or modification of the contents of Covered Software; or\n\n b. any new file in Source Code Form that contains any Covered Software.\n\n1.11. 
“Patent Claims” of a Contributor\n\n means any patent claim(s), including without limitation, method, process,\n and apparatus claims, in any patent Licensable by such Contributor that\n would be infringed, but for the grant of the License, by the making,\n using, selling, offering for sale, having made, import, or transfer of\n either its Contributions or its Contributor Version.\n\n1.12. “Secondary License”\n\n means either the GNU General Public License, Version 2.0, the GNU Lesser\n General Public License, Version 2.1, the GNU Affero General Public\n License, Version 3.0, or any later versions of those licenses.\n\n1.13. “Source Code Form”\n\n means the form of the work preferred for making modifications.\n\n1.14. “You” (or “Your”)\n\n means an individual or a legal entity exercising rights under this\n License. For legal entities, “You” includes any entity that controls, is\n controlled by, or is under common control with You. For purposes of this\n definition, “control” means (a) the power, direct or indirect, to cause\n the direction or management of such entity, whether by contract or\n otherwise, or (b) ownership of more than fifty percent (50%) of the\n outstanding shares or beneficial ownership of such entity.\n\n\n2. License Grants and Conditions\n\n2.1. Grants\n\n Each Contributor hereby grants You a world-wide, royalty-free,\n non-exclusive license:\n\n a. under intellectual property rights (other than patent or trademark)\n Licensable by such Contributor to use, reproduce, make available,\n modify, display, perform, distribute, and otherwise exploit its\n Contributions, either on an unmodified basis, with Modifications, or as\n part of a Larger Work; and\n\n b. under Patent Claims of such Contributor to make, use, sell, offer for\n sale, have made, import, and otherwise transfer either its Contributions\n or its Contributor Version.\n\n2.2. 
Effective Date\n\n The licenses granted in Section 2.1 with respect to any Contribution become\n effective for each Contribution on the date the Contributor first distributes\n such Contribution.\n\n2.3. Limitations on Grant Scope\n\n The licenses granted in this Section 2 are the only rights granted under this\n License. No additional rights or licenses will be implied from the distribution\n or licensing of Covered Software under this License. Notwithstanding Section\n 2.1(b) above, no patent license is granted by a Contributor:\n\n a. for any code that a Contributor has removed from Covered Software; or\n\n b. for infringements caused by: (i) Your and any other third party’s\n modifications of Covered Software, or (ii) the combination of its\n Contributions with other software (except as part of its Contributor\n Version); or\n\n c. under Patent Claims infringed by Covered Software in the absence of its\n Contributions.\n\n This License does not grant any rights in the trademarks, service marks, or\n logos of any Contributor (except as may be necessary to comply with the\n notice requirements in Section 3.4).\n\n2.4. Subsequent Licenses\n\n No Contributor makes additional grants as a result of Your choice to\n distribute the Covered Software under a subsequent version of this License\n (see Section 10.2) or under the terms of a Secondary License (if permitted\n under the terms of Section 3.3).\n\n2.5. Representation\n\n Each Contributor represents that the Contributor believes its Contributions\n are its original creation(s) or it has sufficient rights to grant the\n rights to its Contributions conveyed by this License.\n\n2.6. Fair Use\n\n This License is not intended to limit any rights You have under applicable\n copyright doctrines of fair use, fair dealing, or other equivalents.\n\n2.7. Conditions\n\n Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in\n Section 2.1.\n\n\n3. Responsibilities\n\n3.1. 
Distribution of Source Form\n\n All distribution of Covered Software in Source Code Form, including any\n Modifications that You create or to which You contribute, must be under the\n terms of this License. You must inform recipients that the Source Code Form\n of the Covered Software is governed by the terms of this License, and how\n they can obtain a copy of this License. You may not attempt to alter or\n restrict the recipients’ rights in the Source Code Form.\n\n3.2. Distribution of Executable Form\n\n If You distribute Covered Software in Executable Form then:\n\n a. such Covered Software must also be made available in Source Code Form,\n as described in Section 3.1, and You must inform recipients of the\n Executable Form how they can obtain a copy of such Source Code Form by\n reasonable means in a timely manner, at a charge no more than the cost\n of distribution to the recipient; and\n\n b. You may distribute such Executable Form under the terms of this License,\n or sublicense it under different terms, provided that the license for\n the Executable Form does not attempt to limit or alter the recipients’\n rights in the Source Code Form under this License.\n\n3.3. Distribution of a Larger Work\n\n You may create and distribute a Larger Work under terms of Your choice,\n provided that You also comply with the requirements of this License for the\n Covered Software. If the Larger Work is a combination of Covered Software\n with a work governed by one or more Secondary Licenses, and the Covered\n Software is not Incompatible With Secondary Licenses, this License permits\n You to additionally distribute such Covered Software under the terms of\n such Secondary License(s), so that the recipient of the Larger Work may, at\n their option, further distribute the Covered Software under the terms of\n either this License or such Secondary License(s).\n\n3.4. 
Notices\n\n You may not remove or alter the substance of any license notices (including\n copyright notices, patent notices, disclaimers of warranty, or limitations\n of liability) contained within the Source Code Form of the Covered\n Software, except that You may alter any license notices to the extent\n required to remedy known factual inaccuracies.\n\n3.5. Application of Additional Terms\n\n You may choose to offer, and to charge a fee for, warranty, support,\n indemnity or liability obligations to one or more recipients of Covered\n Software. However, You may do so only on Your own behalf, and not on behalf\n of any Contributor. You must make it absolutely clear that any such\n warranty, support, indemnity, or liability obligation is offered by You\n alone, and You hereby agree to indemnify every Contributor for any\n liability incurred by such Contributor as a result of warranty, support,\n indemnity or liability terms You offer. You may include additional\n disclaimers of warranty and limitations of liability specific to any\n jurisdiction.\n\n4. Inability to Comply Due to Statute or Regulation\n\n If it is impossible for You to comply with any of the terms of this License\n with respect to some or all of the Covered Software due to statute, judicial\n order, or regulation then You must: (a) comply with the terms of this License\n to the maximum extent possible; and (b) describe the limitations and the code\n they affect. Such description must be placed in a text file included with all\n distributions of the Covered Software under this License. Except to the\n extent prohibited by statute or regulation, such description must be\n sufficiently detailed for a recipient of ordinary skill to be able to\n understand it.\n\n5. Termination\n\n5.1. The rights granted under this License will terminate automatically if You\n fail to comply with any of its terms. 
However, if You become compliant,\n then the rights granted under this License from a particular Contributor\n are reinstated (a) provisionally, unless and until such Contributor\n explicitly and finally terminates Your grants, and (b) on an ongoing basis,\n if such Contributor fails to notify You of the non-compliance by some\n reasonable means prior to 60 days after You have come back into compliance.\n Moreover, Your grants from a particular Contributor are reinstated on an\n ongoing basis if such Contributor notifies You of the non-compliance by\n some reasonable means, this is the first time You have received notice of\n non-compliance with this License from such Contributor, and You become\n compliant prior to 30 days after Your receipt of the notice.\n\n5.2. If You initiate litigation against any entity by asserting a patent\n infringement claim (excluding declaratory judgment actions, counter-claims,\n and cross-claims) alleging that a Contributor Version directly or\n indirectly infringes any patent, then the rights granted to You by any and\n all Contributors for the Covered Software under Section 2.1 of this License\n shall terminate.\n\n5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user\n license agreements (excluding distributors and resellers) which have been\n validly granted by You or Your distributors under this License prior to\n termination shall survive termination.\n\n6. Disclaimer of Warranty\n\n Covered Software is provided under this License on an “as is” basis, without\n warranty of any kind, either expressed, implied, or statutory, including,\n without limitation, warranties that the Covered Software is free of defects,\n merchantable, fit for a particular purpose or non-infringing. 
The entire\n risk as to the quality and performance of the Covered Software is with You.\n Should any Covered Software prove defective in any respect, You (not any\n Contributor) assume the cost of any necessary servicing, repair, or\n correction. This disclaimer of warranty constitutes an essential part of this\n License. No use of any Covered Software is authorized under this License\n except under this disclaimer.\n\n7. Limitation of Liability\n\n Under no circumstances and under no legal theory, whether tort (including\n negligence), contract, or otherwise, shall any Contributor, or anyone who\n distributes Covered Software as permitted above, be liable to You for any\n direct, indirect, special, incidental, or consequential damages of any\n character including, without limitation, damages for lost profits, loss of\n goodwill, work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses, even if such party shall have been\n informed of the possibility of such damages. This limitation of liability\n shall not apply to liability for death or personal injury resulting from such\n party’s negligence to the extent applicable law prohibits such limitation.\n Some jurisdictions do not allow the exclusion or limitation of incidental or\n consequential damages, so this exclusion and limitation may not apply to You.\n\n8. Litigation\n\n Any litigation relating to this License may be brought only in the courts of\n a jurisdiction where the defendant maintains its principal place of business\n and such litigation shall be governed by laws of that jurisdiction, without\n reference to its conflict-of-law provisions. Nothing in this Section shall\n prevent a party’s ability to bring cross-claims or counter-claims.\n\n9. Miscellaneous\n\n This License represents the complete agreement concerning the subject matter\n hereof. 
If any provision of this License is held to be unenforceable, such\n provision shall be reformed only to the extent necessary to make it\n enforceable. Any law or regulation which provides that the language of a\n contract shall be construed against the drafter shall not be used to construe\n this License against a Contributor.\n\n\n10. Versions of the License\n\n10.1. New Versions\n\n Mozilla Foundation is the license steward. Except as provided in Section\n 10.3, no one other than the license steward has the right to modify or\n publish new versions of this License. Each version will be given a\n distinguishing version number.\n\n10.2. Effect of New Versions\n\n You may distribute the Covered Software under the terms of the version of\n the License under which You originally received the Covered Software, or\n under the terms of any subsequent version published by the license\n steward.\n\n10.3. Modified Versions\n\n If you create software not governed by this License, and you want to\n create a new license for such software, you may create and use a modified\n version of this License if you rename the license and remove any\n references to the name of the license steward (except to note that such\n modified license differs from this License).\n\n10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses\n If You choose to distribute Source Code Form that is Incompatible With\n Secondary Licenses under the terms of this version of the License, the\n notice described in Exhibit B of this License must be attached.\n\nExhibit A - Source Code Form License Notice\n\n This Source Code Form is subject to the\n terms of the Mozilla Public License, v.\n 2.0. 
If a copy of the MPL was not\n distributed with this file, You can\n obtain one at\n http://mozilla.org/MPL/2.0/.\n\nIf it is not possible or desirable to put the notice in a particular file, then\nYou may include the notice in a location (such as a LICENSE file in a relevant\ndirectory) where a recipient would be likely to look for such a notice.\n\nYou may add additional accurate notices of copyright ownership.\n\nExhibit B - “Incompatible With Secondary Licenses” Notice\n\n This Source Code Form is “Incompatible\n With Secondary Licenses”, as defined by\n the Mozilla Public License, v. 2.0.\n\n"
},
{
"name": "github.com/hashicorp/golang-lru/v2",
@@ -740,8 +735,8 @@
"licenseText": "The MIT License (MIT)\n\nCopyright (c) 2015 Jay Taylor\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n"
},
{
- "name": "github.com/jbenet/go-context/io",
- "path": "github.com/jbenet/go-context/io/LICENSE",
+ "name": "github.com/jbenet/go-context",
+ "path": "github.com/jbenet/go-context/LICENSE",
"licenseText": "The MIT License (MIT)\n\nCopyright (c) 2014 Juan Batiz-Benet\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n"
},
{
@@ -799,6 +794,11 @@
"path": "github.com/klauspost/cpuid/v2/LICENSE",
"licenseText": "The MIT License (MIT)\n\nCopyright (c) 2015 Klaus Post\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n"
},
+ {
+ "name": "github.com/klauspost/crc32",
+ "path": "github.com/klauspost/crc32/LICENSE",
+ "licenseText": "Copyright (c) 2012 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
+ },
{
"name": "github.com/klauspost/pgzip",
"path": "github.com/klauspost/pgzip/LICENSE",
@@ -806,8 +806,8 @@
},
{
"name": "github.com/lib/pq",
- "path": "github.com/lib/pq/LICENSE.md",
- "licenseText": "Copyright (c) 2011-2013, 'pq' Contributors\nPortions Copyright (C) 2011 Blake Mizerany\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
+ "path": "github.com/lib/pq/LICENSE",
+ "licenseText": "MIT License\n\nCopyright (c) 2011-2013, 'pq' Contributors. Portions Copyright (c) 2011 Blake Mizerany\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
{
"name": "github.com/libdns/libdns",
@@ -820,8 +820,8 @@
"licenseText": "Copyright (c) 2016 Mail.Ru Group\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
},
{
- "name": "github.com/markbates/going/defaults",
- "path": "github.com/markbates/going/defaults/LICENSE.txt",
+ "name": "github.com/markbates/going",
+ "path": "github.com/markbates/going/LICENSE.txt",
"licenseText": "Copyright (c) 2014 Mark Bates\n\nMIT License\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\nNONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\nLIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\nWITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
},
{
@@ -875,8 +875,8 @@
"licenseText": "Copyright (c) 2012 The Go Authors. All rights reserved.\nCopyright (c) Microsoft Corporation.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "github.com/microsoft/go-mssqldb/internal/github.com/swisscom/mssql-always-encrypted/pkg",
- "path": "github.com/microsoft/go-mssqldb/internal/github.com/swisscom/mssql-always-encrypted/pkg/LICENSE.txt",
+ "name": "github.com/microsoft/go-mssqldb/internal/github.com/swisscom/mssql-always-encrypted",
+ "path": "github.com/microsoft/go-mssqldb/internal/github.com/swisscom/mssql-always-encrypted/LICENSE.txt",
"licenseText": "Copyright (c) 2021 Swisscom (Switzerland) Ltd\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n\n"
},
{
@@ -930,13 +930,13 @@
"licenseText": "Copyright (c) 2011, Open Knowledge Foundation Ltd.\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in\n the documentation and/or other materials provided with the\n distribution.\n\n Neither the name of the Open Knowledge Foundation Ltd. nor the\n names of its contributors may be used to endorse or promote\n products derived from this software without specific prior written\n permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nHOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "github.com/nektos/act/pkg",
- "path": "github.com/nektos/act/pkg/LICENSE",
+ "name": "github.com/nektos/act",
+ "path": "github.com/nektos/act/LICENSE",
"licenseText": "MIT License\n\nCopyright (c) 2022 The Gitea Authors\nCopyright (c) 2019\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
{
- "name": "github.com/niklasfasching/go-org/org",
- "path": "github.com/niklasfasching/go-org/org/LICENSE",
+ "name": "github.com/niklasfasching/go-org",
+ "path": "github.com/niklasfasching/go-org/LICENSE",
"licenseText": "MIT License\n\nCopyright (c) 2018 Niklas Fasching\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
{
@@ -980,8 +980,8 @@
"licenseText": "\n Apache License\n Version 2.0, January 2004\n https://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n Copyright 2019, 2020 OCI Contributors\n Copyright 2016 Docker, Inc.\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n https://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
- "name": "github.com/opencontainers/image-spec/specs-go",
- "path": "github.com/opencontainers/image-spec/specs-go/LICENSE",
+ "name": "github.com/opencontainers/image-spec",
+ "path": "github.com/opencontainers/image-spec/LICENSE",
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n Copyright 2016 The Linux Foundation.\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
@@ -1005,8 +1005,8 @@
"licenseText": "Copyright (c) 2015, Dave Cheney \u003cdave@cheney.net\u003e\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n* Redistributions of source code must retain the above copyright notice, this\n list of conditions and the following disclaimer.\n\n* Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "github.com/pmezard/go-difflib/difflib",
- "path": "github.com/pmezard/go-difflib/difflib/LICENSE",
+ "name": "github.com/pmezard/go-difflib",
+ "path": "github.com/pmezard/go-difflib/LICENSE",
"licenseText": "Copyright (c) 2013, Patrick Mezard\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n Redistributions in binary form must reproduce the above copyright\nnotice, this list of conditions and the following disclaimer in the\ndocumentation and/or other materials provided with the distribution.\n The names of its contributors may not be used to endorse or promote\nproducts derived from this software without specific prior written\npermission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS\nIS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED\nTO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A\nPARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nHOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED\nTO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\nPROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\nLIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\nNEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
@@ -1015,18 +1015,18 @@
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
- "name": "github.com/prometheus/client_golang/internal/github.com/golang/gddo/httputil",
- "path": "github.com/prometheus/client_golang/internal/github.com/golang/gddo/httputil/LICENSE",
- "licenseText": "Copyright (c) 2013 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
- },
- {
- "name": "github.com/prometheus/client_golang/prometheus",
- "path": "github.com/prometheus/client_golang/prometheus/LICENSE",
+ "name": "github.com/prometheus/client_golang",
+ "path": "github.com/prometheus/client_golang/LICENSE",
"licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
- "name": "github.com/prometheus/client_model/go",
- "path": "github.com/prometheus/client_model/go/LICENSE",
+ "name": "github.com/prometheus/client_golang/internal/github.com/golang/gddo",
+ "path": "github.com/prometheus/client_golang/internal/github.com/golang/gddo/LICENSE",
+ "licenseText": "Copyright (c) 2013 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
+ },
+ {
+ "name": "github.com/prometheus/client_model",
+ "path": "github.com/prometheus/client_model/LICENSE",
"licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
@@ -1080,10 +1080,15 @@
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
- "name": "github.com/sergi/go-diff/diffmatchpatch",
- "path": "github.com/sergi/go-diff/diffmatchpatch/LICENSE",
+ "name": "github.com/sergi/go-diff",
+ "path": "github.com/sergi/go-diff/LICENSE",
"licenseText": "Copyright (c) 2012-2016 The go-diff Authors. All rights reserved.\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included\nin all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\n"
},
+ {
+ "name": "github.com/shopspring/decimal",
+ "path": "github.com/shopspring/decimal/LICENSE",
+ "licenseText": "The MIT License (MIT)\n\nCopyright (c) 2015 Spring, Inc.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n\n- Based on https://github.com/oguzbilgic/fpd, which has the following license:\n\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2013 Oguz Bilgic\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 
FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\n"
+ },
{
"name": "github.com/sirupsen/logrus",
"path": "github.com/sirupsen/logrus/LICENSE",
@@ -1115,13 +1120,13 @@
"licenseText": "MIT License\n\nCopyright (c) 2012-2020 Mat Ryer, Tyler Bunnell and contributors.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
{
- "name": "github.com/syndtr/goleveldb/leveldb",
- "path": "github.com/syndtr/goleveldb/leveldb/LICENSE",
+ "name": "github.com/syndtr/goleveldb",
+ "path": "github.com/syndtr/goleveldb/LICENSE",
"licenseText": "Copyright 2012 Suryandaru Triandana \u003csyndtr@gmail.com\u003e\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above copyright\nnotice, this list of conditions and the following disclaimer in the\ndocumentation and/or other materials provided with the distribution.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nHOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "github.com/tinylib/msgp/msgp",
- "path": "github.com/tinylib/msgp/msgp/LICENSE",
+ "name": "github.com/tinylib/msgp",
+ "path": "github.com/tinylib/msgp/LICENSE",
"licenseText": "Copyright (c) 2014 Philip Hofer\nPortions Copyright (c) 2009 The Go Authors (license at http://golang.org) where indicated\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
},
{
@@ -1174,6 +1179,11 @@
"path": "github.com/xanzy/ssh-agent/LICENSE",
"licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"{}\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright {yyyy} {name of copyright owner}\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n"
},
+ {
+ "name": "github.com/xi2/xz",
+ "path": "github.com/xi2/xz/LICENSE",
+ "licenseText": "Licensing of github.com/xi2/xz\n==============================\n\n This Go package is a modified version of\n\n XZ Embedded \u003chttp://tukaani.org/xz/embedded.html\u003e\n\n The contents of the testdata directory are modified versions of\n the test files from\n\n XZ Utils \u003chttp://tukaani.org/xz/\u003e\n\n All the files in this package have been written by Michael Cross,\n Lasse Collin and/or Igor PavLov. All these files have been put\n into the public domain. You can do whatever you want with these\n files.\n\n This software is provided \"as is\", without any warranty.\n"
+ },
{
"name": "github.com/yohcop/openid-go",
"path": "github.com/yohcop/openid-go/LICENSE",
@@ -1222,16 +1232,26 @@
{
"name": "go.uber.org/zap",
"path": "go.uber.org/zap/LICENSE",
- "licenseText": "Copyright (c) 2016-2017 Uber Technologies, Inc.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n"
- },
- {
- "name": "go.uber.org/zap/exp/zapslog",
- "path": "go.uber.org/zap/exp/zapslog/LICENSE",
"licenseText": "Copyright (c) 2016-2024 Uber Technologies, Inc.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n"
},
{
- "name": "go4.org/readerutil",
- "path": "go4.org/readerutil/LICENSE",
+ "name": "go.uber.org/zap/exp",
+ "path": "go.uber.org/zap/exp/LICENSE",
+ "licenseText": "Copyright (c) 2016-2024 Uber Technologies, Inc.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n"
+ },
+ {
+ "name": "go.yaml.in/yaml/v2",
+ "path": "go.yaml.in/yaml/v2/LICENSE",
+ "licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"{}\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright {yyyy} {name of copyright owner}\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
+ },
+ {
+ "name": "go.yaml.in/yaml/v3",
+ "path": "go.yaml.in/yaml/v3/LICENSE",
+ "licenseText": "\nThis project is covered by two different licenses: MIT and Apache.\n\n#### MIT License ####\n\nThe following files were ported to Go from C files of libyaml, and thus\nare still covered by their original MIT license, with the additional\ncopyright staring in 2011 when the project was ported over:\n\n apic.go emitterc.go parserc.go readerc.go scannerc.go\n writerc.go yamlh.go yamlprivateh.go\n\nCopyright (c) 2006-2010 Kirill Simonov\nCopyright (c) 2006-2011 Kirill Simonov\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\nof the Software, and to permit persons to whom the Software is furnished to do\nso, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n### Apache License ###\n\nAll the remaining project files are covered by the Apache license:\n\nCopyright (c) 2011-2019 Canonical Ltd\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n"
+ },
+ {
+ "name": "go4.org",
+ "path": "go4.org/LICENSE",
"licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"{}\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright {yyyy} {name of copyright owner}\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n"
},
{
@@ -1245,8 +1265,8 @@
"licenseText": "Copyright 2009 The Go Authors.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google LLC nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "golang.org/x/mod/semver",
- "path": "golang.org/x/mod/semver/LICENSE",
+ "name": "golang.org/x/mod",
+ "path": "golang.org/x/mod/LICENSE",
"licenseText": "Copyright 2009 The Go Authors.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google LLC nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
@@ -1275,13 +1295,13 @@
"licenseText": "Copyright 2009 The Go Authors.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google LLC nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "golang.org/x/time/rate",
- "path": "golang.org/x/time/rate/LICENSE",
+ "name": "golang.org/x/time",
+ "path": "golang.org/x/time/LICENSE",
"licenseText": "Copyright 2009 The Go Authors.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google LLC nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
- "name": "google.golang.org/genproto/googleapis/rpc/status",
- "path": "google.golang.org/genproto/googleapis/rpc/status/LICENSE",
+ "name": "google.golang.org/genproto/googleapis/rpc",
+ "path": "google.golang.org/genproto/googleapis/rpc/LICENSE",
"licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n"
},
{
diff --git a/build/backport-locales.go b/build/backport-locales.go
deleted file mode 100644
index d112dd72bd..0000000000
--- a/build/backport-locales.go
+++ /dev/null
@@ -1,115 +0,0 @@
-// Copyright 2023 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-//go:build ignore
-
-package main
-
-import (
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- "strings"
-
- "code.gitea.io/gitea/modules/container"
- "code.gitea.io/gitea/modules/setting"
-)
-
-func main() {
- if len(os.Args) != 2 {
- println("usage: backport-locales ")
- println("eg: backport-locales release/v1.19")
- os.Exit(1)
- }
-
- mustNoErr := func(err error) {
- if err != nil {
- panic(err)
- }
- }
- collectInis := func(ref string) map[string]setting.ConfigProvider {
- inis := map[string]setting.ConfigProvider{}
- err := filepath.WalkDir("options/locale", func(path string, d os.DirEntry, err error) error {
- if err != nil {
- return err
- }
- if d.IsDir() || !strings.HasSuffix(d.Name(), ".ini") {
- return nil
- }
- cfg, err := setting.NewConfigProviderForLocale(path)
- mustNoErr(err)
- inis[path] = cfg
- fmt.Printf("collecting: %s @ %s\n", path, ref)
- return nil
- })
- mustNoErr(err)
- return inis
- }
-
- // collect new locales from current working directory
- inisNew := collectInis("HEAD")
-
- // switch to the target ref, and collect the old locales
- cmd := exec.Command("git", "checkout", os.Args[1])
- cmd.Stdout = os.Stdout
- cmd.Stderr = os.Stderr
- mustNoErr(cmd.Run())
- inisOld := collectInis(os.Args[1])
-
- // use old en-US as the base, and copy the new translations to the old locales
- enUsOld := inisOld["options/locale/locale_en-US.ini"]
- brokenWarned := make(container.Set[string])
- for path, iniOld := range inisOld {
- if iniOld == enUsOld {
- continue
- }
- iniNew := inisNew[path]
- if iniNew == nil {
- continue
- }
- for _, secEnUS := range enUsOld.Sections() {
- secOld := iniOld.Section(secEnUS.Name())
- secNew := iniNew.Section(secEnUS.Name())
- for _, keyEnUs := range secEnUS.Keys() {
- if secNew.HasKey(keyEnUs.Name()) {
- oldStr := secOld.Key(keyEnUs.Name()).String()
- newStr := secNew.Key(keyEnUs.Name()).String()
- broken := oldStr != "" && strings.Count(oldStr, "%") != strings.Count(newStr, "%")
- broken = broken || strings.Contains(oldStr, "\n") || strings.Contains(oldStr, "\n")
- if broken {
- brokenWarned.Add(secOld.Name() + "." + keyEnUs.Name())
- fmt.Println("----")
- fmt.Printf("WARNING: skip broken locale: %s , [%s] %s\n", path, secEnUS.Name(), keyEnUs.Name())
- fmt.Printf("\told: %s\n", strings.ReplaceAll(oldStr, "\n", "\\n"))
- fmt.Printf("\tnew: %s\n", strings.ReplaceAll(newStr, "\n", "\\n"))
- continue
- }
- secOld.Key(keyEnUs.Name()).SetValue(newStr)
- }
- }
- }
- mustNoErr(iniOld.SaveTo(path))
- }
-
- fmt.Println("========")
-
- for path, iniNew := range inisNew {
- for _, sec := range iniNew.Sections() {
- for _, key := range sec.Keys() {
- str := sec.Key(key.Name()).String()
- broken := strings.Contains(str, "\n")
- broken = broken || strings.HasPrefix(str, "`") != strings.HasSuffix(str, "`")
- broken = broken || strings.HasPrefix(str, "\"`")
- broken = broken || strings.HasPrefix(str, "`\"")
- broken = broken || strings.Count(str, `"`)%2 == 1
- broken = broken || strings.Count(str, "`")%2 == 1
- if broken && !brokenWarned.Contains(sec.Name()+"."+key.Name()) {
- fmt.Printf("WARNING: found broken locale: %s , [%s] %s\n", path, sec.Name(), key.Name())
- fmt.Printf("\tstr: %s\n", strings.ReplaceAll(str, "\n", "\\n"))
- fmt.Println("----")
- }
- }
- }
- }
-}
diff --git a/build/generate-emoji.go b/build/generate-emoji.go
index 446ab5f440..cbc1be2139 100644
--- a/build/generate-emoji.go
+++ b/build/generate-emoji.go
@@ -24,8 +24,8 @@ import (
)
const (
- gemojiURL = "https://raw.githubusercontent.com/github/gemoji/master/db/emoji.json"
- maxUnicodeVersion = 15
+ gemojiURL = "https://raw.githubusercontent.com/rhysd/gemoji/537ff2d7e0496e9964824f7f73ec7ece88c9765a/db/emoji.json"
+ maxUnicodeVersion = 16
)
var flagOut = flag.String("o", "modules/emoji/emoji_data.go", "out")
@@ -149,8 +149,8 @@ func generate() ([]byte, error) {
}
// write a JSON file to use with tribute (write before adding skin tones since we can't support them there yet)
- file, _ := json.Marshal(data)
- _ = os.WriteFile("assets/emoji.json", file, 0o644)
+ file, _ := json.MarshalIndent(data, "", " ")
+ _ = os.WriteFile("assets/emoji.json", append(file, '\n'), 0o644)
// Add skin tones to emoji that support it
var (
diff --git a/build/generate-go-licenses.go b/build/generate-go-licenses.go
index 84ba39025c..b710fdb841 100644
--- a/build/generate-go-licenses.go
+++ b/build/generate-go-licenses.go
@@ -8,99 +8,219 @@ package main
import (
"encoding/json"
"fmt"
- "io/fs"
"os"
- "path"
+ "os/exec"
"path/filepath"
"regexp"
+ "slices"
"sort"
"strings"
-
- "code.gitea.io/gitea/modules/container"
)
// regexp is based on go-license, excluding README and NOTICE
// https://github.com/google/go-licenses/blob/master/licenses/find.go
var licenseRe = regexp.MustCompile(`^(?i)((UN)?LICEN(S|C)E|COPYING).*$`)
+// primaryLicenseRe matches exact primary license filenames without suffixes.
+// When a directory has both primary and variant files (e.g. LICENSE and
+// LICENSE.docs), only the primary files are kept.
+var primaryLicenseRe = regexp.MustCompile(`^(?i)(LICEN[SC]E|COPYING)$`)
+
+// ignoredNames are LicenseEntry.Name values to exclude from the output.
+var ignoredNames = map[string]bool{
+ "code.gitea.io/gitea": true,
+ "code.gitea.io/gitea/options/license": true,
+}
+
+var excludedExt = map[string]bool{
+ ".gitignore": true,
+ ".go": true,
+ ".mod": true,
+ ".sum": true,
+ ".toml": true,
+ ".yaml": true,
+ ".yml": true,
+}
+
+type ModuleInfo struct {
+ Path string
+ Dir string
+ PkgDirs []string // directories of packages imported from this module
+}
+
type LicenseEntry struct {
Name string `json:"name"`
Path string `json:"path"`
LicenseText string `json:"licenseText"`
}
-func main() {
- if len(os.Args) != 3 {
- fmt.Println("usage: go run generate-go-licenses.go ")
+// getModules returns all dependency modules with their local directory paths
+// and the package directories used from each module.
+func getModules(goCmd string) []ModuleInfo {
+ cmd := exec.Command(goCmd, "list", "-deps", "-f",
+ "{{if .Module}}{{.Module.Path}}\t{{.Module.Dir}}\t{{.Dir}}{{end}}", "./...")
+ cmd.Stderr = os.Stderr
+ // Use GOOS=linux with CGO to ensure we capture all platform-specific
+ // dependencies, matching the CI environment.
+ cmd.Env = append(os.Environ(), "GOOS=linux", "GOARCH=amd64", "CGO_ENABLED=1")
+ output, err := cmd.Output()
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "failed to run 'go list -deps': %v\n", err)
os.Exit(1)
}
- base, out := os.Args[1], os.Args[2]
-
- // Add ext for excluded files because license_test.go will be included for some reason.
- // And there are more files that should be excluded, check with:
- //
- // go run github.com/google/go-licenses@v1.6.0 save . --force --save_path=.go-licenses 2>/dev/null
- // find .go-licenses -type f | while read FILE; do echo "${$(basename $FILE)##*.}"; done | sort -u
- // AUTHORS
- // COPYING
- // LICENSE
- // Makefile
- // NOTICE
- // gitignore
- // go
- // md
- // mod
- // sum
- // toml
- // txt
- // yml
- //
- // It could be removed once we have a better regex.
- excludedExt := container.SetOf(".gitignore", ".go", ".mod", ".sum", ".toml", ".yml")
-
- var paths []string
- err := filepath.WalkDir(base, func(path string, entry fs.DirEntry, err error) error {
- if err != nil {
- return err
+ var modules []ModuleInfo
+ seen := make(map[string]int) // module path -> index in modules
+ for _, line := range strings.Split(string(output), "\n") {
+ line = strings.TrimSpace(line)
+ if line == "" {
+ continue
}
- if entry.IsDir() || !licenseRe.MatchString(entry.Name()) || excludedExt.Contains(filepath.Ext(entry.Name())) {
- return nil
+ parts := strings.Split(line, "\t")
+ if len(parts) != 3 {
+ continue
}
- paths = append(paths, path)
- return nil
- })
- if err != nil {
- panic(err)
+ modPath, modDir, pkgDir := parts[0], parts[1], parts[2]
+ if idx, ok := seen[modPath]; ok {
+ modules[idx].PkgDirs = append(modules[idx].PkgDirs, pkgDir)
+ } else {
+ seen[modPath] = len(modules)
+ modules = append(modules, ModuleInfo{
+ Path: modPath,
+ Dir: modDir,
+ PkgDirs: []string{pkgDir},
+ })
+ }
+ }
+ return modules
+}
+
+// findLicenseFiles scans a module's root directory and its used package
+// directories for license files. It also walks up from each package directory
+// to the module root, scanning intermediate directories. Subdirectory licenses
+// are only included if their text differs from the root license(s).
+func findLicenseFiles(mod ModuleInfo) []LicenseEntry {
+ var entries []LicenseEntry
+ seenTexts := make(map[string]bool)
+
+ // First, collect root-level license files.
+ entries = append(entries, scanDirForLicenses(mod.Dir, mod.Path, "")...)
+ for _, e := range entries {
+ seenTexts[e.LicenseText] = true
}
- sort.Strings(paths)
+ // Then check each package directory and all intermediate parent directories
+ // up to the module root for license files with unique text.
+ seenDirs := map[string]bool{mod.Dir: true}
+ for _, pkgDir := range mod.PkgDirs {
+ for dir := pkgDir; dir != mod.Dir && strings.HasPrefix(dir, mod.Dir); dir = filepath.Dir(dir) {
+ if seenDirs[dir] {
+ continue
+ }
+ seenDirs[dir] = true
+ for _, e := range scanDirForLicenses(dir, mod.Path, mod.Dir) {
+ if !seenTexts[e.LicenseText] {
+ seenTexts[e.LicenseText] = true
+ entries = append(entries, e)
+ }
+ }
+ }
+ }
+ return entries
+}
+
+// scanDirForLicenses reads a single directory for license files and returns entries.
+// If moduleRoot is non-empty, paths are made relative to it.
+func scanDirForLicenses(dir, modulePath, moduleRoot string) []LicenseEntry {
+ dirEntries, err := os.ReadDir(dir)
+ if err != nil {
+ return nil
+ }
var entries []LicenseEntry
- for _, filePath := range paths {
- licenseText, err := os.ReadFile(filePath)
- if err != nil {
- panic(err)
+ for _, entry := range dirEntries {
+ if entry.IsDir() {
+ continue
}
-
- pkgPath := filepath.ToSlash(filePath)
- pkgPath = strings.TrimPrefix(pkgPath, base+"/")
- pkgName := path.Dir(pkgPath)
-
- // There might be a bug somewhere in go-licenses that sometimes interprets the
- // root package as "." and sometimes as "code.gitea.io/gitea". Workaround by
- // removing both of them for the sake of stable output.
- if pkgName == "." || pkgName == "code.gitea.io/gitea" {
+ name := entry.Name()
+ if !licenseRe.MatchString(name) {
+ continue
+ }
+ if excludedExt[strings.ToLower(filepath.Ext(name))] {
continue
}
+ content, err := os.ReadFile(filepath.Join(dir, name))
+ if err != nil {
+ continue
+ }
+
+ entryName := modulePath
+ entryPath := modulePath + "/" + name
+ if moduleRoot != "" {
+ rel, _ := filepath.Rel(moduleRoot, dir)
+ if rel != "." {
+ relSlash := filepath.ToSlash(rel)
+ entryName = modulePath + "/" + relSlash
+ entryPath = modulePath + "/" + relSlash + "/" + name
+ }
+ }
+
entries = append(entries, LicenseEntry{
- Name: pkgName,
- Path: pkgPath,
- LicenseText: string(licenseText),
+ Name: entryName,
+ Path: entryPath,
+ LicenseText: string(content),
})
}
+ // When multiple license files exist, prefer primary files (e.g. LICENSE)
+ // over variants with suffixes (e.g. LICENSE.docs, LICENSE-2.0.txt).
+ // If no primary file exists, keep only the first variant.
+ if len(entries) > 1 {
+ var primary []LicenseEntry
+ for _, e := range entries {
+ fileName := e.Path[strings.LastIndex(e.Path, "/")+1:]
+ if primaryLicenseRe.MatchString(fileName) {
+ primary = append(primary, e)
+ }
+ }
+ if len(primary) > 0 {
+ return primary
+ }
+ return entries[:1]
+ }
+
+ return entries
+}
+
+func main() {
+ if len(os.Args) != 2 {
+ fmt.Println("usage: go run generate-go-licenses.go ")
+ os.Exit(1)
+ }
+
+ out := os.Args[1]
+
+ goCmd := "go"
+ if env := os.Getenv("GO"); env != "" {
+ goCmd = env
+ }
+
+ modules := getModules(goCmd)
+
+ var entries []LicenseEntry
+ for _, mod := range modules {
+ entries = append(entries, findLicenseFiles(mod)...)
+ }
+
+ entries = slices.DeleteFunc(entries, func(e LicenseEntry) bool {
+ return ignoredNames[e.Name]
+ })
+
+ sort.Slice(entries, func(i, j int) bool {
+ return entries[i].Path < entries[j].Path
+ })
+
jsonBytes, err := json.MarshalIndent(entries, "", " ")
if err != nil {
panic(err)
diff --git a/cmd/admin.go b/cmd/admin.go
index a01274b90e..dbd48e5727 100644
--- a/cmd/admin.go
+++ b/cmd/admin.go
@@ -134,7 +134,7 @@ func runRepoSyncReleases(ctx context.Context, _ *cli.Command) error {
}
log.Trace(" currentNumReleases is %d, running SyncReleasesWithTags", oldnum)
- if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
+ if _, err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
log.Warn(" SyncReleasesWithTags: %v", err)
gitRepo.Close()
continue
diff --git a/cmd/admin_auth_ldap.go b/cmd/admin_auth_ldap.go
index 069ad6600c..c9be5abb37 100644
--- a/cmd/admin_auth_ldap.go
+++ b/cmd/admin_auth_ldap.go
@@ -94,6 +94,10 @@ func commonLdapCLIFlags() []cli.Flag {
Name: "public-ssh-key-attribute",
Usage: "The attribute of the user’s LDAP record containing the user’s public ssh key.",
},
+ &cli.BoolFlag{
+ Name: "ssh-keys-are-verified",
+ Usage: "Set to true to automatically flag SSH keys in LDAP as verified.",
+ },
&cli.BoolFlag{
Name: "skip-local-2fa",
Usage: "Set to true to skip local 2fa for users authenticated by this source",
@@ -294,6 +298,9 @@ func parseLdapConfig(c *cli.Command, config *ldap.Source) error {
if c.IsSet("public-ssh-key-attribute") {
config.AttributeSSHPublicKey = c.String("public-ssh-key-attribute")
}
+ if c.IsSet("ssh-keys-are-verified") {
+ config.SSHKeysAreVerified = c.Bool("ssh-keys-are-verified")
+ }
if c.IsSet("avatar-attribute") {
config.AttributeAvatar = c.String("avatar-attribute")
}
diff --git a/cmd/hook.go b/cmd/hook.go
index 1845ade625..6004f679ac 100644
--- a/cmd/hook.go
+++ b/cmd/hook.go
@@ -163,6 +163,14 @@ func (n *nilWriter) WriteString(s string) (int, error) {
return len(s), nil
}
+func parseGitHookCommitRefLine(line string) (oldCommitID, newCommitID string, refFullName git.RefName, ok bool) {
+ fields := strings.Split(line, " ")
+ if len(fields) != 3 {
+ return "", "", "", false
+ }
+ return fields[0], fields[1], git.RefName(fields[2]), true
+}
+
func runHookPreReceive(ctx context.Context, c *cli.Command) error {
if isInternal, _ := strconv.ParseBool(os.Getenv(repo_module.EnvIsInternal)); isInternal {
return nil
@@ -228,14 +236,11 @@ Gitea or set your environment appropriately.`, "")
continue
}
- fields := bytes.Fields(scanner.Bytes())
- if len(fields) != 3 {
+ oldCommitID, newCommitID, refFullName, ok := parseGitHookCommitRefLine(scanner.Text())
+ if !ok {
continue
}
- oldCommitID := string(fields[0])
- newCommitID := string(fields[1])
- refFullName := git.RefName(fields[2])
total++
lastline++
@@ -313,7 +318,7 @@ func runHookPostReceive(ctx context.Context, c *cli.Command) error {
setup(ctx, c.Bool("debug"))
// First of all run update-server-info no matter what
- if _, _, err := gitcmd.NewCommand("update-server-info").RunStdString(ctx); err != nil {
+ if err := gitcmd.NewCommand("update-server-info").RunWithStderr(ctx); err != nil {
return fmt.Errorf("failed to call 'git update-server-info': %w", err)
}
@@ -378,16 +383,13 @@ Gitea or set your environment appropriately.`, "")
continue
}
- fields := bytes.Fields(scanner.Bytes())
- if len(fields) != 3 {
+ var ok bool
+ oldCommitIDs[count], newCommitIDs[count], refFullNames[count], ok = parseGitHookCommitRefLine(scanner.Text())
+ if !ok {
continue
}
fmt.Fprintf(out, ".")
- oldCommitIDs[count] = string(fields[0])
- newCommitIDs[count] = string(fields[1])
- refFullNames[count] = git.RefName(fields[2])
-
commitID, _ := git.NewIDFromString(newCommitIDs[count])
if refFullNames[count] == git.BranchPrefix+"master" && !commitID.IsZero() && count == total {
masterPushed = true
@@ -594,8 +596,8 @@ Gitea or set your environment appropriately.`, "")
hookOptions.RefFullNames = make([]git.RefName, 0, hookBatchSize)
for {
- // note: pktLineTypeUnknow means pktLineTypeFlush and pktLineTypeData all allowed
- rs, err = readPktLine(ctx, reader, pktLineTypeUnknow)
+ // note: pktLineTypeUnknown means pktLineTypeFlush and pktLineTypeData all allowed
+ rs, err = readPktLine(ctx, reader, pktLineTypeUnknown)
if err != nil {
return err
}
@@ -614,7 +616,7 @@ Gitea or set your environment appropriately.`, "")
if hasPushOptions {
for {
- rs, err = readPktLine(ctx, reader, pktLineTypeUnknow)
+ rs, err = readPktLine(ctx, reader, pktLineTypeUnknown)
if err != nil {
return err
}
@@ -711,8 +713,8 @@ Gitea or set your environment appropriately.`, "")
type pktLineType int64
const (
- // UnKnow type
- pktLineTypeUnknow pktLineType = 0
+ // Unknown type
+ pktLineTypeUnknown pktLineType = 0
// flush-pkt "0000"
pktLineTypeFlush pktLineType = iota
// data line
diff --git a/cmd/hook_test.go b/cmd/hook_test.go
index 86cd4834f2..fefc33c01c 100644
--- a/cmd/hook_test.go
+++ b/cmd/hook_test.go
@@ -39,3 +39,17 @@ func TestPktLine(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, []byte("0007a\nb"), w.Bytes())
}
+
+func TestParseGitHookCommitRefLine(t *testing.T) {
+ oldCommitID, newCommitID, refName, ok := parseGitHookCommitRefLine("a b c")
+ assert.True(t, ok)
+ assert.Equal(t, "a", oldCommitID)
+ assert.Equal(t, "b", newCommitID)
+ assert.Equal(t, "c", string(refName))
+
+ _, _, _, ok = parseGitHookCommitRefLine("a\tb\tc")
+ assert.False(t, ok)
+
+ _, _, _, ok = parseGitHookCommitRefLine("a b")
+ assert.False(t, ok)
+}
diff --git a/cmd/main.go b/cmd/main.go
index 203799f02f..2ee00382d7 100644
--- a/cmd/main.go
+++ b/cmd/main.go
@@ -149,7 +149,7 @@ func NewMainApp(appVer AppVersion) *cli.Command {
app.Commands = append(app.Commands, subCmdWithConfig...)
app.Commands = append(app.Commands, subCmdStandalone...)
- setting.InitGiteaEnvVars()
+ setting.UnsetUnnecessaryEnvVars()
return app
}
diff --git a/cmd/main_test.go b/cmd/main_test.go
index 69ea1237c6..b1f6bb3ba9 100644
--- a/cmd/main_test.go
+++ b/cmd/main_test.go
@@ -157,6 +157,7 @@ func TestCliCmd(t *testing.T) {
for _, c := range cases {
t.Run(c.cmd, func(t *testing.T) {
+ defer test.MockVariableValue(&setting.InstallLock, false)()
app := newTestApp(cli.Command{
Action: func(ctx context.Context, cmd *cli.Command) error {
_, _ = fmt.Fprint(cmd.Root().Writer, makePathOutput(setting.AppWorkPath, setting.CustomPath, setting.CustomConf))
@@ -170,7 +171,10 @@ func TestCliCmd(t *testing.T) {
r, err := runTestApp(app, args...)
assert.NoError(t, err, c.cmd)
assert.NotEmpty(t, c.exp, c.cmd)
- assert.Contains(t, r.Stdout, c.exp, c.cmd)
+ if !assert.Contains(t, r.Stdout, c.exp, c.cmd) {
+ t.Log("Full output:\n" + r.Stdout)
+ t.Log("Expected:\n" + c.exp)
+ }
})
}
}
diff --git a/cmd/web.go b/cmd/web.go
index 6e39db2178..5000e780c5 100644
--- a/cmd/web.go
+++ b/cmd/web.go
@@ -8,14 +8,13 @@ import (
"fmt"
"net"
"net/http"
+ "net/http/pprof"
"os"
"path/filepath"
"strconv"
"strings"
"time"
- _ "net/http/pprof" // Used for debugging if enabled and a web server is running
-
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/gtprof"
@@ -23,6 +22,7 @@ import (
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/public"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers"
"code.gitea.io/gitea/routers/install"
@@ -163,8 +163,6 @@ func serveInstall(cmd *cli.Command) error {
}
func serveInstalled(c *cli.Command) error {
- setting.InitCfgProvider(setting.CustomConf)
- setting.LoadCommonSettings()
setting.MustInstalled()
showWebStartupMessage("Prepare to run web server")
@@ -234,22 +232,22 @@ func serveInstalled(c *cli.Command) error {
}
func servePprof() {
- // FIXME: it shouldn't use the global DefaultServeMux, and it should use a proper context
- http.DefaultServeMux.Handle("/debug/fgprof", fgprof.Handler())
+ mux := http.NewServeMux()
+ mux.HandleFunc("/debug/pprof/", pprof.Index)
+ mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline)
+ mux.HandleFunc("/debug/pprof/profile", pprof.Profile)
+ mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol)
+ mux.HandleFunc("/debug/pprof/trace", pprof.Trace)
+ mux.Handle("/debug/fgprof", fgprof.Handler())
+ // FIXME: it should use a proper context
_, _, finished := process.GetManager().AddTypedContext(context.TODO(), "Web: PProf Server", process.SystemProcessType, true)
// The pprof server is for debug purpose only, it shouldn't be exposed on public network. At the moment, it's not worth introducing a configurable option for it.
log.Info("Starting pprof server on localhost:6060")
- log.Info("Stopped pprof server: %v", http.ListenAndServe("localhost:6060", nil))
+ log.Info("Stopped pprof server: %v", http.ListenAndServe("localhost:6060", mux))
finished()
}
func runWeb(ctx context.Context, cmd *cli.Command) error {
- defer func() {
- if panicked := recover(); panicked != nil {
- log.Fatal("PANIC: %v\n%s", panicked, log.Stack(2))
- }
- }()
-
if subCmdName, valid := isValidDefaultSubCommand(cmd); !valid {
return fmt.Errorf("unknown command: %s", subCmdName)
}
@@ -269,6 +267,10 @@ func runWeb(ctx context.Context, cmd *cli.Command) error {
createPIDFile(cmd.String("pid"))
}
+ // init the HTML renderer and load templates, if error happens, it will report the error immediately and exit with error log
+ // in dev mode, it won't exit, but watch the template files for changes
+ _ = templates.PageRenderer()
+
if !setting.InstallLock {
if err := serveInstall(cmd); err != nil {
return err
diff --git a/custom/conf/app.example.ini b/custom/conf/app.example.ini
index 40c066c2b1..c7f8401cd9 100644
--- a/custom/conf/app.example.ini
+++ b/custom/conf/app.example.ini
@@ -737,11 +737,8 @@ LEVEL = Info
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Git Operation timeout in seconds
;[git.timeout]
-;DEFAULT = 360
;MIGRATE = 600
;MIRROR = 300
-;CLONE = 300
-;PULL = 300
;GC = 60
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -1332,9 +1329,12 @@ LEVEL = Info
;; Leave it empty to allow users to select any theme from "{CustomPath}/public/assets/css/theme-*.css"
;THEMES =
;;
-;; The icons for file list (basic/material), this is a temporary option which will be replaced by a user setting in the future.
+;; The icon theme for files (basic/material)
;FILE_ICON_THEME = material
;;
+;; The icon theme for folders (basic/material)
+;FOLDER_ICON_THEME = basic
+;;
;; All available reactions users can choose on issues/prs and comments.
;; Values can be emoji alias (:smile:) or a unicode emoji.
;; For custom reactions, add a tightly cropped square image to public/assets/img/emoji/reaction_name.png
@@ -2488,8 +2488,9 @@ LEVEL = Info
;[highlight.mapping]
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;; Extension mapping to highlight class
-;; e.g. .toml=ini
+;; Extension mapping to highlight class, for example:
+;; .toml = ini
+;; .my-js = JavaScript
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
@@ -2857,6 +2858,9 @@ LEVEL = Info
;ABANDONED_JOB_TIMEOUT = 24h
;; Strings committers can place inside a commit message or PR title to skip executing the corresponding actions workflow
;SKIP_WORKFLOW_STRINGS = [skip ci],[ci skip],[no ci],[skip actions],[actions skip]
+;; Comma-separated list of workflow directories, the first one to exist
+;; in a repo is used to find Actions workflow files
+;WORKFLOW_DIRS = .gitea/workflows,.github/workflows
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
diff --git a/eslint.config.ts b/eslint.config.ts
index 253a7f4555..cd37c4321e 100644
--- a/eslint.config.ts
+++ b/eslint.config.ts
@@ -15,10 +15,22 @@ import vue from 'eslint-plugin-vue';
import vueScopedCss from 'eslint-plugin-vue-scoped-css';
import wc from 'eslint-plugin-wc';
import {defineConfig, globalIgnores} from 'eslint/config';
+import type {ESLint} from 'eslint';
const jsExts = ['js', 'mjs', 'cjs'] as const;
const tsExts = ['ts', 'mts', 'cts'] as const;
-const restrictedSyntax = ['WithStatement', 'ForInStatement', 'LabeledStatement', 'SequenceExpression'];
+
+const restrictedGlobals = [
+ {name: 'localStorage', message: 'Use `modules/user-settings.ts` instead.'},
+ {name: 'fetch', message: 'Use `modules/fetch.ts` instead.'},
+];
+
+const restrictedProperties = [
+ {object: 'window', property: 'localStorage', message: 'Use `modules/user-settings.ts` instead.'},
+ {object: 'globalThis', property: 'localStorage', message: 'Use `modules/user-settings.ts` instead.'},
+ {object: 'window', property: 'fetch', message: 'Use `modules/fetch.ts` instead.'},
+ {object: 'globalThis', property: 'fetch', message: 'Use `modules/fetch.ts` instead.'},
+];
export default defineConfig([
globalIgnores([
@@ -32,10 +44,6 @@ export default defineConfig([
languageOptions: {
ecmaVersion: 'latest',
sourceType: 'module',
- globals: {
- ...globals.browser,
- ...globals.node,
- },
parser: typescriptParser,
parserOptions: {
sourceType: 'module',
@@ -55,8 +63,7 @@ export default defineConfig([
'@stylistic': stylistic,
'@typescript-eslint': typescriptPlugin.plugin,
'array-func': arrayFunc,
- // @ts-expect-error -- https://github.com/un-ts/eslint-plugin-import-x/issues/203
- 'import-x': importPlugin,
+ 'import-x': importPlugin as unknown as ESLint.Plugin, // https://github.com/un-ts/eslint-plugin-import-x/issues/203
regexp,
sonarjs,
unicorn,
@@ -69,7 +76,7 @@ export default defineConfig([
'import-x/resolver': {'typescript': true},
},
rules: {
- '@eslint-community/eslint-comments/disable-enable-pair': [2],
+ '@eslint-community/eslint-comments/disable-enable-pair': [0],
'@eslint-community/eslint-comments/no-aggregating-enable': [2],
'@eslint-community/eslint-comments/no-duplicate-disable': [2],
'@eslint-community/eslint-comments/no-restricted-disable': [0],
@@ -149,7 +156,7 @@ export default defineConfig([
'@typescript-eslint/adjacent-overload-signatures': [0],
'@typescript-eslint/array-type': [0],
'@typescript-eslint/await-thenable': [2],
- '@typescript-eslint/ban-ts-comment': [2, {'ts-expect-error': false, 'ts-ignore': true, 'ts-nocheck': false, 'ts-check': false}],
+ '@typescript-eslint/ban-ts-comment': [2, {'ts-expect-error': true, 'ts-ignore': true, 'ts-nocheck': false, 'ts-check': false}],
'@typescript-eslint/ban-tslint-comment': [0],
'@typescript-eslint/class-literal-property-style': [0],
'@typescript-eslint/class-methods-use-this': [0],
@@ -204,7 +211,7 @@ export default defineConfig([
'@typescript-eslint/no-non-null-asserted-nullish-coalescing': [0],
'@typescript-eslint/no-non-null-asserted-optional-chain': [2],
'@typescript-eslint/no-non-null-assertion': [0],
- '@typescript-eslint/no-redeclare': [0],
+ '@typescript-eslint/no-redeclare': [2],
'@typescript-eslint/no-redundant-type-constituents': [2],
'@typescript-eslint/no-require-imports': [2],
'@typescript-eslint/no-restricted-imports': [0],
@@ -233,7 +240,7 @@ export default defineConfig([
'@typescript-eslint/no-unused-vars': [2, {vars: 'all', args: 'all', caughtErrors: 'all', ignoreRestSiblings: false, argsIgnorePattern: '^_', varsIgnorePattern: '^_', caughtErrorsIgnorePattern: '^_', destructuredArrayIgnorePattern: '^_'}],
'@typescript-eslint/no-use-before-define': [2, {functions: false, classes: true, variables: true, allowNamedExports: true, typedefs: false, enums: false, ignoreTypeReferences: true}],
'@typescript-eslint/no-useless-constructor': [0],
- '@typescript-eslint/no-useless-default-assignment': [0], // https://github.com/typescript-eslint/typescript-eslint/issues/11847
+ '@typescript-eslint/no-useless-default-assignment': [2],
'@typescript-eslint/no-useless-empty-export': [0],
'@typescript-eslint/no-wrapper-object-types': [2],
'@typescript-eslint/non-nullable-type-assertion-style': [0],
@@ -264,6 +271,7 @@ export default defineConfig([
'@typescript-eslint/restrict-template-expressions': [0],
'@typescript-eslint/return-await': [0],
'@typescript-eslint/strict-boolean-expressions': [0],
+ '@typescript-eslint/strict-void-return': [0],
'@typescript-eslint/switch-exhaustiveness-check': [0],
'@typescript-eslint/triple-slash-reference': [2],
'@typescript-eslint/typedef': [0],
@@ -334,7 +342,7 @@ export default defineConfig([
'import-x/first': [2],
'import-x/group-exports': [0],
'import-x/max-dependencies': [0],
- 'import-x/named': [2],
+ 'import-x/named': [0],
'import-x/namespace': [0],
'import-x/newline-after-import': [0],
'import-x/no-absolute-path': [0],
@@ -362,7 +370,7 @@ export default defineConfig([
'import-x/no-self-import': [2],
'import-x/no-unassigned-import': [0],
'import-x/no-unresolved': [2, {commonjs: true, ignore: ['\\?.+$']}],
- // 'import-x/no-unused-modules': [2, {unusedExports: true}], // not compatible with eslint 9
+ 'import-x/no-unused-modules': [0], // incompatible with eslint 9
'import-x/no-useless-path-segments': [2, {commonjs: true}],
'import-x/no-webpack-loader-syntax': [2],
'import-x/order': [0],
@@ -429,7 +437,7 @@ export default defineConfig([
'no-import-assign': [2],
'no-inline-comments': [0],
'no-inner-declarations': [2],
- 'no-invalid-regexp': [2],
+ 'no-invalid-regexp': [0], // handled by regexp/no-invalid-regexp
'no-invalid-this': [0],
'no-irregular-whitespace': [2],
'no-iterator': [2],
@@ -543,7 +551,7 @@ export default defineConfig([
'no-new-func': [0], // handled by @typescript-eslint/no-implied-eval
'no-new-native-nonconstructor': [2],
'no-new-object': [2],
- 'no-new-symbol': [2],
+ 'no-new-symbol': [0], // handled by no-new-native-nonconstructor
'no-new-wrappers': [2],
'no-new': [0],
'no-nonoctal-decimal-escape': [2],
@@ -558,9 +566,10 @@ export default defineConfig([
'no-redeclare': [0], // must be disabled for typescript overloads
'no-regex-spaces': [2],
'no-restricted-exports': [0],
- 'no-restricted-globals': [2, 'addEventListener', 'blur', 'close', 'closed', 'confirm', 'defaultStatus', 'defaultstatus', 'error', 'event', 'external', 'find', 'focus', 'frameElement', 'frames', 'history', 'innerHeight', 'innerWidth', 'isFinite', 'isNaN', 'length', 'locationbar', 'menubar', 'moveBy', 'moveTo', 'name', 'onblur', 'onerror', 'onfocus', 'onload', 'onresize', 'onunload', 'open', 'opener', 'opera', 'outerHeight', 'outerWidth', 'pageXOffset', 'pageYOffset', 'parent', 'print', 'removeEventListener', 'resizeBy', 'resizeTo', 'screen', 'screenLeft', 'screenTop', 'screenX', 'screenY', 'scroll', 'scrollbars', 'scrollBy', 'scrollTo', 'scrollX', 'scrollY', 'status', 'statusbar', 'stop', 'toolbar', 'top'],
+ 'no-restricted-globals': [2, ...restrictedGlobals],
+ 'no-restricted-properties': [2, ...restrictedProperties],
'no-restricted-imports': [0],
- 'no-restricted-syntax': [2, ...restrictedSyntax, {selector: 'CallExpression[callee.name="fetch"]', message: 'use modules/fetch.ts instead'}],
+ 'no-restricted-syntax': [2, 'WithStatement', 'ForInStatement', 'LabeledStatement', 'SequenceExpression'],
'no-return-assign': [0],
'no-script-url': [2],
'no-self-assign': [2, {props: true}],
@@ -573,7 +582,7 @@ export default defineConfig([
'no-template-curly-in-string': [2],
'no-ternary': [0],
'no-this-before-super': [2],
- 'no-throw-literal': [2],
+ 'no-throw-literal': [0], // handled by @typescript-eslint/only-throw-error
'no-undef-init': [2],
'no-undef': [2], // it is still needed by eslint & IDE to prompt undefined names in real time
'no-undefined': [0],
@@ -591,7 +600,7 @@ export default defineConfig([
'no-unused-vars': [0], // handled by @typescript-eslint/no-unused-vars
'no-use-before-define': [0], // handled by @typescript-eslint/no-use-before-define
'no-useless-assignment': [2],
- 'no-useless-backreference': [2],
+ 'no-useless-backreference': [0], // handled by regexp/no-useless-backreference
'no-useless-call': [2],
'no-useless-catch': [2],
'no-useless-computed-key': [2],
@@ -599,7 +608,7 @@ export default defineConfig([
'no-useless-constructor': [2],
'no-useless-escape': [2],
'no-useless-rename': [2],
- 'no-useless-return': [2],
+ 'no-useless-return': [0], // handled by sonarjs/no-redundant-jump
'no-var': [2],
'no-void': [2],
'no-warning-comments': [0],
@@ -608,7 +617,7 @@ export default defineConfig([
'one-var-declaration-per-line': [0],
'one-var': [0],
'operator-assignment': [2, 'always'],
- 'operator-linebreak': [2, 'after'],
+ 'operator-linebreak': [0], // handled by @stylistic/operator-linebreak
'prefer-arrow-callback': [2, {allowNamedFunctions: true, allowUnboundThis: true}],
'prefer-const': [2, {destructuring: 'all', ignoreReadBeforeAssign: true}],
'prefer-destructuring': [0],
@@ -688,7 +697,7 @@ export default defineConfig([
'regexp/prefer-question-quantifier': [2],
'regexp/prefer-range': [2],
'regexp/prefer-regexp-exec': [2],
- 'regexp/prefer-regexp-test': [2],
+ 'regexp/prefer-regexp-test': [0], // handled by unicorn/prefer-regexp-test
'regexp/prefer-result-array-groups': [0],
'regexp/prefer-set-operation': [2],
'regexp/prefer-star-quantifier': [2],
@@ -718,7 +727,7 @@ export default defineConfig([
'sonarjs/no-empty-collection': [2],
'sonarjs/no-extra-arguments': [2],
'sonarjs/no-gratuitous-expressions': [2],
- 'sonarjs/no-identical-conditions': [2],
+ 'sonarjs/no-identical-conditions': [0], // handled by no-dupe-else-if
'sonarjs/no-identical-expressions': [2],
'sonarjs/no-identical-functions': [2, 5],
'sonarjs/no-ignored-return': [2],
@@ -731,7 +740,7 @@ export default defineConfig([
'sonarjs/no-small-switch': [0],
'sonarjs/no-unused-collection': [2],
'sonarjs/no-use-of-empty-return-value': [2],
- 'sonarjs/no-useless-catch': [2],
+ 'sonarjs/no-useless-catch': [0], // handled by no-useless-catch
'sonarjs/non-existent-operator': [2],
'sonarjs/prefer-immediate-return': [0],
'sonarjs/prefer-object-literal': [0],
@@ -758,6 +767,7 @@ export default defineConfig([
'unicorn/filename-case': [0],
'unicorn/import-index': [0],
'unicorn/import-style': [0],
+ 'unicorn/isolated-functions': [2, {functions: []}],
'unicorn/new-for-builtins': [2],
'unicorn/no-abusive-eslint-disable': [0],
'unicorn/no-anonymous-default-export': [0],
@@ -797,7 +807,7 @@ export default defineConfig([
'unicorn/no-unnecessary-await': [2],
'unicorn/no-unnecessary-polyfills': [2],
'unicorn/no-unreadable-array-destructuring': [0],
- 'unicorn/no-unreadable-iife': [2],
+ 'unicorn/no-unreadable-iife': [0],
'unicorn/no-unused-properties': [2],
'unicorn/no-useless-collection-argument': [2],
'unicorn/no-useless-fallback-in-spread': [2],
@@ -810,7 +820,7 @@ export default defineConfig([
'unicorn/number-literal-case': [0],
'unicorn/numeric-separators-style': [0],
'unicorn/prefer-add-event-listener': [2],
- 'unicorn/prefer-array-find': [2],
+ 'unicorn/prefer-array-find': [0], // handled by @typescript-eslint/prefer-find
'unicorn/prefer-array-flat': [2],
'unicorn/prefer-array-flat-map': [2],
'unicorn/prefer-array-index-of': [2],
@@ -827,7 +837,7 @@ export default defineConfig([
'unicorn/prefer-event-target': [2],
'unicorn/prefer-export-from': [0],
'unicorn/prefer-global-this': [0],
- 'unicorn/prefer-includes': [2],
+ 'unicorn/prefer-includes': [0], // handled by @typescript-eslint/prefer-includes
'unicorn/prefer-json-parse-buffer': [0],
'unicorn/prefer-keyboard-event-key': [2],
'unicorn/prefer-logical-operator-over-ternary': [2],
@@ -854,7 +864,7 @@ export default defineConfig([
'unicorn/prefer-string-raw': [0],
'unicorn/prefer-string-replace-all': [0],
'unicorn/prefer-string-slice': [0],
- 'unicorn/prefer-string-starts-ends-with': [2],
+ 'unicorn/prefer-string-starts-ends-with': [0], // handled by @typescript-eslint/prefer-string-starts-ends-with
'unicorn/prefer-string-trim-start-end': [2],
'unicorn/prefer-structured-clone': [2],
'unicorn/prefer-switch': [0],
@@ -915,8 +925,7 @@ export default defineConfig([
},
extends: [
vue.configs['flat/recommended'],
- // @ts-expect-error
- vueScopedCss.configs['flat/recommended'],
+ vueScopedCss.configs['flat/recommended'] as any,
],
rules: {
'vue/attributes-order': [0],
@@ -926,12 +935,6 @@ export default defineConfig([
'vue/require-typed-ref': [2],
},
},
- {
- files: ['web_src/js/modules/fetch.ts', 'web_src/js/standalone/**/*'],
- rules: {
- 'no-restricted-syntax': [2, ...restrictedSyntax],
- },
- },
{
files: ['**/*.test.ts', 'web_src/js/test/setup.ts'],
plugins: {vitest},
@@ -985,42 +988,23 @@ export default defineConfig([
'vitest/require-to-throw-message': [0],
'vitest/require-top-level-describe': [0],
'vitest/valid-describe-callback': [2],
- 'vitest/valid-expect': [2],
+ 'vitest/valid-expect': [2, {maxArgs: 2}],
'vitest/valid-title': [2],
},
},
- {
- files: ['web_src/js/types.ts'],
- rules: {
- 'import-x/no-unused-modules': [0],
- },
- },
{
files: ['**/*.d.ts'],
rules: {
- 'import-x/no-unused-modules': [0],
'@typescript-eslint/consistent-type-definitions': [0],
'@typescript-eslint/consistent-type-imports': [0],
},
},
{
- files: ['*.config.*'],
- rules: {
- 'import-x/no-unused-modules': [0],
- },
- },
- {
- files: ['web_src/**/*', 'docs/**/*'],
- languageOptions: {globals: globals.browser},
+ files: ['*', 'tools/**/*'],
+ languageOptions: {globals: globals.nodeBuiltin},
},
{
files: ['web_src/**/*'],
- languageOptions: {
- globals: {
- ...globals.browser,
- __webpack_public_path__: true,
- process: false, // https://github.com/webpack/webpack/issues/15833
- },
- },
+ languageOptions: {globals: {...globals.browser, ...globals.webpack}},
},
]);
diff --git a/flake.lock b/flake.lock
index 4cbc85b87a..a608aa3b89 100644
--- a/flake.lock
+++ b/flake.lock
@@ -2,11 +2,11 @@
"nodes": {
"nixpkgs": {
"locked": {
- "lastModified": 1760038930,
- "narHash": "sha256-Oncbh0UmHjSlxO7ErQDM3KM0A5/Znfofj2BSzlHLeVw=",
+ "lastModified": 1771369470,
+ "narHash": "sha256-0NBlEBKkN3lufyvFegY4TYv5mCNHbi5OmBDrzihbBMQ=",
"owner": "nixos",
"repo": "nixpkgs",
- "rev": "0b4defa2584313f3b781240b29d61f6f9f7e0df3",
+ "rev": "0182a361324364ae3f436a63005877674cf45efb",
"type": "github"
},
"original": {
diff --git a/go.mod b/go.mod
index a89c2a5c73..b7a3af6a3f 100644
--- a/go.mod
+++ b/go.mod
@@ -1,8 +1,6 @@
module code.gitea.io/gitea
-go 1.25.0
-
-toolchain go1.25.5
+go 1.26.0
// rfc5280 said: "The serial number is an integer assigned by the CA to each certificate."
// But some CAs use negative serial number, just relax the check. related:
@@ -11,9 +9,9 @@ godebug x509negativeserial=1
require (
code.gitea.io/actions-proto-go v0.4.1
- code.gitea.io/sdk/gitea v0.22.0
+ code.gitea.io/sdk/gitea v0.23.2
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570
- connectrpc.com/connect v1.18.1
+ connectrpc.com/connect v1.19.1
gitea.com/go-chi/binding v0.0.0-20240430071103-39a851e106ed
gitea.com/go-chi/cache v0.2.1
gitea.com/go-chi/captcha v0.0.0-20240315150714-fb487f629098
@@ -26,24 +24,22 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.2
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
github.com/ProtonMail/go-crypto v1.3.0
- github.com/PuerkitoBio/goquery v1.10.3
+ github.com/PuerkitoBio/goquery v1.11.0
github.com/SaveTheRbtz/zstd-seekable-format-go/pkg v0.8.0
- github.com/alecthomas/chroma/v2 v2.21.1
- github.com/aws/aws-sdk-go-v2/credentials v1.18.10
- github.com/aws/aws-sdk-go-v2/service/codecommit v1.32.2
+ github.com/alecthomas/chroma/v2 v2.23.1
+ github.com/aws/aws-sdk-go-v2/credentials v1.19.7
+ github.com/aws/aws-sdk-go-v2/service/codecommit v1.33.8
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb
- github.com/blevesearch/bleve/v2 v2.5.3
+ github.com/blevesearch/bleve/v2 v2.5.7
github.com/bohde/codel v0.2.0
github.com/buildkite/terminal-to-html/v3 v3.16.8
- github.com/caddyserver/certmagic v0.24.0
+ github.com/caddyserver/certmagic v0.25.1
github.com/charmbracelet/git-lfs-transfer v0.1.1-0.20251013092601-6327009efd21
github.com/chi-middleware/proxy v1.1.1
github.com/dimiro1/reply v0.0.0-20200315094148-d0136a4c9e21
- github.com/djherbis/buffer v1.2.0
- github.com/djherbis/nio/v3 v3.0.1
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707
github.com/dustin/go-humanize v1.0.1
- github.com/editorconfig/editorconfig-core-go/v2 v2.6.3
+ github.com/editorconfig/editorconfig-core-go/v2 v2.6.4
github.com/emersion/go-imap v1.2.1
github.com/emirpasic/gods v1.18.1
github.com/ethantkoenig/rupture v1.0.1
@@ -52,54 +48,53 @@ require (
github.com/gliderlabs/ssh v0.3.8
github.com/go-ap/activitypub v0.0.0-20250810115208-cb73b20a1742
github.com/go-ap/jsonld v0.0.0-20221030091449-f2a191312c73
- github.com/go-chi/chi/v5 v5.2.3
+ github.com/go-chi/chi/v5 v5.2.5
github.com/go-chi/cors v1.2.2
github.com/go-co-op/gocron v1.37.0
- github.com/go-enry/go-enry/v2 v2.9.2
- github.com/go-git/go-billy/v5 v5.6.2
- github.com/go-git/go-git/v5 v5.16.3
- github.com/go-ldap/ldap/v3 v3.4.11
- github.com/go-redsync/redsync/v4 v4.13.0
+ github.com/go-enry/go-enry/v2 v2.9.4
+ github.com/go-git/go-billy/v5 v5.7.0
+ github.com/go-git/go-git/v5 v5.16.5
+ github.com/go-ldap/ldap/v3 v3.4.12
+ github.com/go-redsync/redsync/v4 v4.15.0
github.com/go-sql-driver/mysql v1.9.3
github.com/go-webauthn/webauthn v0.13.4
github.com/goccy/go-json v0.10.5
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f
github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85
- github.com/golang-jwt/jwt/v5 v5.3.0
+ github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/go-github/v74 v74.0.0
github.com/google/licenseclassifier/v2 v2.0.0
- github.com/google/pprof v0.0.0-20250820193118-f64d9cf942d6
+ github.com/google/pprof v0.0.0-20260202012954-cb029daf43ef
github.com/google/uuid v1.6.0
github.com/gorilla/feeds v1.2.0
github.com/gorilla/sessions v1.4.0
- github.com/hashicorp/go-version v1.7.0
+ github.com/hashicorp/go-version v1.8.0
github.com/hashicorp/golang-lru/v2 v2.0.7
github.com/huandu/xstrings v1.5.0
github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056
github.com/jhillyerd/enmime v1.3.0
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
- github.com/klauspost/compress v1.18.0
+ github.com/klauspost/compress v1.18.3
github.com/klauspost/cpuid/v2 v2.3.0
- github.com/lib/pq v1.10.9
+ github.com/lib/pq v1.11.1
github.com/markbates/goth v1.82.0
github.com/mattn/go-isatty v0.0.20
- github.com/mattn/go-sqlite3 v1.14.32
- github.com/meilisearch/meilisearch-go v0.33.2
- github.com/mholt/archives v0.0.0-20251009205813-e30ac6010726
+ github.com/mattn/go-sqlite3 v1.14.33
+ github.com/meilisearch/meilisearch-go v0.36.0
+ github.com/mholt/archives v0.1.5
github.com/microcosm-cc/bluemonday v1.0.27
- github.com/microsoft/go-mssqldb v1.9.3
- github.com/minio/minio-go/v7 v7.0.95
+ github.com/microsoft/go-mssqldb v1.9.6
+ github.com/minio/minio-go/v7 v7.0.98
github.com/msteinert/pam v1.2.0
github.com/nektos/act v0.2.63
github.com/niklasfasching/go-org v1.9.1
github.com/olivere/elastic/v7 v7.0.32
github.com/opencontainers/go-digest v1.0.0
github.com/opencontainers/image-spec v1.1.1
- github.com/pkg/errors v0.9.1
github.com/pquerna/otp v1.5.0
- github.com/prometheus/client_golang v1.23.0
+ github.com/prometheus/client_golang v1.23.2
github.com/quasoft/websspi v1.1.2
- github.com/redis/go-redis/v9 v9.12.1
+ github.com/redis/go-redis/v9 v9.17.3
github.com/robfig/cron/v3 v3.0.1
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1
github.com/sassoftware/go-rpmutils v0.4.0
@@ -113,29 +108,29 @@ require (
github.com/wneessen/go-mail v0.7.2
github.com/xeipuuv/gojsonschema v1.2.0
github.com/yohcop/openid-go v1.0.1
- github.com/yuin/goldmark v1.7.13
+ github.com/yuin/goldmark v1.7.16
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc
github.com/yuin/goldmark-meta v1.1.0
gitlab.com/gitlab-org/api/client-go v0.142.4
- golang.org/x/crypto v0.45.0
- golang.org/x/image v0.30.0
- golang.org/x/net v0.47.0
- golang.org/x/oauth2 v0.30.0
- golang.org/x/sync v0.18.0
- golang.org/x/sys v0.38.0
- golang.org/x/text v0.31.0
- google.golang.org/grpc v1.75.0
- google.golang.org/protobuf v1.36.8
- gopkg.in/ini.v1 v1.67.0
+ golang.org/x/crypto v0.47.0
+ golang.org/x/image v0.35.0
+ golang.org/x/net v0.49.0
+ golang.org/x/oauth2 v0.34.0
+ golang.org/x/sync v0.19.0
+ golang.org/x/sys v0.40.0
+ golang.org/x/text v0.33.0
+ google.golang.org/grpc v1.78.0
+ google.golang.org/protobuf v1.36.11
+ gopkg.in/ini.v1 v1.67.1
gopkg.in/yaml.v3 v3.0.1
mvdan.cc/xurls/v2 v2.6.0
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251
xorm.io/builder v0.3.13
- xorm.io/xorm v1.3.10
+ xorm.io/xorm v1.3.11
)
require (
- cloud.google.com/go/compute/metadata v0.8.0 // indirect
+ cloud.google.com/go/compute/metadata v0.9.0 // indirect
code.gitea.io/gitea-vet v0.2.3 // indirect
dario.cat/mergo v1.0.2 // indirect
filippo.io/edwards25519 v1.1.0 // indirect
@@ -148,20 +143,20 @@ require (
github.com/andybalholm/brotli v1.2.0 // indirect
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect
- github.com/aws/aws-sdk-go-v2 v1.38.3 // indirect
- github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.6 // indirect
- github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.6 // indirect
- github.com/aws/smithy-go v1.23.0 // indirect
+ github.com/aws/aws-sdk-go-v2 v1.41.1 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 // indirect
+ github.com/aws/smithy-go v1.24.0 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.24.0 // indirect
- github.com/blevesearch/bleve_index_api v1.2.9 // indirect
+ github.com/blevesearch/bleve_index_api v1.2.11 // indirect
github.com/blevesearch/geo v0.2.4 // indirect
- github.com/blevesearch/go-faiss v1.0.25 // indirect
+ github.com/blevesearch/go-faiss v1.0.26 // indirect
github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
github.com/blevesearch/gtreap v0.1.1 // indirect
github.com/blevesearch/mmap-go v1.0.4 // indirect
- github.com/blevesearch/scorch_segment_api/v2 v2.3.11 // indirect
+ github.com/blevesearch/scorch_segment_api/v2 v2.3.13 // indirect
github.com/blevesearch/segment v0.9.1 // indirect
github.com/blevesearch/snowballstem v0.9.0 // indirect
github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
@@ -171,14 +166,14 @@ require (
github.com/blevesearch/zapx/v13 v13.4.2 // indirect
github.com/blevesearch/zapx/v14 v14.4.2 // indirect
github.com/blevesearch/zapx/v15 v15.4.2 // indirect
- github.com/blevesearch/zapx/v16 v16.2.4 // indirect
+ github.com/blevesearch/zapx/v16 v16.2.8 // indirect
github.com/bmatcuk/doublestar/v4 v4.9.1 // indirect
github.com/bodgit/plumbing v1.3.0 // indirect
github.com/bodgit/sevenzip v1.6.1 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/boombuler/barcode v1.1.0 // indirect
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf // indirect
- github.com/caddyserver/zerossl v0.1.3 // indirect
+ github.com/caddyserver/zerossl v0.1.4 // indirect
github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
@@ -202,11 +197,9 @@ require (
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-ini/ini v1.67.0 // indirect
github.com/go-webauthn/x v0.1.24 // indirect
- github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect
github.com/golang-sql/sqlexp v0.1.0 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
- github.com/golang/protobuf v1.5.4 // indirect
github.com/golang/snappy v1.0.0 // indirect
github.com/google/btree v1.1.3 // indirect
github.com/google/flatbuffers v25.2.10+incompatible // indirect
@@ -223,6 +216,7 @@ require (
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/kevinburke/ssh_config v1.4.0 // indirect
+ github.com/klauspost/crc32 v1.3.0 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/libdns/libdns v1.1.1 // indirect
github.com/mailru/easyjson v0.9.0 // indirect
@@ -230,8 +224,8 @@ require (
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mattn/go-shellwords v1.0.12 // indirect
- github.com/mholt/acmez/v3 v3.1.2 // indirect
- github.com/miekg/dns v1.1.68 // indirect
+ github.com/mholt/acmez/v3 v3.1.4 // indirect
+ github.com/miekg/dns v1.1.69 // indirect
github.com/mikelolasagasti/xz v1.0.1 // indirect
github.com/minio/crc64nvme v1.1.1 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
@@ -251,20 +245,22 @@ require (
github.com/philhofer/fwd v1.2.0 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pjbgf/sha1cd v0.4.0 // indirect
+ github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
- github.com/prometheus/common v0.65.0 // indirect
+ github.com/prometheus/common v0.66.1 // indirect
github.com/prometheus/procfs v0.17.0 // indirect
github.com/rhysd/actionlint v1.7.7 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rs/xid v1.6.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
+ github.com/shopspring/decimal v1.4.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.1 // indirect
github.com/sorairolake/lzip-go v0.3.8 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
- github.com/tinylib/msgp v1.4.0 // indirect
+ github.com/tinylib/msgp v1.6.1 // indirect
github.com/unknwon/com v1.0.1 // indirect
github.com/valyala/fastjson v1.6.4 // indirect
github.com/x448/float16 v0.8.4 // indirect
@@ -277,14 +273,16 @@ require (
go.etcd.io/bbolt v1.4.3 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
- go.uber.org/zap v1.27.0 // indirect
+ go.uber.org/zap v1.27.1 // indirect
go.uber.org/zap/exp v0.3.0 // indirect
+ go.yaml.in/yaml/v2 v2.4.2 // indirect
+ go.yaml.in/yaml/v3 v3.0.4 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b // indirect
- golang.org/x/mod v0.29.0 // indirect
+ golang.org/x/mod v0.31.0 // indirect
golang.org/x/time v0.12.0 // indirect
- golang.org/x/tools v0.38.0 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1 // indirect
+ golang.org/x/tools v0.40.0 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
)
@@ -296,8 +294,6 @@ ignore (
replace github.com/jaytaylor/html2text => github.com/Necoro/html2text v0.0.0-20250804200300-7bf1ce1c7347
-replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1
-
replace github.com/nektos/act => gitea.com/gitea/act v0.261.7-0.20251003180512-ac6e4b751763
replace git.sr.ht/~mariusor/go-xsd-duration => gitea.com/gitea/go-xsd-duration v0.0.0-20220703122237-02e73435a078
diff --git a/go.sum b/go.sum
index ac70239339..1a6decc18b 100644
--- a/go.sum
+++ b/go.sum
@@ -9,8 +9,8 @@ cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6T
cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
-cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA=
-cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw=
+cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs=
+cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
@@ -20,12 +20,12 @@ code.gitea.io/actions-proto-go v0.4.1 h1:l0EYhjsgpUe/1VABo2eK7zcoNX2W44WOnb0MSLr
code.gitea.io/actions-proto-go v0.4.1/go.mod h1:mn7Wkqz6JbnTOHQpot3yDeHx+O5C9EGhMEE+htvHBas=
code.gitea.io/gitea-vet v0.2.3 h1:gdFmm6WOTM65rE8FUBTRzeQZYzXePKSSB1+r574hWwI=
code.gitea.io/gitea-vet v0.2.3/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE=
-code.gitea.io/sdk/gitea v0.22.0 h1:HCKq7bX/HQ85Nw7c/HAhWgRye+vBp5nQOE8Md1+9Ef0=
-code.gitea.io/sdk/gitea v0.22.0/go.mod h1:yyF5+GhljqvA30sRDreoyHILruNiy4ASufugzYg0VHM=
+code.gitea.io/sdk/gitea v0.23.2 h1:iJB1FDmLegwfwjX8gotBDHdPSbk/ZR8V9VmEJaVsJYg=
+code.gitea.io/sdk/gitea v0.23.2/go.mod h1:yyF5+GhljqvA30sRDreoyHILruNiy4ASufugzYg0VHM=
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570 h1:TXbikPqa7YRtfU9vS6QJBg77pUvbEb6StRdZO8t1bEY=
codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570/go.mod h1:IIAjsijsd8q1isWX8MACefDEgTQslQ4stk2AeeTt3kM=
-connectrpc.com/connect v1.18.1 h1:PAg7CjSAGvscaf6YZKUefjoih5Z/qYkyaTrBW8xvYPw=
-connectrpc.com/connect v1.18.1/go.mod h1:0292hj1rnx8oFrStN7cB4jjVBeqs+Yx5yDIC2prWDO8=
+connectrpc.com/connect v1.19.1 h1:R5M57z05+90EfEvCY1b7hBxDVOUl45PrtXtAV2fOC14=
+connectrpc.com/connect v1.19.1/go.mod h1:tN20fjdGlewnSFeZxLKb0xwIZ6ozc3OQs2hTXy4du9w=
dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8=
dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
@@ -53,8 +53,6 @@ github.com/42wim/httpsig v1.2.3 h1:xb0YyWhkYj57SPtfSttIobJUPJZB9as1nsfo7KWVcEs=
github.com/42wim/httpsig v1.2.3/go.mod h1:nZq9OlYKDrUBhptd77IHx4/sZZD+IxTBADvAPI9G/EM=
github.com/42wim/sshsig v0.0.0-20250502153856-5100632e8920 h1:mWAVGlovzUfREJBhm0GwJnDNu21yRrL9QH9NIzAU3rg=
github.com/42wim/sshsig v0.0.0-20250502153856-5100632e8920/go.mod h1:zWxcT7BIWOe05xVJL0VMvO/PJ6RpoCux10heb77H6Q8=
-github.com/6543/go-version v1.3.1 h1:HvOp+Telns7HWJ2Xo/05YXQSB2bE0WmVgbHqwMPZT4U=
-github.com/6543/go-version v1.3.1/go.mod h1:oqFAHCwtLVUTLdhQmVZWYvaHXTdsbB4SY85at64SQEo=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.19.0 h1:ci6Yd6nysBRLEodoziB6ah1+YOzZbZk+NYneoA6q+6E=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.19.0/go.mod h1:QyVsSSN64v5TGltphKLQ2sQxe4OBQg0J1eKRcVBnfgE=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
@@ -85,8 +83,8 @@ github.com/Necoro/html2text v0.0.0-20250804200300-7bf1ce1c7347 h1:3JhDl+JysaO8nh
github.com/Necoro/html2text v0.0.0-20250804200300-7bf1ce1c7347/go.mod h1:2ErI0aycD43Ufr6CFK5lT/NrHGmoZuVbn1nlPThw69o=
github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw=
github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
-github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
-github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
+github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43xxfqw=
+github.com/PuerkitoBio/goquery v1.11.0/go.mod h1:wQHgxUOU3JGuj3oD/QFfxUdlzW6xPHfqyHre6VMY4DQ=
github.com/RoaringBitmap/roaring v0.4.23/go.mod h1:D0gp8kJQgE1A4LQ5wFLggQEyvDi06Mq5mKs52e1TwOo=
github.com/RoaringBitmap/roaring v0.7.1/go.mod h1:jdT9ykXwHFNdJbEtxePexlFYH9LXucApeS0/+/g+p1I=
github.com/RoaringBitmap/roaring/v2 v2.10.0 h1:HbJ8Cs71lfCJyvmSptxeMX2PtvOC8yonlU0GQcy2Ak0=
@@ -98,13 +96,13 @@ github.com/SaveTheRbtz/zstd-seekable-format-go/pkg v0.8.0/go.mod h1:1HmmMEVsr+0R
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/chroma/v2 v2.2.0/go.mod h1:vf4zrexSH54oEjJ7EdB65tGNHmH3pGZmVkgTP5RHvAs=
-github.com/alecthomas/chroma/v2 v2.21.1 h1:FaSDrp6N+3pphkNKU6HPCiYLgm8dbe5UXIXcoBhZSWA=
-github.com/alecthomas/chroma/v2 v2.21.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o=
+github.com/alecthomas/chroma/v2 v2.23.1 h1:nv2AVZdTyClGbVQkIzlDm/rnhk1E9bU9nXwmZ/Vk/iY=
+github.com/alecthomas/chroma/v2 v2.23.1/go.mod h1:NqVhfBR0lte5Ouh3DcthuUCTUpDC9cxBOfyMbMQPs3o=
github.com/alecthomas/repr v0.0.0-20220113201626-b1b626ac65ae/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
github.com/alecthomas/repr v0.5.2 h1:SU73FTI9D1P5UNtvseffFSGmdNci/O6RsqzeXJtP0Qs=
github.com/alecthomas/repr v0.5.2/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
-github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7VVbI0o4wBRNQIgn917usHWOd6VAffYI=
-github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
+github.com/alexbrainman/sspi v0.0.0-20250919150558-7d374ff0d59e h1:4dAU9FXIyQktpoUAgOJK3OTFc/xug0PCXYCqU0FgDKI=
+github.com/alexbrainman/sspi v0.0.0-20250919150558-7d374ff0d59e/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
@@ -114,18 +112,18 @@ github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuW
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
-github.com/aws/aws-sdk-go-v2 v1.38.3 h1:B6cV4oxnMs45fql4yRH+/Po/YU+597zgWqvDpYMturk=
-github.com/aws/aws-sdk-go-v2 v1.38.3/go.mod h1:sDioUELIUO9Znk23YVmIk86/9DOpkbyyVb1i/gUNFXY=
-github.com/aws/aws-sdk-go-v2/credentials v1.18.10 h1:xdJnXCouCx8Y0NncgoptztUocIYLKeQxrCgN6x9sdhg=
-github.com/aws/aws-sdk-go-v2/credentials v1.18.10/go.mod h1:7tQk08ntj914F/5i9jC4+2HQTAuJirq7m1vZVIhEkWs=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.6 h1:uF68eJA6+S9iVr9WgX1NaRGyQ/6MdIyc4JNUo6TN1FA=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.6/go.mod h1:qlPeVZCGPiobx8wb1ft0GHT5l+dc6ldnwInDFaMvC7Y=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.6 h1:pa1DEC6JoI0zduhZePp3zmhWvk/xxm4NB8Hy/Tlsgos=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.6/go.mod h1:gxEjPebnhWGJoaDdtDkA0JX46VRg1wcTHYe63OfX5pE=
-github.com/aws/aws-sdk-go-v2/service/codecommit v1.32.2 h1:qIySgaSYDLcInLpY0e7HPCi+AVeD/LTsl9EL1b692oA=
-github.com/aws/aws-sdk-go-v2/service/codecommit v1.32.2/go.mod h1:SobWM1535Mn1WuThoIVLiLa/C1rRbxbbq5PZW2QFCIM=
-github.com/aws/smithy-go v1.23.0 h1:8n6I3gXzWJB2DxBDnfxgBaSX6oe0d/t10qGz7OKqMCE=
-github.com/aws/smithy-go v1.23.0/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI=
+github.com/aws/aws-sdk-go-v2 v1.41.1 h1:ABlyEARCDLN034NhxlRUSZr4l71mh+T5KAeGh6cerhU=
+github.com/aws/aws-sdk-go-v2 v1.41.1/go.mod h1:MayyLB8y+buD9hZqkCW3kX1AKq07Y5pXxtgB+rRFhz0=
+github.com/aws/aws-sdk-go-v2/credentials v1.19.7 h1:tHK47VqqtJxOymRrNtUXN5SP/zUTvZKeLx4tH6PGQc8=
+github.com/aws/aws-sdk-go-v2/credentials v1.19.7/go.mod h1:qOZk8sPDrxhf+4Wf4oT2urYJrYt3RejHSzgAquYeppw=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 h1:xOLELNKGp2vsiteLsvLPwxC+mYmO6OZ8PYgiuPJzF8U=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17/go.mod h1:5M5CI3D12dNOtH3/mk6minaRwI2/37ifCURZISxA/IQ=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 h1:WWLqlh79iO48yLkj1v3ISRNiv+3KdQoZ6JWyfcsyQik=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17/go.mod h1:EhG22vHRrvF8oXSTYStZhJc1aUgKtnJe+aOiFEV90cM=
+github.com/aws/aws-sdk-go-v2/service/codecommit v1.33.8 h1:KxKGfYvkVOe/U/Z4yAd0ZySRJHavuL31VOC+fn7WEAs=
+github.com/aws/aws-sdk-go-v2/service/codecommit v1.33.8/go.mod h1:cznnFD3BzYY+NB+4WoQ7SxdTACOsMqGCbQ5QaByPz4w=
+github.com/aws/smithy-go v1.24.0 h1:LpilSUItNPFr1eY85RYgTIg5eIEPtvFbskaFcmmIUnk=
+github.com/aws/smithy-go v1.24.0/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
@@ -138,15 +136,15 @@ github.com/bits-and-blooms/bitset v1.24.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb h1:m935MPodAbYS46DG4pJSv7WO+VECIWUQ7OJYSoTrMh4=
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb/go.mod h1:PkYb9DJNAwrSvRx5DYA+gUcOIgTGVMNkfSCbZM8cWpI=
github.com/blevesearch/bleve/v2 v2.0.5/go.mod h1:ZjWibgnbRX33c+vBRgla9QhPb4QOjD6fdVJ+R1Bk8LM=
-github.com/blevesearch/bleve/v2 v2.5.3 h1:9l1xtKaETv64SZc1jc4Sy0N804laSa/LeMbYddq1YEM=
-github.com/blevesearch/bleve/v2 v2.5.3/go.mod h1:Z/e8aWjiq8HeX+nW8qROSxiE0830yQA071dwR3yoMzw=
+github.com/blevesearch/bleve/v2 v2.5.7 h1:2d9YrL5zrX5EBBW++GOaEKjE+NPWeZGaX77IM26m1Z8=
+github.com/blevesearch/bleve/v2 v2.5.7/go.mod h1:yj0NlS7ocGC4VOSAedqDDMktdh2935v2CSWOCDMHdSA=
github.com/blevesearch/bleve_index_api v1.0.0/go.mod h1:fiwKS0xLEm+gBRgv5mumf0dhgFr2mDgZah1pqv1c1M4=
-github.com/blevesearch/bleve_index_api v1.2.9 h1:WqD3kvYwnlYLv8sTdH+AF7n/L4v969Cek68+wZnYj4Q=
-github.com/blevesearch/bleve_index_api v1.2.9/go.mod h1:rKQDl4u51uwafZxFrPD1R7xFOwKnzZW7s/LSeK4lgo0=
+github.com/blevesearch/bleve_index_api v1.2.11 h1:bXQ54kVuwP8hdrXUSOnvTQfgK0KI1+f9A0ITJT8tX1s=
+github.com/blevesearch/bleve_index_api v1.2.11/go.mod h1:rKQDl4u51uwafZxFrPD1R7xFOwKnzZW7s/LSeK4lgo0=
github.com/blevesearch/geo v0.2.4 h1:ECIGQhw+QALCZaDcogRTNSJYQXRtC8/m8IKiA706cqk=
github.com/blevesearch/geo v0.2.4/go.mod h1:K56Q33AzXt2YExVHGObtmRSFYZKYGv0JEN5mdacJJR8=
-github.com/blevesearch/go-faiss v1.0.25 h1:lel1rkOUGbT1CJ0YgzKwC7k+XH0XVBHnCVWahdCXk4U=
-github.com/blevesearch/go-faiss v1.0.25/go.mod h1:OMGQwOaRRYxrmeNdMrXJPvVx8gBnvE5RYrr0BahNnkk=
+github.com/blevesearch/go-faiss v1.0.26 h1:4dRLolFgjPyjkaXwff4NfbZFdE/dfywbzDqporeQvXI=
+github.com/blevesearch/go-faiss v1.0.26/go.mod h1:OMGQwOaRRYxrmeNdMrXJPvVx8gBnvE5RYrr0BahNnkk=
github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo=
github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M=
github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y=
@@ -155,8 +153,8 @@ github.com/blevesearch/mmap-go v1.0.2/go.mod h1:ol2qBqYaOUsGdm7aRMRrYGgPvnwLe6Y+
github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
github.com/blevesearch/scorch_segment_api/v2 v2.0.1/go.mod h1:lq7yK2jQy1yQjtjTfU931aVqz7pYxEudHaDwOt1tXfU=
-github.com/blevesearch/scorch_segment_api/v2 v2.3.11 h1:bYuEgsyGqgU/gy0/Vk6g1eCUqGBs2r+3bRCv+Cnq2kc=
-github.com/blevesearch/scorch_segment_api/v2 v2.3.11/go.mod h1:aAWoeQ3DdoZ3Z5138jXVSd1T/klGwvg11z0pSxrJSEk=
+github.com/blevesearch/scorch_segment_api/v2 v2.3.13 h1:ZPjv/4VwWvHJZKeMSgScCapOy8+DdmsmRyLmSB88UoY=
+github.com/blevesearch/scorch_segment_api/v2 v2.3.13/go.mod h1:ENk2LClTehOuMS8XzN3UxBEErYmtwkE7MAArFTXs9Vc=
github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH0u5spnW85UQ=
github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
@@ -184,8 +182,8 @@ github.com/blevesearch/zapx/v14 v14.4.2/go.mod h1:rz0XNb/OZSMjNorufDGSpFpjoFKhXm
github.com/blevesearch/zapx/v15 v15.2.0/go.mod h1:MmQceLpWfME4n1WrBFIwplhWmaQbQqLQARpaKUEOs/A=
github.com/blevesearch/zapx/v15 v15.4.2 h1:sWxpDE0QQOTjyxYbAVjt3+0ieu8NCE0fDRaFxEsp31k=
github.com/blevesearch/zapx/v15 v15.4.2/go.mod h1:1pssev/59FsuWcgSnTa0OeEpOzmhtmr/0/11H0Z8+Nw=
-github.com/blevesearch/zapx/v16 v16.2.4 h1:tGgfvleXTAkwsD5mEzgM3zCS/7pgocTCnO1oyAUjlww=
-github.com/blevesearch/zapx/v16 v16.2.4/go.mod h1:Rti/REtuuMmzwsI8/C/qIzRaEoSK/wiFYw5e5ctUKKs=
+github.com/blevesearch/zapx/v16 v16.2.8 h1:SlnzF0YGtSlrsOE3oE7EgEX6BIepGpeqxs1IjMbHLQI=
+github.com/blevesearch/zapx/v16 v16.2.8/go.mod h1:murSoCJPCk25MqURrcJaBQ1RekuqSCSfMjXH4rHyA14=
github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE=
github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
github.com/bmizerany/perks v0.0.0-20141205001514-d9a9656a3a4b/go.mod h1:ac9efd0D1fsDb3EJvhqgXRbFx7bs2wqZ10HQPeU8U/Q=
@@ -208,10 +206,10 @@ github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
github.com/buildkite/terminal-to-html/v3 v3.16.8 h1:QN/daUob6cmK8GcdKnwn9+YTlPr1vNj+oeAIiJK6fPc=
github.com/buildkite/terminal-to-html/v3 v3.16.8/go.mod h1:+k1KVKROZocrTLsEQ9PEf9A+8+X8uaVV5iO1ZIOwKYM=
-github.com/caddyserver/certmagic v0.24.0 h1:EfXTWpxHAUKgDfOj6MHImJN8Jm4AMFfMT6ITuKhrDF0=
-github.com/caddyserver/certmagic v0.24.0/go.mod h1:xPT7dC1DuHHnS2yuEQCEyks+b89sUkMENh8dJF+InLE=
-github.com/caddyserver/zerossl v0.1.3 h1:onS+pxp3M8HnHpN5MMbOMyNjmTheJyWRaZYwn+YTAyA=
-github.com/caddyserver/zerossl v0.1.3/go.mod h1:CxA0acn7oEGO6//4rtrRjYgEoa4MFw/XofZnrYwGqG4=
+github.com/caddyserver/certmagic v0.25.1 h1:4sIKKbOt5pg6+sL7tEwymE1x2bj6CHr80da1CRRIPbY=
+github.com/caddyserver/certmagic v0.25.1/go.mod h1:VhyvndxtVton/Fo/wKhRoC46Rbw1fmjvQ3GjHYSQTEY=
+github.com/caddyserver/zerossl v0.1.4 h1:CVJOE3MZeFisCERZjkxIcsqIH4fnFdlYWnPYeFtBHRw=
+github.com/caddyserver/zerossl v0.1.4/go.mod h1:CxA0acn7oEGO6//4rtrRjYgEoa4MFw/XofZnrYwGqG4=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a h1:MISbI8sU/PSK/ztvmWKFcI7UGb5/HQT7B+i3a2myKgI=
github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a/go.mod h1:2GxOXOlEPAMFPfp014mK1SWq8G8BN8o7/dfYqJrVGn8=
@@ -260,11 +258,6 @@ github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/r
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dimiro1/reply v0.0.0-20200315094148-d0136a4c9e21 h1:PdsjTl0Cg+ZJgOx/CFV5NNgO1ThTreqdgKYiDCMHJwA=
github.com/dimiro1/reply v0.0.0-20200315094148-d0136a4c9e21/go.mod h1:xJvkyD6Y2rZapGvPJLYo9dyx1s5dxBEDPa8T3YTuOk0=
-github.com/djherbis/buffer v1.1.0/go.mod h1:VwN8VdFkMY0DCALdY8o00d3IZ6Amz/UNVMWcSaJT44o=
-github.com/djherbis/buffer v1.2.0 h1:PH5Dd2ss0C7CRRhQCZ2u7MssF+No9ide8Ye71nPHcrQ=
-github.com/djherbis/buffer v1.2.0/go.mod h1:fjnebbZjCUpPinBRD+TDwXSOeNQ7fPQWLfGQqiAiUyE=
-github.com/djherbis/nio/v3 v3.0.1 h1:6wxhnuppteMa6RHA4L81Dq7ThkZH8SwnDzXDYy95vB4=
-github.com/djherbis/nio/v3 v3.0.1/go.mod h1:Ng4h80pbZFMla1yKzm61cF0tqqilXZYrogmWgZxOcmg=
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
@@ -276,8 +269,8 @@ github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdf
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/dvyukov/go-fuzz v0.0.0-20210429054444-fca39067bc72/go.mod h1:11Gm+ccJnvAhCNLlf5+cS9KjtbaD5I5zaZpFMsTHWTw=
-github.com/editorconfig/editorconfig-core-go/v2 v2.6.3 h1:XVUp6qW3BIkmM3/1EkrHpa6bL56APOynfXcZEmIgOhs=
-github.com/editorconfig/editorconfig-core-go/v2 v2.6.3/go.mod h1:ThHVc+hqbUsmE1wmK/MASpQEhCleWu1JDJDNhUOMy0c=
+github.com/editorconfig/editorconfig-core-go/v2 v2.6.4 h1:CHwUbBVVyKWRX9kt5A/OtwhYUJB32DrFp9xzmjR6cac=
+github.com/editorconfig/editorconfig-core-go/v2 v2.6.4/go.mod h1:JWRVKHdVW+dkv6F8p+xGCa6a+TyMrqsFbFkSs/aQkrQ=
github.com/elazarl/go-bindata-assetfs v1.0.1/go.mod h1:v+YaWX3bdea5J/mo8dSETolEo7R71Vk1u8bnjau5yw4=
github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
@@ -321,40 +314,40 @@ github.com/go-ap/jsonld v0.0.0-20221030091449-f2a191312c73/go.mod h1:jyveZeGw5La
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ4S3TGls2FvczZtj5Re/2ZzkV9VwqPHH/3Bo=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
github.com/go-chi/chi/v5 v5.0.1/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
-github.com/go-chi/chi/v5 v5.2.3 h1:WQIt9uxdsAbgIYgid+BpYc+liqQZGMHRaUwp0JUcvdE=
-github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
+github.com/go-chi/chi/v5 v5.2.5 h1:Eg4myHZBjyvJmAFjFvWgrqDTXFyOzjj7YIm3L3mu6Ug=
+github.com/go-chi/chi/v5 v5.2.5/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0=
github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
github.com/go-co-op/gocron v1.37.0 h1:ZYDJGtQ4OMhTLKOKMIch+/CY70Brbb1dGdooLEhh7b0=
github.com/go-co-op/gocron v1.37.0/go.mod h1:3L/n6BkO7ABj+TrfSVXLRzsP26zmikL4ISkLQ0O8iNY=
-github.com/go-enry/go-enry/v2 v2.9.2 h1:giOQAtCgBX08kosrX818DCQJTCNtKwoPBGu0qb6nKTY=
-github.com/go-enry/go-enry/v2 v2.9.2/go.mod h1:9yrj4ES1YrbNb1Wb7/PWYr2bpaCXUGRt0uafN0ISyG8=
+github.com/go-enry/go-enry/v2 v2.9.4 h1:DS4l06/NgMzYjsJ2J52wORo6UsfFDjDCwfAn7w3gG44=
+github.com/go-enry/go-enry/v2 v2.9.4/go.mod h1:9yrj4ES1YrbNb1Wb7/PWYr2bpaCXUGRt0uafN0ISyG8=
github.com/go-enry/go-oniguruma v1.2.1 h1:k8aAMuJfMrqm/56SG2lV9Cfti6tC4x8673aHCcBk+eo=
github.com/go-enry/go-oniguruma v1.2.1/go.mod h1:bWDhYP+S6xZQgiRL7wlTScFYBe023B6ilRZbCAD5Hf4=
github.com/go-fed/httpsig v1.1.1-0.20201223112313-55836744818e h1:oRq/fiirun5HqlEWMLIcDmLpIELlG4iGbd0s8iqgPi8=
github.com/go-fed/httpsig v1.1.1-0.20201223112313-55836744818e/go.mod h1:RCMrTZvN1bJYtofsG4rd5NaO5obxQ5xBkdiS7xsT7bM=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
-github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
-github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
+github.com/go-git/go-billy/v5 v5.7.0 h1:83lBUJhGWhYp0ngzCMSgllhUSuoHP1iEWYjsPl9nwqM=
+github.com/go-git/go-billy/v5 v5.7.0/go.mod h1:/1IUejTKH8xipsAcdfcSAlUlo2J7lkYV8GTKxAT/L3E=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
-github.com/go-git/go-git/v5 v5.16.3 h1:Z8BtvxZ09bYm/yYNgPKCzgWtaRqDTgIKRgIRHBfU6Z8=
-github.com/go-git/go-git/v5 v5.16.3/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8=
+github.com/go-git/go-git/v5 v5.16.5 h1:mdkuqblwr57kVfXri5TTH+nMFLNUxIj9Z7F5ykFbw5s=
+github.com/go-git/go-git/v5 v5.16.5/go.mod h1:QOMLpNf1qxuSY4StA/ArOdfFR2TrKEjJiye2kel2m+M=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
-github.com/go-ldap/ldap/v3 v3.4.11 h1:4k0Yxweg+a3OyBLjdYn5OKglv18JNvfDykSoI8bW0gU=
-github.com/go-ldap/ldap/v3 v3.4.11/go.mod h1:bY7t0FLK8OAVpp/vV6sSlpz3EQDGcQwc8pF0ujLgKvM=
+github.com/go-ldap/ldap/v3 v3.4.12 h1:1b81mv7MagXZ7+1r7cLTWmyuTqVqdwbtJSjC0DAp9s4=
+github.com/go-ldap/ldap/v3 v3.4.12/go.mod h1:+SPAGcTtOfmGsCb3h1RFiq4xpp4N636G75OEace8lNo=
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
github.com/go-redis/redis/v7 v7.4.1 h1:PASvf36gyUpr2zdOUS/9Zqc80GbM+9BDyiJSJDDOrTI=
github.com/go-redis/redis/v7 v7.4.1/go.mod h1:JDNMw23GTyLNC4GZu9njt15ctBQVn7xjRfnwdHj/Dcg=
github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
-github.com/go-redsync/redsync/v4 v4.13.0 h1:49X6GJfnbLGaIpBBREM/zA4uIMDXKAh1NDkvQ1EkZKA=
-github.com/go-redsync/redsync/v4 v4.13.0/go.mod h1:HMW4Q224GZQz6x1Xc7040Yfgacukdzu7ifTDAKiyErQ=
+github.com/go-redsync/redsync/v4 v4.15.0 h1:KH/XymuxSV7vyKs6z1Cxxj+N+N18JlPxgXeP6x4JY54=
+github.com/go-redsync/redsync/v4 v4.15.0/go.mod h1:qNp+lLs3vkfZbtA/aM/OjlZHfEr5YTAYhRktFPKHC7s=
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
@@ -373,10 +366,8 @@ github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f h1:3BSP1Tbs2djlpprl7w
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f/go.mod h1:Pcatq5tYkCW2Q6yrR2VRHlbHpZ/R4/7qyL1TCF7vl14=
github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85 h1:UjoPNDAQ5JPCjlxoJd6K8ALZqSDDhk2ymieAZOVaDg0=
github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85/go.mod h1:fR6z1Ie6rtF7kl/vBYMfgD5/G5B1blui7z426/sj2DU=
-github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI=
-github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
-github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
-github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
+github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
+github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
@@ -407,8 +398,8 @@ github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8l
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs=
github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/gomodule/redigo v1.8.9 h1:Sl3u+2BI/kk+VEatbj0scLdrFhjPmbxOc1myhDP41ws=
-github.com/gomodule/redigo v1.8.9/go.mod h1:7ArFNvsTjH8GMMzB4uy1snslv2BwmginuMs06a1uzZE=
+github.com/gomodule/redigo v1.9.3 h1:dNPSXeXv6HCq2jdyWfjgmhBdqnR6PRO3m/G05nvpPC8=
+github.com/gomodule/redigo v1.9.3/go.mod h1:KsU3hiK/Ay8U42qpaJk+kuNa3C+spxapWpM+ywhcgtw=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg=
@@ -441,8 +432,8 @@ github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OI
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik=
-github.com/google/pprof v0.0.0-20250820193118-f64d9cf942d6 h1:EEHtgt9IwisQ2AZ4pIsMjahcegHh6rmhqxzIRQIyepY=
-github.com/google/pprof v0.0.0-20250820193118-f64d9cf942d6/go.mod h1:I6V7YzU0XDpsHqbsyrghnFZLO1gwK6NPTNvmetQIk9U=
+github.com/google/pprof v0.0.0-20260202012954-cb029daf43ef h1:xpF9fUHpoIrrjX24DURVKiwHcFpw19ndIs+FwTSMbno=
+github.com/google/pprof v0.0.0-20260202012954-cb029daf43ef/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
@@ -481,6 +472,8 @@ github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVU
github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3iS0jrZ7LvzFGFmuKbw=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/hashicorp/go-version v1.8.0 h1:KAkNb1HAiZd1ukkxDFGmokVZe1Xy9HG6NUp+bPle2i4=
+github.com/hashicorp/go-version v1.8.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
@@ -525,12 +518,14 @@ github.com/kevinburke/ssh_config v1.4.0 h1:6xxtP5bZ2E4NF5tuQulISpTO2z8XbtH8cg1PW
github.com/kevinburke/ssh_config v1.4.0/go.mod h1:q2RIzfka+BXARoNexmF9gkxEX7DmvbW9P4hIVx2Kg4M=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
-github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
-github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
+github.com/klauspost/compress v1.18.3 h1:9PJRvfbmTabkOX8moIpXPbMMbYN60bWImDDU7L+/6zw=
+github.com/klauspost/compress v1.18.3/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
+github.com/klauspost/crc32 v1.3.0 h1:sSmTt3gUt81RP655XGZPElI0PelVTZ6YwCRnPSupoFM=
+github.com/klauspost/crc32 v1.3.0/go.mod h1:D7kQaZhnkX/Y0tstFGf8VUzv2UofNGqCjnC3zdHB0Hw=
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/kljensen/snowball v0.6.0/go.mod h1:27N7E8fVU5H68RlUmnWwZCfxgt4POBJfENGMvNRhldw=
@@ -546,8 +541,8 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
-github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
-github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/lib/pq v1.11.1 h1:wuChtj2hfsGmmx3nf1m7xC2XpK6OtelS2shMY+bGMtI=
+github.com/lib/pq v1.11.1/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA=
github.com/libdns/libdns v1.1.1 h1:wPrHrXILoSHKWJKGd0EiAVmiJbFShguILTg9leS/P/U=
github.com/libdns/libdns v1.1.1/go.mod h1:4Bj9+5CQiNMVGf87wjX4CY3HQJypUHRuLvlsfsZqLWQ=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
@@ -566,28 +561,28 @@ github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6T
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk=
github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y=
-github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
-github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
-github.com/meilisearch/meilisearch-go v0.33.2 h1:YgsQSLYhAkRN2ias6I1KNRTjdYCN5w2uHbLUQ+xgrws=
-github.com/meilisearch/meilisearch-go v0.33.2/go.mod h1:6eOPcQ+OAuwXvnONlfSgfgvr7TIAWM/6OdhcVHg8cF0=
-github.com/mholt/acmez/v3 v3.1.2 h1:auob8J/0FhmdClQicvJvuDavgd5ezwLBfKuYmynhYzc=
-github.com/mholt/acmez/v3 v3.1.2/go.mod h1:L1wOU06KKvq7tswuMDwKdcHeKpFFgkppZy/y0DFxagQ=
-github.com/mholt/archives v0.0.0-20251009205813-e30ac6010726 h1:narluFTg20M5KBwKxedpFiSMkdjQRRNUlpY4uAsKMwk=
-github.com/mholt/archives v0.0.0-20251009205813-e30ac6010726/go.mod h1:3TPMmBLPsgszL+1As5zECTuKwKvIfj6YcwWPpeTAXF4=
+github.com/mattn/go-sqlite3 v1.14.33 h1:A5blZ5ulQo2AtayQ9/limgHEkFreKj1Dv226a1K73s0=
+github.com/mattn/go-sqlite3 v1.14.33/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
+github.com/meilisearch/meilisearch-go v0.36.0 h1:N1etykTektXt5KPcSbhBO0d5Xx5NaKj4pJWEM7WA5dI=
+github.com/meilisearch/meilisearch-go v0.36.0/go.mod h1:HBfHzKMxcSbTOvqdfuRA/yf6Vk9IivcwKocWRuW7W78=
+github.com/mholt/acmez/v3 v3.1.4 h1:DyzZe/RnAzT3rpZj/2Ii5xZpiEvvYk3cQEN/RmqxwFQ=
+github.com/mholt/acmez/v3 v3.1.4/go.mod h1:L1wOU06KKvq7tswuMDwKdcHeKpFFgkppZy/y0DFxagQ=
+github.com/mholt/archives v0.1.5 h1:Fh2hl1j7VEhc6DZs2DLMgiBNChUux154a1G+2esNvzQ=
+github.com/mholt/archives v0.1.5/go.mod h1:3TPMmBLPsgszL+1As5zECTuKwKvIfj6YcwWPpeTAXF4=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
-github.com/microsoft/go-mssqldb v1.9.3 h1:hy4p+LDC8LIGvI3JATnLVmBOLMJbmn5X400mr5j0lPs=
-github.com/microsoft/go-mssqldb v1.9.3/go.mod h1:GBbW9ASTiDC+mpgWDGKdm3FnFLTUsLYN3iFL90lQ+PA=
-github.com/miekg/dns v1.1.68 h1:jsSRkNozw7G/mnmXULynzMNIsgY2dHC8LO6U6Ij2JEA=
-github.com/miekg/dns v1.1.68/go.mod h1:fujopn7TB3Pu3JM69XaawiU0wqjpL9/8xGop5UrTPps=
+github.com/microsoft/go-mssqldb v1.9.6 h1:1MNQg5UiSsokiPz3++K2KPx4moKrwIqly1wv+RyCKTw=
+github.com/microsoft/go-mssqldb v1.9.6/go.mod h1:yYMPDufyoF2vVuVCUGtZARr06DKFIhMrluTcgWlXpr4=
+github.com/miekg/dns v1.1.69 h1:Kb7Y/1Jo+SG+a2GtfoFUfDkG//csdRPwRLkCsxDG9Sc=
+github.com/miekg/dns v1.1.69/go.mod h1:7OyjD9nEba5OkqQ/hB4fy3PIoxafSZJtducccIelz3g=
github.com/mikelolasagasti/xz v1.0.1 h1:Q2F2jX0RYJUG3+WsM+FJknv+6eVjsjXNDV0KJXZzkD0=
github.com/mikelolasagasti/xz v1.0.1/go.mod h1:muAirjiOUxPRXwm9HdDtB3uoRPrGnL85XHtokL9Hcgc=
github.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI=
github.com/minio/crc64nvme v1.1.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
-github.com/minio/minio-go/v7 v7.0.95 h1:ywOUPg+PebTMTzn9VDsoFJy32ZuARN9zhB+K3IYEvYU=
-github.com/minio/minio-go/v7 v7.0.95/go.mod h1:wOOX3uxS334vImCNRVyIDdXX9OsXDm89ToynKgqUKlo=
+github.com/minio/minio-go/v7 v7.0.98 h1:MeAVKjLVz+XJ28zFcuYyImNSAh8Mq725uNW4beRisi0=
+github.com/minio/minio-go/v7 v7.0.98/go.mod h1:cY0Y+W7yozf0mdIclrttzo1Iiu7mEf9y7nk2uXqMOvM=
github.com/minio/minlz v1.0.1 h1:OUZUzXcib8diiX+JYxyRLIdomyZYzHct6EShOKtQY2A=
github.com/minio/minlz v1.0.1/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
@@ -608,6 +603,8 @@ github.com/msteinert/pam v1.2.0 h1:mYfjlvN2KYs2Pb9G6nb/1f/nPfAttT/Jee5Sq9r3bGE=
github.com/msteinert/pam v1.2.0/go.mod h1:d2n0DCUK8rGecChV3JzvmsDjOY4R7AYbsNxAT+ftQl0=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
+github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
+github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/niklasfasching/go-org v1.9.1 h1:/3s4uTPOF06pImGa2Yvlp24yKXZoTYM+nsIlMzfpg/0=
github.com/niklasfasching/go-org v1.9.1/go.mod h1:ZAGFFkWvUQcpazmi/8nHqwvARpr1xpb+Es67oUGX/48=
github.com/nwaples/rardecode/v2 v2.2.0 h1:4ufPGHiNe1rYJxYfehALLjup4Ls3ck42CWwjKiOqu0A=
@@ -658,24 +655,26 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRI
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pquerna/otp v1.5.0 h1:NMMR+WrmaqXU4EzdGJEE1aUUI0AMRzsp96fFFWNPwxs=
github.com/pquerna/otp v1.5.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg=
-github.com/prometheus/client_golang v1.23.0 h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc=
-github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE=
+github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
+github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
-github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE=
-github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8=
+github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
+github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
github.com/prometheus/procfs v0.17.0 h1:FuLQ+05u4ZI+SS/w9+BWEM2TXiHKsUQ9TADiRH7DuK0=
github.com/prometheus/procfs v0.17.0/go.mod h1:oPQLaDAMRbA+u8H5Pbfq+dl3VDAvHxMUOVhe0wYB2zw=
github.com/quasoft/websspi v1.1.2 h1:/mA4w0LxWlE3novvsoEL6BBA1WnjJATbjkh1kFrTidw=
github.com/quasoft/websspi v1.1.2/go.mod h1:HmVdl939dQ0WIXZhyik+ARdI03M6bQzaSEKcgpFmewk=
github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/redis/go-redis/v9 v9.12.1 h1:k5iquqv27aBtnTm2tIkROUDp8JBXhXZIVu1InSgvovg=
-github.com/redis/go-redis/v9 v9.12.1/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
-github.com/redis/rueidis v1.0.19 h1:s65oWtotzlIFN8eMPhyYwxlwLR1lUdhza2KtWprKYSo=
-github.com/redis/rueidis v1.0.19/go.mod h1:8B+r5wdnjwK3lTFml5VtxjzGOQAC+5UmujoD12pDrEo=
-github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk=
-github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
+github.com/redis/go-redis/v9 v9.17.3 h1:fN29NdNrE17KttK5Ndf20buqfDZwGNgoUr9qjl1DQx4=
+github.com/redis/go-redis/v9 v9.17.3/go.mod h1:u410H11HMLoB+TP67dz8rL9s6QW2j76l0//kSOd3370=
+github.com/redis/rueidis v1.0.69 h1:WlUefRhuDekji5LsD387ys3UCJtSFeBVf0e5yI0B8b4=
+github.com/redis/rueidis v1.0.69/go.mod h1:Lkhr2QTgcoYBhxARU7kJRO8SyVlgUuEkcJO1Y8MCluA=
+github.com/redis/rueidis/rueidiscompat v1.0.69 h1:IWVYY9lXdjNO3do2VpJT7aDFi8zbCUuQxZB6E2Grahs=
+github.com/redis/rueidis/rueidiscompat v1.0.69/go.mod h1:iC4Y8DoN0Uth0Uezg9e2trvNRC7QAgGeuP2OPLb5ccI=
+github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
+github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rhysd/actionlint v1.7.7 h1:0KgkoNTrYY7vmOCs9BW2AHxLvvpoY9nEUzgBHiPUr0k=
github.com/rhysd/actionlint v1.7.7/go.mod h1:AE6I6vJEkNaIfWqC2GNE5spIJNhxf8NCtLEKU4NnUXg=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
@@ -703,6 +702,8 @@ github.com/serenize/snaker v0.0.0-20171204205717-a683aaf2d516/go.mod h1:Yow6lPLS
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw=
github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
+github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
+github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
@@ -751,8 +752,8 @@ github.com/stvp/tempredis v0.0.0-20181119212430-b82af8480203/go.mod h1:oqN97ltKN
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
-github.com/tinylib/msgp v1.4.0 h1:SYOeDRiydzOw9kSiwdYp9UcBgPFtLU2WDHaJXyHruf8=
-github.com/tinylib/msgp v1.4.0/go.mod h1:cvjFkb4RiC8qSBOPMGPSzSAx47nAsfhLVTCZZNuHv5o=
+github.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY=
+github.com/tinylib/msgp v1.6.1/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77roAqEA=
github.com/tstranex/u2f v1.0.0 h1:HhJkSzDDlVSVIVt7pDJwCHQj67k7A5EeBgPmeD+pVsQ=
github.com/tstranex/u2f v1.0.0/go.mod h1:eahSLaqAS0zsIEv80+vXT7WanXs7MQQDg3j3wGBSayo=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
@@ -792,8 +793,8 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/goldmark v1.4.15/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/yuin/goldmark v1.7.13 h1:GPddIs617DnBLFFVJFgpo1aBfe/4xcvMc3SB5t/D0pA=
-github.com/yuin/goldmark v1.7.13/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
+github.com/yuin/goldmark v1.7.16 h1:n+CJdUxaFMiDUNnWC3dMWCIQJSkxH4uz3ZwQBkAlVNE=
+github.com/yuin/goldmark v1.7.16/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc h1:+IAOyRda+RLrxa1WC7umKOZRsGq4QrFFMYApOeHzQwQ=
github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc/go.mod h1:ovIvrum6DQJA4QsJSovrkC4saKHQVs7TvcaeO8AIl5I=
github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc=
@@ -820,10 +821,14 @@ go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
-go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
-go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
+go.uber.org/zap v1.27.1 h1:08RqriUEv8+ArZRYSTXy1LeBScaMpVSTBhCeaZYfMYc=
+go.uber.org/zap v1.27.1/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
go.uber.org/zap/exp v0.3.0 h1:6JYzdifzYkGmTdRR59oYH+Ng7k49H9qVpWwNSsGJj3U=
go.uber.org/zap/exp v0.3.0/go.mod h1:5I384qq7XGxYyByIhHm6jg5CHkGY0nsTfbDLgDDlgJQ=
+go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
+go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
+go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
+go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc=
go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEbFGHD4hhz6S1ZYeaU=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
@@ -840,8 +845,8 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
-golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
-golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
+golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
+golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -854,8 +859,8 @@ golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b h1:DXr+pvt3nC887026GRP39Ej11
golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
-golang.org/x/image v0.30.0 h1:jD5RhkmVAnjqaCUXfbGBrn3lpxbknfN9w2UhHHU+5B4=
-golang.org/x/image v0.30.0/go.mod h1:SAEUTxCCMWSrJcCy/4HwavEsfZZJlYxeHLc6tTiAe/c=
+golang.org/x/image v0.35.0 h1:LKjiHdgMtO8z7Fh18nGY6KDcoEtVfsgLDPeLyguqb7I=
+golang.org/x/image v0.35.0/go.mod h1:MwPLTVgvxSASsxdLzKrl8BRFuyqMyGhLwmC+TO1Sybk=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -878,8 +883,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
-golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
-golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
+golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
+golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -908,15 +913,15 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
-golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
-golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
+golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
+golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI=
-golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU=
+golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
+golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -932,8 +937,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
-golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
-golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
+golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
+golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -975,8 +980,8 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
-golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
+golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -987,8 +992,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
-golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
-golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
+golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
+golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -1002,8 +1007,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
-golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
-golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
+golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
+golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
@@ -1039,8 +1044,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
-golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
-golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
+golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
+golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1071,8 +1076,8 @@ google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvx
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1 h1:pmJpJEvT846VzausCQ5d7KreSROcDqmO388w5YbnltA=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1/go.mod h1:GmFNa4BdJZ2a8G+wCe9Bg3wwThLrJun751XstdJt5Og=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda h1:i/Q+bfisr7gq6feoJnS/DlpdwEL4ihp41fvRiM3Ork0=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20251029180050-ab9386a59fda/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -1080,16 +1085,16 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
-google.golang.org/grpc v1.75.0 h1:+TW+dqTd2Biwe6KKfhE5JpiYIBWq865PhKGSXiivqt4=
-google.golang.org/grpc v1.75.0/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ=
+google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc=
+google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
-google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc=
-google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU=
+google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
+google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -1097,8 +1102,8 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
-gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
-gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/ini.v1 v1.67.1 h1:tVBILHy0R6e4wkYOn3XmiITt/hEVH4TFMYvAX2Ytz6k=
+gopkg.in/ini.v1 v1.67.1/go.mod h1:x/cyOwCgZqOkJoDIJ3c1KNHMo10+nLGAhh+kn3Zizss=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
@@ -1125,20 +1130,20 @@ modernc.org/cc/v3 v3.40.0 h1:P3g79IUS/93SYhtoeaHW+kRCIrYaxJ27MFPv+7kaTOw=
modernc.org/cc/v3 v3.40.0/go.mod h1:/bTg4dnWkSXowUO6ssQKnOV0yMVxDYNIsIrzqTFDGH0=
modernc.org/ccgo/v3 v3.16.13 h1:Mkgdzl46i5F/CNR/Kj80Ri59hC8TKAhZrYSaqvkwzUw=
modernc.org/ccgo/v3 v3.16.13/go.mod h1:2Quk+5YgpImhPjv2Qsob1DnZ/4som1lJTodubIcoUkY=
-modernc.org/libc v1.22.2 h1:4U7v51GyhlWqQmwCHj28Rdq2Yzwk55ovjFrdPjs8Hb0=
-modernc.org/libc v1.22.2/go.mod h1:uvQavJ1pZ0hIoC/jfqNoMLURIMhKzINIWypNM17puug=
-modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ=
-modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
-modernc.org/memory v1.4.0 h1:crykUfNSnMAXaOJnnxcSzbUGMqkLWjklJKkBK2nwZwk=
-modernc.org/memory v1.4.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU=
+modernc.org/libc v1.55.3 h1:AzcW1mhlPNrRtjS5sS+eW2ISCgSOLLNyFzRh/V3Qj/U=
+modernc.org/libc v1.55.3/go.mod h1:qFXepLhz+JjFThQ4kzwzOjA/y/artDeg+pcYnY+Q83w=
+modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
+modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
+modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E=
+modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU=
modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
modernc.org/sqlite v1.20.4 h1:J8+m2trkN+KKoE7jglyHYYYiaq5xmz2HoHJIiBlRzbE=
modernc.org/sqlite v1.20.4/go.mod h1:zKcGyrICaxNTMEHSr1HQ2GUraP0j+845GYw37+EyT6A=
-modernc.org/strutil v1.1.3 h1:fNMm+oJklMGYfU9Ylcywl0CO5O6nTfaowNsh2wpPjzY=
-modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw=
-modernc.org/token v1.0.1 h1:A3qvTqOwexpfZZeyI0FeGPDlSWX5pjZu9hF4lU+EKWg=
-modernc.org/token v1.0.1/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
+modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
+modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
+modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
+modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
mvdan.cc/xurls/v2 v2.6.0 h1:3NTZpeTxYVWNSokW3MKeyVkz/j7uYXYiMtXRUfmjbgI=
mvdan.cc/xurls/v2 v2.6.0/go.mod h1:bCvEZ1XvdA6wDnxY7jPPjEmigDtvtvPXAD/Exa9IMSk=
pgregory.net/rapid v0.4.2 h1:lsi9jhvZTYvzVpeG93WWgimPRmiJQfGFRNTEZh1dtY0=
@@ -1150,5 +1155,5 @@ strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 h1:mUcz5b3
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251/go.mod h1:FJGmPh3vz9jSos1L/F91iAgnC/aejc0wIIrF2ZwJxdY=
xorm.io/builder v0.3.13 h1:a3jmiVVL19psGeXx8GIurTp7p0IIgqeDmwhcR6BAOAo=
xorm.io/builder v0.3.13/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
-xorm.io/xorm v1.3.10 h1:yR83hTT4mKIPyA/lvWFTzS35xjLwkiYnwdw0Qupeh0o=
-xorm.io/xorm v1.3.10/go.mod h1:Lo7hmsFF0F0GbDE7ubX5ZKa+eCf0eCuiJAUG3oI5cxQ=
+xorm.io/xorm v1.3.11 h1:i4tlVUASogb0ZZFJHA7dZqoRU2pUpUsutnNdaOlFyMI=
+xorm.io/xorm v1.3.11/go.mod h1:cs0ePc8O4a0jD78cNvD+0VFwhqotTvLQZv372QsDw7Q=
diff --git a/main.go b/main.go
index bc2121b1e7..fcfbb73371 100644
--- a/main.go
+++ b/main.go
@@ -26,9 +26,8 @@ import (
// these flags will be set by the build flags
var (
- Version = "development" // program version for this build
- Tags = "" // the Golang build tags
- MakeVersion = "" // "make" program version if built with make
+ Version = "development" // program version for this build
+ Tags = "" // the Golang build tags
)
func init() {
@@ -50,9 +49,6 @@ func main() {
func formatBuiltWith() string {
version := runtime.Version()
- if len(MakeVersion) > 0 {
- version = MakeVersion + ", " + runtime.Version()
- }
if len(Tags) == 0 {
return " built with " + version
}
diff --git a/models/actions/run.go b/models/actions/run.go
index be332d6857..99e6267071 100644
--- a/models/actions/run.go
+++ b/models/actions/run.go
@@ -168,7 +168,7 @@ func (run *ActionRun) GetPushEventPayload() (*api.PushPayload, error) {
}
func (run *ActionRun) GetPullRequestEventPayload() (*api.PullRequestPayload, error) {
- if run.Event.IsPullRequest() {
+ if run.Event.IsPullRequest() || run.Event.IsPullRequestReview() {
var payload api.PullRequestPayload
if err := json.Unmarshal([]byte(run.EventPayload), &payload); err != nil {
return nil, err
diff --git a/models/actions/task.go b/models/actions/task.go
index 8b4ecf28f7..4f41b69c97 100644
--- a/models/actions/task.go
+++ b/models/actions/task.go
@@ -8,6 +8,7 @@ import (
"crypto/subtle"
"errors"
"fmt"
+ "strings"
"time"
auth_model "code.gitea.io/gitea/models/auth"
@@ -20,6 +21,7 @@ import (
runnerv1 "code.gitea.io/actions-proto-go/runner/v1"
lru "github.com/hashicorp/golang-lru/v2"
+ "github.com/nektos/act/pkg/jobparser"
"google.golang.org/protobuf/types/known/timestamppb"
"xorm.io/builder"
)
@@ -214,6 +216,20 @@ func GetRunningTaskByToken(ctx context.Context, token string) (*ActionTask, erro
return nil, errNotExist
}
+func makeTaskStepDisplayName(step *jobparser.Step, limit int) (name string) {
+ if step.Name != "" {
+ name = step.Name // the step has an explicit name
+ } else {
+ // for unnamed step, its "String()" method tries to get a display name by its "name", "uses",
+ // "run" or "id" (last fallback), we add the "Run " prefix for unnamed steps for better display
+ // for multi-line "run" scripts, only use the first line to match GitHub's behavior
+ // https://github.com/actions/runner/blob/66800900843747f37591b077091dd2c8cf2c1796/src/Runner.Worker/Handlers/ScriptHandler.cs#L45-L58
+ runStr, _, _ := strings.Cut(strings.TrimSpace(step.Run), "\n")
+ name = "Run " + util.IfZero(strings.TrimSpace(runStr), step.String())
+ }
+ return util.EllipsisDisplayString(name, limit) // database column has a length limit
+}
+
func CreateTaskForRunner(ctx context.Context, runner *ActionRunner) (*ActionTask, bool, error) {
ctx, committer, err := db.TxContext(ctx)
if err != nil {
@@ -293,9 +309,8 @@ func CreateTaskForRunner(ctx context.Context, runner *ActionRunner) (*ActionTask
if len(workflowJob.Steps) > 0 {
steps := make([]*ActionTaskStep, len(workflowJob.Steps))
for i, v := range workflowJob.Steps {
- name := util.EllipsisDisplayString(v.String(), 255)
steps[i] = &ActionTaskStep{
- Name: name,
+ Name: makeTaskStepDisplayName(v, 255),
TaskID: task.ID,
Index: int64(i),
RepoID: task.RepoID,
diff --git a/models/actions/task_step.go b/models/actions/task_step.go
index 3af1fe3f5a..03ffbf1931 100644
--- a/models/actions/task_step.go
+++ b/models/actions/task_step.go
@@ -14,7 +14,7 @@ import (
// ActionTaskStep represents a step of ActionTask
type ActionTaskStep struct {
ID int64
- Name string `xorm:"VARCHAR(255)"`
+ Name string `xorm:"VARCHAR(255)"` // the step name, for display purpose only, it will be truncated if it is too long
TaskID int64 `xorm:"index unique(task_index)"`
Index int64 `xorm:"index unique(task_index)"`
RepoID int64 `xorm:"index"`
diff --git a/models/actions/task_test.go b/models/actions/task_test.go
new file mode 100644
index 0000000000..15d4e16f42
--- /dev/null
+++ b/models/actions/task_test.go
@@ -0,0 +1,76 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/nektos/act/pkg/jobparser"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMakeTaskStepDisplayName(t *testing.T) {
+ tests := []struct {
+ name string
+ jobStep *jobparser.Step
+ expected string
+ }{
+ {
+ name: "explicit name",
+ jobStep: &jobparser.Step{
+ Name: "Test Step",
+ },
+ expected: "Test Step",
+ },
+ {
+ name: "uses step",
+ jobStep: &jobparser.Step{
+ Uses: "actions/checkout@v4",
+ },
+ expected: "Run actions/checkout@v4",
+ },
+ {
+ name: "single-line run",
+ jobStep: &jobparser.Step{
+ Run: "echo hello",
+ },
+ expected: "Run echo hello",
+ },
+ {
+ name: "multi-line run block scalar",
+ jobStep: &jobparser.Step{
+ Run: "\n echo hello \r\n echo world \n ",
+ },
+ expected: "Run echo hello",
+ },
+ {
+ name: "fallback to id",
+ jobStep: &jobparser.Step{
+ ID: "step-id",
+ },
+ expected: "Run step-id",
+ },
+ {
+ name: "very long name truncated",
+ jobStep: &jobparser.Step{
+ Name: strings.Repeat("a", 300),
+ },
+ expected: strings.Repeat("a", 252) + "…",
+ },
+ {
+ name: "very long run truncated",
+ jobStep: &jobparser.Step{
+ Run: strings.Repeat("a", 300),
+ },
+ expected: "Run " + strings.Repeat("a", 248) + "…",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ result := makeTaskStepDisplayName(tt.jobStep, 255)
+ assert.Equal(t, tt.expected, result)
+ })
+ }
+}
diff --git a/models/activities/action_list.go b/models/activities/action_list.go
index b52cf7ee49..29ff2fdf7a 100644
--- a/models/activities/action_list.go
+++ b/models/activities/action_list.go
@@ -30,7 +30,7 @@ func (actions ActionList) getUserIDs() []int64 {
func (actions ActionList) LoadActUsers(ctx context.Context) (map[int64]*user_model.User, error) {
if len(actions) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // returns nil when there are no actions
}
userIDs := actions.getUserIDs()
diff --git a/models/activities/user_heatmap.go b/models/activities/user_heatmap.go
index ef67838be7..e24d44c519 100644
--- a/models/activities/user_heatmap.go
+++ b/models/activities/user_heatmap.go
@@ -19,14 +19,14 @@ type UserHeatmapData struct {
Contributions int64 `json:"contributions"`
}
-// GetUserHeatmapDataByUser returns an array of UserHeatmapData
+// GetUserHeatmapDataByUser returns an array of UserHeatmapData, it checks whether doer can access user's activity
func GetUserHeatmapDataByUser(ctx context.Context, user, doer *user_model.User) ([]*UserHeatmapData, error) {
return getUserHeatmapData(ctx, user, nil, doer)
}
-// GetUserHeatmapDataByUserTeam returns an array of UserHeatmapData
-func GetUserHeatmapDataByUserTeam(ctx context.Context, user *user_model.User, team *organization.Team, doer *user_model.User) ([]*UserHeatmapData, error) {
- return getUserHeatmapData(ctx, user, team, doer)
+// GetUserHeatmapDataByOrgTeam returns an array of UserHeatmapData, it checks whether doer can access org's activity
+func GetUserHeatmapDataByOrgTeam(ctx context.Context, org *organization.Organization, team *organization.Team, doer *user_model.User) ([]*UserHeatmapData, error) {
+ return getUserHeatmapData(ctx, org.AsUser(), team, doer)
}
func getUserHeatmapData(ctx context.Context, user *user_model.User, team *organization.Team, doer *user_model.User) ([]*UserHeatmapData, error) {
@@ -71,12 +71,3 @@ func getUserHeatmapData(ctx context.Context, user *user_model.User, team *organi
OrderBy("timestamp").
Find(&hdata)
}
-
-// GetTotalContributionsInHeatmap returns the total number of contributions in a heatmap
-func GetTotalContributionsInHeatmap(hdata []*UserHeatmapData) int64 {
- var total int64
- for _, v := range hdata {
- total += v.Contributions
- }
- return total
-}
diff --git a/models/asymkey/gpg_key_commit_verification.go b/models/asymkey/gpg_key_commit_verification.go
index 375b703f7b..ae5192de9f 100644
--- a/models/asymkey/gpg_key_commit_verification.go
+++ b/models/asymkey/gpg_key_commit_verification.go
@@ -70,7 +70,7 @@ func hashAndVerify(sig *packet.Signature, payload string, k *GPGKey) (*GPGKey, e
// We will ignore errors in verification as they don't need to be propagated up
err = verifySign(sig, hash, k)
if err != nil {
- return nil, nil
+ return nil, nil //nolint:nilnil // verification failed, not an error
}
return k, nil
}
@@ -86,7 +86,7 @@ func hashAndVerifyWithSubKeys(sig *packet.Signature, payload string, k *GPGKey)
return verified, err
}
}
- return nil, nil
+ return nil, nil //nolint:nilnil // verification failed, not an error
}
func HashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload string, k *GPGKey, committer, signer *user_model.User, email string) *CommitVerification {
diff --git a/models/asymkey/gpg_key_test.go b/models/asymkey/gpg_key_test.go
index 4621337f11..e6656cb70d 100644
--- a/models/asymkey/gpg_key_test.go
+++ b/models/asymkey/gpg_key_test.go
@@ -11,7 +11,6 @@ import (
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/timeutil"
- "code.gitea.io/gitea/modules/util"
"github.com/ProtonMail/go-crypto/openpgp"
"github.com/ProtonMail/go-crypto/openpgp/packet"
@@ -398,7 +397,7 @@ epiDVQ==
func TestTryGetKeyIDFromSignature(t *testing.T) {
assert.Empty(t, TryGetKeyIDFromSignature(&packet.Signature{}))
assert.Equal(t, "038D1A3EADDBEA9C", TryGetKeyIDFromSignature(&packet.Signature{
- IssuerKeyId: util.ToPointer(uint64(0x38D1A3EADDBEA9C)),
+		IssuerKeyId: func(v uint64) *uint64 { return &v }(0x38D1A3EADDBEA9C),
}))
assert.Equal(t, "038D1A3EADDBEA9C", TryGetKeyIDFromSignature(&packet.Signature{
IssuerFingerprint: []uint8{0xb, 0x23, 0x24, 0xc7, 0xe6, 0xfe, 0x4f, 0x3a, 0x6, 0x26, 0xc1, 0x21, 0x3, 0x8d, 0x1a, 0x3e, 0xad, 0xdb, 0xea, 0x9c},
@@ -419,7 +418,7 @@ func TestParseGPGKey(t *testing.T) {
// then revoke the key
for _, id := range e.Identities {
- id.Revocations = append(id.Revocations, &packet.Signature{RevocationReason: util.ToPointer(packet.KeyCompromised)})
+			id.Revocations = append(id.Revocations, &packet.Signature{RevocationReason: func(r packet.ReasonForRevocation) *packet.ReasonForRevocation { return &r }(packet.KeyCompromised)})
}
k, err = parseGPGKey(t.Context(), 1, e, true)
require.NoError(t, err)
diff --git a/models/asymkey/ssh_key.go b/models/asymkey/ssh_key.go
index d77b5d46a7..98784b36bd 100644
--- a/models/asymkey/ssh_key.go
+++ b/models/asymkey/ssh_key.go
@@ -84,7 +84,7 @@ func addKey(ctx context.Context, key *PublicKey) (err error) {
}
// AddPublicKey adds new public key to database and authorized_keys file.
-func AddPublicKey(ctx context.Context, ownerID int64, name, content string, authSourceID int64) (*PublicKey, error) {
+func AddPublicKey(ctx context.Context, ownerID int64, name, content string, authSourceID int64, verified bool) (*PublicKey, error) {
log.Trace(content)
fingerprint, err := CalcFingerprint(content)
@@ -115,6 +115,7 @@ func AddPublicKey(ctx context.Context, ownerID int64, name, content string, auth
Mode: perm.AccessModeWrite,
Type: KeyTypeUser,
LoginSourceID: authSourceID,
+ Verified: verified,
}
if err = addKey(ctx, key); err != nil {
return nil, fmt.Errorf("addKey: %w", err)
@@ -298,7 +299,7 @@ func deleteKeysMarkedForDeletion(ctx context.Context, keys []string) (bool, erro
}
// AddPublicKeysBySource add a users public keys. Returns true if there are changes.
-func AddPublicKeysBySource(ctx context.Context, usr *user_model.User, s *auth.Source, sshPublicKeys []string) bool {
+func AddPublicKeysBySource(ctx context.Context, usr *user_model.User, s *auth.Source, sshPublicKeys []string, verified bool) bool {
var sshKeysNeedUpdate bool
for _, sshKey := range sshPublicKeys {
var err error
@@ -317,7 +318,7 @@ func AddPublicKeysBySource(ctx context.Context, usr *user_model.User, s *auth.So
marshalled = marshalled[:len(marshalled)-1]
sshKeyName := fmt.Sprintf("%s-%s", s.Name, ssh.FingerprintSHA256(out))
- if _, err := AddPublicKey(ctx, usr.ID, sshKeyName, marshalled, s.ID); err != nil {
+ if _, err := AddPublicKey(ctx, usr.ID, sshKeyName, marshalled, s.ID, verified); err != nil {
if IsErrKeyAlreadyExist(err) {
log.Trace("AddPublicKeysBySource[%s]: Public SSH Key %s already exists for user", sshKeyName, usr.Name)
} else {
@@ -336,7 +337,7 @@ func AddPublicKeysBySource(ctx context.Context, usr *user_model.User, s *auth.So
}
// SynchronizePublicKeys updates a user's public keys. Returns true if there are changes.
-func SynchronizePublicKeys(ctx context.Context, usr *user_model.User, s *auth.Source, sshPublicKeys []string) bool {
+func SynchronizePublicKeys(ctx context.Context, usr *user_model.User, s *auth.Source, sshPublicKeys []string, verified bool) bool {
var sshKeysNeedUpdate bool
log.Trace("synchronizePublicKeys[%s]: Handling Public SSH Key synchronization for user %s", s.Name, usr.Name)
@@ -381,7 +382,7 @@ func SynchronizePublicKeys(ctx context.Context, usr *user_model.User, s *auth.So
newKeys = append(newKeys, key)
}
}
- if AddPublicKeysBySource(ctx, usr, s, newKeys) {
+ if AddPublicKeysBySource(ctx, usr, s, newKeys, verified) {
sshKeysNeedUpdate = true
}
diff --git a/models/auth/oauth2.go b/models/auth/oauth2.go
index d664841306..2f5aff0933 100644
--- a/models/auth/oauth2.go
+++ b/models/auth/oauth2.go
@@ -209,7 +209,7 @@ func (app *OAuth2Application) GetGrantByUserID(ctx context.Context, userID int64
if has, err := db.GetEngine(ctx).Where("user_id = ? AND application_id = ?", userID, app.ID).Get(grant); err != nil {
return nil, err
} else if !has {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return grant, nil
}
@@ -431,13 +431,13 @@ func GetOAuth2AuthorizationByCode(ctx context.Context, code string) (auth *OAuth
if has, err := db.GetEngine(ctx).Where("code = ?", code).Get(auth); err != nil {
return nil, err
} else if !has {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
auth.Grant = new(OAuth2Grant)
if has, err := db.GetEngine(ctx).ID(auth.GrantID).Get(auth.Grant); err != nil {
return nil, err
} else if !has {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return auth, nil
}
@@ -521,7 +521,7 @@ func GetOAuth2GrantByID(ctx context.Context, id int64) (grant *OAuth2Grant, err
if has, err := db.GetEngine(ctx).ID(id).Get(grant); err != nil {
return nil, err
} else if !has {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return grant, err
}
diff --git a/models/db/collation.go b/models/db/collation.go
index 79ade87380..203f7cbfe4 100644
--- a/models/db/collation.go
+++ b/models/db/collation.go
@@ -98,7 +98,7 @@ func CheckCollations(x *xorm.Engine) (*CheckCollationsResult, error) {
return nil, err
}
} else {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil for unsupported database types
}
if res.DatabaseCollation == "" {
diff --git a/models/db/common.go b/models/db/common.go
index ea628bf2a0..b3c43f8b62 100644
--- a/models/db/common.go
+++ b/models/db/common.go
@@ -12,30 +12,30 @@ import (
"xorm.io/builder"
)
-// BuildCaseInsensitiveLike returns a condition to check if the given value is like the given key case-insensitively.
-// Handles especially SQLite correctly as UPPER there only transforms ASCII letters.
+// BuildCaseInsensitiveLike returns a case-insensitive LIKE condition for the given key and value.
+// Cast the search value and the database column value to the same case for case-insensitive matching.
+// * SQLite: only cast ASCII chars because it doesn't handle complete Unicode case folding
+// * Other databases: use database's string function, assuming that they are able to handle complete Unicode case folding correctly
func BuildCaseInsensitiveLike(key, value string) builder.Cond {
+ // ToLowerASCII is about 7% faster than ToUpperASCII (according to Golang's benchmark)
if setting.Database.Type.IsSQLite3() {
- return builder.Like{"UPPER(" + key + ")", util.ToUpperASCII(value)}
+ return builder.Like{"LOWER(" + key + ")", util.ToLowerASCII(value)}
}
- return builder.Like{"UPPER(" + key + ")", strings.ToUpper(value)}
+ return builder.Like{"LOWER(" + key + ")", strings.ToLower(value)}
}
// BuildCaseInsensitiveIn returns a condition to check if the given value is in the given values case-insensitively.
-// Handles especially SQLite correctly as UPPER there only transforms ASCII letters.
+// See BuildCaseInsensitiveLike for more details
func BuildCaseInsensitiveIn(key string, values []string) builder.Cond {
- uppers := make([]string, 0, len(values))
+ incaseValues := make([]string, len(values))
+ caseCast := strings.ToLower
if setting.Database.Type.IsSQLite3() {
- for _, value := range values {
- uppers = append(uppers, util.ToUpperASCII(value))
- }
- } else {
- for _, value := range values {
- uppers = append(uppers, strings.ToUpper(value))
- }
+ caseCast = util.ToLowerASCII
}
-
- return builder.In("UPPER("+key+")", uppers)
+ for i, value := range values {
+ incaseValues[i] = caseCast(value)
+ }
+ return builder.In("LOWER("+key+")", incaseValues)
}
// BuilderDialect returns the xorm.Builder dialect of the engine
diff --git a/models/db/engine_init.go b/models/db/engine_init.go
index f26189b805..ef5db3ff5e 100644
--- a/models/db/engine_init.go
+++ b/models/db/engine_init.go
@@ -57,7 +57,7 @@ func InitEngine(ctx context.Context) error {
xe, err := newXORMEngine()
if err != nil {
if strings.Contains(err.Error(), "SQLite3 support") {
- return fmt.Errorf(`sqlite3 requires: -tags sqlite,sqlite_unlock_notify%s%w`, "\n", err)
+ return fmt.Errorf("sqlite3 requires: -tags sqlite,sqlite_unlock_notify\n%w", err)
}
return fmt.Errorf("failed to connect to database: %w", err)
}
diff --git a/models/dbfs/dbfile.go b/models/dbfs/dbfile.go
index eaf506fbe6..ccb13583e1 100644
--- a/models/dbfs/dbfile.go
+++ b/models/dbfs/dbfile.go
@@ -339,7 +339,7 @@ func findFileMetaByID(ctx context.Context, metaID int64) (*dbfsMeta, error) {
} else if ok {
return &fileMeta, nil
}
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
func buildPath(path string) string {
diff --git a/models/fixtures/action_run.yml b/models/fixtures/action_run.yml
index 44b131c961..ac5e8303c3 100644
--- a/models/fixtures/action_run.yml
+++ b/models/fixtures/action_run.yml
@@ -139,26 +139,7 @@
updated: 1683636626
need_approval: 0
approved_by: 0
--
- id: 804
- title: "use a private action"
- repo_id: 60
- owner_id: 40
- workflow_id: "run.yaml"
- index: 189
- trigger_user_id: 40
- ref: "refs/heads/master"
- commit_sha: "6e64b26de7ba966d01d90ecfaf5c7f14ef203e86"
- event: "push"
- trigger_event: "push"
- is_fork_pull_request: 0
- status: 1
- started: 1683636528
- stopped: 1683636626
- created: 1683636108
- updated: 1683636626
- need_approval: 0
- approved_by: 0
+
-
id: 805
title: "update actions"
diff --git a/models/fixtures/action_run_job.yml b/models/fixtures/action_run_job.yml
index c5aeb4931c..04799b73ca 100644
--- a/models/fixtures/action_run_job.yml
+++ b/models/fixtures/action_run_job.yml
@@ -129,20 +129,7 @@
status: 5
started: 1683636528
stopped: 1683636626
--
- id: 205
- run_id: 804
- repo_id: 6
- owner_id: 10
- commit_sha: 6e64b26de7ba966d01d90ecfaf5c7f14ef203e86
- is_fork_pull_request: 0
- name: job_2
- attempt: 1
- job_id: job_2
- task_id: 48
- status: 1
- started: 1683636528
- stopped: 1683636626
+
-
id: 206
run_id: 805
diff --git a/models/fixtures/action_task.yml b/models/fixtures/action_task.yml
index a28ddd0add..e1bc588dc5 100644
--- a/models/fixtures/action_task.yml
+++ b/models/fixtures/action_task.yml
@@ -177,26 +177,7 @@
log_length: 0
log_size: 0
log_expired: 0
--
- id: 55
- job_id: 205
- attempt: 1
- runner_id: 1
- status: 6 # 6 is the status code for "running"
- started: 1683636528
- stopped: 1683636626
- repo_id: 6
- owner_id: 10
- commit_sha: 6e64b26de7ba966d01d90ecfaf5c7f14ef203e86
- is_fork_pull_request: 0
- token_hash: b8d3962425466b6709b9ac51446f93260c54afe8e7b6d3686e34f991fb8a8953822b0deed86fe41a103f34bc48dbc478422b
- token_salt: ERxJGHvg3I
- token_last_eight: 182199eb
- log_filename: collaborative-owner-test/1a/49.log
- log_in_storage: 1
- log_length: 707
- log_size: 90179
- log_expired: 0
+
-
id: 56
attempt: 1
diff --git a/models/git/branch.go b/models/git/branch.go
index 7fef9f5ca3..1d6eeb6868 100644
--- a/models/git/branch.go
+++ b/models/git/branch.go
@@ -397,10 +397,16 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
if protectedBranch != nil {
// there is a protect rule for this branch
- protectedBranch.RuleName = to
- if _, err = sess.ID(protectedBranch.ID).Cols("branch_name").Update(protectedBranch); err != nil {
+ existingRule, err := GetProtectedBranchRuleByName(ctx, repo.ID, to)
+ if err != nil {
return err
}
+ if existingRule == nil || existingRule.ID == protectedBranch.ID {
+ protectedBranch.RuleName = to
+ if _, err = sess.ID(protectedBranch.ID).Cols("branch_name").Update(protectedBranch); err != nil {
+ return err
+ }
+ }
} else {
// some glob protect rules may match this branch
protected, err := IsBranchProtected(ctx, repo.ID, from)
@@ -490,12 +496,25 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o
opts.CommitAfterUnix = time.Now().Add(-time.Hour * 2).Unix()
}
- baseBranch, err := GetBranch(ctx, opts.BaseRepo.ID, opts.BaseRepo.DefaultBranch)
+ var ignoredCommitIDs []string
+ baseDefaultBranch, err := GetBranch(ctx, opts.BaseRepo.ID, opts.BaseRepo.DefaultBranch)
if err != nil {
- return nil, err
+ log.Warn("GetBranch:DefaultBranch: %v", err)
+ } else {
+ ignoredCommitIDs = append(ignoredCommitIDs, baseDefaultBranch.CommitID)
}
- // find all related branches, these branches may already created PRs, we will check later
+ baseDefaultTargetBranchName := opts.BaseRepo.MustGetUnit(ctx, unit.TypePullRequests).PullRequestsConfig().DefaultTargetBranch
+ if baseDefaultTargetBranchName != "" && baseDefaultTargetBranchName != opts.BaseRepo.DefaultBranch {
+ baseDefaultTargetBranch, err := GetBranch(ctx, opts.BaseRepo.ID, baseDefaultTargetBranchName)
+ if err != nil {
+ log.Warn("GetBranch:DefaultTargetBranch: %v", err)
+ } else {
+ ignoredCommitIDs = append(ignoredCommitIDs, baseDefaultTargetBranch.CommitID)
+ }
+ }
+
+ // find all related branches, these branches may already have PRs, we will check later
var branches []*Branch
if err := db.GetEngine(ctx).
Where(builder.And(
@@ -506,7 +525,7 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o
builder.Gte{"commit_time": opts.CommitAfterUnix},
builder.In("repo_id", repoIDs),
// newly created branch have no changes, so skip them
- builder.Neq{"commit_id": baseBranch.CommitID},
+ builder.NotIn("commit_id", ignoredCommitIDs),
)).
OrderBy(db.SearchOrderByRecentUpdated.String()).
Find(&branches); err != nil {
@@ -514,10 +533,8 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o
}
newBranches := make([]*RecentlyPushedNewBranch, 0, len(branches))
- if opts.MaxCount == 0 {
- // by default we display 2 recently pushed new branch
- opts.MaxCount = 2
- }
+	opts.MaxCount = util.IfZero(opts.MaxCount, 2) // by default, we display 2 recently pushed new branches
+ baseTargetBranchName := opts.BaseRepo.GetPullRequestTargetBranch(ctx)
for _, branch := range branches {
// whether the branch is protected
protected, err := IsBranchProtected(ctx, branch.RepoID, branch.Name)
@@ -555,7 +572,7 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o
BranchDisplayName: branchDisplayName,
BranchName: branch.Name,
BranchLink: fmt.Sprintf("%s/src/branch/%s", branch.Repo.Link(), util.PathEscapeSegments(branch.Name)),
- BranchCompareURL: branch.Repo.ComposeBranchCompareURL(opts.BaseRepo, branch.Name),
+ BranchCompareURL: branch.Repo.ComposeBranchCompareURL(opts.BaseRepo, baseTargetBranchName, branch.Name),
CommitTime: branch.CommitTime,
})
}
diff --git a/models/git/branch_test.go b/models/git/branch_test.go
index 5be435172b..6de3ea552c 100644
--- a/models/git/branch_test.go
+++ b/models/git/branch_test.go
@@ -114,7 +114,7 @@ func TestFindRenamedBranch(t *testing.T) {
assert.True(t, exist)
assert.Equal(t, "master", branch.To)
- _, exist, err = git_model.FindRenamedBranch(t.Context(), 1, "unknow")
+ _, exist, err = git_model.FindRenamedBranch(t.Context(), 1, "unknown")
assert.NoError(t, err)
assert.False(t, exist)
}
@@ -159,6 +159,53 @@ func TestRenameBranch(t *testing.T) {
})
}
+func TestRenameBranchProtectedRuleConflict(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ master := unittest.AssertExistsAndLoadBean(t, &git_model.Branch{RepoID: repo1.ID, Name: "master"})
+
+ devBranch := &git_model.Branch{
+ RepoID: repo1.ID,
+ Name: "dev",
+ CommitID: master.CommitID,
+ CommitMessage: master.CommitMessage,
+ CommitTime: master.CommitTime,
+ PusherID: master.PusherID,
+ }
+ assert.NoError(t, db.Insert(t.Context(), devBranch))
+
+ pbDev := git_model.ProtectedBranch{
+ RepoID: repo1.ID,
+ RuleName: "dev",
+ CanPush: true,
+ }
+ assert.NoError(t, git_model.UpdateProtectBranch(t.Context(), repo1, &pbDev, git_model.WhitelistOptions{}))
+
+ pbMain := git_model.ProtectedBranch{
+ RepoID: repo1.ID,
+ RuleName: "main",
+ CanPush: true,
+ }
+ assert.NoError(t, git_model.UpdateProtectBranch(t.Context(), repo1, &pbMain, git_model.WhitelistOptions{}))
+
+ assert.NoError(t, git_model.RenameBranch(t.Context(), repo1, "dev", "main", func(ctx context.Context, isDefault bool) error {
+ return nil
+ }))
+
+ unittest.AssertNotExistsBean(t, &git_model.Branch{RepoID: repo1.ID, Name: "dev"})
+ unittest.AssertExistsAndLoadBean(t, &git_model.Branch{RepoID: repo1.ID, Name: "main"})
+
+ protectedDev, err := git_model.GetProtectedBranchRuleByName(t.Context(), repo1.ID, "dev")
+ assert.NoError(t, err)
+ assert.NotNil(t, protectedDev)
+ assert.Equal(t, "dev", protectedDev.RuleName)
+
+ protectedMainByID, err := git_model.GetProtectedBranchRuleByID(t.Context(), repo1.ID, pbMain.ID)
+ assert.NoError(t, err)
+ assert.NotNil(t, protectedMainByID)
+ assert.Equal(t, "main", protectedMainByID.RuleName)
+}
+
func TestOnlyGetDeletedBranchOnCorrectRepo(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
diff --git a/models/git/lfs.go b/models/git/lfs.go
index a4ae3e7bee..1f3d086ac2 100644
--- a/models/git/lfs.go
+++ b/models/git/lfs.go
@@ -312,15 +312,12 @@ func IterateRepositoryIDsWithLFSMetaObjects(ctx context.Context, f func(ctx cont
// IterateLFSMetaObjectsForRepoOptions provides options for IterateLFSMetaObjectsForRepo
type IterateLFSMetaObjectsForRepoOptions struct {
- OlderThan timeutil.TimeStamp
- UpdatedLessRecentlyThan timeutil.TimeStamp
- OrderByUpdated bool
- LoopFunctionAlwaysUpdates bool
+ OlderThan timeutil.TimeStamp
+ UpdatedLessRecentlyThan timeutil.TimeStamp
}
// IterateLFSMetaObjectsForRepo provides a iterator for LFSMetaObjects per Repo
func IterateLFSMetaObjectsForRepo(ctx context.Context, repoID int64, f func(context.Context, *LFSMetaObject, int64) error, opts *IterateLFSMetaObjectsForRepoOptions) error {
- var start int
batchSize := setting.Database.IterateBufferSize
engine := db.GetEngine(ctx)
type CountLFSMetaObject struct {
@@ -328,7 +325,7 @@ func IterateLFSMetaObjectsForRepo(ctx context.Context, repoID int64, f func(cont
LFSMetaObject `xorm:"extends"`
}
- id := int64(0)
+ lastID := int64(0)
for {
beans := make([]*CountLFSMetaObject, 0, batchSize)
@@ -341,29 +338,23 @@ func IterateLFSMetaObjectsForRepo(ctx context.Context, repoID int64, f func(cont
if !opts.UpdatedLessRecentlyThan.IsZero() {
sess.And("`lfs_meta_object`.updated_unix < ?", opts.UpdatedLessRecentlyThan)
}
- sess.GroupBy("`lfs_meta_object`.id")
- if opts.OrderByUpdated {
- sess.OrderBy("`lfs_meta_object`.updated_unix ASC")
- } else {
- sess.And("`lfs_meta_object`.id > ?", id)
- sess.OrderBy("`lfs_meta_object`.id ASC")
- }
- if err := sess.Limit(batchSize, start).Find(&beans); err != nil {
+ sess.GroupBy("`lfs_meta_object`.id").
+ And("`lfs_meta_object`.id > ?", lastID).
+ OrderBy("`lfs_meta_object`.id ASC")
+
+ if err := sess.Limit(batchSize).Find(&beans); err != nil {
return err
}
if len(beans) == 0 {
return nil
}
- if !opts.LoopFunctionAlwaysUpdates {
- start += len(beans)
- }
for _, bean := range beans {
if err := f(ctx, &bean.LFSMetaObject, bean.Count); err != nil {
return err
}
}
- id = beans[len(beans)-1].ID
+ lastID = beans[len(beans)-1].ID
}
}
diff --git a/models/git/lfs_lock.go b/models/git/lfs_lock.go
index 184e616915..8e63598fb2 100644
--- a/models/git/lfs_lock.go
+++ b/models/git/lfs_lock.go
@@ -101,10 +101,10 @@ func GetLFSLock(ctx context.Context, repo *repo_model.Repository, path string) (
return rel, nil
}
-// GetLFSLockByID returns release by given id.
-func GetLFSLockByID(ctx context.Context, id int64) (*LFSLock, error) {
+// GetLFSLockByIDAndRepo returns lfs lock by given id and repository id.
+func GetLFSLockByIDAndRepo(ctx context.Context, id, repoID int64) (*LFSLock, error) {
lock := new(LFSLock)
- has, err := db.GetEngine(ctx).ID(id).Get(lock)
+ has, err := db.GetEngine(ctx).ID(id).And("repo_id = ?", repoID).Get(lock)
if err != nil {
return nil, err
} else if !has {
@@ -130,7 +130,7 @@ func GetLFSLockByRepoID(ctx context.Context, repoID int64, page, pageSize int) (
// GetTreePathLock returns LSF lock for the treePath
func GetTreePathLock(ctx context.Context, repoID int64, treePath string) (*LFSLock, error) {
if !setting.LFS.StartServer {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when LFS is not started
}
locks, err := GetLFSLockByRepoID(ctx, repoID, 0, 0)
@@ -142,7 +142,7 @@ func GetTreePathLock(ctx context.Context, repoID int64, treePath string) (*LFSLo
return lock, nil
}
}
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
// CountLFSLockByRepoID returns a count of all LFSLocks associated with a repository.
@@ -153,7 +153,7 @@ func CountLFSLockByRepoID(ctx context.Context, repoID int64) (int64, error) {
// DeleteLFSLockByID deletes a lock by given ID.
func DeleteLFSLockByID(ctx context.Context, id int64, repo *repo_model.Repository, u *user_model.User, force bool) (*LFSLock, error) {
return db.WithTx2(ctx, func(ctx context.Context) (*LFSLock, error) {
- lock, err := GetLFSLockByID(ctx, id)
+ lock, err := GetLFSLockByIDAndRepo(ctx, id, repo.ID)
if err != nil {
return nil, err
}
diff --git a/models/git/lfs_lock_test.go b/models/git/lfs_lock_test.go
new file mode 100644
index 0000000000..c88e89be47
--- /dev/null
+++ b/models/git/lfs_lock_test.go
@@ -0,0 +1,82 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func createTestLock(t *testing.T, repo *repo_model.Repository, owner *user_model.User) *LFSLock {
+ t.Helper()
+
+ path := fmt.Sprintf("%s-%d-%d", t.Name(), repo.ID, time.Now().UnixNano())
+ lock, err := CreateLFSLock(t.Context(), repo, &LFSLock{
+ OwnerID: owner.ID,
+ Path: path,
+ })
+ require.NoError(t, err)
+ return lock
+}
+
+func TestGetLFSLockByIDAndRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+
+ lockRepo1 := createTestLock(t, repo1, user2)
+ lockRepo3 := createTestLock(t, repo3, user4)
+
+ fetched, err := GetLFSLockByIDAndRepo(t.Context(), lockRepo1.ID, repo1.ID)
+ require.NoError(t, err)
+ assert.Equal(t, lockRepo1.ID, fetched.ID)
+ assert.Equal(t, repo1.ID, fetched.RepoID)
+
+ _, err = GetLFSLockByIDAndRepo(t.Context(), lockRepo1.ID, repo3.ID)
+ assert.Error(t, err)
+ assert.True(t, IsErrLFSLockNotExist(err))
+
+ _, err = GetLFSLockByIDAndRepo(t.Context(), lockRepo3.ID, repo1.ID)
+ assert.Error(t, err)
+ assert.True(t, IsErrLFSLockNotExist(err))
+}
+
+func TestDeleteLFSLockByIDRequiresRepoMatch(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+
+ lockRepo1 := createTestLock(t, repo1, user2)
+ lockRepo3 := createTestLock(t, repo3, user4)
+
+ _, err := DeleteLFSLockByID(t.Context(), lockRepo3.ID, repo1, user2, true)
+ assert.Error(t, err)
+ assert.True(t, IsErrLFSLockNotExist(err))
+
+ existing, err := GetLFSLockByIDAndRepo(t.Context(), lockRepo3.ID, repo3.ID)
+ require.NoError(t, err)
+ assert.Equal(t, lockRepo3.ID, existing.ID)
+
+ deleted, err := DeleteLFSLockByID(t.Context(), lockRepo3.ID, repo3, user4, true)
+ require.NoError(t, err)
+ assert.Equal(t, lockRepo3.ID, deleted.ID)
+
+ deleted, err = DeleteLFSLockByID(t.Context(), lockRepo1.ID, repo1, user2, false)
+ require.NoError(t, err)
+ assert.Equal(t, lockRepo1.ID, deleted.ID)
+}
diff --git a/models/git/lfs_test.go b/models/git/lfs_test.go
new file mode 100644
index 0000000000..4c0242f439
--- /dev/null
+++ b/models/git/lfs_test.go
@@ -0,0 +1,61 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git_test
+
+import (
+ "bytes"
+ "context"
+ "strconv"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestIterateLFSMetaObjectsForRepoUpdatesDoNotSkip(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ ctx := t.Context()
+ repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, "user2", "repo1")
+ assert.NoError(t, err)
+
+ defer test.MockVariableValue(&setting.Database.IterateBufferSize, 1)()
+
+ created := make([]*git_model.LFSMetaObject, 0, 3)
+ for i := range 3 {
+ content := []byte("gitea-lfs-" + strconv.Itoa(i))
+ pointer, err := lfs.GeneratePointer(bytes.NewReader(content))
+ assert.NoError(t, err)
+
+ meta, err := git_model.NewLFSMetaObject(ctx, repo.ID, pointer)
+ assert.NoError(t, err)
+ created = append(created, meta)
+ }
+
+ iterated := make([]int64, 0, len(created))
+ cutoff := time.Now().Add(24 * time.Hour)
+ iterErr := git_model.IterateLFSMetaObjectsForRepo(ctx, repo.ID, func(ctx context.Context, meta *git_model.LFSMetaObject, count int64) error {
+ iterated = append(iterated, meta.ID)
+ _, err := db.GetEngine(ctx).ID(meta.ID).Cols("updated_unix").Update(&git_model.LFSMetaObject{
+ UpdatedUnix: timeutil.TimeStamp(time.Now().Unix()),
+ })
+ return err
+ }, &git_model.IterateLFSMetaObjectsForRepoOptions{
+ OlderThan: timeutil.TimeStamp(cutoff.Unix()),
+ UpdatedLessRecentlyThan: timeutil.TimeStamp(cutoff.Unix()),
+ })
+ assert.NoError(t, iterErr)
+
+ expected := []int64{created[0].ID, created[1].ID, created[2].ID}
+ assert.Equal(t, expected, iterated)
+}
diff --git a/models/git/protected_branch.go b/models/git/protected_branch.go
index 1085c14cae..f4567a0aac 100644
--- a/models/git/protected_branch.go
+++ b/models/git/protected_branch.go
@@ -318,7 +318,7 @@ func GetProtectedBranchRuleByName(ctx context.Context, repoID int64, ruleName st
if err != nil {
return nil, err
} else if !exist {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return rel, nil
}
@@ -329,7 +329,7 @@ func GetProtectedBranchRuleByID(ctx context.Context, repoID, ruleID int64) (*Pro
if err != nil {
return nil, err
} else if !exist {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return rel, nil
}
diff --git a/models/git/protected_tag.go b/models/git/protected_tag.go
index 95642df593..dc38daf981 100644
--- a/models/git/protected_tag.go
+++ b/models/git/protected_tag.go
@@ -104,7 +104,7 @@ func GetProtectedTagByID(ctx context.Context, id int64) (*ProtectedTag, error) {
return nil, err
}
if !has {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return tag, nil
}
@@ -117,7 +117,7 @@ func GetProtectedTagByNamePattern(ctx context.Context, repoID int64, pattern str
return nil, err
}
if !has {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return tag, nil
}
diff --git a/models/issues/comment.go b/models/issues/comment.go
index fd0500833e..25e74c01ea 100644
--- a/models/issues/comment.go
+++ b/models/issues/comment.go
@@ -20,6 +20,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/htmlutil"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/references"
@@ -233,11 +234,17 @@ func (r RoleInRepo) LocaleHelper(lang translation.Locale) string {
return lang.TrString("repo.issues.role." + string(r) + "_helper")
}
+type SpecialDoerNameType string
+
+const SpecialDoerNameCodeOwners SpecialDoerNameType = "CODEOWNERS"
+
// CommentMetaData stores metadata for a comment, these data will not be changed once inserted into database
type CommentMetaData struct {
ProjectColumnID int64 `json:"project_column_id,omitempty"`
ProjectColumnTitle string `json:"project_column_title,omitempty"`
ProjectTitle string `json:"project_title,omitempty"`
+
+ SpecialDoerName SpecialDoerNameType `json:"special_doer_name,omitempty"` // e.g. "CODEOWNERS" for CODEOWNERS-triggered review requests
}
// Comment represents a comment in commit and issue page.
@@ -692,7 +699,7 @@ func (c *Comment) LoadTime(ctx context.Context) error {
return nil
}
var err error
- c.Time, err = GetTrackedTimeByID(ctx, c.TimeID)
+ c.Time, err = GetTrackedTimeByID(ctx, c.IssueID, c.TimeID)
return err
}
@@ -764,6 +771,37 @@ func (c *Comment) CodeCommentLink(ctx context.Context) string {
return fmt.Sprintf("%s/files#%s", c.Issue.Link(), c.HashTag())
}
+func (c *Comment) MetaSpecialDoerTr(locale translation.Locale) template.HTML {
+ if c.CommentMetaData == nil {
+ return ""
+ }
+ if c.CommentMetaData.SpecialDoerName == SpecialDoerNameCodeOwners {
+ return locale.Tr("repo.issues.review.codeowners_rules")
+ }
+ return htmlutil.HTMLFormat("%s", c.CommentMetaData.SpecialDoerName)
+}
+
+func (c *Comment) TimelineRequestedReviewTr(locale translation.Locale, createdStr template.HTML) template.HTML {
+ if c.AssigneeID > 0 {
+		// it guarantees that LoadAssigneeUserAndTeam has been called, and that c.Assignee is the Ghost user (never nil) if the user doesn't exist
+ if c.RemovedAssignee {
+ if c.PosterID == c.AssigneeID {
+ return locale.Tr("repo.issues.review.remove_review_request_self", createdStr)
+ }
+ return locale.Tr("repo.issues.review.remove_review_request", c.Assignee.GetDisplayName(), createdStr)
+ }
+ return locale.Tr("repo.issues.review.add_review_request", c.Assignee.GetDisplayName(), createdStr)
+ }
+ teamName := "Ghost Team"
+ if c.AssigneeTeam != nil {
+ teamName = c.AssigneeTeam.Name
+ }
+ if c.RemovedAssignee {
+ return locale.Tr("repo.issues.review.remove_review_request", teamName, createdStr)
+ }
+ return locale.Tr("repo.issues.review.add_review_request", teamName, createdStr)
+}
+
// CreateComment creates comment with context
func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) {
return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
@@ -780,6 +818,11 @@ func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment,
ProjectTitle: opts.ProjectTitle,
}
}
+ if opts.SpecialDoerName != "" {
+ commentMetaData = &CommentMetaData{
+ SpecialDoerName: opts.SpecialDoerName,
+ }
+ }
comment := &Comment{
Type: opts.Type,
@@ -976,6 +1019,7 @@ type CreateCommentOptions struct {
RefIsPull bool
IsForcePush bool
Invalidated bool
+ SpecialDoerName SpecialDoerNameType // e.g. "CODEOWNERS" for CODEOWNERS-triggered review requests
}
// GetCommentByID returns the comment by given ID.
@@ -990,6 +1034,20 @@ func GetCommentByID(ctx context.Context, id int64) (*Comment, error) {
return c, nil
}
+func GetCommentWithRepoID(ctx context.Context, repoID, commentID int64) (*Comment, error) {
+ c, err := GetCommentByID(ctx, commentID)
+ if err != nil {
+ return nil, err
+ }
+ if err := c.LoadIssue(ctx); err != nil {
+ return nil, err
+ }
+ if c.Issue.RepoID != repoID {
+ return nil, ErrCommentNotExist{commentID, 0}
+ }
+ return c, nil
+}
+
// FindCommentsOptions describes the conditions to Find comments
type FindCommentsOptions struct {
db.ListOptions
diff --git a/models/issues/comment_code.go b/models/issues/comment_code.go
index 55e67a1243..8de52f815a 100644
--- a/models/issues/comment_code.go
+++ b/models/issues/comment_code.go
@@ -102,6 +102,7 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu
continue
}
comment.Review = re
+ comment.Issue = issue
}
comments[n] = comment
n++
diff --git a/models/issues/issue.go b/models/issues/issue.go
index 053b96dceb..655cdebdfc 100644
--- a/models/issues/issue.go
+++ b/models/issues/issue.go
@@ -498,7 +498,7 @@ func (issue *Issue) GetLastComment(ctx context.Context) (*Comment, error) {
return nil, err
}
if !exist {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return &c, nil
}
@@ -682,7 +682,7 @@ func (issue *Issue) GetParticipantIDsByIssue(ctx context.Context) ([]int64, erro
}
// BlockedByDependencies finds all Dependencies an issue is blocked by
-func (issue *Issue) BlockedByDependencies(ctx context.Context, opts db.ListOptions) (issueDeps []*DependencyInfo, err error) {
+func (issue *Issue) BlockedByDependencies(ctx context.Context, opts db.ListOptions) (issueDeps []*DependencyInfo, total int64, err error) {
sess := db.GetEngine(ctx).
Table("issue").
Join("INNER", "repository", "repository.id = issue.repo_id").
@@ -693,13 +693,13 @@ func (issue *Issue) BlockedByDependencies(ctx context.Context, opts db.ListOptio
if opts.Page > 0 {
sess = db.SetSessionPagination(sess, &opts)
}
- err = sess.Find(&issueDeps)
+ total, err = sess.FindAndCount(&issueDeps)
for _, depInfo := range issueDeps {
depInfo.Issue.Repo = &depInfo.Repository
}
- return issueDeps, err
+ return issueDeps, total, err
}
// BlockingDependencies returns all blocking dependencies, aka all other issues a given issue blocks
diff --git a/models/issues/milestone_list.go b/models/issues/milestone_list.go
index 955ab2356d..021b8beb9e 100644
--- a/models/issues/milestone_list.go
+++ b/models/issues/milestone_list.go
@@ -24,6 +24,18 @@ func (milestones MilestoneList) getMilestoneIDs() []int64 {
return ids
}
+// SplitByOpenClosed splits the milestone list into open and closed milestones
+func (milestones MilestoneList) SplitByOpenClosed() (open, closed MilestoneList) {
+ for _, m := range milestones {
+ if m.IsClosed {
+ closed = append(closed, m)
+ } else {
+ open = append(open, m)
+ }
+ }
+ return open, closed
+}
+
// FindMilestoneOptions contain options to get milestones
type FindMilestoneOptions struct {
db.ListOptions
diff --git a/models/issues/pull.go b/models/issues/pull.go
index 1ffcd683d5..9f180f9ac9 100644
--- a/models/issues/pull.go
+++ b/models/issues/pull.go
@@ -658,17 +658,24 @@ func (pr *PullRequest) IsWorkInProgress(ctx context.Context) bool {
// HasWorkInProgressPrefix determines if the given PR title has a Work In Progress prefix
func HasWorkInProgressPrefix(title string) bool {
- for _, prefix := range setting.Repository.PullRequest.WorkInProgressPrefixes {
- if strings.HasPrefix(strings.ToUpper(title), strings.ToUpper(prefix)) {
- return true
- }
- }
- return false
+ _, ok := CutWorkInProgressPrefix(title)
+ return ok
}
-// IsFilesConflicted determines if the Pull Request has changes conflicting with the target branch.
+func CutWorkInProgressPrefix(title string) (origTitle string, ok bool) {
+ for _, prefix := range setting.Repository.PullRequest.WorkInProgressPrefixes {
+ prefixLen := len(prefix)
+ if prefixLen <= len(title) && util.AsciiEqualFold(title[:prefixLen], prefix) {
+ return title[len(prefix):], true
+ }
+ }
+ return title, false
+}
+
+// IsFilesConflicted determines if the Pull Request has changes conflicting with the target branch.
+// Note that a pull request may be in conflict even when no conflicted files are listed.
func (pr *PullRequest) IsFilesConflicted() bool {
- return len(pr.ConflictedFiles) > 0
+ return pr.Status == PullRequestStatusConflict
}
// GetWorkInProgressPrefix returns the prefix used to mark the pull request as a work in progress.
diff --git a/models/issues/pull_test.go b/models/issues/pull_test.go
index 7089af253b..25b27cbe9c 100644
--- a/models/issues/pull_test.go
+++ b/models/issues/pull_test.go
@@ -130,7 +130,7 @@ func TestLoadRequestedReviewers(t *testing.T) {
user1, err := user_model.GetUserByID(t.Context(), 1)
assert.NoError(t, err)
- comment, err := issues_model.AddReviewRequest(t.Context(), issue, user1, &user_model.User{})
+ comment, err := issues_model.AddReviewRequest(t.Context(), issue, user1, &user_model.User{}, false)
assert.NoError(t, err)
assert.NotNil(t, comment)
diff --git a/models/issues/review.go b/models/issues/review.go
index b758fa5ffa..10e3fcd664 100644
--- a/models/issues/review.go
+++ b/models/issues/review.go
@@ -384,7 +384,7 @@ func CreateReview(ctx context.Context, opts CreateReviewOptions) (*Review, error
// GetCurrentReview returns the current pending review of reviewer for given issue
func GetCurrentReview(ctx context.Context, reviewer *user_model.User, issue *Issue) (*Review, error) {
if reviewer == nil {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when reviewer is nil
}
reviews, err := FindReviews(ctx, FindReviewOptions{
Types: []ReviewType{ReviewTypePending},
@@ -643,7 +643,7 @@ func InsertReviews(ctx context.Context, reviews []*Review) error {
}
// AddReviewRequest add a review request from one reviewer
-func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_model.User) (*Comment, error) {
+func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_model.User, isCodeOwners bool) (*Comment, error) {
return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
sess := db.GetEngine(ctx)
@@ -702,6 +702,7 @@ func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_mo
RemovedAssignee: false, // Use RemovedAssignee as !isRequest
AssigneeID: reviewer.ID, // Use AssigneeID as reviewer ID
ReviewID: review.ID,
+ SpecialDoerName: util.Iif(isCodeOwners, SpecialDoerNameCodeOwners, ""),
})
if err != nil {
return nil, err
@@ -767,7 +768,7 @@ func restoreLatestOfficialReview(ctx context.Context, issueID, reviewerID int64)
}
// AddTeamReviewRequest add a review request from one team
-func AddTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organization.Team, doer *user_model.User) (*Comment, error) {
+func AddTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organization.Team, doer *user_model.User, isCodeOwners bool) (*Comment, error) {
return db.WithTx2(ctx, func(ctx context.Context) (*Comment, error) {
review, err := GetTeamReviewerByIssueIDAndTeamID(ctx, issue.ID, reviewer.ID)
if err != nil && !IsErrReviewNotExist(err) {
@@ -812,6 +813,7 @@ func AddTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organizat
RemovedAssignee: false, // Use RemovedAssignee as !isRequest
AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID
ReviewID: review.ID,
+ SpecialDoerName: util.Iif(isCodeOwners, SpecialDoerNameCodeOwners, ""),
})
if err != nil {
return nil, fmt.Errorf("CreateComment(): %w", err)
diff --git a/models/issues/review_test.go b/models/issues/review_test.go
index 6795ea8e66..092d88d174 100644
--- a/models/issues/review_test.go
+++ b/models/issues/review_test.go
@@ -321,14 +321,28 @@ func TestAddReviewRequest(t *testing.T) {
pull.HasMerged = false
assert.NoError(t, pull.UpdateCols(t.Context(), "has_merged"))
issue.IsClosed = true
- _, err = issues_model.AddReviewRequest(t.Context(), issue, reviewer, &user_model.User{})
+ _, err = issues_model.AddReviewRequest(t.Context(), issue, reviewer, &user_model.User{}, false)
assert.Error(t, err)
assert.True(t, issues_model.IsErrReviewRequestOnClosedPR(err))
pull.HasMerged = true
assert.NoError(t, pull.UpdateCols(t.Context(), "has_merged"))
issue.IsClosed = false
- _, err = issues_model.AddReviewRequest(t.Context(), issue, reviewer, &user_model.User{})
+ _, err = issues_model.AddReviewRequest(t.Context(), issue, reviewer, &user_model.User{}, false)
assert.Error(t, err)
assert.True(t, issues_model.IsErrReviewRequestOnClosedPR(err))
+
+ // Test CODEOWNERS review request stores metadata correctly
+ pull2 := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ assert.NoError(t, pull2.LoadIssue(t.Context()))
+ issue2 := pull2.Issue
+ assert.NoError(t, issue2.LoadRepo(t.Context()))
+ reviewer2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 7})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ comment, err := issues_model.AddReviewRequest(t.Context(), issue2, reviewer2, doer, true)
+ assert.NoError(t, err)
+ assert.NotNil(t, comment)
+ assert.NotNil(t, comment.CommentMetaData)
+ assert.Equal(t, issues_model.SpecialDoerNameCodeOwners, comment.CommentMetaData.SpecialDoerName)
}
diff --git a/models/issues/stopwatch.go b/models/issues/stopwatch.go
index 761b8f91a0..f119951b09 100644
--- a/models/issues/stopwatch.go
+++ b/models/issues/stopwatch.go
@@ -12,6 +12,8 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
)
// Stopwatch represents a stopwatch for time tracking.
@@ -232,3 +234,14 @@ func CancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) (
})
return ok, err
}
+
+// RemoveStopwatchesByRepoID removes all stopwatches for a user in a specific repository
+// This function should be called before removing all the issues of the repository.
+func RemoveStopwatchesByRepoID(ctx context.Context, userID, repoID int64) error {
+ _, err := db.GetEngine(ctx).
+ Where("`stopwatch`.user_id = ?", userID).
+ And(builder.In("`stopwatch`.issue_id",
+ builder.Select("id").From("issue").Where(builder.Eq{"repo_id": repoID}))).
+ Delete(new(Stopwatch))
+ return err
+}
diff --git a/models/issues/tracked_time.go b/models/issues/tracked_time.go
index 9c11881e44..0b5c341f1f 100644
--- a/models/issues/tracked_time.go
+++ b/models/issues/tracked_time.go
@@ -311,13 +311,13 @@ func deleteTime(ctx context.Context, t *TrackedTime) error {
}
// GetTrackedTimeByID returns raw TrackedTime without loading attributes by id
-func GetTrackedTimeByID(ctx context.Context, id int64) (*TrackedTime, error) {
+func GetTrackedTimeByID(ctx context.Context, issueID, trackedTimeID int64) (*TrackedTime, error) {
time := new(TrackedTime)
- has, err := db.GetEngine(ctx).ID(id).Get(time)
+ has, err := db.GetEngine(ctx).ID(trackedTimeID).Where("issue_id = ?", issueID).Get(time)
if err != nil {
return nil, err
} else if !has {
- return nil, db.ErrNotExist{Resource: "tracked_time", ID: id}
+ return nil, db.ErrNotExist{Resource: "tracked_time", ID: trackedTimeID}
}
return time, nil
}
diff --git a/models/migrations/base/db.go b/models/migrations/base/db.go
index 479a46379c..3b8f0e00a0 100644
--- a/models/migrations/base/db.go
+++ b/models/migrations/base/db.go
@@ -5,21 +5,14 @@ package base
import (
"context"
- "database/sql"
"errors"
"fmt"
- "os"
- "path"
"reflect"
"regexp"
"strings"
- "time"
- "code.gitea.io/gitea/models/db"
- "code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/util"
"xorm.io/xorm"
"xorm.io/xorm/schemas"
@@ -515,114 +508,3 @@ func ModifyColumn(x *xorm.Engine, tableName string, col *schemas.Column) error {
}
return nil
}
-
-func removeAllWithRetry(dir string) error {
- var err error
- for range 20 {
- err = os.RemoveAll(dir)
- if err == nil {
- break
- }
- time.Sleep(100 * time.Millisecond)
- }
- return err
-}
-
-func newXORMEngine() (*xorm.Engine, error) {
- if err := db.InitEngine(context.Background()); err != nil {
- return nil, err
- }
- x := unittest.GetXORMEngine()
- return x, nil
-}
-
-func deleteDB() error {
- switch {
- case setting.Database.Type.IsSQLite3():
- if err := util.Remove(setting.Database.Path); err != nil {
- return err
- }
- return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
-
- case setting.Database.Type.IsMySQL():
- db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
- setting.Database.User, setting.Database.Passwd, setting.Database.Host))
- if err != nil {
- return err
- }
- defer db.Close()
-
- if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil {
- return err
- }
-
- if _, err = db.Exec("CREATE DATABASE IF NOT EXISTS " + setting.Database.Name); err != nil {
- return err
- }
- return nil
- case setting.Database.Type.IsPostgreSQL():
- db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s",
- setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode))
- if err != nil {
- return err
- }
- defer db.Close()
-
- if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil {
- return err
- }
-
- if _, err = db.Exec("CREATE DATABASE " + setting.Database.Name); err != nil {
- return err
- }
- db.Close()
-
- // Check if we need to setup a specific schema
- if len(setting.Database.Schema) != 0 {
- db, err = sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s",
- setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.Name, setting.Database.SSLMode))
- if err != nil {
- return err
- }
- defer db.Close()
-
- schrows, err := db.Query(fmt.Sprintf("SELECT 1 FROM information_schema.schemata WHERE schema_name = '%s'", setting.Database.Schema))
- if err != nil {
- return err
- }
- defer schrows.Close()
-
- if !schrows.Next() {
- // Create and setup a DB schema
- _, err = db.Exec("CREATE SCHEMA " + setting.Database.Schema)
- if err != nil {
- return err
- }
- }
-
- // Make the user's default search path the created schema; this will affect new connections
- _, err = db.Exec(fmt.Sprintf(`ALTER USER "%s" SET search_path = %s`, setting.Database.User, setting.Database.Schema))
- if err != nil {
- return err
- }
- return nil
- }
- case setting.Database.Type.IsMSSQL():
- host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
- db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
- host, port, "master", setting.Database.User, setting.Database.Passwd))
- if err != nil {
- return err
- }
- defer db.Close()
-
- if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS [%s]", setting.Database.Name)); err != nil {
- return err
- }
- if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE [%s]", setting.Database.Name)); err != nil {
- return err
- }
- }
-
- return nil
-}
diff --git a/models/migrations/base/db_test.go b/models/migrations/base/db_test.go
index 80bf00b22a..00635ca72e 100644
--- a/models/migrations/base/db_test.go
+++ b/models/migrations/base/db_test.go
@@ -11,6 +11,10 @@ import (
"xorm.io/xorm/names"
)
+func TestMain(m *testing.M) {
+ MainTest(m)
+}
+
func Test_DropTableColumns(t *testing.T) {
x, deferable := PrepareTestEnv(t, 0)
if x == nil || t.Failed() {
diff --git a/models/migrations/base/main_test.go b/models/migrations/base/main_test.go
deleted file mode 100644
index c1c789150f..0000000000
--- a/models/migrations/base/main_test.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright 2021 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package base
-
-import (
- "testing"
-)
-
-func TestMain(m *testing.M) {
- MainTest(m)
-}
diff --git a/models/migrations/base/tests.go b/models/migrations/base/tests.go
index 83beca8fb9..17ea951b5a 100644
--- a/models/migrations/base/tests.go
+++ b/models/migrations/base/tests.go
@@ -4,18 +4,20 @@
package base
import (
+ "database/sql"
"fmt"
"os"
+ "path"
"path/filepath"
- "runtime"
"testing"
+ "code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/tempdir"
- "code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/modules/testlogger"
+ "code.gitea.io/gitea/modules/util"
"github.com/stretchr/testify/require"
"xorm.io/xorm"
@@ -24,6 +26,105 @@ import (
// FIXME: this file shouldn't be in a normal package, it should only be compiled for tests
+func newXORMEngine(t *testing.T) (*xorm.Engine, error) {
+ if err := db.InitEngine(t.Context()); err != nil {
+ return nil, err
+ }
+ x := unittest.GetXORMEngine()
+ return x, nil
+}
+
+func deleteDB() error {
+ switch {
+ case setting.Database.Type.IsSQLite3():
+ if err := util.Remove(setting.Database.Path); err != nil {
+ return err
+ }
+ return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
+
+ case setting.Database.Type.IsMySQL():
+ db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
+ setting.Database.User, setting.Database.Passwd, setting.Database.Host))
+ if err != nil {
+ return err
+ }
+ defer db.Close()
+
+ if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil {
+ return err
+ }
+
+ if _, err = db.Exec("CREATE DATABASE IF NOT EXISTS " + setting.Database.Name); err != nil {
+ return err
+ }
+ return nil
+ case setting.Database.Type.IsPostgreSQL():
+ db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s",
+ setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode))
+ if err != nil {
+ return err
+ }
+ defer db.Close()
+
+ if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil {
+ return err
+ }
+
+ if _, err = db.Exec("CREATE DATABASE " + setting.Database.Name); err != nil {
+ return err
+ }
+ db.Close()
+
+ // Check if we need to setup a specific schema
+ if len(setting.Database.Schema) != 0 {
+ db, err = sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s",
+ setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.Name, setting.Database.SSLMode))
+ if err != nil {
+ return err
+ }
+ defer db.Close()
+
+ schrows, err := db.Query(fmt.Sprintf("SELECT 1 FROM information_schema.schemata WHERE schema_name = '%s'", setting.Database.Schema))
+ if err != nil {
+ return err
+ }
+ defer schrows.Close()
+
+ if !schrows.Next() {
+ // Create and setup a DB schema
+ _, err = db.Exec("CREATE SCHEMA " + setting.Database.Schema)
+ if err != nil {
+ return err
+ }
+ }
+
+ // Make the user's default search path the created schema; this will affect new connections
+ _, err = db.Exec(fmt.Sprintf(`ALTER USER "%s" SET search_path = %s`, setting.Database.User, setting.Database.Schema))
+ if err != nil {
+ return err
+ }
+ return nil
+ }
+ case setting.Database.Type.IsMSSQL():
+ host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
+ db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
+ host, port, "master", setting.Database.User, setting.Database.Passwd))
+ if err != nil {
+ return err
+ }
+ defer db.Close()
+
+ if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS [%s]", setting.Database.Name)); err != nil {
+ return err
+ }
+ if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE [%s]", setting.Database.Name)); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
// PrepareTestEnv prepares the test environment and reset the database. The skip parameter should usually be 0.
// Provide models to be sync'd with the database - in particular any models you expect fixtures to be loaded from.
//
@@ -40,7 +141,7 @@ func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, fu
return nil, deferFn
}
- x, err := newXORMEngine()
+ x, err := newXORMEngine(t)
require.NoError(t, err)
if x != nil {
oldDefer := deferFn
@@ -99,51 +200,26 @@ func LoadTableSchemasMap(t *testing.T, x *xorm.Engine) map[string]*schemas.Table
return tableMap
}
-func MainTest(m *testing.M) {
+func mainTest(m *testing.M) int {
testlogger.Init()
- giteaRoot := test.SetupGiteaRoot()
- giteaBinary := "gitea"
- if runtime.GOOS == "windows" {
- giteaBinary += ".exe"
- }
- setting.AppPath = filepath.Join(giteaRoot, giteaBinary)
- if _, err := os.Stat(setting.AppPath); err != nil {
- testlogger.Fatalf("Could not find gitea binary at %s\n", setting.AppPath)
- }
-
- giteaConf := os.Getenv("GITEA_CONF")
- if giteaConf == "" {
- giteaConf = filepath.Join(filepath.Dir(setting.AppPath), "tests/sqlite.ini")
- _, _ = fmt.Fprintf(os.Stderr, "Environment variable $GITEA_CONF not set - defaulting to %s\n", giteaConf)
- }
-
- if !filepath.IsAbs(giteaConf) {
- setting.CustomConf = filepath.Join(giteaRoot, giteaConf)
- } else {
- setting.CustomConf = giteaConf
- }
-
tmpDataPath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("data")
if err != nil {
- testlogger.Fatalf("Unable to create temporary data path %v\n", err)
+ testlogger.Panicf("Unable to create temporary data path %v\n", err)
}
defer cleanup()
- setting.CustomPath = filepath.Join(setting.AppWorkPath, "custom")
setting.AppDataPath = tmpDataPath
unittest.InitSettingsForTesting()
if err = git.InitFull(); err != nil {
- testlogger.Fatalf("Unable to InitFull: %v\n", err)
+ testlogger.Panicf("Unable to InitFull: %v\n", err)
}
setting.LoadDBSetting()
setting.InitLoggersForTest()
-
- exitStatus := m.Run()
-
- if err := removeAllWithRetry(setting.RepoRootPath); err != nil {
- _, _ = fmt.Fprintf(os.Stderr, "os.RemoveAll: %v\n", err)
- }
- os.Exit(exitStatus)
+ return m.Run()
+}
+
+func MainTest(m *testing.M) {
+ os.Exit(mainTest(m))
}
diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go
index ba598f5173..dff8cbf3c8 100644
--- a/models/migrations/migrations.go
+++ b/models/migrations/migrations.go
@@ -399,7 +399,8 @@ func prepareMigrationTasks() []*migration {
newMigration(323, "Add support for actions concurrency", v1_26.AddActionsConcurrency),
newMigration(324, "Fix closed milestone completeness for milestones with no issues", v1_26.FixClosedMilestoneCompleteness),
- newMigration(325, "Add new table project_workflow", v1_26.AddProjectWorkflow),
+ newMigration(325, "Fix missed repo_id when migrate attachments", v1_26.FixMissedRepoIDWhenMigrateAttachments),
+ newMigration(326, "Add new table project_workflow", v1_26.AddProjectWorkflow),
}
return preparedMigrations
}
diff --git a/models/migrations/v1_21/v276.go b/models/migrations/v1_21/v276.go
index 3ab7e22cd0..be24b31902 100644
--- a/models/migrations/v1_21/v276.go
+++ b/models/migrations/v1_21/v276.go
@@ -5,14 +5,10 @@ package v1_21
import (
"context"
- "fmt"
- "path/filepath"
- "strings"
- "code.gitea.io/gitea/modules/git"
- giturl "code.gitea.io/gitea/modules/git/url"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/util"
"xorm.io/xorm"
)
@@ -163,16 +159,13 @@ func migratePushMirrors(x *xorm.Engine) error {
}
func getRemoteAddress(ownerName, repoName, remoteName string) (string, error) {
- repoPath := filepath.Join(setting.RepoRootPath, strings.ToLower(ownerName), strings.ToLower(repoName)+".git")
- if exist, _ := util.IsExist(repoPath); !exist {
+ ctx := context.Background()
+ relativePath := repo_model.RelativePath(ownerName, repoName)
+ if exist, _ := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(relativePath)); !exist {
return "", nil
}
- remoteURL, err := git.GetRemoteAddress(context.Background(), repoPath, remoteName)
- if err != nil {
- return "", fmt.Errorf("get remote %s's address of %s/%s failed: %v", remoteName, ownerName, repoName, err)
- }
- u, err := giturl.ParseGitURL(remoteURL)
+ u, err := gitrepo.GitRemoteGetURL(ctx, repo_model.StorageRepo(relativePath), remoteName)
if err != nil {
return "", err
}
diff --git a/models/migrations/v1_26/v325.go b/models/migrations/v1_26/v325.go
index 5e431f78f4..d81540f44e 100644
--- a/models/migrations/v1_26/v325.go
+++ b/models/migrations/v1_26/v325.go
@@ -1,25 +1,18 @@
-// Copyright 2025 The Gitea Authors. All rights reserved.
+// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package v1_26
import (
- "code.gitea.io/gitea/modules/timeutil"
-
"xorm.io/xorm"
)
-func AddProjectWorkflow(x *xorm.Engine) error {
- type ProjectWorkflow struct {
- ID int64
- ProjectID int64 `xorm:"INDEX"`
- WorkflowEvent string
- WorkflowFilters string `xorm:"TEXT JSON"`
- WorkflowActions string `xorm:"TEXT JSON"`
- Enabled bool `xorm:"DEFAULT true"`
- CreatedUnix timeutil.TimeStamp `xorm:"created"`
- UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
+func FixMissedRepoIDWhenMigrateAttachments(x *xorm.Engine) error {
+ _, err := x.Exec("UPDATE `attachment` SET `repo_id` = (SELECT `repo_id` FROM `issue` WHERE `issue`.`id` = `attachment`.`issue_id`) WHERE `issue_id` > 0 AND (`repo_id` IS NULL OR `repo_id` = 0);")
+ if err != nil {
+ return err
}
- return x.Sync(&ProjectWorkflow{})
+ _, err = x.Exec("UPDATE `attachment` SET `repo_id` = (SELECT `repo_id` FROM `release` WHERE `release`.`id` = `attachment`.`release_id`) WHERE `release_id` > 0 AND (`repo_id` IS NULL OR `repo_id` = 0);")
+ return err
}
diff --git a/models/migrations/v1_26/v325_test.go b/models/migrations/v1_26/v325_test.go
new file mode 100644
index 0000000000..d4a66fee81
--- /dev/null
+++ b/models/migrations/v1_26/v325_test.go
@@ -0,0 +1,45 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_26
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/migrations/base"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_FixMissedRepoIDWhenMigrateAttachments(t *testing.T) {
+ type Attachment struct {
+ ID int64 `xorm:"pk autoincr"`
+ UUID string `xorm:"uuid UNIQUE"`
+ RepoID int64 `xorm:"INDEX"` // this should not be zero
+ IssueID int64 `xorm:"INDEX"` // maybe zero when creating
+ ReleaseID int64 `xorm:"INDEX"` // maybe zero when creating
+ UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added
+ CommentID int64 `xorm:"INDEX"`
+ Name string
+ DownloadCount int64 `xorm:"DEFAULT 0"`
+ Size int64 `xorm:"DEFAULT 0"`
+ CreatedUnix timeutil.TimeStamp `xorm:"created"`
+ }
+
+ type Issue struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX"`
+ }
+
+ type Release struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX"`
+ }
+
+ // Prepare and load the testing database
+ x, deferrable := base.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release))
+ defer deferrable()
+
+ require.NoError(t, FixMissedRepoIDWhenMigrateAttachments(x))
+}
diff --git a/models/migrations/v1_26/v326.go b/models/migrations/v1_26/v326.go
new file mode 100644
index 0000000000..5e431f78f4
--- /dev/null
+++ b/models/migrations/v1_26/v326.go
@@ -0,0 +1,25 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_26
+
+import (
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "xorm.io/xorm"
+)
+
+func AddProjectWorkflow(x *xorm.Engine) error {
+ type ProjectWorkflow struct {
+ ID int64
+ ProjectID int64 `xorm:"INDEX"`
+ WorkflowEvent string
+ WorkflowFilters string `xorm:"TEXT JSON"`
+ WorkflowActions string `xorm:"TEXT JSON"`
+ Enabled bool `xorm:"DEFAULT true"`
+ CreatedUnix timeutil.TimeStamp `xorm:"created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
+ }
+
+ return x.Sync(&ProjectWorkflow{})
+}
diff --git a/models/migrations/v1_9/v82.go b/models/migrations/v1_9/v82.go
index f0307bf07a..8796b0563d 100644
--- a/models/migrations/v1_9/v82.go
+++ b/models/migrations/v1_9/v82.go
@@ -6,11 +6,10 @@ package v1_9
import (
"context"
"fmt"
- "path/filepath"
- "strings"
+ repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
- "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/gitrepo"
"xorm.io/xorm"
)
@@ -34,16 +33,6 @@ func FixReleaseSha1OnReleaseTable(ctx context.Context, x *xorm.Engine) error {
Name string
}
- // UserPath returns the path absolute path of user repositories.
- UserPath := func(userName string) string {
- return filepath.Join(setting.RepoRootPath, strings.ToLower(userName))
- }
-
- // RepoPath returns repository path by given user and repository name.
- RepoPath := func(userName, repoName string) string {
- return filepath.Join(UserPath(userName), strings.ToLower(repoName)+".git")
- }
-
// Update release sha1
const batchSize = 100
sess := x.NewSession()
@@ -99,7 +88,7 @@ func FixReleaseSha1OnReleaseTable(ctx context.Context, x *xorm.Engine) error {
userCache[repo.OwnerID] = user
}
- gitRepo, err = git.OpenRepository(ctx, RepoPath(user.Name, repo.Name))
+ gitRepo, err = gitrepo.OpenRepository(ctx, repo_model.StorageRepo(repo_model.RelativePath(user.Name, repo.Name)))
if err != nil {
return err
}
diff --git a/models/organization/org_user.go b/models/organization/org_user.go
index 4d7527c15f..69cd960944 100644
--- a/models/organization/org_user.go
+++ b/models/organization/org_user.go
@@ -174,13 +174,13 @@ func GetOrgAssignees(ctx context.Context, orgID int64) (_ []*user_model.User, er
func loadOrganizationOwners(ctx context.Context, users user_model.UserList, orgID int64) (map[int64]*TeamUser, error) {
if len(users) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when there are no users
}
ownerTeam, err := GetOwnerTeam(ctx, orgID)
if err != nil {
if IsErrTeamNotExist(err) {
log.Error("Organization does not have owner team: %d", orgID)
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when owner team does not exist
}
return nil, err
}
diff --git a/models/packages/package_blob.go b/models/packages/package_blob.go
index d9c30b6533..e765bbf0c2 100644
--- a/models/packages/package_blob.go
+++ b/models/packages/package_blob.go
@@ -43,13 +43,15 @@ func GetOrInsertBlob(ctx context.Context, pb *PackageBlob) (*PackageBlob, bool,
existing := &PackageBlob{}
- has, err := e.Where(builder.Eq{
+ hashCond := builder.Eq{
"size": pb.Size,
"hash_md5": pb.HashMD5,
"hash_sha1": pb.HashSHA1,
"hash_sha256": pb.HashSHA256,
"hash_sha512": pb.HashSHA512,
- }).Get(existing)
+ }
+
+ has, err := e.Where(hashCond).Get(existing)
if err != nil {
return nil, false, err
}
@@ -57,6 +59,11 @@ func GetOrInsertBlob(ctx context.Context, pb *PackageBlob) (*PackageBlob, bool,
return existing, true, nil
}
if _, err = e.Insert(pb); err != nil {
+ // Handle race condition: another request may have inserted the same blob
+ // between our SELECT and INSERT. Retry the SELECT to get the existing blob.
+ if has, _ = e.Where(hashCond).Get(existing); has {
+ return existing, true, nil
+ }
return nil, false, err
}
return pb, false, nil
diff --git a/models/packages/package_blob_test.go b/models/packages/package_blob_test.go
new file mode 100644
index 0000000000..8b636b4ee0
--- /dev/null
+++ b/models/packages/package_blob_test.go
@@ -0,0 +1,51 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package packages
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "golang.org/x/sync/errgroup"
+)
+
+func TestGetOrInsertBlobConcurrent(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ testBlob := PackageBlob{
+ Size: 123,
+ HashMD5: "md5",
+ HashSHA1: "sha1",
+ HashSHA256: "sha256",
+ HashSHA512: "sha512",
+ }
+
+ const numGoroutines = 3
+ var wg errgroup.Group
+ results := make([]*PackageBlob, numGoroutines)
+ existed := make([]bool, numGoroutines)
+ for idx := range numGoroutines {
+ wg.Go(func() error {
+ blob := testBlob // Create a copy of the test blob for each goroutine
+ var err error
+ results[idx], existed[idx], err = GetOrInsertBlob(t.Context(), &blob)
+ return err
+ })
+ }
+ require.NoError(t, wg.Wait())
+
+	// then: all GetOrInsertBlob calls succeed with the same blob ID, and only one indicates it did not exist before
+ existedCount := 0
+ assert.NotNil(t, results[0])
+ for i := range numGoroutines {
+ assert.Equal(t, results[0].ID, results[i].ID)
+ if existed[i] {
+ existedCount++
+ }
+ }
+ assert.Equal(t, numGoroutines-1, existedCount)
+}
diff --git a/models/project/column.go b/models/project/column.go
index 814794fac7..47ff2dc26a 100644
--- a/models/project/column.go
+++ b/models/project/column.go
@@ -229,13 +229,13 @@ func GetColumn(ctx context.Context, columnID int64) (*Column, error) {
return column, nil
}
-func GetColumnByProjectIDAndColumnID(ctx context.Context, projectID, columnID int64) (*Column, error) {
+func GetColumnByIDAndProjectID(ctx context.Context, columnID, projectID int64) (*Column, error) {
column := new(Column)
- has, err := db.GetEngine(ctx).Where("project_id=? AND id=?", projectID, columnID).Get(column)
+ has, err := db.GetEngine(ctx).ID(columnID).And("project_id=?", projectID).Get(column)
if err != nil {
return nil, err
} else if !has {
- return nil, ErrProjectColumnNotExist{ProjectID: projectID, ColumnID: columnID}
+ return nil, ErrProjectColumnNotExist{ColumnID: columnID}
}
return column, nil
diff --git a/models/project/project.go b/models/project/project.go
index e423a8025b..205a102201 100644
--- a/models/project/project.go
+++ b/models/project/project.go
@@ -323,6 +323,19 @@ func GetProjectByName(ctx context.Context, repoID int64, name string) (*Project,
return p, nil
}
+// GetProjectByIDAndOwner returns the project with the given ID if it is owned by ownerID
+func GetProjectByIDAndOwner(ctx context.Context, id, ownerID int64) (*Project, error) {
+ p := new(Project)
+ has, err := db.GetEngine(ctx).ID(id).And("owner_id = ?", ownerID).Get(p)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrProjectNotExist{ID: id}
+ }
+
+ return p, nil
+}
+
// GetProjectForRepoByID returns the projects in a repository
func GetProjectForRepoByID(ctx context.Context, repoID, id int64) (*Project, error) {
p := new(Project)
diff --git a/models/renderhelper/repo_file.go b/models/renderhelper/repo_file.go
index e0375ed280..f1df8e89e0 100644
--- a/models/renderhelper/repo_file.go
+++ b/models/renderhelper/repo_file.go
@@ -70,6 +70,6 @@ func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository,
"repo": helper.opts.DeprecatedRepoName,
})
}
- rctx = rctx.WithHelper(helper)
+ rctx = rctx.WithHelper(helper).WithEnableHeadingIDGeneration(true)
return rctx
}
diff --git a/models/renderhelper/repo_wiki.go b/models/renderhelper/repo_wiki.go
index b75f1b9701..218b1e4a67 100644
--- a/models/renderhelper/repo_wiki.go
+++ b/models/renderhelper/repo_wiki.go
@@ -71,7 +71,7 @@ func NewRenderContextRepoWiki(ctx context.Context, repo *repo_model.Repository,
"markupAllowShortIssuePattern": "true",
})
}
- rctx = rctx.WithHelper(helper)
+ rctx = rctx.WithHelper(helper).WithEnableHeadingIDGeneration(true)
helper.ctx = rctx
return rctx
}
diff --git a/models/repo/archiver.go b/models/repo/archiver.go
index 4f1b7238d7..ca981a178c 100644
--- a/models/repo/archiver.go
+++ b/models/repo/archiver.go
@@ -107,7 +107,7 @@ func GetRepoArchiver(ctx context.Context, repoID int64, tp ArchiveType, commitID
if has {
return &archiver, nil
}
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
// ExistsRepoArchiverWithStoragePath checks if there is a RepoArchiver for a given storage path
diff --git a/models/repo/attachment.go b/models/repo/attachment.go
index 835bee5402..27856f2d2e 100644
--- a/models/repo/attachment.go
+++ b/models/repo/attachment.go
@@ -166,6 +166,11 @@ func GetAttachmentByReleaseIDFileName(ctx context.Context, releaseID int64, file
return attach, nil
}
+func GetUnlinkedAttachmentsByUserID(ctx context.Context, userID int64) ([]*Attachment, error) {
+ attachments := make([]*Attachment, 0, 10)
+ return attachments, db.GetEngine(ctx).Where("uploader_id = ? AND issue_id = 0 AND release_id = 0 AND comment_id = 0", userID).Find(&attachments)
+}
+
// DeleteAttachment deletes the given attachment and optionally the associated file.
func DeleteAttachment(ctx context.Context, a *Attachment, remove bool) error {
_, err := DeleteAttachments(ctx, []*Attachment{a}, remove)
diff --git a/models/repo/attachment_test.go b/models/repo/attachment_test.go
index d41008344d..07f4c587a7 100644
--- a/models/repo/attachment_test.go
+++ b/models/repo/attachment_test.go
@@ -101,3 +101,19 @@ func TestGetAttachmentsByUUIDs(t *testing.T) {
assert.Equal(t, int64(1), attachList[0].IssueID)
assert.Equal(t, int64(5), attachList[1].IssueID)
}
+
+func TestGetUnlinkedAttachmentsByUserID(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ attachments, err := repo_model.GetUnlinkedAttachmentsByUserID(t.Context(), 8)
+ assert.NoError(t, err)
+ assert.Len(t, attachments, 1)
+ assert.Equal(t, int64(10), attachments[0].ID)
+ assert.Zero(t, attachments[0].IssueID)
+ assert.Zero(t, attachments[0].ReleaseID)
+ assert.Zero(t, attachments[0].CommentID)
+
+ attachments, err = repo_model.GetUnlinkedAttachmentsByUserID(t.Context(), 1)
+ assert.NoError(t, err)
+ assert.Empty(t, attachments)
+}
diff --git a/models/repo/fork.go b/models/repo/fork.go
index 1c75e86458..80b3e5634e 100644
--- a/models/repo/fork.go
+++ b/models/repo/fork.go
@@ -49,7 +49,7 @@ func GetUserFork(ctx context.Context, repoID, userID int64) (*Repository, error)
return nil, err
}
if !has {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return &forkedRepo, nil
}
diff --git a/models/repo/pull_request_default.go b/models/repo/pull_request_default.go
new file mode 100644
index 0000000000..89f8eb6a2c
--- /dev/null
+++ b/models/repo/pull_request_default.go
@@ -0,0 +1,16 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/util"
+)
+
+func (repo *Repository) GetPullRequestTargetBranch(ctx context.Context) string {
+ unitPRConfig := repo.MustGetUnit(ctx, unit.TypePullRequests).PullRequestsConfig()
+ return util.IfZero(unitPRConfig.DefaultTargetBranch, repo.DefaultBranch)
+}
diff --git a/models/repo/pull_request_default_test.go b/models/repo/pull_request_default_test.go
new file mode 100644
index 0000000000..1c4f585ed9
--- /dev/null
+++ b/models/repo/pull_request_default_test.go
@@ -0,0 +1,32 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestDefaultTargetBranchSelection(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ ctx := t.Context()
+ repo := unittest.AssertExistsAndLoadBean(t, &Repository{ID: 1})
+
+ assert.Equal(t, repo.DefaultBranch, repo.GetPullRequestTargetBranch(ctx))
+
+ repo.Units = nil
+ prUnit, err := repo.GetUnit(ctx, unit.TypePullRequests)
+ assert.NoError(t, err)
+ prConfig := prUnit.PullRequestsConfig()
+ prConfig.DefaultTargetBranch = "branch2"
+ prUnit.Config = prConfig
+ assert.NoError(t, UpdateRepoUnit(ctx, prUnit))
+ repo.Units = nil
+ assert.Equal(t, "branch2", repo.GetPullRequestTargetBranch(ctx))
+}
diff --git a/models/repo/release.go b/models/repo/release.go
index 67aa390e6d..e2010c8a38 100644
--- a/models/repo/release.go
+++ b/models/repo/release.go
@@ -93,15 +93,25 @@ func init() {
db.RegisterModel(new(Release))
}
-// LoadAttributes load repo and publisher attributes for a release
-func (r *Release) LoadAttributes(ctx context.Context) error {
- var err error
- if r.Repo == nil {
- r.Repo, err = GetRepositoryByID(ctx, r.RepoID)
- if err != nil {
- return err
- }
+// LegacyAttachmentMissingRepoIDCutoff marks the date when repo_id started to be written during uploads
+// (2026-01-16T00:00:00Z). Older rows might have repo_id=0 and should be tolerated once.
+const LegacyAttachmentMissingRepoIDCutoff timeutil.TimeStamp = 1768521600
+
+func (r *Release) LoadRepo(ctx context.Context) (err error) {
+ if r.Repo != nil {
+ return nil
}
+
+ r.Repo, err = GetRepositoryByID(ctx, r.RepoID)
+ return err
+}
+
+// LoadAttributes load repo and publisher attributes for a release
+func (r *Release) LoadAttributes(ctx context.Context) (err error) {
+ if err := r.LoadRepo(ctx); err != nil {
+ return err
+ }
+
if r.Publisher == nil {
r.Publisher, err = user_model.GetUserByID(ctx, r.PublisherID)
if err != nil {
@@ -168,6 +178,11 @@ func UpdateReleaseNumCommits(ctx context.Context, rel *Release) error {
// AddReleaseAttachments adds a release attachments
func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs []string) (err error) {
+ rel, err := GetReleaseByID(ctx, releaseID)
+ if err != nil {
+ return err
+ }
+
// Check attachments
attachments, err := GetAttachmentsByUUIDs(ctx, attachmentUUIDs)
if err != nil {
@@ -175,6 +190,17 @@ func AddReleaseAttachments(ctx context.Context, releaseID int64, attachmentUUIDs
}
for i := range attachments {
+ if attachments[i].RepoID == 0 && attachments[i].CreatedUnix < LegacyAttachmentMissingRepoIDCutoff {
+ attachments[i].RepoID = rel.RepoID
+ if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Cols("repo_id").Update(attachments[i]); err != nil {
+ return fmt.Errorf("update attachment repo_id [%d]: %w", attachments[i].ID, err)
+ }
+ }
+
+ if attachments[i].RepoID != rel.RepoID {
+ return util.NewPermissionDeniedErrorf("attachment belongs to different repository")
+ }
+
if attachments[i].ReleaseID != 0 {
return util.NewPermissionDeniedErrorf("release permission denied")
}
diff --git a/models/repo/release_test.go b/models/repo/release_test.go
index 01f0fb3cff..2a09ffb36d 100644
--- a/models/repo/release_test.go
+++ b/models/repo/release_test.go
@@ -6,7 +6,9 @@ package repo
import (
"testing"
+ "code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/util"
"github.com/stretchr/testify/assert"
)
@@ -37,3 +39,54 @@ func Test_FindTagsByCommitIDs(t *testing.T) {
assert.Equal(t, "delete-tag", rels[1].TagName)
assert.Equal(t, "v1.0", rels[2].TagName)
}
+
+func TestAddReleaseAttachmentsRejectsDifferentRepo(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ uuid := "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12" // attachment 2 belongs to repo 2
+ err := AddReleaseAttachments(t.Context(), 1, []string{uuid})
+ assert.Error(t, err)
+ assert.ErrorIs(t, err, util.ErrPermissionDenied)
+
+ attach, err := GetAttachmentByUUID(t.Context(), uuid)
+ assert.NoError(t, err)
+ assert.Zero(t, attach.ReleaseID, "attachment should not be linked to release on failure")
+}
+
+func TestAddReleaseAttachmentsAllowsLegacyMissingRepoID(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ legacyUUID := "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a20" // attachment 10 has repo_id 0
+ err := AddReleaseAttachments(t.Context(), 1, []string{legacyUUID})
+ assert.NoError(t, err)
+
+ attach, err := GetAttachmentByUUID(t.Context(), legacyUUID)
+ assert.NoError(t, err)
+ assert.EqualValues(t, 1, attach.RepoID)
+ assert.EqualValues(t, 1, attach.ReleaseID)
+}
+
+func TestAddReleaseAttachmentsRejectsRecentZeroRepoID(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ recentUUID := "a0eebc99-9c0b-4ef8-bb6d-6bb9bd3800aa"
+ attachment := &Attachment{
+ UUID: recentUUID,
+ RepoID: 0,
+ IssueID: 0,
+ ReleaseID: 0,
+ CommentID: 0,
+ Name: "recent-zero",
+ CreatedUnix: LegacyAttachmentMissingRepoIDCutoff + 1,
+ }
+ assert.NoError(t, db.Insert(t.Context(), attachment))
+
+ err := AddReleaseAttachments(t.Context(), 1, []string{recentUUID})
+ assert.Error(t, err)
+ assert.ErrorIs(t, err, util.ErrPermissionDenied)
+
+ attach, err := GetAttachmentByUUID(t.Context(), recentUUID)
+ assert.NoError(t, err)
+ assert.Zero(t, attach.ReleaseID)
+ assert.Zero(t, attach.RepoID)
+}
diff --git a/models/repo/repo.go b/models/repo/repo.go
index 605a9e0f3f..07b9bf30cc 100644
--- a/models/repo/repo.go
+++ b/models/repo/repo.go
@@ -613,16 +613,13 @@ func (repo *Repository) ComposeCompareURL(oldCommitID, newCommitID string) strin
return fmt.Sprintf("%s/%s/compare/%s...%s", url.PathEscape(repo.OwnerName), url.PathEscape(repo.Name), util.PathEscapeSegments(oldCommitID), util.PathEscapeSegments(newCommitID))
}
-func (repo *Repository) ComposeBranchCompareURL(baseRepo *Repository, branchName string) string {
- if baseRepo == nil {
- baseRepo = repo
- }
+func (repo *Repository) ComposeBranchCompareURL(baseRepo *Repository, baseBranch, branchName string) string {
var cmpBranchEscaped string
if repo.ID != baseRepo.ID {
cmpBranchEscaped = fmt.Sprintf("%s/%s:", url.PathEscape(repo.OwnerName), url.PathEscape(repo.Name))
}
cmpBranchEscaped = fmt.Sprintf("%s%s", cmpBranchEscaped, util.PathEscapeSegments(branchName))
- return fmt.Sprintf("%s/compare/%s...%s", baseRepo.Link(), util.PathEscapeSegments(baseRepo.DefaultBranch), cmpBranchEscaped)
+ return fmt.Sprintf("%s/compare/%s...%s", baseRepo.Link(), util.PathEscapeSegments(baseBranch), cmpBranchEscaped)
}
// IsOwnedBy returns true when user owns this repository
@@ -881,7 +878,7 @@ func IsRepositoryModelExist(ctx context.Context, u *user_model.User, repoName st
// non-generated repositories, and TemplateRepo will be left untouched)
func GetTemplateRepo(ctx context.Context, repo *Repository) (*Repository, error) {
if !repo.IsGenerated() {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil for non-generated repositories
}
return GetRepositoryByID(ctx, repo.TemplateID)
diff --git a/models/repo/repo_unit.go b/models/repo/repo_unit.go
index ad0bb9d3f8..d03d5e1e6a 100644
--- a/models/repo/repo_unit.go
+++ b/models/repo/repo_unit.go
@@ -131,6 +131,7 @@ type PullRequestsConfig struct {
DefaultDeleteBranchAfterMerge bool
DefaultMergeStyle MergeStyle
DefaultAllowMaintainerEdit bool
+ DefaultTargetBranch string
}
// FromDB fills up a PullRequestsConfig from serialized format.
diff --git a/models/repo/topic.go b/models/repo/topic.go
index f8f706fc1a..6d5209d821 100644
--- a/models/repo/topic.go
+++ b/models/repo/topic.go
@@ -257,7 +257,7 @@ func DeleteTopic(ctx context.Context, repoID int64, topicName string) (*Topic, e
}
if topic == nil {
// Repo doesn't have topic, can't be removed
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the topic does not exist
}
return db.WithTx2(ctx, func(ctx context.Context) (*Topic, error) {
diff --git a/models/repo/user_repo.go b/models/repo/user_repo.go
index 232087d865..08cf964bc8 100644
--- a/models/repo/user_repo.go
+++ b/models/repo/user_repo.go
@@ -151,7 +151,7 @@ func GetRepoAssignees(ctx context.Context, repo *Repository) (_ []*user_model.Us
func GetIssuePostersWithSearch(ctx context.Context, repo *Repository, isPull bool, search string, isShowFullName bool) ([]*user_model.User, error) {
users := make([]*user_model.User, 0, 30)
var prefixCond builder.Cond = builder.Like{"lower_name", strings.ToLower(search) + "%"}
- if isShowFullName {
+ if search != "" && isShowFullName {
prefixCond = prefixCond.Or(db.BuildCaseInsensitiveLike("full_name", "%"+search+"%"))
}
diff --git a/models/repo/watch.go b/models/repo/watch.go
index a616544cae..1e63d5c3d2 100644
--- a/models/repo/watch.go
+++ b/models/repo/watch.go
@@ -176,3 +176,13 @@ func WatchIfAuto(ctx context.Context, userID, repoID int64, isWrite bool) error
}
return watchRepoMode(ctx, watch, WatchModeAuto)
}
+
+// ClearRepoWatches clears all watches for a repository and from the user that watched it.
+// Used when a repository is set to private.
+func ClearRepoWatches(ctx context.Context, repoID int64) error {
+ if _, err := db.Exec(ctx, "UPDATE `repository` SET num_watches = 0 WHERE id = ?", repoID); err != nil {
+ return err
+ }
+
+ return db.DeleteBeans(ctx, Watch{RepoID: repoID})
+}
diff --git a/models/repo/watch_test.go b/models/repo/watch_test.go
index 19e363f6b0..97576fb787 100644
--- a/models/repo/watch_test.go
+++ b/models/repo/watch_test.go
@@ -13,6 +13,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func TestIsWatching(t *testing.T) {
@@ -119,3 +120,21 @@ func TestWatchIfAuto(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, watchers, prevCount)
}
+
+func TestClearRepoWatches(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ const repoID int64 = 1
+ watchers, err := repo_model.GetRepoWatchersIDs(t.Context(), repoID)
+ require.NoError(t, err)
+ require.NotEmpty(t, watchers)
+
+ assert.NoError(t, repo_model.ClearRepoWatches(t.Context(), repoID))
+
+ watchers, err = repo_model.GetRepoWatchersIDs(t.Context(), repoID)
+ assert.NoError(t, err)
+ assert.Empty(t, watchers)
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ assert.Zero(t, repo.NumWatches)
+}
diff --git a/models/unittest/fixtures_loader.go b/models/unittest/fixtures_loader.go
index d92b0cdb14..5b79cb5643 100644
--- a/models/unittest/fixtures_loader.go
+++ b/models/unittest/fixtures_loader.go
@@ -169,7 +169,7 @@ func (f *fixturesLoaderInternal) Load() error {
func FixturesFileFullPaths(dir string, files []string) (map[string]*FixtureItem, error) {
if files != nil && len(files) == 0 {
- return nil, nil // load nothing
+ return nil, nil //nolint:nilnil // load nothing
}
files = slices.Clone(files)
if len(files) == 0 {
diff --git a/models/unittest/fixtures_test.go b/models/unittest/fixtures_test.go
index 8a4c5f1793..72944ec0db 100644
--- a/models/unittest/fixtures_test.go
+++ b/models/unittest/fixtures_test.go
@@ -4,19 +4,20 @@
package unittest_test
import (
+ "os"
"path/filepath"
"testing"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
- "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/require"
"xorm.io/xorm"
)
var NewFixturesLoaderVendor = func(e *xorm.Engine, opts unittest.FixturesOptions) (unittest.FixturesLoader, error) {
- return nil, nil
+ return nil, nil //nolint:nilnil // no vendor fixtures loader configured
}
/*
@@ -58,9 +59,15 @@ func NewFixturesLoaderVendorGoTestfixtures(e *xorm.Engine, opts unittest.Fixture
}
*/
+func TestMain(m *testing.M) {
+ setting.SetupGiteaTestEnv()
+ os.Exit(m.Run())
+}
+
func prepareTestFixturesLoaders(t testing.TB) unittest.FixturesOptions {
_ = user_model.User{}
- opts := unittest.FixturesOptions{Dir: filepath.Join(test.SetupGiteaRoot(), "models", "fixtures"), Files: []string{
+ giteaRoot := setting.GetGiteaTestSourceRoot()
+ opts := unittest.FixturesOptions{Dir: filepath.Join(giteaRoot, "models", "fixtures"), Files: []string{
"user.yml",
}}
require.NoError(t, unittest.CreateTestEngine(opts))
diff --git a/models/unittest/fscopy.go b/models/unittest/fscopy.go
index 98b01815bd..cddb7a3f77 100644
--- a/models/unittest/fscopy.go
+++ b/models/unittest/fscopy.go
@@ -4,10 +4,12 @@
package unittest
import (
+ "errors"
"os"
"path/filepath"
"strings"
+ "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
@@ -39,7 +41,20 @@ func SyncFile(srcPath, destPath string) error {
// SyncDirs synchronizes files recursively from source to target directory.
// It returns error when error occurs in underlying functions.
func SyncDirs(srcPath, destPath string) error {
- err := os.MkdirAll(destPath, os.ModePerm)
+ destPath = filepath.Clean(destPath)
+ destPathAbs, err := filepath.Abs(destPath)
+ if err != nil {
+ return err
+ }
+ devDataPathAbs, err := filepath.Abs(filepath.Join(setting.GetGiteaTestSourceRoot(), "data"))
+ if err != nil {
+ return err
+ }
+ if strings.HasPrefix(destPathAbs+string(filepath.Separator), devDataPathAbs+string(filepath.Separator)) {
+ return errors.New("destination path should not be inside Gitea data directory, otherwise your data for dev mode will be removed")
+ }
+
+ err = os.MkdirAll(destPath, os.ModePerm)
if err != nil {
return err
}
diff --git a/models/unittest/testdb.go b/models/unittest/testdb.go
index 4611a079ec..63c9a3a999 100644
--- a/models/unittest/testdb.go
+++ b/models/unittest/testdb.go
@@ -21,7 +21,7 @@ import (
"code.gitea.io/gitea/modules/setting/config"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/tempdir"
- "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/modules/testlogger"
"code.gitea.io/gitea/modules/util"
"github.com/stretchr/testify/assert"
@@ -29,16 +29,10 @@ import (
"xorm.io/xorm/names"
)
-var giteaRoot string
-
-func fatalTestError(fmtStr string, args ...any) {
- _, _ = fmt.Fprintf(os.Stderr, fmtStr, args...)
- os.Exit(1)
-}
-
// InitSettingsForTesting initializes config provider and load common settings for tests
func InitSettingsForTesting() {
- setting.IsInTesting = true
+ setting.SetupGiteaTestEnv()
+
log.OsExiter = func(code int) {
if code != 0 {
// non-zero exit code (log.Fatal) shouldn't occur during testing, if it happens, show a full stacktrace for more details
@@ -50,8 +44,12 @@ func InitSettingsForTesting() {
setting.CustomConf = filepath.Join(setting.CustomPath, "conf/app-unittest-tmp.ini")
_ = os.Remove(setting.CustomConf)
}
- setting.InitCfgProvider(setting.CustomConf)
- setting.LoadCommonSettings()
+
+ // init paths and config system for testing
+ getTestEnv := func(key string) string {
+ return ""
+ }
+ setting.InitWorkPathAndCommonConfig(getTestEnv, setting.ArgWorkPathAndCustomConf{CustomConf: setting.CustomConf})
if err := setting.PrepareAppDataPath(); err != nil {
log.Fatal("Can not prepare APP_DATA_PATH: %v", err)
@@ -60,7 +58,6 @@ func InitSettingsForTesting() {
_ = hash.Register("dummy", hash.NewDummyHasher)
setting.PasswordHashAlgo, _ = hash.SetDefaultPasswordHashAlgorithm("dummy")
- setting.InitGiteaEnvVarsForTesting()
}
// TestOptions represents test options
@@ -73,17 +70,18 @@ type TestOptions struct {
// MainTest a reusable TestMain(..) function for unit tests that need to use a
// test database. Creates the test database, and sets necessary settings.
func MainTest(m *testing.M, testOptsArg ...*TestOptions) {
- testOpts := util.OptionalArg(testOptsArg, &TestOptions{})
- giteaRoot = test.SetupGiteaRoot()
- setting.CustomPath = filepath.Join(giteaRoot, "custom")
- InitSettingsForTesting()
+ os.Exit(mainTest(m, testOptsArg...))
+}
+func mainTest(m *testing.M, testOptsArg ...*TestOptions) int {
+ testOpts := util.OptionalArg(testOptsArg, &TestOptions{})
+ InitSettingsForTesting()
+ giteaRoot := setting.GetGiteaTestSourceRoot()
fixturesOpts := FixturesOptions{Dir: filepath.Join(giteaRoot, "models", "fixtures"), Files: testOpts.FixtureFiles}
if err := CreateTestEngine(fixturesOpts); err != nil {
- fatalTestError("Error creating test engine: %v\n", err)
+ testlogger.Panicf("Error creating test engine: %v\n", err)
}
- setting.IsInTesting = true
setting.AppURL = "https://try.gitea.io/"
setting.Domain = "try.gitea.io"
setting.RunUser = "runuser"
@@ -95,20 +93,18 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) {
setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master"
repoRootPath, cleanup1, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("repos")
if err != nil {
- fatalTestError("TempDir: %v\n", err)
+ testlogger.Panicf("TempDir: %v\n", err)
}
defer cleanup1()
setting.RepoRootPath = repoRootPath
appDataPath, cleanup2, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("appdata")
if err != nil {
- fatalTestError("TempDir: %v\n", err)
+ testlogger.Panicf("TempDir: %v\n", err)
}
defer cleanup2()
setting.AppDataPath = appDataPath
- setting.AppWorkPath = giteaRoot
- setting.StaticRootPath = giteaRoot
setting.GravatarSource = "https://secure.gravatar.com/avatar/"
setting.Attachment.Storage.Path = filepath.Join(setting.AppDataPath, "attachments")
@@ -132,22 +128,22 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) {
config.SetDynGetter(system.NewDatabaseDynKeyGetter())
if err = cache.Init(); err != nil {
- fatalTestError("cache.Init: %v\n", err)
+ testlogger.Panicf("cache.Init: %v\n", err)
}
if err = storage.Init(); err != nil {
- fatalTestError("storage.Init: %v\n", err)
+ testlogger.Panicf("storage.Init: %v\n", err)
}
if err = SyncDirs(filepath.Join(giteaRoot, "tests", "gitea-repositories-meta"), setting.RepoRootPath); err != nil {
- fatalTestError("util.SyncDirs: %v\n", err)
+ testlogger.Panicf("util.SyncDirs: %v\n", err)
}
if err = git.InitFull(); err != nil {
- fatalTestError("git.Init: %v\n", err)
+ testlogger.Panicf("git.Init: %v\n", err)
}
if testOpts.SetUp != nil {
if err := testOpts.SetUp(); err != nil {
- fatalTestError("set up failed: %v\n", err)
+ testlogger.Panicf("set up failed: %v\n", err)
}
}
@@ -155,10 +151,10 @@ func MainTest(m *testing.M, testOptsArg ...*TestOptions) {
if testOpts.TearDown != nil {
if err := testOpts.TearDown(); err != nil {
- fatalTestError("tear down failed: %v\n", err)
+ testlogger.Panicf("tear down failed: %v\n", err)
}
}
- os.Exit(exitStatus)
+ return exitStatus
}
// FixturesOptions fixtures needs to be loaded options
@@ -172,7 +168,7 @@ func CreateTestEngine(opts FixturesOptions) error {
x, err := xorm.NewEngine("sqlite3", "file::memory:?cache=shared&_txlock=immediate")
if err != nil {
if strings.Contains(err.Error(), "unknown driver") {
- return fmt.Errorf(`sqlite3 requires: -tags sqlite,sqlite_unlock_notify%s%w`, "\n", err)
+ return fmt.Errorf("sqlite3 requires: -tags sqlite,sqlite_unlock_notify\n%w", err)
}
return err
}
@@ -182,7 +178,7 @@ func CreateTestEngine(opts FixturesOptions) error {
if err = db.SyncAllTables(); err != nil {
return err
}
- switch os.Getenv("GITEA_UNIT_TESTS_LOG_SQL") {
+ switch os.Getenv("GITEA_TEST_LOG_SQL") {
case "true", "1":
x.ShowSQL(true)
}
@@ -199,7 +195,6 @@ func PrepareTestDatabase() error {
// by tests that use the above MainTest(..) function.
func PrepareTestEnv(t testing.TB) {
assert.NoError(t, PrepareTestDatabase())
- metaPath := filepath.Join(giteaRoot, "tests", "gitea-repositories-meta")
+ metaPath := filepath.Join(setting.GetGiteaTestSourceRoot(), "tests", "gitea-repositories-meta")
assert.NoError(t, SyncDirs(metaPath, setting.RepoRootPath))
- test.SetupGiteaRoot() // Makes sure GITEA_ROOT is set
}
diff --git a/models/user/block.go b/models/user/block.go
index 5f2b65a199..f4afd47d0f 100644
--- a/models/user/block.go
+++ b/models/user/block.go
@@ -90,7 +90,7 @@ func GetBlocking(ctx context.Context, blockerID, blockeeID int64) (*Blocking, er
return nil, err
}
if len(blocks) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return blocks[0], nil
}
diff --git a/models/user/email_address.go b/models/user/email_address.go
index 67aa1bdd82..aa483d5f00 100644
--- a/models/user/email_address.go
+++ b/models/user/email_address.go
@@ -215,7 +215,7 @@ func GetEmailAddressByID(ctx context.Context, uid, id int64) (*EmailAddress, err
if has, err := db.GetEngine(ctx).ID(id).Get(email); err != nil {
return nil, err
} else if !has {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return email, nil
}
@@ -276,17 +276,22 @@ func updateActivation(ctx context.Context, email *EmailAddress, activate bool) e
return UpdateUserCols(ctx, user, "rands")
}
-func MakeActiveEmailPrimary(ctx context.Context, emailID int64) error {
- return makeEmailPrimaryInternal(ctx, emailID, true)
+func MakeActiveEmailPrimary(ctx context.Context, ownerID, emailID int64) error {
+ return makeEmailPrimaryInternal(ctx, ownerID, emailID, true)
}
-func MakeInactiveEmailPrimary(ctx context.Context, emailID int64) error {
- return makeEmailPrimaryInternal(ctx, emailID, false)
+func MakeInactiveEmailPrimary(ctx context.Context, ownerID, emailID int64) error {
+ return makeEmailPrimaryInternal(ctx, ownerID, emailID, false)
}
-func makeEmailPrimaryInternal(ctx context.Context, emailID int64, isActive bool) error {
+func makeEmailPrimaryInternal(ctx context.Context, ownerID, emailID int64, isActive bool) error {
email := &EmailAddress{}
- if has, err := db.GetEngine(ctx).ID(emailID).Where(builder.Eq{"is_activated": isActive}).Get(email); err != nil {
+ if has, err := db.GetEngine(ctx).ID(emailID).
+ Where(builder.Eq{
+ "uid": ownerID,
+ "is_activated": isActive,
+ }).
+ Get(email); err != nil {
return err
} else if !has {
return ErrEmailAddressNotExist{}
@@ -336,7 +341,7 @@ func ChangeInactivePrimaryEmail(ctx context.Context, uid int64, oldEmailAddr, ne
if err != nil {
return err
}
- return MakeInactiveEmailPrimary(ctx, newEmail.ID)
+ return MakeInactiveEmailPrimary(ctx, uid, newEmail.ID)
})
}
diff --git a/models/user/email_address_test.go b/models/user/email_address_test.go
index 6ef18fb0f6..4167aaac0d 100644
--- a/models/user/email_address_test.go
+++ b/models/user/email_address_test.go
@@ -46,22 +46,22 @@ func TestIsEmailUsed(t *testing.T) {
func TestMakeEmailPrimary(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
- err := user_model.MakeActiveEmailPrimary(t.Context(), 9999999)
+ err := user_model.MakeActiveEmailPrimary(t.Context(), 1, 9999999)
assert.Error(t, err)
assert.ErrorIs(t, err, user_model.ErrEmailAddressNotExist{})
email := unittest.AssertExistsAndLoadBean(t, &user_model.EmailAddress{Email: "user11@example.com"})
- err = user_model.MakeActiveEmailPrimary(t.Context(), email.ID)
+ err = user_model.MakeActiveEmailPrimary(t.Context(), email.UID, email.ID)
assert.Error(t, err)
assert.ErrorIs(t, err, user_model.ErrEmailAddressNotExist{}) // inactive email is considered as not exist for "MakeActiveEmailPrimary"
email = unittest.AssertExistsAndLoadBean(t, &user_model.EmailAddress{Email: "user9999999@example.com"})
- err = user_model.MakeActiveEmailPrimary(t.Context(), email.ID)
+ err = user_model.MakeActiveEmailPrimary(t.Context(), email.UID, email.ID)
assert.Error(t, err)
assert.True(t, user_model.IsErrUserNotExist(err))
email = unittest.AssertExistsAndLoadBean(t, &user_model.EmailAddress{Email: "user101@example.com"})
- err = user_model.MakeActiveEmailPrimary(t.Context(), email.ID)
+ err = user_model.MakeActiveEmailPrimary(t.Context(), email.UID, email.ID)
assert.NoError(t, err)
user, _ := user_model.GetUserByID(t.Context(), int64(10))
diff --git a/models/user/list.go b/models/user/list.go
index ca589d1e02..4337c34963 100644
--- a/models/user/list.go
+++ b/models/user/list.go
@@ -48,7 +48,7 @@ func (users UserList) GetTwoFaStatus(ctx context.Context) map[int64]bool {
func (users UserList) loadTwoFactorStatus(ctx context.Context) (map[int64]*auth.TwoFactor, error) {
if len(users) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // returns nil when there are no users
}
userIDs := users.GetUserIDs()
diff --git a/models/user/openid.go b/models/user/openid.go
index 420c67ca18..5baa48c824 100644
--- a/models/user/openid.go
+++ b/models/user/openid.go
@@ -102,7 +102,13 @@ func DeleteUserOpenID(ctx context.Context, openid *UserOpenID) (err error) {
}
// ToggleUserOpenIDVisibility toggles visibility of an openid address of given user.
-func ToggleUserOpenIDVisibility(ctx context.Context, id int64) (err error) {
- _, err = db.GetEngine(ctx).Exec("update `user_open_id` set `show` = not `show` where `id` = ?", id)
- return err
+func ToggleUserOpenIDVisibility(ctx context.Context, id int64, user *User) error {
+ affected, err := db.GetEngine(ctx).Exec("update `user_open_id` set `show` = not `show` where `id` = ? AND uid = ?", id, user.ID)
+ if err != nil {
+ return err
+ }
+ if n, _ := affected.RowsAffected(); n != 1 {
+ return util.NewNotExistErrorf("OpenID is unknown")
+ }
+ return nil
}
diff --git a/models/user/openid_test.go b/models/user/openid_test.go
index fa260e7a9e..6d2260324f 100644
--- a/models/user/openid_test.go
+++ b/models/user/openid_test.go
@@ -8,6 +8,7 @@ import (
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/util"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -33,12 +34,14 @@ func TestGetUserOpenIDs(t *testing.T) {
func TestToggleUserOpenIDVisibility(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
+ user, err := user_model.GetUserByID(t.Context(), int64(2))
+ require.NoError(t, err)
oids, err := user_model.GetUserOpenIDs(t.Context(), int64(2))
require.NoError(t, err)
require.Len(t, oids, 1)
assert.True(t, oids[0].Show)
- err = user_model.ToggleUserOpenIDVisibility(t.Context(), oids[0].ID)
+ err = user_model.ToggleUserOpenIDVisibility(t.Context(), oids[0].ID, user)
require.NoError(t, err)
oids, err = user_model.GetUserOpenIDs(t.Context(), int64(2))
@@ -46,7 +49,7 @@ func TestToggleUserOpenIDVisibility(t *testing.T) {
require.Len(t, oids, 1)
assert.False(t, oids[0].Show)
- err = user_model.ToggleUserOpenIDVisibility(t.Context(), oids[0].ID)
+ err = user_model.ToggleUserOpenIDVisibility(t.Context(), oids[0].ID, user)
require.NoError(t, err)
oids, err = user_model.GetUserOpenIDs(t.Context(), int64(2))
@@ -55,3 +58,13 @@ func TestToggleUserOpenIDVisibility(t *testing.T) {
assert.True(t, oids[0].Show)
}
}
+
+func TestToggleUserOpenIDVisibilityRequiresOwnership(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+ unauthorizedUser, err := user_model.GetUserByID(t.Context(), int64(2))
+ require.NoError(t, err)
+
+ err = user_model.ToggleUserOpenIDVisibility(t.Context(), int64(1), unauthorizedUser)
+ require.Error(t, err)
+ assert.ErrorIs(t, err, util.ErrNotExist)
+}
diff --git a/models/user/user.go b/models/user/user.go
index 928bfdf380..48066ea26f 100644
--- a/models/user/user.go
+++ b/models/user/user.go
@@ -13,6 +13,7 @@ import (
"net/url"
"path/filepath"
"regexp"
+ "strconv"
"strings"
"sync"
"time"
@@ -212,7 +213,7 @@ func (u *User) SetLastLogin() {
// GetPlaceholderEmail returns an noreply email
func (u *User) GetPlaceholderEmail() string {
- return fmt.Sprintf("%s@%s", u.LowerName, setting.Service.NoReplyAddress)
+ return fmt.Sprintf("%d+%s@%s", u.ID, u.LowerName, setting.Service.NoReplyAddress)
}
// GetEmail returns a noreply email, if the user has set to keep his
@@ -495,10 +496,10 @@ func (u *User) ShortName(length int) string {
return util.EllipsisDisplayString(u.Name, length)
}
-// IsMailable checks if a user is eligible
-// to receive emails.
+// IsMailable checks if a user is eligible to receive emails.
+// System users like Ghost and Gitea Actions are excluded.
func (u *User) IsMailable() bool {
- return u.IsActive
+ return u.IsActive && !u.IsGiteaActions() && !u.IsGhost()
}
// IsUserExist checks if given username exist,
@@ -1193,19 +1194,23 @@ func (eum *EmailUserMap) GetByEmail(email string) *User {
func GetUsersByEmails(ctx context.Context, emails []string) (*EmailUserMap, error) {
if len(emails) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when there are no emails to look up
}
needCheckEmails := make(container.Set[string])
needCheckUserNames := make(container.Set[string])
+ needCheckUserIDs := make(container.Set[int64])
noReplyAddressSuffix := "@" + strings.ToLower(setting.Service.NoReplyAddress)
for _, email := range emails {
emailLower := strings.ToLower(email)
- if noReplyUserNameLower, ok := strings.CutSuffix(emailLower, noReplyAddressSuffix); ok {
- needCheckUserNames.Add(noReplyUserNameLower)
- needCheckEmails.Add(emailLower)
- } else {
- needCheckEmails.Add(emailLower)
+ needCheckEmails.Add(emailLower)
+ if localPart, ok := strings.CutSuffix(emailLower, noReplyAddressSuffix); ok {
+ name, id := parseLocalPartToNameID(localPart)
+ if id != 0 {
+ needCheckUserIDs.Add(id)
+ } else if name != "" {
+ needCheckUserNames.Add(name)
+ }
}
}
@@ -1235,16 +1240,59 @@ func GetUsersByEmails(ctx context.Context, emails []string) (*EmailUserMap, erro
}
}
- users := make(map[int64]*User, len(needCheckUserNames))
- if err := db.GetEngine(ctx).In("lower_name", needCheckUserNames.Values()).Find(&users); err != nil {
- return nil, err
+ usersByIDs := make(map[int64]*User)
+ if len(needCheckUserIDs) > 0 || len(needCheckUserNames) > 0 {
+ cond := builder.NewCond()
+ if len(needCheckUserIDs) > 0 {
+ cond = cond.Or(builder.In("id", needCheckUserIDs.Values()))
+ }
+ if len(needCheckUserNames) > 0 {
+ cond = cond.Or(builder.In("lower_name", needCheckUserNames.Values()))
+ }
+ if err := db.GetEngine(ctx).Where(cond).Find(&usersByIDs); err != nil {
+ return nil, err
+ }
}
- for _, user := range users {
- results[strings.ToLower(user.GetPlaceholderEmail())] = user
+
+ usersByName := make(map[string]*User)
+ for _, user := range usersByIDs {
+ usersByName[user.LowerName] = user
}
+
+ for _, email := range emails {
+ emailLower := strings.ToLower(email)
+ if _, ok := results[emailLower]; ok {
+ continue
+ }
+
+ localPart, ok := strings.CutSuffix(emailLower, noReplyAddressSuffix)
+ if !ok {
+ continue
+ }
+ name, id := parseLocalPartToNameID(localPart)
+ if user, ok := usersByIDs[id]; ok {
+ results[emailLower] = user
+ } else if user, ok := usersByName[name]; ok {
+ results[emailLower] = user
+ }
+ }
+
return &EmailUserMap{results}, nil
}
+// parseLocalPartToNameID attempts to unparse local-part of email that's in format id+user
+// returns user and id if possible
+func parseLocalPartToNameID(localPart string) (string, int64) {
+ var id int64
+ idstr, name, hasPlus := strings.Cut(localPart, "+")
+ if hasPlus {
+ id, _ = strconv.ParseInt(idstr, 10, 64)
+ } else {
+ name = idstr
+ }
+ return name, id
+}
+
// GetUserByEmail returns the user object by given e-mail if exists.
func GetUserByEmail(ctx context.Context, email string) (*User, error) {
if len(email) == 0 {
@@ -1263,16 +1311,12 @@ func GetUserByEmail(ctx context.Context, email string) (*User, error) {
}
// Finally, if email address is the protected email address:
- if before, ok := strings.CutSuffix(email, "@"+setting.Service.NoReplyAddress); ok {
- username := before
- user := &User{}
- has, err := db.GetEngine(ctx).Where("lower_name=?", username).Get(user)
- if err != nil {
- return nil, err
- }
- if has {
- return user, nil
+ if localPart, ok := strings.CutSuffix(email, strings.ToLower("@"+setting.Service.NoReplyAddress)); ok {
+ name, id := parseLocalPartToNameID(localPart)
+ if id != 0 {
+ return GetUserByID(ctx, id)
}
+ return GetUserByName(ctx, name)
}
return nil, ErrUserNotExist{Name: email}
diff --git a/models/user/user_system.go b/models/user/user_system.go
index 86fbab5745..416ecac783 100644
--- a/models/user/user_system.go
+++ b/models/user/user_system.go
@@ -4,6 +4,7 @@
package user
import (
+ "strconv"
"strings"
"code.gitea.io/gitea/modules/structs"
@@ -23,10 +24,6 @@ func NewGhostUser() *User {
}
}
-func IsGhostUserName(name string) bool {
- return strings.EqualFold(name, GhostUserName)
-}
-
// IsGhost check if user is fake user for a deleted account
func (u *User) IsGhost() bool {
if u == nil {
@@ -41,10 +38,6 @@ const (
ActionsUserEmail = "teabot@gitea.io"
)
-func IsGiteaActionsUserName(name string) bool {
- return strings.EqualFold(name, ActionsUserName)
-}
-
// NewActionsUser creates and returns a fake user for running the actions.
func NewActionsUser() *User {
return &User{
@@ -61,6 +54,27 @@ func NewActionsUser() *User {
}
}
+func NewActionsUserWithTaskID(id int64) *User {
+ u := NewActionsUser()
+ // LoginName is for only internal usage in this case, so it can be moved to other fields in the future
+ u.LoginSource = -1
+ u.LoginName = "@" + ActionsUserName + "/" + strconv.FormatInt(id, 10)
+ return u
+}
+
+func GetActionsUserTaskID(u *User) (int64, bool) {
+ if u == nil || u.ID != ActionsUserID {
+ return 0, false
+ }
+ prefix, payload, _ := strings.Cut(u.LoginName, "/")
+ if prefix != "@"+ActionsUserName {
+ return 0, false
+ } else if taskID, err := strconv.ParseInt(payload, 10, 64); err == nil {
+ return taskID, true
+ }
+ return 0, false
+}
+
func (u *User) IsGiteaActions() bool {
return u != nil && u.ID == ActionsUserID
}
@@ -97,10 +111,10 @@ func (u *User) IsProjectWorkflows() bool {
}
func GetSystemUserByName(name string) *User {
- if IsGhostUserName(name) {
+ if strings.EqualFold(name, GhostUserName) {
return NewGhostUser()
}
- if IsGiteaActionsUserName(name) {
+ if strings.EqualFold(name, ActionsUserName) {
return NewActionsUser()
}
if IsProjectWorkflowsUserName(name) {
diff --git a/models/user/user_system_test.go b/models/user/user_system_test.go
index 9bc9d4552f..bfc56322a1 100644
--- a/models/user/user_system_test.go
+++ b/models/user/user_system_test.go
@@ -16,14 +16,20 @@ func TestSystemUser(t *testing.T) {
assert.Equal(t, "Ghost", u.Name)
assert.Equal(t, "ghost", u.LowerName)
assert.True(t, u.IsGhost())
- assert.True(t, IsGhostUserName("gHost"))
+
+ u = GetSystemUserByName("gHost")
+ require.NotNil(t, u)
+ assert.Equal(t, "Ghost", u.Name)
u, err = GetPossibleUserByID(t.Context(), -2)
require.NoError(t, err)
assert.Equal(t, "gitea-actions", u.Name)
assert.Equal(t, "gitea-actions", u.LowerName)
assert.True(t, u.IsGiteaActions())
- assert.True(t, IsGiteaActionsUserName("Gitea-actionS"))
+
+ u = GetSystemUserByName("Gitea-actionS")
+ require.NotNil(t, u)
+ assert.Equal(t, "Gitea Actions", u.FullName)
u, err = GetPossibleUserByID(t.Context(), -3)
require.NoError(t, err)
diff --git a/models/user/user_test.go b/models/user/user_test.go
index 923f2cd40e..956eaeafb4 100644
--- a/models/user/user_test.go
+++ b/models/user/user_test.go
@@ -51,12 +51,27 @@ func TestOAuth2Application_LoadUser(t *testing.T) {
func TestUserEmails(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
+ defer test.MockVariableValue(&setting.Service.NoReplyAddress, "NoReply.gitea.internal")()
t.Run("GetUserEmailsByNames", func(t *testing.T) {
- // ignore none active user email
+ // ignore not active user email
assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(t.Context(), []string{"user8", "user9"}))
assert.ElementsMatch(t, []string{"user8@example.com", "user5@example.com"}, user_model.GetUserEmailsByNames(t.Context(), []string{"user8", "user5"}))
assert.ElementsMatch(t, []string{"user8@example.com"}, user_model.GetUserEmailsByNames(t.Context(), []string{"user8", "org7"}))
})
+
+ cases := []struct {
+ Email string
+ UID int64
+ }{
+ {"UseR1@example.com", 1},
+ {"user1-2@example.COM", 1},
+ {"USER2@" + setting.Service.NoReplyAddress, 2},
+ {"2+user2@" + setting.Service.NoReplyAddress, 2},
+ {"2+oldUser2UsernameWhichDoesNotMatterForQuery@" + setting.Service.NoReplyAddress, 2},
+ {"99999+badUser@" + setting.Service.NoReplyAddress, 0},
+ {"user4@example.com", 4},
+ {"no-such", 0},
+ }
t.Run("GetUsersByEmails", func(t *testing.T) {
defer test.MockVariableValue(&setting.Service.NoReplyAddress, "NoReply.gitea.internal")()
testGetUserByEmail := func(t *testing.T, email string, uid int64) {
@@ -70,15 +85,27 @@ func TestUserEmails(t *testing.T) {
require.NotNil(t, user)
assert.Equal(t, uid, user.ID)
}
- cases := []struct {
- Email string
- UID int64
- }{
- {"UseR1@example.com", 1},
- {"user1-2@example.COM", 1},
- {"USER2@" + setting.Service.NoReplyAddress, 2},
- {"user4@example.com", 4},
- {"no-such", 0},
+ for _, c := range cases {
+ t.Run(c.Email, func(t *testing.T) {
+ testGetUserByEmail(t, c.Email, c.UID)
+ })
+ }
+
+ t.Run("NoReplyConflict", func(t *testing.T) {
+ setting.Service.NoReplyAddress = "example.com"
+ testGetUserByEmail(t, "user1-2@example.COM", 1)
+ })
+ })
+ t.Run("GetUserByEmail", func(t *testing.T) {
+ testGetUserByEmail := func(t *testing.T, email string, uid int64) {
+ user, err := user_model.GetUserByEmail(t.Context(), email)
+ if uid == 0 {
+ require.Error(t, err)
+ assert.Nil(t, user)
+ } else {
+ require.NotNil(t, user)
+ assert.Equal(t, uid, user.ID)
+ }
}
for _, c := range cases {
t.Run(c.Email, func(t *testing.T) {
diff --git a/models/webhook/webhook_system.go b/models/webhook/webhook_system.go
index 58d9d4a5c1..e8b5040c96 100644
--- a/models/webhook/webhook_system.go
+++ b/models/webhook/webhook_system.go
@@ -9,19 +9,32 @@ import (
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/optional"
+
+ "xorm.io/builder"
)
-// GetSystemOrDefaultWebhooks returns webhooks by given argument or all if argument is missing.
-func GetSystemOrDefaultWebhooks(ctx context.Context, isSystemWebhook optional.Option[bool]) ([]*Webhook, error) {
- webhooks := make([]*Webhook, 0, 5)
- if !isSystemWebhook.Has() {
- return webhooks, db.GetEngine(ctx).Where("repo_id=? AND owner_id=?", 0, 0).
- Find(&webhooks)
- }
+// ListSystemWebhookOptions options for listing system or default webhooks
+type ListSystemWebhookOptions struct {
+ db.ListOptions
+ IsActive optional.Option[bool]
+ IsSystem optional.Option[bool]
+}
- return webhooks, db.GetEngine(ctx).
- Where("repo_id=? AND owner_id=? AND is_system_webhook=?", 0, 0, isSystemWebhook.Value()).
- Find(&webhooks)
+func (opts ListSystemWebhookOptions) ToConds() builder.Cond {
+ cond := builder.NewCond()
+ cond = cond.And(builder.Eq{"webhook.repo_id": 0}, builder.Eq{"webhook.owner_id": 0})
+ if opts.IsActive.Has() {
+ cond = cond.And(builder.Eq{"webhook.is_active": opts.IsActive.Value()})
+ }
+ if opts.IsSystem.Has() {
+ cond = cond.And(builder.Eq{"is_system_webhook": opts.IsSystem.Value()})
+ }
+ return cond
+}
+
+// GetGlobalWebhooks returns global (default and/or system) webhooks
+func GetGlobalWebhooks(ctx context.Context, opts *ListSystemWebhookOptions) ([]*Webhook, int64, error) {
+ return db.FindAndCount[Webhook](ctx, opts)
}
// GetDefaultWebhooks returns all admin-default webhooks.
diff --git a/models/webhook/webhook_system_test.go b/models/webhook/webhook_system_test.go
index 8aac693995..d0013c6873 100644
--- a/models/webhook/webhook_system_test.go
+++ b/models/webhook/webhook_system_test.go
@@ -12,23 +12,24 @@ import (
"github.com/stretchr/testify/assert"
)
-func TestGetSystemOrDefaultWebhooks(t *testing.T) {
+func TestListSystemWebhookOptions(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
-
- hooks, err := GetSystemOrDefaultWebhooks(t.Context(), optional.None[bool]())
+ opts := ListSystemWebhookOptions{IsSystem: optional.None[bool]()}
+ hooks, _, err := GetGlobalWebhooks(t.Context(), &opts)
assert.NoError(t, err)
if assert.Len(t, hooks, 2) {
assert.Equal(t, int64(5), hooks[0].ID)
assert.Equal(t, int64(6), hooks[1].ID)
}
-
- hooks, err = GetSystemOrDefaultWebhooks(t.Context(), optional.Some(true))
+ opts.IsSystem = optional.Some(true)
+ hooks, _, err = GetGlobalWebhooks(t.Context(), &opts)
assert.NoError(t, err)
if assert.Len(t, hooks, 1) {
assert.Equal(t, int64(5), hooks[0].ID)
}
- hooks, err = GetSystemOrDefaultWebhooks(t.Context(), optional.Some(false))
+ opts.IsSystem = optional.Some(false)
+ hooks, _, err = GetGlobalWebhooks(t.Context(), &opts)
assert.NoError(t, err)
if assert.Len(t, hooks, 1) {
assert.Equal(t, int64(6), hooks[0].ID)
diff --git a/modules/actions/workflows.go b/modules/actions/workflows.go
index 26a6ebc370..72892f4124 100644
--- a/modules/actions/workflows.go
+++ b/modules/actions/workflows.go
@@ -11,6 +11,7 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/glob"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
webhook_module "code.gitea.io/gitea/modules/webhook"
@@ -41,22 +42,30 @@ func IsWorkflow(path string) bool {
return false
}
- return strings.HasPrefix(path, ".gitea/workflows") || strings.HasPrefix(path, ".github/workflows")
+ for _, workflowDir := range setting.Actions.WorkflowDirs {
+ if strings.HasPrefix(path, workflowDir+"/") {
+ return true
+ }
+ }
+ return false
}
func ListWorkflows(commit *git.Commit) (string, git.Entries, error) {
- rpath := ".gitea/workflows"
- tree, err := commit.SubTree(rpath)
- if _, ok := err.(git.ErrNotExist); ok {
- rpath = ".github/workflows"
- tree, err = commit.SubTree(rpath)
+ var tree *git.Tree
+ var err error
+ var workflowDir string
+ for _, workflowDir = range setting.Actions.WorkflowDirs {
+ tree, err = commit.SubTree(workflowDir)
+ if err == nil {
+ break
+ }
+ if !git.IsErrNotExist(err) {
+ return "", nil, err
+ }
}
- if _, ok := err.(git.ErrNotExist); ok {
+ if tree == nil {
return "", nil, nil
}
- if err != nil {
- return "", nil, err
- }
entries, err := tree.ListEntriesRecursiveFast()
if err != nil {
@@ -69,7 +78,7 @@ func ListWorkflows(commit *git.Commit) (string, git.Entries, error) {
ret = append(ret, entry)
}
}
- return rpath, ret, nil
+ return workflowDir, ret, nil
}
func GetContentFromEntry(entry *git.TreeEntry) ([]byte, error) {
diff --git a/modules/actions/workflows_test.go b/modules/actions/workflows_test.go
index 89620fb698..77a65aae49 100644
--- a/modules/actions/workflows_test.go
+++ b/modules/actions/workflows_test.go
@@ -7,12 +7,83 @@ import (
"testing"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
webhook_module "code.gitea.io/gitea/modules/webhook"
"github.com/stretchr/testify/assert"
)
+func TestIsWorkflow(t *testing.T) {
+ oldDirs := setting.Actions.WorkflowDirs
+ defer func() {
+ setting.Actions.WorkflowDirs = oldDirs
+ }()
+
+ tests := []struct {
+ name string
+ dirs []string
+ path string
+ expected bool
+ }{
+ {
+ name: "default with yml extension",
+ dirs: []string{".gitea/workflows", ".github/workflows"},
+ path: ".gitea/workflows/test.yml",
+ expected: true,
+ },
+ {
+ name: "default with yaml extension",
+ dirs: []string{".gitea/workflows", ".github/workflows"},
+ path: ".github/workflows/test.yaml",
+ expected: true,
+ },
+ {
+ name: "only gitea configured, github path rejected",
+ dirs: []string{".gitea/workflows"},
+ path: ".github/workflows/test.yml",
+ expected: false,
+ },
+ {
+ name: "only github configured, gitea path rejected",
+ dirs: []string{".github/workflows"},
+ path: ".gitea/workflows/test.yml",
+ expected: false,
+ },
+ {
+ name: "custom workflow dir",
+ dirs: []string{".custom/workflows"},
+ path: ".custom/workflows/deploy.yml",
+ expected: true,
+ },
+ {
+ name: "non-workflow file",
+ dirs: []string{".gitea/workflows", ".github/workflows"},
+ path: ".gitea/workflows/readme.md",
+ expected: false,
+ },
+ {
+ name: "directory boundary",
+ dirs: []string{".gitea/workflows"},
+ path: ".gitea/workflows2/test.yml",
+ expected: false,
+ },
+ {
+ name: "unrelated path",
+ dirs: []string{".gitea/workflows", ".github/workflows"},
+ path: "src/main.go",
+ expected: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ setting.Actions.WorkflowDirs = tt.dirs
+ assert.Equal(t, tt.expected, IsWorkflow(tt.path))
+ })
+ }
+}
+
func TestDetectMatched(t *testing.T) {
testCases := []struct {
desc string
diff --git a/modules/analyze/code_language.go b/modules/analyze/code_language.go
index 74e7a06d06..d8589861d3 100644
--- a/modules/analyze/code_language.go
+++ b/modules/analyze/code_language.go
@@ -4,12 +4,13 @@
package analyze
import (
- "path/filepath"
+ "path"
"github.com/go-enry/go-enry/v2"
)
// GetCodeLanguage detects code language based on file name and content
+// It can be slow when the content is used for detection
func GetCodeLanguage(filename string, content []byte) string {
if language, ok := enry.GetLanguageByExtension(filename); ok {
return language
@@ -23,5 +24,5 @@ func GetCodeLanguage(filename string, content []byte) string {
return enry.OtherLanguage
}
- return enry.GetLanguage(filepath.Base(filename), content)
+ return enry.GetLanguage(path.Base(filename), content)
}
diff --git a/modules/analyze/vendor.go b/modules/analyze/vendor.go
index adcca923dd..f3e75f535f 100644
--- a/modules/analyze/vendor.go
+++ b/modules/analyze/vendor.go
@@ -4,10 +4,28 @@
package analyze
import (
+ "path"
+ "strings"
+
"github.com/go-enry/go-enry/v2"
)
-// IsVendor returns whether or not path is a vendor path.
-func IsVendor(path string) bool {
- return enry.IsVendor(path)
+// IsVendor returns whether the path is a vendor path.
+// It uses go-enry's IsVendor function but overrides its detection for certain
+// special cases that shouldn't be marked as vendored in the diff view.
+func IsVendor(treePath string) bool {
+ if !enry.IsVendor(treePath) {
+ return false
+ }
+
+ // Override detection for single files
+ basename := path.Base(treePath)
+ switch basename {
+ case ".gitignore", ".gitattributes", ".gitmodules":
+ return false
+ }
+ if strings.HasPrefix(treePath, ".github/") || strings.HasPrefix(treePath, ".gitea/") {
+ return false
+ }
+ return true
}
diff --git a/modules/analyze/vendor_test.go b/modules/analyze/vendor_test.go
index 02a51d4c8f..6efb825de6 100644
--- a/modules/analyze/vendor_test.go
+++ b/modules/analyze/vendor_test.go
@@ -14,6 +14,7 @@ func TestIsVendor(t *testing.T) {
path string
want bool
}{
+ // Original go-enry test cases
{"cache/", true},
{"random/cache/", true},
{"cache", false},
@@ -34,6 +35,14 @@ func TestIsVendor(t *testing.T) {
{"a/docs/_build/", true},
{"a/dasdocs/_build-vsdoc.js", true},
{"a/dasdocs/_build-vsdoc.j", false},
+
+ // Override: Git/GitHub/Gitea-related paths should NOT be detected as vendored
+ {".gitignore", false},
+ {".gitattributes", false},
+ {".gitmodules", false},
+ {"src/.gitignore", false},
+ {".github/workflows/ci.yml", false},
+ {".gitea/workflows/ci.yml", false},
}
for _, tt := range tests {
t.Run(tt.path, func(t *testing.T) {
diff --git a/modules/assetfs/layered.go b/modules/assetfs/layered.go
index ce55475bd9..41e4ca7376 100644
--- a/modules/assetfs/layered.go
+++ b/modules/assetfs/layered.go
@@ -6,9 +6,7 @@ package assetfs
import (
"context"
"fmt"
- "io"
"io/fs"
- "net/http"
"os"
"path/filepath"
"sort"
@@ -25,7 +23,7 @@ import (
// Layer represents a layer in a layered asset file-system. It has a name and works like http.FileSystem
type Layer struct {
name string
- fs http.FileSystem
+ fs fs.FS
localPath string
}
@@ -34,7 +32,7 @@ func (l *Layer) Name() string {
}
// Open opens the named file. The caller is responsible for closing the file.
-func (l *Layer) Open(name string) (http.File, error) {
+func (l *Layer) Open(name string) (fs.File, error) {
return l.fs.Open(name)
}
@@ -48,12 +46,12 @@ func Local(name, base string, sub ...string) *Layer {
panic(fmt.Sprintf("Unable to get absolute path for %q: %v", base, err))
}
root := util.FilePathJoinAbs(base, sub...)
- return &Layer{name: name, fs: http.Dir(root), localPath: root}
+ return &Layer{name: name, fs: os.DirFS(root), localPath: root}
}
// Bindata returns a new Layer with the given name, it serves files from the given bindata asset.
func Bindata(name string, fs fs.FS) *Layer {
- return &Layer{name: name, fs: http.FS(fs)}
+ return &Layer{name: name, fs: fs}
}
// LayeredFS is a layered asset file-system. It works like http.FileSystem, but it can have multiple layers.
@@ -69,7 +67,7 @@ func Layered(layers ...*Layer) *LayeredFS {
}
// Open opens the named file. The caller is responsible for closing the file.
-func (l *LayeredFS) Open(name string) (http.File, error) {
+func (l *LayeredFS) Open(name string) (fs.File, error) {
for _, layer := range l.layers {
f, err := layer.Open(name)
if err == nil || !os.IsNotExist(err) {
@@ -89,40 +87,34 @@ func (l *LayeredFS) ReadFile(elems ...string) ([]byte, error) {
func (l *LayeredFS) ReadLayeredFile(elems ...string) ([]byte, string, error) {
name := util.PathJoinRel(elems...)
for _, layer := range l.layers {
- f, err := layer.Open(name)
+ bs, err := fs.ReadFile(layer, name)
if os.IsNotExist(err) {
continue
} else if err != nil {
return nil, layer.name, err
}
- bs, err := io.ReadAll(f)
- _ = f.Close()
return bs, layer.name, err
}
return nil, "", fs.ErrNotExist
}
-func shouldInclude(info fs.FileInfo, fileMode ...bool) bool {
- if util.IsCommonHiddenFileName(info.Name()) {
+func shouldInclude(dirEntry fs.DirEntry, fileMode ...bool) bool {
+ if util.IsCommonHiddenFileName(dirEntry.Name()) {
return false
}
if len(fileMode) == 0 {
return true
} else if len(fileMode) == 1 {
- return fileMode[0] == !info.Mode().IsDir()
+ return fileMode[0] == !dirEntry.IsDir()
}
panic("too many arguments for fileMode in shouldInclude")
}
-func readDir(layer *Layer, name string) ([]fs.FileInfo, error) {
- f, err := layer.Open(name)
- if os.IsNotExist(err) {
+func readDirOptional(layer *Layer, name string) (entries []fs.DirEntry, err error) {
+ if entries, err = fs.ReadDir(layer, name); os.IsNotExist(err) {
return nil, nil
- } else if err != nil {
- return nil, err
}
- defer f.Close()
- return f.Readdir(-1)
+ return entries, err
}
// ListFiles lists files/directories in the given directory. The fileMode controls the returned files.
@@ -133,13 +125,13 @@ func readDir(layer *Layer, name string) ([]fs.FileInfo, error) {
func (l *LayeredFS) ListFiles(name string, fileMode ...bool) ([]string, error) {
fileSet := make(container.Set[string])
for _, layer := range l.layers {
- infos, err := readDir(layer, name)
+ entries, err := readDirOptional(layer, name)
if err != nil {
return nil, err
}
- for _, info := range infos {
- if shouldInclude(info, fileMode...) {
- fileSet.Add(info.Name())
+ for _, entry := range entries {
+ if shouldInclude(entry, fileMode...) {
+ fileSet.Add(entry.Name())
}
}
}
@@ -163,16 +155,16 @@ func listAllFiles(layers []*Layer, name string, fileMode ...bool) ([]string, err
var list func(dir string) error
list = func(dir string) error {
for _, layer := range layers {
- infos, err := readDir(layer, dir)
+ entries, err := readDirOptional(layer, dir)
if err != nil {
return err
}
- for _, info := range infos {
- path := util.PathJoinRelX(dir, info.Name())
- if shouldInclude(info, fileMode...) {
+ for _, entry := range entries {
+ path := util.PathJoinRelX(dir, entry.Name())
+ if shouldInclude(entry, fileMode...) {
fileSet.Add(path)
}
- if info.IsDir() {
+ if entry.IsDir() {
if err = list(path); err != nil {
return err
}
diff --git a/modules/emoji/emoji_data.go b/modules/emoji/emoji_data.go
index 8d0ae0a43e..ef25e3d9ea 100644
--- a/modules/emoji/emoji_data.go
+++ b/modules/emoji/emoji_data.go
@@ -4,7 +4,7 @@
package emoji
// Code generated by build/generate-emoji.go. DO NOT EDIT.
-// Sourced from https://raw.githubusercontent.com/github/gemoji/master/db/emoji.json
+// Sourced from https://raw.githubusercontent.com/rhysd/gemoji/537ff2d7e0496e9964824f7f73ec7ece88c9765a/db/emoji.json
var GemojiData = Gemoji{
{"\U0001f44d", "thumbs up", []string{"+1", "thumbsup"}, "6.0", true},
{"\U0001f44d\U0001f3ff", "thumbs up: Dark Skin Tone", []string{"+1_Dark_Skin_Tone"}, "12.0", false},
@@ -345,10 +345,12 @@ var GemojiData = Gemoji{
{"\U0001f1ee\U0001f1f4", "flag: British Indian Ocean Territory", []string{"british_indian_ocean_territory"}, "6.0", false},
{"\U0001f1fb\U0001f1ec", "flag: British Virgin Islands", []string{"british_virgin_islands"}, "6.0", false},
{"\U0001f966", "broccoli", []string{"broccoli"}, "11.0", false},
+ {"\u26d3\ufe0f\u200d\U0001f4a5", "broken chain", []string{"broken_chain"}, "15.1", false},
{"\U0001f494", "broken heart", []string{"broken_heart"}, "6.0", false},
{"\U0001f9f9", "broom", []string{"broom"}, "11.0", false},
{"\U0001f7e4", "brown circle", []string{"brown_circle"}, "12.0", false},
{"\U0001f90e", "brown heart", []string{"brown_heart"}, "12.0", false},
+ {"\U0001f344\u200d\U0001f7eb", "brown mushroom", []string{"brown_mushroom"}, "15.1", false},
{"\U0001f7eb", "brown square", []string{"brown_square"}, "12.0", false},
{"\U0001f1e7\U0001f1f3", "flag: Brunei", []string{"brunei"}, "6.0", false},
{"\U0001f9cb", "bubble tea", []string{"bubble_tea"}, "13.0", false},
@@ -838,6 +840,7 @@ var GemojiData = Gemoji{
{"\U0001f62e\u200d\U0001f4a8", "face exhaling", []string{"face_exhaling"}, "13.1", false},
{"\U0001f979", "face holding back tears", []string{"face_holding_back_tears"}, "14.0", false},
{"\U0001f636\u200d\U0001f32b\ufe0f", "face in clouds", []string{"face_in_clouds"}, "13.1", false},
+ {"\U0001fae9", "face with bags under eyes", []string{"face_with_bags_under_eyes"}, "16.0", false},
{"\U0001fae4", "face with diagonal mouth", []string{"face_with_diagonal_mouth"}, "14.0", false},
{"\U0001f915", "face with head-bandage", []string{"face_with_head_bandage"}, "8.0", false},
{"\U0001fae2", "face with open eyes and hand over mouth", []string{"face_with_open_eyes_and_hand_over_mouth"}, "14.0", false},
@@ -879,6 +882,10 @@ var GemojiData = Gemoji{
{"\U0001f1eb\U0001f1f0", "flag: Falkland Islands", []string{"falkland_islands"}, "6.0", false},
{"\U0001f342", "fallen leaf", []string{"fallen_leaf"}, "6.0", false},
{"\U0001f46a", "family", []string{"family"}, "6.0", false},
+ {"\U0001f9d1\u200d\U0001f9d1\u200d\U0001f9d2", "family: adult, adult, child", []string{"family_adult_adult_child"}, "15.1", false},
+ {"\U0001f9d1\u200d\U0001f9d1\u200d\U0001f9d2\u200d\U0001f9d2", "family: adult, adult, child, child", []string{"family_adult_adult_child_child"}, "15.1", false},
+ {"\U0001f9d1\u200d\U0001f9d2", "family: adult, child", []string{"family_adult_child"}, "15.1", false},
+ {"\U0001f9d1\u200d\U0001f9d2\u200d\U0001f9d2", "family: adult, child, child", []string{"family_adult_child_child"}, "15.1", false},
{"\U0001f468\u200d\U0001f466", "family: man, boy", []string{"family_man_boy"}, "6.0", false},
{"\U0001f468\u200d\U0001f466\u200d\U0001f466", "family: man, boy, boy", []string{"family_man_boy_boy"}, "6.0", false},
{"\U0001f468\u200d\U0001f467", "family: man, girl", []string{"family_man_girl"}, "6.0", false},
@@ -931,6 +938,7 @@ var GemojiData = Gemoji{
{"\U0001f4c1", "file folder", []string{"file_folder"}, "6.0", false},
{"\U0001f4fd\ufe0f", "film projector", []string{"film_projector"}, "7.0", false},
{"\U0001f39e\ufe0f", "film frames", []string{"film_strip"}, "7.0", false},
+ {"\U0001fac6", "fingerprint", []string{"fingerprint"}, "16.0", false},
{"\U0001f1eb\U0001f1ee", "flag: Finland", []string{"finland"}, "6.0", false},
{"\U0001f525", "fire", []string{"fire"}, "6.0", false},
{"\U0001f692", "fire engine", []string{"fire_engine"}, "6.0", false},
@@ -973,6 +981,7 @@ var GemojiData = Gemoji{
{"\U0001f91c\U0001f3fc", "right-facing fist: Medium-Light Skin Tone", []string{"fist_right_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f91c\U0001f3fd", "right-facing fist: Medium Skin Tone", []string{"fist_right_Medium_Skin_Tone"}, "12.0", false},
{"5\ufe0f\u20e3", "keycap: 5", []string{"five"}, "", false},
+ {"\U0001f1e8\U0001f1f6", "flag: Sark", []string{"flag_sark"}, "16.0", false},
{"\U0001f38f", "carp streamer", []string{"flags"}, "6.0", false},
{"\U0001f9a9", "flamingo", []string{"flamingo"}, "12.0", false},
{"\U0001f526", "flashlight", []string{"flashlight"}, "6.0", false},
@@ -1189,9 +1198,12 @@ var GemojiData = Gemoji{
{"\U0001f91d\U0001f3fc", "handshake: Medium-Light Skin Tone", []string{"handshake_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f91d\U0001f3fd", "handshake: Medium Skin Tone", []string{"handshake_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f4a9", "pile of poo", []string{"hankey", "poop", "shit"}, "6.0", false},
+ {"\U0001fa89", "harp", []string{"harp"}, "16.0", false},
{"#\ufe0f\u20e3", "keycap: #", []string{"hash"}, "", false},
{"\U0001f425", "front-facing baby chick", []string{"hatched_chick"}, "6.0", false},
{"\U0001f423", "hatching chick", []string{"hatching_chick"}, "6.0", false},
+ {"\U0001f642\u200d\u2194\ufe0f", "head shaking horizontally", []string{"head_shaking_horizontally"}, "15.1", false},
+ {"\U0001f642\u200d\u2195\ufe0f", "head shaking vertically", []string{"head_shaking_vertically"}, "15.1", false},
{"\U0001f3a7", "headphone", []string{"headphones"}, "6.0", false},
{"\U0001faa6", "headstone", []string{"headstone"}, "13.0", false},
{"\U0001f9d1\u200d\u2695\ufe0f", "health worker", []string{"health_worker"}, "12.1", true},
@@ -1380,6 +1392,7 @@ var GemojiData = Gemoji{
{"\u271d\ufe0f", "latin cross", []string{"latin_cross"}, "", false},
{"\U0001f1f1\U0001f1fb", "flag: Latvia", []string{"latvia"}, "6.0", false},
{"\U0001f606", "grinning squinting face", []string{"laughing", "satisfied", "laugh"}, "6.0", false},
+ {"\U0001fabe", "leafless tree", []string{"leafless_tree"}, "16.0", false},
{"\U0001f96c", "leafy green", []string{"leafy_green"}, "11.0", false},
{"\U0001f343", "leaf fluttering in wind", []string{"leaves"}, "6.0", false},
{"\U0001f1f1\U0001f1e7", "flag: Lebanon", []string{"lebanon"}, "6.0", false},
@@ -1417,6 +1430,7 @@ var GemojiData = Gemoji{
{"\U0001f1f1\U0001f1ee", "flag: Liechtenstein", []string{"liechtenstein"}, "6.0", false},
{"\U0001fa75", "light blue heart", []string{"light_blue_heart"}, "15.0", false},
{"\U0001f688", "light rail", []string{"light_rail"}, "6.0", false},
+ {"\U0001f34b\u200d\U0001f7e9", "lime", []string{"lime"}, "15.1", false},
{"\U0001f517", "link", []string{"link"}, "6.0", false},
{"\U0001f981", "lion", []string{"lion"}, "8.0", false},
{"\U0001f444", "mouth", []string{"lips"}, "6.0", false},
@@ -1594,12 +1608,24 @@ var GemojiData = Gemoji{
{"\U0001f468\U0001f3fe\u200d\U0001f9bd", "man in manual wheelchair: Medium-Dark Skin Tone", []string{"man_in_manual_wheelchair_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fc\u200d\U0001f9bd", "man in manual wheelchair: Medium-Light Skin Tone", []string{"man_in_manual_wheelchair_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fd\u200d\U0001f9bd", "man in manual wheelchair: Medium Skin Tone", []string{"man_in_manual_wheelchair_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\u200d\U0001f9bd\u200d\u27a1\ufe0f", "man in manual wheelchair facing right", []string{"man_in_manual_wheelchair_facing_right"}, "15.1", true},
+ {"\U0001f468\U0001f3ff\u200d\U0001f9bd\u200d\u27a1\ufe0f", "man in manual wheelchair facing right: Dark Skin Tone", []string{"man_in_manual_wheelchair_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fb\u200d\U0001f9bd\u200d\u27a1\ufe0f", "man in manual wheelchair facing right: Light Skin Tone", []string{"man_in_manual_wheelchair_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fe\u200d\U0001f9bd\u200d\u27a1\ufe0f", "man in manual wheelchair facing right: Medium-Dark Skin Tone", []string{"man_in_manual_wheelchair_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fc\u200d\U0001f9bd\u200d\u27a1\ufe0f", "man in manual wheelchair facing right: Medium-Light Skin Tone", []string{"man_in_manual_wheelchair_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fd\u200d\U0001f9bd\u200d\u27a1\ufe0f", "man in manual wheelchair facing right: Medium Skin Tone", []string{"man_in_manual_wheelchair_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f468\u200d\U0001f9bc", "man in motorized wheelchair", []string{"man_in_motorized_wheelchair"}, "12.0", true},
{"\U0001f468\U0001f3ff\u200d\U0001f9bc", "man in motorized wheelchair: Dark Skin Tone", []string{"man_in_motorized_wheelchair_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fb\u200d\U0001f9bc", "man in motorized wheelchair: Light Skin Tone", []string{"man_in_motorized_wheelchair_Light_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fe\u200d\U0001f9bc", "man in motorized wheelchair: Medium-Dark Skin Tone", []string{"man_in_motorized_wheelchair_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fc\u200d\U0001f9bc", "man in motorized wheelchair: Medium-Light Skin Tone", []string{"man_in_motorized_wheelchair_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fd\u200d\U0001f9bc", "man in motorized wheelchair: Medium Skin Tone", []string{"man_in_motorized_wheelchair_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\u200d\U0001f9bc\u200d\u27a1\ufe0f", "man in motorized wheelchair facing right", []string{"man_in_motorized_wheelchair_facing_right"}, "15.1", true},
+ {"\U0001f468\U0001f3ff\u200d\U0001f9bc\u200d\u27a1\ufe0f", "man in motorized wheelchair facing right: Dark Skin Tone", []string{"man_in_motorized_wheelchair_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fb\u200d\U0001f9bc\u200d\u27a1\ufe0f", "man in motorized wheelchair facing right: Light Skin Tone", []string{"man_in_motorized_wheelchair_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fe\u200d\U0001f9bc\u200d\u27a1\ufe0f", "man in motorized wheelchair facing right: Medium-Dark Skin Tone", []string{"man_in_motorized_wheelchair_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fc\u200d\U0001f9bc\u200d\u27a1\ufe0f", "man in motorized wheelchair facing right: Medium-Light Skin Tone", []string{"man_in_motorized_wheelchair_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fd\u200d\U0001f9bc\u200d\u27a1\ufe0f", "man in motorized wheelchair facing right: Medium Skin Tone", []string{"man_in_motorized_wheelchair_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f935\u200d\u2642\ufe0f", "man in tuxedo", []string{"man_in_tuxedo"}, "13.0", true},
{"\U0001f935\U0001f3ff\u200d\u2642\ufe0f", "man in tuxedo: Dark Skin Tone", []string{"man_in_tuxedo_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f935\U0001f3fb\u200d\u2642\ufe0f", "man in tuxedo: Light Skin Tone", []string{"man_in_tuxedo_Light_Skin_Tone"}, "12.0", false},
@@ -1618,6 +1644,12 @@ var GemojiData = Gemoji{
{"\U0001f939\U0001f3fe\u200d\u2642\ufe0f", "man juggling: Medium-Dark Skin Tone", []string{"man_juggling_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f939\U0001f3fc\u200d\u2642\ufe0f", "man juggling: Medium-Light Skin Tone", []string{"man_juggling_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f939\U0001f3fd\u200d\u2642\ufe0f", "man juggling: Medium Skin Tone", []string{"man_juggling_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man kneeling facing right", []string{"man_kneeling_facing_right"}, "15.1", true},
+ {"\U0001f9ce\U0001f3ff\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man kneeling facing right: Dark Skin Tone", []string{"man_kneeling_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fb\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man kneeling facing right: Light Skin Tone", []string{"man_kneeling_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fe\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man kneeling facing right: Medium-Dark Skin Tone", []string{"man_kneeling_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fc\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man kneeling facing right: Medium-Light Skin Tone", []string{"man_kneeling_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fd\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man kneeling facing right: Medium Skin Tone", []string{"man_kneeling_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f468\u200d\U0001f527", "man mechanic", []string{"man_mechanic"}, "", true},
{"\U0001f468\U0001f3ff\u200d\U0001f527", "man mechanic: Dark Skin Tone", []string{"man_mechanic_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fb\u200d\U0001f527", "man mechanic: Light Skin Tone", []string{"man_mechanic_Light_Skin_Tone"}, "12.0", false},
@@ -1648,6 +1680,12 @@ var GemojiData = Gemoji{
{"\U0001f93d\U0001f3fe\u200d\u2642\ufe0f", "man playing water polo: Medium-Dark Skin Tone", []string{"man_playing_water_polo_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f93d\U0001f3fc\u200d\u2642\ufe0f", "man playing water polo: Medium-Light Skin Tone", []string{"man_playing_water_polo_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f93d\U0001f3fd\u200d\u2642\ufe0f", "man playing water polo: Medium Skin Tone", []string{"man_playing_water_polo_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man running facing right", []string{"man_running_facing_right"}, "15.1", true},
+ {"\U0001f3c3\U0001f3ff\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man running facing right: Dark Skin Tone", []string{"man_running_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fb\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man running facing right: Light Skin Tone", []string{"man_running_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fe\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man running facing right: Medium-Dark Skin Tone", []string{"man_running_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fc\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man running facing right: Medium-Light Skin Tone", []string{"man_running_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fd\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man running facing right: Medium Skin Tone", []string{"man_running_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f468\u200d\U0001f52c", "man scientist", []string{"man_scientist"}, "", true},
{"\U0001f468\U0001f3ff\u200d\U0001f52c", "man scientist: Dark Skin Tone", []string{"man_scientist_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fb\u200d\U0001f52c", "man scientist: Light Skin Tone", []string{"man_scientist_Light_Skin_Tone"}, "12.0", false},
@@ -1684,6 +1722,12 @@ var GemojiData = Gemoji{
{"\U0001f468\U0001f3fe\u200d\U0001f4bb", "man technologist: Medium-Dark Skin Tone", []string{"man_technologist_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fc\u200d\U0001f4bb", "man technologist: Medium-Light Skin Tone", []string{"man_technologist_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f468\U0001f3fd\u200d\U0001f4bb", "man technologist: Medium Skin Tone", []string{"man_technologist_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man walking facing right", []string{"man_walking_facing_right"}, "15.1", true},
+ {"\U0001f6b6\U0001f3ff\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man walking facing right: Dark Skin Tone", []string{"man_walking_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fb\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man walking facing right: Light Skin Tone", []string{"man_walking_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fe\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man walking facing right: Medium-Dark Skin Tone", []string{"man_walking_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fc\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man walking facing right: Medium-Light Skin Tone", []string{"man_walking_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fd\u200d\u2642\ufe0f\u200d\u27a1\ufe0f", "man walking facing right: Medium Skin Tone", []string{"man_walking_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f472", "person with skullcap", []string{"man_with_gua_pi_mao"}, "6.0", true},
{"\U0001f472\U0001f3ff", "person with skullcap: Dark Skin Tone", []string{"man_with_gua_pi_mao_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f472\U0001f3fb", "person with skullcap: Light Skin Tone", []string{"man_with_gua_pi_mao_Light_Skin_Tone"}, "12.0", false},
@@ -1708,6 +1752,12 @@ var GemojiData = Gemoji{
{"\U0001f470\U0001f3fe\u200d\u2642\ufe0f", "man with veil: Medium-Dark Skin Tone", []string{"man_with_veil_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f470\U0001f3fc\u200d\u2642\ufe0f", "man with veil: Medium-Light Skin Tone", []string{"man_with_veil_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f470\U0001f3fd\u200d\u2642\ufe0f", "man with veil: Medium Skin Tone", []string{"man_with_veil_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\u200d\U0001f9af\u200d\u27a1\ufe0f", "man with white cane facing right", []string{"man_with_white_cane_facing_right"}, "15.1", true},
+ {"\U0001f468\U0001f3ff\u200d\U0001f9af\u200d\u27a1\ufe0f", "man with white cane facing right: Dark Skin Tone", []string{"man_with_white_cane_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fb\u200d\U0001f9af\u200d\u27a1\ufe0f", "man with white cane facing right: Light Skin Tone", []string{"man_with_white_cane_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fe\u200d\U0001f9af\u200d\u27a1\ufe0f", "man with white cane facing right: Medium-Dark Skin Tone", []string{"man_with_white_cane_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fc\u200d\U0001f9af\u200d\u27a1\ufe0f", "man with white cane facing right: Medium-Light Skin Tone", []string{"man_with_white_cane_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f468\U0001f3fd\u200d\U0001f9af\u200d\u27a1\ufe0f", "man with white cane facing right: Medium Skin Tone", []string{"man_with_white_cane_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f96d", "mango", []string{"mango"}, "11.0", false},
{"\U0001f45e", "man’s shoe", []string{"mans_shoe", "shoe"}, "6.0", false},
{"\U0001f570\ufe0f", "mantelpiece clock", []string{"mantelpiece_clock"}, "7.0", false},
@@ -1874,12 +1924,12 @@ var GemojiData = Gemoji{
{"\U0001f3b5", "musical note", []string{"musical_note"}, "6.0", false},
{"\U0001f3bc", "musical score", []string{"musical_score"}, "6.0", false},
{"\U0001f507", "muted speaker", []string{"mute"}, "6.0", false},
- {"\U0001f9d1\u200d\U0001f384", "mx claus", []string{"mx_claus"}, "13.0", true},
- {"\U0001f9d1\U0001f3ff\u200d\U0001f384", "mx claus: Dark Skin Tone", []string{"mx_claus_Dark_Skin_Tone"}, "12.0", false},
- {"\U0001f9d1\U0001f3fb\u200d\U0001f384", "mx claus: Light Skin Tone", []string{"mx_claus_Light_Skin_Tone"}, "12.0", false},
- {"\U0001f9d1\U0001f3fe\u200d\U0001f384", "mx claus: Medium-Dark Skin Tone", []string{"mx_claus_Medium-Dark_Skin_Tone"}, "12.0", false},
- {"\U0001f9d1\U0001f3fc\u200d\U0001f384", "mx claus: Medium-Light Skin Tone", []string{"mx_claus_Medium-Light_Skin_Tone"}, "12.0", false},
- {"\U0001f9d1\U0001f3fd\u200d\U0001f384", "mx claus: Medium Skin Tone", []string{"mx_claus_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\u200d\U0001f384", "Mx Claus", []string{"mx_claus"}, "13.0", true},
+ {"\U0001f9d1\U0001f3ff\u200d\U0001f384", "Mx Claus: Dark Skin Tone", []string{"mx_claus_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fb\u200d\U0001f384", "Mx Claus: Light Skin Tone", []string{"mx_claus_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fe\u200d\U0001f384", "Mx Claus: Medium-Dark Skin Tone", []string{"mx_claus_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fc\u200d\U0001f384", "Mx Claus: Medium-Light Skin Tone", []string{"mx_claus_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fd\u200d\U0001f384", "Mx Claus: Medium Skin Tone", []string{"mx_claus_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f1f2\U0001f1f2", "flag: Myanmar (Burma)", []string{"myanmar"}, "6.0", false},
{"\U0001f485", "nail polish", []string{"nail_care"}, "6.0", true},
{"\U0001f485\U0001f3ff", "nail polish: Dark Skin Tone", []string{"nail_care_Dark_Skin_Tone"}, "12.0", false},
@@ -2140,24 +2190,54 @@ var GemojiData = Gemoji{
{"\U0001f9d1\U0001f3fe\u200d\U0001f9bd", "person in manual wheelchair: Medium-Dark Skin Tone", []string{"person_in_manual_wheelchair_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fc\u200d\U0001f9bd", "person in manual wheelchair: Medium-Light Skin Tone", []string{"person_in_manual_wheelchair_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fd\u200d\U0001f9bd", "person in manual wheelchair: Medium Skin Tone", []string{"person_in_manual_wheelchair_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\u200d\U0001f9bd\u200d\u27a1\ufe0f", "person in manual wheelchair facing right", []string{"person_in_manual_wheelchair_facing_right"}, "15.1", true},
+ {"\U0001f9d1\U0001f3ff\u200d\U0001f9bd\u200d\u27a1\ufe0f", "person in manual wheelchair facing right: Dark Skin Tone", []string{"person_in_manual_wheelchair_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fb\u200d\U0001f9bd\u200d\u27a1\ufe0f", "person in manual wheelchair facing right: Light Skin Tone", []string{"person_in_manual_wheelchair_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fe\u200d\U0001f9bd\u200d\u27a1\ufe0f", "person in manual wheelchair facing right: Medium-Dark Skin Tone", []string{"person_in_manual_wheelchair_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fc\u200d\U0001f9bd\u200d\u27a1\ufe0f", "person in manual wheelchair facing right: Medium-Light Skin Tone", []string{"person_in_manual_wheelchair_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fd\u200d\U0001f9bd\u200d\u27a1\ufe0f", "person in manual wheelchair facing right: Medium Skin Tone", []string{"person_in_manual_wheelchair_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\u200d\U0001f9bc", "person in motorized wheelchair", []string{"person_in_motorized_wheelchair"}, "12.1", true},
{"\U0001f9d1\U0001f3ff\u200d\U0001f9bc", "person in motorized wheelchair: Dark Skin Tone", []string{"person_in_motorized_wheelchair_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fb\u200d\U0001f9bc", "person in motorized wheelchair: Light Skin Tone", []string{"person_in_motorized_wheelchair_Light_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fe\u200d\U0001f9bc", "person in motorized wheelchair: Medium-Dark Skin Tone", []string{"person_in_motorized_wheelchair_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fc\u200d\U0001f9bc", "person in motorized wheelchair: Medium-Light Skin Tone", []string{"person_in_motorized_wheelchair_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fd\u200d\U0001f9bc", "person in motorized wheelchair: Medium Skin Tone", []string{"person_in_motorized_wheelchair_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\u200d\U0001f9bc\u200d\u27a1\ufe0f", "person in motorized wheelchair facing right", []string{"person_in_motorized_wheelchair_facing_right"}, "15.1", true},
+ {"\U0001f9d1\U0001f3ff\u200d\U0001f9bc\u200d\u27a1\ufe0f", "person in motorized wheelchair facing right: Dark Skin Tone", []string{"person_in_motorized_wheelchair_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fb\u200d\U0001f9bc\u200d\u27a1\ufe0f", "person in motorized wheelchair facing right: Light Skin Tone", []string{"person_in_motorized_wheelchair_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fe\u200d\U0001f9bc\u200d\u27a1\ufe0f", "person in motorized wheelchair facing right: Medium-Dark Skin Tone", []string{"person_in_motorized_wheelchair_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fc\u200d\U0001f9bc\u200d\u27a1\ufe0f", "person in motorized wheelchair facing right: Medium-Light Skin Tone", []string{"person_in_motorized_wheelchair_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fd\u200d\U0001f9bc\u200d\u27a1\ufe0f", "person in motorized wheelchair facing right: Medium Skin Tone", []string{"person_in_motorized_wheelchair_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f935", "person in tuxedo", []string{"person_in_tuxedo"}, "9.0", true},
{"\U0001f935\U0001f3ff", "person in tuxedo: Dark Skin Tone", []string{"person_in_tuxedo_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f935\U0001f3fb", "person in tuxedo: Light Skin Tone", []string{"person_in_tuxedo_Light_Skin_Tone"}, "12.0", false},
{"\U0001f935\U0001f3fe", "person in tuxedo: Medium-Dark Skin Tone", []string{"person_in_tuxedo_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f935\U0001f3fc", "person in tuxedo: Medium-Light Skin Tone", []string{"person_in_tuxedo_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f935\U0001f3fd", "person in tuxedo: Medium Skin Tone", []string{"person_in_tuxedo_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\u200d\u27a1\ufe0f", "person kneeling facing right", []string{"person_kneeling_facing_right"}, "15.1", true},
+ {"\U0001f9ce\U0001f3ff\u200d\u27a1\ufe0f", "person kneeling facing right: Dark Skin Tone", []string{"person_kneeling_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fb\u200d\u27a1\ufe0f", "person kneeling facing right: Light Skin Tone", []string{"person_kneeling_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fe\u200d\u27a1\ufe0f", "person kneeling facing right: Medium-Dark Skin Tone", []string{"person_kneeling_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fc\u200d\u27a1\ufe0f", "person kneeling facing right: Medium-Light Skin Tone", []string{"person_kneeling_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fd\u200d\u27a1\ufe0f", "person kneeling facing right: Medium Skin Tone", []string{"person_kneeling_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\u200d\U0001f9b0", "person: red hair", []string{"person_red_hair"}, "12.1", true},
{"\U0001f9d1\U0001f3ff\u200d\U0001f9b0", "person: red hair: Dark Skin Tone", []string{"person_red_hair_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fb\u200d\U0001f9b0", "person: red hair: Light Skin Tone", []string{"person_red_hair_Light_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fe\u200d\U0001f9b0", "person: red hair: Medium-Dark Skin Tone", []string{"person_red_hair_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fc\u200d\U0001f9b0", "person: red hair: Medium-Light Skin Tone", []string{"person_red_hair_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fd\u200d\U0001f9b0", "person: red hair: Medium Skin Tone", []string{"person_red_hair_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\u200d\u27a1\ufe0f", "person running facing right", []string{"person_running_facing_right"}, "15.1", true},
+ {"\U0001f3c3\U0001f3ff\u200d\u27a1\ufe0f", "person running facing right: Dark Skin Tone", []string{"person_running_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fb\u200d\u27a1\ufe0f", "person running facing right: Light Skin Tone", []string{"person_running_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fe\u200d\u27a1\ufe0f", "person running facing right: Medium-Dark Skin Tone", []string{"person_running_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fc\u200d\u27a1\ufe0f", "person running facing right: Medium-Light Skin Tone", []string{"person_running_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fd\u200d\u27a1\ufe0f", "person running facing right: Medium Skin Tone", []string{"person_running_facing_right_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\u200d\u27a1\ufe0f", "person walking facing right", []string{"person_walking_facing_right"}, "15.1", true},
+ {"\U0001f6b6\U0001f3ff\u200d\u27a1\ufe0f", "person walking facing right: Dark Skin Tone", []string{"person_walking_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fb\u200d\u27a1\ufe0f", "person walking facing right: Light Skin Tone", []string{"person_walking_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fe\u200d\u27a1\ufe0f", "person walking facing right: Medium-Dark Skin Tone", []string{"person_walking_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fc\u200d\u27a1\ufe0f", "person walking facing right: Medium-Light Skin Tone", []string{"person_walking_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fd\u200d\u27a1\ufe0f", "person walking facing right: Medium Skin Tone", []string{"person_walking_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\u200d\U0001f9b3", "person: white hair", []string{"person_white_hair"}, "12.1", true},
{"\U0001f9d1\U0001f3ff\u200d\U0001f9b3", "person: white hair: Dark Skin Tone", []string{"person_white_hair_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f9d1\U0001f3fb\u200d\U0001f9b3", "person: white hair: Light Skin Tone", []string{"person_white_hair_Light_Skin_Tone"}, "12.0", false},
@@ -2188,9 +2268,16 @@ var GemojiData = Gemoji{
{"\U0001f470\U0001f3fe", "person with veil: Medium-Dark Skin Tone", []string{"person_with_veil_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f470\U0001f3fc", "person with veil: Medium-Light Skin Tone", []string{"person_with_veil_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f470\U0001f3fd", "person with veil: Medium Skin Tone", []string{"person_with_veil_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\u200d\U0001f9af\u200d\u27a1\ufe0f", "person with white cane facing right", []string{"person_with_white_cane_facing_right"}, "15.1", true},
+ {"\U0001f9d1\U0001f3ff\u200d\U0001f9af\u200d\u27a1\ufe0f", "person with white cane facing right: Dark Skin Tone", []string{"person_with_white_cane_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fb\u200d\U0001f9af\u200d\u27a1\ufe0f", "person with white cane facing right: Light Skin Tone", []string{"person_with_white_cane_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fe\u200d\U0001f9af\u200d\u27a1\ufe0f", "person with white cane facing right: Medium-Dark Skin Tone", []string{"person_with_white_cane_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fc\u200d\U0001f9af\u200d\u27a1\ufe0f", "person with white cane facing right: Medium-Light Skin Tone", []string{"person_with_white_cane_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9d1\U0001f3fd\u200d\U0001f9af\u200d\u27a1\ufe0f", "person with white cane facing right: Medium Skin Tone", []string{"person_with_white_cane_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f1f5\U0001f1ea", "flag: Peru", []string{"peru"}, "6.0", false},
{"\U0001f9eb", "petri dish", []string{"petri_dish"}, "11.0", false},
{"\U0001f1f5\U0001f1ed", "flag: Philippines", []string{"philippines"}, "6.0", false},
+ {"\U0001f426\u200d\U0001f525", "phoenix", []string{"phoenix"}, "15.1", false},
{"\u260e\ufe0f", "telephone", []string{"phone", "telephone"}, "", false},
{"\u26cf\ufe0f", "pick", []string{"pick"}, "5.2", false},
{"\U0001f6fb", "pickup truck", []string{"pickup_truck"}, "13.0", false},
@@ -2480,6 +2567,7 @@ var GemojiData = Gemoji{
{"\U0001f6fc", "roller skate", []string{"roller_skate"}, "13.0", false},
{"\U0001f1f7\U0001f1f4", "flag: Romania", []string{"romania"}, "6.0", false},
{"\U0001f413", "rooster", []string{"rooster"}, "6.0", false},
+ {"\U0001fadc", "root vegetable", []string{"root_vegetable"}, "16.0", false},
{"\U0001f339", "rose", []string{"rose"}, "6.0", false},
{"\U0001f3f5\ufe0f", "rosette", []string{"rosette"}, "7.0", false},
{"\U0001f6a8", "police car light", []string{"rotating_light"}, "6.0", false},
@@ -2613,6 +2701,7 @@ var GemojiData = Gemoji{
{"\U0001f6cd\ufe0f", "shopping bags", []string{"shopping"}, "7.0", false},
{"\U0001f6d2", "shopping cart", []string{"shopping_cart"}, "9.0", false},
{"\U0001fa73", "shorts", []string{"shorts"}, "12.0", false},
+ {"\U0001fa8f", "shovel", []string{"shovel"}, "16.0", false},
{"\U0001f6bf", "shower", []string{"shower"}, "6.0", false},
{"\U0001f990", "shrimp", []string{"shrimp"}, "9.0", false},
{"\U0001f937", "person shrugging", []string{"shrug"}, "11.0", true},
@@ -2711,6 +2800,7 @@ var GemojiData = Gemoji{
{"\U0001f578\ufe0f", "spider web", []string{"spider_web"}, "7.0", false},
{"\U0001f5d3\ufe0f", "spiral calendar", []string{"spiral_calendar"}, "7.0", false},
{"\U0001f5d2\ufe0f", "spiral notepad", []string{"spiral_notepad"}, "7.0", false},
+ {"\U0001fadf", "splatter", []string{"splatter"}, "16.0", false},
{"\U0001f9fd", "sponge", []string{"sponge"}, "11.0", false},
{"\U0001f944", "spoon", []string{"spoon"}, "9.0", false},
{"\U0001f991", "squid", []string{"squid"}, "9.0", false},
@@ -2945,7 +3035,7 @@ var GemojiData = Gemoji{
{"\U0001f51d", "TOP arrow", []string{"top"}, "6.0", false},
{"\U0001f3a9", "top hat", []string{"tophat"}, "6.0", false},
{"\U0001f32a\ufe0f", "tornado", []string{"tornado"}, "7.0", false},
- {"\U0001f1f9\U0001f1f7", "flag: Turkey", []string{"tr"}, "8.0", false},
+ {"\U0001f1f9\U0001f1f7", "flag: Türkiye", []string{"tr"}, "8.0", false},
{"\U0001f5b2\ufe0f", "trackball", []string{"trackball"}, "7.0", false},
{"\U0001f69c", "tractor", []string{"tractor"}, "6.0", false},
{"\U0001f6a5", "horizontal traffic light", []string{"traffic_light"}, "6.0", false},
@@ -3247,12 +3337,24 @@ var GemojiData = Gemoji{
{"\U0001f469\U0001f3fe\u200d\U0001f9bd", "woman in manual wheelchair: Medium-Dark Skin Tone", []string{"woman_in_manual_wheelchair_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fc\u200d\U0001f9bd", "woman in manual wheelchair: Medium-Light Skin Tone", []string{"woman_in_manual_wheelchair_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fd\u200d\U0001f9bd", "woman in manual wheelchair: Medium Skin Tone", []string{"woman_in_manual_wheelchair_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\u200d\U0001f9bd\u200d\u27a1\ufe0f", "woman in manual wheelchair facing right", []string{"woman_in_manual_wheelchair_facing_right"}, "15.1", true},
+ {"\U0001f469\U0001f3ff\u200d\U0001f9bd\u200d\u27a1\ufe0f", "woman in manual wheelchair facing right: Dark Skin Tone", []string{"woman_in_manual_wheelchair_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fb\u200d\U0001f9bd\u200d\u27a1\ufe0f", "woman in manual wheelchair facing right: Light Skin Tone", []string{"woman_in_manual_wheelchair_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fe\u200d\U0001f9bd\u200d\u27a1\ufe0f", "woman in manual wheelchair facing right: Medium-Dark Skin Tone", []string{"woman_in_manual_wheelchair_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fc\u200d\U0001f9bd\u200d\u27a1\ufe0f", "woman in manual wheelchair facing right: Medium-Light Skin Tone", []string{"woman_in_manual_wheelchair_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fd\u200d\U0001f9bd\u200d\u27a1\ufe0f", "woman in manual wheelchair facing right: Medium Skin Tone", []string{"woman_in_manual_wheelchair_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f469\u200d\U0001f9bc", "woman in motorized wheelchair", []string{"woman_in_motorized_wheelchair"}, "12.0", true},
{"\U0001f469\U0001f3ff\u200d\U0001f9bc", "woman in motorized wheelchair: Dark Skin Tone", []string{"woman_in_motorized_wheelchair_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fb\u200d\U0001f9bc", "woman in motorized wheelchair: Light Skin Tone", []string{"woman_in_motorized_wheelchair_Light_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fe\u200d\U0001f9bc", "woman in motorized wheelchair: Medium-Dark Skin Tone", []string{"woman_in_motorized_wheelchair_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fc\u200d\U0001f9bc", "woman in motorized wheelchair: Medium-Light Skin Tone", []string{"woman_in_motorized_wheelchair_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fd\u200d\U0001f9bc", "woman in motorized wheelchair: Medium Skin Tone", []string{"woman_in_motorized_wheelchair_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\u200d\U0001f9bc\u200d\u27a1\ufe0f", "woman in motorized wheelchair facing right", []string{"woman_in_motorized_wheelchair_facing_right"}, "15.1", true},
+ {"\U0001f469\U0001f3ff\u200d\U0001f9bc\u200d\u27a1\ufe0f", "woman in motorized wheelchair facing right: Dark Skin Tone", []string{"woman_in_motorized_wheelchair_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fb\u200d\U0001f9bc\u200d\u27a1\ufe0f", "woman in motorized wheelchair facing right: Light Skin Tone", []string{"woman_in_motorized_wheelchair_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fe\u200d\U0001f9bc\u200d\u27a1\ufe0f", "woman in motorized wheelchair facing right: Medium-Dark Skin Tone", []string{"woman_in_motorized_wheelchair_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fc\u200d\U0001f9bc\u200d\u27a1\ufe0f", "woman in motorized wheelchair facing right: Medium-Light Skin Tone", []string{"woman_in_motorized_wheelchair_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fd\u200d\U0001f9bc\u200d\u27a1\ufe0f", "woman in motorized wheelchair facing right: Medium Skin Tone", []string{"woman_in_motorized_wheelchair_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f935\u200d\u2640\ufe0f", "woman in tuxedo", []string{"woman_in_tuxedo"}, "13.0", true},
{"\U0001f935\U0001f3ff\u200d\u2640\ufe0f", "woman in tuxedo: Dark Skin Tone", []string{"woman_in_tuxedo_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f935\U0001f3fb\u200d\u2640\ufe0f", "woman in tuxedo: Light Skin Tone", []string{"woman_in_tuxedo_Light_Skin_Tone"}, "12.0", false},
@@ -3271,6 +3373,12 @@ var GemojiData = Gemoji{
{"\U0001f939\U0001f3fe\u200d\u2640\ufe0f", "woman juggling: Medium-Dark Skin Tone", []string{"woman_juggling_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f939\U0001f3fc\u200d\u2640\ufe0f", "woman juggling: Medium-Light Skin Tone", []string{"woman_juggling_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f939\U0001f3fd\u200d\u2640\ufe0f", "woman juggling: Medium Skin Tone", []string{"woman_juggling_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman kneeling facing right", []string{"woman_kneeling_facing_right"}, "15.1", true},
+ {"\U0001f9ce\U0001f3ff\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman kneeling facing right: Dark Skin Tone", []string{"woman_kneeling_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fb\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman kneeling facing right: Light Skin Tone", []string{"woman_kneeling_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fe\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman kneeling facing right: Medium-Dark Skin Tone", []string{"woman_kneeling_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fc\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman kneeling facing right: Medium-Light Skin Tone", []string{"woman_kneeling_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f9ce\U0001f3fd\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman kneeling facing right: Medium Skin Tone", []string{"woman_kneeling_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f469\u200d\U0001f527", "woman mechanic", []string{"woman_mechanic"}, "", true},
{"\U0001f469\U0001f3ff\u200d\U0001f527", "woman mechanic: Dark Skin Tone", []string{"woman_mechanic_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fb\u200d\U0001f527", "woman mechanic: Light Skin Tone", []string{"woman_mechanic_Light_Skin_Tone"}, "12.0", false},
@@ -3301,6 +3409,12 @@ var GemojiData = Gemoji{
{"\U0001f93d\U0001f3fe\u200d\u2640\ufe0f", "woman playing water polo: Medium-Dark Skin Tone", []string{"woman_playing_water_polo_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f93d\U0001f3fc\u200d\u2640\ufe0f", "woman playing water polo: Medium-Light Skin Tone", []string{"woman_playing_water_polo_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f93d\U0001f3fd\u200d\u2640\ufe0f", "woman playing water polo: Medium Skin Tone", []string{"woman_playing_water_polo_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman running facing right", []string{"woman_running_facing_right"}, "15.1", true},
+ {"\U0001f3c3\U0001f3ff\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman running facing right: Dark Skin Tone", []string{"woman_running_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fb\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman running facing right: Light Skin Tone", []string{"woman_running_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fe\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman running facing right: Medium-Dark Skin Tone", []string{"woman_running_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fc\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman running facing right: Medium-Light Skin Tone", []string{"woman_running_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f3c3\U0001f3fd\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman running facing right: Medium Skin Tone", []string{"woman_running_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f469\u200d\U0001f52c", "woman scientist", []string{"woman_scientist"}, "", true},
{"\U0001f469\U0001f3ff\u200d\U0001f52c", "woman scientist: Dark Skin Tone", []string{"woman_scientist_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fb\u200d\U0001f52c", "woman scientist: Light Skin Tone", []string{"woman_scientist_Light_Skin_Tone"}, "12.0", false},
@@ -3337,6 +3451,12 @@ var GemojiData = Gemoji{
{"\U0001f469\U0001f3fe\u200d\U0001f4bb", "woman technologist: Medium-Dark Skin Tone", []string{"woman_technologist_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fc\u200d\U0001f4bb", "woman technologist: Medium-Light Skin Tone", []string{"woman_technologist_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f469\U0001f3fd\u200d\U0001f4bb", "woman technologist: Medium Skin Tone", []string{"woman_technologist_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman walking facing right", []string{"woman_walking_facing_right"}, "15.1", true},
+ {"\U0001f6b6\U0001f3ff\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman walking facing right: Dark Skin Tone", []string{"woman_walking_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fb\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman walking facing right: Light Skin Tone", []string{"woman_walking_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fe\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman walking facing right: Medium-Dark Skin Tone", []string{"woman_walking_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fc\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman walking facing right: Medium-Light Skin Tone", []string{"woman_walking_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f6b6\U0001f3fd\u200d\u2640\ufe0f\u200d\u27a1\ufe0f", "woman walking facing right: Medium Skin Tone", []string{"woman_walking_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f9d5", "woman with headscarf", []string{"woman_with_headscarf"}, "11.0", true},
{"\U0001f9d5\U0001f3ff", "woman with headscarf: Dark Skin Tone", []string{"woman_with_headscarf_Dark_Skin_Tone"}, "12.0", false},
{"\U0001f9d5\U0001f3fb", "woman with headscarf: Light Skin Tone", []string{"woman_with_headscarf_Light_Skin_Tone"}, "12.0", false},
@@ -3361,6 +3481,12 @@ var GemojiData = Gemoji{
{"\U0001f470\U0001f3fe\u200d\u2640\ufe0f", "woman with veil: Medium-Dark Skin Tone", []string{"woman_with_veil_Medium-Dark_Skin_Tone"}, "12.0", false},
{"\U0001f470\U0001f3fc\u200d\u2640\ufe0f", "woman with veil: Medium-Light Skin Tone", []string{"woman_with_veil_Medium-Light_Skin_Tone"}, "12.0", false},
{"\U0001f470\U0001f3fd\u200d\u2640\ufe0f", "woman with veil: Medium Skin Tone", []string{"woman_with_veil_Medium_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\u200d\U0001f9af\u200d\u27a1\ufe0f", "woman with white cane facing right", []string{"woman_with_white_cane_facing_right"}, "15.1", true},
+ {"\U0001f469\U0001f3ff\u200d\U0001f9af\u200d\u27a1\ufe0f", "woman with white cane facing right: Dark Skin Tone", []string{"woman_with_white_cane_facing_right_Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fb\u200d\U0001f9af\u200d\u27a1\ufe0f", "woman with white cane facing right: Light Skin Tone", []string{"woman_with_white_cane_facing_right_Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fe\u200d\U0001f9af\u200d\u27a1\ufe0f", "woman with white cane facing right: Medium-Dark Skin Tone", []string{"woman_with_white_cane_facing_right_Medium-Dark_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fc\u200d\U0001f9af\u200d\u27a1\ufe0f", "woman with white cane facing right: Medium-Light Skin Tone", []string{"woman_with_white_cane_facing_right_Medium-Light_Skin_Tone"}, "12.0", false},
+ {"\U0001f469\U0001f3fd\u200d\U0001f9af\u200d\u27a1\ufe0f", "woman with white cane facing right: Medium Skin Tone", []string{"woman_with_white_cane_facing_right_Medium_Skin_Tone"}, "12.0", false},
{"\U0001f45a", "woman’s clothes", []string{"womans_clothes"}, "6.0", false},
{"\U0001f452", "woman’s hat", []string{"womans_hat"}, "6.0", false},
{"\U0001f93c\u200d\u2640\ufe0f", "women wrestling", []string{"women_wrestling"}, "9.0", false},
diff --git a/modules/eventsource/manager_run.go b/modules/eventsource/manager_run.go
index f66dc78c7e..4a42224dda 100644
--- a/modules/eventsource/manager_run.go
+++ b/modules/eventsource/manager_run.go
@@ -9,6 +9,7 @@ import (
activities_model "code.gitea.io/gitea/models/activities"
issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
@@ -91,7 +92,13 @@ loop:
}
for _, userStopwatches := range usersStopwatches {
- apiSWs, err := convert.ToStopWatches(ctx, userStopwatches.StopWatches)
+ u, err := user_model.GetUserByID(ctx, userStopwatches.UserID)
+ if err != nil {
+ log.Error("Unable to get user %d: %v", userStopwatches.UserID, err)
+ continue
+ }
+
+ apiSWs, err := convert.ToStopWatches(ctx, u, userStopwatches.StopWatches)
if err != nil {
if !issues_model.IsErrIssueNotExist(err) {
log.Error("Unable to APIFormat stopwatches: %v", err)
diff --git a/modules/fileicon/render.go b/modules/fileicon/render.go
index 6b2fcfa81e..5bf2a3a02e 100644
--- a/modules/fileicon/render.go
+++ b/modules/fileicon/render.go
@@ -34,7 +34,13 @@ func (p *RenderedIconPool) RenderToHTML() template.HTML {
}
func RenderEntryIconHTML(renderedIconPool *RenderedIconPool, entry *EntryInfo) template.HTML {
- if setting.UI.FileIconTheme == "material" {
+ // Use folder theme for directories and symlinks to directories
+ theme := setting.UI.FileIconTheme
+ if entry.EntryMode.IsDir() || (entry.EntryMode.IsLink() && entry.SymlinkToMode.IsDir()) {
+ theme = setting.UI.FolderIconTheme
+ }
+
+ if theme == "material" {
return DefaultMaterialIconProvider().EntryIconHTML(renderedIconPool, entry)
}
return BasicEntryIconHTML(entry)
diff --git a/modules/fileicon/render_test.go b/modules/fileicon/render_test.go
new file mode 100644
index 0000000000..d9998f3f4c
--- /dev/null
+++ b/modules/fileicon/render_test.go
@@ -0,0 +1,75 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package fileicon_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/fileicon"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRenderEntryIconHTML_WithDifferentThemes(t *testing.T) {
+ // Test that folder icons use the folder theme
+ t.Run("FolderUsesBasicTheme", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.UI.FileIconTheme, "material")()
+ defer test.MockVariableValue(&setting.UI.FolderIconTheme, "basic")()
+
+ folderEntry := &fileicon.EntryInfo{
+ BaseName: "testfolder",
+ EntryMode: git.EntryModeTree,
+ }
+
+ html := fileicon.RenderEntryIconHTML(nil, folderEntry)
+ // Basic theme renders octicon classes
+ assert.Contains(t, string(html), "octicon-file-directory-fill")
+ })
+
+ t.Run("FileUsesMaterialTheme", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.UI.FileIconTheme, "material")()
+ defer test.MockVariableValue(&setting.UI.FolderIconTheme, "basic")()
+
+ fileEntry := &fileicon.EntryInfo{
+ BaseName: "test.js",
+ EntryMode: git.EntryModeBlob,
+ }
+
+ html := fileicon.RenderEntryIconHTML(nil, fileEntry)
+ // Material theme for files renders material icons
+ assert.Contains(t, string(html), "svg-mfi-")
+ })
+
+ t.Run("SymlinkToFolderUsesBasicTheme", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.UI.FileIconTheme, "material")()
+ defer test.MockVariableValue(&setting.UI.FolderIconTheme, "basic")()
+
+ symlinkEntry := &fileicon.EntryInfo{
+ BaseName: "link",
+ EntryMode: git.EntryModeSymlink,
+ SymlinkToMode: git.EntryModeTree,
+ }
+
+ html := fileicon.RenderEntryIconHTML(nil, symlinkEntry)
+ // Symlinks to folders should use folder theme
+ assert.Contains(t, string(html), "octicon-file-directory-symlink")
+ })
+
+ t.Run("BothMaterialTheme", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.UI.FileIconTheme, "material")()
+ defer test.MockVariableValue(&setting.UI.FolderIconTheme, "material")()
+
+ folderEntry := &fileicon.EntryInfo{
+ BaseName: "testfolder",
+ EntryMode: git.EntryModeTree,
+ }
+
+ html := fileicon.RenderEntryIconHTML(nil, folderEntry)
+ // Material theme for folders renders material folder icons
+ assert.Contains(t, string(html), "svg-mfi-")
+ })
+}
diff --git a/modules/git/attribute/batch.go b/modules/git/attribute/batch.go
index 27befdfa25..b1e6387ade 100644
--- a/modules/git/attribute/batch.go
+++ b/modules/git/attribute/batch.go
@@ -7,7 +7,7 @@ import (
"bytes"
"context"
"fmt"
- "os"
+ "io"
"path/filepath"
"time"
@@ -20,7 +20,7 @@ import (
type BatchChecker struct {
attributesNum int
repo *git.Repository
- stdinWriter *os.File
+ stdinWriter io.WriteCloser
stdOut *nulSeparatedAttributeWriter
ctx context.Context
cancel context.CancelFunc
@@ -60,10 +60,7 @@ func NewBatchChecker(repo *git.Repository, treeish string, attributes []string)
},
}
- stdinReader, stdinWriter, err := os.Pipe()
- if err != nil {
- return nil, err
- }
+ stdinWriter, stdinWriterClose := cmd.MakeStdinPipe()
checker.stdinWriter = stdinWriter
lw := new(nulSeparatedAttributeWriter)
@@ -71,23 +68,19 @@ func NewBatchChecker(repo *git.Repository, treeish string, attributes []string)
lw.closed = make(chan struct{})
checker.stdOut = lw
- go func() {
- defer func() {
- _ = stdinReader.Close()
- _ = lw.Close()
- }()
- stdErr := new(bytes.Buffer)
- err := cmd.WithEnv(envs).
- WithDir(repo.Path).
- WithStdin(stdinReader).
- WithStdout(lw).
- WithStderr(stdErr).
- Run(ctx)
+ cmd.WithEnv(envs).
+ WithDir(repo.Path).
+ WithStdoutCopy(lw)
- if err != nil && !git.IsErrCanceledOrKilled(err) {
+ go func() {
+ defer stdinWriterClose()
+ defer checker.cancel()
+ defer lw.Close()
+
+ err := cmd.RunWithStderr(ctx)
+ if err != nil && !gitcmd.IsErrorCanceledOrKilled(err) {
log.Error("Attribute checker for commit %s exits with error: %v", treeish, err)
}
- checker.cancel()
}()
return checker, nil
diff --git a/modules/git/attribute/checker.go b/modules/git/attribute/checker.go
index 49c0eb90ef..3eea31e813 100644
--- a/modules/git/attribute/checker.go
+++ b/modules/git/attribute/checker.go
@@ -68,18 +68,14 @@ func CheckAttributes(ctx context.Context, gitRepo *git.Repository, treeish strin
}
defer cancel()
- stdOut := new(bytes.Buffer)
- stdErr := new(bytes.Buffer)
-
- if err := cmd.WithEnv(append(os.Environ(), envs...)).
+ stdout, _, err := cmd.WithEnv(append(os.Environ(), envs...)).
WithDir(gitRepo.Path).
- WithStdout(stdOut).
- WithStderr(stdErr).
- Run(ctx); err != nil {
- return nil, fmt.Errorf("failed to run check-attr: %w\n%s\n%s", err, stdOut.String(), stdErr.String())
+ RunStdBytes(ctx)
+ if err != nil {
+ return nil, fmt.Errorf("failed to run check-attr: %w", err)
}
- fields := bytes.Split(stdOut.Bytes(), []byte{'\000'})
+ fields := bytes.Split(stdout, []byte{'\000'})
if len(fields)%3 != 1 {
return nil, errors.New("wrong number of fields in return from check-attr")
}
diff --git a/modules/git/batch.go b/modules/git/batch.go
deleted file mode 100644
index f9e1748b54..0000000000
--- a/modules/git/batch.go
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright 2024 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package git
-
-import (
- "bufio"
- "context"
-)
-
-type Batch struct {
- cancel context.CancelFunc
- Reader *bufio.Reader
- Writer WriteCloserError
-}
-
-// NewBatch creates a new batch for the given repository, the Close must be invoked before release the batch
-func NewBatch(ctx context.Context, repoPath string) (*Batch, error) {
- // Now because of some insanity with git cat-file not immediately failing if not run in a valid git directory we need to run git rev-parse first!
- if err := ensureValidGitRepository(ctx, repoPath); err != nil {
- return nil, err
- }
-
- var batch Batch
- batch.Writer, batch.Reader, batch.cancel = catFileBatch(ctx, repoPath)
- return &batch, nil
-}
-
-func NewBatchCheck(ctx context.Context, repoPath string) (*Batch, error) {
- // Now because of some insanity with git cat-file not immediately failing if not run in a valid git directory we need to run git rev-parse first!
- if err := ensureValidGitRepository(ctx, repoPath); err != nil {
- return nil, err
- }
-
- var check Batch
- check.Writer, check.Reader, check.cancel = catFileBatchCheck(ctx, repoPath)
- return &check, nil
-}
-
-func (b *Batch) Close() {
- if b.cancel != nil {
- b.cancel()
- b.Reader = nil
- b.Writer = nil
- b.cancel = nil
- }
-}
diff --git a/modules/git/batch_reader.go b/modules/git/batch_reader.go
deleted file mode 100644
index b5cec130d5..0000000000
--- a/modules/git/batch_reader.go
+++ /dev/null
@@ -1,324 +0,0 @@
-// Copyright 2020 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package git
-
-import (
- "bufio"
- "bytes"
- "context"
- "io"
- "math"
- "strconv"
- "strings"
-
- "code.gitea.io/gitea/modules/git/gitcmd"
- "code.gitea.io/gitea/modules/log"
-
- "github.com/djherbis/buffer"
- "github.com/djherbis/nio/v3"
-)
-
-// WriteCloserError wraps an io.WriteCloser with an additional CloseWithError function
-type WriteCloserError interface {
- io.WriteCloser
- CloseWithError(err error) error
-}
-
-// ensureValidGitRepository runs git rev-parse in the repository path - thus ensuring that the repository is a valid repository.
-// Run before opening git cat-file.
-// This is needed otherwise the git cat-file will hang for invalid repositories.
-func ensureValidGitRepository(ctx context.Context, repoPath string) error {
- stderr := strings.Builder{}
- err := gitcmd.NewCommand("rev-parse").
- WithDir(repoPath).
- WithStderr(&stderr).
- Run(ctx)
- if err != nil {
- return gitcmd.ConcatenateError(err, (&stderr).String())
- }
- return nil
-}
-
-// catFileBatchCheck opens git cat-file --batch-check in the provided repo and returns a stdin pipe, a stdout reader and cancel function
-func catFileBatchCheck(ctx context.Context, repoPath string) (WriteCloserError, *bufio.Reader, func()) {
- batchStdinReader, batchStdinWriter := io.Pipe()
- batchStdoutReader, batchStdoutWriter := io.Pipe()
- ctx, ctxCancel := context.WithCancel(ctx)
- closed := make(chan struct{})
- cancel := func() {
- ctxCancel()
- _ = batchStdoutReader.Close()
- _ = batchStdinWriter.Close()
- <-closed
- }
-
- // Ensure cancel is called as soon as the provided context is cancelled
- go func() {
- <-ctx.Done()
- cancel()
- }()
-
- go func() {
- stderr := strings.Builder{}
- err := gitcmd.NewCommand("cat-file", "--batch-check").
- WithDir(repoPath).
- WithStdin(batchStdinReader).
- WithStdout(batchStdoutWriter).
- WithStderr(&stderr).
- WithUseContextTimeout(true).
- Run(ctx)
- if err != nil {
- _ = batchStdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
- _ = batchStdinReader.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
- } else {
- _ = batchStdoutWriter.Close()
- _ = batchStdinReader.Close()
- }
- close(closed)
- }()
-
- // For simplicities sake we'll use a buffered reader to read from the cat-file --batch-check
- batchReader := bufio.NewReader(batchStdoutReader)
-
- return batchStdinWriter, batchReader, cancel
-}
-
-// catFileBatch opens git cat-file --batch in the provided repo and returns a stdin pipe, a stdout reader and cancel function
-func catFileBatch(ctx context.Context, repoPath string) (WriteCloserError, *bufio.Reader, func()) {
- // We often want to feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary.
- // so let's create a batch stdin and stdout
- batchStdinReader, batchStdinWriter := io.Pipe()
- batchStdoutReader, batchStdoutWriter := nio.Pipe(buffer.New(32 * 1024))
- ctx, ctxCancel := context.WithCancel(ctx)
- closed := make(chan struct{})
- cancel := func() {
- ctxCancel()
- _ = batchStdinWriter.Close()
- _ = batchStdoutReader.Close()
- <-closed
- }
-
- // Ensure cancel is called as soon as the provided context is cancelled
- go func() {
- <-ctx.Done()
- cancel()
- }()
-
- go func() {
- stderr := strings.Builder{}
- err := gitcmd.NewCommand("cat-file", "--batch").
- WithDir(repoPath).
- WithStdin(batchStdinReader).
- WithStdout(batchStdoutWriter).
- WithStderr(&stderr).
- WithUseContextTimeout(true).
- Run(ctx)
- if err != nil {
- _ = batchStdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
- _ = batchStdinReader.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
- } else {
- _ = batchStdoutWriter.Close()
- _ = batchStdinReader.Close()
- }
- close(closed)
- }()
-
- // For simplicities sake we'll us a buffered reader to read from the cat-file --batch
- batchReader := bufio.NewReaderSize(batchStdoutReader, 32*1024)
-
- return batchStdinWriter, batchReader, cancel
-}
-
-// ReadBatchLine reads the header line from cat-file --batch
-// We expect: SP SP LF
-// then leaving the rest of the stream " LF" to be read
-func ReadBatchLine(rd *bufio.Reader) (sha []byte, typ string, size int64, err error) {
- typ, err = rd.ReadString('\n')
- if err != nil {
- return sha, typ, size, err
- }
- if len(typ) == 1 {
- typ, err = rd.ReadString('\n')
- if err != nil {
- return sha, typ, size, err
- }
- }
- idx := strings.IndexByte(typ, ' ')
- if idx < 0 {
- log.Debug("missing space typ: %s", typ)
- return sha, typ, size, ErrNotExist{ID: string(sha)}
- }
- sha = []byte(typ[:idx])
- typ = typ[idx+1:]
-
- idx = strings.IndexByte(typ, ' ')
- if idx < 0 {
- return sha, typ, size, ErrNotExist{ID: string(sha)}
- }
-
- sizeStr := typ[idx+1 : len(typ)-1]
- typ = typ[:idx]
-
- size, err = strconv.ParseInt(sizeStr, 10, 64)
- return sha, typ, size, err
-}
-
-// ReadTagObjectID reads a tag object ID hash from a cat-file --batch stream, throwing away the rest of the stream.
-func ReadTagObjectID(rd *bufio.Reader, size int64) (string, error) {
- var id string
- var n int64
-headerLoop:
- for {
- line, err := rd.ReadBytes('\n')
- if err != nil {
- return "", err
- }
- n += int64(len(line))
- idx := bytes.Index(line, []byte{' '})
- if idx < 0 {
- continue
- }
-
- if string(line[:idx]) == "object" {
- id = string(line[idx+1 : len(line)-1])
- break headerLoop
- }
- }
-
- // Discard the rest of the tag
- return id, DiscardFull(rd, size-n+1)
-}
-
-// ReadTreeID reads a tree ID from a cat-file --batch stream, throwing away the rest of the stream.
-func ReadTreeID(rd *bufio.Reader, size int64) (string, error) {
- var id string
- var n int64
-headerLoop:
- for {
- line, err := rd.ReadBytes('\n')
- if err != nil {
- return "", err
- }
- n += int64(len(line))
- idx := bytes.Index(line, []byte{' '})
- if idx < 0 {
- continue
- }
-
- if string(line[:idx]) == "tree" {
- id = string(line[idx+1 : len(line)-1])
- break headerLoop
- }
- }
-
- // Discard the rest of the commit
- return id, DiscardFull(rd, size-n+1)
-}
-
-// git tree files are a list:
-// SP NUL
-//
-// Unfortunately this 20-byte notation is somewhat in conflict to all other git tools
-// Therefore we need some method to convert these binary hashes to hex hashes
-
-// constant hextable to help quickly convert between binary and hex representation
-const hextable = "0123456789abcdef"
-
-// BinToHexHeash converts a binary Hash into a hex encoded one. Input and output can be the
-// same byte slice to support in place conversion without allocations.
-// This is at least 100x quicker that hex.EncodeToString
-func BinToHex(objectFormat ObjectFormat, sha, out []byte) []byte {
- for i := objectFormat.FullLength()/2 - 1; i >= 0; i-- {
- v := sha[i]
- vhi, vlo := v>>4, v&0x0f
- shi, slo := hextable[vhi], hextable[vlo]
- out[i*2], out[i*2+1] = shi, slo
- }
- return out
-}
-
-// ParseCatFileTreeLine reads an entry from a tree in a cat-file --batch stream
-// This carefully avoids allocations - except where fnameBuf is too small.
-// It is recommended therefore to pass in an fnameBuf large enough to avoid almost all allocations
-//
-// Each line is composed of:
-// SP NUL
-//
-// We don't attempt to convert the raw HASH to save a lot of time
-func ParseCatFileTreeLine(objectFormat ObjectFormat, rd *bufio.Reader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) {
- var readBytes []byte
-
- // Read the Mode & fname
- readBytes, err = rd.ReadSlice('\x00')
- if err != nil {
- return mode, fname, sha, n, err
- }
- idx := bytes.IndexByte(readBytes, ' ')
- if idx < 0 {
- log.Debug("missing space in readBytes ParseCatFileTreeLine: %s", readBytes)
- return mode, fname, sha, n, &ErrNotExist{}
- }
-
- n += idx + 1
- copy(modeBuf, readBytes[:idx])
- if len(modeBuf) >= idx {
- modeBuf = modeBuf[:idx]
- } else {
- modeBuf = append(modeBuf, readBytes[len(modeBuf):idx]...)
- }
- mode = modeBuf
-
- readBytes = readBytes[idx+1:]
-
- // Deal with the fname
- copy(fnameBuf, readBytes)
- if len(fnameBuf) > len(readBytes) {
- fnameBuf = fnameBuf[:len(readBytes)]
- } else {
- fnameBuf = append(fnameBuf, readBytes[len(fnameBuf):]...)
- }
- for err == bufio.ErrBufferFull {
- readBytes, err = rd.ReadSlice('\x00')
- fnameBuf = append(fnameBuf, readBytes...)
- }
- n += len(fnameBuf)
- if err != nil {
- return mode, fname, sha, n, err
- }
- fnameBuf = fnameBuf[:len(fnameBuf)-1]
- fname = fnameBuf
-
- // Deal with the binary hash
- idx = 0
- length := objectFormat.FullLength() / 2
- for idx < length {
- var read int
- read, err = rd.Read(shaBuf[idx:length])
- n += read
- if err != nil {
- return mode, fname, sha, n, err
- }
- idx += read
- }
- sha = shaBuf
- return mode, fname, sha, n, err
-}
-
-func DiscardFull(rd *bufio.Reader, discard int64) error {
- if discard > math.MaxInt32 {
- n, err := rd.Discard(math.MaxInt32)
- discard -= int64(n)
- if err != nil {
- return err
- }
- }
- for discard > 0 {
- n, err := rd.Discard(int(discard))
- discard -= int64(n)
- if err != nil {
- return err
- }
- }
- return nil
-}
diff --git a/modules/git/blob_gogit.go b/modules/git/blob_gogit.go
index 8c79c067c1..7cf622d180 100644
--- a/modules/git/blob_gogit.go
+++ b/modules/git/blob_gogit.go
@@ -9,24 +9,38 @@ package git
import (
"io"
+ "code.gitea.io/gitea/modules/log"
+
"github.com/go-git/go-git/v5/plumbing"
)
// Blob represents a Git object.
type Blob struct {
- ID ObjectID
+ ID ObjectID
+ repo *Repository
+ name string
+}
- gogitEncodedObj plumbing.EncodedObject
- name string
+func (b *Blob) gogitEncodedObj() (plumbing.EncodedObject, error) {
+ return b.repo.gogitRepo.Storer.EncodedObject(plumbing.AnyObject, plumbing.Hash(b.ID.RawValue()))
}
// DataAsync gets a ReadCloser for the contents of a blob without reading it all.
// Calling the Close function on the result will discard all unread output.
func (b *Blob) DataAsync() (io.ReadCloser, error) {
- return b.gogitEncodedObj.Reader()
+ obj, err := b.gogitEncodedObj()
+ if err != nil {
+ return nil, err
+ }
+ return obj.Reader()
}
// Size returns the uncompressed size of the blob
func (b *Blob) Size() int64 {
- return b.gogitEncodedObj.Size()
+ obj, err := b.gogitEncodedObj()
+ if err != nil {
+ log.Error("Error getting gogit encoded object for blob %s(%s): %v", b.name, b.ID.String(), err)
+ return 0
+ }
+ return obj.Size()
}
diff --git a/modules/git/blob_nogogit.go b/modules/git/blob_nogogit.go
index af3ce376d6..837b30fd88 100644
--- a/modules/git/blob_nogogit.go
+++ b/modules/git/blob_nogogit.go
@@ -6,8 +6,6 @@
package git
import (
- "bufio"
- "bytes"
"io"
"code.gitea.io/gitea/modules/log"
@@ -25,38 +23,28 @@ type Blob struct {
// DataAsync gets a ReadCloser for the contents of a blob without reading it all.
// Calling the Close function on the result will discard all unread output.
-func (b *Blob) DataAsync() (io.ReadCloser, error) {
- wr, rd, cancel, err := b.repo.CatFileBatch(b.repo.Ctx)
+func (b *Blob) DataAsync() (_ io.ReadCloser, retErr error) {
+ batch, cancel, err := b.repo.CatFileBatch(b.repo.Ctx)
if err != nil {
return nil, err
}
+ defer func() {
+ // if there was an error, cancel the batch right away,
+ // otherwise let the caller close it
+ if retErr != nil {
+ cancel()
+ }
+ }()
- _, err = wr.Write([]byte(b.ID.String() + "\n"))
+ info, contentReader, err := batch.QueryContent(b.ID.String())
if err != nil {
- cancel()
- return nil, err
- }
- _, _, size, err := ReadBatchLine(rd)
- if err != nil {
- cancel()
return nil, err
}
b.gotSize = true
- b.size = size
-
- if size < 4096 {
- bs, err := io.ReadAll(io.LimitReader(rd, size))
- defer cancel()
- if err != nil {
- return nil, err
- }
- _, err = rd.Discard(1)
- return io.NopCloser(bytes.NewReader(bs)), err
- }
-
+ b.size = info.Size
return &blobReader{
- rd: rd,
- n: size,
+ rd: contentReader,
+ n: info.Size,
cancel: cancel,
}, nil
}
@@ -67,30 +55,24 @@ func (b *Blob) Size() int64 {
return b.size
}
- wr, rd, cancel, err := b.repo.CatFileBatchCheck(b.repo.Ctx)
+ batch, cancel, err := b.repo.CatFileBatch(b.repo.Ctx)
if err != nil {
log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
return 0
}
defer cancel()
- _, err = wr.Write([]byte(b.ID.String() + "\n"))
+ info, err := batch.QueryInfo(b.ID.String())
if err != nil {
log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
return 0
}
- _, _, b.size, err = ReadBatchLine(rd)
- if err != nil {
- log.Debug("error whilst reading size for %s in %s. Error: %v", b.ID.String(), b.repo.Path, err)
- return 0
- }
-
b.gotSize = true
-
+ b.size = info.Size
return b.size
}
type blobReader struct {
- rd *bufio.Reader
+ rd BufferedReader
n int64
cancel func()
}
diff --git a/modules/git/catfile_batch.go b/modules/git/catfile_batch.go
new file mode 100644
index 0000000000..d13179f3ec
--- /dev/null
+++ b/modules/git/catfile_batch.go
@@ -0,0 +1,52 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git
+
+import (
+ "context"
+ "io"
+)
+
+type BufferedReader interface {
+ io.Reader
+ Buffered() int
+ Peek(n int) ([]byte, error)
+ Discard(n int) (int, error)
+ ReadString(sep byte) (string, error)
+ ReadSlice(sep byte) ([]byte, error)
+ ReadBytes(sep byte) ([]byte, error)
+}
+
+type CatFileObject struct {
+ ID string
+ Type string
+ Size int64
+}
+
+type CatFileBatch interface {
+ // QueryInfo queries the object info from the git repository by its object name using "git cat-file --batch" family commands.
+	// "git cat-file" accepts "<rev>" for the object name, it can be a ref name, object id, etc. https://git-scm.com/docs/gitrevisions
+	// In Gitea, we only use the simple ref name or object id, no other complex rev syntax like the "^{<type>}" suffix or "git describe" although they are supported by git.
+ QueryInfo(obj string) (*CatFileObject, error)
+
+ // QueryContent is similar to QueryInfo, it queries the object info and additionally returns a reader for its content.
+ // FIXME: this design still follows the old pattern: the returned BufferedReader is very fragile,
+ // callers should carefully maintain its lifecycle and discard all unread data.
+ // TODO: It needs to be refactored to a fully managed Reader stream in the future, don't let callers manually Close or Discard
+ QueryContent(obj string) (*CatFileObject, BufferedReader, error)
+}
+
+type CatFileBatchCloser interface {
+ CatFileBatch
+ Close()
+}
+
+// NewBatch creates a "batch object provider (CatFileBatch)" for the given repository path to retrieve object info and content efficiently.
+// The CatFileBatch and the readers created by it should only be used in the same goroutine.
+func NewBatch(ctx context.Context, repoPath string) (CatFileBatchCloser, error) {
+ if DefaultFeatures().SupportCatFileBatchCommand {
+ return newCatFileBatchCommand(ctx, repoPath)
+ }
+ return newCatFileBatchLegacy(ctx, repoPath)
+}
diff --git a/modules/git/catfile_batch_command.go b/modules/git/catfile_batch_command.go
new file mode 100644
index 0000000000..710561f045
--- /dev/null
+++ b/modules/git/catfile_batch_command.go
@@ -0,0 +1,66 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git
+
+import (
+ "context"
+ "os"
+ "path/filepath"
+
+ "code.gitea.io/gitea/modules/git/gitcmd"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// catFileBatchCommand implements the CatFileBatch interface using the "cat-file --batch-command" command
+// for git version >= 2.36
+// ref: https://git-scm.com/docs/git-cat-file#Documentation/git-cat-file.txt---batch-command
+type catFileBatchCommand struct {
+ ctx context.Context
+ repoPath string
+ batch *catFileBatchCommunicator
+}
+
+var _ CatFileBatch = (*catFileBatchCommand)(nil)
+
+func newCatFileBatchCommand(ctx context.Context, repoPath string) (*catFileBatchCommand, error) {
+ if _, err := os.Stat(repoPath); err != nil {
+ return nil, util.NewNotExistErrorf("repo %q doesn't exist", filepath.Base(repoPath))
+ }
+ return &catFileBatchCommand{ctx: ctx, repoPath: repoPath}, nil
+}
+
+func (b *catFileBatchCommand) getBatch() *catFileBatchCommunicator {
+ if b.batch != nil {
+ return b.batch
+ }
+ b.batch = newCatFileBatch(b.ctx, b.repoPath, gitcmd.NewCommand("cat-file", "--batch-command"))
+ return b.batch
+}
+
+func (b *catFileBatchCommand) QueryContent(obj string) (*CatFileObject, BufferedReader, error) {
+ _, err := b.getBatch().reqWriter.Write([]byte("contents " + obj + "\n"))
+ if err != nil {
+ return nil, nil, err
+ }
+ info, err := catFileBatchParseInfoLine(b.getBatch().respReader)
+ if err != nil {
+ return nil, nil, err
+ }
+ return info, b.getBatch().respReader, nil
+}
+
+func (b *catFileBatchCommand) QueryInfo(obj string) (*CatFileObject, error) {
+ _, err := b.getBatch().reqWriter.Write([]byte("info " + obj + "\n"))
+ if err != nil {
+ return nil, err
+ }
+ return catFileBatchParseInfoLine(b.getBatch().respReader)
+}
+
+func (b *catFileBatchCommand) Close() {
+ if b.batch != nil {
+ b.batch.Close()
+ b.batch = nil
+ }
+}
diff --git a/modules/git/catfile_batch_legacy.go b/modules/git/catfile_batch_legacy.go
new file mode 100644
index 0000000000..795fc4ce3d
--- /dev/null
+++ b/modules/git/catfile_batch_legacy.go
@@ -0,0 +1,81 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git
+
+import (
+ "context"
+ "io"
+ "os"
+ "path/filepath"
+
+ "code.gitea.io/gitea/modules/git/gitcmd"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// catFileBatchLegacy implements the CatFileBatch interface using the "cat-file --batch" command and "cat-file --batch-check" command
+// for git version < 2.36
+// to align with "--batch-command", it creates the two commands for querying object contents and object info separately
+// ref: https://git-scm.com/docs/git-cat-file#Documentation/git-cat-file.txt---batch
+type catFileBatchLegacy struct {
+ ctx context.Context
+ repoPath string
+ batchContent *catFileBatchCommunicator
+ batchCheck *catFileBatchCommunicator
+}
+
+var _ CatFileBatchCloser = (*catFileBatchLegacy)(nil)
+
+func newCatFileBatchLegacy(ctx context.Context, repoPath string) (*catFileBatchLegacy, error) {
+ if _, err := os.Stat(repoPath); err != nil {
+ return nil, util.NewNotExistErrorf("repo %q doesn't exist", filepath.Base(repoPath))
+ }
+ return &catFileBatchLegacy{ctx: ctx, repoPath: repoPath}, nil
+}
+
+func (b *catFileBatchLegacy) getBatchContent() *catFileBatchCommunicator {
+ if b.batchContent != nil {
+ return b.batchContent
+ }
+ b.batchContent = newCatFileBatch(b.ctx, b.repoPath, gitcmd.NewCommand("cat-file", "--batch"))
+ return b.batchContent
+}
+
+func (b *catFileBatchLegacy) getBatchCheck() *catFileBatchCommunicator {
+ if b.batchCheck != nil {
+ return b.batchCheck
+ }
+ b.batchCheck = newCatFileBatch(b.ctx, b.repoPath, gitcmd.NewCommand("cat-file", "--batch-check"))
+ return b.batchCheck
+}
+
+func (b *catFileBatchLegacy) QueryContent(obj string) (*CatFileObject, BufferedReader, error) {
+ _, err := io.WriteString(b.getBatchContent().reqWriter, obj+"\n")
+ if err != nil {
+ return nil, nil, err
+ }
+ info, err := catFileBatchParseInfoLine(b.getBatchContent().respReader)
+ if err != nil {
+ return nil, nil, err
+ }
+ return info, b.getBatchContent().respReader, nil
+}
+
+func (b *catFileBatchLegacy) QueryInfo(obj string) (*CatFileObject, error) {
+ _, err := io.WriteString(b.getBatchCheck().reqWriter, obj+"\n")
+ if err != nil {
+ return nil, err
+ }
+ return catFileBatchParseInfoLine(b.getBatchCheck().respReader)
+}
+
+func (b *catFileBatchLegacy) Close() {
+ if b.batchContent != nil {
+ b.batchContent.Close()
+ b.batchContent = nil
+ }
+ if b.batchCheck != nil {
+ b.batchCheck.Close()
+ b.batchCheck = nil
+ }
+}
diff --git a/modules/git/catfile_batch_reader.go b/modules/git/catfile_batch_reader.go
new file mode 100644
index 0000000000..8a0b342079
--- /dev/null
+++ b/modules/git/catfile_batch_reader.go
@@ -0,0 +1,254 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "errors"
+ "io"
+ "math"
+ "strconv"
+ "strings"
+ "sync/atomic"
+ "time"
+
+ "code.gitea.io/gitea/modules/git/gitcmd"
+ "code.gitea.io/gitea/modules/log"
+)
+
+var catFileBatchDebugWaitClose atomic.Int64
+
+type catFileBatchCommunicator struct {
+ cancel context.CancelFunc
+ reqWriter io.Writer
+ respReader *bufio.Reader
+ debugGitCmd *gitcmd.Command
+}
+
+func (b *catFileBatchCommunicator) Close() {
+ if b.cancel != nil {
+ b.cancel()
+ b.cancel = nil
+ }
+}
+
+// newCatFileBatch opens git cat-file --batch in the provided repo and returns a stdin pipe, a stdout reader and cancel function
+func newCatFileBatch(ctx context.Context, repoPath string, cmdCatFile *gitcmd.Command) (ret *catFileBatchCommunicator) {
+ ctx, ctxCancel := context.WithCancelCause(ctx)
+
+ // We often want to feed the commits in order into cat-file --batch, followed by their trees and subtrees as necessary.
+ stdinWriter, stdoutReader, stdPipeClose := cmdCatFile.MakeStdinStdoutPipe()
+ pipeClose := func() {
+ if delay := catFileBatchDebugWaitClose.Load(); delay > 0 {
+ time.Sleep(time.Duration(delay)) // for testing purpose only
+ }
+ stdPipeClose()
+ }
+
+ ret = &catFileBatchCommunicator{
+ debugGitCmd: cmdCatFile,
+ cancel: func() { ctxCancel(nil) },
+ reqWriter: stdinWriter,
+ respReader: bufio.NewReaderSize(stdoutReader, 32*1024), // use a buffered reader for rich operations
+ }
+
+ err := cmdCatFile.WithDir(repoPath).StartWithStderr(ctx)
+ if err != nil {
+ log.Error("Unable to start git command %v: %v", cmdCatFile.LogString(), err)
+ // ideally here it should return the error, but it would require refactoring all callers
+ // so just return a dummy communicator that does nothing, almost the same behavior as before, not bad
+ ctxCancel(err)
+ pipeClose()
+ return ret
+ }
+
+ go func() {
+ err := cmdCatFile.WaitWithStderr()
+ if err != nil && !errors.Is(err, context.Canceled) {
+ log.Error("cat-file --batch command failed in repo %s, error: %v", repoPath, err)
+ }
+ ctxCancel(err)
+ pipeClose()
+ }()
+
+ return ret
+}
+
+// catFileBatchParseInfoLine reads the header line from cat-file --batch
+// We expect: <sha> SP <type> SP <size> LF
+// then leaving the rest of the stream "<contents> LF" to be read
+func catFileBatchParseInfoLine(rd BufferedReader) (*CatFileObject, error) {
+ typ, err := rd.ReadString('\n')
+ if err != nil {
+ return nil, err
+ }
+ if len(typ) == 1 {
+ typ, err = rd.ReadString('\n')
+ if err != nil {
+ return nil, err
+ }
+ }
+ idx := strings.IndexByte(typ, ' ')
+ if idx < 0 {
+ return nil, ErrNotExist{}
+ }
+ sha := typ[:idx]
+ typ = typ[idx+1:]
+
+ idx = strings.IndexByte(typ, ' ')
+ if idx < 0 {
+ return nil, ErrNotExist{ID: sha}
+ }
+
+ sizeStr := typ[idx+1 : len(typ)-1]
+ typ = typ[:idx]
+
+ size, err := strconv.ParseInt(sizeStr, 10, 64)
+ return &CatFileObject{ID: sha, Type: typ, Size: size}, err
+}
+
+// ReadTagObjectID reads a tag object ID hash from a cat-file --batch stream, throwing away the rest of the stream.
+func ReadTagObjectID(rd BufferedReader, size int64) (string, error) {
+ var id string
+ var n int64
+headerLoop:
+ for {
+ line, err := rd.ReadBytes('\n')
+ if err != nil {
+ return "", err
+ }
+ n += int64(len(line))
+ idx := bytes.Index(line, []byte{' '})
+ if idx < 0 {
+ continue
+ }
+
+ if string(line[:idx]) == "object" {
+ id = string(line[idx+1 : len(line)-1])
+ break headerLoop
+ }
+ }
+
+ // Discard the rest of the tag
+ return id, DiscardFull(rd, size-n+1)
+}
+
+// ReadTreeID reads a tree ID from a cat-file --batch stream, throwing away the rest of the stream.
+func ReadTreeID(rd BufferedReader, size int64) (string, error) {
+ var id string
+ var n int64
+headerLoop:
+ for {
+ line, err := rd.ReadBytes('\n')
+ if err != nil {
+ return "", err
+ }
+ n += int64(len(line))
+ idx := bytes.Index(line, []byte{' '})
+ if idx < 0 {
+ continue
+ }
+
+ if string(line[:idx]) == "tree" {
+ id = string(line[idx+1 : len(line)-1])
+ break headerLoop
+ }
+ }
+
+ // Discard the rest of the commit
+ return id, DiscardFull(rd, size-n+1)
+}
+
+// git tree files are a list:
+// <mode> SP <fname> NUL <binary HASH>
+//
+// Unfortunately this 20-byte notation is somewhat in conflict with all other git tools
+// Therefore we need some method to convert these binary hashes to hex hashes
+
+// ParseCatFileTreeLine reads an entry from a tree in a cat-file --batch stream
+// This carefully avoids allocations - except where fnameBuf is too small.
+// It is recommended therefore to pass in an fnameBuf large enough to avoid almost all allocations
+//
+// Each line is composed of:
+// <mode> SP <fname> NUL <binary HASH>
+//
+// We don't attempt to convert the raw HASH to save a lot of time
+func ParseCatFileTreeLine(objectFormat ObjectFormat, rd BufferedReader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) {
+ var readBytes []byte
+
+ // Read the Mode & fname
+ readBytes, err = rd.ReadSlice('\x00')
+ if err != nil {
+ return mode, fname, sha, n, err
+ }
+ idx := bytes.IndexByte(readBytes, ' ')
+ if idx < 0 {
+ log.Debug("missing space in readBytes ParseCatFileTreeLine: %s", readBytes)
+ return mode, fname, sha, n, &ErrNotExist{}
+ }
+
+ n += idx + 1
+ copy(modeBuf, readBytes[:idx])
+ if len(modeBuf) >= idx {
+ modeBuf = modeBuf[:idx]
+ } else {
+ modeBuf = append(modeBuf, readBytes[len(modeBuf):idx]...)
+ }
+ mode = modeBuf
+
+ readBytes = readBytes[idx+1:]
+
+ // Deal with the fname
+ copy(fnameBuf, readBytes)
+ if len(fnameBuf) > len(readBytes) {
+ fnameBuf = fnameBuf[:len(readBytes)]
+ } else {
+ fnameBuf = append(fnameBuf, readBytes[len(fnameBuf):]...)
+ }
+ for err == bufio.ErrBufferFull {
+ readBytes, err = rd.ReadSlice('\x00')
+ fnameBuf = append(fnameBuf, readBytes...)
+ }
+ n += len(fnameBuf)
+ if err != nil {
+ return mode, fname, sha, n, err
+ }
+ fnameBuf = fnameBuf[:len(fnameBuf)-1]
+ fname = fnameBuf
+
+ // Deal with the binary hash
+ idx = 0
+ length := objectFormat.FullLength() / 2
+ for idx < length {
+ var read int
+ read, err = rd.Read(shaBuf[idx:length])
+ n += read
+ if err != nil {
+ return mode, fname, sha, n, err
+ }
+ idx += read
+ }
+ sha = shaBuf
+ return mode, fname, sha, n, err
+}
+
+func DiscardFull(rd BufferedReader, discard int64) error {
+ if discard > math.MaxInt32 {
+ n, err := rd.Discard(math.MaxInt32)
+ discard -= int64(n)
+ if err != nil {
+ return err
+ }
+ }
+ for discard > 0 {
+ n, err := rd.Discard(int(discard))
+ discard -= int64(n)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
diff --git a/modules/git/catfile_batch_test.go b/modules/git/catfile_batch_test.go
new file mode 100644
index 0000000000..69662ffc1a
--- /dev/null
+++ b/modules/git/catfile_batch_test.go
@@ -0,0 +1,104 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git
+
+import (
+ "io"
+ "os"
+ "path/filepath"
+ "sync"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCatFileBatch(t *testing.T) {
+ defer test.MockVariableValue(&DefaultFeatures().SupportCatFileBatchCommand)()
+ DefaultFeatures().SupportCatFileBatchCommand = false
+ t.Run("LegacyCheck", testCatFileBatch)
+ DefaultFeatures().SupportCatFileBatchCommand = true
+ t.Run("BatchCommand", testCatFileBatch)
+}
+
+func testCatFileBatch(t *testing.T) {
+ t.Run("CorruptedGitRepo", func(t *testing.T) {
+ tmpDir := t.TempDir()
+ batch, err := NewBatch(t.Context(), tmpDir)
+ // as long as the directory exists, no error, because we can't really know whether the git repo is valid until we run commands
+ require.NoError(t, err)
+ defer batch.Close()
+
+ _, err = batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449")
+ require.Error(t, err)
+ _, err = batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449")
+ require.Error(t, err)
+ })
+
+ simulateQueryTerminated := func(pipeCloseDelay, pipeReadDelay time.Duration) (errRead error) {
+ catFileBatchDebugWaitClose.Store(int64(pipeCloseDelay))
+ defer catFileBatchDebugWaitClose.Store(0)
+ batch, err := NewBatch(t.Context(), filepath.Join(testReposDir, "repo1_bare"))
+ require.NoError(t, err)
+ defer batch.Close()
+ _, _ = batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449")
+ var c *catFileBatchCommunicator
+ switch b := batch.(type) {
+ case *catFileBatchLegacy:
+ c = b.batchCheck
+ _, _ = c.reqWriter.Write([]byte("in-complete-line-"))
+ case *catFileBatchCommand:
+ c = b.batch
+ _, _ = c.reqWriter.Write([]byte("info"))
+ default:
+ t.FailNow()
+ }
+
+ wg := sync.WaitGroup{}
+ wg.Go(func() {
+ time.Sleep(pipeReadDelay)
+ var n int
+ n, errRead = c.respReader.Read(make([]byte, 100))
+ assert.Zero(t, n)
+ })
+ time.Sleep(10 * time.Millisecond)
+ c.debugGitCmd.DebugKill()
+ wg.Wait()
+ return errRead
+ }
+
+ t.Run("QueryTerminated", func(t *testing.T) {
+ err := simulateQueryTerminated(0, 20*time.Millisecond)
+ assert.ErrorIs(t, err, os.ErrClosed) // pipes are closed faster
+ err = simulateQueryTerminated(40*time.Millisecond, 20*time.Millisecond)
+ assert.ErrorIs(t, err, io.EOF) // reader is faster
+ })
+
+ batch, err := NewBatch(t.Context(), filepath.Join(testReposDir, "repo1_bare"))
+ require.NoError(t, err)
+ defer batch.Close()
+
+ t.Run("QueryInfo", func(t *testing.T) {
+ info, err := batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449")
+ require.NoError(t, err)
+ assert.Equal(t, "e2129701f1a4d54dc44f03c93bca0a2aec7c5449", info.ID)
+ assert.Equal(t, "blob", info.Type)
+ assert.EqualValues(t, 6, info.Size)
+ })
+
+ t.Run("QueryContent", func(t *testing.T) {
+ info, rd, err := batch.QueryContent("e2129701f1a4d54dc44f03c93bca0a2aec7c5449")
+ require.NoError(t, err)
+ assert.Equal(t, "e2129701f1a4d54dc44f03c93bca0a2aec7c5449", info.ID)
+ assert.Equal(t, "blob", info.Type)
+ assert.EqualValues(t, 6, info.Size)
+
+ content, err := io.ReadAll(io.LimitReader(rd, info.Size))
+ require.NoError(t, err)
+ require.Equal(t, "file1\n", string(content))
+ })
+}
diff --git a/modules/git/commit.go b/modules/git/commit.go
index 1917a72bbf..b98d36d946 100644
--- a/modules/git/commit.go
+++ b/modules/git/commit.go
@@ -37,6 +37,10 @@ type CommitSignature struct {
// Message returns the commit message. Same as retrieving CommitMessage directly.
func (c *Commit) Message() string {
+ // FIXME: GIT-COMMIT-MESSAGE-ENCODING: this logic is not right
+ // * When we need to use the commit message in templates or the database, it should be valid UTF-8
+ // * When we need the original commit message, callers should just use "c.CommitMessage" directly
+ // It's not easy to refactor at the moment: many templates would need to be updated and tested
return c.CommitMessage
}
@@ -120,7 +124,7 @@ func CommitChanges(ctx context.Context, repoPath string, opts CommitChangesOptio
_, _, err := cmd.WithDir(repoPath).RunStdString(ctx)
// No stderr but exit status 1 means nothing to commit.
- if err != nil && err.Error() == "exit status 1" {
+ if gitcmd.IsErrorExitCode(err, 1) {
return nil
}
return err
@@ -315,7 +319,7 @@ func GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, err
WithDir(repoPath).
RunStdString(ctx)
if err != nil {
- if strings.Contains(err.Error(), "exit status 128") {
+ if gitcmd.IsErrorExitCode(err, 128) {
return "", ErrNotExist{shortID, ""}
}
return "", err
diff --git a/modules/git/commit_info_test.go b/modules/git/commit_info_test.go
index 14a4174544..1e1697b006 100644
--- a/modules/git/commit_info_test.go
+++ b/modules/git/commit_info_test.go
@@ -30,28 +30,57 @@ func cloneRepo(tb testing.TB, url string) (string, error) {
}
func testGetCommitsInfo(t *testing.T, repo1 *Repository) {
+ type expectedEntryInfo struct {
+ CommitID string
+ Size int64
+ }
+
// these test case are specific to the repo1 test repo
testCases := []struct {
CommitID string
Path string
- ExpectedIDs map[string]string
+ ExpectedIDs map[string]expectedEntryInfo
ExpectedTreeCommit string
}{
- {"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", "", map[string]string{
- "file1.txt": "95bb4d39648ee7e325106df01a621c530863a653",
- "file2.txt": "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
+ {"8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", "", map[string]expectedEntryInfo{
+ "file1.txt": {
+ CommitID: "95bb4d39648ee7e325106df01a621c530863a653",
+ Size: 6,
+ },
+ "file2.txt": {
+ CommitID: "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
+ Size: 6,
+ },
}, "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2"},
- {"2839944139e0de9737a044f78b0e4b40d989a9e3", "", map[string]string{
- "file1.txt": "2839944139e0de9737a044f78b0e4b40d989a9e3",
- "branch1.txt": "9c9aef8dd84e02bc7ec12641deb4c930a7c30185",
+ {"2839944139e0de9737a044f78b0e4b40d989a9e3", "", map[string]expectedEntryInfo{
+ "file1.txt": {
+ CommitID: "2839944139e0de9737a044f78b0e4b40d989a9e3",
+ Size: 15,
+ },
+ "branch1.txt": {
+ CommitID: "9c9aef8dd84e02bc7ec12641deb4c930a7c30185",
+ Size: 8,
+ },
}, "2839944139e0de9737a044f78b0e4b40d989a9e3"},
- {"5c80b0245c1c6f8343fa418ec374b13b5d4ee658", "branch2", map[string]string{
- "branch2.txt": "5c80b0245c1c6f8343fa418ec374b13b5d4ee658",
+ {"5c80b0245c1c6f8343fa418ec374b13b5d4ee658", "branch2", map[string]expectedEntryInfo{
+ "branch2.txt": {
+ CommitID: "5c80b0245c1c6f8343fa418ec374b13b5d4ee658",
+ Size: 8,
+ },
}, "5c80b0245c1c6f8343fa418ec374b13b5d4ee658"},
- {"feaf4ba6bc635fec442f46ddd4512416ec43c2c2", "", map[string]string{
- "file1.txt": "95bb4d39648ee7e325106df01a621c530863a653",
- "file2.txt": "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
- "foo": "37991dec2c8e592043f47155ce4808d4580f9123",
+ {"feaf4ba6bc635fec442f46ddd4512416ec43c2c2", "", map[string]expectedEntryInfo{
+ "file1.txt": {
+ CommitID: "95bb4d39648ee7e325106df01a621c530863a653",
+ Size: 6,
+ },
+ "file2.txt": {
+ CommitID: "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2",
+ Size: 6,
+ },
+ "foo": {
+ CommitID: "37991dec2c8e592043f47155ce4808d4580f9123",
+ Size: 0,
+ },
}, "feaf4ba6bc635fec442f46ddd4512416ec43c2c2"},
}
for _, testCase := range testCases {
@@ -93,11 +122,12 @@ func testGetCommitsInfo(t *testing.T, repo1 *Repository) {
for _, commitInfo := range commitsInfo {
entry := commitInfo.Entry
commit := commitInfo.Commit
- expectedID, ok := testCase.ExpectedIDs[entry.Name()]
+ expectedInfo, ok := testCase.ExpectedIDs[entry.Name()]
if !assert.True(t, ok) {
continue
}
- assert.Equal(t, expectedID, commit.ID.String())
+ assert.Equal(t, expectedInfo.CommitID, commit.ID.String())
+ assert.Equal(t, expectedInfo.Size, entry.Size(), entry.Name())
}
}
}
diff --git a/modules/git/commit_submodule.go b/modules/git/commit_submodule.go
index ff253b7eca..5e5f90c20e 100644
--- a/modules/git/commit_submodule.go
+++ b/modules/git/commit_submodule.go
@@ -16,7 +16,7 @@ func (c *Commit) GetSubModules() (*ObjectCache[*SubModule], error) {
entry, err := c.GetTreeEntryByPath(".gitmodules")
if err != nil {
if _, ok := err.(ErrNotExist); ok {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the submodule does not exist
}
return nil, err
}
@@ -48,5 +48,5 @@ func (c *Commit) GetSubModule(entryName string) (*SubModule, error) {
return module, nil
}
}
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the submodule does not exist
}
diff --git a/modules/git/diff.go b/modules/git/diff.go
index 309d8f4615..d7732eaa29 100644
--- a/modules/git/diff.go
+++ b/modules/git/diff.go
@@ -5,11 +5,9 @@ package git
import (
"bufio"
- "bytes"
"context"
"fmt"
"io"
- "os"
"regexp"
"strconv"
"strings"
@@ -17,34 +15,57 @@ import (
"code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
)
-// RawDiffType type of a raw diff.
+// RawDiffType output format: diff or patch
type RawDiffType string
-// RawDiffType possible values.
const (
RawDiffNormal RawDiffType = "diff"
RawDiffPatch RawDiffType = "patch"
)
// GetRawDiff dumps diff results of repository in given commit ID to io.Writer.
-func GetRawDiff(repo *Repository, commitID string, diffType RawDiffType, writer io.Writer) error {
- return GetRepoRawDiffForFile(repo, "", commitID, diffType, "", writer)
+func GetRawDiff(repo *Repository, commitID string, diffType RawDiffType, writer io.Writer) (retErr error) {
+ cmd, err := getRepoRawDiffForFileCmd(repo.Ctx, repo, "", commitID, diffType, "")
+ if err != nil {
+ return fmt.Errorf("getRepoRawDiffForFileCmd: %w", err)
+ }
+ return cmd.WithStdoutCopy(writer).RunWithStderr(repo.Ctx)
}
-// GetRepoRawDiffForFile dumps diff results of file in given commit ID to io.Writer according given repository
-func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diffType RawDiffType, file string, writer io.Writer) error {
+// GetFileDiffCutAroundLine cuts the old or new part of the diff of a file around a specific line number
+func GetFileDiffCutAroundLine(
+ repo *Repository, startCommit, endCommit, treePath string,
+ line int64, old bool, numbersOfLine int,
+) (ret string, retErr error) {
+ cmd, err := getRepoRawDiffForFileCmd(repo.Ctx, repo, startCommit, endCommit, RawDiffNormal, treePath)
+ if err != nil {
+ return "", fmt.Errorf("getRepoRawDiffForFileCmd: %w", err)
+ }
+ stdoutReader, stdoutClose := cmd.MakeStdoutPipe()
+ defer stdoutClose()
+ cmd.WithPipelineFunc(func(ctx gitcmd.Context) error {
+ ret, err = CutDiffAroundLine(stdoutReader, line, old, numbersOfLine)
+ return err
+ })
+ return ret, cmd.RunWithStderr(repo.Ctx)
+}
+
+// getRepoRawDiffForFileCmd prepares a git "diff" or "format-patch" command for the given file
+// and commit range; the caller is responsible for running the returned command and reading its output.
+func getRepoRawDiffForFileCmd(_ context.Context, repo *Repository, startCommit, endCommit string, diffType RawDiffType, file string) (*gitcmd.Command, error) {
commit, err := repo.GetCommit(endCommit)
if err != nil {
- return err
+ return nil, err
}
var files []string
if len(file) > 0 {
files = append(files, file)
}
- cmd := gitcmd.NewCommand()
+ cmd := gitcmd.NewCommand().WithDir(repo.Path)
switch diffType {
case RawDiffNormal:
if len(startCommit) != 0 {
@@ -56,7 +77,7 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff
} else {
c, err := commit.Parent(0)
if err != nil {
- return err
+ return nil, err
}
cmd.AddArguments("diff").
AddOptionFormat("--find-renames=%s", setting.Git.DiffRenameSimilarityThreshold).
@@ -71,23 +92,15 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff
} else {
c, err := commit.Parent(0)
if err != nil {
- return err
+ return nil, err
}
query := fmt.Sprintf("%s...%s", endCommit, c.ID.String())
cmd.AddArguments("format-patch", "--no-signature", "--stdout").AddDynamicArguments(query).AddDashesAndList(files...)
}
default:
- return fmt.Errorf("invalid diffType: %s", diffType)
+ return nil, util.NewInvalidArgumentErrorf("invalid diff type: %s", diffType)
}
-
- stderr := new(bytes.Buffer)
- if err = cmd.WithDir(repo.Path).
- WithStdout(writer).
- WithStderr(stderr).
- Run(repo.Ctx); err != nil {
- return fmt.Errorf("Run: %w - %s", err, stderr)
- }
- return nil
+ return cmd, nil
}
// ParseDiffHunkString parse the diff hunk content and return
@@ -224,7 +237,7 @@ func CutDiffAroundLine(originalDiff io.Reader, line int64, old bool, numbersOfLi
}
}
if err := scanner.Err(); err != nil {
- return "", err
+ return "", fmt.Errorf("CutDiffAroundLine: scan: %w", err)
}
// No hunk found
@@ -290,30 +303,15 @@ func GetAffectedFiles(repo *Repository, branchName, oldCommitID, newCommitID str
}
oldCommitID = startCommitID
}
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- log.Error("Unable to create os.Pipe for %s", repo.Path)
- return nil, err
- }
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
affectedFiles := make([]string, 0, 32)
// Run `git diff --name-only` to get the names of the changed files
- err = gitcmd.NewCommand("diff", "--name-only").AddDynamicArguments(oldCommitID, newCommitID).
- WithEnv(env).
- WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- // Close the writer end of the pipe to begin processing
- _ = stdoutWriter.Close()
- defer func() {
- // Close the reader on return to terminate the git command if necessary
- _ = stdoutReader.Close()
- }()
+ cmd := gitcmd.NewCommand("diff", "--name-only").AddDynamicArguments(oldCommitID, newCommitID)
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+ err := cmd.WithEnv(env).WithDir(repo.Path).
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
// Now scan the output from the command
scanner := bufio.NewScanner(stdoutReader)
for scanner.Scan() {
diff --git a/modules/git/error.go b/modules/git/error.go
index d4b5412da9..1b7bdca043 100644
--- a/modules/git/error.go
+++ b/modules/git/error.go
@@ -4,8 +4,6 @@
package git
import (
- "context"
- "errors"
"fmt"
"strings"
@@ -143,10 +141,3 @@ func IsErrMoreThanOne(err error) bool {
func (err *ErrMoreThanOne) Error() string {
return fmt.Sprintf("ErrMoreThanOne Error: %v: %s\n%s", err.Err, err.StdErr, err.StdOut)
}
-
-func IsErrCanceledOrKilled(err error) bool {
- // When "cancel()" a git command's context, the returned error of "Run()" could be one of them:
- // - context.Canceled
- // - *exec.ExitError: "signal: killed"
- return err != nil && (errors.Is(err, context.Canceled) || err.Error() == "signal: killed")
-}
diff --git a/modules/git/foreachref/parser.go b/modules/git/foreachref/parser.go
index fa2ef316c7..91868076b4 100644
--- a/modules/git/foreachref/parser.go
+++ b/modules/git/foreachref/parser.go
@@ -30,9 +30,11 @@ type Parser struct {
func NewParser(r io.Reader, format Format) *Parser {
scanner := bufio.NewScanner(r)
- // default MaxScanTokenSize = 64 kiB may be too small for some references,
- // so allow the buffer to grow up to 4x if needed
- scanner.Buffer(nil, 4*bufio.MaxScanTokenSize)
+ // default Scanner.MaxScanTokenSize = 64 kiB may be too small for some references,
+ // so allow the buffer to be large enough in case the ref has long content (e.g.: a tag with long message)
+ // as long as it doesn't exceed some reasonable limit (4 MiB here, or MAX_DISPLAY_FILE_SIZE=8MiB), it is OK
+ // there are still some choices: 1. add a config option for the limit; 2. don't use scanner and write our own parser to fully handle large contents
+ scanner.Buffer(nil, 4*1024*1024)
// in addition to the reference delimiter we specified in the --format,
// `git for-each-ref` will always add a newline after every reference.
@@ -98,7 +100,7 @@ func (p *Parser) Err() error {
func (p *Parser) parseRef(refBlock string) (map[string]string, error) {
if refBlock == "" {
// must be at EOF
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to signal EOF
}
fieldValues := make(map[string]string)
diff --git a/modules/git/git.go b/modules/git/git.go
index 6d2c643b33..2df83f9843 100644
--- a/modules/git/git.go
+++ b/modules/git/git.go
@@ -12,25 +12,28 @@ import (
"path/filepath"
"runtime"
"strings"
- "time"
"code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/tempdir"
+ "code.gitea.io/gitea/modules/testlogger"
"github.com/hashicorp/go-version"
)
-const RequiredVersion = "2.0.0" // the minimum Git version required
+const RequiredVersion = "2.6.0" // the minimum Git version required
type Features struct {
gitVersion *version.Version
- UsingGogit bool
- SupportProcReceive bool // >= 2.29
- SupportHashSha256 bool // >= 2.42, SHA-256 repositories no longer an ‘experimental curiosity’
- SupportedObjectFormats []ObjectFormat // sha1, sha256
- SupportCheckAttrOnBare bool // >= 2.40
+ UsingGogit bool
+ SupportProcReceive bool // >= 2.29
+ SupportHashSha256 bool // >= 2.42, SHA-256 repositories no longer an ‘experimental curiosity’
+ SupportedObjectFormats []ObjectFormat // sha1, sha256
+ SupportCheckAttrOnBare bool // >= 2.40
+ SupportCatFileBatchCommand bool // >= 2.36, support `git cat-file --batch-command`
+ SupportGitMergeTree bool // >= 2.40 // we also need "--merge-base"
}
var defaultFeatures *Features
@@ -75,6 +78,8 @@ func loadGitVersionFeatures() (*Features, error) {
features.SupportedObjectFormats = append(features.SupportedObjectFormats, Sha256ObjectFormat)
}
features.SupportCheckAttrOnBare = features.CheckVersionAtLeast("2.40")
+ features.SupportCatFileBatchCommand = features.CheckVersionAtLeast("2.36")
+ features.SupportGitMergeTree = features.CheckVersionAtLeast("2.40") // we also need "--merge-base"
return features, nil
}
@@ -84,12 +89,17 @@ func parseGitVersionLine(s string) (*version.Version, error) {
return nil, fmt.Errorf("invalid git version: %q", s)
}
- // version string is like: "git version 2.29.3" or "git version 2.29.3.windows.1"
+ // version output is like: "git version {versionString}"
+ // versionString can be:
+ // * "2.5.3"
+ // * "2.29.3.windows.1"
+ // * "2.28.0.618.gf4bc123cb7": https://github.com/go-gitea/gitea/issues/12731
versionString := fields[2]
- if pos := strings.Index(versionString, "windows"); pos >= 1 {
- versionString = versionString[:pos-1]
+ versionFields := strings.Split(versionString, ".")
+ if len(versionFields) > 3 {
+ versionFields = versionFields[:3]
}
- return version.NewVersion(versionString)
+ return version.NewVersion(strings.Join(versionFields, "."))
}
func checkGitVersionCompatibility(gitVer *version.Version) error {
@@ -137,10 +147,6 @@ func InitSimple() error {
log.Warn("git module has been initialized already, duplicate init may work but it's better to fix it")
}
- if setting.Git.Timeout.Default > 0 {
- gitcmd.SetDefaultCommandExecutionTimeout(time.Duration(setting.Git.Timeout.Default) * time.Second)
- }
-
if err := gitcmd.SetExecutablePath(setting.Git.Path); err != nil {
return err
}
@@ -176,3 +182,23 @@ func InitFull() (err error) {
return syncGitConfig(context.Background())
}
+
+// RunGitTests helps to init the git module and run tests.
+// FIXME: GIT-PACKAGE-DEPENDENCY: the dependency is not right, setting.Git.HomePath is initialized in this package but used in gitcmd package
+func RunGitTests(m interface{ Run() int }) {
+ os.Exit(runGitTests(m))
+}
+
+func runGitTests(m interface{ Run() int }) int {
+ gitHomePath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("git-home")
+ if err != nil {
+ testlogger.Panicf("unable to create temp dir: %s", err.Error())
+ }
+ defer cleanup()
+
+ setting.Git.HomePath = gitHomePath
+ if err = InitFull(); err != nil {
+ testlogger.Panicf("failed to call Init: %s", err.Error())
+ }
+ return m.Run()
+}
diff --git a/modules/git/git_test.go b/modules/git/git_test.go
index 7a8ca74b01..e21cbe449a 100644
--- a/modules/git/git_test.go
+++ b/modules/git/git_test.go
@@ -4,42 +4,14 @@
package git
import (
- "fmt"
- "os"
"testing"
- "code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/tempdir"
-
"github.com/hashicorp/go-version"
"github.com/stretchr/testify/assert"
)
-func testRun(m *testing.M) error {
- gitHomePath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("git-home")
- if err != nil {
- return fmt.Errorf("unable to create temp dir: %w", err)
- }
- defer cleanup()
-
- setting.Git.HomePath = gitHomePath
-
- if err = InitFull(); err != nil {
- return fmt.Errorf("failed to call Init: %w", err)
- }
-
- exitCode := m.Run()
- if exitCode != 0 {
- return fmt.Errorf("run test failed, ExitCode=%d", exitCode)
- }
- return nil
-}
-
func TestMain(m *testing.M) {
- if err := testRun(m); err != nil {
- _, _ = fmt.Fprintf(os.Stderr, "Test failed: %v", err)
- os.Exit(1)
- }
+ RunGitTests(m)
}
func TestParseGitVersion(t *testing.T) {
@@ -51,6 +23,10 @@ func TestParseGitVersion(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, "2.29.3", v.String())
+ v, err = parseGitVersionLine("git version 2.28.0.618.gf4bc123cb7")
+ assert.NoError(t, err)
+ assert.Equal(t, "2.28.0", v.String())
+
_, err = parseGitVersionLine("git version")
assert.Error(t, err)
diff --git a/modules/git/gitcmd/command.go b/modules/git/gitcmd/command.go
index ff2827bd6c..e9b51802fe 100644
--- a/modules/git/gitcmd/command.go
+++ b/modules/git/gitcmd/command.go
@@ -13,7 +13,6 @@ import (
"os"
"os/exec"
"path/filepath"
- "runtime"
"strings"
"time"
@@ -29,24 +28,32 @@ import (
// In most cases, it shouldn't be used. Use AddXxx function instead
type TrustedCmdArgs []internal.CmdArg
-// defaultCommandExecutionTimeout default command execution timeout duration
-var defaultCommandExecutionTimeout = 360 * time.Second
-
-func SetDefaultCommandExecutionTimeout(timeout time.Duration) {
- defaultCommandExecutionTimeout = timeout
-}
-
-// DefaultLocale is the default LC_ALL to run git commands in.
-const DefaultLocale = "C"
-
// Command represents a command with its subcommands or arguments.
type Command struct {
+ callerInfo string
prog string
args []string
- brokenArgs []string
- cmd *exec.Cmd // for debug purpose only
+ preErrors []error
configArgs []string
opts runOpts
+
+ cmd *exec.Cmd
+
+ cmdCtx context.Context
+ cmdCancel process.CancelCauseFunc
+ cmdFinished process.FinishedFunc
+ cmdStartTime time.Time
+
+ parentPipeFiles []*os.File
+ parentPipeReaders []*os.File
+ childrenPipeFiles []*os.File
+
+ // only os.Pipe and in-memory buffers can work with Stdin safely; see https://github.com/golang/go/issues/77227 for what happens if the command exits unexpectedly
+ cmdStdin io.Reader
+ cmdStdout io.Writer
+ cmdStderr io.Writer
+
+ cmdManagedStderr *bytes.Buffer
}
func logArgSanitize(arg string) string {
@@ -97,6 +104,10 @@ func NewCommand(args ...internal.CmdArg) *Command {
}
}
+func (c *Command) handlePreErrorBrokenCommand(arg string) {
+ c.preErrors = append(c.preErrors, util.ErrorWrap(ErrBrokenCommand, `broken git command argument %q`, arg))
+}
+
// isSafeArgumentValue checks if the argument is safe to be used as a value (not an option)
func isSafeArgumentValue(s string) bool {
return s == "" || s[0] != '-'
@@ -124,7 +135,7 @@ func (c *Command) AddArguments(args ...internal.CmdArg) *Command {
// The values are treated as dynamic argument values. It equals to: AddArguments("--opt") then AddDynamicArguments(val).
func (c *Command) AddOptionValues(opt internal.CmdArg, args ...string) *Command {
if !isValidArgumentOption(string(opt)) {
- c.brokenArgs = append(c.brokenArgs, string(opt))
+ c.handlePreErrorBrokenCommand(string(opt))
return c
}
c.args = append(c.args, string(opt))
@@ -136,12 +147,12 @@ func (c *Command) AddOptionValues(opt internal.CmdArg, args ...string) *Command
// For example: AddOptionFormat("--opt=%s %s", val1, val2) means 1 argument: {"--opt=val1 val2"}.
func (c *Command) AddOptionFormat(opt string, args ...any) *Command {
if !isValidArgumentOption(opt) {
- c.brokenArgs = append(c.brokenArgs, opt)
+ c.handlePreErrorBrokenCommand(opt)
return c
}
// a quick check to make sure the format string matches the number of arguments, to find low-level mistakes ASAP
if strings.Count(strings.ReplaceAll(opt, "%%", ""), "%") != len(args) {
- c.brokenArgs = append(c.brokenArgs, opt)
+ c.handlePreErrorBrokenCommand(opt)
return c
}
s := fmt.Sprintf(opt, args...)
@@ -155,10 +166,10 @@ func (c *Command) AddOptionFormat(opt string, args ...any) *Command {
func (c *Command) AddDynamicArguments(args ...string) *Command {
for _, arg := range args {
if !isSafeArgumentValue(arg) {
- c.brokenArgs = append(c.brokenArgs, arg)
+ c.handlePreErrorBrokenCommand(arg)
}
}
- if len(c.brokenArgs) != 0 {
+ if len(c.preErrors) != 0 {
return c
}
c.args = append(c.args, args...)
@@ -178,7 +189,7 @@ func (c *Command) AddDashesAndList(list ...string) *Command {
func (c *Command) AddConfig(key, value string) *Command {
kv := key + "=" + value
if !isSafeArgumentValue(kv) {
- c.brokenArgs = append(c.brokenArgs, key)
+ c.handlePreErrorBrokenCommand(kv)
} else {
c.configArgs = append(c.configArgs, "-c", kv)
}
@@ -195,11 +206,9 @@ func ToTrustedCmdArgs(args []string) TrustedCmdArgs {
return ret
}
-// runOpts represents parameters to run the command. If UseContextTimeout is specified, then Timeout is ignored.
type runOpts struct {
- Env []string
- Timeout time.Duration
- UseContextTimeout bool
+ Env []string
+ Timeout time.Duration
// Dir is the working dir for the git command, however:
// FIXME: this could be incorrect in many cases, for example:
@@ -209,21 +218,7 @@ type runOpts struct {
// The correct approach is to use `--git-dir" global argument
Dir string
- Stdout, Stderr io.Writer
-
- // Stdin is used for passing input to the command
- // The caller must make sure the Stdin writer is closed properly to finish the Run function.
- // Otherwise, the Run function may hang for long time or forever, especially when the Git's context deadline is not the same as the caller's.
- // Some common mistakes:
- // * `defer stdinWriter.Close()` then call `cmd.Run()`: the Run() would never return if the command is killed by timeout
- // * `go { case <- parentContext.Done(): stdinWriter.Close() }` with `cmd.Run(DefaultTimeout)`: the command would have been killed by timeout but the Run doesn't return until stdinWriter.Close()
- // * `go { if stdoutReader.Read() err != nil: stdinWriter.Close() }` with `cmd.Run()`: the stdoutReader may never return error if the command is killed by timeout
- // In the future, ideally the git module itself should have full control of the stdin, to avoid such problems and make it easier to refactor to a better architecture.
- Stdin io.Reader
-
- PipelineFunc func(context.Context, context.CancelFunc) error
-
- callerInfo string
+ PipelineFunc func(Context) error
}
func commonBaseEnvs() []string {
@@ -254,7 +249,7 @@ func commonBaseEnvs() []string {
// CommonGitCmdEnvs returns the common environment variables for a "git" command.
func CommonGitCmdEnvs() []string {
return append(commonBaseEnvs(), []string{
- "LC_ALL=" + DefaultLocale,
+ "LC_ALL=C", // ensure git output is in English, error messages are parsed in English
"GIT_TERMINAL_PROMPT=0", // avoid prompting for credentials interactively, supported since git v2.3
}...)
}
@@ -281,42 +276,106 @@ func (c *Command) WithTimeout(timeout time.Duration) *Command {
return c
}
-func (c *Command) WithStdout(stdout io.Writer) *Command {
- c.opts.Stdout = stdout
+func (c *Command) makeStdoutStderr(w *io.Writer) (PipeReader, func()) {
+ pr, pw, err := os.Pipe()
+ if err != nil {
+ c.preErrors = append(c.preErrors, err)
+ return &pipeNull{err}, func() {}
+ }
+ c.childrenPipeFiles = append(c.childrenPipeFiles, pw)
+ c.parentPipeFiles = append(c.parentPipeFiles, pr)
+ c.parentPipeReaders = append(c.parentPipeReaders, pr)
+ *w /* stdout, stderr */ = pw
+ return &pipeReader{f: pr}, func() { pr.Close() }
+}
+
+// MakeStdinPipe creates a writer for the command's stdin.
+// The returned closer function must be called by the caller to close the pipe.
+func (c *Command) MakeStdinPipe() (writer PipeWriter, closer func()) {
+ pr, pw, err := os.Pipe()
+ if err != nil {
+ c.preErrors = append(c.preErrors, err)
+ return &pipeNull{err}, func() {}
+ }
+ c.childrenPipeFiles = append(c.childrenPipeFiles, pr)
+ c.parentPipeFiles = append(c.parentPipeFiles, pw)
+ c.cmdStdin = pr
+ return &pipeWriter{pw}, func() { pw.Close() }
+}
+
+// MakeStdoutPipe creates a reader for the command's stdout.
+// The returned closer function must be called by the caller to close the pipe.
+// After the pipe reader is closed, the unread data will be discarded.
+//
+// If the process (git command) still tries to write after the pipe is closed, the Wait error will be "signal: broken pipe".
+// WithPipelineFunc + Run won't return "broken pipe" error in this case if the callback returns no error.
+// But if you are calling Start / Wait family functions, you should either drain the pipe before close it, or handle the Wait error correctly.
+func (c *Command) MakeStdoutPipe() (reader PipeReader, closer func()) {
+ return c.makeStdoutStderr(&c.cmdStdout)
+}
+
+// MakeStderrPipe is like MakeStdoutPipe, but for stderr.
+func (c *Command) MakeStderrPipe() (reader PipeReader, closer func()) {
+ return c.makeStdoutStderr(&c.cmdStderr)
+}
+
+func (c *Command) MakeStdinStdoutPipe() (stdin PipeWriter, stdout PipeReader, closer func()) {
+ stdin, stdinClose := c.MakeStdinPipe()
+ stdout, stdoutClose := c.MakeStdoutPipe()
+ return stdin, stdout, func() {
+ stdinClose()
+ stdoutClose()
+ }
+}
+
+func (c *Command) WithStdinBytes(stdin []byte) *Command {
+ c.cmdStdin = bytes.NewReader(stdin)
return c
}
-func (c *Command) WithStderr(stderr io.Writer) *Command {
- c.opts.Stderr = stderr
+func (c *Command) WithStdoutBuffer(w PipeBufferWriter) *Command {
+ c.cmdStdout = w
return c
}
-func (c *Command) WithStdin(stdin io.Reader) *Command {
- c.opts.Stdin = stdin
+// WithStdinCopy and WithStdoutCopy are general functions that accept any io.Reader / io.Writer.
+// In this case, Golang exec.Cmd will start new internal goroutines to do io.Copy between pipes and provided Reader/Writer.
+// If the reader or writer is blocked and never returns, then the io.Copy won't finish, then exec.Cmd.Wait won't return, which may cause deadlocks.
+// A typical deadlock example is:
+// * `r,w:=io.Pipe(); cmd.Stdin=r; defer w.Close(); cmd.Run()`: the Run() will never return because stdin reader is blocked forever and w.Close() will never be called.
+// If the reader/writer won't block forever (for example: read from a file or buffer), then these functions are safe to use.
+func (c *Command) WithStdinCopy(w io.Reader) *Command {
+ c.cmdStdin = w
return c
}
-func (c *Command) WithPipelineFunc(f func(context.Context, context.CancelFunc) error) *Command {
+func (c *Command) WithStdoutCopy(w io.Writer) *Command {
+ c.cmdStdout = w
+ return c
+}
+
+// WithPipelineFunc sets the pipeline function for the command.
+// The pipeline function will be called in the Run / Wait function after the command is started successfully.
+// The function can read/write from/to the command's stdio pipes (if any).
+// The pipeline function can cancel (kill) the command by calling ctx.CancelPipeline before the command finishes.
+// The returned error of Run / Wait can be joined errors from the pipeline function, context cause, and command exit error.
+// Caller can get the pipeline function's error (if any) by UnwrapPipelineError.
+func (c *Command) WithPipelineFunc(f func(ctx Context) error) *Command {
c.opts.PipelineFunc = f
return c
}
-func (c *Command) WithUseContextTimeout(useContextTimeout bool) *Command {
- c.opts.UseContextTimeout = useContextTimeout
- return c
-}
-
// WithParentCallerInfo can be used to set the caller info (usually function name) of the parent function of the caller.
// For most cases, "Run" family functions can get its caller info automatically
// But if you need to call "Run" family functions in a wrapper function: "FeatureFunc -> GeneralWrapperFunc -> RunXxx",
// then you can to call this function in GeneralWrapperFunc to set the caller info of FeatureFunc.
// The caller info can only be set once.
func (c *Command) WithParentCallerInfo(optInfo ...string) *Command {
- if c.opts.callerInfo != "" {
+ if c.callerInfo != "" {
return c
}
if len(optInfo) > 0 {
- c.opts.callerInfo = optInfo[0]
+ c.callerInfo = optInfo[0]
return c
}
skip := 1 /*parent "wrap/run" functions*/ + 1 /*this function*/
@@ -325,135 +384,174 @@ func (c *Command) WithParentCallerInfo(optInfo ...string) *Command {
if pos := strings.LastIndex(callerInfo, "/"); pos >= 0 {
callerInfo = callerInfo[pos+1:]
}
- c.opts.callerInfo = callerInfo
+ c.callerInfo = callerInfo
return c
}
-// Run runs the command
-func (c *Command) Run(ctx context.Context) error {
- if len(c.brokenArgs) != 0 {
- log.Error("git command is broken: %s, broken args: %s", c.LogString(), strings.Join(c.brokenArgs, " "))
- return ErrBrokenCommand
+func (c *Command) Start(ctx context.Context) (retErr error) {
+ if c.cmd != nil {
+ // this is a programming error, it will cause serious deadlock problems, so it must be fixed.
+ panic("git command has already been started")
}
- // We must not change the provided options
- timeout := c.opts.Timeout
- if timeout <= 0 {
- timeout = defaultCommandExecutionTimeout
+ defer func() {
+ c.closePipeFiles(c.childrenPipeFiles)
+ if retErr != nil {
+ // release the pipes to avoid resource leak since the command failed to start
+ c.closePipeFiles(c.parentPipeFiles)
+ // if error occurs, we must also finish the task, otherwise, cmdFinished will be called in "Wait" function
+ if c.cmdFinished != nil {
+ c.cmdFinished()
+ }
+ }
+ }()
+
+ if len(c.preErrors) != 0 {
+ // In most cases, such error shouldn't happen. If it happens, log it as error level with more details
+ err := errors.Join(c.preErrors...)
+ log.Error("git command: %s, error: %s", c.LogString(), err)
+ return err
}
cmdLogString := c.LogString()
- if c.opts.callerInfo == "" {
+ if c.callerInfo == "" {
c.WithParentCallerInfo()
}
// these logs are for debugging purposes only, so no guarantee of correctness or stability
- desc := fmt.Sprintf("git.Run(by:%s, repo:%s): %s", c.opts.callerInfo, logArgSanitize(c.opts.Dir), cmdLogString)
+ desc := fmt.Sprintf("git.Run(by:%s, repo:%s): %s", c.callerInfo, logArgSanitize(c.opts.Dir), cmdLogString)
log.Debug("git.Command: %s", desc)
_, span := gtprof.GetTracer().Start(ctx, gtprof.TraceSpanGitRun)
defer span.End()
- span.SetAttributeString(gtprof.TraceAttrFuncCaller, c.opts.callerInfo)
+ span.SetAttributeString(gtprof.TraceAttrFuncCaller, c.callerInfo)
span.SetAttributeString(gtprof.TraceAttrGitCommand, cmdLogString)
- var cancel context.CancelFunc
- var finished context.CancelFunc
-
- if c.opts.UseContextTimeout {
- ctx, cancel, finished = process.GetManager().AddContext(ctx, desc)
+ if c.opts.Timeout <= 0 {
+ c.cmdCtx, c.cmdCancel, c.cmdFinished = process.GetManager().AddContext(ctx, desc)
} else {
- ctx, cancel, finished = process.GetManager().AddContextTimeout(ctx, timeout, desc)
+ c.cmdCtx, c.cmdCancel, c.cmdFinished = process.GetManager().AddContextTimeout(ctx, c.opts.Timeout, desc)
}
- defer finished()
- startTime := time.Now()
+ c.cmdStartTime = time.Now()
- cmd := exec.CommandContext(ctx, c.prog, append(c.configArgs, c.args...)...)
- c.cmd = cmd // for debug purpose only
+ c.cmd = exec.CommandContext(c.cmdCtx, c.prog, append(c.configArgs, c.args...)...)
if c.opts.Env == nil {
- cmd.Env = os.Environ()
+ c.cmd.Env = os.Environ()
} else {
- cmd.Env = c.opts.Env
+ c.cmd.Env = c.opts.Env
}
- process.SetSysProcAttribute(cmd)
- cmd.Env = append(cmd.Env, CommonGitCmdEnvs()...)
- cmd.Dir = c.opts.Dir
- cmd.Stdout = c.opts.Stdout
- cmd.Stderr = c.opts.Stderr
- cmd.Stdin = c.opts.Stdin
- if err := cmd.Start(); err != nil {
- return err
+ process.SetSysProcAttribute(c.cmd)
+ c.cmd.Env = append(c.cmd.Env, CommonGitCmdEnvs()...)
+ c.cmd.Dir = c.opts.Dir
+ c.cmd.Stdout = c.cmdStdout
+ c.cmd.Stdin = c.cmdStdin
+ c.cmd.Stderr = c.cmdStderr
+ return c.cmd.Start()
+}
+
+func (c *Command) closePipeFiles(files []*os.File) {
+ for _, f := range files {
+ _ = f.Close()
}
+}
+
+func (c *Command) discardPipeReaders(files []*os.File) {
+ for _, f := range files {
+ _, _ = io.Copy(io.Discard, f)
+ }
+}
+
+func (c *Command) Wait() error {
+ defer func() {
+		// The reader in another goroutine might still be reading the stdout, so we shouldn't close the pipes here
+ // MakeStdoutPipe returns a closer function to force callers to close the pipe correctly
+ // Here we only need to mark the command as finished
+ c.cmdFinished()
+ }()
if c.opts.PipelineFunc != nil {
- err := c.opts.PipelineFunc(ctx, cancel)
- if err != nil {
- cancel()
- _ = cmd.Wait()
- return err
+ errPipeline := c.opts.PipelineFunc(&cmdContext{Context: c.cmdCtx, cmd: c})
+
+ if context.Cause(c.cmdCtx) == nil {
+ // if the context is not canceled explicitly, we need to discard the unread data,
+ // and wait for the command to exit normally, and then get its exit code
+ c.discardPipeReaders(c.parentPipeReaders)
+ } // else: canceled command will be killed, and the exit code is caused by kill
+
+ // after the pipeline function returns, we can safely close the pipes, then wait for the command to exit
+ c.closePipeFiles(c.parentPipeFiles)
+ errWait := c.cmd.Wait()
+ errCause := context.Cause(c.cmdCtx) // in case the cause is set during Wait(), get the final cancel cause
+
+ if unwrapped, ok := UnwrapPipelineError(errCause); ok {
+ if unwrapped != errPipeline {
+ panic("unwrapped context pipeline error should be the same one returned by pipeline function")
+ }
+ if unwrapped == nil {
+ // the pipeline function declares that there is no error, and it cancels (kills) the command ahead,
+ // so we should ignore the errors from "wait" and "cause"
+ errWait, errCause = nil, nil
+ }
}
+
+		// some legacy code still needs to access the error returned by the pipeline function by "==" but not "errors.Is"
+ // so we need to make sure the original error is able to be unwrapped by UnwrapPipelineError
+ return errors.Join(wrapPipelineError(errPipeline), errCause, errWait)
}
- err := cmd.Wait()
- elapsed := time.Since(startTime)
+ // there might be other goroutines using the context or pipes, so we just wait for the command to finish
+ errWait := c.cmd.Wait()
+ elapsed := time.Since(c.cmdStartTime)
if elapsed > time.Second {
- log.Debug("slow git.Command.Run: %s (%s)", c, elapsed)
+ log.Debug("slow git.Command.Run: %s (%s)", c, elapsed) // TODO: no need to log this for long-running commands
}
- // We need to check if the context is canceled by the program on Windows.
- // This is because Windows does not have signal checking when terminating the process.
- // It always returns exit code 1, unlike Linux, which has many exit codes for signals.
- // `err.Error()` returns "exit status 1" when using the `git check-attr` command after the context is canceled.
- if runtime.GOOS == "windows" &&
- err != nil &&
- (err.Error() == "" || err.Error() == "exit status 1") &&
- cmd.ProcessState.ExitCode() == 1 &&
- ctx.Err() == context.Canceled {
- return ctx.Err()
- }
+ // Here the logic is different from "PipelineFunc" case,
+ // because PipelineFunc can return error if it fails, it knows whether it succeeds or fails.
+ // But in normal case, the caller just runs the git command, the command's exit code is the source of truth.
+	// If the caller needs to know whether the command error is caused by cancellation, it should check the "err" by itself.
+ errCause := context.Cause(c.cmdCtx)
+ return errors.Join(errCause, errWait)
+}
- if err != nil && ctx.Err() != context.DeadlineExceeded {
+func (c *Command) StartWithStderr(ctx context.Context) RunStdError {
+ if c.cmdStderr != nil {
+ panic("caller-provided stderr receiver doesn't work with managed stderr buffer")
+ }
+ c.cmdManagedStderr = &bytes.Buffer{}
+ c.cmdStderr = c.cmdManagedStderr
+ err := c.Start(ctx)
+ if err != nil {
+ return &runStdError{err: err}
+ }
+ return nil
+}
+
+func (c *Command) WaitWithStderr() RunStdError {
+ if c.cmdManagedStderr == nil {
+ panic("managed stderr buffer is not initialized")
+ }
+ errWait := c.Wait()
+ if errWait == nil {
+ // if no exec error but only stderr output, the stderr output is still saved in "c.cmdManagedStderr" and can be read later
+ return nil
+ }
+ return &runStdError{err: errWait, stderr: util.UnsafeBytesToString(c.cmdManagedStderr.Bytes())}
+}
+
+func (c *Command) RunWithStderr(ctx context.Context) RunStdError {
+ if err := c.StartWithStderr(ctx); err != nil {
+ return &runStdError{err: err}
+ }
+ return c.WaitWithStderr()
+}
+
+func (c *Command) Run(ctx context.Context) (err error) {
+ if err = c.Start(ctx); err != nil {
return err
}
-
- return ctx.Err()
-}
-
-type RunStdError interface {
- error
- Unwrap() error
- Stderr() string
-}
-
-type runStdError struct {
- err error
- stderr string
- errMsg string
-}
-
-func (r *runStdError) Error() string {
- // FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message
- // But a lof of code only checks `strings.Contains(err.Error(), "git error")`
- if r.errMsg == "" {
- r.errMsg = ConcatenateError(r.err, r.stderr).Error()
- }
- return r.errMsg
-}
-
-func (r *runStdError) Unwrap() error {
- return r.err
-}
-
-func (r *runStdError) Stderr() string {
- return r.stderr
-}
-
-func IsErrorExitCode(err error, code int) bool {
- var exitError *exec.ExitError
- if errors.As(err, &exitError) {
- return exitError.ExitCode() == code
- }
- return false
+ return c.Wait()
}
// RunStdString runs the command and returns stdout/stderr as string. and store stderr to returned error (err combined with stderr).
@@ -467,22 +565,16 @@ func (c *Command) RunStdBytes(ctx context.Context) (stdout, stderr []byte, runEr
return c.WithParentCallerInfo().runStdBytes(ctx)
}
-func (c *Command) runStdBytes(ctx context.Context) ( /*stdout*/ []byte /*stderr*/, []byte /*runErr*/, RunStdError) {
- if c.opts.Stdout != nil || c.opts.Stderr != nil {
- // we must panic here, otherwise there would be bugs if developers set Stdin/Stderr by mistake, and it would be very difficult to debug
+func (c *Command) runStdBytes(ctx context.Context) ([]byte, []byte, RunStdError) {
+ if c.cmdStdout != nil || c.cmdStderr != nil {
+ // it must panic here, otherwise there would be bugs if developers set other Stdin/Stderr by mistake, and it would be very difficult to debug
panic("stdout and stderr field must be nil when using RunStdBytes")
}
stdoutBuf := &bytes.Buffer{}
- stderrBuf := &bytes.Buffer{}
- err := c.WithParentCallerInfo().
- WithStdout(stdoutBuf).
- WithStderr(stderrBuf).
- Run(ctx)
- if err != nil {
- // FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message
- // But a lot of code depends on it, so we have to keep this behavior
- return nil, stderrBuf.Bytes(), &runStdError{err: err, stderr: util.UnsafeBytesToString(stderrBuf.Bytes())}
- }
- // even if there is no err, there could still be some stderr output
- return stdoutBuf.Bytes(), stderrBuf.Bytes(), nil
+ err := c.WithParentCallerInfo().WithStdoutBuffer(stdoutBuf).RunWithStderr(ctx)
+ return stdoutBuf.Bytes(), c.cmdManagedStderr.Bytes(), err
+}
+
+func (c *Command) DebugKill() {
+ _ = c.cmd.Process.Kill()
}
diff --git a/modules/git/gitcmd/command_race_test.go b/modules/git/gitcmd/command_race_test.go
deleted file mode 100644
index c2f0b124a2..0000000000
--- a/modules/git/gitcmd/command_race_test.go
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2017 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-//go:build race
-
-package gitcmd
-
-import (
- "context"
- "testing"
- "time"
-)
-
-func TestRunWithContextNoTimeout(t *testing.T) {
- maxLoops := 10
-
- // 'git --version' does not block so it must be finished before the timeout triggered.
- cmd := NewCommand("--version")
- for i := 0; i < maxLoops; i++ {
- if err := cmd.Run(t.Context()); err != nil {
- t.Fatal(err)
- }
- }
-}
-
-func TestRunWithContextTimeout(t *testing.T) {
- maxLoops := 10
-
- // 'git hash-object --stdin' blocks on stdin so we can have the timeout triggered.
- cmd := NewCommand("hash-object", "--stdin")
- for i := 0; i < maxLoops; i++ {
- if err := cmd.WithTimeout(1 * time.Millisecond).Run(t.Context()); err != nil {
- if err != context.DeadlineExceeded {
- t.Fatalf("Testing %d/%d: %v", i, maxLoops, err)
- }
- }
- }
-}
diff --git a/modules/git/gitcmd/command_test.go b/modules/git/gitcmd/command_test.go
index 1ba8b2e3e4..662356bc3f 100644
--- a/modules/git/gitcmd/command_test.go
+++ b/modules/git/gitcmd/command_test.go
@@ -4,26 +4,35 @@
package gitcmd
import (
+ "context"
"fmt"
"os"
"testing"
+ "time"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/tempdir"
+ "code.gitea.io/gitea/modules/testlogger"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
-func TestMain(m *testing.M) {
+func testMain(m *testing.M) int {
+ // FIXME: GIT-PACKAGE-DEPENDENCY: the dependency is not right.
+ // "setting.Git.HomePath" is initialized in "git" package but really used in "gitcmd" package
gitHomePath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("git-home")
if err != nil {
- _, _ = fmt.Fprintf(os.Stderr, "unable to create temp dir: %v", err)
- os.Exit(1)
+ testlogger.Panicf("failed to create temp dir: %v", err)
}
defer cleanup()
setting.Git.HomePath = gitHomePath
- os.Exit(m.Run())
+ return m.Run()
+}
+
+func TestMain(m *testing.M) {
+ os.Exit(testMain(m))
}
func TestRunWithContextStd(t *testing.T) {
@@ -42,7 +51,7 @@ func TestRunWithContextStd(t *testing.T) {
assert.Equal(t, stderr, err.Stderr())
assert.Equal(t, "fatal: Not a valid object name no-such\n", err.Stderr())
// FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message
- assert.Equal(t, "exit status 128 - fatal: Not a valid object name no-such\n", err.Error())
+ assert.Equal(t, "exit status 128 - fatal: Not a valid object name no-such", err.Error())
assert.Empty(t, stdout)
}
}
@@ -54,7 +63,7 @@ func TestRunWithContextStd(t *testing.T) {
assert.Equal(t, string(stderr), err.Stderr())
assert.Equal(t, "fatal: Not a valid object name no-such\n", err.Stderr())
// FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message
- assert.Equal(t, "exit status 128 - fatal: Not a valid object name no-such\n", err.Error())
+ assert.Equal(t, "exit status 128 - fatal: Not a valid object name no-such", err.Error())
assert.Empty(t, stdout)
}
}
@@ -97,3 +106,29 @@ func TestCommandString(t *testing.T) {
cmd = NewCommand("url: https://a:b@c/", "/root/dir-a/dir-b")
assert.Equal(t, cmd.prog+` "url: https://sanitized-credential@c/" .../dir-a/dir-b`, cmd.LogString())
}
+
+func TestRunStdError(t *testing.T) {
+ e := &runStdError{stderr: "some error"}
+ var err RunStdError = e
+
+ var asErr RunStdError
+ require.ErrorAs(t, err, &asErr)
+ require.Equal(t, "some error", asErr.Stderr())
+
+ require.ErrorAs(t, fmt.Errorf("wrapped %w", err), &asErr)
+}
+
+func TestRunWithContextTimeout(t *testing.T) {
+ t.Run("NoTimeout", func(t *testing.T) {
+ // 'git --version' does not block so it must be finished before the timeout triggered.
+ err := NewCommand("--version").Run(t.Context())
+ require.NoError(t, err)
+ })
+ t.Run("WithTimeout", func(t *testing.T) {
+ cmd := NewCommand("hash-object", "--stdin")
+ _, _, pipeClose := cmd.MakeStdinStdoutPipe()
+ defer pipeClose()
+ err := cmd.WithTimeout(1 * time.Millisecond).Run(t.Context())
+ require.ErrorIs(t, err, context.DeadlineExceeded)
+ })
+}
diff --git a/modules/git/gitcmd/context.go b/modules/git/gitcmd/context.go
new file mode 100644
index 0000000000..a32f92ff3a
--- /dev/null
+++ b/modules/git/gitcmd/context.go
@@ -0,0 +1,32 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitcmd
+
+import (
+ "context"
+)
+
+type Context interface {
+ context.Context
+
+ // CancelPipeline is a helper function to cancel the command context (kill the command) with a specific error cause,
+ // it returns the same error for convenience to break the PipelineFunc easily
+ CancelPipeline(err error) error
+
+ // In the future, this interface will be extended to support stdio pipe readers/writers
+}
+
+type cmdContext struct {
+ context.Context
+ cmd *Command
+}
+
+func (c *cmdContext) CancelPipeline(err error) error {
+ // pipelineError is used to distinguish between:
+ // * context canceled by pipeline caller with/without error (normal cancellation)
+ // * context canceled by parent context (still context.Canceled error)
+ // * other causes
+ c.cmd.cmdCancel(pipelineError{err})
+ return err
+}
diff --git a/modules/git/gitcmd/error.go b/modules/git/gitcmd/error.go
new file mode 100644
index 0000000000..066b37f10d
--- /dev/null
+++ b/modules/git/gitcmd/error.go
@@ -0,0 +1,101 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitcmd
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "os/exec"
+ "strings"
+)
+
+type RunStdError interface {
+ error
+ Unwrap() error
+ Stderr() string
+}
+
+type runStdError struct {
+ err error // usually the low-level error like `*exec.ExitError`
+ stderr string // git command's stderr output
+ errMsg string // the cached error message for Error() method
+}
+
+func (r *runStdError) Error() string {
+ // FIXME: GIT-CMD-STDERR: it is a bad design, the stderr should not be put in the error message
+ // But a lot of code only checks `strings.Contains(err.Error(), "git error")`
+ if r.errMsg == "" {
+ r.errMsg = fmt.Sprintf("%s - %s", r.err.Error(), strings.TrimSpace(r.stderr))
+ }
+ return r.errMsg
+}
+
+func (r *runStdError) Unwrap() error {
+ return r.err
+}
+
+func (r *runStdError) Stderr() string {
+ return r.stderr
+}
+
+func ErrorAsStderr(err error) (string, bool) {
+ var runErr RunStdError
+ if errors.As(err, &runErr) {
+ return runErr.Stderr(), true
+ }
+ return "", false
+}
+
+func StderrHasPrefix(err error, prefix string) bool {
+ stderr, ok := ErrorAsStderr(err)
+ if !ok {
+ return false
+ }
+ return strings.HasPrefix(stderr, prefix)
+}
+
+func IsErrorExitCode(err error, code int) bool {
+ var exitError *exec.ExitError
+ if errors.As(err, &exitError) {
+ return exitError.ExitCode() == code
+ }
+ return false
+}
+
+func IsErrorSignalKilled(err error) bool {
+ var exitError *exec.ExitError
+ return errors.As(err, &exitError) && exitError.String() == "signal: killed"
+}
+
+func IsErrorCanceledOrKilled(err error) bool {
+ // When "cancel()" a git command's context, the returned error of "Run()" could be one of them:
+ // - context.Canceled
+ // - *exec.ExitError: "signal: killed"
+	// TODO: in the future, we need to use a unified error type from gitcmd.Run to check whether it is manually canceled
+ return errors.Is(err, context.Canceled) || IsErrorSignalKilled(err)
+}
+
+type pipelineError struct {
+ error
+}
+
+func (e pipelineError) Unwrap() error {
+ return e.error
+}
+
+func wrapPipelineError(err error) error {
+ if err == nil {
+ return nil
+ }
+ return pipelineError{err}
+}
+
+func UnwrapPipelineError(err error) (error, bool) { //nolint:revive // this is for error unwrapping
+ var pe pipelineError
+ if errors.As(err, &pe) {
+ return pe.error, true
+ }
+ return nil, false
+}
diff --git a/modules/git/gitcmd/pipe.go b/modules/git/gitcmd/pipe.go
new file mode 100644
index 0000000000..d0ce3e2dc6
--- /dev/null
+++ b/modules/git/gitcmd/pipe.go
@@ -0,0 +1,87 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitcmd
+
+import (
+ "io"
+ "os"
+)
+
+type PipeBufferReader interface {
+ // Read should be used in the same goroutine as command's Wait
+	// When the Reader is in one goroutine and the command's Wait is in another goroutine, then when the command exits, the pipe will be closed:
+ // * If the Reader goroutine reads faster, it will read all remaining data and then get io.EOF
+ // * But this io.EOF doesn't mean the Reader has gotten complete data, the data might still be corrupted
+ // * If the Reader goroutine reads slower, it will get os.ErrClosed because the os.Pipe is closed ahead when the command exits
+ //
+	// When using 2 goroutines, there is no clear solution to distinguish these two cases or to let the Reader know whether the data is complete
+	// Avoid using the Reader in a different goroutine than the command's Wait if the Read error needs to be handled.
+ Read(p []byte) (n int, err error)
+ Bytes() []byte
+}
+
+type PipeBufferWriter interface {
+ Write(p []byte) (n int, err error)
+ Bytes() []byte
+}
+
+type PipeReader interface {
+ io.ReadCloser
+ internalOnly()
+}
+
+type pipeReader struct {
+ f *os.File
+}
+
+func (r *pipeReader) internalOnly() {}
+
+func (r *pipeReader) Read(p []byte) (n int, err error) {
+ return r.f.Read(p)
+}
+
+func (r *pipeReader) Close() error {
+ return r.f.Close()
+}
+
+type PipeWriter interface {
+ io.WriteCloser
+ internalOnly()
+}
+
+type pipeWriter struct {
+ f *os.File
+}
+
+func (w *pipeWriter) internalOnly() {}
+
+func (w *pipeWriter) Close() error {
+ return w.f.Close()
+}
+
+func (w *pipeWriter) Write(p []byte) (n int, err error) {
+ return w.f.Write(p)
+}
+
+func (w *pipeWriter) DrainBeforeClose() error {
+ return nil
+}
+
+type pipeNull struct {
+ err error
+}
+
+func (p *pipeNull) internalOnly() {}
+
+func (p *pipeNull) Read([]byte) (n int, err error) {
+ return 0, p.err
+}
+
+func (p *pipeNull) Write([]byte) (n int, err error) {
+ return 0, p.err
+}
+
+func (p *pipeNull) Close() error {
+ return nil
+}
diff --git a/modules/git/gitcmd/utils.go b/modules/git/gitcmd/utils.go
deleted file mode 100644
index ee24eb6a9a..0000000000
--- a/modules/git/gitcmd/utils.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2025 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package gitcmd
-
-import "fmt"
-
-// ConcatenateError concatenats an error with stderr string
-func ConcatenateError(err error, stderr string) error {
- if len(stderr) == 0 {
- return err
- }
- return fmt.Errorf("%w - %s", err, stderr)
-}
diff --git a/modules/git/grep.go b/modules/git/grep.go
index ed69a788a4..051a7a1d40 100644
--- a/modules/git/grep.go
+++ b/modules/git/grep.go
@@ -5,11 +5,9 @@ package git
import (
"bufio"
- "bytes"
"context"
"errors"
"fmt"
- "os"
"slices"
"strconv"
"strings"
@@ -42,15 +40,6 @@ type GrepOptions struct {
}
func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepOptions) ([]*GrepResult, error) {
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- return nil, fmt.Errorf("unable to create os pipe to grep: %w", err)
- }
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
-
/*
The output is like this ( "^@" means \x00):
@@ -83,14 +72,11 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO
cmd.AddDynamicArguments(util.IfZero(opts.RefName, "HEAD"))
cmd.AddDashesAndList(opts.PathspecList...)
opts.MaxResultLimit = util.IfZero(opts.MaxResultLimit, 50)
- stderr := bytes.Buffer{}
- err = cmd.WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithStderr(&stderr).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
- defer stdoutReader.Close()
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+ err := cmd.WithDir(repo.Path).
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
isInBlock := false
rd := bufio.NewReaderSize(stdoutReader, util.IfZero(opts.MaxLineLength, 16*1024))
var res *GrepResult
@@ -116,8 +102,7 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO
}
if line == "" {
if len(results) >= opts.MaxResultLimit {
- cancel()
- break
+ return ctx.CancelPipeline(nil)
}
isInBlock = false
continue
@@ -133,17 +118,17 @@ func GrepSearch(ctx context.Context, repo *Repository, search string, opts GrepO
}
return nil
}).
- Run(ctx)
+ RunWithStderr(ctx)
// git grep exits by cancel (killed), usually it is caused by the limit of results
- if gitcmd.IsErrorExitCode(err, -1) && stderr.Len() == 0 {
+ if gitcmd.IsErrorExitCode(err, -1) && err.Stderr() == "" {
return results, nil
}
// git grep exits with 1 if no results are found
- if gitcmd.IsErrorExitCode(err, 1) && stderr.Len() == 0 {
+ if gitcmd.IsErrorExitCode(err, 1) && err.Stderr() == "" {
return nil, nil
}
if err != nil && !errors.Is(err, context.Canceled) {
- return nil, fmt.Errorf("unable to run git grep: %w, stderr: %s", err, stderr.String())
+ return nil, fmt.Errorf("unable to run git grep: %w", err)
}
return results, nil
}
diff --git a/modules/git/languagestats/language_stats_gogit.go b/modules/git/languagestats/language_stats_gogit.go
index 418c05b157..ec03ca3159 100644
--- a/modules/git/languagestats/language_stats_gogit.go
+++ b/modules/git/languagestats/language_stats_gogit.go
@@ -108,7 +108,7 @@ func GetLanguageStats(repo *git_module.Repository, commitID string) (map[string]
if (!isVendored.Has() && analyze.IsVendor(f.Name)) ||
enry.IsDotFile(f.Name) ||
(!isDocumentation.Has() && enry.IsDocumentation(f.Name)) ||
- enry.IsConfiguration(f.Name) {
+ (!isDetectable.Has() && enry.IsConfiguration(f.Name)) {
return nil
}
diff --git a/modules/git/languagestats/language_stats_nogogit.go b/modules/git/languagestats/language_stats_nogogit.go
index 94cf9fff8c..442313d495 100644
--- a/modules/git/languagestats/language_stats_nogogit.go
+++ b/modules/git/languagestats/language_stats_nogogit.go
@@ -22,33 +22,28 @@ import (
func GetLanguageStats(repo *git.Repository, commitID string) (map[string]int64, error) {
// We will feed the commit IDs in order into cat-file --batch, followed by blobs as necessary.
// so let's create a batch stdin and stdout
- batchStdinWriter, batchReader, cancel, err := repo.CatFileBatch(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
- writeID := func(id string) error {
- _, err := batchStdinWriter.Write([]byte(id + "\n"))
- return err
- }
-
- if err := writeID(commitID); err != nil {
+ commitInfo, batchReader, err := batch.QueryContent(commitID)
+ if err != nil {
return nil, err
}
- shaBytes, typ, size, err := git.ReadBatchLine(batchReader)
- if typ != "commit" {
+ if commitInfo.Type != "commit" {
log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
return nil, git.ErrNotExist{ID: commitID}
}
- sha, err := git.NewIDFromString(string(shaBytes))
+ sha, err := git.NewIDFromString(commitInfo.ID)
if err != nil {
log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
return nil, git.ErrNotExist{ID: commitID}
}
- commit, err := git.CommitFromReader(repo, sha, io.LimitReader(batchReader, size))
+ commit, err := git.CommitFromReader(repo, sha, io.LimitReader(batchReader, commitInfo.Size))
if err != nil {
log.Debug("Unable to get commit for: %s. Err: %v", commitID, err)
return nil, err
@@ -137,27 +132,23 @@ func GetLanguageStats(repo *git.Repository, commitID string) (map[string]int64,
if (!isVendored.Has() && analyze.IsVendor(f.Name())) ||
enry.IsDotFile(f.Name()) ||
(!isDocumentation.Has() && enry.IsDocumentation(f.Name())) ||
- enry.IsConfiguration(f.Name()) {
+ (!isDetectable.Has() && enry.IsConfiguration(f.Name())) {
continue
}
// If content can not be read or file is too big just do detection by filename
if f.Size() <= bigFileSize {
- if err := writeID(f.ID.String()); err != nil {
- return nil, err
- }
- _, _, size, err := git.ReadBatchLine(batchReader)
+ info, _, err := batch.QueryContent(f.ID.String())
if err != nil {
- log.Debug("Error reading blob: %s Err: %v", f.ID.String(), err)
return nil, err
}
- sizeToRead := size
+ sizeToRead := info.Size
discard := int64(1)
- if size > fileSizeLimit {
+ if info.Size > fileSizeLimit {
sizeToRead = fileSizeLimit
- discard = size - fileSizeLimit + 1
+ discard = info.Size - fileSizeLimit + 1
}
_, err = contentBuf.ReadFrom(io.LimitReader(batchReader, sizeToRead))
diff --git a/modules/git/languagestats/main_test.go b/modules/git/languagestats/main_test.go
index b8f9ded005..bf860f2a18 100644
--- a/modules/git/languagestats/main_test.go
+++ b/modules/git/languagestats/main_test.go
@@ -4,37 +4,11 @@
package languagestats
import (
- "fmt"
- "os"
"testing"
"code.gitea.io/gitea/modules/git"
- "code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/util"
)
-func testRun(m *testing.M) error {
- gitHomePath, err := os.MkdirTemp(os.TempDir(), "git-home")
- if err != nil {
- return fmt.Errorf("unable to create temp dir: %w", err)
- }
- defer util.RemoveAll(gitHomePath)
- setting.Git.HomePath = gitHomePath
-
- if err = git.InitFull(); err != nil {
- return fmt.Errorf("failed to call Init: %w", err)
- }
-
- exitCode := m.Run()
- if exitCode != 0 {
- return fmt.Errorf("run test failed, ExitCode=%d", exitCode)
- }
- return nil
-}
-
func TestMain(m *testing.M) {
- if err := testRun(m); err != nil {
- _, _ = fmt.Fprintf(os.Stderr, "Test failed: %v", err)
- os.Exit(1)
- }
+ git.RunGitTests(m)
}
diff --git a/modules/git/last_commit_cache.go b/modules/git/last_commit_cache.go
index cff2556083..a013773b47 100644
--- a/modules/git/last_commit_cache.go
+++ b/modules/git/last_commit_cache.go
@@ -55,12 +55,12 @@ func (c *LastCommitCache) Put(ref, entryPath, commitID string) error {
// Get gets the last commit information by commit id and entry path
func (c *LastCommitCache) Get(ref, entryPath string) (*Commit, error) {
if c == nil || c.cache == nil {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when cache is not available
}
commitID, ok := c.cache.Get(getCacheKey(c.repoPath, ref, entryPath))
if !ok || commitID == "" {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when cache miss
}
log.Debug("LastCommitCache hit level 1: [%s:%s:%s]", ref, entryPath, commitID)
diff --git a/modules/git/log_name_status.go b/modules/git/log_name_status.go
index 72e513000b..6e6d9985ae 100644
--- a/modules/git/log_name_status.go
+++ b/modules/git/log_name_status.go
@@ -15,25 +15,12 @@ import (
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/git/gitcmd"
-
- "github.com/djherbis/buffer"
- "github.com/djherbis/nio/v3"
+ "code.gitea.io/gitea/modules/log"
)
// LogNameStatusRepo opens git log --raw in the provided repo and returns a stdin pipe, a stdout reader and cancel function
func LogNameStatusRepo(ctx context.Context, repository, head, treepath string, paths ...string) (*bufio.Reader, func()) {
- // We often want to feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary.
- // so let's create a batch stdin and stdout
- stdoutReader, stdoutWriter := nio.Pipe(buffer.New(32 * 1024))
-
// Lets also create a context so that we can absolutely ensure that the command should die when we're done
- ctx, ctxCancel := context.WithCancel(ctx)
-
- cancel := func() {
- ctxCancel()
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }
cmd := gitcmd.NewCommand()
cmd.AddArguments("log", "--name-status", "-c", "--format=commit%x00%H %P%x00", "--parents", "--no-renames", "-t", "-z").AddDynamicArguments(head)
@@ -63,24 +50,21 @@ func LogNameStatusRepo(ctx context.Context, repository, head, treepath string, p
}
cmd.AddDashesAndList(files...)
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ ctx, ctxCancel := context.WithCancel(ctx)
go func() {
- stderr := strings.Builder{}
- err := cmd.WithDir(repository).
- WithStdout(stdoutWriter).
- WithStderr(&stderr).
- Run(ctx)
- if err != nil {
- _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
- return
+ err := cmd.WithDir(repository).RunWithStderr(ctx)
+ if err != nil && !errors.Is(err, context.Canceled) {
+ log.Error("Unable to run git command %v: %v", cmd.LogString(), err)
}
-
- _ = stdoutWriter.Close()
}()
- // For simplicities sake we'll us a buffered reader to read from the cat-file --batch
bufReader := bufio.NewReaderSize(stdoutReader, 32*1024)
- return bufReader, cancel
+ return bufReader, func() {
+ ctxCancel()
+ stdoutReaderClose()
+ }
}
// LogNameStatusRepoParser parses a git log raw output from LogRawRepo
@@ -122,7 +106,7 @@ func (g *LogNameStatusRepoParser) Next(treepath string, paths2ids map[string]int
case bufio.ErrBufferFull:
g.buffull = true
case io.EOF:
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to signal EOF
default:
return nil, err
}
@@ -137,7 +121,7 @@ func (g *LogNameStatusRepoParser) Next(treepath string, paths2ids map[string]int
case bufio.ErrBufferFull:
g.buffull = true
case io.EOF:
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to signal EOF
default:
return nil, err
}
diff --git a/modules/git/parse.go b/modules/git/parse.go
index d4ff0ecb23..94020e690d 100644
--- a/modules/git/parse.go
+++ b/modules/git/parse.go
@@ -46,8 +46,8 @@ func parseLsTreeLine(line []byte) (*LsTreeEntry, error) {
entry.Size = optional.Some(size)
}
- entry.EntryMode, err = ParseEntryMode(string(entryMode))
- if err != nil || entry.EntryMode == EntryModeNoEntry {
+ entry.EntryMode = ParseEntryMode(string(entryMode))
+ if entry.EntryMode == EntryModeNoEntry {
return nil, fmt.Errorf("invalid ls-tree output (invalid mode): %q, err: %w", line, err)
}
diff --git a/modules/git/parse_treeentry.go b/modules/git/parse_treeentry.go
index e14d9f17b5..d46cd3344d 100644
--- a/modules/git/parse_treeentry.go
+++ b/modules/git/parse_treeentry.go
@@ -4,7 +4,6 @@
package git
import (
- "bufio"
"bytes"
"fmt"
"io"
@@ -47,7 +46,7 @@ func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) {
return entries, nil
}
-func catBatchParseTreeEntries(objectFormat ObjectFormat, ptree *Tree, rd *bufio.Reader, sz int64) ([]*TreeEntry, error) {
+func catBatchParseTreeEntries(objectFormat ObjectFormat, ptree *Tree, rd BufferedReader, sz int64) ([]*TreeEntry, error) {
fnameBuf := make([]byte, 4096)
modeBuf := make([]byte, 40)
shaBuf := make([]byte, objectFormat.FullLength())
diff --git a/modules/git/pipeline/catfile.go b/modules/git/pipeline/catfile.go
index a4d1ff64cf..3d005e28f1 100644
--- a/modules/git/pipeline/catfile.go
+++ b/modules/git/pipeline/catfile.go
@@ -5,81 +5,34 @@ package pipeline
import (
"bufio"
- "bytes"
"context"
- "fmt"
"io"
"strconv"
"strings"
- "sync"
"code.gitea.io/gitea/modules/git/gitcmd"
- "code.gitea.io/gitea/modules/log"
)
// CatFileBatchCheck runs cat-file with --batch-check
-func CatFileBatchCheck(ctx context.Context, shasToCheckReader *io.PipeReader, catFileCheckWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath string) {
- defer wg.Done()
- defer shasToCheckReader.Close()
- defer catFileCheckWriter.Close()
-
- stderr := new(bytes.Buffer)
- var errbuf strings.Builder
- cmd := gitcmd.NewCommand("cat-file", "--batch-check")
- if err := cmd.WithDir(tmpBasePath).
- WithStdin(shasToCheckReader).
- WithStdout(catFileCheckWriter).
- WithStderr(stderr).
- Run(ctx); err != nil {
- _ = catFileCheckWriter.CloseWithError(fmt.Errorf("git cat-file --batch-check [%s]: %w - %s", tmpBasePath, err, errbuf.String()))
- }
+func CatFileBatchCheck(ctx context.Context, cmd *gitcmd.Command, tmpBasePath string) error {
+ cmd.AddArguments("cat-file", "--batch-check")
+ return cmd.WithDir(tmpBasePath).RunWithStderr(ctx)
}
// CatFileBatchCheckAllObjects runs cat-file with --batch-check --batch-all
-func CatFileBatchCheckAllObjects(ctx context.Context, catFileCheckWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath string, errChan chan<- error) {
- defer wg.Done()
- defer catFileCheckWriter.Close()
-
- stderr := new(bytes.Buffer)
- var errbuf strings.Builder
- cmd := gitcmd.NewCommand("cat-file", "--batch-check", "--batch-all-objects")
- if err := cmd.WithDir(tmpBasePath).
- WithStdout(catFileCheckWriter).
- WithStderr(stderr).
- Run(ctx); err != nil {
- log.Error("git cat-file --batch-check --batch-all-object [%s]: %v - %s", tmpBasePath, err, errbuf.String())
- err = fmt.Errorf("git cat-file --batch-check --batch-all-object [%s]: %w - %s", tmpBasePath, err, errbuf.String())
- _ = catFileCheckWriter.CloseWithError(err)
- errChan <- err
- }
+func CatFileBatchCheckAllObjects(ctx context.Context, cmd *gitcmd.Command, tmpBasePath string) error {
+ return cmd.AddArguments("cat-file", "--batch-check", "--batch-all-objects").WithDir(tmpBasePath).RunWithStderr(ctx)
}
// CatFileBatch runs cat-file --batch
-func CatFileBatch(ctx context.Context, shasToBatchReader *io.PipeReader, catFileBatchWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath string) {
- defer wg.Done()
- defer shasToBatchReader.Close()
- defer catFileBatchWriter.Close()
-
- stderr := new(bytes.Buffer)
- var errbuf strings.Builder
- if err := gitcmd.NewCommand("cat-file", "--batch").
- WithDir(tmpBasePath).
- WithStdin(shasToBatchReader).
- WithStdout(catFileBatchWriter).
- WithStderr(stderr).
- Run(ctx); err != nil {
- _ = shasToBatchReader.CloseWithError(fmt.Errorf("git rev-list [%s]: %w - %s", tmpBasePath, err, errbuf.String()))
- }
+func CatFileBatch(ctx context.Context, cmd *gitcmd.Command, tmpBasePath string) error {
+ return cmd.AddArguments("cat-file", "--batch").WithDir(tmpBasePath).RunWithStderr(ctx)
}
// BlobsLessThan1024FromCatFileBatchCheck reads a pipeline from cat-file --batch-check and returns the blobs <1024 in size
-func BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader *io.PipeReader, shasToBatchWriter *io.PipeWriter, wg *sync.WaitGroup) {
- defer wg.Done()
- defer catFileCheckReader.Close()
- scanner := bufio.NewScanner(catFileCheckReader)
- defer func() {
- _ = shasToBatchWriter.CloseWithError(scanner.Err())
- }()
+func BlobsLessThan1024FromCatFileBatchCheck(in io.ReadCloser, out io.WriteCloser) error {
+ defer out.Close()
+ scanner := bufio.NewScanner(in)
for scanner.Scan() {
line := scanner.Text()
if len(line) == 0 {
@@ -95,12 +48,12 @@ func BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader *io.PipeReader, s
}
toWrite := []byte(fields[0] + "\n")
for len(toWrite) > 0 {
- n, err := shasToBatchWriter.Write(toWrite)
+ n, err := out.Write(toWrite)
if err != nil {
- _ = catFileCheckReader.CloseWithError(err)
- break
+ return err
}
toWrite = toWrite[n:]
}
}
+ return scanner.Err()
}
diff --git a/modules/git/pipeline/lfs_common.go b/modules/git/pipeline/lfs_common.go
index 188e7d4d65..914aefbeaf 100644
--- a/modules/git/pipeline/lfs_common.go
+++ b/modules/git/pipeline/lfs_common.go
@@ -4,7 +4,6 @@
package pipeline
import (
- "fmt"
"time"
"code.gitea.io/gitea/modules/git"
@@ -26,7 +25,3 @@ type lfsResultSlice []*LFSResult
func (a lfsResultSlice) Len() int { return len(a) }
func (a lfsResultSlice) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a lfsResultSlice) Less(i, j int) bool { return a[j].When.After(a[i].When) }
-
-func lfsError(msg string, err error) error {
- return fmt.Errorf("LFS error occurred, %s: err: %w", msg, err)
-}
diff --git a/modules/git/pipeline/lfs_gogit.go b/modules/git/pipeline/lfs_gogit.go
index adcf8ed09c..c12397569c 100644
--- a/modules/git/pipeline/lfs_gogit.go
+++ b/modules/git/pipeline/lfs_gogit.go
@@ -6,11 +6,10 @@
package pipeline
import (
- "bufio"
+ "fmt"
"io"
"sort"
"strings"
- "sync"
"code.gitea.io/gitea/modules/git"
@@ -24,7 +23,6 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
resultsMap := map[string]*LFSResult{}
results := make([]*LFSResult, 0)
- basePath := repo.Path
gogitRepo := repo.GoGitRepo()
commitsIter, err := gogitRepo.Log(&gogit.LogOptions{
@@ -32,7 +30,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
All: true,
})
if err != nil {
- return nil, lfsError("failed to get GoGit CommitsIter", err)
+ return nil, fmt.Errorf("LFS error occurred, failed to get GoGit CommitsIter: err: %w", err)
}
err = commitsIter.ForEach(func(gitCommit *object.Commit) error {
@@ -66,7 +64,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
return nil
})
if err != nil && err != io.EOF {
- return nil, lfsError("failure in CommitIter.ForEach", err)
+ return nil, fmt.Errorf("LFS error occurred, failure in CommitIter.ForEach: %w", err)
}
for _, result := range resultsMap {
@@ -82,65 +80,6 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
}
sort.Sort(lfsResultSlice(results))
-
- // Should really use a go-git function here but name-rev is not completed and recapitulating it is not simple
- shasToNameReader, shasToNameWriter := io.Pipe()
- nameRevStdinReader, nameRevStdinWriter := io.Pipe()
- errChan := make(chan error, 1)
- wg := sync.WaitGroup{}
- wg.Add(3)
-
- go func() {
- defer wg.Done()
- scanner := bufio.NewScanner(nameRevStdinReader)
- i := 0
- for scanner.Scan() {
- line := scanner.Text()
- if len(line) == 0 {
- continue
- }
- result := results[i]
- result.FullCommitName = line
- result.BranchName = strings.Split(line, "~")[0]
- i++
- }
- }()
- go NameRevStdin(repo.Ctx, shasToNameReader, nameRevStdinWriter, &wg, basePath)
- go func() {
- defer wg.Done()
- defer shasToNameWriter.Close()
- for _, result := range results {
- i := 0
- if i < len(result.SHA) {
- n, err := shasToNameWriter.Write([]byte(result.SHA)[i:])
- if err != nil {
- errChan <- err
- break
- }
- i += n
- }
- n := 0
- for n < 1 {
- n, err = shasToNameWriter.Write([]byte{'\n'})
- if err != nil {
- errChan <- err
- break
- }
-
- }
-
- }
- }()
-
- wg.Wait()
-
- select {
- case err, has := <-errChan:
- if has {
- return nil, lfsError("unable to obtain name for LFS files", err)
- }
- default:
- }
-
- return results, nil
+ err = fillResultNameRev(repo.Ctx, repo.Path, results)
+ return results, err
}
diff --git a/modules/git/pipeline/lfs_nogogit.go b/modules/git/pipeline/lfs_nogogit.go
index 4881a2be64..91bda0d0e5 100644
--- a/modules/git/pipeline/lfs_nogogit.go
+++ b/modules/git/pipeline/lfs_nogogit.go
@@ -8,46 +8,34 @@ package pipeline
import (
"bufio"
"bytes"
+ "encoding/hex"
"io"
"sort"
"strings"
- "sync"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
)
// FindLFSFile finds commits that contain a provided pointer file hash
-func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, error) {
+func FindLFSFile(repo *git.Repository, objectID git.ObjectID) (results []*LFSResult, _ error) {
+ cmd := gitcmd.NewCommand("rev-list", "--all")
+ revListReader, revListReaderClose := cmd.MakeStdoutPipe()
+ defer revListReaderClose()
+ err := cmd.WithDir(repo.Path).
+ WithPipelineFunc(func(context gitcmd.Context) (err error) {
+ results, err = findLFSFileFunc(repo, objectID, revListReader)
+ return err
+ }).RunWithStderr(repo.Ctx)
+ return results, err
+}
+
+func findLFSFileFunc(repo *git.Repository, objectID git.ObjectID, revListReader io.Reader) ([]*LFSResult, error) {
resultsMap := map[string]*LFSResult{}
results := make([]*LFSResult, 0)
-
- basePath := repo.Path
-
- // Use rev-list to provide us with all commits in order
- revListReader, revListWriter := io.Pipe()
- defer func() {
- _ = revListWriter.Close()
- _ = revListReader.Close()
- }()
-
- go func() {
- stderr := strings.Builder{}
- err := gitcmd.NewCommand("rev-list", "--all").
- WithDir(repo.Path).
- WithStdout(revListWriter).
- WithStderr(&stderr).
- Run(repo.Ctx)
- if err != nil {
- _ = revListWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
- } else {
- _ = revListWriter.Close()
- }
- }()
-
// Next feed the commits in order into cat-file --batch, followed by their trees and sub trees as necessary.
// so let's create a batch stdin and stdout
- batchStdinWriter, batchReader, cancel, err := repo.CatFileBatch(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
@@ -55,7 +43,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
// We'll use a scanner for the revList because it's simpler than a bufio.Reader
scan := bufio.NewScanner(revListReader)
- trees := [][]byte{}
+ trees := []string{}
paths := []string{}
fnameBuf := make([]byte, 4096)
@@ -64,14 +52,10 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
for scan.Scan() {
// Get the next commit ID
- commitID := scan.Bytes()
+ commitID := scan.Text()
// push the commit to the cat-file --batch process
- _, err := batchStdinWriter.Write(commitID)
- if err != nil {
- return nil, err
- }
- _, err = batchStdinWriter.Write([]byte{'\n'})
+ info, batchReader, err := batch.QueryContent(commitID)
if err != nil {
return nil, err
}
@@ -81,26 +65,20 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
commitReadingLoop:
for {
- _, typ, size, err := git.ReadBatchLine(batchReader)
- if err != nil {
- return nil, err
- }
-
- switch typ {
+ switch info.Type {
case "tag":
// This shouldn't happen but if it does well just get the commit and try again
- id, err := git.ReadTagObjectID(batchReader, size)
+ id, err := git.ReadTagObjectID(batchReader, info.Size)
if err != nil {
return nil, err
}
- _, err = batchStdinWriter.Write([]byte(id + "\n"))
- if err != nil {
+ if info, batchReader, err = batch.QueryContent(id); err != nil {
return nil, err
}
continue
case "commit":
// Read in the commit to get its tree and in case this is one of the last used commits
- curCommit, err = git.CommitFromReader(repo, git.MustIDFromString(string(commitID)), io.LimitReader(batchReader, size))
+ curCommit, err = git.CommitFromReader(repo, git.MustIDFromString(commitID), io.LimitReader(batchReader, info.Size))
if err != nil {
return nil, err
}
@@ -108,13 +86,13 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
return nil, err
}
- if _, err := batchStdinWriter.Write([]byte(curCommit.Tree.ID.String() + "\n")); err != nil {
+ if info, _, err = batch.QueryContent(curCommit.Tree.ID.String()); err != nil {
return nil, err
}
curPath = ""
case "tree":
var n int64
- for n < size {
+ for n < info.Size {
mode, fname, binObjectID, count, err := git.ParseCatFileTreeLine(objectID.Type(), batchReader, modeBuf, fnameBuf, workingShaBuf)
if err != nil {
return nil, err
@@ -130,9 +108,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
}
resultsMap[curCommit.ID.String()+":"+curPath+string(fname)] = &result
} else if string(mode) == git.EntryModeTree.String() {
- hexObjectID := make([]byte, objectID.Type().FullLength())
- git.BinToHex(objectID.Type(), binObjectID, hexObjectID)
- trees = append(trees, hexObjectID)
+ trees = append(trees, hex.EncodeToString(binObjectID))
paths = append(paths, curPath+string(fname)+"/")
}
}
@@ -140,11 +116,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
return nil, err
}
if len(trees) > 0 {
- _, err := batchStdinWriter.Write(trees[len(trees)-1])
- if err != nil {
- return nil, err
- }
- _, err = batchStdinWriter.Write([]byte("\n"))
+ info, _, err = batch.QueryContent(trees[len(trees)-1])
if err != nil {
return nil, err
}
@@ -155,7 +127,7 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
break commitReadingLoop
}
default:
- if err := git.DiscardFull(batchReader, size+1); err != nil {
+ if err := git.DiscardFull(batchReader, info.Size+1); err != nil {
return nil, err
}
}
@@ -179,56 +151,6 @@ func FindLFSFile(repo *git.Repository, objectID git.ObjectID) ([]*LFSResult, err
}
sort.Sort(lfsResultSlice(results))
-
- // Should really use a go-git function here but name-rev is not completed and recapitulating it is not simple
- shasToNameReader, shasToNameWriter := io.Pipe()
- nameRevStdinReader, nameRevStdinWriter := io.Pipe()
- errChan := make(chan error, 1)
- wg := sync.WaitGroup{}
- wg.Add(3)
-
- go func() {
- defer wg.Done()
- scanner := bufio.NewScanner(nameRevStdinReader)
- i := 0
- for scanner.Scan() {
- line := scanner.Text()
- if len(line) == 0 {
- continue
- }
- result := results[i]
- result.FullCommitName = line
- result.BranchName = strings.Split(line, "~")[0]
- i++
- }
- }()
- go NameRevStdin(repo.Ctx, shasToNameReader, nameRevStdinWriter, &wg, basePath)
- go func() {
- defer wg.Done()
- defer shasToNameWriter.Close()
- for _, result := range results {
- _, err := shasToNameWriter.Write([]byte(result.SHA))
- if err != nil {
- errChan <- err
- break
- }
- _, err = shasToNameWriter.Write([]byte{'\n'})
- if err != nil {
- errChan <- err
- break
- }
- }
- }()
-
- wg.Wait()
-
- select {
- case err, has := <-errChan:
- if has {
- return nil, lfsError("unable to obtain name for LFS files", err)
- }
- default:
- }
-
- return results, nil
+ err = fillResultNameRev(repo.Ctx, repo.Path, results)
+ return results, err
}
diff --git a/modules/git/pipeline/lfs_test.go b/modules/git/pipeline/lfs_test.go
new file mode 100644
index 0000000000..30fe2f93c2
--- /dev/null
+++ b/modules/git/pipeline/lfs_test.go
@@ -0,0 +1,38 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pipeline
+
+import (
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/modules/git"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestFindLFSFile(t *testing.T) {
+ repoPath := "../../../tests/gitea-repositories-meta/user2/lfs.git"
+ gitRepo, err := git.OpenRepository(t.Context(), repoPath)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+
+ objectID := git.MustIDFromString("2b6c6c4eaefa24b22f2092c3d54b263ff26feb58")
+
+ stats, err := FindLFSFile(gitRepo, objectID)
+ require.NoError(t, err)
+
+ tm, err := time.Parse(time.RFC3339, "2022-12-21T17:56:42-05:00")
+ require.NoError(t, err)
+
+ assert.Len(t, stats, 1)
+ assert.Equal(t, "CONTRIBUTING.md", stats[0].Name)
+ assert.Equal(t, "73cf03db6ece34e12bf91e8853dc58f678f2f82d", stats[0].SHA)
+ assert.Equal(t, "Initial commit", stats[0].Summary)
+ assert.Equal(t, tm, stats[0].When)
+ assert.Empty(t, stats[0].ParentHashes)
+ assert.Equal(t, "master", stats[0].BranchName)
+ assert.Equal(t, "master", stats[0].FullCommitName)
+}
diff --git a/modules/git/pipeline/main_test.go b/modules/git/pipeline/main_test.go
new file mode 100644
index 0000000000..fa5832b68c
--- /dev/null
+++ b/modules/git/pipeline/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pipeline
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/git"
+)
+
+func TestMain(m *testing.M) {
+ git.RunGitTests(m)
+}
diff --git a/modules/git/pipeline/namerev.go b/modules/git/pipeline/namerev.go
index 782b5f0531..24de442940 100644
--- a/modules/git/pipeline/namerev.go
+++ b/modules/git/pipeline/namerev.go
@@ -4,30 +4,54 @@
package pipeline
import (
- "bytes"
+ "bufio"
"context"
- "fmt"
- "io"
+ "errors"
"strings"
- "sync"
"code.gitea.io/gitea/modules/git/gitcmd"
+
+ "golang.org/x/sync/errgroup"
)
-// NameRevStdin runs name-rev --stdin
-func NameRevStdin(ctx context.Context, shasToNameReader *io.PipeReader, nameRevStdinWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath string) {
- defer wg.Done()
- defer shasToNameReader.Close()
- defer nameRevStdinWriter.Close()
+func fillResultNameRev(ctx context.Context, basePath string, results []*LFSResult) error {
+ // Should really use a go-git function here but name-rev is not completed and recapitulating it is not simple
+ wg := errgroup.Group{}
+ cmd := gitcmd.NewCommand("name-rev", "--stdin", "--name-only", "--always").WithDir(basePath)
+ stdin, stdinClose := cmd.MakeStdinPipe()
+ stdout, stdoutClose := cmd.MakeStdoutPipe()
+ defer stdinClose()
+ defer stdoutClose()
- stderr := new(bytes.Buffer)
- var errbuf strings.Builder
- if err := gitcmd.NewCommand("name-rev", "--stdin", "--name-only", "--always").
- WithDir(tmpBasePath).
- WithStdin(shasToNameReader).
- WithStdout(nameRevStdinWriter).
- WithStderr(stderr).
- Run(ctx); err != nil {
- _ = shasToNameReader.CloseWithError(fmt.Errorf("git name-rev [%s]: %w - %s", tmpBasePath, err, errbuf.String()))
- }
+ wg.Go(func() error {
+ scanner := bufio.NewScanner(stdout)
+ i := 0
+ for scanner.Scan() {
+ line := scanner.Text()
+ if len(line) == 0 {
+ continue
+ }
+ result := results[i]
+ result.FullCommitName = line
+ result.BranchName = strings.Split(line, "~")[0]
+ i++
+ }
+ return scanner.Err()
+ })
+ wg.Go(func() error {
+ defer stdinClose()
+ for _, result := range results {
+ _, err := stdin.Write([]byte(result.SHA))
+ if err != nil {
+ return err
+ }
+ _, err = stdin.Write([]byte{'\n'})
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+ })
+ err := cmd.RunWithStderr(ctx)
+ return errors.Join(err, wg.Wait())
}
diff --git a/modules/git/pipeline/revlist.go b/modules/git/pipeline/revlist.go
index 755b165a65..28d4751bd8 100644
--- a/modules/git/pipeline/revlist.go
+++ b/modules/git/pipeline/revlist.go
@@ -5,63 +5,26 @@ package pipeline
import (
"bufio"
- "bytes"
"context"
- "fmt"
"io"
"strings"
- "sync"
"code.gitea.io/gitea/modules/git/gitcmd"
- "code.gitea.io/gitea/modules/log"
)
-// RevListAllObjects runs rev-list --objects --all and writes to a pipewriter
-func RevListAllObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync.WaitGroup, basePath string, errChan chan<- error) {
- defer wg.Done()
- defer revListWriter.Close()
-
- stderr := new(bytes.Buffer)
- var errbuf strings.Builder
- cmd := gitcmd.NewCommand("rev-list", "--objects", "--all")
- if err := cmd.WithDir(basePath).
- WithStdout(revListWriter).
- WithStderr(stderr).
- Run(ctx); err != nil {
- log.Error("git rev-list --objects --all [%s]: %v - %s", basePath, err, errbuf.String())
- err = fmt.Errorf("git rev-list --objects --all [%s]: %w - %s", basePath, err, errbuf.String())
- _ = revListWriter.CloseWithError(err)
- errChan <- err
- }
-}
-
// RevListObjects run rev-list --objects from headSHA to baseSHA
-func RevListObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath, headSHA, baseSHA string, errChan chan<- error) {
- defer wg.Done()
- defer revListWriter.Close()
- stderr := new(bytes.Buffer)
- var errbuf strings.Builder
- cmd := gitcmd.NewCommand("rev-list", "--objects").AddDynamicArguments(headSHA)
+func RevListObjects(ctx context.Context, cmd *gitcmd.Command, tmpBasePath, headSHA, baseSHA string) error {
+ cmd.AddArguments("rev-list", "--objects").AddDynamicArguments(headSHA)
if baseSHA != "" {
cmd = cmd.AddArguments("--not").AddDynamicArguments(baseSHA)
}
- if err := cmd.WithDir(tmpBasePath).
- WithStdout(revListWriter).
- WithStderr(stderr).
- Run(ctx); err != nil {
- log.Error("git rev-list [%s]: %v - %s", tmpBasePath, err, errbuf.String())
- errChan <- fmt.Errorf("git rev-list [%s]: %w - %s", tmpBasePath, err, errbuf.String())
- }
+ return cmd.WithDir(tmpBasePath).RunWithStderr(ctx)
}
// BlobsFromRevListObjects reads a RevListAllObjects and only selects blobs
-func BlobsFromRevListObjects(revListReader *io.PipeReader, shasToCheckWriter *io.PipeWriter, wg *sync.WaitGroup) {
- defer wg.Done()
- defer revListReader.Close()
- scanner := bufio.NewScanner(revListReader)
- defer func() {
- _ = shasToCheckWriter.CloseWithError(scanner.Err())
- }()
+func BlobsFromRevListObjects(in io.ReadCloser, out io.WriteCloser) error {
+ defer out.Close()
+ scanner := bufio.NewScanner(in)
for scanner.Scan() {
line := scanner.Text()
if len(line) == 0 {
@@ -73,12 +36,12 @@ func BlobsFromRevListObjects(revListReader *io.PipeReader, shasToCheckWriter *io
}
toWrite := []byte(fields[0] + "\n")
for len(toWrite) > 0 {
- n, err := shasToCheckWriter.Write(toWrite)
+ n, err := out.Write(toWrite)
if err != nil {
- _ = revListReader.CloseWithError(err)
- break
+ return err
}
toWrite = toWrite[n:]
}
}
+ return scanner.Err()
}
diff --git a/modules/git/ref.go b/modules/git/ref.go
index 56b2db858a..7b63d06b38 100644
--- a/modules/git/ref.go
+++ b/modules/git/ref.go
@@ -220,3 +220,14 @@ func (ref RefName) RefWebLinkPath() string {
}
return string(refType) + "/" + util.PathEscapeSegments(ref.ShortName())
}
+
+func ParseRefSuffix(ref string) (string, string) {
+ // Partially support https://git-scm.com/docs/gitrevisions
+ if idx := strings.Index(ref, "@{"); idx != -1 {
+ return ref[:idx], ref[idx:]
+ }
+ if idx := strings.Index(ref, "^"); idx != -1 {
+ return ref[:idx], ref[idx:]
+ }
+ return ref, ""
+}
diff --git a/modules/git/remote.go b/modules/git/remote.go
index 1999ad4b94..ae56c5576a 100644
--- a/modules/git/remote.go
+++ b/modules/git/remote.go
@@ -74,9 +74,9 @@ func (err *ErrInvalidCloneAddr) Unwrap() error {
func IsRemoteNotExistError(err error) bool {
// see: https://github.com/go-gitea/gitea/issues/32889#issuecomment-2571848216
// Should not add space in the end, sometimes git will add a `:`
- prefix1 := "exit status 128 - fatal: No such remote" // git < 2.30
- prefix2 := "exit status 2 - error: No such remote" // git >= 2.30
- return strings.HasPrefix(err.Error(), prefix1) || strings.HasPrefix(err.Error(), prefix2)
+ prefix1 := "fatal: No such remote" // git < 2.30, exit status 128
+ prefix2 := "error: No such remote" // git >= 2.30, exit status 2
+ return gitcmd.StderrHasPrefix(err, prefix1) || gitcmd.StderrHasPrefix(err, prefix2)
}
// ParseRemoteAddr checks if given remote address is valid,
diff --git a/modules/git/repo.go b/modules/git/repo.go
index 579accf92e..1e31eb1b80 100644
--- a/modules/git/repo.go
+++ b/modules/git/repo.go
@@ -8,7 +8,6 @@ import (
"bytes"
"context"
"fmt"
- "io"
"net/url"
"os"
"path"
@@ -83,22 +82,19 @@ func InitRepository(ctx context.Context, repoPath string, bare bool, objectForma
// IsEmpty Check if repository is empty.
func (repo *Repository) IsEmpty() (bool, error) {
- var errbuf, output strings.Builder
- if err := gitcmd.NewCommand().
+ stdout, _, err := gitcmd.NewCommand().
AddOptionFormat("--git-dir=%s", repo.Path).
AddArguments("rev-list", "-n", "1", "--all").
WithDir(repo.Path).
- WithStdout(&output).
- WithStderr(&errbuf).
- Run(repo.Ctx); err != nil {
- if (err.Error() == "exit status 1" && strings.TrimSpace(errbuf.String()) == "") || err.Error() == "exit status 129" {
+ RunStdString(repo.Ctx)
+ if err != nil {
+ if (gitcmd.IsErrorExitCode(err, 1) && err.Stderr() == "") || gitcmd.IsErrorExitCode(err, 129) {
// git 2.11 exits with 129 if the repo is empty
return true, nil
}
- return true, fmt.Errorf("check empty: %w - %s", err, errbuf.String())
+ return true, fmt.Errorf("check empty: %w", err)
}
-
- return strings.TrimSpace(output.String()) == "", nil
+ return strings.TrimSpace(stdout) == "", nil
}
// CloneRepoOptions options when clone a repository
@@ -171,21 +167,16 @@ func Clone(ctx context.Context, from, to string, opts CloneRepoOptions) error {
}
}
- stderr := new(bytes.Buffer)
- if err := cmd.
+ return cmd.
WithTimeout(opts.Timeout).
WithEnv(envs).
- WithStdout(io.Discard).
- WithStderr(stderr).
- Run(ctx); err != nil {
- return gitcmd.ConcatenateError(err, stderr.String())
- }
- return nil
+ RunWithStderr(ctx)
}
// PushOptions options when push to remote
type PushOptions struct {
Remote string
+ LocalRefName string
Branch string
Force bool
ForceWithLease string
@@ -207,7 +198,13 @@ func Push(ctx context.Context, repoPath string, opts PushOptions) error {
}
remoteBranchArgs := []string{opts.Remote}
if len(opts.Branch) > 0 {
- remoteBranchArgs = append(remoteBranchArgs, opts.Branch)
+ var refspec string
+ if opts.LocalRefName != "" {
+ refspec = fmt.Sprintf("%s:%s", opts.LocalRefName, opts.Branch)
+ } else {
+ refspec = opts.Branch
+ }
+ remoteBranchArgs = append(remoteBranchArgs, refspec)
}
cmd.AddDashesAndList(remoteBranchArgs...)
diff --git a/modules/git/repo_archive.go b/modules/git/repo_archive.go
deleted file mode 100644
index 8a9eec9e6a..0000000000
--- a/modules/git/repo_archive.go
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright 2015 The Gogs Authors. All rights reserved.
-// Copyright 2020 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package git
-
-import (
- "context"
- "fmt"
- "io"
- "path/filepath"
- "strings"
-
- "code.gitea.io/gitea/modules/git/gitcmd"
-)
-
-// ArchiveType archive types
-type ArchiveType int
-
-const (
- ArchiveUnknown ArchiveType = iota
- ArchiveZip // 1
- ArchiveTarGz // 2
- ArchiveBundle // 3
-)
-
-// String converts an ArchiveType to string: the extension of the archive file without prefix dot
-func (a ArchiveType) String() string {
- switch a {
- case ArchiveZip:
- return "zip"
- case ArchiveTarGz:
- return "tar.gz"
- case ArchiveBundle:
- return "bundle"
- }
- return "unknown"
-}
-
-func SplitArchiveNameType(s string) (string, ArchiveType) {
- switch {
- case strings.HasSuffix(s, ".zip"):
- return strings.TrimSuffix(s, ".zip"), ArchiveZip
- case strings.HasSuffix(s, ".tar.gz"):
- return strings.TrimSuffix(s, ".tar.gz"), ArchiveTarGz
- case strings.HasSuffix(s, ".bundle"):
- return strings.TrimSuffix(s, ".bundle"), ArchiveBundle
- }
- return s, ArchiveUnknown
-}
-
-// CreateArchive create archive content to the target path
-func (repo *Repository) CreateArchive(ctx context.Context, format ArchiveType, target io.Writer, usePrefix bool, commitID string) error {
- if format.String() == "unknown" {
- return fmt.Errorf("unknown format: %v", format)
- }
-
- cmd := gitcmd.NewCommand("archive")
- if usePrefix {
- cmd.AddOptionFormat("--prefix=%s", filepath.Base(strings.TrimSuffix(repo.Path, ".git"))+"/")
- }
- cmd.AddOptionFormat("--format=%s", format.String())
- cmd.AddDynamicArguments(commitID)
-
- var stderr strings.Builder
- err := cmd.WithDir(repo.Path).
- WithStdout(target).
- WithStderr(&stderr).
- Run(ctx)
- if err != nil {
- return gitcmd.ConcatenateError(err, stderr.String())
- }
- return nil
-}
diff --git a/modules/git/repo_base_nogogit.go b/modules/git/repo_base_nogogit.go
index 17c71da5ef..775bbd4a09 100644
--- a/modules/git/repo_base_nogogit.go
+++ b/modules/git/repo_base_nogogit.go
@@ -7,9 +7,9 @@
package git
import (
- "bufio"
"context"
"path/filepath"
+ "sync"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
@@ -23,11 +23,9 @@ type Repository struct {
tagCache *ObjectCache[*Tag]
- batchInUse bool
- batch *Batch
-
- checkInUse bool
- check *Batch
+ mu sync.Mutex
+ catFileBatchCloser CatFileBatchCloser
+ catFileBatchInUse bool
Ctx context.Context
LastCommitCache *LastCommitCache
@@ -56,69 +54,47 @@ func OpenRepository(ctx context.Context, repoPath string) (*Repository, error) {
}, nil
}
-// CatFileBatch obtains a CatFileBatch for this repository
-func (repo *Repository) CatFileBatch(ctx context.Context) (WriteCloserError, *bufio.Reader, func(), error) {
- if repo.batch == nil {
- var err error
- repo.batch, err = NewBatch(ctx, repo.Path)
+// CatFileBatch obtains a "batch object provider" for this repository.
+// It reuses an existing one if available, otherwise creates a new one.
+func (repo *Repository) CatFileBatch(ctx context.Context) (_ CatFileBatch, closeFunc func(), err error) {
+ repo.mu.Lock()
+ defer repo.mu.Unlock()
+
+ if repo.catFileBatchCloser == nil {
+ repo.catFileBatchCloser, err = NewBatch(ctx, repo.Path)
if err != nil {
- return nil, nil, nil, err
+ repo.catFileBatchCloser = nil // otherwise it is "interface(nil)" and will cause wrong logic
+ return nil, nil, err
}
}
- if !repo.batchInUse {
- repo.batchInUse = true
- return repo.batch.Writer, repo.batch.Reader, func() {
- repo.batchInUse = false
+ if !repo.catFileBatchInUse {
+ repo.catFileBatchInUse = true
+ return CatFileBatch(repo.catFileBatchCloser), func() {
+ repo.mu.Lock()
+ defer repo.mu.Unlock()
+ repo.catFileBatchInUse = false
}, nil
}
log.Debug("Opening temporary cat file batch for: %s", repo.Path)
tempBatch, err := NewBatch(ctx, repo.Path)
if err != nil {
- return nil, nil, nil, err
+ return nil, nil, err
}
- return tempBatch.Writer, tempBatch.Reader, tempBatch.Close, nil
-}
-
-// CatFileBatchCheck obtains a CatFileBatchCheck for this repository
-func (repo *Repository) CatFileBatchCheck(ctx context.Context) (WriteCloserError, *bufio.Reader, func(), error) {
- if repo.check == nil {
- var err error
- repo.check, err = NewBatchCheck(ctx, repo.Path)
- if err != nil {
- return nil, nil, nil, err
- }
- }
-
- if !repo.checkInUse {
- repo.checkInUse = true
- return repo.check.Writer, repo.check.Reader, func() {
- repo.checkInUse = false
- }, nil
- }
-
- log.Debug("Opening temporary cat file batch-check for: %s", repo.Path)
- tempBatchCheck, err := NewBatchCheck(ctx, repo.Path)
- if err != nil {
- return nil, nil, nil, err
- }
- return tempBatchCheck.Writer, tempBatchCheck.Reader, tempBatchCheck.Close, nil
+ return tempBatch, tempBatch.Close, nil
}
func (repo *Repository) Close() error {
if repo == nil {
return nil
}
- if repo.batch != nil {
- repo.batch.Close()
- repo.batch = nil
- repo.batchInUse = false
- }
- if repo.check != nil {
- repo.check.Close()
- repo.check = nil
- repo.checkInUse = false
+ repo.mu.Lock()
+ defer repo.mu.Unlock()
+ if repo.catFileBatchCloser != nil {
+ repo.catFileBatchCloser.Close()
+ repo.catFileBatchCloser = nil
+ repo.catFileBatchInUse = false
}
repo.LastCommitCache = nil
repo.tagCache = nil
diff --git a/modules/git/repo_base_nogogit_test.go b/modules/git/repo_base_nogogit_test.go
new file mode 100644
index 0000000000..a12bbb73c2
--- /dev/null
+++ b/modules/git/repo_base_nogogit_test.go
@@ -0,0 +1,26 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+//go:build !gogit
+
+package git
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestRepoCatFileBatch(t *testing.T) {
+ t.Run("MissingRepoAndClose", func(t *testing.T) {
+ repo, err := OpenRepository(t.Context(), filepath.Join(testReposDir, "repo1_bare"))
+ require.NoError(t, err)
+ repo.Path = "/no-such" // when the repo is missing (it usually occurs during testing because the fixtures are synced frequently)
+ _, _, err = repo.CatFileBatch(t.Context())
+ require.Error(t, err)
+ require.NoError(t, repo.Close()) // shouldn't panic
+ })
+
+ // TODO: test more methods and concurrency queries
+}
diff --git a/modules/git/repo_blob.go b/modules/git/repo_blob.go
index 698b6c7074..ff930a3432 100644
--- a/modules/git/repo_blob.go
+++ b/modules/git/repo_blob.go
@@ -9,5 +9,11 @@ func (repo *Repository) GetBlob(idStr string) (*Blob, error) {
if err != nil {
return nil, err
}
- return repo.getBlob(id)
+ if id.IsZero() {
+ return nil, ErrNotExist{id.String(), ""}
+ }
+ return &Blob{
+ ID: id,
+ repo: repo,
+ }, nil
}
diff --git a/modules/git/repo_blob_gogit.go b/modules/git/repo_blob_gogit.go
deleted file mode 100644
index 66c8c2775c..0000000000
--- a/modules/git/repo_blob_gogit.go
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright 2018 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-//go:build gogit
-
-package git
-
-import (
- "github.com/go-git/go-git/v5/plumbing"
-)
-
-func (repo *Repository) getBlob(id ObjectID) (*Blob, error) {
- encodedObj, err := repo.gogitRepo.Storer.EncodedObject(plumbing.AnyObject, plumbing.Hash(id.RawValue()))
- if err != nil {
- return nil, ErrNotExist{id.String(), ""}
- }
-
- return &Blob{
- ID: id,
- gogitEncodedObj: encodedObj,
- }, nil
-}
diff --git a/modules/git/repo_blob_nogogit.go b/modules/git/repo_blob_nogogit.go
deleted file mode 100644
index 04b0fb00ff..0000000000
--- a/modules/git/repo_blob_nogogit.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright 2020 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-//go:build !gogit
-
-package git
-
-func (repo *Repository) getBlob(id ObjectID) (*Blob, error) {
- if id.IsZero() {
- return nil, ErrNotExist{id.String(), ""}
- }
- return &Blob{
- ID: id,
- repo: repo,
- }, nil
-}
diff --git a/modules/git/repo_branch_nogogit.go b/modules/git/repo_branch_nogogit.go
index f1b26b06ab..f925aab3e4 100644
--- a/modules/git/repo_branch_nogogit.go
+++ b/modules/git/repo_branch_nogogit.go
@@ -8,7 +8,6 @@ package git
import (
"bufio"
- "bytes"
"context"
"io"
"strings"
@@ -18,24 +17,24 @@ import (
)
// IsObjectExist returns true if the given object exists in the repository.
+// FIXME: this function doesn't seem right, it is only used by GarbageCollectLFSMetaObjectsForRepo
func (repo *Repository) IsObjectExist(name string) bool {
if name == "" {
return false
}
- wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
- log.Debug("Error writing to CatFileBatchCheck %v", err)
+ log.Debug("Error opening CatFileBatch %v", err)
return false
}
defer cancel()
- _, err = wr.Write([]byte(name + "\n"))
+ info, err := batch.QueryInfo(name)
if err != nil {
- log.Debug("Error writing to CatFileBatchCheck %v", err)
+ log.Debug("Error checking object info %v", err)
return false
}
- sha, _, _, err := ReadBatchLine(rd)
- return err == nil && bytes.HasPrefix(sha, []byte(strings.TrimSpace(name)))
+ return strings.HasPrefix(info.ID, name) // FIXME: this logic doesn't seem right, why "HasPrefix"
}
// IsReferenceExist returns true if given reference exists in the repository.
@@ -44,18 +43,13 @@ func (repo *Repository) IsReferenceExist(name string) bool {
return false
}
- wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
- log.Debug("Error writing to CatFileBatchCheck %v", err)
+ log.Error("Error opening CatFileBatch %v", err)
return false
}
defer cancel()
- _, err = wr.Write([]byte(name + "\n"))
- if err != nil {
- log.Debug("Error writing to CatFileBatchCheck %v", err)
- return false
- }
- _, _, _, err = ReadBatchLine(rd)
+ _, err = batch.QueryInfo(name)
return err == nil
}
@@ -100,94 +94,81 @@ func callShowRef(ctx context.Context, repoPath, trimPrefix string, extraArgs git
}
func WalkShowRef(ctx context.Context, repoPath string, extraArgs gitcmd.TrustedCmdArgs, skip, limit int, walkfn func(sha1, refname string) error) (countAll int, err error) {
- stdoutReader, stdoutWriter := io.Pipe()
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
-
- go func() {
- stderrBuilder := &strings.Builder{}
- args := gitcmd.TrustedCmdArgs{"for-each-ref", "--format=%(objectname) %(refname)"}
- args = append(args, extraArgs...)
- err := gitcmd.NewCommand(args...).
- WithDir(repoPath).
- WithStdout(stdoutWriter).
- WithStderr(stderrBuilder).
- Run(ctx)
- if err != nil {
- if stderrBuilder.Len() == 0 {
- _ = stdoutWriter.Close()
- return
- }
- _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, stderrBuilder.String()))
- } else {
- _ = stdoutWriter.Close()
- }
- }()
-
i := 0
- bufReader := bufio.NewReader(stdoutReader)
- for i < skip {
- _, isPrefix, err := bufReader.ReadLine()
- if err == io.EOF {
- return i, nil
- }
- if err != nil {
- return 0, err
- }
- if !isPrefix {
- i++
- }
+ args := gitcmd.TrustedCmdArgs{"for-each-ref", "--format=%(objectname) %(refname)"}
+ args = append(args, extraArgs...)
+ cmd := gitcmd.NewCommand(args...)
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+ cmd.WithDir(repoPath).
+ WithPipelineFunc(func(gitcmd.Context) error {
+ bufReader := bufio.NewReader(stdoutReader)
+ for i < skip {
+ _, isPrefix, err := bufReader.ReadLine()
+ if err == io.EOF {
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+ if !isPrefix {
+ i++
+ }
+ }
+ for limit == 0 || i < skip+limit {
+ // The output of show-ref is simply a list:
+ // SP [ LF
+ sha, err := bufReader.ReadString(' ')
+ if err == io.EOF {
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+
+ branchName, err := bufReader.ReadString('\n')
+ if err == io.EOF {
+ // This shouldn't happen... but we'll tolerate it for the sake of peace
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+
+ if len(branchName) > 0 {
+ branchName = branchName[:len(branchName)-1]
+ }
+
+ if len(sha) > 0 {
+ sha = sha[:len(sha)-1]
+ }
+
+ err = walkfn(sha, branchName)
+ if err != nil {
+ return err
+ }
+ i++
+ }
+ // count all refs
+ for limit != 0 {
+ _, isPrefix, err := bufReader.ReadLine()
+ if err == io.EOF {
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+ if !isPrefix {
+ i++
+ }
+ }
+ return nil
+ })
+ err = cmd.RunWithStderr(ctx)
+ if errPipeline, ok := gitcmd.UnwrapPipelineError(err); ok {
+ return i, errPipeline // keep the old behavior: return pipeline error directly
}
- for limit == 0 || i < skip+limit {
- // The output of show-ref is simply a list:
- // SP ][ LF
- sha, err := bufReader.ReadString(' ')
- if err == io.EOF {
- return i, nil
- }
- if err != nil {
- return 0, err
- }
-
- branchName, err := bufReader.ReadString('\n')
- if err == io.EOF {
- // This shouldn't happen... but we'll tolerate it for the sake of peace
- return i, nil
- }
- if err != nil {
- return i, err
- }
-
- if len(branchName) > 0 {
- branchName = branchName[:len(branchName)-1]
- }
-
- if len(sha) > 0 {
- sha = sha[:len(sha)-1]
- }
-
- err = walkfn(sha, branchName)
- if err != nil {
- return i, err
- }
- i++
- }
- // count all refs
- for limit != 0 {
- _, isPrefix, err := bufReader.ReadLine()
- if err == io.EOF {
- return i, nil
- }
- if err != nil {
- return 0, err
- }
- if !isPrefix {
- i++
- }
- }
- return i, nil
+ return i, err
}
// GetRefsBySha returns all references filtered with prefix that belong to a sha commit hash
diff --git a/modules/git/repo_commit.go b/modules/git/repo_commit.go
index 4a441429f4..c10f73690c 100644
--- a/modules/git/repo_commit.go
+++ b/modules/git/repo_commit.go
@@ -226,66 +226,55 @@ type CommitsByFileAndRangeOptions struct {
// CommitsByFileAndRange return the commits according revision file and the page
func (repo *Repository) CommitsByFileAndRange(opts CommitsByFileAndRangeOptions) ([]*Commit, error) {
- stdoutReader, stdoutWriter := io.Pipe()
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
- go func() {
- stderr := strings.Builder{}
- gitCmd := gitcmd.NewCommand("rev-list").
- AddOptionFormat("--max-count=%d", setting.Git.CommitsRangeSize).
- AddOptionFormat("--skip=%d", (opts.Page-1)*setting.Git.CommitsRangeSize)
- gitCmd.AddDynamicArguments(opts.Revision)
+ gitCmd := gitcmd.NewCommand("rev-list").
+ AddOptionFormat("--max-count=%d", setting.Git.CommitsRangeSize).
+ AddOptionFormat("--skip=%d", (opts.Page-1)*setting.Git.CommitsRangeSize)
+ gitCmd.AddDynamicArguments(opts.Revision)
- if opts.Not != "" {
- gitCmd.AddOptionValues("--not", opts.Not)
- }
- if opts.Since != "" {
- gitCmd.AddOptionFormat("--since=%s", opts.Since)
- }
- if opts.Until != "" {
- gitCmd.AddOptionFormat("--until=%s", opts.Until)
- }
-
- gitCmd.AddDashesAndList(opts.File)
- err := gitCmd.WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithStderr(&stderr).
- Run(repo.Ctx)
- if err != nil {
- _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, (&stderr).String()))
- } else {
- _ = stdoutWriter.Close()
- }
- }()
-
- objectFormat, err := repo.GetObjectFormat()
- if err != nil {
- return nil, err
+ if opts.Not != "" {
+ gitCmd.AddOptionValues("--not", opts.Not)
}
+ if opts.Since != "" {
+ gitCmd.AddOptionFormat("--since=%s", opts.Since)
+ }
+ if opts.Until != "" {
+ gitCmd.AddOptionFormat("--until=%s", opts.Until)
+ }
+ gitCmd.AddDashesAndList(opts.File)
- length := objectFormat.FullLength()
- commits := []*Commit{}
- shaline := make([]byte, length+1)
- for {
- n, err := io.ReadFull(stdoutReader, shaline)
- if err != nil || n < length {
- if err == io.EOF {
- err = nil
+ var commits []*Commit
+ stdoutReader, stdoutReaderClose := gitCmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+ err := gitCmd.WithDir(repo.Path).
+ WithPipelineFunc(func(context gitcmd.Context) error {
+ objectFormat, err := repo.GetObjectFormat()
+ if err != nil {
+ return err
}
- return commits, err
- }
- objectID, err := NewIDFromString(string(shaline[0:length]))
- if err != nil {
- return nil, err
- }
- commit, err := repo.getCommit(objectID)
- if err != nil {
- return nil, err
- }
- commits = append(commits, commit)
- }
+
+ length := objectFormat.FullLength()
+ shaline := make([]byte, length+1)
+ for {
+ n, err := io.ReadFull(stdoutReader, shaline)
+ if err != nil || n < length {
+ if err == io.EOF {
+ err = nil
+ }
+ return err
+ }
+ objectID, err := NewIDFromString(string(shaline[0:length]))
+ if err != nil {
+ return err
+ }
+ commit, err := repo.getCommit(objectID)
+ if err != nil {
+ return err
+ }
+ commits = append(commits, commit)
+ }
+ }).
+ RunWithStderr(repo.Ctx)
+ return commits, err
}
// FilesCountBetween return the number of files changed between two commits
diff --git a/modules/git/repo_commit_gogit.go b/modules/git/repo_commit_gogit.go
index c84aabde1a..550d153722 100644
--- a/modules/git/repo_commit_gogit.go
+++ b/modules/git/repo_commit_gogit.go
@@ -67,16 +67,6 @@ func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) {
return NewIDFromString(actualCommitID)
}
-// IsCommitExist returns true if given commit exists in current repository.
-func (repo *Repository) IsCommitExist(name string) bool {
- hash, err := repo.ConvertToGitID(name)
- if err != nil {
- return false
- }
- _, err = repo.gogitRepo.CommitObject(plumbing.Hash(hash.RawValue()))
- return err == nil
-}
-
func (repo *Repository) getCommit(id ObjectID) (*Commit, error) {
var tagObject *object.Tag
diff --git a/modules/git/repo_commit_nogogit.go b/modules/git/repo_commit_nogogit.go
index 3f27833fa6..2ddb527502 100644
--- a/modules/git/repo_commit_nogogit.go
+++ b/modules/git/repo_commit_nogogit.go
@@ -6,7 +6,6 @@
package git
import (
- "bufio"
"errors"
"io"
"strings"
@@ -37,50 +36,31 @@ func (repo *Repository) ResolveReference(name string) (string, error) {
// GetRefCommitID returns the last commit ID string of given reference (branch or tag).
func (repo *Repository) GetRefCommitID(name string) (string, error) {
- wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return "", err
}
defer cancel()
- _, err = wr.Write([]byte(name + "\n"))
- if err != nil {
- return "", err
- }
- shaBs, _, _, err := ReadBatchLine(rd)
+ info, err := batch.QueryInfo(name)
if IsErrNotExist(err) {
return "", ErrNotExist{name, ""}
+ } else if err != nil {
+ return "", err
}
-
- return string(shaBs), nil
-}
-
-// IsCommitExist returns true if given commit exists in current repository.
-func (repo *Repository) IsCommitExist(name string) bool {
- if err := ensureValidGitRepository(repo.Ctx, repo.Path); err != nil {
- log.Error("IsCommitExist: %v", err)
- return false
- }
- _, _, err := gitcmd.NewCommand("cat-file", "-e").
- AddDynamicArguments(name).
- WithDir(repo.Path).
- RunStdString(repo.Ctx)
- return err == nil
+ return info.ID, nil
}
func (repo *Repository) getCommit(id ObjectID) (*Commit, error) {
- wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
-
- _, _ = wr.Write([]byte(id.String() + "\n"))
-
- return repo.getCommitFromBatchReader(wr, rd, id)
+ return repo.getCommitWithBatch(batch, id)
}
-func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio.Reader, id ObjectID) (*Commit, error) {
- _, typ, size, err := ReadBatchLine(rd)
+func (repo *Repository) getCommitWithBatch(batch CatFileBatch, id ObjectID) (*Commit, error) {
+ info, rd, err := batch.QueryContent(id.String())
if err != nil {
if errors.Is(err, io.EOF) || IsErrNotExist(err) {
return nil, ErrNotExist{ID: id.String()}
@@ -88,13 +68,13 @@ func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio.
return nil, err
}
- switch typ {
+ switch info.Type {
case "missing":
return nil, ErrNotExist{ID: id.String()}
case "tag":
// then we need to parse the tag
// and load the commit
- data, err := io.ReadAll(io.LimitReader(rd, size))
+ data, err := io.ReadAll(io.LimitReader(rd, info.Size))
if err != nil {
return nil, err
}
@@ -106,19 +86,9 @@ func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio.
if err != nil {
return nil, err
}
-
- if _, err := wr.Write([]byte(tag.Object.String() + "\n")); err != nil {
- return nil, err
- }
-
- commit, err := repo.getCommitFromBatchReader(wr, rd, tag.Object)
- if err != nil {
- return nil, err
- }
-
- return commit, nil
+ return repo.getCommitWithBatch(batch, tag.Object)
case "commit":
- commit, err := CommitFromReader(repo, id, io.LimitReader(rd, size))
+ commit, err := CommitFromReader(repo, id, io.LimitReader(rd, info.Size))
if err != nil {
return nil, err
}
@@ -129,8 +99,8 @@ func (repo *Repository) getCommitFromBatchReader(wr WriteCloserError, rd *bufio.
return commit, nil
default:
- log.Debug("Unknown typ: %s", typ)
- if err := DiscardFull(rd, size+1); err != nil {
+ log.Debug("Unknown cat-file object type: %s", info.Type)
+ if err := DiscardFull(rd, info.Size+1); err != nil {
return nil, err
}
return nil, ErrNotExist{
@@ -152,16 +122,12 @@ func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) {
}
}
- wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
- _, err = wr.Write([]byte(commitID + "\n"))
- if err != nil {
- return nil, err
- }
- sha, _, _, err := ReadBatchLine(rd)
+ info, err := batch.QueryInfo(commitID)
if err != nil {
if IsErrNotExist(err) {
return nil, ErrNotExist{commitID, ""}
@@ -169,5 +135,5 @@ func (repo *Repository) ConvertToGitID(commitID string) (ObjectID, error) {
return nil, err
}
- return MustIDFromString(string(sha)), nil
+ return MustIDFromString(info.ID), nil
}
diff --git a/modules/git/repo_compare.go b/modules/git/repo_compare.go
index f60696a763..aa25e2ec20 100644
--- a/modules/git/repo_compare.go
+++ b/modules/git/repo_compare.go
@@ -18,32 +18,6 @@ import (
"code.gitea.io/gitea/modules/git/gitcmd"
)
-// GetMergeBase checks and returns merge base of two branches and the reference used as base.
-func (repo *Repository) GetMergeBase(tmpRemote, base, head string) (string, string, error) {
- if tmpRemote == "" {
- tmpRemote = "origin"
- }
-
- if tmpRemote != "origin" {
- tmpBaseName := RemotePrefix + tmpRemote + "/tmp_" + base
- // Fetch commit into a temporary branch in order to be able to handle commits and tags
- _, _, err := gitcmd.NewCommand("fetch", "--no-tags").
- AddDynamicArguments(tmpRemote).
- AddDashesAndList(base + ":" + tmpBaseName).
- WithDir(repo.Path).
- RunStdString(repo.Ctx)
- if err == nil {
- base = tmpBaseName
- }
- }
-
- stdout, _, err := gitcmd.NewCommand("merge-base").
- AddDashesAndList(base, head).
- WithDir(repo.Path).
- RunStdString(repo.Ctx)
- return strings.TrimSpace(stdout), base, err
-}
-
type lineCountWriter struct {
numLines int
}
@@ -60,7 +34,6 @@ func (l *lineCountWriter) Write(p []byte) (n int, err error) {
func (repo *Repository) GetDiffNumChangedFiles(base, head string, directComparison bool) (int, error) {
// Now there is git diff --shortstat but this appears to be slower than simply iterating with --nameonly
w := &lineCountWriter{}
- stderr := new(bytes.Buffer)
separator := "..."
if directComparison {
@@ -72,25 +45,22 @@ func (repo *Repository) GetDiffNumChangedFiles(base, head string, directComparis
AddDynamicArguments(base + separator + head).
AddArguments("--").
WithDir(repo.Path).
- WithStdout(w).
- WithStderr(stderr).
- Run(repo.Ctx); err != nil {
- if strings.Contains(stderr.String(), "no merge base") {
+ WithStdoutCopy(w).
+ RunWithStderr(repo.Ctx); err != nil {
+ if strings.Contains(err.Stderr(), "no merge base") {
// git >= 2.28 now returns an error if base and head have become unrelated.
// previously it would return the results of git diff -z --name-only base head so let's try that...
w = &lineCountWriter{}
- stderr.Reset()
if err = gitcmd.NewCommand("diff", "-z", "--name-only").
AddDynamicArguments(base, head).
AddArguments("--").
WithDir(repo.Path).
- WithStdout(w).
- WithStderr(stderr).
- Run(repo.Ctx); err == nil {
+ WithStdoutCopy(w).
+ RunWithStderr(repo.Ctx); err == nil {
return w.numLines, nil
}
}
- return 0, fmt.Errorf("%w: Stderr: %s", err, stderr)
+ return 0, err
}
return w.numLines, nil
}
@@ -99,11 +69,9 @@ var patchCommits = regexp.MustCompile(`^From\s(\w+)\s`)
// GetDiff generates and returns patch data between given revisions, optimized for human readability
func (repo *Repository) GetDiff(compareArg string, w io.Writer) error {
- stderr := new(bytes.Buffer)
return gitcmd.NewCommand("diff", "-p").AddDynamicArguments(compareArg).
WithDir(repo.Path).
- WithStdout(w).
- WithStderr(stderr).
+ WithStdoutCopy(w).
Run(repo.Ctx)
}
@@ -112,17 +80,15 @@ func (repo *Repository) GetDiffBinary(compareArg string, w io.Writer) error {
return gitcmd.NewCommand("diff", "-p", "--binary", "--histogram").
AddDynamicArguments(compareArg).
WithDir(repo.Path).
- WithStdout(w).
+ WithStdoutCopy(w).
Run(repo.Ctx)
}
// GetPatch generates and returns format-patch data between given revisions, able to be used with `git apply`
func (repo *Repository) GetPatch(compareArg string, w io.Writer) error {
- stderr := new(bytes.Buffer)
return gitcmd.NewCommand("format-patch", "--binary", "--stdout").AddDynamicArguments(compareArg).
WithDir(repo.Path).
- WithStdout(w).
- WithStderr(stderr).
+ WithStdoutCopy(w).
Run(repo.Ctx)
}
diff --git a/modules/git/repo_index.go b/modules/git/repo_index.go
index 4068f86bb2..1d040d5e0a 100644
--- a/modules/git/repo_index.go
+++ b/modules/git/repo_index.go
@@ -101,21 +101,17 @@ func (repo *Repository) RemoveFilesFromIndex(filenames ...string) error {
return err
}
cmd := gitcmd.NewCommand("update-index", "--remove", "-z", "--index-info")
- stdout := new(bytes.Buffer)
- stderr := new(bytes.Buffer)
- buffer := new(bytes.Buffer)
+ input := new(bytes.Buffer)
for _, file := range filenames {
if file != "" {
// using format: mode SP type SP sha1 TAB path
- buffer.WriteString("0 blob " + objectFormat.EmptyObjectID().String() + "\t" + file + "\000")
+ input.WriteString("0 blob " + objectFormat.EmptyObjectID().String() + "\t" + file + "\000")
}
}
return cmd.
WithDir(repo.Path).
- WithStdin(bytes.NewReader(buffer.Bytes())).
- WithStdout(stdout).
- WithStderr(stderr).
- Run(repo.Ctx)
+ WithStdinBytes(input.Bytes()).
+ RunWithStderr(repo.Ctx)
}
type IndexObjectInfo struct {
@@ -127,19 +123,15 @@ type IndexObjectInfo struct {
// AddObjectsToIndex adds the provided object hashes to the index at the provided filenames
func (repo *Repository) AddObjectsToIndex(objects ...IndexObjectInfo) error {
cmd := gitcmd.NewCommand("update-index", "--add", "--replace", "-z", "--index-info")
- stdout := new(bytes.Buffer)
- stderr := new(bytes.Buffer)
- buffer := new(bytes.Buffer)
+ input := new(bytes.Buffer)
for _, object := range objects {
// using format: mode SP type SP sha1 TAB path
- buffer.WriteString(object.Mode + " blob " + object.Object.String() + "\t" + object.Filename + "\000")
+ input.WriteString(object.Mode + " blob " + object.Object.String() + "\t" + object.Filename + "\000")
}
return cmd.
WithDir(repo.Path).
- WithStdin(bytes.NewReader(buffer.Bytes())).
- WithStdout(stdout).
- WithStderr(stderr).
- Run(repo.Ctx)
+ WithStdinBytes(input.Bytes()).
+ RunWithStderr(repo.Ctx)
}
// AddObjectToIndex adds the provided object hash to the index at the provided filename
diff --git a/modules/git/repo_object.go b/modules/git/repo_object.go
index 2a39a3c4d8..38e16b4646 100644
--- a/modules/git/repo_object.go
+++ b/modules/git/repo_object.go
@@ -5,8 +5,6 @@
package git
import (
- "bytes"
- "io"
"strings"
"code.gitea.io/gitea/modules/git/gitcmd"
@@ -33,18 +31,12 @@ func (o ObjectType) Bytes() []byte {
return []byte(o)
}
-type EmptyReader struct{}
-
-func (EmptyReader) Read(p []byte) (int, error) {
- return 0, io.EOF
-}
-
func (repo *Repository) GetObjectFormat() (ObjectFormat, error) {
if repo != nil && repo.objectFormat != nil {
return repo.objectFormat, nil
}
- str, err := repo.hashObject(EmptyReader{}, false)
+ str, err := repo.hashObjectBytes(nil, false)
if err != nil {
return nil, err
}
@@ -58,32 +50,28 @@ func (repo *Repository) GetObjectFormat() (ObjectFormat, error) {
return repo.objectFormat, nil
}
-// HashObject takes a reader and returns hash for that reader
-func (repo *Repository) HashObject(reader io.Reader) (ObjectID, error) {
- idStr, err := repo.hashObject(reader, true)
+// HashObjectBytes returns hash for the content
+func (repo *Repository) HashObjectBytes(buf []byte) (ObjectID, error) {
+ idStr, err := repo.hashObjectBytes(buf, true)
if err != nil {
return nil, err
}
return NewIDFromString(idStr)
}
-func (repo *Repository) hashObject(reader io.Reader, save bool) (string, error) {
+func (repo *Repository) hashObjectBytes(buf []byte, save bool) (string, error) {
var cmd *gitcmd.Command
if save {
cmd = gitcmd.NewCommand("hash-object", "-w", "--stdin")
} else {
cmd = gitcmd.NewCommand("hash-object", "--stdin")
}
- stdout := new(bytes.Buffer)
- stderr := new(bytes.Buffer)
- err := cmd.
+ stdout, _, err := cmd.
WithDir(repo.Path).
- WithStdin(reader).
- WithStdout(stdout).
- WithStderr(stderr).
- Run(repo.Ctx)
+ WithStdinBytes(buf).
+ RunStdString(repo.Ctx)
if err != nil {
return "", err
}
- return strings.TrimSpace(stdout.String()), nil
+ return strings.TrimSpace(stdout), nil
}
diff --git a/modules/git/repo_ref_nogogit.go b/modules/git/repo_ref_nogogit.go
index 09bb0df7b8..c58992fa9d 100644
--- a/modules/git/repo_ref_nogogit.go
+++ b/modules/git/repo_ref_nogogit.go
@@ -15,75 +15,61 @@ import (
// GetRefsFiltered returns all references of the repository that matches patterm exactly or starting with.
func (repo *Repository) GetRefsFiltered(pattern string) ([]*Reference, error) {
- stdoutReader, stdoutWriter := io.Pipe()
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
-
- go func() {
- stderrBuilder := &strings.Builder{}
- err := gitcmd.NewCommand("for-each-ref").
- WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithStderr(stderrBuilder).
- Run(repo.Ctx)
- if err != nil {
- _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, stderrBuilder.String()))
- } else {
- _ = stdoutWriter.Close()
- }
- }()
-
refs := make([]*Reference, 0)
- bufReader := bufio.NewReader(stdoutReader)
- for {
- // The output of for-each-ref is simply a list:
- // SP TAB ][ LF
- sha, err := bufReader.ReadString(' ')
- if err == io.EOF {
- break
- }
- if err != nil {
- return nil, err
- }
- sha = sha[:len(sha)-1]
+ cmd := gitcmd.NewCommand("for-each-ref")
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+ err := cmd.WithDir(repo.Path).
+ WithPipelineFunc(func(context gitcmd.Context) error {
+ bufReader := bufio.NewReader(stdoutReader)
+ for {
+ // The output of for-each-ref is simply a list:
+ // SP TAB ][ LF
+ sha, err := bufReader.ReadString(' ')
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ return err
+ }
+ sha = sha[:len(sha)-1]
- typ, err := bufReader.ReadString('\t')
- if err == io.EOF {
- // This should not happen, but we'll tolerate it
- break
- }
- if err != nil {
- return nil, err
- }
- typ = typ[:len(typ)-1]
+ typ, err := bufReader.ReadString('\t')
+ if err == io.EOF {
+ // This should not happen, but we'll tolerate it
+ break
+ }
+ if err != nil {
+ return err
+ }
+ typ = typ[:len(typ)-1]
- refName, err := bufReader.ReadString('\n')
- if err == io.EOF {
- // This should not happen, but we'll tolerate it
- break
- }
- if err != nil {
- return nil, err
- }
- refName = refName[:len(refName)-1]
+ refName, err := bufReader.ReadString('\n')
+ if err == io.EOF {
+ // This should not happen, but we'll tolerate it
+ break
+ }
+ if err != nil {
+ return err
+ }
+ refName = refName[:len(refName)-1]
- // refName cannot be HEAD but can be remotes or stash
- if strings.HasPrefix(refName, RemotePrefix) || refName == "/refs/stash" {
- continue
- }
+ // refName cannot be HEAD but can be remotes or stash
+ if strings.HasPrefix(refName, RemotePrefix) || refName == "/refs/stash" {
+ continue
+ }
- if pattern == "" || strings.HasPrefix(refName, pattern) {
- r := &Reference{
- Name: refName,
- Object: MustIDFromString(sha),
- Type: typ,
- repo: repo,
+ if pattern == "" || strings.HasPrefix(refName, pattern) {
+ r := &Reference{
+ Name: refName,
+ Object: MustIDFromString(sha),
+ Type: typ,
+ repo: repo,
+ }
+ refs = append(refs, r)
+ }
}
- refs = append(refs, r)
- }
- }
-
- return refs, nil
+ return nil
+ }).RunWithStderr(repo.Ctx)
+ return refs, err
}
diff --git a/modules/git/repo_stats.go b/modules/git/repo_stats.go
index cfb35288fe..1dd77f05d4 100644
--- a/modules/git/repo_stats.go
+++ b/modules/git/repo_stats.go
@@ -5,9 +5,7 @@ package git
import (
"bufio"
- "context"
"fmt"
- "os"
"sort"
"strconv"
"strings"
@@ -55,15 +53,6 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string)
}
stats.CommitCountInAllBranches = c
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- return nil, err
- }
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
-
gitCmd := gitcmd.NewCommand("log", "--numstat", "--no-merges", "--pretty=format:---%n%h%n%aN%n%aE%n", "--date=iso").
AddOptionFormat("--since=%s", since)
if len(branch) == 0 {
@@ -72,13 +61,11 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string)
gitCmd.AddArguments("--first-parent").AddDynamicArguments(branch)
}
- stderr := new(strings.Builder)
+ stdoutReader, stdoutReaderClose := gitCmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
err = gitCmd.
WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithStderr(stderr).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
scanner := bufio.NewScanner(stdoutReader)
scanner.Split(bufio.ScanLines)
stats.CommitCount = 0
@@ -129,7 +116,6 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string)
}
}
if err = scanner.Err(); err != nil {
- _ = stdoutReader.Close()
return fmt.Errorf("GetCodeActivityStats scan: %w", err)
}
a := make([]*CodeActivityAuthor, 0, len(authors))
@@ -143,12 +129,11 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string)
stats.AuthorCount = int64(len(authors))
stats.ChangedFiles = int64(len(files))
stats.Authors = a
- _ = stdoutReader.Close()
return nil
}).
- Run(repo.Ctx)
+ RunWithStderr(repo.Ctx)
if err != nil {
- return nil, fmt.Errorf("Failed to get GetCodeActivityStats for repository.\nError: %w\nStderr: %s", err, stderr)
+ return nil, fmt.Errorf("GetCodeActivityStats: %w", err)
}
return stats, nil
diff --git a/modules/git/repo_tag.go b/modules/git/repo_tag.go
index 4ad0c6e5ab..2599236ae0 100644
--- a/modules/git/repo_tag.go
+++ b/modules/git/repo_tag.go
@@ -6,7 +6,6 @@ package git
import (
"fmt"
- "io"
"strings"
"code.gitea.io/gitea/modules/git/foreachref"
@@ -115,51 +114,42 @@ func (repo *Repository) GetTagInfos(page, pageSize int) ([]*Tag, int, error) {
// https://git-scm.com/docs/git-for-each-ref#Documentation/git-for-each-ref.txt-refname
forEachRefFmt := foreachref.NewFormat("objecttype", "refname:lstrip=2", "object", "objectname", "creator", "contents", "contents:signature")
- stdoutReader, stdoutWriter := io.Pipe()
- defer stdoutReader.Close()
- defer stdoutWriter.Close()
- stderr := strings.Builder{}
-
- go func() {
- err := gitcmd.NewCommand("for-each-ref").
- AddOptionFormat("--format=%s", forEachRefFmt.Flag()).
- AddArguments("--sort", "-*creatordate", "refs/tags").
- WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithStderr(&stderr).
- Run(repo.Ctx)
- if err != nil {
- _ = stdoutWriter.CloseWithError(gitcmd.ConcatenateError(err, stderr.String()))
- } else {
- _ = stdoutWriter.Close()
- }
- }()
-
var tags []*Tag
- parser := forEachRefFmt.Parser(stdoutReader)
- for {
- ref := parser.Next()
- if ref == nil {
- break
- }
+ var tagsTotal int
+ cmd := gitcmd.NewCommand("for-each-ref")
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+ err := cmd.AddOptionFormat("--format=%s", forEachRefFmt.Flag()).
+ AddArguments("--sort", "-*creatordate", "refs/tags").
+ WithDir(repo.Path).
+ WithPipelineFunc(func(context gitcmd.Context) error {
+ parser := forEachRefFmt.Parser(stdoutReader)
+ for {
+ ref := parser.Next()
+ if ref == nil {
+ break
+ }
- tag, err := parseTagRef(ref)
- if err != nil {
- return nil, 0, fmt.Errorf("GetTagInfos: parse tag: %w", err)
- }
- tags = append(tags, tag)
- }
- if err := parser.Err(); err != nil {
- return nil, 0, fmt.Errorf("GetTagInfos: parse output: %w", err)
- }
+ tag, err := parseTagRef(ref)
+ if err != nil {
+ return fmt.Errorf("GetTagInfos: parse tag: %w", err)
+ }
+ tags = append(tags, tag)
+ }
+ if err := parser.Err(); err != nil {
+ return fmt.Errorf("GetTagInfos: parse output: %w", err)
+ }
- sortTagsByTime(tags)
- tagsTotal := len(tags)
- if page != 0 {
- tags = util.PaginateSlice(tags, page, pageSize).([]*Tag)
- }
+ sortTagsByTime(tags)
+ tagsTotal = len(tags)
+ if page != 0 {
+ tags = util.PaginateSlice(tags, page, pageSize).([]*Tag)
+ }
+ return nil
+ }).
+ RunWithStderr(repo.Ctx)
- return tags, tagsTotal, nil
+ return tags, tagsTotal, err
}
// parseTagRef parses a tag from a 'git for-each-ref'-produced reference.
diff --git a/modules/git/repo_tag_nogogit.go b/modules/git/repo_tag_nogogit.go
index 5f79b68a9a..a9ac040821 100644
--- a/modules/git/repo_tag_nogogit.go
+++ b/modules/git/repo_tag_nogogit.go
@@ -24,23 +24,19 @@ func (repo *Repository) IsTagExist(name string) bool {
// GetTagType gets the type of the tag, either commit (simple) or tag (annotated)
func (repo *Repository) GetTagType(id ObjectID) (string, error) {
- wr, rd, cancel, err := repo.CatFileBatchCheck(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return "", err
}
defer cancel()
- _, err = wr.Write([]byte(id.String() + "\n"))
- if err != nil {
- return "", err
- }
- _, typ, _, err := ReadBatchLine(rd)
+ info, err := batch.QueryInfo(id.String())
if err != nil {
if IsErrNotExist(err) {
return "", ErrNotExist{ID: id.String()}
}
return "", err
}
- return typ, nil
+ return info.Type, nil
}
func (repo *Repository) getTag(tagID ObjectID, name string) (*Tag, error) {
@@ -88,22 +84,20 @@ func (repo *Repository) getTag(tagID ObjectID, name string) (*Tag, error) {
}
// The tag is an annotated tag with a message.
- wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
- if _, err := wr.Write([]byte(tagID.String() + "\n")); err != nil {
- return nil, err
- }
- _, typ, size, err := ReadBatchLine(rd)
+ info, rd, err := batch.QueryContent(tagID.String())
if err != nil {
if errors.Is(err, io.EOF) || IsErrNotExist(err) {
return nil, ErrNotExist{ID: tagID.String()}
}
return nil, err
}
+ typ, size := info.Type, info.Size
if typ != "tag" {
if err := DiscardFull(rd, size+1); err != nil {
return nil, err
diff --git a/modules/git/repo_tree.go b/modules/git/repo_tree.go
index 964342ba00..e65e2441ed 100644
--- a/modules/git/repo_tree.go
+++ b/modules/git/repo_tree.go
@@ -58,16 +58,12 @@ func (repo *Repository) CommitTree(author, committer *Signature, tree *Tree, opt
cmd.AddArguments("--no-gpg-sign")
}
- stdout := new(bytes.Buffer)
- stderr := new(bytes.Buffer)
- err := cmd.WithEnv(env).
+ stdout, _, err := cmd.WithEnv(env).
WithDir(repo.Path).
- WithStdin(messageBytes).
- WithStdout(stdout).
- WithStderr(stderr).
- Run(repo.Ctx)
+ WithStdinBytes(messageBytes.Bytes()).
+ RunStdString(repo.Ctx)
if err != nil {
- return nil, gitcmd.ConcatenateError(err, stderr.String())
+ return nil, err
}
- return NewIDFromString(strings.TrimSpace(stdout.String()))
+ return NewIDFromString(strings.TrimSpace(stdout))
}
diff --git a/modules/git/repo_tree_nogogit.go b/modules/git/repo_tree_nogogit.go
index 1954f85162..82a61072c9 100644
--- a/modules/git/repo_tree_nogogit.go
+++ b/modules/git/repo_tree_nogogit.go
@@ -10,24 +10,21 @@ import (
)
func (repo *Repository) getTree(id ObjectID) (*Tree, error) {
- wr, rd, cancel, err := repo.CatFileBatch(repo.Ctx)
+ batch, cancel, err := repo.CatFileBatch(repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
- _, _ = wr.Write([]byte(id.String() + "\n"))
-
- // ignore the SHA
- _, typ, size, err := ReadBatchLine(rd)
+ info, rd, err := batch.QueryContent(id.String())
if err != nil {
return nil, err
}
- switch typ {
+ switch info.Type {
case "tag":
resolvedID := id
- data, err := io.ReadAll(io.LimitReader(rd, size))
+ data, err := io.ReadAll(io.LimitReader(rd, info.Size))
if err != nil {
return nil, err
}
@@ -36,17 +33,14 @@ func (repo *Repository) getTree(id ObjectID) (*Tree, error) {
return nil, err
}
- if _, err := wr.Write([]byte(tag.Object.String() + "\n")); err != nil {
- return nil, err
- }
- commit, err := repo.getCommitFromBatchReader(wr, rd, tag.Object)
+ commit, err := repo.getCommitWithBatch(batch, tag.Object)
if err != nil {
return nil, err
}
commit.Tree.ResolvedID = resolvedID
return &commit.Tree, nil
case "commit":
- commit, err := CommitFromReader(repo, id, io.LimitReader(rd, size))
+ commit, err := CommitFromReader(repo, id, io.LimitReader(rd, info.Size))
if err != nil {
return nil, err
}
@@ -62,14 +56,14 @@ func (repo *Repository) getTree(id ObjectID) (*Tree, error) {
if err != nil {
return nil, err
}
- tree.entries, err = catBatchParseTreeEntries(objectFormat, tree, rd, size)
+ tree.entries, err = catBatchParseTreeEntries(objectFormat, tree, rd, info.Size)
if err != nil {
return nil, err
}
tree.entriesParsed = true
return tree, nil
default:
- if err := DiscardFull(rd, size+1); err != nil {
+ if err := DiscardFull(rd, info.Size+1); err != nil {
return nil, err
}
return nil, ErrNotExist{
diff --git a/modules/git/submodule.go b/modules/git/submodule.go
index 45059eae77..ed69cbe55d 100644
--- a/modules/git/submodule.go
+++ b/modules/git/submodule.go
@@ -7,7 +7,6 @@ import (
"bufio"
"context"
"fmt"
- "os"
"code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/log"
@@ -21,23 +20,15 @@ type TemplateSubmoduleCommit struct {
// GetTemplateSubmoduleCommits returns a list of submodules paths and their commits from a repository
// This function is only for generating new repos based on existing template, the template couldn't be too large.
func GetTemplateSubmoduleCommits(ctx context.Context, repoPath string) (submoduleCommits []TemplateSubmoduleCommit, _ error) {
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- return nil, err
- }
-
- err = gitcmd.NewCommand("ls-tree", "-r", "--", "HEAD").
- WithDir(repoPath).
- WithStdout(stdoutWriter).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
- defer stdoutReader.Close()
-
+ cmd := gitcmd.NewCommand("ls-tree", "-r", "--", "HEAD")
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+ err := cmd.WithDir(repoPath).
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
scanner := bufio.NewScanner(stdoutReader)
for scanner.Scan() {
entry, err := parseLsTreeLine(scanner.Bytes())
if err != nil {
- cancel()
return err
}
if entry.EntryMode == EntryModeCommit {
diff --git a/modules/git/tree_entry_gogit.go b/modules/git/tree_entry_gogit.go
index 27877a2e28..f092e70636 100644
--- a/modules/git/tree_entry_gogit.go
+++ b/modules/git/tree_entry_gogit.go
@@ -53,14 +53,9 @@ func (te *TreeEntry) Size() int64 {
// Blob returns the blob object the entry
func (te *TreeEntry) Blob() *Blob {
- encodedObj, err := te.ptree.repo.gogitRepo.Storer.EncodedObject(plumbing.AnyObject, te.toGogitTreeEntry().Hash)
- if err != nil {
- return nil
- }
-
return &Blob{
- ID: te.ID,
- gogitEncodedObj: encodedObj,
- name: te.Name(),
+ ID: te.ID,
+ repo: te.ptree.repo,
+ name: te.Name(),
}
}
diff --git a/modules/git/tree_entry_mode.go b/modules/git/tree_entry_mode.go
index f36c07bc2a..2ceba11374 100644
--- a/modules/git/tree_entry_mode.go
+++ b/modules/git/tree_entry_mode.go
@@ -4,7 +4,6 @@
package git
import (
- "fmt"
"strconv"
)
@@ -55,21 +54,38 @@ func (e EntryMode) IsExecutable() bool {
return e == EntryModeExec
}
-func ParseEntryMode(mode string) (EntryMode, error) {
+func ParseEntryMode(mode string) EntryMode {
switch mode {
case "000000":
- return EntryModeNoEntry, nil
+ return EntryModeNoEntry
case "100644":
- return EntryModeBlob, nil
+ return EntryModeBlob
case "100755":
- return EntryModeExec, nil
+ return EntryModeExec
case "120000":
- return EntryModeSymlink, nil
+ return EntryModeSymlink
case "160000":
- return EntryModeCommit, nil
- case "040000", "040755": // git uses 040000 for tree object, but some users may get 040755 for unknown reasons
- return EntryModeTree, nil
+ return EntryModeCommit
+ case "040000":
+ return EntryModeTree
default:
- return 0, fmt.Errorf("unparsable entry mode: %s", mode)
+ // git uses 040000 for tree object, but some users may get 040755 from non-standard git implementations
+ m, _ := strconv.ParseInt(mode, 8, 32)
+ modeInt := EntryMode(m)
+ switch modeInt & 0o770000 {
+ case 0o040000:
+ return EntryModeTree
+ case 0o160000:
+ return EntryModeCommit
+ case 0o120000:
+ return EntryModeSymlink
+ case 0o100000:
+ if modeInt&0o777 == 0o755 {
+ return EntryModeExec
+ }
+ return EntryModeBlob
+ default:
+ return EntryModeNoEntry
+ }
}
}
diff --git a/modules/git/tree_entry_nogogit.go b/modules/git/tree_entry_nogogit.go
index fd2f3c567f..0a19b38d3e 100644
--- a/modules/git/tree_entry_nogogit.go
+++ b/modules/git/tree_entry_nogogit.go
@@ -15,23 +15,19 @@ func (te *TreeEntry) Size() int64 {
return te.size
}
- wr, rd, cancel, err := te.ptree.repo.CatFileBatchCheck(te.ptree.repo.Ctx)
+ batch, cancel, err := te.ptree.repo.CatFileBatch(te.ptree.repo.Ctx)
if err != nil {
log.Debug("error whilst reading size for %s in %s. Error: %v", te.ID.String(), te.ptree.repo.Path, err)
return 0
}
defer cancel()
- _, err = wr.Write([]byte(te.ID.String() + "\n"))
- if err != nil {
- log.Debug("error whilst reading size for %s in %s. Error: %v", te.ID.String(), te.ptree.repo.Path, err)
- return 0
- }
- _, _, te.size, err = ReadBatchLine(rd)
+ info, err := batch.QueryInfo(te.ID.String())
if err != nil {
log.Debug("error whilst reading size for %s in %s. Error: %v", te.ID.String(), te.ptree.repo.Path, err)
return 0
}
+ te.size = info.Size
te.sized = true
return te.size
}
diff --git a/modules/git/tree_entry_test.go b/modules/git/tree_entry_test.go
index b28abfb545..3df6eeab68 100644
--- a/modules/git/tree_entry_test.go
+++ b/modules/git/tree_entry_test.go
@@ -22,8 +22,36 @@ func TestEntriesCustomSort(t *testing.T) {
&TreeEntry{name: "b-file", entryMode: EntryModeBlob},
}
expected := slices.Clone(entries)
- rand.Shuffle(len(entries), func(i, j int) { entries[i], entries[j] = entries[j], entries[i] })
- assert.NotEqual(t, expected, entries)
+ for slices.Equal(expected, entries) {
+ rand.Shuffle(len(entries), func(i, j int) { entries[i], entries[j] = entries[j], entries[i] })
+ }
entries.CustomSort(strings.Compare)
assert.Equal(t, expected, entries)
}
+
+func TestParseEntryMode(t *testing.T) {
+ tests := []struct {
+ modeStr string
+ expectMod EntryMode
+ }{
+ {"000000", EntryModeNoEntry},
+ {"000755", EntryModeNoEntry},
+
+ {"100644", EntryModeBlob},
+ {"100755", EntryModeExec},
+
+ {"120000", EntryModeSymlink},
+ {"120755", EntryModeSymlink},
+ {"160000", EntryModeCommit},
+ {"160755", EntryModeCommit},
+
+ {"040000", EntryModeTree},
+ {"040755", EntryModeTree},
+
+ {"777777", EntryModeNoEntry}, // invalid mode
+ }
+ for _, test := range tests {
+ mod := ParseEntryMode(test.modeStr)
+ assert.Equal(t, test.expectMod, mod, "modeStr: %s", test.modeStr)
+ }
+}
diff --git a/modules/git/tree_nogogit.go b/modules/git/tree_nogogit.go
index d0ddb1d041..d50c1ad629 100644
--- a/modules/git/tree_nogogit.go
+++ b/modules/git/tree_nogogit.go
@@ -27,30 +27,29 @@ func (t *Tree) ListEntries() (Entries, error) {
}
if t.repo != nil {
- wr, rd, cancel, err := t.repo.CatFileBatch(t.repo.Ctx)
+ batch, cancel, err := t.repo.CatFileBatch(t.repo.Ctx)
if err != nil {
return nil, err
}
defer cancel()
- _, _ = wr.Write([]byte(t.ID.String() + "\n"))
- _, typ, sz, err := ReadBatchLine(rd)
+ info, rd, err := batch.QueryContent(t.ID.String())
if err != nil {
return nil, err
}
- if typ == "commit" {
- treeID, err := ReadTreeID(rd, sz)
+
+ if info.Type == "commit" {
+ treeID, err := ReadTreeID(rd, info.Size)
if err != nil && err != io.EOF {
return nil, err
}
- _, _ = wr.Write([]byte(treeID + "\n"))
- _, typ, sz, err = ReadBatchLine(rd)
+ info, rd, err = batch.QueryContent(treeID)
if err != nil {
return nil, err
}
}
- if typ == "tree" {
- t.entries, err = catBatchParseTreeEntries(t.ID.Type(), t, rd, sz)
+ if info.Type == "tree" {
+ t.entries, err = catBatchParseTreeEntries(t.ID.Type(), t, rd, info.Size)
if err != nil {
return nil, err
}
@@ -59,7 +58,7 @@ func (t *Tree) ListEntries() (Entries, error) {
}
// Not a tree just use ls-tree instead
- if err := DiscardFull(rd, sz+1); err != nil {
+ if err := DiscardFull(rd, info.Size+1); err != nil {
return nil, err
}
}
diff --git a/modules/gitrepo/archive.go b/modules/gitrepo/archive.go
index b78922e126..191a1bd2c0 100644
--- a/modules/gitrepo/archive.go
+++ b/modules/gitrepo/archive.go
@@ -8,7 +8,9 @@ import (
"fmt"
"io"
"os"
+ "path"
"path/filepath"
+ "slices"
"strings"
"code.gitea.io/gitea/modules/git/gitcmd"
@@ -16,7 +18,7 @@ import (
)
// CreateArchive create archive content to the target path
-func CreateArchive(ctx context.Context, repo Repository, format string, target io.Writer, usePrefix bool, commitID string) error {
+func CreateArchive(ctx context.Context, repo Repository, format string, target io.Writer, usePrefix bool, commitID string, paths []string) error {
if format == "unknown" {
return fmt.Errorf("unknown format: %v", format)
}
@@ -28,11 +30,13 @@ func CreateArchive(ctx context.Context, repo Repository, format string, target i
cmd.AddOptionFormat("--format=%s", format)
cmd.AddDynamicArguments(commitID)
- var stderr strings.Builder
- if err := RunCmd(ctx, repo, cmd.WithStdout(target).WithStderr(&stderr)); err != nil {
- return gitcmd.ConcatenateError(err, stderr.String())
+ paths = slices.Clone(paths)
+ for i := range paths {
+ // although "git archive" already ensures the paths won't go outside the repo, we still clean them here for safety
+ paths[i] = path.Clean(paths[i])
}
- return nil
+ cmd.AddDynamicArguments(paths...)
+ return RunCmdWithStderr(ctx, repo, cmd.WithStdoutCopy(target))
}
// CreateBundle create bundle content to the target path
diff --git a/modules/gitrepo/blame.go b/modules/gitrepo/blame.go
index bd64c748d4..2352da1760 100644
--- a/modules/gitrepo/blame.go
+++ b/modules/gitrepo/blame.go
@@ -8,20 +8,19 @@ import (
"bytes"
"context"
"io"
- "os"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
- "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)
func LineBlame(ctx context.Context, repo Repository, revision, file string, line uint) (string, error) {
- return RunCmdString(ctx, repo,
+ stdout, _, err := RunCmdString(ctx, repo,
gitcmd.NewCommand("blame").
AddOptionFormat("-L %d,%d", line, line).
AddOptionValues("-p", revision).
AddDashesAndList(file))
+ return stdout, err
}
// BlamePart represents block of blame - continuous lines with one sha
@@ -34,8 +33,6 @@ type BlamePart struct {
// BlameReader returns part of file blame one by one
type BlameReader struct {
- output io.WriteCloser
- reader io.ReadCloser
bufferedReader *bufio.Reader
done chan error
lastSha *string
@@ -131,34 +128,42 @@ func (r *BlameReader) Close() error {
err := <-r.done
r.bufferedReader = nil
- _ = r.reader.Close()
- _ = r.output.Close()
- for _, cleanup := range r.cleanupFuncs {
- if cleanup != nil {
- cleanup()
- }
- }
+ r.cleanup()
return err
}
+func (r *BlameReader) cleanup() {
+ for _, cleanup := range r.cleanupFuncs {
+ cleanup()
+ }
+}
+
// CreateBlameReader creates reader for given repository, commit and file
-func CreateBlameReader(ctx context.Context, objectFormat git.ObjectFormat, repo Repository, commit *git.Commit, file string, bypassBlameIgnore bool) (rd *BlameReader, err error) {
- var ignoreRevsFileName string
- var ignoreRevsFileCleanup func()
+func CreateBlameReader(ctx context.Context, objectFormat git.ObjectFormat, repo Repository, commit *git.Commit, file string, bypassBlameIgnore bool) (rd *BlameReader, retErr error) {
defer func() {
- if err != nil && ignoreRevsFileCleanup != nil {
- ignoreRevsFileCleanup()
+ if retErr != nil {
+ rd.cleanup()
}
}()
+ rd = &BlameReader{
+ done: make(chan error, 1),
+ objectFormat: objectFormat,
+ }
+
cmd := gitcmd.NewCommand("blame", "--porcelain")
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ rd.bufferedReader = bufio.NewReader(stdoutReader)
+ rd.cleanupFuncs = append(rd.cleanupFuncs, stdoutReaderClose)
+
if git.DefaultFeatures().CheckVersionAtLeast("2.23") && !bypassBlameIgnore {
- ignoreRevsFileName, ignoreRevsFileCleanup, err = tryCreateBlameIgnoreRevsFile(commit)
+ ignoreRevsFileName, ignoreRevsFileCleanup, err := tryCreateBlameIgnoreRevsFile(commit)
if err != nil && !git.IsErrNotExist(err) {
return nil, err
- }
- if ignoreRevsFileName != "" {
+ } else if err == nil {
+ rd.ignoreRevsFile = ignoreRevsFileName
+ rd.cleanupFuncs = append(rd.cleanupFuncs, ignoreRevsFileCleanup)
// Possible improvement: use --ignore-revs-file /dev/stdin on unix
// There is no equivalent on Windows. May be implemented if Gitea uses an external git backend.
cmd.AddOptionValues("--ignore-revs-file", ignoreRevsFileName)
@@ -167,35 +172,12 @@ func CreateBlameReader(ctx context.Context, objectFormat git.ObjectFormat, repo
cmd.AddDynamicArguments(commit.ID.String()).AddDashesAndList(file)
- done := make(chan error, 1)
- reader, stdout, err := os.Pipe()
- if err != nil {
- return nil, err
- }
go func() {
- stderr := bytes.Buffer{}
// TODO: it doesn't work for directories (the directories shouldn't be "blamed"), and the "err" should be returned by "Read" but not by "Close"
- err := RunCmd(ctx, repo, cmd.WithUseContextTimeout(true).
- WithStdout(stdout).
- WithStderr(&stderr),
- )
- done <- err
- _ = stdout.Close()
- if err != nil {
- log.Error("Error running git blame (dir: %v): %v, stderr: %v", repoPath, err, stderr.String())
- }
+ rd.done <- RunCmdWithStderr(ctx, repo, cmd)
}()
- bufferedReader := bufio.NewReader(reader)
- return &BlameReader{
- output: stdout,
- reader: reader,
- bufferedReader: bufferedReader,
- done: done,
- ignoreRevsFile: ignoreRevsFileName,
- objectFormat: objectFormat,
- cleanupFuncs: []func(){ignoreRevsFileCleanup},
- }, nil
+ return rd, nil
}
func tryCreateBlameIgnoreRevsFile(commit *git.Commit) (string, func(), error) {
diff --git a/modules/gitrepo/branch.go b/modules/gitrepo/branch.go
index e05d75caf8..4c40d1fba3 100644
--- a/modules/gitrepo/branch.go
+++ b/modules/gitrepo/branch.go
@@ -36,14 +36,14 @@ func GetBranchCommitID(ctx context.Context, repo Repository, branch string) (str
// SetDefaultBranch sets default branch of repository.
func SetDefaultBranch(ctx context.Context, repo Repository, name string) error {
- _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "HEAD").
+ _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "HEAD").
AddDynamicArguments(git.BranchPrefix+name))
return err
}
// GetDefaultBranch gets default branch of repository.
func GetDefaultBranch(ctx context.Context, repo Repository) (string, error) {
- stdout, err := RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "HEAD"))
+ stdout, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("symbolic-ref", "HEAD"))
if err != nil {
return "", err
}
@@ -56,7 +56,7 @@ func GetDefaultBranch(ctx context.Context, repo Repository) (string, error) {
// IsReferenceExist returns true if given reference exists in the repository.
func IsReferenceExist(ctx context.Context, repo Repository, name string) bool {
- _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("show-ref", "--verify").AddDashesAndList(name))
+ _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("show-ref", "--verify").AddDashesAndList(name))
return err == nil
}
@@ -76,7 +76,7 @@ func DeleteBranch(ctx context.Context, repo Repository, name string, force bool)
}
cmd.AddDashesAndList(name)
- _, err := RunCmdString(ctx, repo, cmd)
+ _, _, err := RunCmdString(ctx, repo, cmd)
return err
}
@@ -85,12 +85,12 @@ func CreateBranch(ctx context.Context, repo Repository, branch, oldbranchOrCommi
cmd := gitcmd.NewCommand("branch")
cmd.AddDashesAndList(branch, oldbranchOrCommit)
- _, err := RunCmdString(ctx, repo, cmd)
+ _, _, err := RunCmdString(ctx, repo, cmd)
return err
}
// RenameBranch rename a branch
func RenameBranch(ctx context.Context, repo Repository, from, to string) error {
- _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("branch", "-m").AddDynamicArguments(from, to))
+ _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("branch", "-m").AddDynamicArguments(from, to))
return err
}
diff --git a/modules/gitrepo/cat_file.go b/modules/gitrepo/cat_file.go
index c6ac74756f..42ca23acde 100644
--- a/modules/gitrepo/cat_file.go
+++ b/modules/gitrepo/cat_file.go
@@ -9,6 +9,6 @@ import (
"code.gitea.io/gitea/modules/git"
)
-func NewBatch(ctx context.Context, repo Repository) (*git.Batch, error) {
+func NewBatch(ctx context.Context, repo Repository) (git.CatFileBatchCloser, error) {
return git.NewBatch(ctx, repoPath(repo))
}
diff --git a/modules/gitrepo/command.go b/modules/gitrepo/command.go
index d4cb6093fc..fd21b9a725 100644
--- a/modules/gitrepo/command.go
+++ b/modules/gitrepo/command.go
@@ -13,11 +13,14 @@ func RunCmd(ctx context.Context, repo Repository, cmd *gitcmd.Command) error {
return cmd.WithDir(repoPath(repo)).WithParentCallerInfo().Run(ctx)
}
-func RunCmdString(ctx context.Context, repo Repository, cmd *gitcmd.Command) (string, error) {
- res, _, err := cmd.WithDir(repoPath(repo)).WithParentCallerInfo().RunStdString(ctx)
- return res, err
+func RunCmdString(ctx context.Context, repo Repository, cmd *gitcmd.Command) (string, string, gitcmd.RunStdError) {
+ return cmd.WithDir(repoPath(repo)).WithParentCallerInfo().RunStdString(ctx)
}
-func RunCmdBytes(ctx context.Context, repo Repository, cmd *gitcmd.Command) ([]byte, []byte, error) {
+func RunCmdBytes(ctx context.Context, repo Repository, cmd *gitcmd.Command) ([]byte, []byte, gitcmd.RunStdError) {
return cmd.WithDir(repoPath(repo)).WithParentCallerInfo().RunStdBytes(ctx)
}
+
+func RunCmdWithStderr(ctx context.Context, repo Repository, cmd *gitcmd.Command) gitcmd.RunStdError {
+ return cmd.WithDir(repoPath(repo)).WithParentCallerInfo().RunWithStderr(ctx)
+}
diff --git a/modules/gitrepo/commit.go b/modules/gitrepo/commit.go
index da0f3b85a2..0ab17862fe 100644
--- a/modules/gitrepo/commit.go
+++ b/modules/gitrepo/commit.go
@@ -88,7 +88,7 @@ func AllCommitsCount(ctx context.Context, repo Repository, hidePRRefs bool, file
cmd.AddDashesAndList(files...)
}
- stdout, err := RunCmdString(ctx, repo, cmd)
+ stdout, _, err := RunCmdString(ctx, repo, cmd)
if err != nil {
return 0, err
}
@@ -102,7 +102,7 @@ func GetFullCommitID(ctx context.Context, repo Repository, shortID string) (stri
// GetLatestCommitTime returns time for latest commit in repository (across all branches)
func GetLatestCommitTime(ctx context.Context, repo Repository) (time.Time, error) {
- stdout, err := RunCmdString(ctx, repo,
+ stdout, _, err := RunCmdString(ctx, repo,
gitcmd.NewCommand("for-each-ref", "--sort=-committerdate", git.BranchPrefix, "--count", "1", "--format=%(committerdate)"))
if err != nil {
return time.Time{}, err
diff --git a/modules/gitrepo/commit_file.go b/modules/gitrepo/commit_file.go
index cd4bb340d0..437b3b51ad 100644
--- a/modules/gitrepo/commit_file.go
+++ b/modules/gitrepo/commit_file.go
@@ -5,7 +5,6 @@ package gitrepo
import (
"bufio"
- "bytes"
"context"
"io"
@@ -68,24 +67,20 @@ func parseCommitFileStatus(fileStatus *CommitFileStatus, stdout io.Reader) {
// GetCommitFileStatus returns file status of commit in given repository.
func GetCommitFileStatus(ctx context.Context, repo Repository, commitID string) (*CommitFileStatus, error) {
- stdout, w := io.Pipe()
+ cmd := gitcmd.NewCommand("log", "--name-status", "-m", "--pretty=format:", "--first-parent", "--no-renames", "-z", "-1")
+ stdout, stdoutClose := cmd.MakeStdoutPipe()
+ defer stdoutClose()
done := make(chan struct{})
fileStatus := NewCommitFileStatus()
go func() {
parseCommitFileStatus(fileStatus, stdout)
close(done)
}()
-
- stderr := new(bytes.Buffer)
- err := gitcmd.NewCommand("log", "--name-status", "-m", "--pretty=format:", "--first-parent", "--no-renames", "-z", "-1").
- AddDynamicArguments(commitID).
+ err := cmd.AddDynamicArguments(commitID).
WithDir(repoPath(repo)).
- WithStdout(w).
- WithStderr(stderr).
- Run(ctx)
- w.Close() // Close writer to exit parsing goroutine
+ RunWithStderr(ctx)
if err != nil {
- return nil, gitcmd.ConcatenateError(err, stderr.String())
+ return nil, err
}
<-done
diff --git a/modules/gitrepo/compare.go b/modules/gitrepo/compare.go
index b8e4c30d6c..06cf880d99 100644
--- a/modules/gitrepo/compare.go
+++ b/modules/gitrepo/compare.go
@@ -22,7 +22,7 @@ type DivergeObject struct {
func GetDivergingCommits(ctx context.Context, repo Repository, baseBranch, targetBranch string) (*DivergeObject, error) {
cmd := gitcmd.NewCommand("rev-list", "--count", "--left-right").
AddDynamicArguments(baseBranch + "..." + targetBranch).AddArguments("--")
- stdout, err1 := RunCmdString(ctx, repo, cmd)
+ stdout, _, err1 := RunCmdString(ctx, repo, cmd)
if err1 != nil {
return nil, err1
}
diff --git a/modules/gitrepo/config.go b/modules/gitrepo/config.go
index bc1746fc3f..9be3ef94ae 100644
--- a/modules/gitrepo/config.go
+++ b/modules/gitrepo/config.go
@@ -12,7 +12,7 @@ import (
)
func GitConfigGet(ctx context.Context, repo Repository, key string) (string, error) {
- result, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config", "--get").
+ result, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config", "--get").
AddDynamicArguments(key))
if err != nil {
return "", err
@@ -27,7 +27,7 @@ func getRepoConfigLockKey(repoStoragePath string) string {
// GitConfigAdd add a git configuration key to a specific value for the given repository.
func GitConfigAdd(ctx context.Context, repo Repository, key, value string) error {
return globallock.LockAndDo(ctx, getRepoConfigLockKey(repo.RelativePath()), func(ctx context.Context) error {
- _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config", "--add").
+ _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config", "--add").
AddDynamicArguments(key, value))
return err
})
@@ -38,7 +38,7 @@ func GitConfigAdd(ctx context.Context, repo Repository, key, value string) error
// If the key exists, it will be updated to the new value.
func GitConfigSet(ctx context.Context, repo Repository, key, value string) error {
return globallock.LockAndDo(ctx, getRepoConfigLockKey(repo.RelativePath()), func(ctx context.Context) error {
- _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config").
+ _, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("config").
AddDynamicArguments(key, value))
return err
})
diff --git a/modules/gitrepo/diff.go b/modules/gitrepo/diff.go
index ad7f24762f..0092cf0bb8 100644
--- a/modules/gitrepo/diff.go
+++ b/modules/gitrepo/diff.go
@@ -4,7 +4,6 @@
package gitrepo
import (
- "bytes"
"context"
"fmt"
"io"
@@ -22,7 +21,7 @@ func GetDiffShortStatByCmdArgs(ctx context.Context, repo Repository, trustedArgs
// we get:
// " 9902 files changed, 2034198 insertions(+), 298800 deletions(-)\n"
cmd := gitcmd.NewCommand("diff", "--shortstat").AddArguments(trustedArgs...).AddDynamicArguments(dynamicArgs...)
- stdout, err := RunCmdString(ctx, repo, cmd)
+ stdout, _, err := RunCmdString(ctx, repo, cmd)
if err != nil {
return 0, 0, 0, err
}
@@ -65,12 +64,8 @@ func parseDiffStat(stdout string) (numFiles, totalAdditions, totalDeletions int,
// GetReverseRawDiff dumps the reverse diff results of repository in given commit ID to io.Writer.
func GetReverseRawDiff(ctx context.Context, repo Repository, commitID string, writer io.Writer) error {
- stderr := new(bytes.Buffer)
- if err := RunCmd(ctx, repo, gitcmd.NewCommand("show", "--pretty=format:revert %H%n", "-R").
+ return RunCmdWithStderr(ctx, repo, gitcmd.NewCommand("show", "--pretty=format:revert %H%n", "-R").
AddDynamicArguments(commitID).
- WithStdout(writer).
- WithStderr(stderr)); err != nil {
- return fmt.Errorf("GetReverseRawDiff: %w - %s", err, stderr)
- }
- return nil
+ WithStdoutCopy(writer),
+ )
}
diff --git a/modules/gitrepo/fetch.go b/modules/gitrepo/fetch.go
new file mode 100644
index 0000000000..0474d6111e
--- /dev/null
+++ b/modules/gitrepo/fetch.go
@@ -0,0 +1,28 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitrepo
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/modules/git/gitcmd"
+ "code.gitea.io/gitea/modules/globallock"
+)
+
+// FetchRemoteCommit fetches a specific commit and its related objects from a remote
+// repository into the managed repository.
+//
+// If no reference (branch, tag, or other ref) points to the fetched commit, it will
+// be treated as unreachable and cleaned up by `git gc` after the default prune
+// expiration period (2 weeks). Ref: https://www.kernel.org/pub/software/scm/git/docs/git-gc.html
+//
+// This behavior is sufficient for temporary operations, such as determining the
+// merge base between commits.
+func FetchRemoteCommit(ctx context.Context, repo, remoteRepo Repository, commitID string) error {
+ return globallock.LockAndDo(ctx, getRepoWriteLockKey(repo.RelativePath()), func(ctx context.Context) error {
+ return RunCmd(ctx, repo, gitcmd.NewCommand("fetch", "--no-tags").
+ AddDynamicArguments(repoPath(remoteRepo)).
+ AddDynamicArguments(commitID))
+ })
+}
diff --git a/modules/gitrepo/gitrepo.go b/modules/gitrepo/gitrepo.go
index 3a9b0a1c89..535d72ed98 100644
--- a/modules/gitrepo/gitrepo.go
+++ b/modules/gitrepo/gitrepo.go
@@ -80,7 +80,12 @@ func DeleteRepository(ctx context.Context, repo Repository) error {
// RenameRepository renames a repository's name on disk
func RenameRepository(ctx context.Context, repo, newRepo Repository) error {
- if err := util.Rename(repoPath(repo), repoPath(newRepo)); err != nil {
+ dstDir := repoPath(newRepo)
+ if err := os.MkdirAll(filepath.Dir(dstDir), os.ModePerm); err != nil {
+ return fmt.Errorf("Failed to create dir %s: %w", filepath.Dir(dstDir), err)
+ }
+
+ if err := util.Rename(repoPath(repo), dstDir); err != nil {
return fmt.Errorf("rename repository directory: %w", err)
}
return nil
@@ -116,5 +121,8 @@ func RemoveRepoFileOrDir(ctx context.Context, repo Repository, relativeFileOrDir
func CreateRepoFile(ctx context.Context, repo Repository, relativeFilePath string) (io.WriteCloser, error) {
absoluteFilePath := filepath.Join(repoPath(repo), relativeFilePath)
+ if err := os.MkdirAll(filepath.Dir(absoluteFilePath), os.ModePerm); err != nil {
+ return nil, err
+ }
return os.Create(absoluteFilePath)
}
diff --git a/modules/gitrepo/main_test.go b/modules/gitrepo/main_test.go
index 6e6636ce77..08afdffcc6 100644
--- a/modules/gitrepo/main_test.go
+++ b/modules/gitrepo/main_test.go
@@ -4,29 +4,22 @@
package gitrepo
import (
- "os"
"path/filepath"
"testing"
- "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/tempdir"
- "code.gitea.io/gitea/modules/test"
)
func TestMain(m *testing.M) {
- gitHomePath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("git-home")
- if err != nil {
- log.Fatal("Unable to create temp dir: %v", err)
- }
- defer cleanup()
-
// resolve repository path relative to the test directory
- testRootDir := test.SetupGiteaRoot()
+ setting.SetupGiteaTestEnv()
+ giteaRoot := setting.GetGiteaTestSourceRoot()
repoPath = func(repo Repository) string {
- return filepath.Join(testRootDir, "/modules/git/tests/repos", repo.RelativePath())
+ if filepath.IsAbs(repo.RelativePath()) {
+ return repo.RelativePath() // for testing purpose only
+ }
+ return filepath.Join(giteaRoot, "modules/git/tests/repos", repo.RelativePath())
}
-
- setting.Git.HomePath = gitHomePath
- os.Exit(m.Run())
+ git.RunGitTests(m)
}
diff --git a/modules/gitrepo/merge.go b/modules/gitrepo/merge.go
new file mode 100644
index 0000000000..8d58e21c8d
--- /dev/null
+++ b/modules/gitrepo/merge.go
@@ -0,0 +1,22 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitrepo
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/modules/git/gitcmd"
+)
+
+// MergeBase checks and returns merge base of two commits.
+func MergeBase(ctx context.Context, repo Repository, baseCommitID, headCommitID string) (string, error) {
+ mergeBase, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("merge-base").
+ AddDashesAndList(baseCommitID, headCommitID))
+ if err != nil {
+ return "", fmt.Errorf("get merge-base of %s and %s failed: %w", baseCommitID, headCommitID, err)
+ }
+ return strings.TrimSpace(mergeBase), nil
+}
diff --git a/modules/gitrepo/merge_tree.go b/modules/gitrepo/merge_tree.go
new file mode 100644
index 0000000000..6151b1179f
--- /dev/null
+++ b/modules/gitrepo/merge_tree.go
@@ -0,0 +1,59 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitrepo
+
+import (
+ "bufio"
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/modules/git/gitcmd"
+ "code.gitea.io/gitea/modules/util"
+)
+
+const MaxConflictedDetectFiles = 10
+
+// MergeTree performs a merge between two commits (baseRef and headRef) with an optional merge base.
+// It returns the resulting tree hash, a list of conflicted files (if any), and an error if the operation fails.
+// If there are no conflicts, the list of conflicted files will be nil.
+func MergeTree(ctx context.Context, repo Repository, baseRef, headRef, mergeBase string) (treeID string, isErrHasConflicts bool, conflictFiles []string, _ error) {
+ cmd := gitcmd.NewCommand("merge-tree", "--write-tree", "-z", "--name-only", "--no-messages").
+ AddOptionFormat("--merge-base=%s", mergeBase).
+ AddDynamicArguments(baseRef, headRef)
+
+ stdout, stdoutClose := cmd.MakeStdoutPipe()
+ defer stdoutClose()
+ cmd.WithPipelineFunc(func(ctx gitcmd.Context) error {
+ // https://git-scm.com/docs/git-merge-tree/2.38.0#OUTPUT
+ // For a conflicted merge, the output is:
+ // NUL
+ // NUL
+ // NUL
+ // ...
+ scanner := bufio.NewScanner(stdout)
+ scanner.Split(util.BufioScannerSplit(0))
+ for scanner.Scan() {
+ line := scanner.Text()
+ if treeID == "" { // first line is tree ID
+ treeID = line
+ continue
+ }
+ conflictFiles = append(conflictFiles, line)
+ if len(conflictFiles) >= MaxConflictedDetectFiles {
+ break
+ }
+ }
+ return scanner.Err()
+ })
+
+ err := RunCmdWithStderr(ctx, repo, cmd)
+ // For a successful, non-conflicted merge, the exit status is 0. When the merge has conflicts, the exit status is 1.
+ // A merge can have conflicts without having individual files conflict
+ // https://git-scm.com/docs/git-merge-tree/2.38.0#_mistakes_to_avoid
+ isErrHasConflicts = gitcmd.IsErrorExitCode(err, 1)
+ if err == nil || isErrHasConflicts {
+ return treeID, isErrHasConflicts, conflictFiles, nil
+ }
+ return "", false, nil, fmt.Errorf("run merge-tree failed: %w", err)
+}
diff --git a/modules/gitrepo/merge_tree_test.go b/modules/gitrepo/merge_tree_test.go
new file mode 100644
index 0000000000..9327a0c3d8
--- /dev/null
+++ b/modules/gitrepo/merge_tree_test.go
@@ -0,0 +1,82 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitrepo
+
+import (
+ "path/filepath"
+ "testing"
+
+ "code.gitea.io/gitea/modules/git/gitcmd"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// prepareRepoDirRenameConflict builds a bare repository (via git fast-import)
+// with three branches: "master" (files z/b, z/c), "split" (moves the z/ files
+// into w/ and y/), and "add" (adds z/d on top of master). Merging "add" and
+// "split" then yields a directory-rename conflict with no individually
+// conflicted files. It returns the repository path.
+func prepareRepoDirRenameConflict(t *testing.T) string {
+	repoDir := filepath.Join(t.TempDir(), "repo-dir-rename-conflict.git")
+	require.NoError(t, gitcmd.NewCommand("init", "--bare").AddDynamicArguments(repoDir).Run(t.Context()))
+	// Raw fast-import stream — consumed verbatim by git, do not re-indent.
+	// Fixed author/committer timestamps keep the resulting object IDs deterministic.
+	stdin := `blob
+mark :1
+data 2
+b
+
+blob
+mark :2
+data 2
+c
+
+reset refs/heads/master
+commit refs/heads/master
+mark :3
+author test 1769202331 -0800
+committer test 1769202331 -0800
+data 2
+O
+M 100644 :1 z/b
+M 100644 :2 z/c
+
+commit refs/heads/split
+mark :4
+author test 1769202336 -0800
+committer test 1769202336 -0800
+data 2
+A
+from :3
+M 100644 :2 w/c
+M 100644 :1 y/b
+D z/b
+D z/c
+
+blob
+mark :5
+data 2
+d
+
+commit refs/heads/add
+mark :6
+author test 1769202342 -0800
+committer test 1769202342 -0800
+data 2
+B
+from :3
+M 100644 :5 z/d
+`
+	// Feed the stream to git fast-import inside the freshly created bare repo.
+	require.NoError(t, gitcmd.NewCommand("fast-import").WithDir(repoDir).WithStdinBytes([]byte(stdin)).Run(t.Context()))
+	return repoDir
+}
+
+// TestMergeTreeDirectoryRenameConflictWithoutFiles verifies that MergeTree
+// reports a conflicted merge (merge-tree exit status 1) even when the conflict
+// is a directory rename that lists no individually conflicted files.
+func TestMergeTreeDirectoryRenameConflictWithoutFiles(t *testing.T) {
+	repoDir := prepareRepoDirRenameConflict(t)
+	require.DirExists(t, repoDir)
+	repo := &mockRepository{path: repoDir}
+
+	mergeBase, err := MergeBase(t.Context(), repo, "add", "split")
+	require.NoError(t, err)
+
+	treeID, conflicted, conflictedFiles, err := MergeTree(t.Context(), repo, "add", "split", mergeBase)
+	require.NoError(t, err)
+	assert.True(t, conflicted)
+	assert.Empty(t, conflictedFiles) // conflicted merge, but no per-file conflict entries
+	// tree ID is stable because the fast-import stream uses fixed timestamps
+	assert.Equal(t, "5e3dd4cfc5b11e278a35b2daa83b7274175e3ab1", treeID)
+}
diff --git a/modules/gitrepo/remote.go b/modules/gitrepo/remote.go
index ce43988461..3cbc34eedb 100644
--- a/modules/gitrepo/remote.go
+++ b/modules/gitrepo/remote.go
@@ -6,8 +6,6 @@ package gitrepo
import (
"context"
"errors"
- "io"
- "time"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
@@ -36,7 +34,7 @@ func GitRemoteAdd(ctx context.Context, repo Repository, remoteName, remoteURL st
return errors.New("unknown remote option: " + string(options[0]))
}
}
- _, err := RunCmdString(ctx, repo, cmd.AddDynamicArguments(remoteName, remoteURL))
+ _, _, err := RunCmdString(ctx, repo, cmd.AddDynamicArguments(remoteName, remoteURL))
return err
})
}
@@ -44,7 +42,7 @@ func GitRemoteAdd(ctx context.Context, repo Repository, remoteName, remoteURL st
func GitRemoteRemove(ctx context.Context, repo Repository, remoteName string) error {
return globallock.LockAndDo(ctx, getRepoConfigLockKey(repo.RelativePath()), func(ctx context.Context) error {
cmd := gitcmd.NewCommand("remote", "rm").AddDynamicArguments(remoteName)
- _, err := RunCmdString(ctx, repo, cmd)
+ _, _, err := RunCmdString(ctx, repo, cmd)
return err
})
}
@@ -60,21 +58,3 @@ func GitRemoteGetURL(ctx context.Context, repo Repository, remoteName string) (*
}
return giturl.ParseGitURL(addr)
}
-
-// GitRemotePrune prunes the remote branches that no longer exist in the remote repository.
-func GitRemotePrune(ctx context.Context, repo Repository, remoteName string, timeout time.Duration, stdout, stderr io.Writer) error {
- return RunCmd(ctx, repo, gitcmd.NewCommand("remote", "prune").
- AddDynamicArguments(remoteName).
- WithTimeout(timeout).
- WithStdout(stdout).
- WithStderr(stderr))
-}
-
-// GitRemoteUpdatePrune updates the remote branches and prunes the ones that no longer exist in the remote repository.
-func GitRemoteUpdatePrune(ctx context.Context, repo Repository, remoteName string, timeout time.Duration, stdout, stderr io.Writer) error {
- return RunCmd(ctx, repo, gitcmd.NewCommand("remote", "update", "--prune").
- AddDynamicArguments(remoteName).
- WithTimeout(timeout).
- WithStdout(stdout).
- WithStderr(stderr))
-}
diff --git a/modules/gitrepo/repo_lock.go b/modules/gitrepo/repo_lock.go
new file mode 100644
index 0000000000..2eb89ce807
--- /dev/null
+++ b/modules/gitrepo/repo_lock.go
@@ -0,0 +1,10 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitrepo
+
+// getRepoWriteLockKey returns the global lock key for write operations on the repository.
+// Parallel write operations on the same git repository should be avoided to prevent data corruption.
+// The "repo-write:" prefix namespaces the key so it cannot collide with other
+// global lock keys derived from the same storage path.
+func getRepoWriteLockKey(repoStoragePath string) string {
+	return "repo-write:" + repoStoragePath
+}
diff --git a/modules/graceful/manager.go b/modules/graceful/manager.go
index ee1872b999..51bd5a2334 100644
--- a/modules/graceful/manager.go
+++ b/modules/graceful/manager.go
@@ -74,12 +74,6 @@ func (g *Manager) RunWithCancel(rc RunCanceler) {
g.RunAtShutdown(context.Background(), rc.Cancel)
g.runningServerWaitGroup.Add(1)
defer g.runningServerWaitGroup.Done()
- defer func() {
- if err := recover(); err != nil {
- log.Critical("PANIC during RunWithCancel: %v\nStacktrace: %s", err, log.Stack(2))
- g.doShutdown()
- }
- }()
rc.Run()
}
@@ -89,12 +83,6 @@ func (g *Manager) RunWithCancel(rc RunCanceler) {
func (g *Manager) RunWithShutdownContext(run func(context.Context)) {
g.runningServerWaitGroup.Add(1)
defer g.runningServerWaitGroup.Done()
- defer func() {
- if err := recover(); err != nil {
- log.Critical("PANIC during RunWithShutdownContext: %v\nStacktrace: %s", err, log.Stack(2))
- g.doShutdown()
- }
- }()
ctx := g.ShutdownContext()
pprof.SetGoroutineLabels(ctx) // We don't have a label to restore back to but I think this is fine
run(ctx)
diff --git a/modules/graceful/net_unix.go b/modules/graceful/net_unix.go
index 796e00507c..a06f78dafe 100644
--- a/modules/graceful/net_unix.go
+++ b/modules/graceful/net_unix.go
@@ -290,11 +290,11 @@ func getActiveListenersToUnlink() []bool {
func getNotifySocket() (*net.UnixConn, error) {
if err := getProvidedFDs(); err != nil {
// This error will be logged elsewhere
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when no provided FDs are available
}
if notifySocketAddr == "" {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when notify socket is not configured
}
socketAddr := &net.UnixAddr{
diff --git a/modules/hcaptcha/hcaptcha_test.go b/modules/hcaptcha/hcaptcha_test.go
index 5906faf17c..6b207bfb77 100644
--- a/modules/hcaptcha/hcaptcha_test.go
+++ b/modules/hcaptcha/hcaptcha_test.go
@@ -8,7 +8,6 @@ import (
"io"
"net/http"
"net/url"
- "os"
"strings"
"testing"
"time"
@@ -20,10 +19,6 @@ const (
dummyToken = "10000000-aaaa-bbbb-cccc-000000000001"
)
-func TestMain(m *testing.M) {
- os.Exit(m.Run())
-}
-
type mockTransport struct{}
func (mockTransport) RoundTrip(req *http.Request) (*http.Response, error) {
diff --git a/modules/highlight/highlight.go b/modules/highlight/highlight.go
index 2b13e9c4ce..c7416c7a10 100644
--- a/modules/highlight/highlight.go
+++ b/modules/highlight/highlight.go
@@ -11,49 +11,41 @@ import (
gohtml "html"
"html/template"
"io"
- "path"
- "path/filepath"
"strings"
"sync"
- "code.gitea.io/gitea/modules/analyze"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"github.com/alecthomas/chroma/v2"
"github.com/alecthomas/chroma/v2/formatters/html"
- "github.com/alecthomas/chroma/v2/lexers"
"github.com/alecthomas/chroma/v2/styles"
- lru "github.com/hashicorp/golang-lru/v2"
)
// don't index files larger than this many bytes for performance purposes
const sizeLimit = 1024 * 1024
+type globalVarsType struct {
+ highlightMapping map[string]string
+ githubStyles *chroma.Style
+}
+
var (
- // For custom user mapping
- highlightMapping = map[string]string{}
-
- once sync.Once
-
- cache *lru.TwoQueueCache[string, any]
-
- githubStyles = styles.Get("github")
+ globalVarsMu sync.Mutex
+ globalVarsPtr *globalVarsType
)
-// NewContext loads custom highlight map from local config
-func NewContext() {
- once.Do(func() {
- highlightMapping = setting.GetHighlightMapping()
-
- // The size 512 is simply a conservative rule of thumb
- c, err := lru.New2Q[string, any](512)
- if err != nil {
- panic(fmt.Sprintf("failed to initialize LRU cache for highlighter: %s", err))
- }
- cache = c
- })
+// globalVars lazily initializes and returns the package-wide highlight state:
+// the custom extension->language mapping from settings and the chroma "github" style.
+func globalVars() *globalVarsType {
+	// in the future, the globalVars might need to be re-initialized when settings change, so don't use sync.Once here
+	globalVarsMu.Lock()
+	defer globalVarsMu.Unlock()
+	if globalVarsPtr == nil {
+		globalVarsPtr = &globalVarsType{}
+		globalVarsPtr.githubStyles = styles.Get("github")
+		globalVarsPtr.highlightMapping = setting.GetHighlightMapping()
+	}
+	return globalVarsPtr
+}
// UnsafeSplitHighlightedLines splits highlighted code into lines preserving HTML tags
@@ -88,59 +80,25 @@ func UnsafeSplitHighlightedLines(code template.HTML) (ret [][]byte) {
}
}
-// Code returns an HTML version of code string with chroma syntax highlighting classes and the matched lexer name
-func Code(fileName, language, code string) (output template.HTML, lexerName string) {
- NewContext()
-
+// RenderCodeSlowGuess tries to get a lexer by file name and language first,
+// if not found, it will try to guess the lexer by code content, which is slow (more than several hundreds of milliseconds).
+func RenderCodeSlowGuess(fileName, language, code string) (output template.HTML, lexer chroma.Lexer, lexerDisplayName string) {
// diff view newline will be passed as empty, change to literal '\n' so it can be copied
// preserve literal newline in blame view
if code == "" || code == "\n" {
- return "\n", ""
+ return "\n", nil, ""
}
if len(code) > sizeLimit {
- return template.HTML(template.HTMLEscapeString(code)), ""
+ return template.HTML(template.HTMLEscapeString(code)), nil, ""
}
- var lexer chroma.Lexer
-
- if len(language) > 0 {
- lexer = lexers.Get(language)
-
- if lexer == nil {
- // Attempt stripping off the '?'
- if before, _, ok := strings.Cut(language, "?"); ok {
- lexer = lexers.Get(before)
- }
- }
- }
-
- if lexer == nil {
- if val, ok := highlightMapping[path.Ext(fileName)]; ok {
- // use mapped value to find lexer
- lexer = lexers.Get(val)
- }
- }
-
- if lexer == nil {
- if l, ok := cache.Get(fileName); ok {
- lexer = l.(chroma.Lexer)
- }
- }
-
- if lexer == nil {
- lexer = lexers.Match(fileName)
- if lexer == nil {
- lexer = lexers.Fallback
- }
- cache.Add(fileName, lexer)
- }
-
- return CodeFromLexer(lexer, code), formatLexerName(lexer.Config().Name)
+ lexer = detectChromaLexerWithAnalyze(fileName, language, util.UnsafeStringToBytes(code)) // it is also slow
+ return RenderCodeByLexer(lexer, code), lexer, formatLexerName(lexer.Config().Name)
}
-// CodeFromLexer returns a HTML version of code string with chroma syntax highlighting classes
-func CodeFromLexer(lexer chroma.Lexer, code string) template.HTML {
+// RenderCodeByLexer returns a HTML version of code string with chroma syntax highlighting classes
+func RenderCodeByLexer(lexer chroma.Lexer, code string) template.HTML {
formatter := html.New(html.WithClasses(true),
html.WithLineNumbers(false),
html.PreventSurroundingPre(true),
@@ -155,7 +113,7 @@ func CodeFromLexer(lexer chroma.Lexer, code string) template.HTML {
return template.HTML(template.HTMLEscapeString(code))
}
// style not used for live site but need to pass something
- err = formatter.Format(htmlw, githubStyles, iterator)
+ err = formatter.Format(htmlw, globalVars().githubStyles, iterator)
if err != nil {
log.Error("Can't format code: %v", err)
return template.HTML(template.HTMLEscapeString(code))
@@ -167,12 +125,10 @@ func CodeFromLexer(lexer chroma.Lexer, code string) template.HTML {
return template.HTML(strings.TrimSuffix(htmlbuf.String(), "\n"))
}
-// File returns a slice of chroma syntax highlighted HTML lines of code and the matched lexer name
-func File(fileName, language string, code []byte) ([]template.HTML, string, error) {
- NewContext()
-
+// RenderFullFile returns a slice of chroma syntax highlighted HTML lines of code and the matched lexer name
+func RenderFullFile(fileName, language string, code []byte) ([]template.HTML, string, error) {
if len(code) > sizeLimit {
- return PlainText(code), "", nil
+ return RenderPlainText(code), "", nil
}
formatter := html.New(html.WithClasses(true),
@@ -180,31 +136,7 @@ func File(fileName, language string, code []byte) ([]template.HTML, string, erro
html.PreventSurroundingPre(true),
)
- var lexer chroma.Lexer
-
- // provided language overrides everything
- if language != "" {
- lexer = lexers.Get(language)
- }
-
- if lexer == nil {
- if val, ok := highlightMapping[filepath.Ext(fileName)]; ok {
- lexer = lexers.Get(val)
- }
- }
-
- if lexer == nil {
- guessLanguage := analyze.GetCodeLanguage(fileName, code)
-
- lexer = lexers.Get(guessLanguage)
- if lexer == nil {
- lexer = lexers.Match(fileName)
- if lexer == nil {
- lexer = lexers.Fallback
- }
- }
- }
-
+ lexer := detectChromaLexerWithAnalyze(fileName, language, code)
lexerName := formatLexerName(lexer.Config().Name)
iterator, err := lexer.Tokenise(nil, string(code))
@@ -218,7 +150,7 @@ func File(fileName, language string, code []byte) ([]template.HTML, string, erro
lines := make([]template.HTML, 0, len(tokensLines))
for _, tokens := range tokensLines {
iterator = chroma.Literator(tokens...)
- err = formatter.Format(htmlBuf, githubStyles, iterator)
+ err = formatter.Format(htmlBuf, globalVars().githubStyles, iterator)
if err != nil {
return nil, "", fmt.Errorf("can't format code: %w", err)
}
@@ -229,8 +161,8 @@ func File(fileName, language string, code []byte) ([]template.HTML, string, erro
return lines, lexerName, nil
}
-// PlainText returns non-highlighted HTML for code
-func PlainText(code []byte) []template.HTML {
+// RenderPlainText returns non-highlighted HTML for code
+func RenderPlainText(code []byte) []template.HTML {
r := bufio.NewReader(bytes.NewReader(code))
m := make([]template.HTML, 0, bytes.Count(code, []byte{'\n'})+1)
for {
diff --git a/modules/highlight/highlight_test.go b/modules/highlight/highlight_test.go
index 52873427a8..d026210475 100644
--- a/modules/highlight/highlight_test.go
+++ b/modules/highlight/highlight_test.go
@@ -108,11 +108,17 @@ c=2
),
lexerName: "Python",
},
+ {
+ name: "test.sql",
+ code: "--\nSELECT",
+ want: []template.HTML{"--\n", `SELECT`},
+ lexerName: "SQL",
+ },
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- out, lexerName, err := File(tt.name, "", []byte(tt.code))
+ out, lexerName, err := RenderFullFile(tt.name, "", []byte(tt.code))
assert.NoError(t, err)
assert.Equal(t, tt.want, out)
assert.Equal(t, tt.lexerName, lexerName)
@@ -176,7 +182,7 @@ c=2`),
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- out := PlainText([]byte(tt.code))
+ out := RenderPlainText([]byte(tt.code))
assert.Equal(t, tt.want, out)
})
}
diff --git a/modules/highlight/lexerdetect.go b/modules/highlight/lexerdetect.go
new file mode 100644
index 0000000000..5d98578f35
--- /dev/null
+++ b/modules/highlight/lexerdetect.go
@@ -0,0 +1,307 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package highlight
+
+import (
+ "path"
+ "strings"
+ "sync"
+
+ "code.gitea.io/gitea/modules/analyze"
+ "code.gitea.io/gitea/modules/log"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/go-enry/go-enry/v2"
+)
+
+// mapKeyLowerPrefix namespaces the lower-cased duplicate of each map key so
+// case-insensitive lookups can share the same map as exact-case lookups.
+const mapKeyLowerPrefix = "lower/"
+
+// chromaLexers is fully managed by us to do fast lookup for chroma lexers by file name or language name
+// Don't use lexers.Get because it is very slow in many cases (iterate all rules, filepath glob match, etc.)
+var chromaLexers = sync.OnceValue(func() (ret struct {
+	conflictingExtLangMap   map[string]string
+	conflictingAliasLangMap map[string]string
+
+	lowerNameMap map[string]chroma.Lexer // lexer name (lang name) in lower-case
+	fileBaseMap  map[string]chroma.Lexer
+	fileExtMap   map[string]chroma.Lexer
+	fileParts    []struct {
+		part  string
+		lexer chroma.Lexer
+	}
+},
+) {
+	ret.lowerNameMap = make(map[string]chroma.Lexer)
+	ret.fileBaseMap = make(map[string]chroma.Lexer)
+	ret.fileExtMap = make(map[string]chroma.Lexer)
+
+	// Chroma has conflicts in file extension for different languages,
+	// When we need to do fast render, there is no way to detect the language by content,
+	// So we can only choose some default languages for the conflicted file extensions.
+	ret.conflictingExtLangMap = map[string]string{
+		".as":      "ActionScript 3", // ActionScript
+		".asm":     "NASM",           // TASM, NASM, RGBDS Assembly, Z80 Assembly
+		".ASM":     "NASM",
+		".bas":     "VB.net",       // QBasic
+		".bf":      "Beef",         // Brainfuck
+		".fs":      "FSharp",       // Forth
+		".gd":      "GDScript",     // GDScript3
+		".h":       "C",            // Objective-C
+		".hcl":     "Terraform",    // HCL
+		".hh":      "C++",          // HolyC
+		".inc":     "PHP",          // ObjectPascal, POVRay, SourcePawn, PHTML
+		".m":       "Objective-C",  // Matlab, Mathematica, Mason
+		".mc":      "Mason",        // MonkeyC
+		".network": "SYSTEMD",      // INI
+		".php":     "PHP",          // PHTML
+		".php3":    "PHP",          // PHTML
+		".php4":    "PHP",          // PHTML
+		".php5":    "PHP",          // PHTML
+		".pl":      "Perl",         // Prolog, Raku
+		".pm":      "Perl",         // Promela, Raku
+		".pp":      "ObjectPascal", // Puppet
+		".s":       "ArmAsm",       // GAS
+		".S":       "ArmAsm",       // R, GAS
+		".service": "SYSTEMD",      // INI
+		".socket":  "SYSTEMD",      // INI
+		".sql":     "SQL",          // MySQL
+		".t":       "Perl",         // Raku
+		".ts":      "TypeScript",   // TypoScript
+		".v":       "V",            // verilog
+		".xslt":    "HTML",         // XML
+	}
+	// use widely used language names as the default mapping to resolve name alias conflict
+	ret.conflictingAliasLangMap = map[string]string{
+		"hcl": "HCL", // Terraform
+		"v":   "V",   // verilog
+	}
+
+	// a pattern containing any glob metacharacter cannot be used as a plain map key
+	isPlainPattern := func(key string) bool {
+		return !strings.ContainsAny(key, "*?[]") // only support simple patterns
+	}
+
+	// store both the exact key and a "lower/"-prefixed lower-cased key so lookups
+	// can fall back to case-insensitive matching.
+	// NOTE(review): only the exact key is conflict-checked; a lower-cased key may
+	// silently overwrite an earlier entry from a different lexer — confirm intended.
+	setFileNameMapWithLowerKey := func(m map[string]chroma.Lexer, key string, lexer chroma.Lexer) {
+		if _, conflict := m[key]; conflict {
+			panic("duplicate key in lexer map: " + key + ", need to add it to conflictingExtLangMap")
+		}
+		m[key] = lexer
+		m[mapKeyLowerPrefix+strings.ToLower(key)] = lexer
+	}
+
+	// processFileName classifies one lexer filename pattern into base-name match,
+	// extension match ("*.js"), or substring match ("*.env.*"); returns false when
+	// the pattern shape is unsupported.
+	processFileName := func(fileName string, lexer chroma.Lexer) bool {
+		if isPlainPattern(fileName) {
+			// full base name match
+			setFileNameMapWithLowerKey(ret.fileBaseMap, fileName, lexer)
+			return true
+		}
+		if strings.HasPrefix(fileName, "*") {
+			// ext name match: "*.js"
+			fileExt := strings.Trim(fileName, "*")
+			if isPlainPattern(fileExt) {
+				// for a conflicted extension, only register the preset default language
+				presetName := ret.conflictingExtLangMap[fileExt]
+				if presetName == "" || lexer.Config().Name == presetName {
+					setFileNameMapWithLowerKey(ret.fileExtMap, fileExt, lexer)
+				}
+				return true
+			}
+		}
+		if strings.HasSuffix(fileName, "*") {
+			// part match: "*.env.*"
+			filePart := strings.Trim(fileName, "*")
+			if isPlainPattern(filePart) {
+				ret.fileParts = append(ret.fileParts, struct {
+					part  string
+					lexer chroma.Lexer
+				}{
+					part:  filePart,
+					lexer: lexer,
+				})
+				return true
+			}
+		}
+		return false
+	}
+
+	expandGlobPatterns := func(patterns []string) []string {
+		// expand patterns like "file.[ch]" to "file.c" and "file.h", only one pair of "[]" is supported, enough for current Chroma lexers
+		for idx, s := range patterns {
+			idx1 := strings.IndexByte(s, '[')
+			idx2 := strings.IndexByte(s, ']')
+			if idx1 != -1 && idx2 != -1 && idx2 > idx1+1 {
+				left, mid, right := s[:idx1], s[idx1+1:idx2], s[idx2+1:]
+				// the first expansion replaces the pattern in place; the rest are appended
+				patterns[idx] = left + mid[0:1] + right
+				for i := 1; i < len(mid); i++ {
+					patterns = append(patterns, left+mid[i:i+1]+right)
+				}
+			}
+		}
+		return patterns
+	}
+
+	// processLexerNameAliases registers the lexer under its lower-cased canonical
+	// name and all aliases, honoring conflictingAliasLangMap for alias conflicts.
+	processLexerNameAliases := func(lexer chroma.Lexer) {
+		cfg := lexer.Config()
+		lowerName := strings.ToLower(cfg.Name)
+		if _, conflicted := ret.lowerNameMap[lowerName]; conflicted {
+			panic("duplicate language name in lexer map: " + lowerName)
+		}
+		ret.lowerNameMap[lowerName] = lexer
+
+		for _, name := range cfg.Aliases {
+			lowerName := strings.ToLower(name)
+			if overriddenName, overridden := ret.conflictingAliasLangMap[lowerName]; overridden && overriddenName != cfg.Name {
+				continue // this alias is reserved for another (preset default) language
+			}
+			if existingLexer, conflict := ret.lowerNameMap[lowerName]; conflict && existingLexer.Config().Name != cfg.Name {
+				panic("duplicate alias in lexer map: " + name + ", conflict between " + existingLexer.Config().Name + " and " + cfg.Name)
+			}
+			ret.lowerNameMap[lowerName] = lexer
+		}
+	}
+
+	// the main loop: build our lookup maps for lexers
+	for _, lexer := range lexers.GlobalLexerRegistry.Lexers {
+		cfg := lexer.Config()
+		processLexerNameAliases(lexer)
+		for _, s := range expandGlobPatterns(cfg.Filenames) {
+			if !processFileName(s, lexer) {
+				panic("unsupported file name pattern in lexer: " + s)
+			}
+		}
+		for _, s := range expandGlobPatterns(cfg.AliasFilenames) {
+			if !processFileName(s, lexer) {
+				panic("unsupported alias file name pattern in lexer: " + s)
+			}
+		}
+	}
+
+	// final check: make sure the default overriding mapping is correct, nothing is missing
+	for lowerName, lexerName := range ret.conflictingAliasLangMap {
+		if lexer, ok := ret.lowerNameMap[lowerName]; !ok || lexer.Config().Name != lexerName {
+			panic("missing default name-lang mapping for: " + lowerName)
+		}
+	}
+	for ext, lexerName := range ret.conflictingExtLangMap {
+		if lexer, ok := ret.fileExtMap[ext]; !ok || lexer.Config().Name != lexerName {
+			panic("missing default ext-lang mapping for: " + ext)
+		}
+	}
+	return ret
+})
+
+// normalizeFileNameLang reduces fileName to its base name, strips any "?"
+// parameter suffix from fileLang, and translates language names that differ
+// between enry/gitattributes and chroma (e.g. "F#" -> "FSharp").
+func normalizeFileNameLang(fileName, fileLang string) (string, string) {
+	fileName = path.Base(fileName)
+	fileLang, _, _ = strings.Cut(fileLang, "?") // maybe, the value from gitattributes might contain `?` parameters?
+	ext := path.Ext(fileName)
+	// the "lang" might come from enry or gitattributes, it has different naming for some languages
+	switch fileLang {
+	case "F#":
+		fileLang = "FSharp"
+	case "Pascal":
+		fileLang = "ObjectPascal"
+	case "C":
+		// upper-case ".C"/".H" extensions are treated as C++ sources/headers
+		if ext == ".C" || ext == ".H" {
+			fileLang = "C++"
+		}
+	}
+	return fileName, fileLang
+}
+
+// DetectChromaLexerByFileName returns the chroma lexer detected from the file
+// name and an optional language hint, discarding whether the match came from
+// the language or the file name (see detectChromaLexerByFileName).
+func DetectChromaLexerByFileName(fileName, fileLang string) chroma.Lexer {
+	lexer, _ := detectChromaLexerByFileName(fileName, fileLang)
+	return lexer
+}
+
+// detectChromaLexerByFileName resolves a chroma lexer from the file name and an
+// optional language hint, trying (in priority order): the custom extension
+// mapping, the language name, the base file name, the extension, and finally
+// substring ("*.env.*"-style) patterns. byLang reports whether the match came
+// from the language name; lexers.Fallback is returned when nothing matches.
+func detectChromaLexerByFileName(fileName, fileLang string) (_ chroma.Lexer, byLang bool) {
+	fileName, fileLang = normalizeFileNameLang(fileName, fileLang)
+	fileExt := path.Ext(fileName)
+
+	// apply custom mapping for file extension, highest priority, for example:
+	// * ".my-js" -> ".js"
+	// * ".my-html" -> "HTML"
+	if fileExt != "" {
+		if val, ok := globalVars().highlightMapping[fileExt]; ok {
+			if strings.HasPrefix(val, ".") {
+				// mapped to another extension: re-resolve via a dummy file name
+				fileName = "dummy" + val
+				fileLang = ""
+			} else {
+				// mapped directly to a language name
+				fileLang = val
+			}
+		}
+	}
+
+	// try to use language for lexer name
+	if fileLang != "" {
+		lexer := chromaLexers().lowerNameMap[strings.ToLower(fileLang)]
+		if lexer != nil {
+			return lexer, true
+		}
+	}
+
+	if fileName == "" {
+		return lexers.Fallback, false
+	}
+
+	// try base name (exact case first, then case-insensitive)
+	{
+		baseName := path.Base(fileName)
+		if lexer, ok := chromaLexers().fileBaseMap[baseName]; ok {
+			return lexer, false
+		} else if lexer, ok = chromaLexers().fileBaseMap[mapKeyLowerPrefix+strings.ToLower(baseName)]; ok {
+			return lexer, false
+		}
+	}
+
+	if fileExt == "" {
+		return lexers.Fallback, false
+	}
+
+	// try ext name (exact case first, then case-insensitive)
+	{
+		if lexer, ok := chromaLexers().fileExtMap[fileExt]; ok {
+			return lexer, false
+		} else if lexer, ok = chromaLexers().fileExtMap[mapKeyLowerPrefix+strings.ToLower(fileExt)]; ok {
+			return lexer, false
+		}
+	}
+
+	// try file part match, for example: ".env.local" for "*.env.*"
+	// it assumes that there must be a dot in filename (fileExt isn't empty)
+	for _, item := range chromaLexers().fileParts {
+		if strings.Contains(fileName, item.part) {
+			return item.lexer, false
+		}
+	}
+	return lexers.Fallback, false
+}
+
+// detectChromaLexerWithAnalyze returns a chroma lexer by given file name, language and code content. All parameters can be optional.
+// When code content is provided, it will be slow if no lexer is found by file name or language.
+// If no lexer is found, it will return the fallback lexer.
+func detectChromaLexerWithAnalyze(fileName, lang string, code []byte) chroma.Lexer {
+	lexer, byLang := detectChromaLexerByFileName(fileName, lang)
+
+	// if lang is provided, and it matches a lexer, use it directly
+	if byLang {
+		return lexer
+	}
+
+	// if a lexer is detected and there is no conflict for the file extension, use it directly
+	fileExt := path.Ext(fileName)
+	_, hasConflicts := chromaLexers().conflictingExtLangMap[fileExt]
+	if !hasConflicts && lexer != lexers.Fallback {
+		return lexer
+	}
+
+	// try to detect language by content, for best guessing for the language
+	// when using "code" to detect, analyze.GetCodeLanguage is slow, it iterates many rules to detect language from content
+	analyzedLanguage := analyze.GetCodeLanguage(fileName, code)
+	lexer = DetectChromaLexerByFileName(fileName, analyzedLanguage)
+	if lexer == lexers.Fallback {
+		// a warning here means the enry->chroma name mapping (normalizeFileNameLang) is incomplete
+		if analyzedLanguage != enry.OtherLanguage {
+			log.Warn("No chroma lexer found for enry detected language: %s (file: %s), need to fix the language mapping between enry and chroma.", analyzedLanguage, fileName)
+		}
+	}
+	return lexer
+}
diff --git a/modules/highlight/lexerdetect_test.go b/modules/highlight/lexerdetect_test.go
new file mode 100644
index 0000000000..a06053be0c
--- /dev/null
+++ b/modules/highlight/lexerdetect_test.go
@@ -0,0 +1,116 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package highlight
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/stretchr/testify/assert"
+)
+
+// BenchmarkDetectChromaLexerByFileName measures the fast map-based name lookup.
+func BenchmarkDetectChromaLexerByFileName(b *testing.B) {
+	for b.Loop() {
+		// BenchmarkDetectChromaLexerByFileName-12 18214717 61.35 ns/op
+		DetectChromaLexerByFileName("a.sql", "")
+	}
+}
+
+// BenchmarkDetectChromaLexerWithAnalyze measures content-based detection for a
+// file name with no extension, forcing the go-enry analysis path.
+func BenchmarkDetectChromaLexerWithAnalyze(b *testing.B) {
+	b.StopTimer()
+	code := []byte(strings.Repeat("SELECT * FROM table;\n", 1000))
+	b.StartTimer()
+	for b.Loop() {
+		// 87946 13310 ns/op (result recorded under this benchmark's former name "BenchmarkRenderCodeSlowGuess")
+		detectChromaLexerWithAnalyze("a", "", code)
+	}
+}
+
+// BenchmarkChromaAnalyze measures chroma's own content analysis, for comparison
+// with the go-enry-based path above.
+func BenchmarkChromaAnalyze(b *testing.B) {
+	b.StopTimer()
+	code := strings.Repeat("SELECT * FROM table;\n", 1000)
+	b.StartTimer()
+	for b.Loop() {
+		// comparing to detectChromaLexerWithAnalyze (go-enry), "chroma/lexers.Analyse" is very slow
+		// BenchmarkChromaAnalyze-12 519 2247104 ns/op
+		lexers.Analyse(code)
+	}
+}
+
+// BenchmarkRenderCodeByLexer measures full highlighting (tokenize + format) of
+// 1000 lines of SQL with a pre-resolved lexer.
+func BenchmarkRenderCodeByLexer(b *testing.B) {
+	b.StopTimer()
+	code := strings.Repeat("SELECT * FROM table;\n", 1000)
+	lexer := DetectChromaLexerByFileName("a.sql", "")
+	b.StartTimer()
+	for b.Loop() {
+		// Really slow ....... the regexp2 used by Chroma takes most of the time
+		// BenchmarkRenderCodeByLexer-12 22 47159038 ns/op
+		RenderCodeByLexer(lexer, code)
+	}
+}
+
+// TestDetectChromaLexer exercises both detection entry points against tables of
+// file name / language / content combinations, including the custom extension
+// mapping (".my-html" -> "HTML") installed for the duration of the test.
+func TestDetectChromaLexer(t *testing.T) {
+	globalVars().highlightMapping[".my-html"] = "HTML"
+	t.Cleanup(func() { delete(globalVars().highlightMapping, ".my-html") })
+
+	// content-based detection: fileName, language and content are all optional
+	casesWithContent := []struct {
+		fileName string
+		language string
+		content  string
+		expected string
+	}{
+		{"test.v", "", "", "V"},
+		{"test.v", "any-lang-name", "", "V"},
+
+		{"any-file", "javascript", "", "JavaScript"},
+		{"any-file", "", "/* vim: set filetype=python */", "Python"},
+		{"any-file", "", "", "fallback"},
+
+		{"test.fs", "", "", "FSharp"},
+		{"test.fs", "F#", "", "FSharp"},
+		{"test.fs", "", "let x = 1", "FSharp"},
+
+		{"test.c", "", "", "C"},
+		{"test.C", "", "", "C++"},
+		{"OLD-CODE.PAS", "", "", "ObjectPascal"},
+		{"test.my-html", "", "", "HTML"},
+
+		{"a.php", "", "", "PHP"},
+		{"a.sql", "", "", "SQL"},
+		{"dhcpd.conf", "", "", "ISCdhcpd"},
+		{".env.my-production", "", "", "Bash"},
+
+		{"a.hcl", "", "", "HCL"}, // not the same as Chroma, enry detects "*.hcl" as "HCL"
+		{"a.hcl", "HCL", "", "HCL"},
+		{"a.hcl", "Terraform", "", "Terraform"},
+	}
+	for _, c := range casesWithContent {
+		lexer := detectChromaLexerWithAnalyze(c.fileName, c.language, []byte(c.content))
+		if assert.NotNil(t, lexer, "case: %+v", c) {
+			assert.Equal(t, c.expected, lexer.Config().Name, "case: %+v", c)
+		}
+	}
+
+	// name-only detection: also checks the byLang flag
+	casesNameLang := []struct {
+		fileName string
+		language string
+		expected string
+		byLang   bool
+	}{
+		{"a.v", "", "V", false},
+		{"a.v", "V", "V", true},
+		{"a.v", "verilog", "verilog", true},
+		{"a.v", "any-lang-name", "V", false},
+
+		{"a.hcl", "", "Terraform", false}, // not the same as enry
+		{"a.hcl", "HCL", "HCL", true},
+		{"a.hcl", "Terraform", "Terraform", true},
+	}
+	for _, c := range casesNameLang {
+		lexer, byLang := detectChromaLexerByFileName(c.fileName, c.language)
+		assert.Equal(t, c.expected, lexer.Config().Name, "case: %+v", c)
+		assert.Equal(t, c.byLang, byLang, "case: %+v", c)
+	}
+}
diff --git a/modules/htmlutil/html.go b/modules/htmlutil/html.go
index efbc174b2e..8dbfe0c22e 100644
--- a/modules/htmlutil/html.go
+++ b/modules/htmlutil/html.go
@@ -6,6 +6,7 @@ package htmlutil
import (
"fmt"
"html/template"
+ "io"
"slices"
"strings"
)
@@ -31,7 +32,7 @@ func ParseSizeAndClass(defaultSize int, defaultClass string, others ...any) (int
return size, class
}
-func HTMLFormat(s template.HTML, rawArgs ...any) template.HTML {
+func htmlFormatArgs(s template.HTML, rawArgs []any) []any {
if !strings.Contains(string(s), "%") || len(rawArgs) == 0 {
panic("HTMLFormat requires one or more arguments")
}
@@ -50,5 +51,35 @@ func HTMLFormat(s template.HTML, rawArgs ...any) template.HTML {
args[i] = template.HTMLEscapeString(fmt.Sprint(v))
}
}
- return template.HTML(fmt.Sprintf(string(s), args...))
+ return args
+}
+
+// HTMLFormat formats s (a printf-style template) with rawArgs and returns safe
+// HTML; argument escaping is delegated to htmlFormatArgs.
+func HTMLFormat(s template.HTML, rawArgs ...any) template.HTML {
+	return template.HTML(fmt.Sprintf(string(s), htmlFormatArgs(s, rawArgs)...))
+}
+
+// HTMLPrintf is like HTMLFormat but writes the formatted result to w instead of
+// returning it, reporting the number of bytes written and any write error.
+func HTMLPrintf(w io.Writer, s template.HTML, rawArgs ...any) (int, error) {
+	return fmt.Fprintf(w, string(s), htmlFormatArgs(s, rawArgs)...)
+}
+
+// HTMLPrint writes the already-safe HTML s to w without further processing.
+func HTMLPrint(w io.Writer, s template.HTML) (int, error) {
+	return io.WriteString(w, string(s))
+}
+
+// HTMLPrintTag writes an opening HTML tag like `<tag key="value" ...>` to w,
+// HTML-escaping both attribute names and values. It returns the total number of
+// bytes written and the first write error encountered.
+// NOTE(review): attrs is iterated as a Go map, so attribute order in the output
+// is nondeterministic — confirm no caller depends on a stable attribute order.
+func HTMLPrintTag(w io.Writer, tag template.HTML, attrs map[string]string) (written int, err error) {
+	n, err := io.WriteString(w, "<"+string(tag))
+	written += n
+	if err != nil {
+		return written, err
+	}
+	for k, v := range attrs {
+		n, err = fmt.Fprintf(w, ` %s="%s"`, template.HTMLEscapeString(k), template.HTMLEscapeString(v))
+		written += n
+		if err != nil {
+			return written, err
+		}
+	}
+	// close the opening tag; the caller is responsible for children and the end tag
+	n, err = io.WriteString(w, ">")
+	written += n
+	return written, err
+}
diff --git a/modules/indexer/code/bleve/bleve.go b/modules/indexer/code/bleve/bleve.go
index 5f6a7f6082..010ee39660 100644
--- a/modules/indexer/code/bleve/bleve.go
+++ b/modules/indexer/code/bleve/bleve.go
@@ -4,7 +4,6 @@
package bleve
import (
- "bufio"
"context"
"fmt"
"io"
@@ -151,7 +150,7 @@ func NewIndexer(indexDir string) *Indexer {
}
}
-func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserError, batchReader *bufio.Reader, commitSha string,
+func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, commitSha string,
update internal.FileUpdate, repo *repo_model.Repository, batch *inner_bleve.FlushingBatch,
) error {
// Ignore vendored files in code search
@@ -164,7 +163,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
var err error
if !update.Sized {
var stdout string
- stdout, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("cat-file", "-s").AddDynamicArguments(update.BlobSha))
+ stdout, _, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("cat-file", "-s").AddDynamicArguments(update.BlobSha))
if err != nil {
return err
}
@@ -177,16 +176,11 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
return b.addDelete(update.Filename, repo, batch)
}
- if _, err := batchWriter.Write([]byte(update.BlobSha + "\n")); err != nil {
- return err
- }
-
- _, _, size, err = git.ReadBatchLine(batchReader)
+ info, batchReader, err := catFileBatch.QueryContent(update.BlobSha)
if err != nil {
return err
}
-
- fileContents, err := io.ReadAll(io.LimitReader(batchReader, size))
+ fileContents, err := io.ReadAll(io.LimitReader(batchReader, info.Size))
if err != nil {
return err
} else if !typesniffer.DetectContentType(fileContents).IsText() {
@@ -218,18 +212,17 @@ func (b *Indexer) addDelete(filename string, repo *repo_model.Repository, batch
func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *internal.RepoChanges) error {
batch := inner_bleve.NewFlushingBatch(b.inner.Indexer, maxBatchSize)
if len(changes.Updates) > 0 {
- gitBatch, err := gitrepo.NewBatch(ctx, repo)
+ catfileBatch, err := gitrepo.NewBatch(ctx, repo)
if err != nil {
return err
}
- defer gitBatch.Close()
+ defer catfileBatch.Close()
for _, update := range changes.Updates {
- if err := b.addUpdate(ctx, gitBatch.Writer, gitBatch.Reader, sha, update, repo, batch); err != nil {
+ if err := b.addUpdate(ctx, catfileBatch, sha, update, repo, batch); err != nil {
return err
}
}
- gitBatch.Close()
}
for _, filename := range changes.RemovedFilenames {
if err := b.addDelete(filename, repo, batch); err != nil {
diff --git a/modules/indexer/code/elasticsearch/elasticsearch.go b/modules/indexer/code/elasticsearch/elasticsearch.go
index a7027051d2..99f974b646 100644
--- a/modules/indexer/code/elasticsearch/elasticsearch.go
+++ b/modules/indexer/code/elasticsearch/elasticsearch.go
@@ -4,7 +4,6 @@
package elasticsearch
import (
- "bufio"
"context"
"fmt"
"io"
@@ -139,7 +138,7 @@ const (
}`
)
-func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserError, batchReader *bufio.Reader, sha string, update internal.FileUpdate, repo *repo_model.Repository) ([]elastic.BulkableRequest, error) {
+func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, sha string, update internal.FileUpdate, repo *repo_model.Repository) ([]elastic.BulkableRequest, error) {
// Ignore vendored files in code search
if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) {
return nil, nil
@@ -149,7 +148,7 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
var err error
if !update.Sized {
var stdout string
- stdout, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("cat-file", "-s").AddDynamicArguments(update.BlobSha))
+ stdout, _, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("cat-file", "-s").AddDynamicArguments(update.BlobSha))
if err != nil {
return nil, err
}
@@ -162,16 +161,12 @@ func (b *Indexer) addUpdate(ctx context.Context, batchWriter git.WriteCloserErro
return []elastic.BulkableRequest{b.addDelete(update.Filename, repo)}, nil
}
- if _, err := batchWriter.Write([]byte(update.BlobSha + "\n")); err != nil {
- return nil, err
- }
-
- _, _, size, err = git.ReadBatchLine(batchReader)
+ info, batchReader, err := catFileBatch.QueryContent(update.BlobSha)
if err != nil {
return nil, err
}
- fileContents, err := io.ReadAll(io.LimitReader(batchReader, size))
+ fileContents, err := io.ReadAll(io.LimitReader(batchReader, info.Size))
if err != nil {
return nil, err
} else if !typesniffer.DetectContentType(fileContents).IsText() {
@@ -217,7 +212,7 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st
defer batch.Close()
for _, update := range changes.Updates {
- updateReqs, err := b.addUpdate(ctx, batch.Writer, batch.Reader, sha, update, repo)
+ updateReqs, err := b.addUpdate(ctx, batch, sha, update, repo)
if err != nil {
return err
}
@@ -225,7 +220,6 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st
reqs = append(reqs, updateReqs...)
}
}
- batch.Close()
}
for _, filename := range changes.RemovedFilenames {
diff --git a/modules/indexer/code/git.go b/modules/indexer/code/git.go
index ca9c6a2974..a17b10551d 100644
--- a/modules/indexer/code/git.go
+++ b/modules/indexer/code/git.go
@@ -18,7 +18,7 @@ import (
)
func getDefaultBranchSha(ctx context.Context, repo *repo_model.Repository) (string, error) {
- stdout, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("show-ref", "-s").AddDynamicArguments(git.BranchPrefix+repo.DefaultBranch))
+ stdout, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("show-ref", "-s").AddDynamicArguments(git.BranchPrefix+repo.DefaultBranch))
if err != nil {
return "", err
}
@@ -35,7 +35,7 @@ func getRepoChanges(ctx context.Context, repo *repo_model.Repository, revision s
needGenesis := len(status.CommitSha) == 0
if !needGenesis {
hasAncestorCmd := gitcmd.NewCommand("merge-base").AddDynamicArguments(status.CommitSha, revision)
- stdout, _ := gitrepo.RunCmdString(ctx, repo, hasAncestorCmd)
+ stdout, _, _ := gitrepo.RunCmdString(ctx, repo, hasAncestorCmd) // FIXME: error is not handled
needGenesis = len(stdout) == 0
}
@@ -101,7 +101,7 @@ func genesisChanges(ctx context.Context, repo *repo_model.Repository, revision s
// nonGenesisChanges get changes since the previous indexer update
func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revision string) (*internal.RepoChanges, error) {
diffCmd := gitcmd.NewCommand("diff", "--name-status").AddDynamicArguments(repo.CodeIndexerStatus.CommitSha, revision)
- stdout, runErr := gitrepo.RunCmdString(ctx, repo, diffCmd)
+ stdout, _, runErr := gitrepo.RunCmdString(ctx, repo, diffCmd)
if runErr != nil {
// previous commit sha may have been removed by a force push, so
// try rebuilding from scratch
diff --git a/modules/indexer/code/search.go b/modules/indexer/code/search.go
index a7a5d7d2e3..eb20b70e71 100644
--- a/modules/indexer/code/search.go
+++ b/modules/indexer/code/search.go
@@ -72,10 +72,11 @@ func writeStrings(buf *bytes.Buffer, strs ...string) error {
func HighlightSearchResultCode(filename, language string, lineNums []int, code string) []*ResultLine {
// we should highlight the whole code block first, otherwise it doesn't work well with multiple line highlighting
- hl, _ := highlight.Code(filename, language, code)
+ lexer := highlight.DetectChromaLexerByFileName(filename, language)
+ hl := highlight.RenderCodeByLexer(lexer, code)
highlightedLines := strings.Split(string(hl), "\n")
- // The lineNums outputted by highlight.Code might not match the original lineNums, because "highlight" removes the last `\n`
+ // The lineNums outputted by render might not match the original lineNums, because "highlight" removes the last `\n`
lines := make([]*ResultLine, min(len(highlightedLines), len(lineNums)))
for i := range lines {
lines[i] = &ResultLine{
diff --git a/modules/indexer/issues/meilisearch/meilisearch.go b/modules/indexer/issues/meilisearch/meilisearch.go
index b7fae6ee9a..5715cf4794 100644
--- a/modules/indexer/issues/meilisearch/meilisearch.go
+++ b/modules/indexer/issues/meilisearch/meilisearch.go
@@ -124,7 +124,7 @@ func (b *Indexer) Delete(_ context.Context, ids ...int64) error {
}
for _, id := range ids {
- _, err := b.inner.Client.Index(b.inner.VersionedIndexName()).DeleteDocument(strconv.FormatInt(id, 10))
+ _, err := b.inner.Client.Index(b.inner.VersionedIndexName()).DeleteDocument(strconv.FormatInt(id, 10), nil)
if err != nil {
return err
}
diff --git a/modules/lfs/pointer_scanner_gogit.go b/modules/lfs/pointer_scanner_gogit.go
index e153b8e24e..ccfb16b6c0 100644
--- a/modules/lfs/pointer_scanner_gogit.go
+++ b/modules/lfs/pointer_scanner_gogit.go
@@ -15,7 +15,7 @@ import (
)
// SearchPointerBlobs scans the whole repository for LFS pointer files
-func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan chan<- PointerBlob, errChan chan<- error) {
+func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan chan<- PointerBlob) error {
gitRepo := repo.GoGitRepo()
err := func() error {
@@ -49,14 +49,7 @@ func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan c
return nil
})
}()
- if err != nil {
- select {
- case <-ctx.Done():
- default:
- errChan <- err
- }
- }
close(pointerChan)
- close(errChan)
+ return err
}
diff --git a/modules/lfs/pointer_scanner_nogogit.go b/modules/lfs/pointer_scanner_nogogit.go
index c37a93e73b..29f5d0e346 100644
--- a/modules/lfs/pointer_scanner_nogogit.go
+++ b/modules/lfs/pointer_scanner_nogogit.go
@@ -8,96 +8,84 @@ package lfs
import (
"bufio"
"context"
+ "errors"
"io"
"strconv"
"strings"
- "sync"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/git/pipeline"
+ "code.gitea.io/gitea/modules/util"
+
+ "golang.org/x/sync/errgroup"
)
// SearchPointerBlobs scans the whole repository for LFS pointer files
-func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan chan<- PointerBlob, errChan chan<- error) {
- basePath := repo.Path
+func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan chan<- PointerBlob) error {
+ cmd1AllObjs, cmd3BatchContent := gitcmd.NewCommand(), gitcmd.NewCommand()
- catFileCheckReader, catFileCheckWriter := io.Pipe()
- shasToBatchReader, shasToBatchWriter := io.Pipe()
- catFileBatchReader, catFileBatchWriter := io.Pipe()
+ cmd1AllObjsStdout, cmd1AllObjsStdoutClose := cmd1AllObjs.MakeStdoutPipe()
+ defer cmd1AllObjsStdoutClose()
- wg := sync.WaitGroup{}
- wg.Add(4)
-
- // Create the go-routines in reverse order.
+ cmd3BatchContentIn, cmd3BatchContentOut, cmd3BatchContentClose := cmd3BatchContent.MakeStdinStdoutPipe()
+ defer cmd3BatchContentClose()
+ // Create the go-routines in reverse order (update: the order is not needed any more, the pipes are properly prepared)
+ wg := errgroup.Group{}
// 4. Take the output of cat-file --batch and check if each file in turn
// to see if they're pointers to files in the LFS store
- go createPointerResultsFromCatFileBatch(ctx, catFileBatchReader, &wg, pointerChan)
+ wg.Go(func() error {
+ return createPointerResultsFromCatFileBatch(cmd3BatchContentOut, pointerChan)
+ })
// 3. Take the shas of the blobs and batch read them
- go pipeline.CatFileBatch(ctx, shasToBatchReader, catFileBatchWriter, &wg, basePath)
+ wg.Go(func() error {
+ return pipeline.CatFileBatch(ctx, cmd3BatchContent, repo.Path)
+ })
// 2. From the provided objects restrict to blobs <=1k
- go pipeline.BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader, shasToBatchWriter, &wg)
+ wg.Go(func() error {
+ return pipeline.BlobsLessThan1024FromCatFileBatchCheck(cmd1AllObjsStdout, cmd3BatchContentIn)
+ })
// 1. Run batch-check on all objects in the repository
- if !git.DefaultFeatures().CheckVersionAtLeast("2.6.0") {
- revListReader, revListWriter := io.Pipe()
- shasToCheckReader, shasToCheckWriter := io.Pipe()
- wg.Add(2)
- go pipeline.CatFileBatchCheck(ctx, shasToCheckReader, catFileCheckWriter, &wg, basePath)
- go pipeline.BlobsFromRevListObjects(revListReader, shasToCheckWriter, &wg)
- go pipeline.RevListAllObjects(ctx, revListWriter, &wg, basePath, errChan)
- } else {
- go pipeline.CatFileBatchCheckAllObjects(ctx, catFileCheckWriter, &wg, basePath, errChan)
- }
- wg.Wait()
-
+ wg.Go(func() error {
+ return pipeline.CatFileBatchCheckAllObjects(ctx, cmd1AllObjs, repo.Path)
+ })
+ err := wg.Wait()
close(pointerChan)
- close(errChan)
+ return err
}
-func createPointerResultsFromCatFileBatch(ctx context.Context, catFileBatchReader *io.PipeReader, wg *sync.WaitGroup, pointerChan chan<- PointerBlob) {
- defer wg.Done()
+func createPointerResultsFromCatFileBatch(catFileBatchReader io.ReadCloser, pointerChan chan<- PointerBlob) error {
defer catFileBatchReader.Close()
bufferedReader := bufio.NewReader(catFileBatchReader)
buf := make([]byte, 1025)
-loop:
for {
- select {
- case <-ctx.Done():
- break loop
- default:
- }
-
// File descriptor line: sha
sha, err := bufferedReader.ReadString(' ')
if err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return util.Iif(errors.Is(err, io.EOF), nil, err)
}
sha = strings.TrimSpace(sha)
// Throw away the blob
if _, err := bufferedReader.ReadString(' '); err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
sizeStr, err := bufferedReader.ReadString('\n')
if err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
size, err := strconv.Atoi(sizeStr[:len(sizeStr)-1])
if err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
pointerBuf := buf[:size+1]
if _, err := io.ReadFull(bufferedReader, pointerBuf); err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
pointerBuf = pointerBuf[:size]
// Now we need to check if the pointerBuf is an LFS pointer
@@ -105,7 +93,6 @@ loop:
if !pointer.IsValid() {
continue
}
-
pointerChan <- PointerBlob{Hash: sha, Pointer: pointer}
}
}
diff --git a/modules/lfs/shared.go b/modules/lfs/shared.go
index cd9488e3db..e04c089e51 100644
--- a/modules/lfs/shared.go
+++ b/modules/lfs/shared.go
@@ -66,6 +66,21 @@ type Link struct {
ExpiresAt *time.Time `json:"expires_at,omitempty"`
}
+func NewLink(href string) *Link {
+ return &Link{Href: href}
+}
+
+func (l *Link) WithHeader(k, v string) *Link {
+ if v == "" {
+ return l
+ }
+ if l.Header == nil {
+ l.Header = make(map[string]string)
+ }
+ l.Header[k] = v
+ return l
+}
+
// ObjectError defines the JSON structure returned to the client in case of an error.
type ObjectError struct {
Code int `json:"code"`
diff --git a/modules/log/init.go b/modules/log/init.go
index 3fb5200ad7..ccaab50de3 100644
--- a/modules/log/init.go
+++ b/modules/log/init.go
@@ -35,10 +35,10 @@ func init() {
}
}
-func newProcessTypedContext(parent context.Context, desc string) (ctx context.Context, cancel context.CancelFunc) {
+func newProcessTypedContext(parent context.Context, desc string) (context.Context, context.CancelFunc) {
// the "process manager" also calls "log.Trace()" to output logs, so if we want to create new contexts by the manager, we need to disable the trace temporarily
process.TraceLogDisable(true)
defer process.TraceLogDisable(false)
- ctx, _, cancel = process.GetManager().AddTypedContext(parent, desc, process.SystemProcessType, false)
- return ctx, cancel
+ ctx, _, finished := process.GetManager().AddTypedContext(parent, desc, process.SystemProcessType, false)
+ return ctx, context.CancelFunc(finished)
}
diff --git a/modules/markup/asciicast/asciicast.go b/modules/markup/asciicast/asciicast.go
index d86d61d7c4..b3af5eef09 100644
--- a/modules/markup/asciicast/asciicast.go
+++ b/modules/markup/asciicast/asciicast.go
@@ -20,14 +20,12 @@ func init() {
// See https://github.com/asciinema/asciinema/blob/develop/doc/asciicast-v2.md
type Renderer struct{}
-// Name implements markup.Renderer
func (Renderer) Name() string {
return "asciicast"
}
-// Extensions implements markup.Renderer
-func (Renderer) Extensions() []string {
- return []string{".cast"}
+func (Renderer) FileNamePatterns() []string {
+ return []string{"*.cast"}
}
const (
@@ -35,12 +33,10 @@ const (
playerSrcAttr = "data-asciinema-player-src"
)
-// SanitizerRules implements markup.Renderer
func (Renderer) SanitizerRules() []setting.MarkupSanitizerRule {
return []setting.MarkupSanitizerRule{{Element: "div", AllowAttr: playerSrcAttr}}
}
-// Render implements markup.Renderer
func (Renderer) Render(ctx *markup.RenderContext, _ io.Reader, output io.Writer) error {
rawURL := fmt.Sprintf("%s/%s/%s/raw/%s/%s",
setting.AppSubURL,
diff --git a/modules/markup/common/footnote.go b/modules/markup/common/footnote.go
index 1ece436c66..e552a28237 100644
--- a/modules/markup/common/footnote.go
+++ b/modules/markup/common/footnote.go
@@ -405,9 +405,9 @@ func (r *FootnoteHTMLRenderer) renderFootnoteLink(w util.BufWriter, source []byt
if entering {
n := node.(*FootnoteLink)
is := strconv.Itoa(n.Index)
- _, _ = w.WriteString(`]
`
- defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableAdditionalAttributes, true)()
result, err := markdown.RenderString(markup.NewTestRenderContext(localMetas), input)
assert.NoError(t, err)
assert.Equal(t, expected, string(result))
+
+ t.Run("LocalCommitAndCompare", func(t *testing.T) {
+ input := `http://localhost:3000/user/repo/commit/88fc37a3c0a4dda553bdcfc80c178a58247f42fb
+http://localhost:3000/user/repo/compare/88fc37a3c0a4dda553bdcfc80c178a58247f42fb...12fc37a3c0a4dda553bdcfc80c178a58247f42fb#hash`
+
+ expected := `88fc37a3c0
+88fc37a3c0...12fc37a3c0 (hash)
+`
+ result, err := markdown.RenderString(markup.NewTestRenderContext(localMetas), input)
+ assert.NoError(t, err)
+ assert.Equal(t, expected, string(result))
+ })
}
func TestMarkdownLink(t *testing.T) {
@@ -545,5 +560,11 @@ func TestMarkdownLink(t *testing.T) {
assert.Equal(t, `link1
link2
link3
+`, string(result))
+
+ input = "https://example.com/__init__.py"
+ result, err = markdown.RenderString(markup.NewTestRenderContext("/base", localMetas), input)
+ assert.NoError(t, err)
+ assert.Equal(t, `https://example.com/__init__.py
`, string(result))
}
diff --git a/modules/markup/markdown/prefixed_id.go b/modules/markup/markdown/prefixed_id.go
deleted file mode 100644
index 63d7fadc0a..0000000000
--- a/modules/markup/markdown/prefixed_id.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2024 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package markdown
-
-import (
- "bytes"
- "fmt"
-
- "code.gitea.io/gitea/modules/container"
- "code.gitea.io/gitea/modules/markup/common"
- "code.gitea.io/gitea/modules/util"
-
- "github.com/yuin/goldmark/ast"
-)
-
-type prefixedIDs struct {
- values container.Set[string]
-}
-
-// Generate generates a new element id.
-func (p *prefixedIDs) Generate(value []byte, kind ast.NodeKind) []byte {
- dft := []byte("id")
- if kind == ast.KindHeading {
- dft = []byte("heading")
- }
- return p.GenerateWithDefault(value, dft)
-}
-
-// GenerateWithDefault generates a new element id.
-func (p *prefixedIDs) GenerateWithDefault(value, dft []byte) []byte {
- result := common.CleanValue(value)
- if len(result) == 0 {
- result = dft
- }
- if !bytes.HasPrefix(result, []byte("user-content-")) {
- result = append([]byte("user-content-"), result...)
- }
- if p.values.Add(util.UnsafeBytesToString(result)) {
- return result
- }
- for i := 1; ; i++ {
- newResult := fmt.Sprintf("%s-%d", result, i)
- if p.values.Add(newResult) {
- return []byte(newResult)
- }
- }
-}
-
-// Put puts a given element id to the used ids table.
-func (p *prefixedIDs) Put(value []byte) {
- p.values.Add(util.UnsafeBytesToString(value))
-}
-
-func newPrefixedIDs() *prefixedIDs {
- return &prefixedIDs{
- values: make(container.Set[string]),
- }
-}
diff --git a/modules/markup/markdown/toc.go b/modules/markup/markdown/toc.go
deleted file mode 100644
index a11b9d0390..0000000000
--- a/modules/markup/markdown/toc.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2020 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package markdown
-
-import (
- "net/url"
-
- "code.gitea.io/gitea/modules/translation"
-
- "github.com/yuin/goldmark/ast"
-)
-
-// Header holds the data about a header.
-type Header struct {
- Level int
- Text string
- ID string
-}
-
-func createTOCNode(toc []Header, lang string, detailsAttrs map[string]string) ast.Node {
- details := NewDetails()
- summary := NewSummary()
-
- for k, v := range detailsAttrs {
- details.SetAttributeString(k, []byte(v))
- }
-
- summary.AppendChild(summary, ast.NewString([]byte(translation.NewLocale(lang).TrString("toc"))))
- details.AppendChild(details, summary)
- ul := ast.NewList('-')
- details.AppendChild(details, ul)
- currentLevel := 6
- for _, header := range toc {
- if header.Level < currentLevel {
- currentLevel = header.Level
- }
- }
- for _, header := range toc {
- for currentLevel > header.Level {
- ul = ul.Parent().(*ast.List)
- currentLevel--
- }
- for currentLevel < header.Level {
- newL := ast.NewList('-')
- ul.AppendChild(ul, newL)
- currentLevel++
- ul = newL
- }
- li := ast.NewListItem(currentLevel * 2)
- a := ast.NewLink()
- a.Destination = []byte("#" + url.QueryEscape(header.ID))
- a.AppendChild(a, ast.NewString([]byte(header.Text)))
- li.AppendChild(li, a)
- ul.AppendChild(ul, li)
- }
-
- return details
-}
diff --git a/modules/markup/markdown/transform_heading.go b/modules/markup/markdown/transform_heading.go
deleted file mode 100644
index a229a7b1a4..0000000000
--- a/modules/markup/markdown/transform_heading.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2024 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package markdown
-
-import (
- "fmt"
-
- "code.gitea.io/gitea/modules/markup"
- "code.gitea.io/gitea/modules/util"
-
- "github.com/yuin/goldmark/ast"
- "github.com/yuin/goldmark/text"
-)
-
-func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]Header) {
- for _, attr := range v.Attributes() {
- if _, ok := attr.Value.([]byte); !ok {
- v.SetAttribute(attr.Name, fmt.Appendf(nil, "%v", attr.Value))
- }
- }
- txt := v.Text(reader.Source()) //nolint:staticcheck // Text is deprecated
- header := Header{
- Text: util.UnsafeBytesToString(txt),
- Level: v.Level,
- }
- if id, found := v.AttributeString("id"); found {
- header.ID = util.UnsafeBytesToString(id.([]byte))
- }
- *tocList = append(*tocList, header)
- g.applyElementDir(v)
-}
diff --git a/modules/markup/mdstripper/mdstripper.go b/modules/markup/mdstripper/mdstripper.go
index 5a6504416a..bf69051e87 100644
--- a/modules/markup/mdstripper/mdstripper.go
+++ b/modules/markup/mdstripper/mdstripper.go
@@ -46,7 +46,7 @@ func (r *stripRenderer) Render(w io.Writer, source []byte, doc ast.Node) error {
coalesce := prevSibIsText
r.processString(
w,
- v.Text(source), //nolint:staticcheck // Text is deprecated
+ v.Value(source),
coalesce)
if v.SoftLineBreak() {
r.doubleSpace(w)
@@ -165,7 +165,6 @@ func StripMarkdownBytes(rawBytes []byte) ([]byte, []string) {
),
goldmark.WithParserOptions(
parser.WithAttribute(),
- parser.WithAutoHeadingID(),
),
goldmark.WithRendererOptions(
html.WithUnsafe(),
diff --git a/modules/markup/orgmode/orgmode.go b/modules/markup/orgmode/orgmode.go
index 93c335d244..fd3071645a 100644
--- a/modules/markup/orgmode/orgmode.go
+++ b/modules/markup/orgmode/orgmode.go
@@ -5,7 +5,6 @@ package orgmode
import (
"fmt"
- "html"
"html/template"
"io"
"strings"
@@ -17,7 +16,6 @@ import (
"code.gitea.io/gitea/modules/setting"
"github.com/alecthomas/chroma/v2"
- "github.com/alecthomas/chroma/v2/lexers"
"github.com/niklasfasching/go-org/org"
)
@@ -33,20 +31,16 @@ var (
_ markup.PostProcessRenderer = (*renderer)(nil)
)
-// Name implements markup.Renderer
func (renderer) Name() string {
return "orgmode"
}
-// NeedPostProcess implements markup.PostProcessRenderer
func (renderer) NeedPostProcess() bool { return true }
-// Extensions implements markup.Renderer
-func (renderer) Extensions() []string {
- return []string{".org"}
+func (renderer) FileNamePatterns() []string {
+ return []string{"*.org"}
}
-// SanitizerRules implements markup.Renderer
func (renderer) SanitizerRules() []setting.MarkupSanitizerRule {
return []setting.MarkupSanitizerRule{}
}
@@ -57,40 +51,20 @@ func Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error
htmlWriter.HighlightCodeBlock = func(source, lang string, inline bool, params map[string]string) string {
defer func() {
if err := recover(); err != nil {
+ // catch the panic, log the error and return empty result
log.Error("Panic in HighlightCodeBlock: %v\n%s", err, log.Stack(2))
- panic(err)
}
}()
- w := &strings.Builder{}
- lexer := lexers.Get(lang)
- if lexer == nil && lang == "" {
- lexer = lexers.Analyse(source)
- if lexer == nil {
- lexer = lexers.Fallback
- }
- lang = strings.ToLower(lexer.Config().Name)
- }
+ lexer := highlight.DetectChromaLexerByFileName("", lang) // don't use content to detect, it is too slow
+ lexer = chroma.Coalesce(lexer)
+ sb := &strings.Builder{}
// include language-x class as part of commonmark spec
- if err := ctx.RenderInternal.FormatWithSafeAttrs(w, ``, lang); err != nil {
- return ""
- }
- if lexer == nil {
- if _, err := w.WriteString(html.EscapeString(source)); err != nil {
- return ""
- }
- } else {
- lexer = chroma.Coalesce(lexer)
- if _, err := w.WriteString(string(highlight.CodeFromLexer(lexer, source))); err != nil {
- return ""
- }
- }
- if _, err := w.WriteString("
"); err != nil {
- return ""
- }
-
- return w.String()
+ _ = ctx.RenderInternal.FormatWithSafeAttrs(sb, ``, strings.ToLower(lexer.Config().Name))
+ _, _ = sb.WriteString(string(highlight.RenderCodeByLexer(lexer, source)))
+ _, _ = sb.WriteString("
")
+ return sb.String()
}
w := &orgWriter{rctx: ctx, HTMLWriter: htmlWriter}
diff --git a/modules/markup/render.go b/modules/markup/render.go
index c645749065..5785dc5ad5 100644
--- a/modules/markup/render.go
+++ b/modules/markup/render.go
@@ -4,6 +4,7 @@
package markup
import (
+ "bytes"
"context"
"fmt"
"html/template"
@@ -16,9 +17,9 @@ import (
"code.gitea.io/gitea/modules/htmlutil"
"code.gitea.io/gitea/modules/markup/internal"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/typesniffer"
"code.gitea.io/gitea/modules/util"
- "github.com/yuin/goldmark/ast"
"golang.org/x/sync/errgroup"
)
@@ -54,6 +55,23 @@ type RenderOptions struct {
// used by external render. the router "/org/repo/render/..." will output the rendered content in a standalone page
InStandalonePage bool
+
+ // EnableHeadingIDGeneration controls whether to auto-generate IDs for HTML headings without id attribute.
+ // This should be enabled for repository files and wiki pages, but disabled for comments to avoid duplicate IDs.
+ EnableHeadingIDGeneration bool
+}
+
+type TocShowInSectionType string
+
+const (
+ TocShowInSidebar TocShowInSectionType = "sidebar"
+ TocShowInMain TocShowInSectionType = "main"
+)
+
+type TocHeadingItem struct {
+ HeadingLevel int
+ AnchorID string
+ InnerText string
}
// RenderContext represents a render context
@@ -63,7 +81,8 @@ type RenderContext struct {
// the context might be used by the "render" function, but it might also be used by "postProcess" function
usedByRender bool
- SidebarTocNode ast.Node
+ TocShowInSection TocShowInSectionType
+ TocHeadingItems []*TocHeadingItem
RenderHelper RenderHelper
RenderOptions RenderOptions
@@ -112,6 +131,11 @@ func (ctx *RenderContext) WithInStandalonePage(v bool) *RenderContext {
return ctx
}
+func (ctx *RenderContext) WithEnableHeadingIDGeneration(v bool) *RenderContext {
+ ctx.RenderOptions.EnableHeadingIDGeneration = v
+ return ctx
+}
+
func (ctx *RenderContext) WithUseAbsoluteLink(v bool) *RenderContext {
ctx.RenderOptions.UseAbsoluteLink = v
return ctx
@@ -122,22 +146,29 @@ func (ctx *RenderContext) WithHelper(helper RenderHelper) *RenderContext {
return ctx
}
-// FindRendererByContext finds renderer by RenderContext
-// TODO: it should be merged with other similar functions like GetRendererByFileName, DetectMarkupTypeByFileName, etc
-func FindRendererByContext(ctx *RenderContext) (Renderer, error) {
+func (ctx *RenderContext) DetectMarkupRenderer(prefetchBuf []byte) Renderer {
if ctx.RenderOptions.MarkupType == "" && ctx.RenderOptions.RelativePath != "" {
- ctx.RenderOptions.MarkupType = DetectMarkupTypeByFileName(ctx.RenderOptions.RelativePath)
- if ctx.RenderOptions.MarkupType == "" {
- return nil, util.NewInvalidArgumentErrorf("unsupported file to render: %q", ctx.RenderOptions.RelativePath)
+ var sniffedType typesniffer.SniffedType
+ if len(prefetchBuf) > 0 {
+ sniffedType = typesniffer.DetectContentType(prefetchBuf)
}
+ ctx.RenderOptions.MarkupType = DetectRendererTypeByPrefetch(ctx.RenderOptions.RelativePath, sniffedType, prefetchBuf)
}
+ return renderers[ctx.RenderOptions.MarkupType]
+}
- renderer := renderers[ctx.RenderOptions.MarkupType]
+func (ctx *RenderContext) DetectMarkupRendererByReader(in io.Reader) (Renderer, io.Reader, error) {
+ prefetchBuf := make([]byte, 512)
+ n, err := util.ReadAtMost(in, prefetchBuf)
+ if err != nil && err != io.EOF {
+ return nil, nil, err
+ }
+ prefetchBuf = prefetchBuf[:n]
+ renderer := ctx.DetectMarkupRenderer(prefetchBuf)
if renderer == nil {
- return nil, util.NewNotExistErrorf("unsupported markup type: %q", ctx.RenderOptions.MarkupType)
+ return nil, nil, util.NewInvalidArgumentErrorf("unable to find a render")
}
-
- return renderer, nil
+ return renderer, io.MultiReader(bytes.NewReader(prefetchBuf), in), nil
}
func RendererNeedPostProcess(renderer Renderer) bool {
@@ -148,12 +179,12 @@ func RendererNeedPostProcess(renderer Renderer) bool {
}
// Render renders markup file to HTML with all specific handling stuff.
-func Render(ctx *RenderContext, input io.Reader, output io.Writer) error {
- renderer, err := FindRendererByContext(ctx)
+func Render(rctx *RenderContext, origInput io.Reader, output io.Writer) error {
+ renderer, input, err := rctx.DetectMarkupRendererByReader(origInput)
if err != nil {
return err
}
- return RenderWithRenderer(ctx, renderer, input, output)
+ return RenderWithRenderer(rctx, renderer, input, output)
}
// RenderString renders Markup string to HTML with all specific handling stuff and return string
@@ -265,12 +296,14 @@ func Init(renderHelpFuncs *RenderHelperFuncs) {
}
// since setting maybe changed extensions, this will reload all renderer extensions mapping
- extRenderers = make(map[string]Renderer)
+ fileNameRenderers = make(map[string]Renderer)
for _, renderer := range renderers {
- for _, ext := range renderer.Extensions() {
- extRenderers[strings.ToLower(ext)] = renderer
+ for _, pattern := range renderer.FileNamePatterns() {
+ fileNameRenderers[pattern] = renderer
}
}
+
+ RefreshFileNamePatterns()
}
func ComposeSimpleDocumentMetas() map[string]string {
diff --git a/modules/markup/renderer.go b/modules/markup/renderer.go
index 82e9348706..c62c28ad2a 100644
--- a/modules/markup/renderer.go
+++ b/modules/markup/renderer.go
@@ -14,8 +14,8 @@ import (
// Renderer defines an interface for rendering markup file to HTML
type Renderer interface {
- Name() string // markup format name
- Extensions() []string
+ Name() string // markup format name, also the renderer type, also the external tool name
+ FileNamePatterns() []string
SanitizerRules() []setting.MarkupSanitizerRule
Render(ctx *RenderContext, input io.Reader, output io.Writer) error
}
@@ -43,26 +43,52 @@ type RendererContentDetector interface {
}
var (
- extRenderers = make(map[string]Renderer)
- renderers = make(map[string]Renderer)
+ fileNameRenderers = make(map[string]Renderer)
+ renderers = make(map[string]Renderer)
)
// RegisterRenderer registers a new markup file renderer
func RegisterRenderer(renderer Renderer) {
+ // TODO: need to handle conflicts
renderers[renderer.Name()] = renderer
- for _, ext := range renderer.Extensions() {
- extRenderers[strings.ToLower(ext)] = renderer
+}
+
+func RefreshFileNamePatterns() {
+ // TODO: need to handle conflicts
+ fileNameRenderers = make(map[string]Renderer)
+ for _, renderer := range renderers {
+ for _, ext := range renderer.FileNamePatterns() {
+ fileNameRenderers[strings.ToLower(ext)] = renderer
+ }
}
}
-// GetRendererByFileName get renderer by filename
-func GetRendererByFileName(filename string) Renderer {
- extension := strings.ToLower(path.Ext(filename))
- return extRenderers[extension]
+func DetectRendererTypeByFilename(filename string) Renderer {
+ basename := path.Base(strings.ToLower(filename))
+ ext1 := path.Ext(basename)
+ if renderer := fileNameRenderers[basename]; renderer != nil {
+ return renderer
+ }
+ if renderer := fileNameRenderers["*"+ext1]; renderer != nil {
+ return renderer
+ }
+ if basename, ok := strings.CutSuffix(basename, ext1); ok {
+ ext2 := path.Ext(basename)
+ if renderer := fileNameRenderers["*"+ext2+ext1]; renderer != nil {
+ return renderer
+ }
+ }
+ return nil
}
-// DetectRendererType detects the markup type of the content
-func DetectRendererType(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) string {
+// DetectRendererTypeByPrefetch detects the markup type of the content
+func DetectRendererTypeByPrefetch(filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte) string {
+ if filename != "" {
+ byExt := DetectRendererTypeByFilename(filename)
+ if byExt != nil {
+ return byExt.Name()
+ }
+ }
for _, renderer := range renderers {
if detector, ok := renderer.(RendererContentDetector); ok && detector.CanRender(filename, sniffedType, prefetchBuf) {
return renderer.Name()
@@ -71,18 +97,12 @@ func DetectRendererType(filename string, sniffedType typesniffer.SniffedType, pr
return ""
}
-// DetectMarkupTypeByFileName returns the possible markup format type via the filename
-func DetectMarkupTypeByFileName(filename string) string {
- if parser := GetRendererByFileName(filename); parser != nil {
- return parser.Name()
- }
- return ""
-}
-
func PreviewableExtensions() []string {
- extensions := make([]string, 0, len(extRenderers))
- for extension := range extRenderers {
- extensions = append(extensions, extension)
+ exts := make([]string, 0, len(fileNameRenderers))
+ for p := range fileNameRenderers {
+ if s, ok := strings.CutPrefix(p, "*"); ok {
+ exts = append(exts, s)
+ }
}
- return extensions
+ return exts
}
diff --git a/modules/markup/sanitizer_description_test.go b/modules/markup/sanitizer_description_test.go
index ca72491f26..51833414f4 100644
--- a/modules/markup/sanitizer_description_test.go
+++ b/modules/markup/sanitizer_description_test.go
@@ -16,7 +16,7 @@ func TestDescriptionSanitizer(t *testing.T) {
`THUMBS UP`, `THUMBS UP`,
`Hello World`, `Hello World`,
`
`, ``,
- `https://example.com`, `https://example.com`,
+ `https://example.com`, `https://example.com`,
`data`, `data`,
`Important!`, `Important!`,
`Click me! Nothing to see here.
`, `Click me! Nothing to see here.`,
diff --git a/modules/migration/release.go b/modules/migration/release.go
index f92cf25e7b..e25e7e4428 100644
--- a/modules/migration/release.go
+++ b/modules/migration/release.go
@@ -10,9 +10,12 @@ import (
// ReleaseAsset represents a release asset
type ReleaseAsset struct {
- ID int64
- Name string
- ContentType *string `yaml:"content_type"`
+ ID int64
+ Name string
+
+ // There was a field "ContentType (content_type)" because some forges can provide it for assets,
+ // but we don't need it when migrating, so the field is omitted here.
+
Size *int
DownloadCount *int `yaml:"download_count"`
Created time.Time
diff --git a/modules/nosql/manager.go b/modules/nosql/manager.go
index 375c2b5d00..9d3ab49c9a 100644
--- a/modules/nosql/manager.go
+++ b/modules/nosql/manager.go
@@ -20,7 +20,7 @@ var manager *Manager
// Manager is the nosql connection manager
type Manager struct {
ctx context.Context
- finished context.CancelFunc
+ finished process.FinishedFunc
mutex sync.Mutex
RedisConnections map[string]*redisClientHolder
diff --git a/modules/optional/serialization.go b/modules/optional/serialization.go
index b120a0edf6..345ce56268 100644
--- a/modules/optional/serialization.go
+++ b/modules/optional/serialization.go
@@ -37,7 +37,7 @@ func (o *Option[T]) UnmarshalYAML(value *yaml.Node) error {
func (o Option[T]) MarshalYAML() (any, error) {
if !o.Has() {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate no value to marshal
}
value := new(yaml.Node)
diff --git a/modules/packages/nuget/symbol_extractor.go b/modules/packages/nuget/symbol_extractor.go
index 9c952e1f10..2eadee5463 100644
--- a/modules/packages/nuget/symbol_extractor.go
+++ b/modules/packages/nuget/symbol_extractor.go
@@ -142,8 +142,8 @@ func ParseDebugHeaderID(r io.ReadSeeker) (string, error) {
if _, err := r.Read(b); err != nil {
return "", err
}
- if i := bytes.IndexByte(b, 0); i != -1 {
- buf.Write(b[:i])
+ if before, _, ok := bytes.Cut(b, []byte{0}); ok {
+ buf.Write(before)
return buf.String(), nil
}
buf.Write(b)
diff --git a/modules/packages/rubygems/metadata.go b/modules/packages/rubygems/metadata.go
index 8a9794860e..5c01abc743 100644
--- a/modules/packages/rubygems/metadata.go
+++ b/modules/packages/rubygems/metadata.go
@@ -9,6 +9,7 @@ import (
"io"
"regexp"
"strings"
+ "sync"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/validation"
@@ -25,7 +26,9 @@ var (
ErrInvalidVersion = util.NewInvalidArgumentErrorf("package version is invalid")
)
-var versionMatcher = regexp.MustCompile(`\A[0-9]+(?:\.[0-9a-zA-Z]+)*(?:-[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?\z`)
+var versionMatcher = sync.OnceValue(func() *regexp.Regexp {
+ return regexp.MustCompile(`\A[0-9]+(?:\.[0-9a-zA-Z]+)*(?:-[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?\z`)
+})
// Package represents a RubyGems package
type Package struct {
@@ -128,7 +131,7 @@ func (r requirement) AsVersionRequirement() []VersionRequirement {
continue
}
version, ok := versionInt.(string)
- if !ok || version == "0" {
+ if !ok || (version == "0" && restriction == ">=") {
continue
}
@@ -176,7 +179,7 @@ func parseMetadataFile(r io.Reader) (*Package, error) {
return nil, ErrInvalidName
}
- if !versionMatcher.MatchString(spec.Version.Version) {
+ if !versionMatcher().MatchString(spec.Version.Version) {
return nil, ErrInvalidVersion
}
diff --git a/modules/packages/rubygems/metadata_test.go b/modules/packages/rubygems/metadata_test.go
index ec2fa08b6b..9a98c06653 100644
--- a/modules/packages/rubygems/metadata_test.go
+++ b/modules/packages/rubygems/metadata_test.go
@@ -4,42 +4,30 @@
package rubygems
import (
- "archive/tar"
- "bytes"
- "encoding/base64"
- "io"
"testing"
+ "code.gitea.io/gitea/modules/test"
+
"github.com/stretchr/testify/assert"
)
func TestParsePackageMetaData(t *testing.T) {
- createArchive := func(filename string, content []byte) io.Reader {
- var buf bytes.Buffer
- tw := tar.NewWriter(&buf)
- hdr := &tar.Header{
- Name: filename,
- Mode: 0o600,
- Size: int64(len(content)),
- }
- tw.WriteHeader(hdr)
- tw.Write(content)
- tw.Close()
- return &buf
- }
-
t.Run("MissingMetadataFile", func(t *testing.T) {
- data := createArchive("dummy.txt", []byte{0})
-
+ data := test.WriteTarArchive(map[string]string{"dummy.txt": ""})
rp, err := ParsePackageMetaData(data)
assert.ErrorIs(t, err, ErrMissingMetadataFile)
assert.Nil(t, rp)
})
t.Run("Valid", func(t *testing.T) {
- content, _ := base64.StdEncoding.DecodeString("H4sICHC/I2EEAG1ldGFkYXRhAAEeAOH/bmFtZTogZwp2ZXJzaW9uOgogIHZlcnNpb246IDEKWw35Tx4AAAA=")
- data := createArchive("metadata.gz", content)
-
+ metadataContent := test.CompressGzip(`
+name: g
+version:
+ version: 1
+`)
+ data := test.WriteTarArchive(map[string]string{
+ "metadata.gz": metadataContent.String(),
+ })
rp, err := ParsePackageMetaData(data)
assert.NoError(t, err)
assert.NotNil(t, rp)
@@ -47,17 +35,86 @@ func TestParsePackageMetaData(t *testing.T) {
}
func TestParseMetadataFile(t *testing.T) {
- content, _ := base64.StdEncoding.DecodeString(`H4sIAMe7I2ECA9VVTW/UMBC9+1eYXvaUbJpSQBZUHJAqDlwK4kCFIseZzZrGH9iTqisEv52Js9nd
-0KqggiqRXWnX45n3ZuZ5nCzL+JPQ15ulq7+AQnEORoj3HpReaSVRO8usNCB4qxEku4YQySbuCPo4
-bjHOd07HeZGfMt9JXLlgBB9imOxx7UIULOPnCZMMLsDXXgeiYbW2jQ6C0y9TELBSa6kJ6/IzaySS
-R1mUx1nxIitPeFGI9M2L6eGfWAMebANWaUgktzN9M3lsKNmxutBb1AYyCibbNhsDFu+q9GK/Tc4z
-d2IcLBl9js5eHaXFsLyvXeNz0LQyL/YoLx8EsiCMBZlx46k6sS2PDD5AgA5kJPNKdhH2elWzOv7n
-uv9Q9Aau/6ngP84elvNpXh5oRVlB5/yW7BH0+qu0G4gqaI/JdEHBFBS5l+pKtsARIjIwUnfj8Le0
-+TrdJLl2DG5A9SjrjgZ1mG+4QbAD+G4ZZBUap6qVnnzGf6Rwp+vliBRqtnYGPBEKvkb0USyXE8mS
-dVoR6hj07u0HZgAl3SRS8G/fmXcRK20jyq6rDMSYQFgidamqkXbbuspLXE/0k7GphtKqe67GuRC/
-yjAbmt9LsOMp8xMamFkSQ38fP5EFjdz8LA4do2C69VvqWXAJgrPbKZb58/xZXrKoW6ttW13Bhvzi
-4ftn7/yUxd4YGcglvTmmY8aGY3ZwRn4CqcWcidUGAAA=`)
- rp, err := parseMetadataFile(bytes.NewReader(content))
+ content := test.CompressGzip(`--- !ruby/object:Gem::Specification
+name: gitea
+version: !ruby/object:Gem::Version
+ version: 1.0.5
+platform: ruby
+authors:
+- Gitea
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2021-08-23 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+ name: runtime-dep
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: 1.2.0
+ - - "<"
+ - !ruby/object:Gem::Version
+ version: '2.0'
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: 1.2.0
+ - - "<"
+ - !ruby/object:Gem::Version
+ version: '2.0'
+- !ruby/object:Gem::Dependency
+ name: dev-dep
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '5.2'
+description: RubyGems package test
+email: rubygems@gitea.io
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- lib/gitea.rb
+homepage: https://gitea.io/
+licenses:
+- MIT
+metadata: {}
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: 2.3.0
+required_rubygems_version: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.7.6.2
+signing_key:
+specification_version: 4
+summary: Gitea package
+test_files: []
+`)
+ rp, err := parseMetadataFile(content)
assert.NoError(t, err)
assert.NotNil(t, rp)
@@ -84,5 +141,5 @@ yjAbmt9LsOMp8xMamFkSQ38fP5EFjdz8LA4do2C69VvqWXAJgrPbKZb58/xZXrKoW6ttW13Bhvzi
assert.Equal(t, "dev-dep", rp.Metadata.DevelopmentDependencies[0].Name)
assert.Len(t, rp.Metadata.DevelopmentDependencies[0].Version, 1)
assert.Equal(t, "~>", rp.Metadata.DevelopmentDependencies[0].Version[0].Restriction)
- assert.Equal(t, "5.2", rp.Metadata.DevelopmentDependencies[0].Version[0].Version)
+ assert.Equal(t, "0", rp.Metadata.DevelopmentDependencies[0].Version[0].Version)
}
diff --git a/modules/process/manager.go b/modules/process/manager.go
index 661511ce8d..c51d6c76f9 100644
--- a/modules/process/manager.go
+++ b/modules/process/manager.go
@@ -13,6 +13,7 @@ import (
"time"
"code.gitea.io/gitea/modules/gtprof"
+ "code.gitea.io/gitea/modules/util"
)
// TODO: This packages still uses a singleton for the Manager.
@@ -27,12 +28,14 @@ var (
DefaultContext = context.Background()
)
-// IDType is a pid type
-type IDType string
+type (
+ // IDType is a pid type
+ IDType string
-// FinishedFunc is a function that marks that the process is finished and can be removed from the process table
-// - it is simply an alias for context.CancelFunc and is only for documentary purposes
-type FinishedFunc = context.CancelFunc
+ CancelCauseFunc func(cause ...error)
+ // FinishedFunc is a function that marks that the process is finished and can be removed from the process table
+ FinishedFunc func()
+)
var (
traceDisabled atomic.Int64
@@ -84,6 +87,10 @@ func GetManager() *Manager {
return manager
}
+func cancelCauseFunc(cancelCause context.CancelCauseFunc) CancelCauseFunc {
+ return func(cause ...error) { cancelCause(util.OptionalArg(cause)) }
+}
+
// AddContext creates a new context and adds it as a process. Once the process is finished, finished must be called
// to remove the process from the process table. It should not be called until the process is finished but must always be called.
//
@@ -92,11 +99,10 @@ func GetManager() *Manager {
//
// Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the
// process table.
-func (pm *Manager) AddContext(parent context.Context, description string) (ctx context.Context, cancel context.CancelFunc, finished FinishedFunc) {
- ctx, cancel = context.WithCancel(parent)
-
- ctx, _, finished = pm.Add(ctx, description, cancel, NormalProcessType, true)
-
+func (pm *Manager) AddContext(parent context.Context, description string) (context.Context, CancelCauseFunc, FinishedFunc) {
+ ctx, ctxCancel := context.WithCancelCause(parent)
+ cancel := cancelCauseFunc(ctxCancel)
+ ctx, _, finished := pm.Add(ctx, description, cancel, NormalProcessType, true)
return ctx, cancel, finished
}
@@ -108,11 +114,10 @@ func (pm *Manager) AddContext(parent context.Context, description string) (ctx c
//
// Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the
// process table.
-func (pm *Manager) AddTypedContext(parent context.Context, description, processType string, currentlyRunning bool) (ctx context.Context, cancel context.CancelFunc, finished FinishedFunc) {
- ctx, cancel = context.WithCancel(parent)
-
- ctx, _, finished = pm.Add(ctx, description, cancel, processType, currentlyRunning)
-
+func (pm *Manager) AddTypedContext(parent context.Context, description, processType string, currentlyRunning bool) (context.Context, CancelCauseFunc, FinishedFunc) {
+ ctx, ctxCancel := context.WithCancelCause(parent)
+ cancel := cancelCauseFunc(ctxCancel)
+ ctx, _, finished := pm.Add(ctx, description, cancel, processType, currentlyRunning)
return ctx, cancel, finished
}
@@ -124,21 +129,23 @@ func (pm *Manager) AddTypedContext(parent context.Context, description, processT
//
// Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the
// process table.
-func (pm *Manager) AddContextTimeout(parent context.Context, timeout time.Duration, description string) (ctx context.Context, cancel context.CancelFunc, finished FinishedFunc) {
+func (pm *Manager) AddContextTimeout(parent context.Context, timeout time.Duration, description string) (context.Context, CancelCauseFunc, FinishedFunc) {
if timeout <= 0 {
// it's meaningless to use timeout <= 0, and it must be a bug! so we must panic here to tell developers to make the timeout correct
panic("the timeout must be greater than zero, otherwise the context will be cancelled immediately")
}
-
- ctx, cancel = context.WithTimeout(parent, timeout)
-
- ctx, _, finished = pm.Add(ctx, description, cancel, NormalProcessType, true)
-
+ ctx, ctxCancelTimeout := context.WithTimeout(parent, timeout)
+ ctx, ctxCancelCause := context.WithCancelCause(ctx)
+ cancel := func(cause ...error) {
+ ctxCancelCause(util.OptionalArg(cause))
+ ctxCancelTimeout()
+ }
+ ctx, _, finished := pm.Add(ctx, description, cancel, NormalProcessType, true)
return ctx, cancel, finished
}
// Add create a new process
-func (pm *Manager) Add(ctx context.Context, description string, cancel context.CancelFunc, processType string, currentlyRunning bool) (context.Context, IDType, FinishedFunc) {
+func (pm *Manager) Add(ctx context.Context, description string, cancel CancelCauseFunc, processType string, currentlyRunning bool) (context.Context, IDType, FinishedFunc) {
parentPID := GetParentPID(ctx)
pm.mutex.Lock()
diff --git a/modules/process/process.go b/modules/process/process.go
index 06a28c4a60..d81f5ffa1d 100644
--- a/modules/process/process.go
+++ b/modules/process/process.go
@@ -4,7 +4,6 @@
package process
import (
- "context"
"time"
)
@@ -21,7 +20,7 @@ type process struct {
ParentPID IDType
Description string
Start time.Time
- Cancel context.CancelFunc
+ Cancel CancelCauseFunc
Type string
}
diff --git a/modules/public/public.go b/modules/public/public.go
index a7eace1538..3a5a76637e 100644
--- a/modules/public/public.go
+++ b/modules/public/public.go
@@ -36,7 +36,7 @@ func FileHandlerFunc() http.HandlerFunc {
resp.WriteHeader(http.StatusMethodNotAllowed)
return
}
- handleRequest(resp, req, assetFS, req.URL.Path)
+ handleRequest(resp, req, http.FS(assetFS), req.URL.Path)
}
}
diff --git a/modules/queue/workerqueue.go b/modules/queue/workerqueue.go
index 0f5b105551..d8b0722caf 100644
--- a/modules/queue/workerqueue.go
+++ b/modules/queue/workerqueue.go
@@ -21,7 +21,7 @@ import (
// It can use different underlying (base) queue types
type WorkerPoolQueue[T any] struct {
ctxRun context.Context
- ctxRunCancel context.CancelFunc
+ ctxRunCancel process.FinishedFunc
shutdownDone chan struct{}
shutdownTimeout atomic.Int64 // in case some buggy handlers (workers) would hang forever, "shutdown" should finish in predictable time
diff --git a/modules/references/references.go b/modules/references/references.go
index 592bd4cbe4..ef3568ebea 100644
--- a/modules/references/references.go
+++ b/modules/references/references.go
@@ -248,7 +248,7 @@ func FindAllIssueReferencesMarkdown(content string) []IssueReference {
func findAllIssueReferencesMarkdown(content string) []*rawReference {
bcontent, links := mdstripper.StripMarkdownBytes([]byte(content))
- return findAllIssueReferencesBytes(bcontent, links)
+ return findAllIssueReferencesBytes(bcontent, links, []byte(content))
}
func convertFullHTMLReferencesToShortRefs(re *regexp.Regexp, contentBytes *[]byte) {
@@ -326,7 +326,7 @@ func FindAllIssueReferences(content string) []IssueReference {
} else {
log.Debug("No GiteaIssuePullPattern pattern")
}
- return rawToIssueReferenceList(findAllIssueReferencesBytes(contentBytes, []string{}))
+ return rawToIssueReferenceList(findAllIssueReferencesBytes(contentBytes, []string{}, nil))
}
// FindRenderizableReferenceNumeric returns the first unvalidated reference found in a string.
@@ -406,7 +406,8 @@ func FindRenderizableReferenceAlphanumeric(content string) *RenderizableReferenc
}
// FindAllIssueReferencesBytes returns a list of unvalidated references found in a byte slice.
-func findAllIssueReferencesBytes(content []byte, links []string) []*rawReference {
+// originalContent is optional and used to detect closing/reopening keywords for URL references.
+func findAllIssueReferencesBytes(content []byte, links []string, originalContent []byte) []*rawReference {
ret := make([]*rawReference, 0, 10)
pos := 0
@@ -470,10 +471,27 @@ func findAllIssueReferencesBytes(content []byte, links []string) []*rawReference
default:
continue
}
- // Note: closing/reopening keywords not supported with URLs
- bytes := []byte(parts[1] + "/" + parts[2] + sep + parts[4])
- if ref := getCrossReference(bytes, 0, len(bytes), true, false); ref != nil {
+ refBytes := []byte(parts[1] + "/" + parts[2] + sep + parts[4])
+ if ref := getCrossReference(refBytes, 0, len(refBytes), true, false); ref != nil {
ref.refLocation = nil
+ // Detect closing/reopening keywords by finding the URL position in original content
+ if originalContent != nil {
+ if idx := bytes.Index(originalContent, []byte(link)); idx > 0 {
+ // For markdown links [text](url), find the opening bracket before the URL
+ // to properly detect keywords like "closes [text](url)"
+ searchStart := idx
+ if idx >= 2 && originalContent[idx-1] == '(' {
+ // Find the matching '[' for this markdown link
+ bracketIdx := bytes.LastIndex(originalContent[:idx-1], []byte{'['})
+ if bracketIdx >= 0 {
+ searchStart = bracketIdx
+ }
+ }
+ action, location := findActionKeywords(originalContent, searchStart)
+ ref.action = action
+ ref.actionLocation = location
+ }
+ }
ret = append(ret, ref)
}
}
diff --git a/modules/references/references_test.go b/modules/references/references_test.go
index a15ae99f79..5922a9f5a9 100644
--- a/modules/references/references_test.go
+++ b/modules/references/references_test.go
@@ -227,6 +227,62 @@ func TestFindAllIssueReferences(t *testing.T) {
testFixtures(t, fixtures, "default")
+ // Test closing/reopening keywords with URLs (issue #27549)
+ // Uses the same AppURL as testFixtures (https://gitea.com:3000/)
+ urlFixtures := []testFixture{
+ {
+ "Closes [this issue](https://gitea.com:3000/user/repo/issues/123)",
+ []testResult{
+ {123, "user", "repo", "123", false, XRefActionCloses, nil, &RefSpan{Start: 0, End: 6}, ""},
+ },
+ },
+ {
+ "This fixes [#456](https://gitea.com:3000/org/project/issues/456)",
+ []testResult{
+ {456, "org", "project", "456", false, XRefActionCloses, nil, &RefSpan{Start: 5, End: 10}, ""},
+ },
+ },
+ {
+ "Reopens [PR](https://gitea.com:3000/owner/repo/pulls/789)",
+ []testResult{
+ {789, "owner", "repo", "789", true, XRefActionReopens, nil, &RefSpan{Start: 0, End: 7}, ""},
+ },
+ },
+ {
+ "See [issue](https://gitea.com:3000/user/repo/issues/100) but closes [another](https://gitea.com:3000/user/repo/issues/200)",
+ []testResult{
+ {100, "user", "repo", "100", false, XRefActionNone, nil, nil, ""},
+ {200, "user", "repo", "200", false, XRefActionCloses, nil, &RefSpan{Start: 61, End: 67}, ""},
+ },
+ },
+ }
+
+ testFixtures(t, urlFixtures, "url-keywords")
+
+ // Test bare URLs (not markdown links) with closing keywords
+ // These use FindAllIssueReferences (non-markdown) which converts full URLs to short refs first
+ setting.AppURL = "https://gitea.com:3000/"
+ bareURLTests := []struct {
+ name string
+ input string
+ expected XRefAction
+ }{
+ {"Fixes bare URL", "Fixes https://gitea.com:3000/org/project/issues/456", XRefActionCloses},
+ {"Fixes with colon", "Fixes: https://gitea.com:3000/org/project/issues/456", XRefActionCloses},
+ {"Closes bare URL", "Closes https://gitea.com:3000/user/repo/issues/123", XRefActionCloses},
+ {"Closes with colon", "Closes: https://gitea.com:3000/user/repo/issues/123", XRefActionCloses},
+ }
+
+ for _, tt := range bareURLTests {
+ t.Run(tt.name, func(t *testing.T) {
+ refs := FindAllIssueReferences(tt.input)
+ assert.Len(t, refs, 1, "Expected 1 reference for: %s", tt.input)
+ if len(refs) > 0 {
+ assert.Equal(t, tt.expected, refs[0].Action, "Expected action %v for: %s", tt.expected, tt.input)
+ }
+ })
+ }
+
type alnumFixture struct {
input string
issue string
diff --git a/modules/repository/branch.go b/modules/repository/branch.go
index 30aa0a6e85..0a8f7cc464 100644
--- a/modules/repository/branch.go
+++ b/modules/repository/branch.go
@@ -17,6 +17,13 @@ import (
"code.gitea.io/gitea/modules/timeutil"
)
+// SyncResult describes a reference update detected during sync.
+type SyncResult struct {
+ RefName git.RefName
+ OldCommitID string
+ NewCommitID string
+}
+
// SyncRepoBranches synchronizes branch table with repository branches
func SyncRepoBranches(ctx context.Context, repoID, doerID int64) (int64, error) {
repo, err := repo_model.GetRepositoryByID(ctx, repoID)
@@ -33,18 +40,19 @@ func SyncRepoBranches(ctx context.Context, repoID, doerID int64) (int64, error)
}
defer gitRepo.Close()
- return SyncRepoBranchesWithRepo(ctx, repo, gitRepo, doerID)
+ count, _, err := SyncRepoBranchesWithRepo(ctx, repo, gitRepo, doerID)
+ return count, err
}
-func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, doerID int64) (int64, error) {
+func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, doerID int64) (int64, []*SyncResult, error) {
objFmt, err := gitRepo.GetObjectFormat()
if err != nil {
- return 0, fmt.Errorf("GetObjectFormat: %w", err)
+ return 0, nil, fmt.Errorf("GetObjectFormat: %w", err)
}
if objFmt.Name() != repo.ObjectFormatName {
repo.ObjectFormatName = objFmt.Name()
if err = repo_model.UpdateRepositoryColsWithAutoTime(ctx, repo, "object_format_name"); err != nil {
- return 0, fmt.Errorf("UpdateRepositoryColsWithAutoTime: %w", err)
+ return 0, nil, fmt.Errorf("UpdateRepositoryColsWithAutoTime: %w", err)
}
}
@@ -52,7 +60,7 @@ func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository,
{
branches, _, err := gitRepo.GetBranchNames(0, 0)
if err != nil {
- return 0, err
+ return 0, nil, err
}
log.Trace("SyncRepoBranches[%s]: branches[%d]: %v", repo.FullName(), len(branches), branches)
for _, branch := range branches {
@@ -67,7 +75,7 @@ func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository,
RepoID: repo.ID,
})
if err != nil {
- return 0, err
+ return 0, nil, err
}
for _, branch := range branches {
dbBranches[branch.Name] = branch
@@ -77,11 +85,12 @@ func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository,
var toAdd []*git_model.Branch
var toUpdate []*git_model.Branch
var toRemove []int64
+ var syncResults []*SyncResult
for branch := range allBranches {
dbb := dbBranches[branch]
commit, err := gitRepo.GetBranchCommit(branch)
if err != nil {
- return 0, err
+ return 0, nil, err
}
if dbb == nil {
toAdd = append(toAdd, &git_model.Branch{
@@ -92,7 +101,12 @@ func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository,
PusherID: doerID,
CommitTime: timeutil.TimeStamp(commit.Committer.When.Unix()),
})
- } else if commit.ID.String() != dbb.CommitID {
+ syncResults = append(syncResults, &SyncResult{
+ RefName: git.RefNameFromBranch(branch),
+ OldCommitID: "",
+ NewCommitID: commit.ID.String(),
+ })
+ } else if commit.ID.String() != dbb.CommitID || dbb.IsDeleted {
toUpdate = append(toUpdate, &git_model.Branch{
ID: dbb.ID,
RepoID: repo.ID,
@@ -102,19 +116,29 @@ func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository,
PusherID: doerID,
CommitTime: timeutil.TimeStamp(commit.Committer.When.Unix()),
})
+ syncResults = append(syncResults, &SyncResult{
+ RefName: git.RefNameFromBranch(branch),
+ OldCommitID: dbb.CommitID,
+ NewCommitID: commit.ID.String(),
+ })
}
}
for _, dbBranch := range dbBranches {
if !allBranches.Contains(dbBranch.Name) && !dbBranch.IsDeleted {
toRemove = append(toRemove, dbBranch.ID)
+ syncResults = append(syncResults, &SyncResult{
+ RefName: git.RefNameFromBranch(dbBranch.Name),
+ OldCommitID: dbBranch.CommitID,
+ NewCommitID: "",
+ })
}
}
log.Trace("SyncRepoBranches[%s]: toAdd: %v, toUpdate: %v, toRemove: %v", repo.FullName(), toAdd, toUpdate, toRemove)
if len(toAdd) == 0 && len(toRemove) == 0 && len(toUpdate) == 0 {
- return int64(len(allBranches)), nil
+ return int64(len(allBranches)), syncResults, nil
}
if err := db.WithTx(ctx, func(ctx context.Context) error {
@@ -140,7 +164,7 @@ func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository,
return nil
}); err != nil {
- return 0, err
+ return 0, nil, err
}
- return int64(len(allBranches)), nil
+ return int64(len(allBranches)), syncResults, nil
}
diff --git a/modules/repository/repo.go b/modules/repository/repo.go
index ad4a53b858..76125f5e61 100644
--- a/modules/repository/repo.go
+++ b/modules/repository/repo.go
@@ -53,7 +53,8 @@ func SyncRepoTags(ctx context.Context, repoID int64) error {
}
defer gitRepo.Close()
- return SyncReleasesWithTags(ctx, repo, gitRepo)
+ _, err = SyncReleasesWithTags(ctx, repo, gitRepo)
+ return err
}
// StoreMissingLfsObjectsInRepository downloads missing LFS objects
@@ -62,7 +63,9 @@ func StoreMissingLfsObjectsInRepository(ctx context.Context, repo *repo_model.Re
pointerChan := make(chan lfs.PointerBlob)
errChan := make(chan error, 1)
- go lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan, errChan)
+ go func() {
+ errChan <- lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan)
+ }()
downloadObjects := func(pointers []lfs.Pointer) error {
err := lfsClient.Download(ctx, pointers, func(p lfs.Pointer, content io.ReadCloser, objectError error) error {
@@ -150,13 +153,12 @@ func StoreMissingLfsObjectsInRepository(ctx context.Context, repo *repo_model.Re
}
}
- err, has := <-errChan
- if has {
+ err := <-errChan
+ if err != nil {
log.Error("Repo[%-v]: Error enumerating LFS objects for repository: %v", repo, err)
- return err
}
- return nil
+ return err
}
// shortRelease to reduce load memory, this struct can replace repo_model.Release
@@ -177,13 +179,14 @@ func (shortRelease) TableName() string {
// upstream. Hence, after each sync we want the release set to be
// identical to the upstream tag set. This is much more efficient for
// repositories like https://github.com/vim/vim (with over 13000 tags).
-func SyncReleasesWithTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository) error {
+func SyncReleasesWithTags(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository) ([]*SyncResult, error) {
log.Debug("SyncReleasesWithTags: in Repo[%d:%s/%s]", repo.ID, repo.OwnerName, repo.Name)
tags, _, err := gitRepo.GetTagInfos(0, 0)
if err != nil {
- return fmt.Errorf("unable to GetTagInfos in pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
+ return nil, fmt.Errorf("unable to GetTagInfos in pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
}
var added, deleted, updated int
+ var syncResults []*SyncResult
err = db.WithTx(ctx, func(ctx context.Context) error {
dbReleases, err := db.Find[shortRelease](ctx, repo_model.FindReleasesOptions{
RepoID: repo.ID,
@@ -194,7 +197,45 @@ func SyncReleasesWithTags(ctx context.Context, repo *repo_model.Repository, gitR
return fmt.Errorf("unable to FindReleases in pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
}
+ dbReleasesByID := make(map[int64]*shortRelease, len(dbReleases))
+ dbReleasesByTag := make(map[string]*shortRelease, len(dbReleases))
+ for _, release := range dbReleases {
+ dbReleasesByID[release.ID] = release
+ dbReleasesByTag[release.TagName] = release
+ }
+
inserts, deletes, updates := calcSync(tags, dbReleases)
+ syncResults = make([]*SyncResult, 0, len(inserts)+len(deletes)+len(updates))
+ for _, tag := range inserts {
+ syncResults = append(syncResults, &SyncResult{
+ RefName: git.RefNameFromTag(tag.Name),
+ OldCommitID: "",
+ NewCommitID: tag.Object.String(),
+ })
+ }
+ for _, deleteID := range deletes {
+ release := dbReleasesByID[deleteID]
+ if release == nil {
+ continue
+ }
+ syncResults = append(syncResults, &SyncResult{
+ RefName: git.RefNameFromTag(release.TagName),
+ OldCommitID: release.Sha1,
+ NewCommitID: "",
+ })
+ }
+ for _, tag := range updates {
+ release := dbReleasesByTag[tag.Name]
+ oldSha := ""
+ if release != nil {
+ oldSha = release.Sha1
+ }
+ syncResults = append(syncResults, &SyncResult{
+ RefName: git.RefNameFromTag(tag.Name),
+ OldCommitID: oldSha,
+ NewCommitID: tag.Object.String(),
+ })
+ }
//
// make release set identical to upstream tags
//
@@ -233,15 +274,15 @@ func SyncReleasesWithTags(ctx context.Context, repo *repo_model.Repository, gitR
return fmt.Errorf("unable to update tag %s for pull-mirror Repo[%d:%s/%s]: %w", tag.Name, repo.ID, repo.OwnerName, repo.Name, err)
}
}
- added, deleted, updated = len(deletes), len(updates), len(inserts)
+ added, deleted, updated = len(inserts), len(deletes), len(updates)
return nil
})
if err != nil {
- return fmt.Errorf("unable to rebuild release table for pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
+ return nil, fmt.Errorf("unable to rebuild release table for pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err)
}
log.Trace("SyncReleasesWithTags: %d tags added, %d tags deleted, %d tags updated", added, deleted, updated)
- return nil
+ return syncResults, nil
}
func calcSync(destTags []*git.Tag, dbTags []*shortRelease) ([]*git.Tag, []int64, []*git.Tag) {
diff --git a/modules/session/virtual.go b/modules/session/virtual.go
index 35a995d2d0..597b9e55c1 100644
--- a/modules/session/virtual.go
+++ b/modules/session/virtual.go
@@ -65,7 +65,6 @@ func (o *VirtualSessionProvider) Read(sid string) (session.RawStore, error) {
return nil, fmt.Errorf("check if '%s' exist failed: %w", sid, err)
}
kv := make(map[any]any)
- kv["_old_uid"] = "0"
return NewVirtualStore(o, sid, kv), nil
}
@@ -160,7 +159,7 @@ func (s *VirtualStore) Release() error {
// Now need to lock the provider
s.p.lock.Lock()
defer s.p.lock.Unlock()
- if oldUID, ok := s.data["_old_uid"]; (ok && (oldUID != "0" || len(s.data) > 1)) || (!ok && len(s.data) > 0) {
+ if len(s.data) > 0 {
// Now ensure that we don't exist!
realProvider := s.p.provider
diff --git a/modules/setting/actions.go b/modules/setting/actions.go
index 34346b62cf..7a91ecb593 100644
--- a/modules/setting/actions.go
+++ b/modules/setting/actions.go
@@ -4,6 +4,7 @@
package setting
import (
+ "errors"
"fmt"
"strings"
"time"
@@ -25,10 +26,12 @@ var (
EndlessTaskTimeout time.Duration `ini:"ENDLESS_TASK_TIMEOUT"`
AbandonedJobTimeout time.Duration `ini:"ABANDONED_JOB_TIMEOUT"`
SkipWorkflowStrings []string `ini:"SKIP_WORKFLOW_STRINGS"`
+ WorkflowDirs []string `ini:"WORKFLOW_DIRS"`
}{
Enabled: true,
DefaultActionsURL: defaultActionsURLGitHub,
SkipWorkflowStrings: []string{"[skip ci]", "[ci skip]", "[no ci]", "[skip actions]", "[actions skip]"},
+ WorkflowDirs: []string{".gitea/workflows", ".github/workflows"},
}
)
@@ -119,5 +122,20 @@ func loadActionsFrom(rootCfg ConfigProvider) error {
return fmt.Errorf("invalid [actions] LOG_COMPRESSION: %q", Actions.LogCompression)
}
+ workflowDirs := make([]string, 0, len(Actions.WorkflowDirs))
+ for _, dir := range Actions.WorkflowDirs {
+ dir = strings.TrimSpace(dir)
+ if dir == "" {
+ continue
+ }
+ dir = strings.ReplaceAll(dir, `\`, `/`)
+ dir = strings.TrimRight(dir, "/")
+ workflowDirs = append(workflowDirs, dir)
+ }
+ if len(workflowDirs) == 0 {
+ return errors.New("[actions] WORKFLOW_DIRS must contain at least one entry")
+ }
+ Actions.WorkflowDirs = workflowDirs
+
return nil
}
diff --git a/modules/setting/actions_test.go b/modules/setting/actions_test.go
index 353cc657fa..5c7ab268c1 100644
--- a/modules/setting/actions_test.go
+++ b/modules/setting/actions_test.go
@@ -97,6 +97,65 @@ STORAGE_TYPE = minio
assert.Equal(t, "actions_artifacts", filepath.Base(Actions.ArtifactStorage.Path))
}
+func Test_WorkflowDirs(t *testing.T) {
+ oldActions := Actions
+ defer func() {
+ Actions = oldActions
+ }()
+
+ tests := []struct {
+ name string
+ iniStr string
+ wantDirs []string
+ wantErr bool
+ }{
+ {
+ name: "default",
+ iniStr: `[actions]`,
+ wantDirs: []string{".gitea/workflows", ".github/workflows"},
+ },
+ {
+ name: "single dir",
+ iniStr: "[actions]\nWORKFLOW_DIRS = .github/workflows",
+ wantDirs: []string{".github/workflows"},
+ },
+ {
+ name: "custom order",
+ iniStr: "[actions]\nWORKFLOW_DIRS = .github/workflows,.gitea/workflows",
+ wantDirs: []string{".github/workflows", ".gitea/workflows"},
+ },
+ {
+ name: "whitespace trimming",
+ iniStr: "[actions]\nWORKFLOW_DIRS = .gitea/workflows , .github/workflows ",
+ wantDirs: []string{".gitea/workflows", ".github/workflows"},
+ },
+ {
+ name: "trailing slash normalization",
+ iniStr: "[actions]\nWORKFLOW_DIRS = .gitea/workflows/,.github/workflows/",
+ wantDirs: []string{".gitea/workflows", ".github/workflows"},
+ },
+ {
+ name: "only commas and whitespace",
+ iniStr: "[actions]\nWORKFLOW_DIRS = , , ,",
+ wantErr: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(tt.iniStr)
+ require.NoError(t, err)
+ err = loadActionsFrom(cfg)
+ if tt.wantErr {
+ require.Error(t, err)
+ return
+ }
+ require.NoError(t, err)
+ assert.Equal(t, tt.wantDirs, Actions.WorkflowDirs)
+ })
+ }
+}
+
func Test_getDefaultActionsURLForActions(t *testing.T) {
oldActions := Actions
oldAppURL := AppURL
diff --git a/modules/setting/config_env.go b/modules/setting/config_env.go
index 4758eb72cb..e956e34439 100644
--- a/modules/setting/config_env.go
+++ b/modules/setting/config_env.go
@@ -167,24 +167,13 @@ func EnvironmentToConfig(cfg ConfigProvider, envs []string) (changed bool) {
return changed
}
-// InitGiteaEnvVars initializes the environment variables for gitea
-func InitGiteaEnvVars() {
+func UnsetUnnecessaryEnvVars() {
// Ideally Gitea should only accept the environment variables which it clearly knows instead of unsetting the ones it doesn't want,
- // but the ideal behavior would be a breaking change, and it seems not bringing enough benefits to end users,
- // so at the moment we could still keep "unsetting the unnecessary environments"
+ // but the ideal behavior would be a breaking change, and it seems not bringing enough benefits to end users.
+ // So at the moment we just keep "unsetting the unnecessary environment variables".
// HOME is managed by Gitea, Gitea's git should use "HOME/.gitconfig".
// But git would try "XDG_CONFIG_HOME/git/config" first if "HOME/.gitconfig" does not exist,
// then our git.InitFull would still write to "XDG_CONFIG_HOME/git/config" if XDG_CONFIG_HOME is set.
_ = os.Unsetenv("XDG_CONFIG_HOME")
}
-
-func InitGiteaEnvVarsForTesting() {
- InitGiteaEnvVars()
- _ = os.Unsetenv("GIT_AUTHOR_NAME")
- _ = os.Unsetenv("GIT_AUTHOR_EMAIL")
- _ = os.Unsetenv("GIT_AUTHOR_DATE")
- _ = os.Unsetenv("GIT_COMMITTER_NAME")
- _ = os.Unsetenv("GIT_COMMITTER_EMAIL")
- _ = os.Unsetenv("GIT_COMMITTER_DATE")
-}
diff --git a/modules/setting/config_provider.go b/modules/setting/config_provider.go
index 57dc23b17f..a734f3bf8f 100644
--- a/modules/setting/config_provider.go
+++ b/modules/setting/config_provider.go
@@ -348,23 +348,6 @@ func deprecatedSettingDB(rootCfg ConfigProvider, oldSection, oldKey string) {
}
}
-// NewConfigProviderForLocale loads locale configuration from source and others. "string" if for a local file path, "[]byte" is for INI content
-func NewConfigProviderForLocale(source any, others ...any) (ConfigProvider, error) {
- iniFile, err := ini.LoadSources(ini.LoadOptions{
- IgnoreInlineComment: true,
- UnescapeValueCommentSymbols: true,
- IgnoreContinuation: true,
- }, source, others...)
- if err != nil {
- return nil, fmt.Errorf("unable to load locale ini: %w", err)
- }
- iniFile.BlockMode = false
- return &iniConfigProvider{
- ini: iniFile,
- loadedFromEmpty: true,
- }, nil
-}
-
func init() {
ini.PrettyFormat = false
}
diff --git a/modules/setting/config_provider_test.go b/modules/setting/config_provider_test.go
index 63121f0074..dcbe280e82 100644
--- a/modules/setting/config_provider_test.go
+++ b/modules/setting/config_provider_test.go
@@ -113,24 +113,6 @@ func TestNewConfigProviderFromFile(t *testing.T) {
assert.Equal(t, "[foo]\nk1 = a\n\n[bar]\nk1 = b\n", string(bs))
}
-func TestNewConfigProviderForLocale(t *testing.T) {
- // load locale from file
- localeFile := t.TempDir() + "/locale.ini"
- _ = os.WriteFile(localeFile, []byte(`k1=a`), 0o644)
- cfg, err := NewConfigProviderForLocale(localeFile)
- assert.NoError(t, err)
- assert.Equal(t, "a", cfg.Section("").Key("k1").String())
-
- // load locale from bytes
- cfg, err = NewConfigProviderForLocale([]byte("k1=foo\nk2=bar"))
- assert.NoError(t, err)
- assert.Equal(t, "foo", cfg.Section("").Key("k1").String())
- cfg, err = NewConfigProviderForLocale([]byte("k1=foo\nk2=bar"), []byte("k2=xxx"))
- assert.NoError(t, err)
- assert.Equal(t, "foo", cfg.Section("").Key("k1").String())
- assert.Equal(t, "xxx", cfg.Section("").Key("k2").String())
-}
-
func TestDisableSaving(t *testing.T) {
testFile := t.TempDir() + "/test.ini"
_ = os.WriteFile(testFile, []byte("k1=a\nk2=b"), 0o644)
diff --git a/modules/setting/git.go b/modules/setting/git.go
index 318f2c0cac..29fd3daf8a 100644
--- a/modules/setting/git.go
+++ b/modules/setting/git.go
@@ -33,11 +33,8 @@ var Git = struct {
DisablePartialClone bool
DiffRenameSimilarityThreshold string
Timeout struct {
- Default int
Migrate int
Mirror int
- Clone int
- Pull int
GC int `ini:"GC"`
} `ini:"git.timeout"`
}{
@@ -56,18 +53,12 @@ var Git = struct {
DisablePartialClone: false,
DiffRenameSimilarityThreshold: "50%",
Timeout: struct {
- Default int
Migrate int
Mirror int
- Clone int
- Pull int
GC int `ini:"GC"`
}{
- Default: 360,
Migrate: 600,
Mirror: 300,
- Clone: 300,
- Pull: 300,
GC: 60,
},
}
diff --git a/modules/setting/mailer_test.go b/modules/setting/mailer_test.go
index ceef35b051..f281715973 100644
--- a/modules/setting/mailer_test.go
+++ b/modules/setting/mailer_test.go
@@ -11,12 +11,12 @@ import (
func Test_loadMailerFrom(t *testing.T) {
kases := map[string]*Mailer{
- "smtp.mydomain.com": {
- SMTPAddr: "smtp.mydomain.com",
+ "smtp.mydomain.test": {
+ SMTPAddr: "smtp.mydomain.test",
SMTPPort: "465",
},
- "smtp.mydomain.com:123": {
- SMTPAddr: "smtp.mydomain.com",
+ "smtp.mydomain.test:123": {
+ SMTPAddr: "smtp.mydomain.test",
SMTPPort: "123",
},
":123": {
diff --git a/modules/setting/markup.go b/modules/setting/markup.go
index caf0d5f8d9..921af60ff5 100644
--- a/modules/setting/markup.go
+++ b/modules/setting/markup.go
@@ -6,6 +6,7 @@ package setting
import (
"regexp"
"strings"
+ "sync"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
@@ -43,22 +44,20 @@ var Markdown = struct {
RenderOptionsRepoFile MarkdownRenderOptions `ini:"-"`
CustomURLSchemes []string `ini:"CUSTOM_URL_SCHEMES"` // Actually it is a "markup" option because it is used in "post processor"
- FileExtensions []string
+ FileNamePatterns []string `ini:"-"`
EnableMath bool
MathCodeBlockDetection []string
MathCodeBlockOptions MarkdownMathCodeBlockOptions `ini:"-"`
}{
- FileExtensions: strings.Split(".md,.markdown,.mdown,.mkd,.livemd", ","),
- EnableMath: true,
+ EnableMath: true,
}
// MarkupRenderer defines the external parser configured in ini
type MarkupRenderer struct {
- Enabled bool
MarkupName string
Command string
- FileExtensions []string
+ FilePatterns []string
IsInputFile bool
NeedPostProcess bool
MarkupSanitizerRules []MarkupSanitizerRule
@@ -77,6 +76,13 @@ type MarkupSanitizerRule struct {
func loadMarkupFrom(rootCfg ConfigProvider) {
mustMapSetting(rootCfg, "markdown", &Markdown)
+
+ markdownFileExtensions := rootCfg.Section("markdown").Key("FILE_EXTENSIONS").Strings(",")
+ if len(markdownFileExtensions) == 0 || len(markdownFileExtensions) == 1 && markdownFileExtensions[0] == "" {
+ markdownFileExtensions = []string{".md", ".markdown", ".mdown", ".mkd", ".livemd"}
+ }
+ Markdown.FileNamePatterns = fileExtensionsToPatterns("markdown", markdownFileExtensions)
+
const none = "none"
const renderOptionShortIssuePattern = "short-issue-pattern"
@@ -215,21 +221,30 @@ func createMarkupSanitizerRule(name string, sec ConfigSection) (MarkupSanitizerR
return rule, true
}
-func newMarkupRenderer(name string, sec ConfigSection) {
- extensionReg := regexp.MustCompile(`\.\w`)
+var extensionReg = sync.OnceValue(func() *regexp.Regexp {
+ return regexp.MustCompile(`^(\.[-\w]+)+$`)
+})
- extensions := sec.Key("FILE_EXTENSIONS").Strings(",")
- exts := make([]string, 0, len(extensions))
+func fileExtensionsToPatterns(sectionName string, extensions []string) []string {
+ patterns := make([]string, 0, len(extensions))
for _, extension := range extensions {
- if !extensionReg.MatchString(extension) {
- log.Warn(sec.Name() + " file extension " + extension + " is invalid. Extension ignored")
+ if !extensionReg().MatchString(extension) {
+ log.Warn("Config section %s file extension %s is invalid. Extension ignored", sectionName, extension)
} else {
- exts = append(exts, extension)
+ patterns = append(patterns, "*"+extension)
}
}
+ return patterns
+}
- if len(exts) == 0 {
- log.Warn(sec.Name() + " file extension is empty, markup " + name + " ignored")
+func newMarkupRenderer(name string, sec ConfigSection) {
+ if !sec.Key("ENABLED").MustBool(false) {
+ return
+ }
+
+ fileNamePatterns := fileExtensionsToPatterns(name, sec.Key("FILE_EXTENSIONS").Strings(","))
+ if len(fileNamePatterns) == 0 {
+ log.Warn("Config section %s file extension is empty, markup render is ignored", name)
return
}
@@ -262,11 +277,10 @@ func newMarkupRenderer(name string, sec ConfigSection) {
}
ExternalMarkupRenderers = append(ExternalMarkupRenderers, &MarkupRenderer{
- Enabled: sec.Key("ENABLED").MustBool(false),
- MarkupName: name,
- FileExtensions: exts,
- Command: command,
- IsInputFile: sec.Key("IS_INPUT_FILE").MustBool(false),
+ MarkupName: name,
+ FilePatterns: fileNamePatterns,
+ Command: command,
+ IsInputFile: sec.Key("IS_INPUT_FILE").MustBool(false),
RenderContentMode: renderContentMode,
RenderContentSandbox: renderContentSandbox,
diff --git a/modules/setting/storage.go b/modules/setting/storage.go
index ee246158d9..9ee3f01633 100644
--- a/modules/setting/storage.go
+++ b/modules/setting/storage.go
@@ -172,11 +172,11 @@ func getStorageSectionByType(rootCfg ConfigProvider, typ string) (ConfigSection,
targetType := targetSec.Key("STORAGE_TYPE").String()
if targetType == "" {
if !IsValidStorageType(StorageType(typ)) {
- return nil, 0, fmt.Errorf("unknow storage type %q", typ)
+ return nil, 0, fmt.Errorf("unknown storage type %q", typ)
}
targetSec.Key("STORAGE_TYPE").SetValue(typ)
} else if !IsValidStorageType(StorageType(targetType)) {
- return nil, 0, fmt.Errorf("unknow storage type %q for section storage.%v", targetType, typ)
+ return nil, 0, fmt.Errorf("unknown storage type %q for section storage.%v", targetType, typ)
}
return targetSec, targetSecIsTyp, nil
@@ -202,7 +202,7 @@ func getStorageTargetSection(rootCfg ConfigProvider, name, typ string, sec Confi
}
}
- // check stoarge name thirdly
+ // check storage name thirdly
targetSec, _ := rootCfg.GetSection(storageSectionName + "." + name)
if targetSec != nil {
targetType := targetSec.Key("STORAGE_TYPE").String()
diff --git a/modules/setting/testenv.go b/modules/setting/testenv.go
new file mode 100644
index 0000000000..853521c328
--- /dev/null
+++ b/modules/setting/testenv.go
@@ -0,0 +1,72 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+
+ "code.gitea.io/gitea/modules/util"
+)
+
+var giteaTestSourceRoot *string
+
+func GetGiteaTestSourceRoot() string {
+ return *giteaTestSourceRoot
+}
+
+func SetupGiteaTestEnv() {
+ if giteaTestSourceRoot != nil {
+ return // already initialized
+ }
+
+ IsInTesting = true
+ giteaRoot := os.Getenv("GITEA_TEST_ROOT")
+ if giteaRoot == "" {
+ _, filename, _, _ := runtime.Caller(0)
+ giteaRoot = filepath.Dir(filepath.Dir(filepath.Dir(filename)))
+ fixturesDir := filepath.Join(giteaRoot, "models", "fixtures")
+ if _, err := os.Stat(fixturesDir); err != nil {
+ panic("in gitea source code directory, fixtures directory not found: " + fixturesDir)
+ }
+ }
+
+ appWorkPathBuiltin = giteaRoot
+ AppWorkPath = giteaRoot
+ AppPath = filepath.Join(giteaRoot, "gitea") + util.Iif(IsWindows, ".exe", "")
+ StaticRootPath = giteaRoot // need to load assets (options, public) from the source code directory for testing
+
+ // giteaConf (GITEA_CONF) must be relative because it is used in the git hooks as "$GITEA_ROOT/$GITEA_CONF"
+ giteaConf := os.Getenv("GITEA_TEST_CONF")
+ if giteaConf == "" {
+ // By default, use sqlite.ini for testing, then IDE like GoLand can start the test process with debugger.
+ // It's easier for developers to debug bugs step by step with a debugger.
+ // Notice: when doing "ssh push", Gitea executes sub processes, debugger won't work for the sub processes.
+ giteaConf = "tests/sqlite.ini"
+ _, _ = fmt.Fprintf(os.Stderr, "Environment variable GITEA_TEST_CONF not set - defaulting to %s\n", giteaConf)
+ if !EnableSQLite3 {
+ _, _ = fmt.Fprintf(os.Stderr, "sqlite3 requires: -tags sqlite,sqlite_unlock_notify\n")
+ os.Exit(1)
+ }
+ }
+ // CustomConf must be absolute path to make tests pass,
+ CustomConf = filepath.Join(AppWorkPath, giteaConf)
+
+ // also unset unnecessary env vars for testing (only keep "GITEA_TEST_*" ones)
+ UnsetUnnecessaryEnvVars()
+ for _, env := range os.Environ() {
+ if strings.HasPrefix(env, "GIT_") || (strings.HasPrefix(env, "GITEA_") && !strings.HasPrefix(env, "GITEA_TEST_")) {
+ k, _, _ := strings.Cut(env, "=")
+ _ = os.Unsetenv(k)
+ }
+ }
+
+ // TODO: some git repo hooks (test fixtures) still use these env variables, need to be refactored in the future
+ _ = os.Setenv("GITEA_ROOT", giteaRoot)
+ _ = os.Setenv("GITEA_CONF", giteaConf) // test fixture git hooks use "$GITEA_ROOT/$GITEA_CONF" in their scripts
+ giteaTestSourceRoot = &giteaRoot
+}
diff --git a/modules/setting/ui.go b/modules/setting/ui.go
index 13cb0f5c66..77a5b45d0a 100644
--- a/modules/setting/ui.go
+++ b/modules/setting/ui.go
@@ -29,6 +29,7 @@ var UI = struct {
DefaultTheme string
Themes []string
FileIconTheme string
+ FolderIconTheme string
Reactions []string
ReactionsLookup container.Set[string] `ini:"-"`
CustomEmojis []string
@@ -88,6 +89,7 @@ var UI = struct {
MaxDisplayFileSize: 8388608,
DefaultTheme: `gitea-auto`,
FileIconTheme: `material`,
+ FolderIconTheme: `basic`,
Reactions: []string{`+1`, `-1`, `laugh`, `hooray`, `confused`, `heart`, `rocket`, `eyes`},
CustomEmojis: []string{`git`, `gitea`, `codeberg`, `gitlab`, `github`, `gogs`},
CustomEmojisMap: map[string]string{"git": ":git:", "gitea": ":gitea:", "codeberg": ":codeberg:", "gitlab": ":gitlab:", "github": ":github:", "gogs": ":gogs:"},
diff --git a/modules/structs/pull.go b/modules/structs/pull.go
index 7cc58217a0..3ad2f78bd3 100644
--- a/modules/structs/pull.go
+++ b/modules/structs/pull.go
@@ -140,6 +140,8 @@ type CreatePullRequestOption struct {
Reviewers []string `json:"reviewers"`
// The list of team reviewer names
TeamReviewers []string `json:"team_reviewers"`
+ // Whether maintainers can edit the pull request
+ AllowMaintainerEdit *bool `json:"allow_maintainer_edit"`
}
// EditPullRequestOption options when modify pull request
diff --git a/modules/structs/pull_review.go b/modules/structs/pull_review.go
index e93e4e9720..f44d2f84f5 100644
--- a/modules/structs/pull_review.go
+++ b/modules/structs/pull_review.go
@@ -72,7 +72,7 @@ type PullReviewComment struct {
HTMLPullURL string `json:"pull_request_url"`
}
-// CreatePullReviewOptions are options to create a pull review
+// CreatePullReviewOptions are options to create a pull request review
type CreatePullReviewOptions struct {
Event ReviewStateType `json:"event"`
Body string `json:"body"`
@@ -91,19 +91,19 @@ type CreatePullReviewComment struct {
NewLineNum int64 `json:"new_position"`
}
-// SubmitPullReviewOptions are options to submit a pending pull review
+// SubmitPullReviewOptions are options to submit a pending pull request review
type SubmitPullReviewOptions struct {
Event ReviewStateType `json:"event"`
Body string `json:"body"`
}
-// DismissPullReviewOptions are options to dismiss a pull review
+// DismissPullReviewOptions are options to dismiss a pull request review
type DismissPullReviewOptions struct {
Message string `json:"message"`
Priors bool `json:"priors"`
}
-// PullReviewRequestOptions are options to add or remove pull review requests
+// PullReviewRequestOptions are options to add or remove pull request review requests
type PullReviewRequestOptions struct {
Reviewers []string `json:"reviewers"`
TeamReviewers []string `json:"team_reviewers"`
diff --git a/modules/structs/repo.go b/modules/structs/repo.go
index 47973a5f6a..765546a5aa 100644
--- a/modules/structs/repo.go
+++ b/modules/structs/repo.go
@@ -58,26 +58,27 @@ type Repository struct {
Fork bool `json:"fork"`
Template bool `json:"template"`
// the original repository if this repository is a fork, otherwise null
- Parent *Repository `json:"parent,omitempty"`
- Mirror bool `json:"mirror"`
- Size int `json:"size"`
- Language string `json:"language"`
- LanguagesURL string `json:"languages_url"`
- HTMLURL string `json:"html_url"`
- URL string `json:"url"`
- Link string `json:"link"`
- SSHURL string `json:"ssh_url"`
- CloneURL string `json:"clone_url"`
- OriginalURL string `json:"original_url"`
- Website string `json:"website"`
- Stars int `json:"stars_count"`
- Forks int `json:"forks_count"`
- Watchers int `json:"watchers_count"`
- OpenIssues int `json:"open_issues_count"`
- OpenPulls int `json:"open_pr_counter"`
- Releases int `json:"release_counter"`
- DefaultBranch string `json:"default_branch"`
- Archived bool `json:"archived"`
+ Parent *Repository `json:"parent,omitempty"`
+ Mirror bool `json:"mirror"`
+ Size int `json:"size"`
+ Language string `json:"language"`
+ LanguagesURL string `json:"languages_url"`
+ HTMLURL string `json:"html_url"`
+ URL string `json:"url"`
+ Link string `json:"link"`
+ SSHURL string `json:"ssh_url"`
+ CloneURL string `json:"clone_url"`
+ OriginalURL string `json:"original_url"`
+ Website string `json:"website"`
+ Stars int `json:"stars_count"`
+ Forks int `json:"forks_count"`
+ Watchers int `json:"watchers_count"`
+ OpenIssues int `json:"open_issues_count"`
+ OpenPulls int `json:"open_pr_counter"`
+ Releases int `json:"release_counter"`
+ DefaultBranch string `json:"default_branch"`
+ DefaultTargetBranch string `json:"default_target_branch,omitempty"`
+ Archived bool `json:"archived"`
// swagger:strfmt date-time
Created time.Time `json:"created_at"`
// swagger:strfmt date-time
diff --git a/modules/templates/base.go b/modules/templates/base.go
index 2c2f35bbed..c8697cc7ef 100644
--- a/modules/templates/base.go
+++ b/modules/templates/base.go
@@ -4,9 +4,6 @@
package templates
import (
- "slices"
- "strings"
-
"code.gitea.io/gitea/modules/assetfs"
"code.gitea.io/gitea/modules/setting"
)
@@ -18,23 +15,3 @@ func AssetFS() *assetfs.LayeredFS {
func CustomAssets() *assetfs.Layer {
return assetfs.Local("custom", setting.CustomPath, "templates")
}
-
-func ListWebTemplateAssetNames(assets *assetfs.LayeredFS) ([]string, error) {
- files, err := assets.ListAllFiles(".", true)
- if err != nil {
- return nil, err
- }
- return slices.DeleteFunc(files, func(file string) bool {
- return strings.HasPrefix(file, "mail/") || !strings.HasSuffix(file, ".tmpl")
- }), nil
-}
-
-func ListMailTemplateAssetNames(assets *assetfs.LayeredFS) ([]string, error) {
- files, err := assets.ListAllFiles(".", true)
- if err != nil {
- return nil, err
- }
- return slices.DeleteFunc(files, func(file string) bool {
- return !strings.HasPrefix(file, "mail/") || !strings.HasSuffix(file, ".tmpl")
- }), nil
-}
diff --git a/modules/templates/helper.go b/modules/templates/helper.go
index a7aa321811..11c52bd5a7 100644
--- a/modules/templates/helper.go
+++ b/modules/templates/helper.go
@@ -25,8 +25,6 @@ import (
// NewFuncMap returns functions for injecting to templates
func NewFuncMap() template.FuncMap {
return map[string]any{
- "ctx": func() any { return nil }, // template context function
-
"DumpVar": dumpVar,
"NIL": func() any { return nil },
diff --git a/modules/templates/helper_test.go b/modules/templates/helper_test.go
index 7e3a952e7b..f90818c0ad 100644
--- a/modules/templates/helper_test.go
+++ b/modules/templates/helper_test.go
@@ -168,3 +168,10 @@ func TestQueryBuild(t *testing.T) {
assert.Equal(t, "&a=b&c=d&e=f", string(QueryBuild("&a=b&c=d&e=f", "k", "")))
})
}
+
+func TestQueryEscape(t *testing.T) {
+ // this test is a reference for "urlQueryEscape" in JS
+ in := "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" // all non-letter & non-number chars
+ expected := "%21%22%23%24%25%26%27%28%29%2A%2B%2C-.%2F%3A%3B%3C%3D%3E%3F%40%5B%5C%5D%5E_%60%7B%7C%7D~"
+ assert.Equal(t, expected, string(queryEscape(in)))
+}
diff --git a/modules/templates/htmlrenderer.go b/modules/templates/htmlrenderer.go
index 8073a6e5f5..59b95cdd80 100644
--- a/modules/templates/htmlrenderer.go
+++ b/modules/templates/htmlrenderer.go
@@ -6,21 +6,18 @@ package templates
import (
"bufio"
"bytes"
- "context"
"errors"
"fmt"
+ "html/template"
"io"
- "net/http"
"path/filepath"
"regexp"
"strconv"
"strings"
- "sync"
"sync/atomic"
texttemplate "text/template"
"code.gitea.io/gitea/modules/assetfs"
- "code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates/scopedtmpl"
@@ -31,58 +28,27 @@ type TemplateExecutor scopedtmpl.TemplateExecutor
type TplName string
-type HTMLRender struct {
+type tmplRender struct {
templates atomic.Pointer[scopedtmpl.ScopedTemplate]
+
+ collectTemplateNames func() ([]string, error)
+ readTemplateContent func(name string) ([]byte, error)
}
-var (
- htmlRender *HTMLRender
- htmlRenderOnce sync.Once
-)
-
-var ErrTemplateNotInitialized = errors.New("template system is not initialized, check your log for errors")
-
-func (h *HTMLRender) HTML(w io.Writer, status int, tplName TplName, data any, ctx context.Context) error { //nolint:revive // we don't use ctx, only pass it to the template executor
- name := string(tplName)
- if respWriter, ok := w.(http.ResponseWriter); ok {
- if respWriter.Header().Get("Content-Type") == "" {
- respWriter.Header().Set("Content-Type", "text/html; charset=utf-8")
- }
- respWriter.WriteHeader(status)
- }
- t, err := h.TemplateLookup(name, ctx)
- if err != nil {
- return texttemplate.ExecError{Name: name, Err: err}
- }
- return t.Execute(w, data)
+func (h *tmplRender) Templates() *scopedtmpl.ScopedTemplate {
+ return h.templates.Load()
}
-func (h *HTMLRender) TemplateLookup(name string, ctx context.Context) (TemplateExecutor, error) { //nolint:revive // we don't use ctx, only pass it to the template executor
- tmpls := h.templates.Load()
- if tmpls == nil {
- return nil, ErrTemplateNotInitialized
- }
- m := NewFuncMap()
- m["ctx"] = func() any { return ctx }
- return tmpls.Executor(name, m)
-}
-
-func (h *HTMLRender) CompileTemplates() error {
- assets := AssetFS()
- extSuffix := ".tmpl"
+func (h *tmplRender) recompileTemplates(dummyFuncMap template.FuncMap) error {
tmpls := scopedtmpl.NewScopedTemplate()
- tmpls.Funcs(NewFuncMap())
- files, err := ListWebTemplateAssetNames(assets)
+ tmpls.Funcs(dummyFuncMap)
+ names, err := h.collectTemplateNames()
if err != nil {
- return nil
+ return err
}
- for _, file := range files {
- if !strings.HasSuffix(file, extSuffix) {
- continue
- }
- name := strings.TrimSuffix(file, extSuffix)
+ for _, name := range names {
tmpl := tmpls.New(filepath.ToSlash(name))
- buf, err := assets.ReadFile(file)
+ buf, err := h.readTemplateContent(name)
if err != nil {
return err
}
@@ -95,55 +61,20 @@ func (h *HTMLRender) CompileTemplates() error {
return nil
}
-// HTMLRenderer init once and returns the globally shared html renderer
-func HTMLRenderer() *HTMLRender {
- htmlRenderOnce.Do(initHTMLRenderer)
- return htmlRender
+func ReloadAllTemplates() error {
+ return errors.Join(PageRendererReload(), MailRendererReload())
}
-func ReloadHTMLTemplates() error {
- log.Trace("Reloading HTML templates")
- if err := htmlRender.CompileTemplates(); err != nil {
- log.Error("Template error: %v\n%s", err, log.Stack(2))
- return err
- }
- return nil
-}
-
-func initHTMLRenderer() {
- rendererType := "static"
- if !setting.IsProd {
- rendererType = "auto-reloading"
- }
- log.Debug("Creating %s HTML Renderer", rendererType)
-
- htmlRender = &HTMLRender{}
- if err := htmlRender.CompileTemplates(); err != nil {
- p := &templateErrorPrettier{assets: AssetFS()}
- wrapTmplErrMsg(p.handleFuncNotDefinedError(err))
- wrapTmplErrMsg(p.handleUnexpectedOperandError(err))
- wrapTmplErrMsg(p.handleExpectedEndError(err))
- wrapTmplErrMsg(p.handleGenericTemplateError(err))
- wrapTmplErrMsg(fmt.Sprintf("CompileTemplates error: %v", err))
- }
-
- if !setting.IsProd {
- go AssetFS().WatchLocalChanges(graceful.GetManager().ShutdownContext(), func() {
- _ = ReloadHTMLTemplates()
- })
- }
-}
-
-func wrapTmplErrMsg(msg string) {
- if msg == "" {
+func processStartupTemplateError(err error) {
+ if err == nil {
return
}
- if setting.IsProd {
+ if setting.IsProd || setting.IsInTesting {
// in prod mode, Gitea must have correct templates to run
- log.Fatal("Gitea can't run with template errors: %s", msg)
+ log.Fatal("Gitea can't run with template errors: %v", err)
}
// in dev mode, do not need to really exit, because the template errors could be fixed by developer soon and the templates get reloaded
- log.Error("There are template errors but Gitea continues to run in dev mode: %s", msg)
+ log.Error("There are template errors but Gitea continues to run in dev mode: %v", err)
}
type templateErrorPrettier struct {
diff --git a/modules/templates/mail.go b/modules/templates/mail.go
new file mode 100644
index 0000000000..ca13626468
--- /dev/null
+++ b/modules/templates/mail.go
@@ -0,0 +1,195 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package templates
+
+import (
+ "html/template"
+ "io"
+ "regexp"
+ "slices"
+ "strings"
+ "sync"
+ texttmpl "text/template"
+
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+)
+
+type MailRender struct {
+ TemplateNames []string
+ BodyTemplates struct {
+ HasTemplate func(name string) bool
+ ExecuteTemplate func(w io.Writer, name string, data any) error
+ }
+
+ // FIXME: MAIL-TEMPLATE-SUBJECT: only "issue" related messages support using subject from templates
+ // It is an incomplete implementation from "Use templates for issue e-mail subject and body" https://github.com/go-gitea/gitea/pull/8329
+ SubjectTemplates *texttmpl.Template
+
+ tmplRenderer *tmplRender
+
+ mockedBodyTemplates map[string]*template.Template
+}
+
+// mailSubjectTextFuncMap returns functions for injecting to text templates, it's only used for mail subject
+func mailSubjectTextFuncMap() texttmpl.FuncMap {
+ return texttmpl.FuncMap{
+ "dict": dict,
+ "Eval": evalTokens,
+
+ "EllipsisString": util.EllipsisDisplayString,
+ "AppName": func() string {
+ return setting.AppName
+ },
+ "AppDomain": func() string { // documented in mail-templates.md
+ return setting.Domain
+ },
+ }
+}
+
+var mailSubjectSplit = regexp.MustCompile(`(?m)^-{3,}\s*$`)
+
+func newMailRenderer() (*MailRender, error) {
+ subjectTemplates := texttmpl.New("")
+ subjectTemplates.Funcs(mailSubjectTextFuncMap())
+
+ renderer := &MailRender{
+ SubjectTemplates: subjectTemplates,
+ }
+
+ assetFS := AssetFS()
+
+ renderer.tmplRenderer = &tmplRender{
+ collectTemplateNames: func() ([]string, error) {
+ names, err := assetFS.ListAllFiles(".", true)
+ if err != nil {
+ return nil, err
+ }
+ names = slices.DeleteFunc(names, func(file string) bool {
+ return !strings.HasPrefix(file, "mail/") || !strings.HasSuffix(file, ".tmpl")
+ })
+ for i, name := range names {
+ names[i] = strings.TrimSuffix(strings.TrimPrefix(name, "mail/"), ".tmpl")
+ }
+ renderer.TemplateNames = names
+ return names, nil
+ },
+ readTemplateContent: func(name string) ([]byte, error) {
+ content, err := assetFS.ReadFile("mail/" + name + ".tmpl")
+ if err != nil {
+ return nil, err
+ }
+ var subjectContent []byte
+ bodyContent := content
+ loc := mailSubjectSplit.FindIndex(content)
+ if loc != nil {
+ subjectContent, bodyContent = content[0:loc[0]], content[loc[1]:]
+ }
+ _, err = renderer.SubjectTemplates.New(name).Parse(string(subjectContent))
+ if err != nil {
+ return nil, err
+ }
+ return bodyContent, nil
+ },
+ }
+
+ renderer.BodyTemplates.HasTemplate = func(name string) bool {
+ if renderer.mockedBodyTemplates[name] != nil {
+ return true
+ }
+ return renderer.tmplRenderer.Templates().HasTemplate(name)
+ }
+
+ staticFuncMap := NewFuncMap()
+ renderer.BodyTemplates.ExecuteTemplate = func(w io.Writer, name string, data any) error {
+ if t, ok := renderer.mockedBodyTemplates[name]; ok {
+ return t.Execute(w, data)
+ }
+ t, err := renderer.tmplRenderer.Templates().Executor(name, staticFuncMap)
+ if err != nil {
+ return err
+ }
+ return t.Execute(w, data)
+ }
+
+ err := renderer.tmplRenderer.recompileTemplates(staticFuncMap)
+ if err != nil {
+ return nil, err
+ }
+ return renderer, nil
+}
+
+func (r *MailRender) MockTemplate(name, subject, body string) func() {
+ if r.mockedBodyTemplates == nil {
+ r.mockedBodyTemplates = make(map[string]*template.Template)
+ }
+ oldSubject := r.SubjectTemplates
+ r.SubjectTemplates, _ = r.SubjectTemplates.Clone()
+ texttmpl.Must(r.SubjectTemplates.New(name).Parse(subject))
+
+ oldBody, hasOldBody := r.mockedBodyTemplates[name]
+ mockFuncMap := NewFuncMap()
+ r.mockedBodyTemplates[name] = template.Must(template.New(name).Funcs(mockFuncMap).Parse(body))
+ return func() {
+ r.SubjectTemplates = oldSubject
+ if hasOldBody {
+ r.mockedBodyTemplates[name] = oldBody
+ } else {
+ delete(r.mockedBodyTemplates, name)
+ }
+ }
+}
+
+var (
+ globalMailRenderer *MailRender
+ globalMailRendererMu sync.RWMutex
+)
+
+func MailRendererReload() error {
+ globalMailRendererMu.Lock()
+ defer globalMailRendererMu.Unlock()
+ r, err := newMailRenderer()
+ if err != nil {
+ return err
+ }
+ globalMailRenderer = r
+ return nil
+}
+
+func MailRenderer() *MailRender {
+ globalMailRendererMu.RLock()
+ r := globalMailRenderer
+ globalMailRendererMu.RUnlock()
+ if r != nil {
+ return r
+ }
+
+ globalMailRendererMu.Lock()
+ defer globalMailRendererMu.Unlock()
+ if globalMailRenderer != nil {
+ return globalMailRenderer
+ }
+
+ var err error
+ globalMailRenderer, err = newMailRenderer()
+ if err != nil {
+ log.Fatal("Failed to initialize mail renderer: %v", err)
+ }
+
+ if !setting.IsProd {
+ go AssetFS().WatchLocalChanges(graceful.GetManager().ShutdownContext(), func() {
+ globalMailRendererMu.Lock()
+ defer globalMailRendererMu.Unlock()
+ r, err := newMailRenderer()
+ if err != nil {
+ log.Error("Mail template error: %v", err)
+ return
+ }
+ globalMailRenderer = r
+ })
+ }
+ return globalMailRenderer
+}
diff --git a/modules/templates/mailer.go b/modules/templates/mailer.go
deleted file mode 100644
index c43b760777..0000000000
--- a/modules/templates/mailer.go
+++ /dev/null
@@ -1,117 +0,0 @@
-// Copyright 2022 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package templates
-
-import (
- "context"
- "fmt"
- "html/template"
- "regexp"
- "strings"
- "sync/atomic"
- texttmpl "text/template"
-
- "code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/util"
-)
-
-type MailTemplates struct {
- TemplateNames []string
- BodyTemplates *template.Template
- SubjectTemplates *texttmpl.Template
-}
-
-var mailSubjectSplit = regexp.MustCompile(`(?m)^-{3,}\s*$`)
-
-// mailSubjectTextFuncMap returns functions for injecting to text templates, it's only used for mail subject
-func mailSubjectTextFuncMap() texttmpl.FuncMap {
- return texttmpl.FuncMap{
- "dict": dict,
- "Eval": evalTokens,
-
- "EllipsisString": util.EllipsisDisplayString,
- "AppName": func() string {
- return setting.AppName
- },
- "AppDomain": func() string { // documented in mail-templates.md
- return setting.Domain
- },
- }
-}
-
-func buildSubjectBodyTemplate(stpl *texttmpl.Template, btpl *template.Template, name string, content []byte) error {
- // Split template into subject and body
- var subjectContent []byte
- bodyContent := content
- loc := mailSubjectSplit.FindIndex(content)
- if loc != nil {
- subjectContent = content[0:loc[0]]
- bodyContent = content[loc[1]:]
- }
- if _, err := stpl.New(name).Parse(string(subjectContent)); err != nil {
- return fmt.Errorf("failed to parse template [%s/subject]: %w", name, err)
- }
- if _, err := btpl.New(name).Parse(string(bodyContent)); err != nil {
- return fmt.Errorf("failed to parse template [%s/body]: %w", name, err)
- }
- return nil
-}
-
-// LoadMailTemplates provides the templates required for sending notification mails.
-func LoadMailTemplates(ctx context.Context, loadedTemplates *atomic.Pointer[MailTemplates]) {
- assetFS := AssetFS()
- refreshTemplates := func(firstRun bool) {
- var templateNames []string
- subjectTemplates := texttmpl.New("")
- bodyTemplates := template.New("")
-
- subjectTemplates.Funcs(mailSubjectTextFuncMap())
- bodyTemplates.Funcs(NewFuncMap())
-
- if !firstRun {
- log.Trace("Reloading mail templates")
- }
- assetPaths, err := ListMailTemplateAssetNames(assetFS)
- if err != nil {
- log.Error("Failed to list mail templates: %v", err)
- return
- }
-
- for _, assetPath := range assetPaths {
- content, layerName, err := assetFS.ReadLayeredFile(assetPath)
- if err != nil {
- log.Warn("Failed to read mail template %s by %s: %v", assetPath, layerName, err)
- continue
- }
- tmplName := strings.TrimPrefix(strings.TrimSuffix(assetPath, ".tmpl"), "mail/")
- if firstRun {
- log.Trace("Adding mail template %s: %s by %s", tmplName, assetPath, layerName)
- }
- templateNames = append(templateNames, tmplName)
- if err = buildSubjectBodyTemplate(subjectTemplates, bodyTemplates, tmplName, content); err != nil {
- if firstRun {
- log.Fatal("Failed to parse mail template, err: %v", err)
- }
- log.Error("Failed to parse mail template, err: %v", err)
- }
- }
- loaded := &MailTemplates{
- TemplateNames: templateNames,
- BodyTemplates: bodyTemplates,
- SubjectTemplates: subjectTemplates,
- }
- loadedTemplates.Store(loaded)
- }
-
- refreshTemplates(true)
-
- if !setting.IsProd {
- // Now subjectTemplates and bodyTemplates are both synchronized
- // thus it is safe to call refresh from a different goroutine
- go assetFS.WatchLocalChanges(ctx, func() {
- refreshTemplates(false)
- })
- }
-}
diff --git a/modules/templates/page.go b/modules/templates/page.go
new file mode 100644
index 0000000000..8f6c82fc4b
--- /dev/null
+++ b/modules/templates/page.go
@@ -0,0 +1,98 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package templates
+
+import (
+ "context"
+ "html/template"
+ "io"
+ "net/http"
+ "slices"
+ "strings"
+ "sync"
+ texttemplate "text/template"
+
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+)
+
+type pageRenderer struct {
+ tmplRenderer *tmplRender
+}
+
+func (r *pageRenderer) funcMap(ctx context.Context) template.FuncMap {
+ pageFuncMap := NewFuncMap()
+ pageFuncMap["ctx"] = func() any { return ctx }
+ return pageFuncMap
+}
+
+func (r *pageRenderer) funcMapDummy() template.FuncMap {
+ dummyFuncMap := NewFuncMap()
+ dummyFuncMap["ctx"] = func() any { return nil } // for template compilation only, no context available
+ return dummyFuncMap
+}
+
+func (r *pageRenderer) TemplateLookup(tmpl string, templateCtx context.Context) (TemplateExecutor, error) { //nolint:revive // we don't use ctx, only pass it to the template executor
+ return r.tmplRenderer.Templates().Executor(tmpl, r.funcMap(templateCtx))
+}
+
+func (r *pageRenderer) HTML(w io.Writer, status int, tplName TplName, data any, templateCtx context.Context) error { //nolint:revive // we don't use ctx, only pass it to the template executor
+ name := string(tplName)
+ if respWriter, ok := w.(http.ResponseWriter); ok {
+ if respWriter.Header().Get("Content-Type") == "" {
+ respWriter.Header().Set("Content-Type", "text/html; charset=utf-8")
+ }
+ respWriter.WriteHeader(status)
+ }
+ t, err := r.TemplateLookup(name, templateCtx)
+ if err != nil {
+ return texttemplate.ExecError{Name: name, Err: err}
+ }
+ return t.Execute(w, data)
+}
+
+var PageRenderer = sync.OnceValue(func() *pageRenderer {
+ rendererType := util.Iif(setting.IsProd, "static", "auto-reloading")
+ log.Debug("Creating %s HTML Renderer", rendererType)
+
+ assetFS := AssetFS()
+ tr := &tmplRender{
+ collectTemplateNames: func() ([]string, error) {
+ names, err := assetFS.ListAllFiles(".", true)
+ if err != nil {
+ return nil, err
+ }
+ names = slices.DeleteFunc(names, func(file string) bool {
+ return strings.HasPrefix(file, "mail/") || !strings.HasSuffix(file, ".tmpl")
+ })
+ for i, file := range names {
+ names[i] = strings.TrimSuffix(file, ".tmpl")
+ }
+ return names, nil
+ },
+ readTemplateContent: func(name string) ([]byte, error) {
+ return assetFS.ReadFile(name + ".tmpl")
+ },
+ }
+
+ pr := &pageRenderer{tmplRenderer: tr}
+ if err := tr.recompileTemplates(pr.funcMapDummy()); err != nil {
+ processStartupTemplateError(err)
+ }
+
+ if !setting.IsProd {
+ go AssetFS().WatchLocalChanges(graceful.GetManager().ShutdownContext(), func() {
+ if err := tr.recompileTemplates(pr.funcMapDummy()); err != nil {
+ log.Error("Template error: %v\n%s", err, log.Stack(2))
+ }
+ })
+ }
+ return pr
+})
+
+func PageRendererReload() error {
+ return PageRenderer().tmplRenderer.recompileTemplates(PageRenderer().funcMapDummy())
+}
diff --git a/modules/templates/scopedtmpl/scopedtmpl.go b/modules/templates/scopedtmpl/scopedtmpl.go
index 34e8b9ad70..de066124b9 100644
--- a/modules/templates/scopedtmpl/scopedtmpl.go
+++ b/modules/templates/scopedtmpl/scopedtmpl.go
@@ -61,6 +61,10 @@ func (t *ScopedTemplate) Freeze() {
t.all.Funcs(m)
}
+func (t *ScopedTemplate) HasTemplate(name string) bool {
+ return t.all.Lookup(name) != nil
+}
+
func (t *ScopedTemplate) Executor(name string, funcMap template.FuncMap) (TemplateExecutor, error) {
t.scopedMu.RLock()
scopedTmplSet, ok := t.scopedTemplateSets[name]
diff --git a/modules/templates/util_date.go b/modules/templates/util_date.go
index fc3f3f2339..1b36722c43 100644
--- a/modules/templates/util_date.go
+++ b/modules/templates/util_date.go
@@ -93,14 +93,14 @@ func dateTimeFormat(format string, datetime any) template.HTML {
attrs := []string{`weekday=""`, `year="numeric"`}
switch format {
case "short", "long": // date only
- attrs = append(attrs, `month="`+format+`"`, `day="numeric"`)
- return template.HTML(fmt.Sprintf(`<absolute-date %s date="%s">%s</absolute-date>`, strings.Join(attrs, " "), datetimeEscaped, textEscaped))
+ attrs = append(attrs, `threshold="P0Y"`, `month="`+format+`"`, `day="numeric"`, `prefix=""`)
case "full": // full date including time
attrs = append(attrs, `format="datetime"`, `month="short"`, `day="numeric"`, `hour="numeric"`, `minute="numeric"`, `second="numeric"`, `data-tooltip-content`, `data-tooltip-interactive="true"`)
- return template.HTML(fmt.Sprintf(`<relative-time %s datetime="%s">%s</relative-time>`, strings.Join(attrs, " "), datetimeEscaped, textEscaped))
default:
panic("Unsupported format " + format)
}
+
+ return template.HTML(fmt.Sprintf(`<relative-time %s datetime="%s">%s</relative-time>`, strings.Join(attrs, " "), datetimeEscaped, textEscaped))
}
func timeSinceTo(then any, now time.Time) template.HTML {
diff --git a/modules/templates/util_date_test.go b/modules/templates/util_date_test.go
index 2c1f2d242e..b74bbb0cee 100644
--- a/modules/templates/util_date_test.go
+++ b/modules/templates/util_date_test.go
@@ -32,10 +32,10 @@ func TestDateTime(t *testing.T) {
assert.EqualValues(t, "-", du.AbsoluteShort(timeutil.TimeStamp(0)))
actual := du.AbsoluteShort(refTime)
- assert.EqualValues(t, `2018-01-01`, actual)
+ assert.EqualValues(t, `2018-01-01`, actual)
actual = du.AbsoluteShort(refTimeStamp)
- assert.EqualValues(t, `2017-12-31`, actual)
+ assert.EqualValues(t, `2017-12-31`, actual)
actual = du.FullTime(refTimeStamp)
assert.EqualValues(t, `2017-12-31 19:00:00 -05:00`, actual)
diff --git a/modules/templates/util_render_comment.go b/modules/templates/util_render_comment.go
new file mode 100644
index 0000000000..73f36ad21c
--- /dev/null
+++ b/modules/templates/util_render_comment.go
@@ -0,0 +1,48 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package templates
+
+import (
+ "html/template"
+ "strings"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/htmlutil"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/svg"
+ "code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/util"
+)
+
+func commentTimelineEventIsWipToggle(c *issues_model.Comment) (isToggle, isWip bool) {
+ title1, ok1 := issues_model.CutWorkInProgressPrefix(c.OldTitle)
+ title2, ok2 := issues_model.CutWorkInProgressPrefix(c.NewTitle)
+ return ok1 != ok2 && strings.TrimSpace(title1) == strings.TrimSpace(title2), ok2
+}
+
+func (ut *RenderUtils) RenderTimelineEventBadge(c *issues_model.Comment) template.HTML {
+ if c.Type == issues_model.CommentTypeChangeTitle {
+ isToggle, isWip := commentTimelineEventIsWipToggle(c)
+ if !isToggle {
+ return svg.RenderHTML("octicon-pencil")
+ }
+ return util.Iif(isWip, svg.RenderHTML("octicon-git-pull-request-draft"), svg.RenderHTML("octicon-eye"))
+ }
+ setting.PanicInDevOrTesting("unimplemented comment type %v: %v", c.Type, c)
+ return htmlutil.HTMLFormat("(CommentType:%v)", c.Type)
+}
+
+func (ut *RenderUtils) RenderTimelineEventComment(c *issues_model.Comment, createdStr template.HTML) template.HTML {
+ if c.Type == issues_model.CommentTypeChangeTitle {
+ locale := ut.ctx.Value(translation.ContextKey).(translation.Locale)
+ isToggle, isWip := commentTimelineEventIsWipToggle(c)
+ if !isToggle {
+ return locale.Tr("repo.issues.change_title_at", ut.RenderEmoji(c.OldTitle), ut.RenderEmoji(c.NewTitle), createdStr)
+ }
+ trKey := util.Iif(isWip, "repo.pulls.marked_as_work_in_progress_at", "repo.pulls.marked_as_ready_for_review_at")
+ return locale.Tr(trKey, createdStr)
+ }
+ setting.PanicInDevOrTesting("unimplemented comment type %v: %v", c.Type, c)
+ return htmlutil.HTMLFormat("(Comment:%v,%v)", c.Type, c.Content)
+}
diff --git a/modules/templates/util_render_comment_test.go b/modules/templates/util_render_comment_test.go
new file mode 100644
index 0000000000..27e67bd354
--- /dev/null
+++ b/modules/templates/util_render_comment_test.go
@@ -0,0 +1,31 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package templates
+
+import (
+ "html/template"
+ "testing"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/reqctx"
+ "code.gitea.io/gitea/modules/translation"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRenderTimelineEventComment(t *testing.T) {
+ ctx := reqctx.NewRequestContextForTest(t.Context())
+ ctx.SetContextValue(translation.ContextKey, &translation.MockLocale{})
+ ut := &RenderUtils{ctx: ctx}
+ var createdStr template.HTML = "(created-at)"
+
+ c := &issues_model.Comment{Type: issues_model.CommentTypeChangeTitle, OldTitle: "WIP: title", NewTitle: "title"}
+ assert.Equal(t, "repo.pulls.marked_as_ready_for_review_at:(created-at)", string(ut.RenderTimelineEventComment(c, createdStr)))
+
+ c = &issues_model.Comment{Type: issues_model.CommentTypeChangeTitle, OldTitle: "title", NewTitle: "WIP: title"}
+ assert.Equal(t, "repo.pulls.marked_as_work_in_progress_at:(created-at)", string(ut.RenderTimelineEventComment(c, createdStr)))
+
+ c = &issues_model.Comment{Type: issues_model.CommentTypeChangeTitle, OldTitle: "title", NewTitle: "WIP: new title"}
+ assert.Equal(t, "repo.issues.change_title_at:title,WIP: new title,(created-at)", string(ut.RenderTimelineEventComment(c, createdStr)))
+}
diff --git a/modules/templates/vars/vars.go b/modules/templates/vars/vars.go
index 500078d4b8..60d11ea609 100644
--- a/modules/templates/vars/vars.go
+++ b/modules/templates/vars/vars.go
@@ -10,25 +10,6 @@ import (
"unicode/utf8"
)
-// ErrWrongSyntax represents a wrong syntax with a template
-type ErrWrongSyntax struct {
- Template string
-}
-
-func (err ErrWrongSyntax) Error() string {
- return "wrong syntax found in " + err.Template
-}
-
-// ErrVarMissing represents an error that no matched variable
-type ErrVarMissing struct {
- Template string
- Var string
-}
-
-func (err ErrVarMissing) Error() string {
- return fmt.Sprintf("the variable %s is missing for %s", err.Var, err.Template)
-}
-
// Expand replaces all variables like {var} by `vars` map, it always returns the expanded string regardless of errors
// if error occurs, the error part doesn't change and is returned as it is.
func Expand(template string, vars map[string]string) (string, error) {
@@ -66,14 +47,14 @@ func Expand(template string, vars map[string]string) (string, error) {
posBegin = posEnd
if part == "{}" || part[len(part)-1] != '}' {
// treat "{}" or "{..." as error
- err = ErrWrongSyntax{Template: template}
+ err = fmt.Errorf("wrong syntax found in %s", template)
buf.WriteString(part)
} else {
// now we get a valid key "{...}"
key := part[1 : len(part)-1]
keyFirst, _ := utf8.DecodeRuneInString(key)
if unicode.IsSpace(keyFirst) || unicode.IsPunct(keyFirst) || unicode.IsControl(keyFirst) {
- // the if key doesn't start with a letter, then we do not treat it as a var now
+ // if the key doesn't start with a letter, then we do not treat it as a var now
buf.WriteString(part)
} else {
// look up in the map
@@ -82,7 +63,7 @@ func Expand(template string, vars map[string]string) (string, error) {
} else {
// write the non-existing var as it is
buf.WriteString(part)
- err = ErrVarMissing{Template: template, Var: key}
+ err = fmt.Errorf("the variable %s is missing for %s", key, template)
}
}
}
diff --git a/modules/test/utils.go b/modules/test/utils.go
index 53c6a3ed52..4996f5a13b 100644
--- a/modules/test/utils.go
+++ b/modules/test/utils.go
@@ -4,11 +4,12 @@
package test
import (
+ "archive/tar"
+ "bytes"
+ "compress/gzip"
+ "io"
"net/http"
"net/http/httptest"
- "os"
- "path/filepath"
- "runtime"
"strings"
"code.gitea.io/gitea/modules/json"
@@ -56,18 +57,67 @@ func MockVariableValue[T any](p *T, v ...T) (reset func()) {
return func() { *p = old }
}
-// SetupGiteaRoot Sets GITEA_ROOT if it is not already set and returns the value
-func SetupGiteaRoot() string {
- giteaRoot := os.Getenv("GITEA_ROOT")
- if giteaRoot != "" {
- return giteaRoot
+func ReadAllTarGzContent(r io.Reader) (map[string]string, error) {
+ gzr, err := gzip.NewReader(r)
+ if err != nil {
+ return nil, err
}
- _, filename, _, _ := runtime.Caller(0)
- giteaRoot = filepath.Dir(filepath.Dir(filepath.Dir(filename)))
- fixturesDir := filepath.Join(giteaRoot, "models", "fixtures")
- if exist, _ := util.IsDir(fixturesDir); !exist {
- panic("fixtures directory not found: " + fixturesDir)
+
+ content := make(map[string]string)
+
+ tr := tar.NewReader(gzr)
+ for {
+ hd, err := tr.Next()
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ buf, err := io.ReadAll(tr)
+ if err != nil {
+ return nil, err
+ }
+
+ content[hd.Name] = string(buf)
}
- _ = os.Setenv("GITEA_ROOT", giteaRoot)
- return giteaRoot
+ return content, nil
+}
+
+func WriteTarArchive(files map[string]string) *bytes.Buffer {
+ return WriteTarCompression(func(w io.Writer) io.WriteCloser { return util.NopCloser{Writer: w} }, files)
+}
+
+func WriteTarCompression[F func(io.Writer) io.WriteCloser | func(io.Writer) (io.WriteCloser, error)](compression F, files map[string]string) *bytes.Buffer {
+ buf := &bytes.Buffer{}
+ var cw io.WriteCloser
+ switch compressFunc := any(compression).(type) {
+ case func(io.Writer) io.WriteCloser:
+ cw = compressFunc(buf)
+ case func(io.Writer) (io.WriteCloser, error):
+ cw, _ = compressFunc(buf)
+ }
+ tw := tar.NewWriter(cw)
+
+ for name, content := range files {
+ hdr := &tar.Header{
+ Name: name,
+ Mode: 0o600,
+ Size: int64(len(content)),
+ }
+ _ = tw.WriteHeader(hdr)
+ _, _ = tw.Write([]byte(content))
+ }
+ _ = tw.Close()
+ _ = cw.Close()
+ return buf
+}
+
+func CompressGzip(content string) *bytes.Buffer {
+ buf := &bytes.Buffer{}
+ cw := gzip.NewWriter(buf)
+ _, _ = cw.Write([]byte(content))
+ _ = cw.Close()
+ return buf
}
diff --git a/modules/test/utils_test.go b/modules/test/utils_test.go
deleted file mode 100644
index 0469ce97f2..0000000000
--- a/modules/test/utils_test.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2024 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package test
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestSetupGiteaRoot(t *testing.T) {
- t.Setenv("GITEA_ROOT", "test")
- assert.Equal(t, "test", SetupGiteaRoot())
- t.Setenv("GITEA_ROOT", "")
- assert.NotEqual(t, "test", SetupGiteaRoot())
-}
diff --git a/modules/testlogger/testlogger.go b/modules/testlogger/testlogger.go
index 60e281d403..39232a3eed 100644
--- a/modules/testlogger/testlogger.go
+++ b/modules/testlogger/testlogger.go
@@ -4,6 +4,7 @@
package testlogger
import (
+ "context"
"fmt"
"os"
"runtime"
@@ -108,30 +109,33 @@ func PrintCurrentTest(t testing.TB, skip ...int) func() {
actualSkip := util.OptionalArg(skip) + 1
_, filename, line, _ := runtime.Caller(actualSkip)
+ getRuntimeStackAll := func() string {
+ stack := make([]byte, 1024*1024)
+ n := runtime.Stack(stack, true)
+ return util.UnsafeBytesToString(stack[:n])
+ }
+
+ deferHasRun := false
+ t.Cleanup(func() {
+ if !deferHasRun {
+ Printf("!!! %s defer function hasn't been run but Cleanup is called, usually caused by panic", t.Name())
+ }
+ })
Printf("=== %s (%s:%d)\n", log.NewColoredValue(t.Name()), strings.TrimPrefix(filename, prefix), line)
WriterCloser.pushT(t)
timeoutChecker := time.AfterFunc(TestTimeout, func() {
- l := 128 * 1024
- var stack []byte
- for {
- stack = make([]byte, l)
- n := runtime.Stack(stack, true)
- if n <= l {
- stack = stack[:n]
- break
- }
- l = n
- }
- Printf("!!! %s ... timeout: %v ... stacktrace:\n%s\n\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestTimeout, string(stack))
+ Printf("!!! %s ... timeout: %v ... stacktrace:\n%s\n\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestTimeout, getRuntimeStackAll())
})
return func() {
+ deferHasRun = true
flushStart := time.Now()
slowFlushChecker := time.AfterFunc(TestSlowFlush, func() {
Printf("+++ %s ... still flushing after %v ...\n", log.NewColoredValue(t.Name(), log.Bold, log.FgRed), TestSlowFlush)
})
if err := queue.GetManager().FlushAll(t.Context(), -1); err != nil {
- t.Errorf("Flushing queues failed with error %v", err)
+ // if panic occurs, then the t.Context() is also cancelled ahead, so here it shows "context canceled" error.
+ t.Errorf("Flushing queues failed with error %q, cause %q", err, context.Cause(t.Context()))
}
slowFlushChecker.Stop()
timeoutChecker.Stop()
@@ -167,19 +171,9 @@ func Init() {
prefix = strings.TrimSuffix(filename, relFilePath)
log.RegisterEventWriter("test", newTestLoggerWriter)
-
- duration, err := time.ParseDuration(os.Getenv("GITEA_TEST_SLOW_RUN"))
- if err == nil && duration > 0 {
- TestSlowRun = duration
- }
-
- duration, err = time.ParseDuration(os.Getenv("GITEA_TEST_SLOW_FLUSH"))
- if err == nil && duration > 0 {
- TestSlowFlush = duration
- }
}
-func Fatalf(format string, args ...any) {
- Printf(format+"\n", args...)
- os.Exit(1)
+func Panicf(format string, args ...any) {
+ // don't call os.Exit, otherwise the "defer" functions won't be executed
+ panic(fmt.Sprintf(format, args...))
}
diff --git a/modules/timeutil/since_test.go b/modules/timeutil/since_test.go
index 40fefe8700..bf848bd05a 100644
--- a/modules/timeutil/since_test.go
+++ b/modules/timeutil/since_test.go
@@ -32,11 +32,7 @@ func TestMain(m *testing.M) {
// setup
translation.InitLocales(context.Background())
BaseDate = time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC)
-
- // run the tests
- retVal := m.Run()
-
- os.Exit(retVal)
+ os.Exit(m.Run())
}
func TestTimeSincePro(t *testing.T) {
diff --git a/modules/typesniffer/typesniffer.go b/modules/typesniffer/typesniffer.go
index 2e8d9c4a1e..0c4867d8f0 100644
--- a/modules/typesniffer/typesniffer.go
+++ b/modules/typesniffer/typesniffer.go
@@ -107,6 +107,17 @@ func detectFileTypeBox(data []byte) (brands []string, found bool) {
return brands, true
}
+func isEmbeddedOpenType(data []byte) bool {
+ // https://www.w3.org/submissions/EOT
+ if len(data) < 80 {
+ return false
+ }
+ version := binary.LittleEndian.Uint32(data[8:]) // Actually this standard is abandoned (for IE6-IE11 only), there are only 3 versions defined
+ magic := binary.LittleEndian.Uint16(data[34:36]) // MagicNumber: 0x504C ("LP")
+ reserved := data[64:80] // Reserved 1-4 (each: unsigned long)
+ return (version == 0x00010000 || version == 0x00020001 || version == 0x00020002) && magic == 0x504C && bytes.Count(reserved, []byte{0}) == len(reserved)
+}
+
// DetectContentType extends http.DetectContentType with more content types. Defaults to text/plain if input is empty.
func DetectContentType(data []byte) SniffedType {
if len(data) == 0 {
@@ -119,6 +130,18 @@ func DetectContentType(data []byte) SniffedType {
data = data[:SniffContentSize]
}
+ const typeMsFontObject = "application/vnd.ms-fontobject"
+ if ct == typeMsFontObject {
+ // Stupid Golang blindly detects any content with 34th-35th bytes being "LP" as font.
+ // If it is not really for ".eot" content, we try to detect it again by hiding the "LP", see the test for more details.
+ if isEmbeddedOpenType(data) {
+ return SniffedType{typeMsFontObject}
+ }
+ data = slices.Clone(data)
+ data[34] = 'l'
+ ct = http.DetectContentType(data)
+ }
+
vars := globalVars()
// SVG is unsupported by http.DetectContentType, https://github.com/golang/go/issues/15888
detectByHTML := strings.Contains(ct, "text/plain") || strings.Contains(ct, "text/html")
diff --git a/modules/typesniffer/typesniffer_test.go b/modules/typesniffer/typesniffer_test.go
index ad3f78afdc..17d67f41f7 100644
--- a/modules/typesniffer/typesniffer_test.go
+++ b/modules/typesniffer/typesniffer_test.go
@@ -6,6 +6,7 @@ package typesniffer
import (
"encoding/base64"
"encoding/hex"
+ "net/http"
"strings"
"testing"
@@ -154,3 +155,25 @@ func TestDetectContentTypeAvif(t *testing.T) {
st := DetectContentType(buf)
assert.Equal(t, MimeTypeImageAvif, st.contentType)
}
+
+func TestDetectContentTypeIncorrectFont(t *testing.T) {
+ s := "Stupid Golang keep detecting 34th LP as font"
+ // They don't want to have any improvement to it: https://github.com/golang/go/issues/77172
+ golangDetected := http.DetectContentType([]byte(s))
+ assert.Equal(t, "application/vnd.ms-fontobject", golangDetected)
+ // We have to make our patch to make it work correctly
+ ourDetected := DetectContentType([]byte(s))
+ assert.Equal(t, "text/plain; charset=utf-8", ourDetected.contentType)
+
+ // For binary content, ensure it still detects as font. The content is from "opensans-regular.eot"
+ b := []byte{
+ 0x3d, 0x30, 0x00, 0x00, 0x6b, 0x2f, 0x00, 0x00, 0x02, 0x00, 0x02, 0x00, 0x04, 0x00, 0x00, 0x00,
+ 0x02, 0x0b, 0x06, 0x06, 0x03, 0x05, 0x04, 0x02, 0x02, 0x04, 0x01, 0x00, 0x90, 0x01, 0x00, 0x00,
+ 0x04, 0x00, 0x4c, 0x50, 0xef, 0x02, 0x00, 0xe0, 0x5b, 0x20, 0x00, 0x40, 0x28, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x9f, 0x01, 0x00, 0x20, 0x00, 0x00, 0x00, 0x00, 0x63, 0xf4, 0x17, 0x14,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x12, 0x00, 0x4f, 0x00, 0x70, 0x00, 0x65, 0x00, 0x6e, 0x00, 0x20, 0x00, 0x53, 0x00,
+ }
+ assert.Equal(t, "application/vnd.ms-fontobject", http.DetectContentType(b))
+ assert.Equal(t, "application/vnd.ms-fontobject", DetectContentType(b).contentType)
+}
diff --git a/modules/util/buffer.go b/modules/util/buffer.go
new file mode 100644
index 0000000000..c5af750292
--- /dev/null
+++ b/modules/util/buffer.go
@@ -0,0 +1,22 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package util
+
+import "bytes"
+
+func BufioScannerSplit(b byte) func(data []byte, atEOF bool) (advance int, token []byte, err error) {
+ // reference: bufio.ScanLines
+ return func(data []byte, atEOF bool) (advance int, token []byte, err error) {
+ if atEOF && len(data) == 0 {
+ return 0, nil, nil
+ }
+ if i := bytes.IndexByte(data, b); i >= 0 {
+ return i + 1, data[0:i], nil
+ }
+ if atEOF {
+ return len(data), data, nil
+ }
+ return 0, nil, nil
+ }
+}
diff --git a/modules/util/util.go b/modules/util/util.go
index dd8e073888..d7702439d6 100644
--- a/modules/util/util.go
+++ b/modules/util/util.go
@@ -90,12 +90,12 @@ func CryptoRandomBytes(length int64) ([]byte, error) {
return buf, err
}
-// ToUpperASCII returns s with all ASCII letters mapped to their upper case.
-func ToUpperASCII(s string) string {
+// ToLowerASCII returns s with all ASCII letters mapped to their lower case.
+func ToLowerASCII(s string) string {
b := []byte(s)
for i, c := range b {
- if 'a' <= c && c <= 'z' {
- b[i] -= 'a' - 'A'
+ if 'A' <= c && c <= 'Z' {
+ b[i] += 'a' - 'A'
}
}
return string(b)
@@ -197,11 +197,6 @@ func ToFloat64(number any) (float64, error) {
return value, nil
}
-// ToPointer returns the pointer of a copy of any given value
-func ToPointer[T any](val T) *T {
- return &val
-}
-
// Iif is an "inline-if", it returns "trueVal" if "condition" is true, otherwise "falseVal"
func Iif[T any](condition bool, trueVal, falseVal T) T {
if condition {
diff --git a/modules/util/util_test.go b/modules/util/util_test.go
index fe4125cdb5..fd677f5c11 100644
--- a/modules/util/util_test.go
+++ b/modules/util/util_test.go
@@ -178,30 +178,26 @@ type StringTest struct {
in, out string
}
-var upperTests = []StringTest{
+var lowerTests = []StringTest{
{"", ""},
- {"ONLYUPPER", "ONLYUPPER"},
- {"abc", "ABC"},
- {"AbC123", "ABC123"},
- {"azAZ09_", "AZAZ09_"},
- {"longStrinGwitHmixofsmaLLandcAps", "LONGSTRINGWITHMIXOFSMALLANDCAPS"},
- {"long\u0250string\u0250with\u0250nonascii\u2C6Fchars", "LONG\u0250STRING\u0250WITH\u0250NONASCII\u2C6FCHARS"},
- {"\u0250\u0250\u0250\u0250\u0250", "\u0250\u0250\u0250\u0250\u0250"},
- {"a\u0080\U0010FFFF", "A\u0080\U0010FFFF"},
- {"lél", "LéL"},
+ {"ABC", "abc"},
+ {"AbC123_", "abc123_"},
+ {"LONG\u0250string\u0250WITH\u0250non-ascii\u2C6FCHARS\u0080\uFFFF", "long\u0250string\u0250with\u0250non-ascii\u2C6Fchars\u0080\uFFFF"},
+ {"lél", "lél"},
+ {"LÉL", "lÉl"},
}
-func TestToUpperASCII(t *testing.T) {
- for _, tc := range upperTests {
- assert.Equal(t, ToUpperASCII(tc.in), tc.out)
+func TestToLowerASCII(t *testing.T) {
+ for _, tc := range lowerTests {
+ assert.Equal(t, ToLowerASCII(tc.in), tc.out)
}
}
-func BenchmarkToUpper(b *testing.B) {
- for _, tc := range upperTests {
+func BenchmarkToLower(b *testing.B) {
+ for _, tc := range lowerTests {
b.Run(tc.in, func(b *testing.B) {
for b.Loop() {
- ToUpperASCII(tc.in)
+ ToLowerASCII(tc.in)
}
})
}
@@ -212,15 +208,6 @@ func TestToTitleCase(t *testing.T) {
assert.Equal(t, `Foo Bar Baz`, ToTitleCase(`FOO BAR BAZ`))
}
-func TestToPointer(t *testing.T) {
- assert.Equal(t, "abc", *ToPointer("abc"))
- assert.Equal(t, 123, *ToPointer(123))
- abc := "abc"
- assert.NotSame(t, &abc, ToPointer(abc))
- val123 := 123
- assert.NotSame(t, &val123, ToPointer(val123))
-}
-
func TestReserveLineBreakForTextarea(t *testing.T) {
assert.Equal(t, "test\ndata", ReserveLineBreakForTextarea("test\r\ndata"))
assert.Equal(t, "test\ndata\n", ReserveLineBreakForTextarea("test\r\ndata\r\n"))
diff --git a/modules/validation/binding.go b/modules/validation/binding.go
index 3ecc532613..3f40e5ec97 100644
--- a/modules/validation/binding.go
+++ b/modules/validation/binding.go
@@ -219,8 +219,8 @@ func portOnly(hostport string) string {
if !ok {
return ""
}
- if i := strings.Index(hostport, "]:"); i != -1 {
- return hostport[i+len("]:"):]
+ if _, after2, ok2 := strings.Cut(hostport, "]:"); ok2 {
+ return after2
}
if strings.Contains(hostport, "]") {
return ""
diff --git a/modules/web/middleware/cookie.go b/modules/web/middleware/cookie.go
index ad9aee6478..f98aceba10 100644
--- a/modules/web/middleware/cookie.go
+++ b/modules/web/middleware/cookie.go
@@ -14,14 +14,24 @@ import (
"code.gitea.io/gitea/modules/util"
)
+const cookieRedirectTo = "redirect_to"
+
+func GetRedirectToCookie(req *http.Request) string {
+ return GetSiteCookie(req, cookieRedirectTo)
+}
+
// SetRedirectToCookie convenience function to set the RedirectTo cookie consistently
func SetRedirectToCookie(resp http.ResponseWriter, value string) {
- SetSiteCookie(resp, "redirect_to", value, 0)
+ SetSiteCookie(resp, cookieRedirectTo, value, 0)
}
// DeleteRedirectToCookie convenience function to delete most cookies consistently
func DeleteRedirectToCookie(resp http.ResponseWriter) {
- SetSiteCookie(resp, "redirect_to", "", -1)
+ SetSiteCookie(resp, cookieRedirectTo, "", -1)
+}
+
+func RedirectLinkUserLogin(req *http.Request) string {
+ return setting.AppSubURL + "/user/login?redirect_to=" + url.QueryEscape(setting.AppSubURL+req.URL.RequestURI())
}
// GetSiteCookie returns given cookie value from request header.
diff --git a/modules/web/router_test.go b/modules/web/router_test.go
index f216aa6180..ab5fbb502c 100644
--- a/modules/web/router_test.go
+++ b/modules/web/router_test.go
@@ -69,7 +69,7 @@ func TestRouter(t *testing.T) {
chiCtx := chi.RouteContext(req.Context())
res.method = req.Method
res.pathParams = chiURLParamsToMap(chiCtx)
- res.chiRoutePattern = util.ToPointer(chiCtx.RoutePattern())
+ res.chiRoutePattern = new(chiCtx.RoutePattern())
if mark != "" {
res.handlerMarks = append(res.handlerMarks, mark)
}
@@ -139,7 +139,7 @@ func TestRouter(t *testing.T) {
testRoute(t, "GET /the-user/the-repo/other", resultStruct{
method: "GET",
handlerMarks: []string{"not-found:/"},
- chiRoutePattern: util.ToPointer(""),
+ chiRoutePattern: new(""),
})
testRoute(t, "GET /the-user/the-repo/pulls", resultStruct{
method: "GET",
@@ -150,7 +150,7 @@ func TestRouter(t *testing.T) {
method: "GET",
pathParams: map[string]string{"username": "the-user", "reponame": "the-repo", "type": "issues", "index": "123"},
handlerMarks: []string{"view-issue"},
- chiRoutePattern: util.ToPointer("/{username}/{reponame}/{type:issues|pulls}/{index}"),
+ chiRoutePattern: new("/{username}/{reponame}/{type:issues|pulls}/{index}"),
})
testRoute(t, "GET /the-user/the-repo/issues/123?stop=hijack", resultStruct{
method: "GET",
@@ -228,7 +228,7 @@ func TestRouter(t *testing.T) {
method: "GET",
pathParams: map[string]string{"username": "the-user", "reponame": "the-repo", "*": "d1/d2/fn", "dir": "d1/d2", "file": "fn"},
handlerMarks: []string{"s1", "s2", "s3"},
- chiRoutePattern: util.ToPointer("/api/v1/repos/{username}/{reponame}/branches//"),
+ chiRoutePattern: new("/api/v1/repos/{username}/{reponame}/branches//"),
})
})
}
diff --git a/modules/webhook/type.go b/modules/webhook/type.go
index 89c6a4bfe5..18a4086710 100644
--- a/modules/webhook/type.go
+++ b/modules/webhook/type.go
@@ -98,6 +98,20 @@ func (h HookEventType) IsPullRequest() bool {
return h.Event() == "pull_request"
}
+// IsPullRequestReview returns true for pull request review events
+// (approved, rejected, comment). These events use the same PullRequestPayload
+// as regular pull_request events.
+func (h HookEventType) IsPullRequestReview() bool {
+ switch h {
+ case HookEventPullRequestReviewApproved,
+ HookEventPullRequestReviewRejected,
+ HookEventPullRequestReviewComment:
+ return true
+ default:
+ return false
+ }
+}
+
// HookType is the type of a webhook
type HookType = string
diff --git a/options/fileicon/material-icon-rules.json b/options/fileicon/material-icon-rules.json
index 6b17e5be67..8ee5dde0b2 100644
--- a/options/fileicon/material-icon-rules.json
+++ b/options/fileicon/material-icon-rules.json
@@ -460,6 +460,22 @@
".blog": "folder-docs",
"_blog": "folder-docs",
"__blog__": "folder-docs",
+ "knowledge": "folder-docs",
+ ".knowledge": "folder-docs",
+ "_knowledge": "folder-docs",
+ "__knowledge__": "folder-docs",
+ "diary": "folder-docs",
+ ".diary": "folder-docs",
+ "_diary": "folder-docs",
+ "__diary__": "folder-docs",
+ "note": "folder-docs",
+ ".note": "folder-docs",
+ "_note": "folder-docs",
+ "__note__": "folder-docs",
+ "notes": "folder-docs",
+ ".notes": "folder-docs",
+ "_notes": "folder-docs",
+ "__notes__": "folder-docs",
"github/workflows": "folder-gh-workflows",
".github/workflows": "folder-gh-workflows",
"_github/workflows": "folder-gh-workflows",
@@ -916,6 +932,14 @@
".sql": "folder-database",
"_sql": "folder-database",
"__sql__": "folder-database",
+ "migrations": "folder-migrations",
+ ".migrations": "folder-migrations",
+ "_migrations": "folder-migrations",
+ "__migrations__": "folder-migrations",
+ "migration": "folder-migrations",
+ ".migration": "folder-migrations",
+ "_migration": "folder-migrations",
+ "__migration__": "folder-migrations",
"log": "folder-log",
".log": "folder-log",
"_log": "folder-log",
@@ -1008,6 +1032,14 @@
".recordings": "folder-audio",
"_recordings": "folder-audio",
"__recordings__": "folder-audio",
+ "playlist": "folder-audio",
+ ".playlist": "folder-audio",
+ "_playlist": "folder-audio",
+ "__playlist__": "folder-audio",
+ "playlists": "folder-audio",
+ ".playlists": "folder-audio",
+ "_playlists": "folder-audio",
+ "__playlists__": "folder-audio",
"vid": "folder-video",
".vid": "folder-video",
"_vid": "folder-video",
@@ -1544,6 +1576,22 @@
".backends": "folder-server",
"_backends": "folder-server",
"__backends__": "folder-server",
+ "inventory": "folder-server",
+ ".inventory": "folder-server",
+ "_inventory": "folder-server",
+ "__inventory__": "folder-server",
+ "inventories": "folder-server",
+ ".inventories": "folder-server",
+ "_inventories": "folder-server",
+ "__inventories__": "folder-server",
+ "infrastructure": "folder-server",
+ ".infrastructure": "folder-server",
+ "_infrastructure": "folder-server",
+ "__infrastructure__": "folder-server",
+ "infra": "folder-server",
+ ".infra": "folder-server",
+ "_infra": "folder-server",
+ "__infra__": "folder-server",
"client": "folder-client",
".client": "folder-client",
"_client": "folder-client",
@@ -1992,6 +2040,14 @@
".calculations": "folder-functions",
"_calculations": "folder-functions",
"__calculations__": "folder-functions",
+ "composable": "folder-functions",
+ ".composable": "folder-functions",
+ "_composable": "folder-functions",
+ "__composable__": "folder-functions",
+ "composables": "folder-functions",
+ ".composables": "folder-functions",
+ "_composables": "folder-functions",
+ "__composables__": "folder-functions",
"generator": "folder-generator",
".generator": "folder-generator",
"_generator": "folder-generator",
@@ -2936,6 +2992,14 @@
".projects": "folder-project",
"_projects": "folder-project",
"__projects__": "folder-project",
+ "proj": "folder-project",
+ ".proj": "folder-project",
+ "_proj": "folder-project",
+ "__proj__": "folder-project",
+ "projs": "folder-project",
+ ".projs": "folder-project",
+ "_projs": "folder-project",
+ "__projs__": "folder-project",
"prompt": "folder-prompts",
".prompt": "folder-prompts",
"_prompt": "folder-prompts",
@@ -3431,6 +3495,10 @@
"..cursor": "folder-cursor",
"_.cursor": "folder-cursor",
"__.cursor__": "folder-cursor",
+ ".gemini": "folder-gemini-ai",
+ "..gemini": "folder-gemini-ai",
+ "_.gemini": "folder-gemini-ai",
+ "__.gemini__": "folder-gemini-ai",
"input": "folder-input",
".input": "folder-input",
"_input": "folder-input",
@@ -3447,6 +3515,14 @@
".in": "folder-input",
"_in": "folder-input",
"__in__": "folder-input",
+ "salt": "folder-salt",
+ ".salt": "folder-salt",
+ "_salt": "folder-salt",
+ "__salt__": "folder-salt",
+ "saltstack": "folder-salt",
+ ".saltstack": "folder-salt",
+ "_saltstack": "folder-salt",
+ "__saltstack__": "folder-salt",
"simulations": "folder-simulations",
".simulations": "folder-simulations",
"_simulations": "folder-simulations",
@@ -3961,6 +4037,22 @@
".blog": "folder-docs-open",
"_blog": "folder-docs-open",
"__blog__": "folder-docs-open",
+ "knowledge": "folder-docs-open",
+ ".knowledge": "folder-docs-open",
+ "_knowledge": "folder-docs-open",
+ "__knowledge__": "folder-docs-open",
+ "diary": "folder-docs-open",
+ ".diary": "folder-docs-open",
+ "_diary": "folder-docs-open",
+ "__diary__": "folder-docs-open",
+ "note": "folder-docs-open",
+ ".note": "folder-docs-open",
+ "_note": "folder-docs-open",
+ "__note__": "folder-docs-open",
+ "notes": "folder-docs-open",
+ ".notes": "folder-docs-open",
+ "_notes": "folder-docs-open",
+ "__notes__": "folder-docs-open",
"github/workflows": "folder-gh-workflows-open",
".github/workflows": "folder-gh-workflows-open",
"_github/workflows": "folder-gh-workflows-open",
@@ -4417,6 +4509,14 @@
".sql": "folder-database-open",
"_sql": "folder-database-open",
"__sql__": "folder-database-open",
+ "migrations": "folder-migrations-open",
+ ".migrations": "folder-migrations-open",
+ "_migrations": "folder-migrations-open",
+ "__migrations__": "folder-migrations-open",
+ "migration": "folder-migrations-open",
+ ".migration": "folder-migrations-open",
+ "_migration": "folder-migrations-open",
+ "__migration__": "folder-migrations-open",
"log": "folder-log-open",
".log": "folder-log-open",
"_log": "folder-log-open",
@@ -4509,6 +4609,14 @@
".recordings": "folder-audio-open",
"_recordings": "folder-audio-open",
"__recordings__": "folder-audio-open",
+ "playlist": "folder-audio-open",
+ ".playlist": "folder-audio-open",
+ "_playlist": "folder-audio-open",
+ "__playlist__": "folder-audio-open",
+ "playlists": "folder-audio-open",
+ ".playlists": "folder-audio-open",
+ "_playlists": "folder-audio-open",
+ "__playlists__": "folder-audio-open",
"vid": "folder-video-open",
".vid": "folder-video-open",
"_vid": "folder-video-open",
@@ -5045,6 +5153,22 @@
".backends": "folder-server-open",
"_backends": "folder-server-open",
"__backends__": "folder-server-open",
+ "inventory": "folder-server-open",
+ ".inventory": "folder-server-open",
+ "_inventory": "folder-server-open",
+ "__inventory__": "folder-server-open",
+ "inventories": "folder-server-open",
+ ".inventories": "folder-server-open",
+ "_inventories": "folder-server-open",
+ "__inventories__": "folder-server-open",
+ "infrastructure": "folder-server-open",
+ ".infrastructure": "folder-server-open",
+ "_infrastructure": "folder-server-open",
+ "__infrastructure__": "folder-server-open",
+ "infra": "folder-server-open",
+ ".infra": "folder-server-open",
+ "_infra": "folder-server-open",
+ "__infra__": "folder-server-open",
"client": "folder-client-open",
".client": "folder-client-open",
"_client": "folder-client-open",
@@ -5493,6 +5617,14 @@
".calculations": "folder-functions-open",
"_calculations": "folder-functions-open",
"__calculations__": "folder-functions-open",
+ "composable": "folder-functions-open",
+ ".composable": "folder-functions-open",
+ "_composable": "folder-functions-open",
+ "__composable__": "folder-functions-open",
+ "composables": "folder-functions-open",
+ ".composables": "folder-functions-open",
+ "_composables": "folder-functions-open",
+ "__composables__": "folder-functions-open",
"generator": "folder-generator-open",
".generator": "folder-generator-open",
"_generator": "folder-generator-open",
@@ -6437,6 +6569,14 @@
".projects": "folder-project-open",
"_projects": "folder-project-open",
"__projects__": "folder-project-open",
+ "proj": "folder-project-open",
+ ".proj": "folder-project-open",
+ "_proj": "folder-project-open",
+ "__proj__": "folder-project-open",
+ "projs": "folder-project-open",
+ ".projs": "folder-project-open",
+ "_projs": "folder-project-open",
+ "__projs__": "folder-project-open",
"prompt": "folder-prompts-open",
".prompt": "folder-prompts-open",
"_prompt": "folder-prompts-open",
@@ -6932,6 +7072,10 @@
"..cursor": "folder-cursor-open",
"_.cursor": "folder-cursor-open",
"__.cursor__": "folder-cursor-open",
+ ".gemini": "folder-gemini-ai-open",
+ "..gemini": "folder-gemini-ai-open",
+ "_.gemini": "folder-gemini-ai-open",
+ "__.gemini__": "folder-gemini-ai-open",
"input": "folder-input-open",
".input": "folder-input-open",
"_input": "folder-input-open",
@@ -6948,6 +7092,14 @@
".in": "folder-input-open",
"_in": "folder-input-open",
"__in__": "folder-input-open",
+ "salt": "folder-salt-open",
+ ".salt": "folder-salt-open",
+ "_salt": "folder-salt-open",
+ "__salt__": "folder-salt-open",
+ "saltstack": "folder-salt-open",
+ ".saltstack": "folder-salt-open",
+ "_saltstack": "folder-salt-open",
+ "__saltstack__": "folder-salt-open",
"simulations": "folder-simulations-open",
".simulations": "folder-simulations-open",
"_simulations": "folder-simulations-open",
@@ -7213,6 +7365,7 @@
"csproj": "visualstudio",
"ruleset": "visualstudio",
"sln": "visualstudio",
+ "slnf": "visualstudio",
"slnx": "visualstudio",
"suo": "visualstudio",
"vb": "visualstudio",
@@ -7715,6 +7868,7 @@
"tfvars": "terraform",
"tfstate": "terraform",
"tfbackend": "terraform",
+ "terraformignore": "terraform",
"tofu": "opentofu",
"blade.php": "laravel",
"inky.php": "laravel",
@@ -8162,6 +8316,7 @@
"toc": "toc",
"cue": "cue",
"lean": "lean",
+ "sls": "salt",
"cljx": "clojure",
"clojure": "clojure",
"edn": "clojure",
@@ -8453,6 +8608,7 @@
".ruff.toml": "ruff",
"uv.toml": "uv",
".uv.toml": "uv",
+ "uv.lock": "uv",
"sconstruct": "scons",
"sconscript": "scons",
"scsub": "scons",
@@ -10150,6 +10306,30 @@
"esbuild.config.ts": "esbuild",
"esbuild.config.mts": "esbuild",
"esbuild.config.cts": "esbuild",
+ "esbuild.dev.js": "esbuild",
+ "esbuild.dev.mjs": "esbuild",
+ "esbuild.dev.cjs": "esbuild",
+ "esbuild.dev.ts": "esbuild",
+ "esbuild.dev.mts": "esbuild",
+ "esbuild.dev.cts": "esbuild",
+ "esbuild.stage.js": "esbuild",
+ "esbuild.stage.mjs": "esbuild",
+ "esbuild.stage.cjs": "esbuild",
+ "esbuild.stage.ts": "esbuild",
+ "esbuild.stage.mts": "esbuild",
+ "esbuild.stage.cts": "esbuild",
+ "esbuild.prod.js": "esbuild",
+ "esbuild.prod.mjs": "esbuild",
+ "esbuild.prod.cjs": "esbuild",
+ "esbuild.prod.ts": "esbuild",
+ "esbuild.prod.mts": "esbuild",
+ "esbuild.prod.cts": "esbuild",
+ "esbuild.test.js": "esbuild",
+ "esbuild.test.mjs": "esbuild",
+ "esbuild.test.cjs": "esbuild",
+ "esbuild.test.ts": "esbuild",
+ "esbuild.test.mts": "esbuild",
+ "esbuild.test.cts": "esbuild",
"drizzle.config.ts": "drizzle",
"drizzle.config.dev.ts": "drizzle",
"drizzle.config.prod.ts": "drizzle",
diff --git a/options/fileicon/material-icon-svgs.json b/options/fileicon/material-icon-svgs.json
index f5254099ad..6713c4afa9 100644
--- a/options/fileicon/material-icon-svgs.json
+++ b/options/fileicon/material-icon-svgs.json
@@ -67,7 +67,7 @@
"biome": "",
"bitbucket": "",
"bithound": "",
- "blender": "",
+ "blender": "",
"blink": "",
"blink_light": "",
"blitz": "",
@@ -102,7 +102,7 @@
"circleci_light": "",
"citation": "",
"clangd": "",
- "claude": "",
+ "claude": "",
"cline": "",
"clojure": "",
"cloudfoundry": "",
@@ -151,8 +151,8 @@
"database": "",
"deepsource": "",
"denizenscript": "",
- "deno": "",
- "deno_light": "",
+ "deno": "",
+ "deno_light": "",
"dependabot": "",
"dependencies-update": "",
"dhall": "",
@@ -237,8 +237,8 @@
"folder-bibliography": "",
"folder-bicep-open": "",
"folder-bicep": "",
- "folder-blender-open": "",
- "folder-blender": "",
+ "folder-blender-open": "",
+ "folder-blender": "",
"folder-bloc-open": "",
"folder-bloc": "",
"folder-bower-open": "",
@@ -255,8 +255,8 @@
"folder-circleci": "",
"folder-class-open": "",
"folder-class": "",
- "folder-claude-open": "",
- "folder-claude": "",
+ "folder-claude-open": "",
+ "folder-claude": "",
"folder-client-open": "",
"folder-client": "",
"folder-cline-open": "",
@@ -379,6 +379,8 @@
"folder-functions": "",
"folder-gamemaker-open": "",
"folder-gamemaker": "",
+ "folder-gemini-ai-open": "",
+ "folder-gemini-ai": "",
"folder-generator-open": "",
"folder-generator": "",
"folder-gh-workflows-open": "",
@@ -495,6 +497,8 @@
"folder-metro": "",
"folder-middleware-open": "",
"folder-middleware": "",
+ "folder-migrations-open": "",
+ "folder-migrations": "",
"folder-mjml-open": "",
"folder-mjml": "",
"folder-mobile-open": "",
@@ -604,6 +608,8 @@
"folder-rules": "",
"folder-rust-open": "",
"folder-rust": "",
+ "folder-salt-open": "",
+ "folder-salt": "",
"folder-sandbox-open": "",
"folder-sandbox": "",
"folder-sass-open": "",
@@ -1024,6 +1030,7 @@
"ruff": "",
"rust": "",
"salesforce": "",
+ "salt": "",
"san": "",
"sas": "",
"sass": "",
diff --git a/options/license/Unlicense b/options/license/Unlicense
index cde4ac6981..efb9808816 100644
--- a/options/license/Unlicense
+++ b/options/license/Unlicense
@@ -1,10 +1,24 @@
This is free and unencumbered software released into the public domain.
-Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means.
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
-In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and
-successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law.
+In jurisdictions that recognize copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
-For more information, please refer to
+For more information, please refer to
diff --git a/options/locale/locale_en-US.json b/options/locale/locale_en-US.json
index 15fdda738e..0a91a59579 100644
--- a/options/locale/locale_en-US.json
+++ b/options/locale/locale_en-US.json
@@ -148,6 +148,13 @@
"filter.private": "Private",
"no_results_found": "No results found.",
"internal_error_skipped": "Internal error occurred but is skipped: %s",
+ "characters_spaces": "Spaces",
+ "characters_tabs": "Tabs",
+ "text_indent_style": "Indent style",
+ "text_indent_size": "Indent size",
+ "text_line_wrap": "Wrap",
+ "text_line_nowrap": "No wrap",
+ "text_line_wrap_mode": "Line wrap mode",
"search.search": "Search…",
"search.type_tooltip": "Search type",
"search.fuzzy": "Fuzzy",
@@ -751,6 +758,7 @@
"settings.add_email": "Add Email Address",
"settings.add_openid": "Add OpenID URI",
"settings.add_email_confirmation_sent": "A confirmation email has been sent to \"%s\". Please check your inbox within the next %s to confirm your email address.",
+ "settings.email_primary_not_found": "The selected email address could not be found.",
"settings.add_email_success": "The new email address has been added.",
"settings.email_preference_set_success": "Email preference has been set successfully.",
"settings.add_openid_success": "The new OpenID address has been added.",
@@ -977,6 +985,7 @@
"repo.fork.blocked_user": "Cannot fork the repository because you are blocked by the repository owner.",
"repo.use_template": "Use this template",
"repo.open_with_editor": "Open with %s",
+ "repo.download_directory_as": "Download directory as %s",
"repo.download_zip": "Download ZIP",
"repo.download_tar": "Download TAR.GZ",
"repo.download_bundle": "Download BUNDLE",
@@ -1489,6 +1498,7 @@
"repo.issues.filter_sort.feweststars": "Fewest stars",
"repo.issues.filter_sort.mostforks": "Most forks",
"repo.issues.filter_sort.fewestforks": "Fewest forks",
+ "repo.issues.quick_goto": "Go to issue",
"repo.issues.action_open": "Open",
"repo.issues.action_close": "Close",
"repo.issues.action_label": "Label",
@@ -1701,6 +1711,7 @@
"repo.issues.review.content.empty": "You need to leave a comment indicating the requested change(s).",
"repo.issues.review.reject": "requested changes %s",
"repo.issues.review.wait": "was requested for review %s",
+ "repo.issues.review.codeowners_rules": "CODEOWNERS rules",
"repo.issues.review.add_review_request": "requested review from %s %s",
"repo.issues.review.remove_review_request": "removed review request for %s %s",
"repo.issues.review.remove_review_request_self": "declined to review %s",
@@ -1736,8 +1747,11 @@
"repo.issues.reference_link": "Reference: %s",
"repo.compare.compare_base": "base",
"repo.compare.compare_head": "compare",
+ "repo.compare.title": "Comparing changes",
+ "repo.compare.description": "Choose two branches or tags to see what’s changed or to start a new pull request.",
"repo.pulls.desc": "Enable pull requests and code reviews.",
"repo.pulls.new": "New Pull Request",
+ "repo.pulls.new.description": "Discuss and review the changes in this comparison with others.",
"repo.pulls.new.blocked_user": "Cannot create pull request because you are blocked by the repository owner.",
"repo.pulls.new.must_collaborator": "You must be a collaborator to create pull request.",
"repo.pulls.new.already_existed": "A pull request between these branches already exists",
@@ -1747,7 +1761,6 @@
"repo.pulls.allow_edits_from_maintainers": "Allow edits from maintainers",
"repo.pulls.allow_edits_from_maintainers_desc": "Users with write access to the base branch can also push to this branch",
"repo.pulls.allow_edits_from_maintainers_err": "Updating failed",
- "repo.pulls.compare_changes_desc": "Select the branch to merge into and the branch to pull from.",
"repo.pulls.has_viewed_file": "Viewed",
"repo.pulls.has_changed_since_last_review": "Changed since your last review",
"repo.pulls.viewed_files_label": "%[1]d / %[2]d files viewed",
@@ -1773,6 +1786,8 @@
"repo.pulls.title_desc": "wants to merge %[1]d commits from %[2]s into %[3]s",
"repo.pulls.merged_title_desc": "merged %[1]d commits from %[2]s into %[3]s %[4]s",
"repo.pulls.change_target_branch_at": "changed target branch from %s to %s %s",
+ "repo.pulls.marked_as_work_in_progress_at": "marked the pull request as work in progress %s",
+ "repo.pulls.marked_as_ready_for_review_at": "marked the pull request as ready for review %s",
"repo.pulls.tab_conversation": "Conversation",
"repo.pulls.tab_commits": "Commits",
"repo.pulls.tab_files": "Files Changed",
@@ -1791,6 +1806,7 @@
"repo.pulls.remove_prefix": "Remove %s prefix",
"repo.pulls.data_broken": "This pull request is broken due to missing fork information.",
"repo.pulls.files_conflicted": "This pull request has changes conflicting with the target branch.",
+ "repo.pulls.files_conflicted_no_listed_files": "(No conflicting files listed)",
"repo.pulls.is_checking": "Checking for merge conflicts…",
"repo.pulls.is_ancestor": "This branch is already included in the target branch. There is nothing to merge.",
"repo.pulls.is_empty": "The changes on this branch are already on the target branch. This will be an empty commit.",
@@ -1845,7 +1861,8 @@
"repo.pulls.status_checking": "Some checks are pending",
"repo.pulls.status_checks_success": "All checks were successful",
"repo.pulls.status_checks_warning": "Some checks reported warnings",
- "repo.pulls.status_checks_failure": "Some checks failed",
+ "repo.pulls.status_checks_failure_required": "Some required checks failed",
+ "repo.pulls.status_checks_failure_optional": "Some optional checks failed",
"repo.pulls.status_checks_error": "Some checks reported errors",
"repo.pulls.status_checks_requested": "Required",
"repo.pulls.status_checks_details": "Details",
@@ -2115,6 +2132,8 @@
"repo.settings.pulls.ignore_whitespace": "Ignore Whitespace for Conflicts",
"repo.settings.pulls.enable_autodetect_manual_merge": "Enable autodetect manual merge (Note: In some special cases, misjudgments can occur)",
"repo.settings.pulls.allow_rebase_update": "Enable updating pull request branch by rebase",
+ "repo.settings.pulls.default_target_branch": "Default target branch for new pull requests",
+ "repo.settings.pulls.default_target_branch_default": "Default branch (%s)",
"repo.settings.pulls.default_delete_branch_after_merge": "Delete pull request branch after merge by default",
"repo.settings.pulls.default_allow_edits_from_maintainers": "Allow edits from maintainers by default",
"repo.settings.releases_desc": "Enable Repository Releases",
@@ -2427,9 +2446,10 @@
"repo.settings.block_outdated_branch_desc": "Merging will not be possible when head branch is behind base branch.",
"repo.settings.block_admin_merge_override": "Administrators must follow branch protection rules",
"repo.settings.block_admin_merge_override_desc": "Administrators must follow branch protection rules and cannot circumvent it.",
- "repo.settings.default_branch_desc": "Select a default repository branch for pull requests and code commits:",
+ "repo.settings.default_branch_desc": "Select a default branch for code commits.",
+ "repo.settings.default_target_branch_desc": "Pull requests can use a different default target branch if one is set in the Pull Requests section of the repository's advanced settings.",
"repo.settings.merge_style_desc": "Merge Styles",
- "repo.settings.default_merge_style_desc": "Default Merge Style",
+ "repo.settings.default_merge_style_desc": "Default merge style",
"repo.settings.choose_branch": "Choose a branch…",
"repo.settings.no_protected_branch": "There are no protected branches.",
"repo.settings.edit_protected_branch": "Edit",
@@ -2540,8 +2560,8 @@
"repo.diff.too_many_files": "Some files were not shown because too many files have changed in this diff",
"repo.diff.show_more": "Show More",
"repo.diff.load": "Load Diff",
- "repo.diff.generated": "generated",
- "repo.diff.vendored": "vendored",
+ "repo.diff.generated": "Generated",
+ "repo.diff.vendored": "Vendored",
"repo.diff.comment.add_line_comment": "Add line comment",
"repo.diff.comment.placeholder": "Leave a comment",
"repo.diff.comment.add_single_comment": "Add single comment",
@@ -2641,7 +2661,7 @@
"repo.branch.restore_success": "Branch \"%s\" has been restored.",
"repo.branch.restore_failed": "Failed to restore branch \"%s\".",
"repo.branch.protected_deletion_failed": "Branch \"%s\" is protected. It cannot be deleted.",
- "repo.branch.default_deletion_failed": "Branch \"%s\" is the default branch. It cannot be deleted.",
+ "repo.branch.default_deletion_failed": "Branch \"%s\" is the default or pull request target branch. It cannot be deleted.",
"repo.branch.default_branch_not_exist": "Default branch \"%s\" does not exist.",
"repo.branch.restore": "Restore Branch \"%s\"",
"repo.branch.download": "Download Branch \"%s\"",
@@ -2658,7 +2678,7 @@
"repo.branch.new_branch_from": "Create new branch from \"%s\"",
"repo.branch.renamed": "Branch %s was renamed to %s.",
"repo.branch.rename_default_or_protected_branch_error": "Only admins can rename default or protected branches.",
- "repo.branch.rename_protected_branch_failed": "This branch is protected by glob-based protection rules.",
+ "repo.branch.rename_protected_branch_failed": "Failed to rename branch due to branch protection rules.",
"repo.branch.commits_divergence_from": "Commit divergence: %[1]d behind and %[2]d ahead of %[3]s",
"repo.branch.commits_no_divergence": "The same as branch %[1]s",
"repo.tag.create_tag": "Create tag %s",
@@ -3067,6 +3087,7 @@
"admin.auths.attribute_mail": "Email Attribute",
"admin.auths.attribute_ssh_public_key": "Public SSH Key Attribute",
"admin.auths.attribute_avatar": "Avatar Attribute",
+ "admin.auths.ssh_keys_are_verified": "SSH keys in LDAP are considered as verified",
"admin.auths.attributes_in_bind": "Fetch Attributes in Bind DN Context",
"admin.auths.allow_deactivate_all": "Allow an empty search result to deactivate all users",
"admin.auths.use_paged_search": "Use Paged Search",
@@ -3278,8 +3299,6 @@
"admin.config.git_gc_args": "GC Arguments",
"admin.config.git_migrate_timeout": "Migration Timeout",
"admin.config.git_mirror_timeout": "Mirror Update Timeout",
- "admin.config.git_clone_timeout": "Clone Operation Timeout",
- "admin.config.git_pull_timeout": "Pull Operation Timeout",
"admin.config.git_gc_timeout": "GC Operation Timeout",
"admin.config.log_config": "Log Configuration",
"admin.config.logger_name_fmt": "Logger: %s",
@@ -3770,8 +3789,8 @@
"projects.workflows.error.at_least_one_action": "At least one action must be configured",
"git.filemode.changed_filemode": "%[1]s → %[2]s",
"git.filemode.directory": "Directory",
- "git.filemode.normal_file": "Normal file",
- "git.filemode.executable_file": "Executable file",
- "git.filemode.symbolic_link": "Symbolic link",
+ "git.filemode.normal_file": "Regular",
+ "git.filemode.executable_file": "Executable",
+ "git.filemode.symbolic_link": "Symlink",
"git.filemode.submodule": "Submodule"
}
diff --git a/options/locale/locale_fr-FR.json b/options/locale/locale_fr-FR.json
index 10424db497..22be98d2ff 100644
--- a/options/locale/locale_fr-FR.json
+++ b/options/locale/locale_fr-FR.json
@@ -3,6 +3,7 @@
"dashboard": "Tableau de bord",
"explore_title": "Explorateur",
"help": "Aide",
+ "logo": "Logo",
"sign_in": "Connexion",
"sign_in_with_provider": "Se connecter avec %s",
"sign_in_or": "ou",
@@ -10,9 +11,12 @@
"sign_up": "S'inscrire",
"link_account": "Lier un Compte",
"register": "S'inscrire",
+ "version": "Version",
"powered_by": "Propulsé par %s",
+ "page": "Page",
"template": "Modèle",
"language": "Langue",
+ "notifications": "Notifications",
"active_stopwatch": "Suivi du temps actif",
"tracked_time_summary": "Résumé du pointage d’après les filtres de la liste des tickets",
"create_new": "Créer…",
@@ -28,6 +32,7 @@
"password": "Mot de passe",
"access_token": "Jeton d’accès",
"re_type": "Confirmez le mot de passe",
+ "captcha": "CAPTCHA",
"twofa": "Authentification à deux facteurs",
"twofa_scratch": "Code de secours pour l'authentification à deux facteurs",
"passcode": "Code d'accès",
@@ -63,6 +68,7 @@
"your_starred": "Favoris",
"your_settings": "Configuration",
"all": "Tous",
+ "sources": "Sources",
"mirrors": "Miroirs",
"collaborative": "Collaboratif",
"forks": "Bifurcations",
@@ -70,6 +76,7 @@
"pull_requests": "Demandes d'ajout",
"issues": "Tickets",
"milestones": "Jalons",
+ "ok": "Ok",
"cancel": "Annuler",
"retry": "Réessayez",
"rerun": "Relancer",
@@ -82,6 +89,7 @@
"remove_label_str": "Supprimer l’élément « %s »",
"edit": "Éditer",
"view": "Voir",
+ "test": "Test",
"enabled": "Activé",
"disabled": "Désactivé",
"locked": "Verrouillée",
@@ -114,6 +122,7 @@
"expired": "Expiré",
"confirm_delete_artifact": "Êtes-vous sûr de vouloir supprimer l’artefact « %s » ?",
"archived": "Archivé",
+ "concept_system_global": "Global",
"concept_user_individual": "Individuel",
"concept_code_repository": "Dépôt",
"concept_user_organization": "Organisation",
@@ -135,6 +144,7 @@
"filter.not_mirror": "Non miroité",
"filter.is_template": "Modèle",
"filter.not_template": "Pas un modèle",
+ "filter.public": "Public",
"filter.private": "Privé",
"no_results_found": "Aucun résultat trouvé.",
"internal_error_skipped": "Une erreur interne est survenue, mais ignorée : %s",
@@ -144,7 +154,9 @@
"search.fuzzy_tooltip": "Inclure également les résultats proches de la recherche",
"search.words": "Mots",
"search.words_tooltip": "Inclure uniquement les résultats qui correspondent exactement aux mots recherchés",
+ "search.regexp": "Regexp",
"search.regexp_tooltip": "Inclure uniquement les résultats qui correspondent à l’expression régulière recherchée",
+ "search.exact": "Exact",
"search.exact_tooltip": "Inclure uniquement les résultats qui correspondent exactement au terme de recherche",
"search.repo_kind": "Chercher des dépôts…",
"search.user_kind": "Chercher des utilisateurs…",
@@ -191,6 +203,8 @@
"editor.buttons.switch_to_legacy.tooltip": "Utiliser l’ancien éditeur à la place",
"editor.buttons.enable_monospace_font": "Activer la police à chasse fixe",
"editor.buttons.disable_monospace_font": "Désactiver la police à chasse fixe",
+ "filter.string.asc": "A–Z",
+ "filter.string.desc": "Z–A",
"error.occurred": "Une erreur s’est produite",
"error.report_message": "Si vous pensez qu’il s’agit d’un bug Gitea, veuillez consulter notre board GitHub ou ouvrir un nouveau ticket si nécessaire.",
"error.not_found": "La cible n'a pu être trouvée.",
@@ -202,7 +216,9 @@
"startpage.platform_desc": "Gitea tourne partout où Go peut être compilé : Windows, macOS, Linux, ARM, etc. Choisissez votre préféré !",
"startpage.lightweight": "Léger",
"startpage.lightweight_desc": "Gitea utilise peu de ressources. Il peut même tourner sur un Raspberry Pi très bon marché. Économisez l'énergie de vos serveurs !",
+ "startpage.license": "Open Source",
"startpage.license_desc": "Venez récupérer %[2]s ! Rejoignez-nous en contribuant à rendre ce projet encore meilleur !",
+ "install.install": "Installation",
"install.installing_desc": "Installation en cours, veuillez patienter…",
"install.title": "Configuration initiale",
"install.docker_helper": "Si vous exécutez Gitea dans Docker, veuillez lire la documentation avant de modifier les paramètres.",
@@ -215,6 +231,7 @@
"install.db_name": "Nom de base de données",
"install.db_schema": "Schéma",
"install.db_schema_helper": "Laisser vide pour la base de données par défaut (\"public\").",
+ "install.ssl_mode": "SSL",
"install.path": "Emplacement",
"install.sqlite_helper": "Chemin d'accès pour la base de données SQLite3.\nEntrer un chemin absolu si vous exécutez Gitea en tant que service.",
"install.reinstall_error": "Vous essayez d'installer dans une base de données Gitea existante",
@@ -344,6 +361,7 @@
"explore.users": "Utilisateurs",
"explore.organizations": "Organisations",
"explore.go_to": "Atteindre",
+ "explore.code": "Code",
"explore.code_last_indexed_at": "Dernière indexation %s",
"explore.relevant_repositories_tooltip": "Les dépôts qui sont des forks ou qui n'ont aucun sujet, aucune icône et aucune description sont cachés.",
"explore.relevant_repositories": "Seuls les dépôts pertinents sont affichés, afficher les résultats non filtrés.",
@@ -391,6 +409,7 @@
"auth.twofa_scratch_token_incorrect": "Votre code de secours est incorrect.",
"auth.twofa_required": "Vous devez configurer l’authentification à deux facteurs pour avoir accès aux dépôts, ou essayer de vous reconnecter.",
"auth.login_userpass": "Connexion",
+ "auth.login_openid": "OpenID",
"auth.oauth_signup_tab": "Créer un compte",
"auth.oauth_signup_title": "Compléter le nouveau compte",
"auth.oauth_signup_submit": "Finaliser la création du compte",
@@ -586,6 +605,7 @@
"user.show_more": "Voir plus",
"user.starred": "Dépôts favoris",
"user.watched": "Dépôts surveillés",
+ "user.code": "Code",
"user.projects": "Projets",
"user.overview": "Vue d'ensemble",
"user.following": "Abonnements",
@@ -617,6 +637,7 @@
"user.block.info_6": "ouvrir ou commenter vos tickets et demandes d’ajouts",
"user.block.info_7": "réagir à vos commentaires dans les tickets ou les demandes d’ajout",
"user.block.user_to_block": "Utilisateur à bloquer",
+ "user.block.note": "Note",
"user.block.note.title": "Note facultative :",
"user.block.note.info": "La note n’est pas visible par l’utilisateur bloqué.",
"user.block.note.edit": "Modifier la note",
@@ -627,14 +648,17 @@
"settings.appearance": "Apparence",
"settings.password": "Mot de passe",
"settings.security": "Sécurité",
+ "settings.avatar": "Avatar",
"settings.ssh_gpg_keys": "Clés SSH / GPG",
"settings.social": "Réseaux Sociaux",
+ "settings.applications": "Applications",
"settings.orgs": "Gérer les organisations",
"settings.repos": "Dépôts",
"settings.delete": "Supprimer le compte",
"settings.twofa": "Authentification à deux facteurs (TOTP)",
"settings.account_link": "Comptes liés",
"settings.organization": "Organisations",
+ "settings.uid": "UID",
"settings.webauthn": "Authentification à deux facteurs (Clés de sécurité)",
"settings.public_profile": "Profil public",
"settings.biography_placeholder": "Parlez-nous un peu de vous ! (Vous pouvez utiliser Markdown)",
@@ -663,6 +687,7 @@
"settings.hidden_comment_types.ref_tooltip": "Commentaires où ce ticket a été référencé sur un autre ticket, révision, etc.",
"settings.hidden_comment_types.issue_ref_tooltip": "Commentaires où l’utilisateur change la branche/étiquette associée au ticket",
"settings.comment_type_group_reference": "Référence",
+ "settings.comment_type_group_label": "Label",
"settings.comment_type_group_milestone": "Jalon",
"settings.comment_type_group_assignee": "Assigné à",
"settings.comment_type_group_title": "Titre",
@@ -915,6 +940,7 @@
"settings.email_notifications.actions.desc": "Notification pour les executions de workflows sur les dépôts configurés avec les Actions Gitea.",
"settings.email_notifications.actions.failure_only": "Ne notifier que pour les exécutions échouées",
"settings.visibility": "Visibilité de l'utilisateur",
+ "settings.visibility.public": "Publique",
"settings.visibility.public_tooltip": "Visible par tout le monde",
"settings.visibility.limited": "Limité",
"settings.visibility.limited_tooltip": "Visible uniquement pour les utilisateurs authentifiés",
@@ -951,11 +977,13 @@
"repo.fork.blocked_user": "Impossible de bifurquer le dépôt car vous êtes bloqué par son propriétaire.",
"repo.use_template": "Utiliser ce modèle",
"repo.open_with_editor": "Ouvrir avec %s",
+ "repo.download_directory_as": "Télécharger le répertoire en tant que %s",
"repo.download_zip": "Télécharger le ZIP",
"repo.download_tar": "Télécharger le TAR.GZ",
"repo.download_bundle": "Télécharger le BUNDLE",
"repo.generate_repo": "Générer un dépôt",
"repo.generate_from": "Générer depuis",
+ "repo.repo_desc": "Description",
"repo.repo_desc_helper": "Décrire brièvement votre dépôt",
"repo.repo_no_desc": "Aucune description fournie",
"repo.repo_lang": "Langue",
@@ -1034,11 +1062,14 @@
"repo.desc.template": "Modèle",
"repo.desc.internal": "Interne",
"repo.desc.archived": "Archivé",
+ "repo.desc.sha256": "SHA256",
"repo.template.items": "Élément du modèle",
"repo.template.git_content": "Contenu Git (branche par défaut)",
"repo.template.git_hooks": "Déclencheurs Git",
"repo.template.git_hooks_tooltip": "Vous ne pouvez actuellement pas modifier ou supprimer les déclencheurs Git ajoutés. Sélectionnez cette option uniquement si vous faites confiance au modèle de dépôt.",
+ "repo.template.webhooks": "Déclencheurs web",
"repo.template.topics": "Sujets",
+ "repo.template.avatar": "Avatar",
"repo.template.issue_labels": "Labels de ticket",
"repo.template.one_item": "Vous devez sélectionner au moins un élément du modèle",
"repo.template.invalid": "Vous devez sélectionner un modèle de dépôt",
@@ -1060,7 +1091,9 @@
"repo.migrate_options_lfs_endpoint.description.local": "Un chemin de serveur local est également pris en charge.",
"repo.migrate_options_lfs_endpoint.placeholder": "Si laissé vide, le point d’accès sera déterminé à partir de l’URL de clonage.",
"repo.migrate_items": "Éléments à migrer",
+ "repo.migrate_items_wiki": "Wiki",
"repo.migrate_items_milestones": "Jalons",
+ "repo.migrate_items_labels": "Labels",
"repo.migrate_items_issues": "Tickets",
"repo.migrate_items_pullrequests": "Demandes d'ajout",
"repo.migrate_items_merge_requests": "Demandes de fusion",
@@ -1129,17 +1162,21 @@
"repo.empty_message": "Ce dépôt n’a pas de contenu.",
"repo.broken_message": "Les données git de ce dépôt ne peuvent pas être lues. Contactez l'administrateur de cette instance ou supprimez ce dépôt.",
"repo.no_branch": "Ce dépôt n’a aucune branche.",
+ "repo.code": "Code",
"repo.code.desc": "Accéder au code source, fichiers, révisions et branches.",
"repo.branch": "Branche",
"repo.tree": "Aborescence",
"repo.clear_ref": "Effacer la référence actuelle",
"repo.filter_branch_and_tag": "Filtrer une branche ou une étiquette",
"repo.find_tag": "Rechercher une étiquette",
+ "repo.branches": "Branches",
"repo.tags": "Étiquettes",
"repo.issues": "Tickets",
"repo.pulls": "Demandes d'ajout",
"repo.projects": "Projets",
"repo.packages": "Paquets",
+ "repo.actions": "Actions",
+ "repo.labels": "Labels",
"repo.org_labels_desc": "Les labels d'une organisation peuvent être utilisés avec tous les dépôts de cette organisation.",
"repo.org_labels_desc_manage": "gérer",
"repo.milestone": "Jalon",
@@ -1149,6 +1186,7 @@
"repo.release": "Publications",
"repo.releases": "Publications",
"repo.tag": "Étiquette",
+ "repo.git_tag": "Étiquette git",
"repo.released_this": "a publié ceci",
"repo.tagged_this": "a étiqueté",
"repo.file.title": "%s sur %s",
@@ -1282,6 +1320,8 @@
"repo.commits.search_branch": "Cette branche",
"repo.commits.search_all": "Toutes les branches",
"repo.commits.author": "Auteur",
+ "repo.commits.message": "Message",
+ "repo.commits.date": "Date",
"repo.commits.older": "Précédemment",
"repo.commits.newer": "Récemment",
"repo.commits.signed_by": "Signé par",
@@ -1306,6 +1346,7 @@
"repo.ext_issues.desc": "Lien vers un gestionnaire de tickets externe.",
"repo.projects.desc": "Gérer les tickets et les demandes d’ajouts dans les projets.",
"repo.projects.description": "Description (facultative)",
+ "repo.projects.description_placeholder": "Description",
"repo.projects.create": "Créer un projet",
"repo.projects.title": "Titre",
"repo.projects.new": "Nouveau projet",
@@ -1350,6 +1391,7 @@
"repo.issues.filter_no_results_placeholder": "Essayez d’ajuster vos filtres de recherche.",
"repo.issues.new": "Nouveau ticket",
"repo.issues.new.title_empty": "Le titre ne peut pas être vide",
+ "repo.issues.new.labels": "Labels",
"repo.issues.new.no_label": "Sans labels",
"repo.issues.new.clear_labels": "Effacer les labels",
"repo.issues.new.projects": "Projets",
@@ -1379,6 +1421,7 @@
"repo.issues.create": "Créer un ticket",
"repo.issues.new_label": "Nouveau label",
"repo.issues.new_label_placeholder": "Nom du label",
+ "repo.issues.new_label_desc_placeholder": "Description",
"repo.issues.create_label": "Créer un label",
"repo.issues.label_templates.title": "Charger un ensemble prédéfini de label",
"repo.issues.label_templates.info": "Il n'existe pas encore de label. Créez-en un avec « Nouveau label » ou utilisez un jeu de label prédéfini :",
@@ -1408,6 +1451,7 @@
"repo.issues.remove_ref_at": "a supprimé la référence %s %s.",
"repo.issues.add_ref_at": "a ajouté la référence %s %s.",
"repo.issues.delete_branch_at": "a supprimé la branche %s %s.",
+ "repo.issues.filter_label": "Label",
"repo.issues.filter_label_exclude": "Utilisez Alt + Clic/entrée pour exclure les labels.",
"repo.issues.filter_label_no_select": "Toutes les labels",
"repo.issues.filter_label_select_no_label": "Aucun label",
@@ -1425,6 +1469,7 @@
"repo.issues.filter_poster": "Auteur",
"repo.issues.filter_user_placeholder": "Rechercher des utilisateurs",
"repo.issues.filter_user_no_select": "Tous les utilisateurs",
+ "repo.issues.filter_type": "Type",
"repo.issues.filter_type.all_issues": "Tous les tickets",
"repo.issues.filter_type.all_pull_requests": "Toutes les demandes d’ajout",
"repo.issues.filter_type.assigned_to_you": "Qui vous sont assignés",
@@ -1447,6 +1492,7 @@
"repo.issues.filter_sort.fewestforks": "Bifurcations (croissant)",
"repo.issues.action_open": "Ouvrir",
"repo.issues.action_close": "Fermer",
+ "repo.issues.action_label": "Label",
"repo.issues.action_milestone": "Jalon",
"repo.issues.action_milestone_no_select": "Aucun jalon",
"repo.issues.action_assignee": "Assigné à",
@@ -1526,6 +1572,7 @@
"repo.issues.label_exclusive_warning": "Tout label d'une portée en conflit sera retiré lors de la modification des labels d’un ticket ou d’une demande d’ajout.",
"repo.issues.label_exclusive_order": "Ordre de tri",
"repo.issues.label_exclusive_order_tooltip": "Les labels exclusifs partageant la même portée seront triées selon cet ordre numérique.",
+ "repo.issues.label_count": "%d label(s)",
"repo.issues.label_open_issues": "%d tickets ouverts",
"repo.issues.label_edit": "Éditer",
"repo.issues.label_delete": "Supprimer",
@@ -1591,6 +1638,7 @@
"repo.issues.del_time_history": "a supprimé son temps de travail %s.",
"repo.issues.add_time_manually": "Temps pointé manuellement",
"repo.issues.add_time_hours": "Heures",
+ "repo.issues.add_time_minutes": "Minutes",
"repo.issues.add_time_sum_to_small": "Aucun minuteur n'a été saisi.",
"repo.issues.time_spent_total": "Temps passé total",
"repo.issues.time_spent_from_all_authors": "Temps passé total : %s",
@@ -1654,6 +1702,7 @@
"repo.issues.review.content.empty": "Vous devez laisser un commentaire indiquant le(s) changement(s) demandé(s).",
"repo.issues.review.reject": "a requis les changements %s",
"repo.issues.review.wait": "a été sollicité pour évaluer cette demande d’ajout %s.",
+ "repo.issues.review.codeowners_rules": "Règles des CODEOWNERS",
"repo.issues.review.add_review_request": "a demandé à %s une évaluation %s.",
"repo.issues.review.remove_review_request": "a retiré la demande d’évaluation pour %s %s.",
"repo.issues.review.remove_review_request_self": "a décliné son invitation à évaluer %s.",
@@ -1685,19 +1734,24 @@
"repo.issues.content_history.created": "a créé",
"repo.issues.content_history.delete_from_history": "Supprimer de l’historique",
"repo.issues.content_history.delete_from_history_confirm": "Supprimer de l’historique ?",
+ "repo.issues.content_history.options": "Options",
"repo.issues.reference_link": "Référence : %s",
+ "repo.compare.compare_base": "base",
"repo.compare.compare_head": "comparer",
+ "repo.compare.title": "Comparer les changements",
+ "repo.compare.description": "Choisissez deux branches ou étiquettes git pour voir les différences ou faire une demande d’ajout.",
"repo.pulls.desc": "Active les demandes d’ajouts et l’évaluation du code.",
"repo.pulls.new": "Nouvelle demande d'ajout",
+ "repo.pulls.new.description": "Discutez et examinez les changements dans cette comparaison.",
"repo.pulls.new.blocked_user": "Impossible de créer une demande d’ajout car vous êtes bloqué par le propriétaire du dépôt.",
"repo.pulls.new.must_collaborator": "Vous devez être un collaborateur pour créer une demande d’ajout.",
+ "repo.pulls.new.already_existed": "Une demande d’ajout entre ces branches existe déjà",
"repo.pulls.edit.already_changed": "Impossible d’enregistrer la demande d’ajout. Il semble que le contenu ait été modifié par un autre utilisateur. Veuillez rafraîchir la page et réessayer afin d’éviter d’écraser leurs modifications.",
"repo.pulls.view": "Voir la demande d'ajout",
"repo.pulls.compare_changes": "Nouvelle demande d’ajout",
"repo.pulls.allow_edits_from_maintainers": "Autoriser les modifications des mainteneurs",
"repo.pulls.allow_edits_from_maintainers_desc": "Les utilisateurs ayant un accès en écriture à la branche de base peuvent également soumettre sur cette branche",
"repo.pulls.allow_edits_from_maintainers_err": "La mise à jour à échoué",
- "repo.pulls.compare_changes_desc": "Sélectionnez la branche dans laquelle fusionner et la branche depuis laquelle tirer les modifications.",
"repo.pulls.has_viewed_file": "Consulté",
"repo.pulls.has_changed_since_last_review": "Modifié depuis votre dernier passage",
"repo.pulls.viewed_files_label": "%[1]d / %[2]d fichiers vus",
@@ -1795,7 +1849,8 @@
"repo.pulls.status_checking": "Certains contrôles sont en attente",
"repo.pulls.status_checks_success": "Tous les contrôles ont réussi",
"repo.pulls.status_checks_warning": "Quelques vérifications ont signalé des avertissements",
- "repo.pulls.status_checks_failure": "Certaines vérifications ont échoué",
+ "repo.pulls.status_checks_failure_required": "Des vérifications obligatoires ont échoué",
+ "repo.pulls.status_checks_failure_optional": "Des vérifications optionnelles ont échoué",
"repo.pulls.status_checks_error": "Quelques vérifications ont signalé des erreurs",
"repo.pulls.status_checks_requested": "Requis",
"repo.pulls.status_checks_details": "Détails",
@@ -1850,6 +1905,7 @@
"repo.milestones.completeness": "%d%% complété",
"repo.milestones.create": "Créer un Jalon",
"repo.milestones.title": "Titre",
+ "repo.milestones.desc": "Description",
"repo.milestones.due_date": "Date d'échéance (facultatif)",
"repo.milestones.clear": "Effacer",
"repo.milestones.invalid_due_date_format": "Le format de la date d'échéance est invalide, il doit être comme suit 'aaaa-mm-jj'.",
@@ -1884,11 +1940,14 @@
"repo.signing.wont_sign.not_signed_in": "Vous n'êtes pas connecté.",
"repo.ext_wiki": "Accès au wiki externe",
"repo.ext_wiki.desc": "Lier un wiki externe.",
+ "repo.wiki": "Wiki",
"repo.wiki.welcome": "Bienvenue sur le Wiki.",
"repo.wiki.welcome_desc": "Le wiki vous permet d'écrire ou de partager de la documentation avec vos collaborateurs.",
"repo.wiki.desc": "Écrire et partager de la documentation avec vos collaborateurs.",
"repo.wiki.create_first_page": "Créer la première page",
+ "repo.wiki.page": "Page",
"repo.wiki.filter_page": "Filtrer la page",
+ "repo.wiki.new_page": "Page",
"repo.wiki.page_title": "Titre de la page",
"repo.wiki.page_content": "Contenu de la page",
"repo.wiki.default_commit_message": "Écrire une note concernant cette mise à jour (optionnel).",
@@ -1903,6 +1962,7 @@
"repo.wiki.delete_page_notice_1": "Supprimer la page de wiki \"%s\" est irréversible. Continuer ?",
"repo.wiki.page_already_exists": "Une page de wiki avec le même nom existe déjà.",
"repo.wiki.reserved_page": "Le nom de page de wiki \"%s\" est réservé.",
+ "repo.wiki.pages": "Pages",
"repo.wiki.last_updated": "Dernière mise à jour: %s",
"repo.wiki.page_name_desc": "Entrez un nom pour cette page Wiki. Certains noms spéciaux sont « Home », « _Sidebar » et « _Footer ».",
"repo.wiki.original_git_entry_tooltip": "Voir le fichier Git original au lieu d'utiliser un lien convivial.",
@@ -1995,6 +2055,7 @@
"repo.settings.collaboration.owner": "Propriétaire",
"repo.settings.collaboration.undefined": "Indéfini",
"repo.settings.collaboration.per_unit": "Permissions de ressource",
+ "repo.settings.hooks": "Déclencheurs web",
"repo.settings.githooks": "Déclencheurs Git",
"repo.settings.basic_settings": "Paramètres de base",
"repo.settings.mirror_settings": "Réglages Miroir",
@@ -2012,6 +2073,7 @@
"repo.settings.mirror_settings.docs.pulling_remote_title": "Tirer depuis un dépôt distant",
"repo.settings.mirror_settings.mirrored_repository": "Dépôt en miroir",
"repo.settings.mirror_settings.pushed_repository": "Dépôt sortant",
+ "repo.settings.mirror_settings.direction": "Sens",
"repo.settings.mirror_settings.direction.pull": "Tirer",
"repo.settings.mirror_settings.direction.push": "Soumission",
"repo.settings.mirror_settings.last_update": "Dernière mise à jour",
@@ -2180,6 +2242,7 @@
"repo.settings.payload_url": "URL cible",
"repo.settings.http_method": "Méthode HTTP",
"repo.settings.content_type": "Type de contenu POST",
+ "repo.settings.secret": "Secret",
"repo.settings.webhook_secret_desc": "Si le serveur webhook supporte l’usage de secrets, vous pouvez indiquer un secret ici en vous basant sur leur documentation.",
"repo.settings.slack_username": "Nom d'utilisateur",
"repo.settings.slack_icon_url": "URL de l'icône",
@@ -2197,6 +2260,7 @@
"repo.settings.event_delete_desc": "Branche ou étiquette supprimée.",
"repo.settings.event_fork": "Bifurcation",
"repo.settings.event_fork_desc": "Dépôt bifurqué.",
+ "repo.settings.event_wiki": "Wiki",
"repo.settings.event_wiki_desc": "Page wiki créée, renommée, modifiée ou supprimée.",
"repo.settings.event_statuses": "Statuts",
"repo.settings.event_statuses_desc": "Statut de validation mis à jour depuis l’API.",
@@ -2262,6 +2326,19 @@
"repo.settings.slack_domain": "Domaine",
"repo.settings.slack_channel": "Canal",
"repo.settings.add_web_hook_desc": "Intégrez %s dans votre dépôt.",
+ "repo.settings.web_hook_name_gitea": "Gitea",
+ "repo.settings.web_hook_name_gogs": "Gogs",
+ "repo.settings.web_hook_name_slack": "Slack",
+ "repo.settings.web_hook_name_discord": "Discord",
+ "repo.settings.web_hook_name_dingtalk": "DingTalk",
+ "repo.settings.web_hook_name_telegram": "Telegram",
+ "repo.settings.web_hook_name_matrix": "Matrix",
+ "repo.settings.web_hook_name_msteams": "Microsoft Teams",
+ "repo.settings.web_hook_name_feishu_or_larksuite": "Suite Feishu / Lark",
+ "repo.settings.web_hook_name_feishu": "Feishu",
+ "repo.settings.web_hook_name_larksuite": "Suite Lark",
+ "repo.settings.web_hook_name_wechatwork": "WeCom (Wechat Work)",
+ "repo.settings.web_hook_name_packagist": "Packagist",
"repo.settings.packagist_username": "Nom d'utilisateur Packagist",
"repo.settings.packagist_api_token": "Jeton API",
"repo.settings.packagist_package_url": "URL du paquet Packagist",
@@ -2279,6 +2356,7 @@
"repo.settings.deploy_key_deletion": "Supprimer une clé de déploiement",
"repo.settings.deploy_key_deletion_desc": "La suppression d’une clé de déploiement révoque son accès à ce dépôt. Continuer ?",
"repo.settings.deploy_key_deletion_success": "La clé de déploiement a été supprimée.",
+ "repo.settings.branches": "Branches",
"repo.settings.protected_branch": "Protection de branche",
"repo.settings.protected_branch.save_rule": "Enregistrer la règle",
"repo.settings.protected_branch.delete_rule": "Supprimer la règle",
@@ -2406,6 +2484,7 @@
"repo.settings.unarchive.success": "Le dépôt a bien été réhabilité.",
"repo.settings.unarchive.error": "Une erreur est survenue en essayant deréhabiliter ce dépôt. Voir le journal pour plus de détails.",
"repo.settings.update_avatar_success": "L'avatar du dépôt a été mis à jour.",
+ "repo.settings.lfs": "LFS",
"repo.settings.lfs_filelist": "Fichiers LFS stockés dans ce dépôt",
"repo.settings.lfs_no_lfs_files": "Aucun fichier LFS stocké dans ce dépôt",
"repo.settings.lfs_findcommits": "Trouver des révisions",
@@ -2425,6 +2504,7 @@
"repo.settings.lfs_force_unlock": "Forcer le déverrouillage",
"repo.settings.lfs_pointers.found": "%d pointeur(s) trouvés : %d associés, %d non associés (%d manquant dans le magasin)",
"repo.settings.lfs_pointers.sha": "SHA du Blob",
+ "repo.settings.lfs_pointers.oid": "OID",
"repo.settings.lfs_pointers.inRepo": "Dans le dépôt",
"repo.settings.lfs_pointers.exists": "Existe en magasin",
"repo.settings.lfs_pointers.accessible": "Accessible à l'utilisateur",
@@ -2438,6 +2518,7 @@
"repo.diff.browse_source": "Parcourir la source",
"repo.diff.parent": "Parent",
"repo.diff.commit": "révision",
+ "repo.diff.git-notes": "Notes",
"repo.diff.data_not_available": "Contenu de la comparaison indisponible",
"repo.diff.options_button": "Option de Diff",
"repo.diff.download_patch": "Télécharger le Fichier Patch",
@@ -2451,6 +2532,7 @@
"repo.diff.whitespace_ignore_at_eol": "Ignorer les blancs en fin de ligne",
"repo.diff.stats_desc": " %d fichiers modifiés avec %d ajouts et %d suppressions",
"repo.diff.stats_desc_file": "%d modifications: %d ajouts et %d suppressions",
+ "repo.diff.bin": "BIN",
"repo.diff.bin_not_shown": "Fichier binaire non affiché.",
"repo.diff.view_file": "Voir le fichier",
"repo.diff.file_before": "Avant",
@@ -2463,8 +2545,8 @@
"repo.diff.too_many_files": "Certains fichiers ne sont pas affichés car ce diff contient trop de modifications",
"repo.diff.show_more": "Voir plus",
"repo.diff.load": "Voir la Diff",
- "repo.diff.generated": "générée",
- "repo.diff.vendored": "externe",
+ "repo.diff.generated": "Générée",
+ "repo.diff.vendored": "Externe",
"repo.diff.comment.add_line_comment": "Commenter cette ligne",
"repo.diff.comment.placeholder": "Laisser un commentaire",
"repo.diff.comment.add_single_comment": "Commenter (simple)",
@@ -2497,6 +2579,7 @@
"repo.release.new_release": "Nouvelle publication",
"repo.release.draft": "Brouillon",
"repo.release.prerelease": "Pré-publication",
+ "repo.release.stable": "Stable",
"repo.release.latest": "Dernière",
"repo.release.compare": "Comparer",
"repo.release.edit": "Éditer",
@@ -2537,6 +2620,13 @@
"repo.release.add_tag": "Créer uniquement l'étiquette",
"repo.release.releases_for": "Publications pour %s",
"repo.release.tags_for": "Étiquettes pour %s",
+ "repo.release.notes": "Notes de publication",
+ "repo.release.generate_notes": "Générer des notes de publication",
+ "repo.release.generate_notes_desc": "Ajoute automatiquement les demandes d’ajouts fusionnées, et ajoute un lien vers la liste des changements pour cette publication.",
+ "repo.release.previous_tag": "Étiquette précédente",
+ "repo.release.generate_notes_tag_not_found": "L’étiquette « %s » n’existe pas dans ce dépôt.",
+ "repo.release.generate_notes_target_not_found": "La cible de la publication « %s » est introuvable.",
+ "repo.release.generate_notes_missing_tag": "Entrez un nom d’étiquette pour générer les notes de publication.",
"repo.branch.name": "Nom de la branche",
"repo.branch.already_exists": "Une branche nommée \"%s\" existe déjà.",
"repo.branch.delete_head": "Supprimer",
@@ -2597,6 +2687,7 @@
"graphs.component_loading_info": "Ça prend son temps…",
"graphs.component_failed_to_load": "Une erreur inattendue s’est produite.",
"graphs.code_frequency.what": "fréquence du code",
+ "graphs.contributors.what": "contributions",
"graphs.recent_commits.what": "révisions récentes",
"org.org_name_holder": "Nom de l'organisation",
"org.org_full_name_holder": "Nom complet de l'organisation",
@@ -2605,11 +2696,14 @@
"org.repo_updated": "Actualisé",
"org.members": "Membres",
"org.teams": "Équipes",
+ "org.code": "Code",
"org.lower_members": "Membres",
"org.lower_repositories": "dépôts",
"org.create_new_team": "Nouvelle équipe",
"org.create_team": "Créer une équipe",
+ "org.org_desc": "Description",
"org.team_name": "Nom de l'équipe",
+ "org.team_desc": "Description",
"org.team_name_helper": "Le nom d'équipe doit être court et mémorable.",
"org.team_desc_helper": "Décrire le but ou le rôle de l’équipe.",
"org.team_access_desc": "Accès au dépôt",
@@ -2634,6 +2728,7 @@
"org.settings.change_visibility_notices_2": "Les non-membres ne pourront plus accéder aux dépôts de l’organisation si la visibilité devient privée.",
"org.settings.change_visibility_success": "La visibilité de l’organisation %s a été modifiée.",
"org.settings.visibility_desc": "Changez qui peut voir l’organisation et ses dépôts.",
+ "org.settings.visibility.public": "Publique",
"org.settings.visibility.limited": "Limité (Visible uniquement aux utilisateurs authentifiés)",
"org.settings.visibility.limited_shortname": "Limité",
"org.settings.visibility.private": "Privé (Visible uniquement aux membres de l’organisation)",
@@ -2738,6 +2833,7 @@
"org.worktime.by_repositories": "Par dépôts",
"org.worktime.by_milestones": "Par jalons",
"org.worktime.by_members": "Par membres",
+ "admin.maintenance": "Maintenance",
"admin.dashboard": "Tableau de bord",
"admin.self_check": "Autodiagnostique",
"admin.identity_access": "Identité et accès",
@@ -2749,6 +2845,7 @@
"admin.integrations": "Intégrations",
"admin.authentication": "Sources d'authentification",
"admin.emails": "Courriels de l’utilisateur",
+ "admin.config": "Configuration",
"admin.config_summary": "Résumé",
"admin.config_settings": "Paramètres",
"admin.notices": "Informations",
@@ -2795,6 +2892,7 @@
"admin.dashboard.git_gc_repos": "Exécuter le ramasse-miette des dépôts",
"admin.dashboard.resync_all_sshkeys": "Mettre à jour le fichier « ssh/authorized_keys » avec les clés SSH Gitea.",
"admin.dashboard.resync_all_sshprincipals": "Mettre à jour le fichier « .ssh/authorized_principals » avec les principaux de Gitea SSH.",
+ "admin.dashboard.resync_all_hooks": "Resynchroniser les déclencheurs git de tous les dépôts (pre-receive, update, post-receive, proc-receive …)",
"admin.dashboard.reinit_missing_repos": "Réinitialiser tous les dépôts Git manquants pour lesquels un enregistrement existe",
"admin.dashboard.sync_external_users": "Synchroniser les données de l’utilisateur externe",
"admin.dashboard.cleanup_hook_task_table": "Nettoyer la table hook_task",
@@ -2852,6 +2950,7 @@
"admin.users.reserved": "Réservé",
"admin.users.bot": "Robot",
"admin.users.remote": "Distant",
+ "admin.users.2fa": "2FA",
"admin.users.repos": "Dépôts",
"admin.users.created": "Créés",
"admin.users.last_login": "Dernière connexion",
@@ -2936,6 +3035,8 @@
"admin.packages.owner": "Propriétaire",
"admin.packages.creator": "Créateur",
"admin.packages.name": "Nom",
+ "admin.packages.version": "Version",
+ "admin.packages.type": "Type",
"admin.packages.repository": "Dépôt",
"admin.packages.size": "Taille",
"admin.packages.published": "Publiés",
@@ -2950,6 +3051,7 @@
"admin.auths.auth_manage_panel": "Gestion des sources d'authentification",
"admin.auths.new": "Ajouter une source d'authentification",
"admin.auths.name": "Nom",
+ "admin.auths.type": "Type",
"admin.auths.enabled": "Activé",
"admin.auths.syncenabled": "Activer la synchronisation des utilisateurs",
"admin.auths.updated": "Mis à jour",
@@ -2958,6 +3060,8 @@
"admin.auths.security_protocol": "Protocole de sécurité",
"admin.auths.domain": "Domaine",
"admin.auths.host": "Hôte",
+ "admin.auths.port": "Port",
+ "admin.auths.bind_dn": "Lien DN",
"admin.auths.bind_password": "Bind mot de passe",
"admin.auths.user_base": "Utilisateur Search Base",
"admin.auths.user_dn": "Utilisateur DN",
@@ -2968,6 +3072,7 @@
"admin.auths.attribute_mail": "Attribut courriel",
"admin.auths.attribute_ssh_public_key": "Attribut clé SSH publique",
"admin.auths.attribute_avatar": "Attribut de l'avatar",
+ "admin.auths.ssh_keys_are_verified": "Les clés SSH du LDAP sont considérées comme vérifiées",
"admin.auths.attributes_in_bind": "Aller chercher les attributs dans le contexte de liaison DN",
"admin.auths.allow_deactivate_all": "Permettre à un résultat de recherche vide de désactiver tous les utilisateurs",
"admin.auths.use_paged_search": "Utiliser la recherche paginée",
@@ -3086,6 +3191,7 @@
"admin.config.ssh_enabled": "Activé",
"admin.config.ssh_start_builtin_server": "Utiliser le serveur incorporé",
"admin.config.ssh_domain": "Domaine du serveur SSH",
+ "admin.config.ssh_port": "Port",
"admin.config.ssh_listen_port": "Port d'écoute",
"admin.config.ssh_root_path": "Emplacement racine",
"admin.config.ssh_minimum_key_size_check": "Vérification de la longueur de clé minimale",
@@ -3095,10 +3201,12 @@
"admin.config.lfs_content_path": "Chemin de contenu LFS",
"admin.config.lfs_http_auth_expiry": "Expiration de l'authentification HTTP LFS",
"admin.config.db_config": "Configuration de la base de données",
+ "admin.config.db_type": "Type",
"admin.config.db_host": "Hôte",
"admin.config.db_name": "Nom",
"admin.config.db_user": "Nom d'utilisateur",
"admin.config.db_schema": "Schéma",
+ "admin.config.db_ssl_mode": "SSL",
"admin.config.db_path": "Emplacement",
"admin.config.service_config": "Configuration du service",
"admin.config.register_email_confirm": "Exiger la confirmation du courriel lors de l’inscription",
@@ -3176,15 +3284,15 @@
"admin.config.git_gc_args": "Arguments de GC",
"admin.config.git_migrate_timeout": "Délai imparti pour une migration",
"admin.config.git_mirror_timeout": "Délai imparti pour mettre à jour le miroir",
- "admin.config.git_clone_timeout": "Délai imparti pour l'opération \"Clone\"",
- "admin.config.git_pull_timeout": "Délai imparti pour l'opération \"Pull\"",
"admin.config.git_gc_timeout": "Délai imparti pour l'opération \"GC\"",
"admin.config.log_config": "Configuration du journal",
+ "admin.config.logger_name_fmt": "Journal : %s",
"admin.config.disabled_logger": "Désactivé",
"admin.config.access_log_mode": "Mode de journalisation d'accès",
"admin.config.access_log_template": "Modèle de journal d'accès",
"admin.config.xorm_log_sql": "Activer la journalisation SQL",
"admin.config.set_setting_failed": "Impossible de définir le paramètre %s",
+ "admin.monitor.stats": "Stats",
"admin.monitor.cron": "Tâches récurrentes",
"admin.monitor.name": "Nom",
"admin.monitor.schedule": "Planification",
@@ -3193,9 +3301,11 @@
"admin.monitor.execute_times": "Exécutions",
"admin.monitor.process": "Processus en cours d'exécution",
"admin.monitor.stacktrace": "Piles d'execution",
+ "admin.monitor.trace": "Trace",
"admin.monitor.performance_logs": "Journaux de performance",
"admin.monitor.processes_count": "%d processus",
"admin.monitor.download_diagnosis_report": "Télécharger le rapport de diagnostic",
+ "admin.monitor.desc": "Description",
"admin.monitor.start": "Heure de démarrage",
"admin.monitor.execute_time": "Heure d'Éxécution",
"admin.monitor.last_execution_result": "Résultat",
@@ -3205,6 +3315,7 @@
"admin.monitor.queues": "Files d'attente",
"admin.monitor.queue": "File d'attente : %s",
"admin.monitor.queue.name": "Nom",
+ "admin.monitor.queue.type": "Type",
"admin.monitor.queue.exemplar": "Type d'exemple",
"admin.monitor.queue.numberworkers": "Nombre de processus",
"admin.monitor.queue.activeworkers": "Processus actifs",
@@ -3228,8 +3339,10 @@
"admin.notices.inverse_selection": "Inverser la sélection",
"admin.notices.delete_selected": "Supprimer les éléments sélectionnés",
"admin.notices.delete_all": "Supprimer toutes les notifications",
+ "admin.notices.type": "Type",
"admin.notices.type_1": "Dépôt",
"admin.notices.type_2": "Tâche",
+ "admin.notices.desc": "Description",
"admin.notices.op": "Opération",
"admin.notices.delete_success": "Les informations systèmes ont été supprimées.",
"admin.self_check.no_problem_found": "Aucun problème trouvé pour l’instant.",
@@ -3274,22 +3387,26 @@
"tool.now": "maintenant",
"tool.future": "futur",
"tool.1s": "1 seconde",
+ "tool.1m": "1 minute",
"tool.1h": "1 heure",
"tool.1d": "1 jour",
"tool.1w": "1 semaine",
"tool.1mon": "1 mois",
"tool.1y": "1 an",
"tool.seconds": "%d secondes",
+ "tool.minutes": "%d minutes",
"tool.hours": "%d heures",
"tool.days": "%d jours",
"tool.weeks": "%d semaines",
"tool.months": "%d mois",
"tool.years": "%d ans",
"tool.raw_seconds": "secondes",
+ "tool.raw_minutes": "minutes",
"dropzone.default_message": "Déposez les fichiers ou cliquez ici pour téléverser.",
"dropzone.invalid_input_type": "Vous ne pouvez pas téléverser des fichiers de ce type.",
"dropzone.file_too_big": "La taille du fichier ({{filesize}} Mo) dépasse la taille maximale ({{maxFilesize}} Mo).",
"dropzone.remove_file": "Supprimer le fichier",
+ "notification.notifications": "Notifications",
"notification.unread": "Non lue(s)",
"notification.read": "Lue(s)",
"notification.no_unread": "Aucune notification non lue.",
@@ -3320,12 +3437,14 @@
"packages.empty.documentation": "Pour plus d'informations sur le registre de paquets, voir la documentation.",
"packages.empty.repo": "Avez-vous téléchargé un paquet, mais il n'est pas affiché ici? Allez dans les paramètres du paquet et liez le à ce dépôt.",
"packages.registry.documentation": "Pour plus d’informations sur le registre %s, voir la documentation.",
+ "packages.filter.type": "Type",
"packages.filter.type.all": "Tous",
"packages.filter.no_result": "Votre filtre n'affiche aucun résultat.",
"packages.filter.container.tagged": "Balisé",
"packages.filter.container.untagged": "Débalisé",
"packages.published_by": "%[1]s publié par %[3]s",
"packages.published_by_in": "%[1]s publié par %[3]s en %[5]s",
+ "packages.installation": "Installation",
"packages.about": "À propos de ce paquet",
"packages.requirements": "Exigences",
"packages.dependencies": "Dépendances",
@@ -3337,18 +3456,24 @@
"packages.details.documentation_site": "Site de documentation",
"packages.details.license": "Licence",
"packages.assets": "Ressources",
+ "packages.versions": "Versions",
"packages.versions.view_all": "Voir tout",
+ "packages.dependency.id": "ID",
+ "packages.dependency.version": "Version",
"packages.search_in_external_registry": "Rechercher dans %s",
"packages.alpine.registry": "Configurez ce registre en ajoutant l’URL dans votre fichier /etc/apk/repositories :",
"packages.alpine.registry.key": "Téléchargez la clé RSA publique du registre dans le dossier /etc/apk/keys/ pour vérifier la signature de l'index :",
"packages.alpine.registry.info": "Choisissez $branch et $repository dans la liste ci-dessous.",
"packages.alpine.install": "Pour installer le paquet, exécutez la commande suivante :",
"packages.alpine.repository": "Informations sur le Dépôt",
+ "packages.alpine.repository.branches": "Branches",
"packages.alpine.repository.repositories": "Dépôts",
+ "packages.alpine.repository.architectures": "Architectures",
"packages.arch.registry": "Ajouter un serveur avec un dépôt et une architecture liés dans /etc/pacman.conf :",
"packages.arch.install": "Synchroniser le paquet avec pacman :",
"packages.arch.repository": "Informations sur le Dépôt",
"packages.arch.repository.repositories": "Dépôts",
+ "packages.arch.repository.architectures": "Architectures",
"packages.cargo.registry": "Configurez ce registre dans le fichier de configuration Cargo (par exemple ~/.cargo/config.toml) :",
"packages.cargo.install": "Pour installer le paquet en utilisant Cargo, exécutez la commande suivante :",
"packages.chef.registry": "Configurer ce registre dans votre fichier ~/.chef/config.rb :",
@@ -3365,9 +3490,11 @@
"packages.container.details.type": "Type d'image",
"packages.container.details.platform": "Plateforme",
"packages.container.pull": "Tirez l'image depuis un terminal :",
+ "packages.container.images": "Images",
"packages.container.digest": "Empreinte",
"packages.container.multi_arch": "SE / Arch",
"packages.container.layers": "Calques d'image",
+ "packages.container.labels": "Labels",
"packages.container.labels.key": "Clé",
"packages.container.labels.value": "Valeur",
"packages.cran.registry": "Configurez ce registre dans le fichier Rprofile.site :",
@@ -3376,7 +3503,9 @@
"packages.debian.registry.info": "Choisissez $distribution et $component dans la liste ci-dessous.",
"packages.debian.install": "Pour installer le paquet, exécutez la commande suivante :",
"packages.debian.repository": "Infos sur le Dépôt",
+ "packages.debian.repository.distributions": "Distributions",
"packages.debian.repository.components": "Composants",
+ "packages.debian.repository.architectures": "Architectures",
"packages.generic.download": "Télécharger le paquet depuis un terminal :",
"packages.go.install": "Installer le paquet à partir de la ligne de commande :",
"packages.helm.registry": "Configurer ce registre à partir d'un terminal :",
@@ -3405,6 +3534,7 @@
"packages.rpm.distros.suse": "sur les distributions basées sur SUSE",
"packages.rpm.install": "Pour installer le paquet, exécutez la commande suivante :",
"packages.rpm.repository": "Informations sur le Dépôt",
+ "packages.rpm.repository.architectures": "Architectures",
"packages.rpm.repository.multiple_groups": "Ce paquet est disponible en plusieurs groupes.",
"packages.rubygems.install": "Pour installer le paquet en utilisant gem, exécutez la commande suivante :",
"packages.rubygems.install2": "ou ajoutez-le au Gemfile :",
@@ -3462,8 +3592,10 @@
"packages.owner.settings.chef.title": "Dépôt Chef",
"packages.owner.settings.chef.keypair": "Générer une paire de clés",
"packages.owner.settings.chef.keypair.description": "Une paire de clés est nécessaire pour s'authentifier au registre Chef. Si vous avez déjà généré une paire de clés, la génération d'une nouvelle paire de clés supprimera l'ancienne.",
+ "secrets.secrets": "Secrets",
"secrets.description": "Les secrets seront transmis à certaines actions et ne pourront pas être lus autrement.",
"secrets.none": "Il n'y a pas encore de secrets.",
+ "secrets.creation.description": "Description",
"secrets.creation.name_placeholder": "Caractères alphanumériques ou tirets bas uniquement, insensibles à la casse, ne peut commencer par GITEA_ ou GITHUB_.",
"secrets.creation.value_placeholder": "Entrez n’importe quoi. Les blancs cernant seront taillés.",
"secrets.creation.description_placeholder": "Décrire brièvement votre secret (optionnel).",
@@ -3476,6 +3608,7 @@
"secrets.deletion.success": "Le secret a été supprimé.",
"secrets.deletion.failed": "Impossible de supprimer le secret.",
"secrets.management": "Gestion des Secrets",
+ "actions.actions": "Actions",
"actions.unit.desc": "Gérer les actions",
"actions.status.unknown": "Inconnu",
"actions.status.waiting": "En attente",
@@ -3490,7 +3623,11 @@
"actions.runners.new": "Créer un nouvel exécuteur",
"actions.runners.new_notice": "Comment démarrer un exécuteur",
"actions.runners.status": "Statut",
+ "actions.runners.id": "ID",
"actions.runners.name": "Nom",
+ "actions.runners.owner_type": "Type",
+ "actions.runners.description": "Description",
+ "actions.runners.labels": "Labels",
"actions.runners.last_online": "Dernière fois en ligne",
"actions.runners.runner_title": "Exécuteur",
"actions.runners.task_list": "Tâches récentes sur cet exécuteur",
@@ -3514,6 +3651,7 @@
"actions.runners.status.idle": "Inactif",
"actions.runners.status.active": "Actif",
"actions.runners.status.offline": "Hors-ligne",
+ "actions.runners.version": "Version",
"actions.runners.reset_registration_token": "Réinitialiser le jeton d'enregistrement",
"actions.runners.reset_registration_token_confirm": "Voulez-vous révoquer le jeton actuel et en générer un nouveau ?",
"actions.runners.reset_registration_token_success": "Le jeton d’inscription de l’exécuteur a été réinitialisé avec succès",
@@ -3554,6 +3692,7 @@
"actions.workflow.has_no_workflow_dispatch": "Le flux de travail %s n’a pas de déclencheur d’événement workflow_dispatch.",
"actions.need_approval_desc": "Besoin d’approbation pour exécuter des flux de travail pour une demande d’ajout de bifurcation.",
"actions.approve_all_success": "Tous les flux de travail ont été acceptés.",
+ "actions.variables": "Variables",
"actions.variables.management": "Gestion des variables",
"actions.variables.creation": "Ajouter une variable",
"actions.variables.none": "Il n'y a pas encore de variables.",
@@ -3584,6 +3723,7 @@
"projects.type-3.display_name": "Projet d’organisation",
"projects.enter_fullscreen": "Plein écran",
"projects.exit_fullscreen": "Quitter le plein écran",
+ "git.filemode.changed_filemode": "%[1]s → %[2]s",
"git.filemode.directory": "Dossier",
"git.filemode.normal_file": "Fichier normal",
"git.filemode.executable_file": "Fichier exécutable",
diff --git a/options/locale/locale_ga-IE.json b/options/locale/locale_ga-IE.json
index 679963630a..14123c5002 100644
--- a/options/locale/locale_ga-IE.json
+++ b/options/locale/locale_ga-IE.json
@@ -32,6 +32,7 @@
"password": "Pasfhocal",
"access_token": "Comhartha Rochtana",
"re_type": "Deimhnigh Pasfhocal",
+ "captcha": "CAPTCHA",
"twofa": "Fíordheimhniú Dhá-Fhachtóir",
"twofa_scratch": "Cód Scratch Dhá-Fhachtóra",
"passcode": "Paschód",
@@ -132,6 +133,7 @@
"confirm_delete_selected": "Deimhnigh chun gach earra roghnaithe a scriosadh?",
"name": "Ainm",
"value": "Luach",
+ "readme": "Léigh-mé",
"filter_title": "Scagaire",
"filter.clear": "Scagaire Soiléir",
"filter.is_archived": "Cartlannaithe",
@@ -229,6 +231,7 @@
"install.db_name": "Ainm Bunachar Sonraí",
"install.db_schema": "Scéim",
"install.db_schema_helper": "Fág bán le haghaidh réamhshocraithe bunachar sonraí (\"poiblí\").",
+ "install.ssl_mode": "SSL",
"install.path": "Cosán",
"install.sqlite_helper": "Conair comhad don bhunachar sonraí SQLite3.\nCuir isteach cosán iomlán má reáchtáil tú Gitea mar sheirbhís.",
"install.reinstall_error": "Tá tú ag iarraidh a shuiteáil i mbunachar sonraí Gitea atá ann cheana",
@@ -406,6 +409,7 @@
"auth.twofa_scratch_token_incorrect": "Tá do chód scratch mícheart.",
"auth.twofa_required": "Ní mór duit fíordheimhniú dhá fhachtóir a shocrú chun rochtain a fháil ar stórtha, nó iarracht a dhéanamh logáil isteach arís.",
"auth.login_userpass": "Sínigh isteach",
+ "auth.login_openid": "OpenID",
"auth.oauth_signup_tab": "Cláraigh Cuntas Nua",
"auth.oauth_signup_title": "Comhlánaigh Cuntas Nua",
"auth.oauth_signup_submit": "Cuntas Comhlánaigh",
@@ -654,6 +658,7 @@
"settings.twofa": "Fíordheimhniú Dhá Fachtóir (TOTP)",
"settings.account_link": "Cuntais Nasctha",
"settings.organization": "Eagraíochtaí",
+ "settings.uid": "UID",
"settings.webauthn": "Fíordheimhniú Dhá-Fachtóir (Eochracha Slándála)",
"settings.public_profile": "Próifíl Phoiblí",
"settings.biography_placeholder": "Inis dúinn beagán fút féin! (Is féidir leat Markdown a úsáid)",
@@ -972,6 +977,7 @@
"repo.fork.blocked_user": "Ní féidir an stór a fhorcáil toisc go bhfuil úinéir an stórais bac ort.",
"repo.use_template": "Úsáid an teimpléad seo",
"repo.open_with_editor": "Oscail le %s",
+ "repo.download_directory_as": "Íoslódáil an eolaire mar %s",
"repo.download_zip": "Íoslódáil ZIP",
"repo.download_tar": "Íoslódáil TAR.GZ",
"repo.download_bundle": "Íoslódáil BUNDLE",
@@ -991,6 +997,7 @@
"repo.multiple_licenses": "Ceadúnais Iolracha",
"repo.object_format": "Formáid Oibiacht",
"repo.object_format_helper": "Formáid oibiacht an stór. Ní féidir é a athrú níos déanaí. Is é SHA1 an comhoiriúnacht is fearr.",
+ "repo.readme": "LÉIGHMÉ",
"repo.readme_helper": "Roghnaigh comhad teimpléad README.",
"repo.readme_helper_desc": "Seo an áit inar féidir leat cur síos iomlán a scríobh do thionscadal.",
"repo.auto_init": "Taisce a thionscnamh (Cuireann sé .gitignore, Ceadúnas agus README)",
@@ -1055,6 +1062,7 @@
"repo.desc.template": "Teimpléad",
"repo.desc.internal": "Inmheánach",
"repo.desc.archived": "Cartlannaithe",
+ "repo.desc.sha256": "SHA256",
"repo.template.items": "Míreanna Teimpléad",
"repo.template.git_content": "Ábhar Git (Brainse Réamhshocraithe)",
"repo.template.git_hooks": "Crúcanna Git",
@@ -1083,6 +1091,7 @@
"repo.migrate_options_lfs_endpoint.description.local": "Tacaítear le cosán freastalaí áitiúil freisin.",
"repo.migrate_options_lfs_endpoint.placeholder": "Mura bhfágtar bán é, díorthófar an críochphointe ón URL clónála.",
"repo.migrate_items": "Míreanna Imirce",
+ "repo.migrate_items_wiki": "Vicí",
"repo.migrate_items_milestones": "Clocha míle",
"repo.migrate_items_labels": "Lipéid",
"repo.migrate_items_issues": "Saincheisteanna",
@@ -1481,6 +1490,7 @@
"repo.issues.filter_sort.feweststars": "An líon réaltaí is lú",
"repo.issues.filter_sort.mostforks": "An líon forcanna is mó",
"repo.issues.filter_sort.fewestforks": "An líon forcanna is lú",
+ "repo.issues.quick_goto": "Téigh go dtí an cheist",
"repo.issues.action_open": "Oscailte",
"repo.issues.action_close": "Dún",
"repo.issues.action_label": "Lipéad",
@@ -1693,6 +1703,7 @@
"repo.issues.review.content.empty": "Ní mór duit trácht a fhágáil a léiríonn an t-athrú (í) iarrtha.",
"repo.issues.review.reject": "athruithe iarrtha %s",
"repo.issues.review.wait": "iarradh athbhreithniú %s",
+ "repo.issues.review.codeowners_rules": "Rialacha CÓDÚINÉIRÍ",
"repo.issues.review.add_review_request": "athbhreithniú iarrtha ó %s %s",
"repo.issues.review.remove_review_request": "iarratas athbhreithnithe bainte le haghaidh %s %s",
"repo.issues.review.remove_review_request_self": "dhiúltaigh athbhreithniú a dhéanamh ar %s",
@@ -1728,8 +1739,11 @@
"repo.issues.reference_link": "Tagairt: %s",
"repo.compare.compare_base": "bonn",
"repo.compare.compare_head": "déan comparáid",
+ "repo.compare.title": "Athruithe a chur i gcomparáid",
+ "repo.compare.description": "Roghnaigh dhá bhrainse nó clib chun a fheiceáil cad atá athraithe nó chun iarratas tarraingthe nua a thosú.",
"repo.pulls.desc": "Cumasaigh iarratais tarraingthe agus athbhreithnithe cód.",
"repo.pulls.new": "Iarratas Tarraingthe Nua",
+ "repo.pulls.new.description": "Pléigh agus athbhreithnigh na hathruithe sa chomparáid seo le daoine eile.",
"repo.pulls.new.blocked_user": "Ní féidir iarratas tarraingthe a chruthú toisc go bhfuil úinéir an stórais bac ort.",
"repo.pulls.new.must_collaborator": "Caithfidh tú a bheith ina chomhoibritheoir chun iarratas tarraingthe a chruthú.",
"repo.pulls.new.already_existed": "Tá iarratas tarraingthe idir na brainsí seo ann cheana féin",
@@ -1739,7 +1753,6 @@
"repo.pulls.allow_edits_from_maintainers": "Ceadaigh eagarthóirí ó chothabhálaí",
"repo.pulls.allow_edits_from_maintainers_desc": "Is féidir le húsáideoirí a bhfuil rochtain scríofa acu ar an mbunbhrainse brú chuig an bhrainse",
"repo.pulls.allow_edits_from_maintainers_err": "Theip ar nuashonrú",
- "repo.pulls.compare_changes_desc": "Roghnaigh an brainse le cumasc isteach agus an brainse le tarraingt uaidh.",
"repo.pulls.has_viewed_file": "Breathnaithe",
"repo.pulls.has_changed_since_last_review": "Athraithe ó d'athbhreithniú deire",
"repo.pulls.viewed_files_label": "Breathnaíodh ar %[1]d / %[2]d comhaid",
@@ -1765,6 +1778,8 @@
"repo.pulls.title_desc": "ag iarraidh %[1]d gealltanas a chumasc ó %[2]s go %[3]s",
"repo.pulls.merged_title_desc": "cumasc %[1]d tiomantas ó %[2]s go %[3]s %[4]s",
"repo.pulls.change_target_branch_at": "athraigh an spriocbhrainse ó %s go %s %s",
+ "repo.pulls.marked_as_work_in_progress_at": "marcáladh an iarratas tarraingthe mar obair ar siúl %s",
+ "repo.pulls.marked_as_ready_for_review_at": "marcáladh an iarratas tarraingthe mar réidh le haghaidh athbhreithnithe %s",
"repo.pulls.tab_conversation": "Comhrá",
"repo.pulls.tab_commits": "Tiomáintí",
"repo.pulls.tab_files": "Comhaid Athraithe",
@@ -1783,6 +1798,7 @@
"repo.pulls.remove_prefix": "Bain an réimír %s",
"repo.pulls.data_broken": "Tá an t-iarratas tarraingthe seo briste mar gheall ar fhaisnéis forc a bheith in easnamh.",
"repo.pulls.files_conflicted": "Tá athruithe ag an iarratas tarraingthe seo atá contrártha leis an spriocbhrainse.",
+ "repo.pulls.files_conflicted_no_listed_files": "(Níl aon chomhaid choinbhleacha liostaithe)",
"repo.pulls.is_checking": "Ag seiceáil le haghaidh coinbhleachtaí cumaisc…",
"repo.pulls.is_ancestor": "Tá an brainse seo san áireamh cheana féin sa spriocbhrainse. Níl aon rud le cumasc.",
"repo.pulls.is_empty": "Tá na hathruithe ar an mbrainse seo ar an spriocbhrainse cheana féin. Is tiomantas folamh é seo.",
@@ -1837,7 +1853,8 @@
"repo.pulls.status_checking": "Tá roinnt seiceála ar feitheamh",
"repo.pulls.status_checks_success": "D'éirigh le gach seiceáil",
"repo.pulls.status_checks_warning": "Thuairiscigh roinnt seiceálacha rabhaidh",
- "repo.pulls.status_checks_failure": "Theip ar roinnt seiceálacha",
+ "repo.pulls.status_checks_failure_required": "Theip ar roinnt seiceálacha riachtanacha",
+ "repo.pulls.status_checks_failure_optional": "Theip ar roinnt seiceálacha roghnacha",
"repo.pulls.status_checks_error": "Thug roinnt seiceálacha earráidí",
"repo.pulls.status_checks_requested": "Riachtanach",
"repo.pulls.status_checks_details": "Sonraí",
@@ -2107,6 +2124,8 @@
"repo.settings.pulls.ignore_whitespace": "Déan neamhaird de spás bán le haghaidh coinbhleachtaí",
"repo.settings.pulls.enable_autodetect_manual_merge": "Cumasaigh cumasc láimhe autodetector (Nóta: I roinnt cásanna speisialta, is féidir míbhreithiúnais tarlú)",
"repo.settings.pulls.allow_rebase_update": "Cumasaigh brainse iarratais tarraingthe a nuashonrú trí athbhunú",
+ "repo.settings.pulls.default_target_branch": "Brainse sprice réamhshocraithe le haghaidh iarratais tarraingthe nua",
+ "repo.settings.pulls.default_target_branch_default": "Brainse réamhshocraithe (%s)",
"repo.settings.pulls.default_delete_branch_after_merge": "Scrios brainse an iarratais tarraingthe tar éis cumasc de réir réamhshocraithe",
"repo.settings.pulls.default_allow_edits_from_maintainers": "Ceadaigh eagarthóirí ó chothabhálaí de réir réamhshocraithe",
"repo.settings.releases_desc": "Cumasaigh Eisiúintí Stórais",
@@ -2313,8 +2332,19 @@
"repo.settings.slack_domain": "Fearann",
"repo.settings.slack_channel": "Cainéal",
"repo.settings.add_web_hook_desc": "Comhtháthaigh %s isteach i do stóras.",
+ "repo.settings.web_hook_name_gitea": "Gitea",
+ "repo.settings.web_hook_name_gogs": "Gogs",
+ "repo.settings.web_hook_name_slack": "Slack",
+ "repo.settings.web_hook_name_discord": "Discord",
+ "repo.settings.web_hook_name_dingtalk": "DingTalk",
"repo.settings.web_hook_name_telegram": "Teileagram",
"repo.settings.web_hook_name_matrix": "Maitrís",
+ "repo.settings.web_hook_name_msteams": "Microsoft Teams",
+ "repo.settings.web_hook_name_feishu_or_larksuite": "Feishu / Lark Suite",
+ "repo.settings.web_hook_name_feishu": "Feishu",
+ "repo.settings.web_hook_name_larksuite": "Lark Suite",
+ "repo.settings.web_hook_name_wechatwork": "WeCom (Wechat Work)",
+ "repo.settings.web_hook_name_packagist": "Packagist",
"repo.settings.packagist_username": "Ainm úsáideora Pacagist",
"repo.settings.packagist_api_token": "Comhartha API",
"repo.settings.packagist_package_url": "URL pacáiste Packagist",
@@ -2408,9 +2438,10 @@
"repo.settings.block_outdated_branch_desc": "Ní bheidh cumasc indéanta nuair a bhíonn ceannbhrainse taobh thiar de bhronnbhrainse.",
"repo.settings.block_admin_merge_override": "Ní mór do riarthóirí rialacha cosanta brainse a leanúint",
"repo.settings.block_admin_merge_override_desc": "Ní mór do riarthóirí rialacha cosanta brainse a leanúint agus ní féidir leo iad a sheachaint.",
- "repo.settings.default_branch_desc": "Roghnaigh brainse stóras réamhshocraithe le haghaidh iarratas tarraingte agus geallann an cód:",
+ "repo.settings.default_branch_desc": "Roghnaigh brainse réamhshocraithe le haghaidh tiomnuithe cóid.",
+ "repo.settings.default_target_branch_desc": "Is féidir le hiarratais tarraingthe brainse sprice réamhshocraithe difriúil a úsáid má tá sé socraithe sa rannán Iarratais Tarraingthe de na Socruithe Ardleibhéil Stórála.",
"repo.settings.merge_style_desc": "Stíleanna Cumaisc",
- "repo.settings.default_merge_style_desc": "Stíl Cumaisc Réamhshocraithe",
+ "repo.settings.default_merge_style_desc": "Stíl chumasc réamhshocraithe",
"repo.settings.choose_branch": "Roghnaigh brainse…",
"repo.settings.no_protected_branch": "Níl aon bhrainsí cosanta ann.",
"repo.settings.edit_protected_branch": "Cuir in eagar",
@@ -2460,6 +2491,7 @@
"repo.settings.unarchive.success": "Rinneadh an stóras a dhíchartlann go rathúil.",
"repo.settings.unarchive.error": "Tharla earráid agus tú ag iarraidh an stóras a dhíchartlannú. Féach an logáil le haghaidh tuilleadh sonraí.",
"repo.settings.update_avatar_success": "Nuashonraíodh avatar an stóras.",
+ "repo.settings.lfs": "LFS",
"repo.settings.lfs_filelist": "Comhaid LFS a stóráiltear sa stóras seo",
"repo.settings.lfs_no_lfs_files": "Níl aon chomhaid LFS stóráilte sa stóras seo",
"repo.settings.lfs_findcommits": "Aimsigh gealltanais",
@@ -2479,6 +2511,7 @@
"repo.settings.lfs_force_unlock": "Díghlasáil Fórsa",
"repo.settings.lfs_pointers.found": "Fuarthas %d pointeoir(í) bloba — %d gaolmhar, %d neamhghaolmhar (%d ar iarraidh ón stóras)",
"repo.settings.lfs_pointers.sha": "SHA Blob",
+ "repo.settings.lfs_pointers.oid": "OID",
"repo.settings.lfs_pointers.inRepo": "I Stóras",
"repo.settings.lfs_pointers.exists": "Ann sa siopa",
"repo.settings.lfs_pointers.accessible": "Inrochtana don Úsáideoir",
@@ -2519,8 +2552,8 @@
"repo.diff.too_many_files": "Níor taispeánadh roinnt comhad mar go bhfuil an iomarca comhad athraithe sa difríocht seo",
"repo.diff.show_more": "Taispeáin Tuilleadh",
"repo.diff.load": "Difríocht Luchtaigh",
- "repo.diff.generated": "a ghintear",
- "repo.diff.vendored": "curtha ar fáil",
+ "repo.diff.generated": "Gineadh",
+ "repo.diff.vendored": "Díoltóir",
"repo.diff.comment.add_line_comment": "Cuir trácht líne leis",
"repo.diff.comment.placeholder": "Fág trácht",
"repo.diff.comment.add_single_comment": "Cuir trácht aonair leis",
@@ -2620,7 +2653,7 @@
"repo.branch.restore_success": "Tá brainse \"%s\" curtha ar ais.",
"repo.branch.restore_failed": "Theip ar chur ar ais brainse \"%s\".",
"repo.branch.protected_deletion_failed": "Tá brainse \"%s\" cosanta. Ní féidir é a scriosadh.",
- "repo.branch.default_deletion_failed": "Is é brainse \"%s\" an brainse réamhshocraithe. Ní féidir é a scriosadh.",
+ "repo.branch.default_deletion_failed": "Is é brainse \"%s\" an brainse sprioc réamhshocraithe nó an brainse sprioc don iarratas tarraingthe. Ní féidir é a scriosadh.",
"repo.branch.default_branch_not_exist": "Níl an brainse réamhshocraithe \"%s\" ann.",
"repo.branch.restore": "Athchóirigh Brainse \"%s\"",
"repo.branch.download": "Brainse Íosluchtaithe \"%s\"",
@@ -2637,7 +2670,7 @@
"repo.branch.new_branch_from": "Cruthaigh brainse nua ó \"%s\"",
"repo.branch.renamed": "Ainmníodh brainse %s go %s.",
"repo.branch.rename_default_or_protected_branch_error": "Ní féidir ach le riarthóirí brainsí réamhshocraithe nó cosanta a athainmniú.",
- "repo.branch.rename_protected_branch_failed": "Tá an brainse seo faoi chosaint ag rialacha cosanta domhanda.",
+ "repo.branch.rename_protected_branch_failed": "Theip ar athainmniú na brainse mar gheall ar rialacha cosanta brainse.",
"repo.branch.commits_divergence_from": "Difríocht tiomantais: %[1]d taobh thiar agus %[2]d chun tosaigh ar %[3]s",
"repo.branch.commits_no_divergence": "Mar an gcéanna le brainse %[1]s",
"repo.tag.create_tag": "Cruthaigh clib %s",
@@ -2844,6 +2877,7 @@
"admin.dashboard.task.finished": "Tasc: Tá %[1]s tosaithe ag %[2]s críochnaithe",
"admin.dashboard.task.unknown": "Tasc anaithnid: %[1]s",
"admin.dashboard.cron.started": "Cron tosaithe: %[1]s",
+ "admin.dashboard.cron.process": "Cron: %[1]s",
"admin.dashboard.cron.cancelled": "Cron: %[1]s cealaithe: %[3]s",
"admin.dashboard.cron.error": "Earráid i gCron: %s: %[3]s",
"admin.dashboard.cron.finished": "Cron: %[1]s críochnaithe",
@@ -2923,6 +2957,7 @@
"admin.users.reserved": "In áirithe",
"admin.users.bot": "Bota",
"admin.users.remote": "Iargúlta",
+ "admin.users.2fa": "2FA",
"admin.users.repos": "Stórais",
"admin.users.created": "Cruthaithe",
"admin.users.last_login": "Sínigh Isteach Deiridh",
@@ -3044,6 +3079,7 @@
"admin.auths.attribute_mail": "Tréith ríomhphoist",
"admin.auths.attribute_ssh_public_key": "Tréith Eochair SSH Phoiblí",
"admin.auths.attribute_avatar": "Tréith Avatar",
+ "admin.auths.ssh_keys_are_verified": "Meastar gur fíoraithe iad eochracha SSH in LDAP",
"admin.auths.attributes_in_bind": "Faigh tréithe i gComhthéacs Bind DN",
"admin.auths.allow_deactivate_all": "Lig do thoradh cuardaigh folamh gach úsáideoir a dhíghníomhachtú",
"admin.auths.use_paged_search": "Úsáid Cuardach Leathanaigh",
@@ -3177,6 +3213,7 @@
"admin.config.db_name": "Ainm",
"admin.config.db_user": "Ainm úsáideora",
"admin.config.db_schema": "Scéim",
+ "admin.config.db_ssl_mode": "SSL",
"admin.config.db_path": "Cosán",
"admin.config.service_config": "Cumraíocht Seirbhíse",
"admin.config.register_email_confirm": "Deimhniú Ríomhphost a éileamh chun Clárú",
@@ -3254,8 +3291,6 @@
"admin.config.git_gc_args": "Argóintí GC",
"admin.config.git_migrate_timeout": "Teorainn Ama Imirce",
"admin.config.git_mirror_timeout": "Teorainn Ama Nuashonraithe Scátháin",
- "admin.config.git_clone_timeout": "Teorainn Ama Oibríochta Clón",
- "admin.config.git_pull_timeout": "Tarraing Am Oibríochta",
"admin.config.git_gc_timeout": "Teorainn Ama Oibriúcháin GC",
"admin.config.log_config": "Cumraíocht Logáil",
"admin.config.logger_name_fmt": "Logálaí: %s",
@@ -3430,6 +3465,7 @@
"packages.assets": "Sócmhainní",
"packages.versions": "Leaganacha",
"packages.versions.view_all": "Féach ar gach",
+ "packages.dependency.id": "ID",
"packages.dependency.version": "Leagan",
"packages.search_in_external_registry": "Cuardaigh i %s",
"packages.alpine.registry": "Socraigh an clárlann seo tríd an URL a chur i do chomhad /etc/apk/repositories:",
@@ -3594,6 +3630,7 @@
"actions.runners.new": "Cruthaigh reathaí nua",
"actions.runners.new_notice": "Conas reathaí a thosú",
"actions.runners.status": "Stádas",
+ "actions.runners.id": "ID",
"actions.runners.name": "Ainm",
"actions.runners.owner_type": "Cineál",
"actions.runners.description": "Cur síos",
@@ -3693,9 +3730,10 @@
"projects.type-3.display_name": "Tionscadal Eagrúcháin",
"projects.enter_fullscreen": "Lánscáileán",
"projects.exit_fullscreen": "Scoir Lánscáileáin",
+ "git.filemode.changed_filemode": "%[1]s → %[2]s",
"git.filemode.directory": "Eolaire",
- "git.filemode.normal_file": "Comhad gnáth",
- "git.filemode.executable_file": "Comhad infheidhmithe",
+ "git.filemode.normal_file": "Rialta",
+ "git.filemode.executable_file": "Inrite",
"git.filemode.symbolic_link": "Nasc siombalach",
"git.filemode.submodule": "Fo-mhodúl"
}
diff --git a/options/locale/locale_ja-JP.json b/options/locale/locale_ja-JP.json
index 4c62a758b6..ffbd8c0d9e 100644
--- a/options/locale/locale_ja-JP.json
+++ b/options/locale/locale_ja-JP.json
@@ -12,6 +12,7 @@
"link_account": "アカウント連携",
"register": "登録",
"version": "バージョン",
+ "powered_by": "Powered by %s",
"page": "ページ",
"template": "テンプレート",
"language": "言語",
@@ -31,6 +32,7 @@
"password": "パスワード",
"access_token": "アクセストークン",
"re_type": "パスワード確認",
+ "captcha": "CAPTCHA",
"twofa": "2要素認証",
"twofa_scratch": "2要素認証スクラッチコード",
"passcode": "パスコード",
@@ -74,6 +76,7 @@
"pull_requests": "プルリクエスト",
"issues": "イシュー",
"milestones": "マイルストーン",
+ "ok": "OK",
"cancel": "キャンセル",
"retry": "再試行",
"rerun": "再実行",
@@ -130,6 +133,7 @@
"confirm_delete_selected": "選択したすべてのアイテムを削除してよろしいですか?",
"name": "名称",
"value": "値",
+ "readme": "Readme",
"filter_title": "フィルター",
"filter.clear": "フィルターをクリア",
"filter.is_archived": "アーカイブ",
@@ -144,6 +148,13 @@
"filter.private": "プライベート",
"no_results_found": "見つかりません。",
"internal_error_skipped": "内部エラーが発生しましたがスキップされました: %s",
+ "characters_spaces": "スペース",
+ "characters_tabs": "タブ",
+ "text_indent_style": "インデントスタイル",
+ "text_indent_size": "インデントサイズ",
+ "text_line_wrap": "折り返す",
+ "text_line_nowrap": "折り返さない",
+ "text_line_wrap_mode": "行折り返しモード",
"search.search": "検索…",
"search.type_tooltip": "検索タイプ",
"search.fuzzy": "あいまい",
@@ -183,6 +194,7 @@
"editor.buttons.heading.tooltip": "見出し追加",
"editor.buttons.bold.tooltip": "太字追加",
"editor.buttons.italic.tooltip": "イタリック体追加",
+ "editor.buttons.strikethrough.tooltip": "取り消し線のテキストを追加",
"editor.buttons.quote.tooltip": "引用",
"editor.buttons.code.tooltip": "コード追加",
"editor.buttons.link.tooltip": "リンク追加",
@@ -198,6 +210,8 @@
"editor.buttons.switch_to_legacy.tooltip": "レガシーエディタを使用する",
"editor.buttons.enable_monospace_font": "等幅フォントを有効にする",
"editor.buttons.disable_monospace_font": "等幅フォントを無効にする",
+ "filter.string.asc": "A–Z",
+ "filter.string.desc": "Z–A",
"error.occurred": "エラーが発生しました",
"error.report_message": "Gitea のバグが疑われる場合は、GitHubでIssueを検索して、見つからなければ新しいIssueを作成してください。",
"error.not_found": "ターゲットが見つかりませんでした。",
@@ -224,6 +238,7 @@
"install.db_name": "データベース名",
"install.db_schema": "スキーマ",
"install.db_schema_helper": "空の場合はデータベースのデフォルト(\"public\")となります。",
+ "install.ssl_mode": "SSL",
"install.path": "パス",
"install.sqlite_helper": "SQLite3のデータベースファイルパス。\nGiteaをサービスとして実行する場合は絶対パスを入力します。",
"install.reinstall_error": "既存のGiteaデータベースへインストールしようとしています",
@@ -401,6 +416,7 @@
"auth.twofa_scratch_token_incorrect": "スクラッチコードが正しくありません。",
"auth.twofa_required": "リポジトリにアクセスするには2段階認証を設定するか、再度ログインしてください。",
"auth.login_userpass": "サインイン",
+ "auth.login_openid": "OpenID",
"auth.oauth_signup_tab": "新規アカウント登録",
"auth.oauth_signup_title": "新規アカウントの仕上げ",
"auth.oauth_signup_submit": "アカウント登録完了",
@@ -505,6 +521,7 @@
"form.Password": "パスワード",
"form.Retype": "パスワード確認",
"form.SSHTitle": "SSHキー名",
+ "form.HttpsUrl": "HTTPS URL",
"form.PayloadUrl": "ペイロードのURL",
"form.TeamName": "チーム名",
"form.AuthName": "承認名",
@@ -648,6 +665,7 @@
"settings.twofa": "2要素認証 (TOTP)",
"settings.account_link": "連携アカウント",
"settings.organization": "組織",
+ "settings.uid": "UID",
"settings.webauthn": "2要素認証 (セキュリティキー)",
"settings.public_profile": "公開プロフィール",
"settings.biography_placeholder": "自己紹介してください!(Markdownを使うことができます)",
@@ -740,6 +758,7 @@
"settings.add_email": "メールアドレスを追加",
"settings.add_openid": "OpenID URIを追加する",
"settings.add_email_confirmation_sent": "\"%s\" に確認メールを送信しました。 %s以内に受信トレイを確認し、メールアドレス確認を行ってください。",
+ "settings.email_primary_not_found": "選択したメールアドレスが見つかりませんでした。",
"settings.add_email_success": "新しいメールアドレスを追加しました。",
"settings.email_preference_set_success": "メール設定を保存しました。",
"settings.add_openid_success": "新しいOpenIDアドレスを追加しました。",
@@ -966,6 +985,7 @@
"repo.fork.blocked_user": "リポジトリのオーナーがあなたをブロックしているため、リポジトリをフォークできません。",
"repo.use_template": "このテンプレートを使用",
"repo.open_with_editor": "%s で開く",
+ "repo.download_directory_as": "%sとしてディレクトリをダウンロード",
"repo.download_zip": "ZIPファイルをダウンロード",
"repo.download_tar": "TAR.GZファイルをダウンロード",
"repo.download_bundle": "バンドルをダウンロード",
@@ -985,6 +1005,7 @@
"repo.multiple_licenses": "複数のライセンス",
"repo.object_format": "オブジェクトのフォーマット",
"repo.object_format_helper": "リポジトリのオブジェクトフォーマット。後で変更することはできません。SHA1 は最も互換性があります。",
+ "repo.readme": "README",
"repo.readme_helper": "READMEファイル テンプレートを選択してください。",
"repo.readme_helper_desc": "プロジェクトについての説明をひととおり書く場所です。",
"repo.auto_init": "リポジトリの初期設定 (.gitignore、ライセンスファイル、READMEファイルの追加)",
@@ -997,6 +1018,7 @@
"repo.default_branch": "デフォルトブランチ",
"repo.default_branch_label": "デフォルト",
"repo.default_branch_helper": "デフォルトブランチはプルリクエストとコードコミットのベースブランチとなります。",
+ "repo.mirror_prune": "Prune",
"repo.mirror_prune_desc": "不要になった古いリモートトラッキング参照を削除",
"repo.mirror_interval": "ミラー間隔 (有効な時間の単位は'h'、'm'、's')。 定期的な同期を無効にする場合は0。(最小間隔: %s)",
"repo.mirror_interval_invalid": "ミラー間隔が不正です。",
@@ -1006,6 +1028,7 @@
"repo.mirror_address_desc": "必要な資格情報は「認証」セクションに設定してください。",
"repo.mirror_address_url_invalid": "入力したURLは無効です。 URLの構成要素はすべて正しくエスケープしてください。",
"repo.mirror_address_protocol_invalid": "入力したURLは無効です。 ミラーできるのは、http(s):// または git:// からだけです。",
+ "repo.mirror_lfs": "Large File Storage (LFS)",
"repo.mirror_lfs_desc": "LFS データのミラーリングを有効にする。",
"repo.mirror_lfs_endpoint": "LFS エンドポイント",
"repo.mirror_lfs_endpoint_desc": "同期するときは、クローンURLをもとにLFSサーバーを決定しようとします。 リポジトリのLFSデータがほかの場所に保存されている場合は、独自のエンドポイントを指定することができます。",
@@ -1047,6 +1070,7 @@
"repo.desc.template": "テンプレート",
"repo.desc.internal": "内部",
"repo.desc.archived": "アーカイブ",
+ "repo.desc.sha256": "SHA256",
"repo.template.items": "テンプレート項目",
"repo.template.git_content": "Gitコンテンツ (デフォルトブランチ)",
"repo.template.git_hooks": "Gitフック",
@@ -1075,6 +1099,7 @@
"repo.migrate_options_lfs_endpoint.description.local": "ローカルサーバーのパスもサポートされています。",
"repo.migrate_options_lfs_endpoint.placeholder": "空にするとエンドポイントはクローン URL から決定されます。",
"repo.migrate_items": "移行する項目",
+ "repo.migrate_items_wiki": "Wiki",
"repo.migrate_items_milestones": "マイルストーン",
"repo.migrate_items_labels": "ラベル",
"repo.migrate_items_issues": "イシュー",
@@ -1124,6 +1149,7 @@
"repo.migration_status": "移行状況",
"repo.mirror_from": "ミラー元",
"repo.forked_from": "フォーク元",
+ "repo.generated_from": "generated from",
"repo.fork_from_self": "自分が所有しているリポジトリはフォークできません。",
"repo.fork_guest_user": "リポジトリをフォークするにはサインインしてください。",
"repo.watch_guest_user": "リポジトリをウォッチするにはサインインしてください。",
@@ -1157,6 +1183,7 @@
"repo.pulls": "プルリクエスト",
"repo.projects": "プロジェクト",
"repo.packages": "パッケージ",
+ "repo.actions": "Actions",
"repo.labels": "ラベル",
"repo.org_labels_desc": "組織で定義されているラベル (組織のすべてのリポジトリで使用可能なもの)",
"repo.org_labels_desc_manage": "編集",
@@ -1167,8 +1194,11 @@
"repo.release": "リリース",
"repo.releases": "リリース",
"repo.tag": "タグ",
+ "repo.git_tag": "Gitタグ",
"repo.released_this": "がこれをリリース",
"repo.tagged_this": "がタグ付け",
+ "repo.file.title": "%s at %s",
+ "repo.file_raw": "Raw",
"repo.file_history": "履歴",
"repo.file_view_source": "ソースを表示",
"repo.file_view_rendered": "レンダリング表示",
@@ -1204,6 +1234,7 @@
"repo.commit.contained_in_default_branch": "このコミットはデフォルトブランチに含まれています",
"repo.commit.load_referencing_branches_and_tags": "このコミットを参照しているブランチやタグを取得",
"repo.commit.merged_in_pr": "このコミットはプルリクエスト %s でマージされました。",
+ "repo.blame": "Blame",
"repo.download_file": "ファイルをダウンロード",
"repo.normal_view": "通常表示",
"repo.line": "行",
@@ -1467,6 +1498,7 @@
"repo.issues.filter_sort.feweststars": "スターが少ない順",
"repo.issues.filter_sort.mostforks": "フォークが多い順",
"repo.issues.filter_sort.fewestforks": "フォークが少ない順",
+ "repo.issues.quick_goto": "イシューへ移動",
"repo.issues.action_open": "オープン",
"repo.issues.action_close": "クローズ",
"repo.issues.action_label": "ラベル",
@@ -1627,6 +1659,7 @@
"repo.issues.push_commits_n": "が %d コミット追加 %s",
"repo.issues.force_push_codes": "が %[1]s を強制プッシュ ( %[2]s から %[4]s へ ) %[6]s",
"repo.issues.force_push_compare": "比較",
+ "repo.issues.due_date_form": "yyyy-mm-dd",
"repo.issues.due_date_form_add": "期日の追加",
"repo.issues.due_date_form_edit": "変更",
"repo.issues.due_date_form_remove": "削除",
@@ -1678,6 +1711,7 @@
"repo.issues.review.content.empty": "修正を指示するコメントを残す必要があります。",
"repo.issues.review.reject": "が変更を要請 %s",
"repo.issues.review.wait": "にレビュー依頼 %s",
+ "repo.issues.review.codeowners_rules": "CODEOWNERSルール",
"repo.issues.review.add_review_request": "が %s にレビューを依頼 %s",
"repo.issues.review.remove_review_request": "が %s へのレビュー依頼を取り消し %s",
"repo.issues.review.remove_review_request_self": "がレビューを辞退 %s",
@@ -1713,17 +1747,20 @@
"repo.issues.reference_link": "リファレンス: %s",
"repo.compare.compare_base": "基準",
"repo.compare.compare_head": "比較",
+ "repo.compare.title": "変更の比較",
+ "repo.compare.description": "ふたつのブランチまたはタグを選び、変更された内容を確認、あるいは新しいプルリクエストを開始してください。",
"repo.pulls.desc": "プルリクエストとコードレビューの有効化。",
"repo.pulls.new": "新しいプルリクエスト",
+ "repo.pulls.new.description": "この比較における変更点について議論し、レビューします。",
"repo.pulls.new.blocked_user": "リポジトリのオーナーがあなたをブロックしているため、プルリクエストを作成できません。",
"repo.pulls.new.must_collaborator": "プルリクエストを作成するには、共同作業者である必要があります。",
+ "repo.pulls.new.already_existed": "これらのブランチのプルリクエストはすでに存在します",
"repo.pulls.edit.already_changed": "プルリクエストの変更を保存できません。 他のユーザーによって内容がすでに変更されているようです。 変更を上書きしないようにするため、ページを更新してからもう一度編集してください。",
"repo.pulls.view": "プルリクエストを表示",
"repo.pulls.compare_changes": "新規プルリクエスト",
"repo.pulls.allow_edits_from_maintainers": "メンテナーからの編集を許可する",
"repo.pulls.allow_edits_from_maintainers_desc": "ベースブランチへの書き込みアクセス権を持つユーザーは、このブランチにプッシュすることもできます",
"repo.pulls.allow_edits_from_maintainers_err": "更新に失敗しました",
- "repo.pulls.compare_changes_desc": "マージ先ブランチとプル元ブランチを選択。",
"repo.pulls.has_viewed_file": "閲覧済",
"repo.pulls.has_changed_since_last_review": "前回のレビュー後に変更あり",
"repo.pulls.viewed_files_label": "%[1]d / %[2]d ファイル閲覧済み",
@@ -1749,6 +1786,8 @@
"repo.pulls.title_desc": "が %[2]s から %[3]s への %[1]d コミットのマージを希望しています",
"repo.pulls.merged_title_desc": "が %[1]d 個のコミットを %[2]s から %[3]s へマージ %[4]s",
"repo.pulls.change_target_branch_at": "がターゲットブランチを %s から %s に変更 %s",
+ "repo.pulls.marked_as_work_in_progress_at": "がこのプルリクエストを作業中(WIP)とマーク %s",
+ "repo.pulls.marked_as_ready_for_review_at": "がこのプルリクエストをレビュー可とマーク %s",
"repo.pulls.tab_conversation": "会話",
"repo.pulls.tab_commits": "コミット",
"repo.pulls.tab_files": "変更されたファイル",
@@ -1767,6 +1806,7 @@
"repo.pulls.remove_prefix": "先頭の %s を除去",
"repo.pulls.data_broken": "このプルリクエストは、フォークの情報が見つからないため壊れています。",
"repo.pulls.files_conflicted": "このプルリクエストは、ターゲットブランチと競合する変更を含んでいます。",
+ "repo.pulls.files_conflicted_no_listed_files": "(競合するファイルはありません)",
"repo.pulls.is_checking": "マージのコンフリクトを確認中…",
"repo.pulls.is_ancestor": "このブランチは既にターゲットブランチに含まれています。マージするものはありません。",
"repo.pulls.is_empty": "このブランチの変更は既にターゲットブランチにあります。これは空のコミットになります。",
@@ -1821,7 +1861,8 @@
"repo.pulls.status_checking": "いくつかのステータスチェックが待機中です",
"repo.pulls.status_checks_success": "ステータスチェックはすべて成功しました",
"repo.pulls.status_checks_warning": "ステータスチェックにより警告が出ています",
- "repo.pulls.status_checks_failure": "失敗したステータスチェックがあります",
+ "repo.pulls.status_checks_failure_required": "必須チェックに失敗しています",
+ "repo.pulls.status_checks_failure_optional": "必須ではないチェックが失敗しています",
"repo.pulls.status_checks_error": "ステータスチェックによりエラーが出ています",
"repo.pulls.status_checks_requested": "必須",
"repo.pulls.status_checks_details": "詳細",
@@ -1911,6 +1952,7 @@
"repo.signing.wont_sign.not_signed_in": "サインインしていません。",
"repo.ext_wiki": "外部Wikiへのアクセス",
"repo.ext_wiki.desc": "外部Wikiへのリンク。",
+ "repo.wiki": "Wiki",
"repo.wiki.welcome": "Wikiへようこそ。",
"repo.wiki.welcome_desc": "Wikiを使って共同作業者とドキュメンテーションの作成と共有ができます。",
"repo.wiki.desc": "共同作業者とのドキュメンテーションの作成と共有。",
@@ -1937,6 +1979,7 @@
"repo.wiki.page_name_desc": "この Wiki ページの名前を入力してください。いくつかの特別な名前として 'Home', '_Sidebar' と '_Footer' があります。",
"repo.wiki.original_git_entry_tooltip": "フレンドリーリンクを使用する代わりにオリジナルのGitファイルを表示します。",
"repo.activity": "アクティビティ",
+ "repo.activity.navbar.pulse": "Pulse",
"repo.activity.navbar.code_frequency": "コード更新頻度",
"repo.activity.navbar.contributors": "貢献者",
"repo.activity.navbar.recent_commits": "最近のコミット",
@@ -2089,6 +2132,8 @@
"repo.settings.pulls.ignore_whitespace": "空白文字のコンフリクトを無視する",
"repo.settings.pulls.enable_autodetect_manual_merge": "手動マージの自動検出を有効にする (注意: 特殊なケースでは判定ミスが発生する場合があります)",
"repo.settings.pulls.allow_rebase_update": "リベースでプルリクエストのブランチの更新を可能にする",
+ "repo.settings.pulls.default_target_branch": "新しいプルリクエストのデフォルトのターゲットブランチ",
+ "repo.settings.pulls.default_target_branch_default": "デフォルトブランチ (%s)",
"repo.settings.pulls.default_delete_branch_after_merge": "デフォルトでプルリクエストのブランチをマージ後に削除する",
"repo.settings.pulls.default_allow_edits_from_maintainers": "デフォルトでメンテナからの編集を許可する",
"repo.settings.releases_desc": "リリースを有効にする",
@@ -2210,6 +2255,8 @@
"repo.settings.add_webhook_desc": "GiteaはターゲットURLに、指定したContent TypeでPOSTリクエストを送ります。 詳細はWebhookガイドへ。",
"repo.settings.payload_url": "ターゲットURL",
"repo.settings.http_method": "HTTPメソッド",
+ "repo.settings.content_type": "POST Content Type",
+ "repo.settings.secret": "シークレット",
"repo.settings.webhook_secret_desc": "Webhookサーバーがsecretの使用をサポートしている場合は、webhookのマニュアルに従いここにsecretを入力できます。",
"repo.settings.slack_username": "ユーザー名",
"repo.settings.slack_icon_url": "アイコンのURL",
@@ -2227,6 +2274,7 @@
"repo.settings.event_delete_desc": "ブランチやタグが削除されたとき。",
"repo.settings.event_fork": "フォーク",
"repo.settings.event_fork_desc": "リポジトリがフォークされたとき。",
+ "repo.settings.event_wiki": "Wiki",
"repo.settings.event_wiki_desc": "Wikiページが作成・名前変更・編集・削除されたとき。",
"repo.settings.event_statuses": "ステータス",
"repo.settings.event_statuses_desc": "APIによってコミットのステータスが更新されたとき。",
@@ -2292,6 +2340,19 @@
"repo.settings.slack_domain": "ドメイン",
"repo.settings.slack_channel": "チャンネル",
"repo.settings.add_web_hook_desc": "%s をリポジトリと組み合わせます。",
+ "repo.settings.web_hook_name_gitea": "Gitea",
+ "repo.settings.web_hook_name_gogs": "Gogs",
+ "repo.settings.web_hook_name_slack": "Slack",
+ "repo.settings.web_hook_name_discord": "Discord",
+ "repo.settings.web_hook_name_dingtalk": "DingTalk",
+ "repo.settings.web_hook_name_telegram": "Telegram",
+ "repo.settings.web_hook_name_matrix": "Matrix",
+ "repo.settings.web_hook_name_msteams": "Microsoft Teams",
+ "repo.settings.web_hook_name_feishu_or_larksuite": "Feishu / Lark Suite",
+ "repo.settings.web_hook_name_feishu": "Feishu",
+ "repo.settings.web_hook_name_larksuite": "Lark Suite",
+ "repo.settings.web_hook_name_wechatwork": "WeCom (Wechat Work)",
+ "repo.settings.web_hook_name_packagist": "Packagist",
"repo.settings.packagist_username": "Packagist ユーザー名",
"repo.settings.packagist_api_token": "API トークン",
"repo.settings.packagist_package_url": "Packagist パッケージ URL",
@@ -2385,7 +2446,8 @@
"repo.settings.block_outdated_branch_desc": "baseブランチがheadブランチより進んでいる場合、マージできないようにします。",
"repo.settings.block_admin_merge_override": "管理者もブランチ保護のルールに従う",
"repo.settings.block_admin_merge_override_desc": "管理者はブランチ保護のルールに従う必要があり、回避することはできません。",
- "repo.settings.default_branch_desc": "プルリクエストやコミット表示のデフォルトのブランチを選択:",
+ "repo.settings.default_branch_desc": "コミット表示のデフォルトのブランチを選択します。",
+ "repo.settings.default_target_branch_desc": "プルリクエストでは、リポジトリ拡張設定の「プルリクエスト」セクションで設定することで、別のデフォルトターゲットブランチを使用できます。",
"repo.settings.merge_style_desc": "マージ スタイル",
"repo.settings.default_merge_style_desc": "デフォルトのマージスタイル",
"repo.settings.choose_branch": "ブランチを選択…",
@@ -2437,6 +2499,7 @@
"repo.settings.unarchive.success": "リポジトリのアーカイブを解除しました。",
"repo.settings.unarchive.error": "リポジトリのアーカイブ解除でエラーが発生しました。 詳細はログを確認してください。",
"repo.settings.update_avatar_success": "リポジトリのアバターを更新しました。",
+ "repo.settings.lfs": "LFS",
"repo.settings.lfs_filelist": "このリポジトリに含まれているLFSファイル",
"repo.settings.lfs_no_lfs_files": "このリポジトリにLFSファイルはありません",
"repo.settings.lfs_findcommits": "コミットを検索",
@@ -2455,6 +2518,8 @@
"repo.settings.lfs_lock_file_no_exist": "ロックしたファイルがデフォルトブランチにありません",
"repo.settings.lfs_force_unlock": "強制ロック解除",
"repo.settings.lfs_pointers.found": "%d件のblobポインタ — 登録済 %d件、未登録 %d件 (実体ファイルなし %d件)",
+ "repo.settings.lfs_pointers.sha": "Blob SHA",
+ "repo.settings.lfs_pointers.oid": "OID",
"repo.settings.lfs_pointers.inRepo": "Repo内",
"repo.settings.lfs_pointers.exists": "実ファイルあり",
"repo.settings.lfs_pointers.accessible": "アクセス可",
@@ -2468,6 +2533,7 @@
"repo.diff.browse_source": "ソースを参照",
"repo.diff.parent": "親",
"repo.diff.commit": "コミット",
+ "repo.diff.git-notes": "Notes",
"repo.diff.data_not_available": "差分はありません",
"repo.diff.options_button": "差分オプション",
"repo.diff.download_patch": "Patchファイルをダウンロード",
@@ -2494,6 +2560,8 @@
"repo.diff.too_many_files": "変更されたファイルが多すぎるため、一部のファイルは表示されません",
"repo.diff.show_more": "さらに表示",
"repo.diff.load": "差分を読み込み",
+ "repo.diff.generated": "生成ファイル",
+ "repo.diff.vendored": "ベンダーファイル",
"repo.diff.comment.add_line_comment": "行コメントを追加",
"repo.diff.comment.placeholder": "コメントを残す",
"repo.diff.comment.add_single_comment": "単独のコメントを追加",
@@ -2508,6 +2576,7 @@
"repo.diff.review.self_reject": "プルリクエストの作成者は自分のプルリクエストで変更要請できません",
"repo.diff.review.reject": "変更要請",
"repo.diff.review.self_approve": "プルリクエストの作成者は自分のプルリクエストを承認できません",
+ "repo.diff.committed_by": "committed by",
"repo.diff.protected": "保護されているファイル",
"repo.diff.image.side_by_side": "並べて表示",
"repo.diff.image.swipe": "スワイプ",
@@ -2566,6 +2635,13 @@
"repo.release.add_tag": "タグのみ作成",
"repo.release.releases_for": "%s のリリース",
"repo.release.tags_for": "%s のタグ",
+ "repo.release.notes": "リリースノート",
+ "repo.release.generate_notes": "リリースノートを生成",
+ "repo.release.generate_notes_desc": "マージされたプルリクエストと変更履歴のリンクを自動的に追加します。",
+ "repo.release.previous_tag": "前回のタグ",
+ "repo.release.generate_notes_tag_not_found": "このリポジトリにタグ \"%s\" は存在しません。",
+ "repo.release.generate_notes_target_not_found": "リリースターゲット \"%s\" が見つかりません。",
+ "repo.release.generate_notes_missing_tag": "リリースノートを生成するタグ名を入力してください。",
"repo.branch.name": "ブランチ名",
"repo.branch.already_exists": "ブランチ \"%s\" は既に存在します。",
"repo.branch.delete_head": "削除",
@@ -2585,7 +2661,7 @@
"repo.branch.restore_success": "ブランチ \"%s\" を復元しました。",
"repo.branch.restore_failed": "ブランチ \"%s\" の復元に失敗しました。",
"repo.branch.protected_deletion_failed": "ブランチ \"%s\" は保護されています。 削除できません。",
- "repo.branch.default_deletion_failed": "ブランチ \"%s\" はデフォルトブランチです。 削除できません。",
+ "repo.branch.default_deletion_failed": "ブランチ \"%s\" は、デフォルトブランチまたはプルリクエストのターゲットブランチです。 削除できません。",
"repo.branch.default_branch_not_exist": "デフォルトブランチ \"%s\" がありません。",
"repo.branch.restore": "ブランチ \"%s\" の復元",
"repo.branch.download": "ブランチ \"%s\" をダウンロード",
@@ -2602,7 +2678,7 @@
"repo.branch.new_branch_from": "\"%s\" から新しいブランチを作成",
"repo.branch.renamed": "ブランチ %s は %s にリネームされました。",
"repo.branch.rename_default_or_protected_branch_error": "デフォルトブランチや保護ブランチのリネームが可能なのは管理者だけです。",
- "repo.branch.rename_protected_branch_failed": "このブランチはglobベースの保護ルールに従って保護されています。",
+ "repo.branch.rename_protected_branch_failed": "ブランチ保護ルールにより、ブランチ名の変更は失敗しました。",
"repo.branch.commits_divergence_from": "コミットの乖離: %[3]s より %[1]d 件遅れ %[2]d 件先行",
"repo.branch.commits_no_divergence": "%[1]s ブランチと一致",
"repo.tag.create_tag": "タグ %s を作成",
@@ -2809,6 +2885,7 @@
"admin.dashboard.task.finished": "タスク: %[2]s が開始したタスク %[1]s が完了",
"admin.dashboard.task.unknown": "不明なタスクです: %[1]s",
"admin.dashboard.cron.started": "Cronを開始しました: %[1]s",
+ "admin.dashboard.cron.process": "Cron: %[1]s",
"admin.dashboard.cron.cancelled": "Cron: %[1]s をキャンセル: %[3]s",
"admin.dashboard.cron.error": "Cronでエラー: %s: %[3]s",
"admin.dashboard.cron.finished": "Cron: %[1]s が完了",
@@ -2886,7 +2963,9 @@
"admin.users.admin": "管理者",
"admin.users.restricted": "制限あり",
"admin.users.reserved": "予約済み",
+ "admin.users.bot": "Bot",
"admin.users.remote": "リモート",
+ "admin.users.2fa": "2FA",
"admin.users.repos": "リポジトリ",
"admin.users.created": "作成日",
"admin.users.last_login": "前回のサインイン",
@@ -3008,6 +3087,7 @@
"admin.auths.attribute_mail": "メールアドレス",
"admin.auths.attribute_ssh_public_key": "SSH公開鍵",
"admin.auths.attribute_avatar": "アバター",
+ "admin.auths.ssh_keys_are_verified": "LDAPからのSSHキーを検証済みとする",
"admin.auths.attributes_in_bind": "バインドDNのコンテクストから属性を取得する",
"admin.auths.allow_deactivate_all": "サーチ結果が空のときは全ユーザーを非アクティブ化",
"admin.auths.use_paged_search": "ページ分割検索を使用",
@@ -3141,6 +3221,7 @@
"admin.config.db_name": "データベース名",
"admin.config.db_user": "ユーザー名",
"admin.config.db_schema": "スキーマ",
+ "admin.config.db_ssl_mode": "SSL",
"admin.config.db_path": "パス",
"admin.config.service_config": "サービス設定",
"admin.config.register_email_confirm": "登録にはメールによる確認が必要",
@@ -3179,6 +3260,7 @@
"admin.config.mailer_sendmail_path": "Sendmailのパス",
"admin.config.mailer_sendmail_args": "Sendmailの追加引数",
"admin.config.mailer_sendmail_timeout": "Sendmail のタイムアウト",
+ "admin.config.mailer_use_dummy": "Dummy",
"admin.config.test_email_placeholder": "メールアドレス (例 test@example.com)",
"admin.config.send_test_mail": "テストメールを送信",
"admin.config.send_test_mail_submit": "送信",
@@ -3217,8 +3299,6 @@
"admin.config.git_gc_args": "GC引数",
"admin.config.git_migrate_timeout": "移行タイムアウト",
"admin.config.git_mirror_timeout": "ミラー更新タイムアウト",
- "admin.config.git_clone_timeout": "クローン操作のタイムアウト",
- "admin.config.git_pull_timeout": "プル操作のタイムアウト",
"admin.config.git_gc_timeout": "GC操作のタイムアウト",
"admin.config.log_config": "ログ設定",
"admin.config.logger_name_fmt": "ロガー: %s",
@@ -3393,6 +3473,7 @@
"packages.assets": "アセット",
"packages.versions": "バージョン",
"packages.versions.view_all": "すべて表示",
+ "packages.dependency.id": "ID",
"packages.dependency.version": "バージョン",
"packages.search_in_external_registry": "%s で検索",
"packages.alpine.registry": "あなたの /etc/apk/repositories ファイルにURLを追加して、このレジストリをセットアップします:",
@@ -3402,10 +3483,12 @@
"packages.alpine.repository": "リポジトリ情報",
"packages.alpine.repository.branches": "ブランチ",
"packages.alpine.repository.repositories": "リポジトリ",
+ "packages.alpine.repository.architectures": "Architectures",
"packages.arch.registry": "/etc/pacman.conf にリポジトリとアーキテクチャを含めてサーバーを追加します:",
"packages.arch.install": "pacmanでパッケージを同期します:",
"packages.arch.repository": "リポジトリ情報",
"packages.arch.repository.repositories": "リポジトリ",
+ "packages.arch.repository.architectures": "Architectures",
"packages.cargo.registry": "Cargo 設定ファイルでこのレジストリをセットアップします。(例 ~/.cargo/config.toml):",
"packages.cargo.install": "Cargo を使用してパッケージをインストールするには、次のコマンドを実行します:",
"packages.chef.registry": "あなたの ~/.chef/config.rb ファイルに、このレジストリをセットアップします:",
@@ -3435,6 +3518,9 @@
"packages.debian.registry.info": "$distribution と $component は下にあるリストから選んでください。",
"packages.debian.install": "パッケージをインストールするには、次のコマンドを実行します:",
"packages.debian.repository": "リポジトリ情報",
+ "packages.debian.repository.distributions": "Distributions",
+ "packages.debian.repository.components": "Components",
+ "packages.debian.repository.architectures": "Architectures",
"packages.generic.download": "コマンドラインでパッケージをダウンロードします:",
"packages.go.install": "コマンドラインでパッケージをインストール:",
"packages.helm.registry": "このレジストリをコマンドラインからセットアップします:",
@@ -3463,6 +3549,7 @@
"packages.rpm.distros.suse": "SUSE系ディストリビューションの場合",
"packages.rpm.install": "パッケージをインストールするには、次のコマンドを実行します:",
"packages.rpm.repository": "リポジトリ情報",
+ "packages.rpm.repository.architectures": "Architectures",
"packages.rpm.repository.multiple_groups": "このパッケージは複数のグループで利用可能です。",
"packages.rubygems.install": "gem を使用してパッケージをインストールするには、次のコマンドを実行します:",
"packages.rubygems.install2": "または Gemfile に追加します:",
@@ -3536,6 +3623,7 @@
"secrets.deletion.success": "シークレットを削除しました。",
"secrets.deletion.failed": "シークレットの削除に失敗しました。",
"secrets.management": "シークレット管理",
+ "actions.actions": "Actions",
"actions.unit.desc": "Actionsの管理",
"actions.status.unknown": "不明",
"actions.status.waiting": "待機中",
@@ -3550,6 +3638,7 @@
"actions.runners.new": "新しいランナーを作成",
"actions.runners.new_notice": "ランナーの開始方法",
"actions.runners.status": "ステータス",
+ "actions.runners.id": "ID",
"actions.runners.name": "名称",
"actions.runners.owner_type": "タイプ",
"actions.runners.description": "説明",
@@ -3584,6 +3673,7 @@
"actions.runs.all_workflows": "すべてのワークフロー",
"actions.runs.commit": "コミット",
"actions.runs.scheduled": "スケジュール済み",
+ "actions.runs.pushed_by": "pushed by",
"actions.runs.invalid_workflow_helper": "ワークフロー設定ファイルは無効です。あなたの設定ファイルを確認してください: %s",
"actions.runs.no_matching_online_runner_helper": "ラベルに一致するオンラインのランナーが見つかりません: %s",
"actions.runs.no_job_without_needs": "ワークフローには依存関係のないジョブが少なくとも1つ含まれている必要があります。",
@@ -3648,6 +3738,7 @@
"projects.type-3.display_name": "組織プロジェクト",
"projects.enter_fullscreen": "フルスクリーン",
"projects.exit_fullscreen": "フルスクリーンを終了",
+ "git.filemode.changed_filemode": "%[1]s → %[2]s",
"git.filemode.directory": "ディレクトリ",
"git.filemode.normal_file": "ノーマルファイル",
"git.filemode.executable_file": "実行可能ファイル",
diff --git a/options/locale/locale_tr-TR.json b/options/locale/locale_tr-TR.json
index 4503d13930..edd2db2e39 100644
--- a/options/locale/locale_tr-TR.json
+++ b/options/locale/locale_tr-TR.json
@@ -3,6 +3,7 @@
"dashboard": "Pano",
"explore_title": "Keşfet",
"help": "Yardım",
+ "logo": "Logo",
"sign_in": "Giriş Yap",
"sign_in_with_provider": "%s ile oturum aç",
"sign_in_or": "veya",
@@ -31,6 +32,7 @@
"password": "Parola",
"access_token": "Erişim Kodu",
"re_type": "Parolayı Doğrula",
+ "captcha": "CAPTCHA",
"twofa": "İki Aşamalı Doğrulama",
"twofa_scratch": "İki aşamalı kazınmış kod",
"passcode": "Şifre",
@@ -146,12 +148,20 @@
"filter.private": "Özel",
"no_results_found": "Sonuç bulunamadı.",
"internal_error_skipped": "Dahili bir hata oluştu ama atlandı: %s",
+ "characters_spaces": "Boşluklar",
+ "characters_tabs": "Sekmeler",
+ "text_indent_style": "Girinti biçimi",
+ "text_indent_size": "Girinti boyutu",
+ "text_line_wrap": "Metni kaydır",
+ "text_line_nowrap": "Metni kaydırma",
+ "text_line_wrap_mode": "Satır sarma kipi",
"search.search": "Ara...",
"search.type_tooltip": "Arama türü",
"search.fuzzy": "Bulanık",
"search.fuzzy_tooltip": "Arama terimine benzeyen sonuçları da içer",
"search.words": "Kelimeler",
"search.words_tooltip": "Sadece arama terimi kelimeleriyle eşleşen sonuçları içer",
+ "search.regexp": "Regexp",
"search.regexp_tooltip": "Sadece regexp arama terimiyle tamamen eşleşen sonuçları içer",
"search.exact": "Tam",
"search.exact_tooltip": "Sadece arama terimiyle tamamen eşleşen sonuçları içer",
@@ -171,7 +181,7 @@
"search.runner_kind": "Çalıştırıcıları ara...",
"search.no_results": "Eşleşen sonuç bulunamadı.",
"search.issue_kind": "Konuları ara...",
- "search.pull_kind": "Değişiklikleri ara...",
+ "search.pull_kind": "Değişiklik isteklerini ara…",
"search.keyword_search_unavailable": "Anahtar kelime ile arama şu an mevcut değil. Lütfen site yöneticinizle iletişime geçin.",
"aria.navbar": "Gezinti Çubuğu",
"aria.footer": "Alt Bilgi",
@@ -184,6 +194,7 @@
"editor.buttons.heading.tooltip": "Başlık ekle",
"editor.buttons.bold.tooltip": "Kalın metin ekle",
"editor.buttons.italic.tooltip": "Eğik metin ekle",
+ "editor.buttons.strikethrough.tooltip": "Üstü çizili metin ekle",
"editor.buttons.quote.tooltip": "Metni alıntıla",
"editor.buttons.code.tooltip": "Kod ekle",
"editor.buttons.link.tooltip": "Bağlantı ekle",
@@ -227,6 +238,7 @@
"install.db_name": "Veritabanı Adı",
"install.db_schema": "Şema",
"install.db_schema_helper": "Veritabanı varsayılanı için boş bırakın (\"genel\").",
+ "install.ssl_mode": "SSL",
"install.path": "Yol",
"install.sqlite_helper": "SQLite3 veritabanı dosya yolu.
Gitea'yı servis olarak çalıştırıyorsanız tam yol adını girin.",
"install.reinstall_error": "Mevcut bir Gitea veritabanına yüklemeye çalışıyorsunuz",
@@ -643,6 +655,7 @@
"settings.appearance": "Görünüm",
"settings.password": "Parola",
"settings.security": "Güvenlik",
+ "settings.avatar": "Avatar",
"settings.ssh_gpg_keys": "SSH / GPG Anahtarları",
"settings.social": "Sosyal Medya Hesapları",
"settings.applications": "Uygulamalar",
@@ -652,6 +665,7 @@
"settings.twofa": "İki Aşamalı Kimlik Doğrulama (TOTP)",
"settings.account_link": "Bağlı Hesaplar",
"settings.organization": "Organizasyonlar",
+ "settings.uid": "UID",
"settings.webauthn": "İki-Aşamalı Kimlik Doğrulama (Güvenlik Anahtarları)",
"settings.public_profile": "Herkese Açık Profil",
"settings.biography_placeholder": "Bize kendiniz hakkında birşeyler söyleyin! (Markdown kullanabilirsiniz)",
@@ -744,6 +758,7 @@
"settings.add_email": "E-posta Adresi Ekle",
"settings.add_openid": "Açık Kimlik URI 'si ekle",
"settings.add_email_confirmation_sent": "\"%s\" adresine bir doğrulama e-postası gönderildi. E-postanızı doğrulamak için %s içinde gelen kutunuzu kontrol ediniz.",
+ "settings.email_primary_not_found": "Seçilen e-posta adresi bulunamıyor.",
"settings.add_email_success": "Yeni e-posta adresi eklendi.",
"settings.email_preference_set_success": "E-posta tercihi başarıyla ayarlandı.",
"settings.add_openid_success": "Yeni OpenID adresi eklendi.",
@@ -970,6 +985,7 @@
"repo.fork.blocked_user": "Depo çatallanamıyor, depo sahibi tarafından engellenmişsiniz.",
"repo.use_template": "Bu şablonu kullan",
"repo.open_with_editor": "%s ile aç",
+ "repo.download_directory_as": "Dizini %s olarak indir",
"repo.download_zip": "ZIP indir",
"repo.download_tar": "TAR.GZ indir",
"repo.download_bundle": "BUNDLE indir",
@@ -989,6 +1005,7 @@
"repo.multiple_licenses": "Çoklu Lisans",
"repo.object_format": "Nesne Biçimi",
"repo.object_format_helper": "Deponun nesne biçimi. Daha sonra değiştirilemez. SHA1 en uyumlu olandır.",
+ "repo.readme": "README",
"repo.readme_helper": "Bir README dosyası şablonu seçin.",
"repo.readme_helper_desc": "Projeniz için eksiksiz bir açıklama yazabileceğiniz yer burasıdır.",
"repo.auto_init": "Depoyu başlat (.gitignore, Lisans ve README dosyalarını ekler)",
@@ -1053,6 +1070,7 @@
"repo.desc.template": "Şablon",
"repo.desc.internal": "Dahili",
"repo.desc.archived": "Arşivlenmiş",
+ "repo.desc.sha256": "SHA256",
"repo.template.items": "Şablon Öğeleri",
"repo.template.git_content": "Git İçeriği (Varsayılan Dal)",
"repo.template.git_hooks": "Git İstemcileri",
@@ -1081,6 +1099,7 @@
"repo.migrate_options_lfs_endpoint.description.local": "Yerel bir sunucu yolu da destekleniyor.",
"repo.migrate_options_lfs_endpoint.placeholder": "Boş bırakılırsa, uç nokta klon URL'sinden türetilecektir",
"repo.migrate_items": "Göç Öğeleri",
+ "repo.migrate_items_wiki": "Viki",
"repo.migrate_items_milestones": "Dönüm noktaları",
"repo.migrate_items_labels": "Etiketler",
"repo.migrate_items_issues": "Konular",
@@ -1175,6 +1194,7 @@
"repo.release": "Sürüm",
"repo.releases": "Sürüm",
"repo.tag": "Etiket",
+ "repo.git_tag": "Git Etiketi",
"repo.released_this": "bu sürümü yayınladı",
"repo.tagged_this": "şunu etiketledi",
"repo.file.title": "%s dalındaki/etiketindeki %s",
@@ -1234,8 +1254,11 @@
"repo.editor.must_be_on_a_branch": "Bu dosyada değişiklik yapmak veya önermek için bir dalda olmalısınız.",
"repo.editor.fork_before_edit": "Bu dosyada değişiklik yapmak veya önermek için depoyu çatallamalısınız.",
"repo.editor.delete_this_file": "Dosyayı Sil",
+ "repo.editor.delete_this_directory": "Dizini Sil",
"repo.editor.must_have_write_access": "Bu dosyada değişiklikler yapmak veya önermek için yazma erişiminizin olması gerekir.",
"repo.editor.file_delete_success": "\"%s\" dosyası silindi.",
+ "repo.editor.directory_delete_success": "\"%s\" dizini silindi.",
+ "repo.editor.delete_directory": "'%s' dizinini sil",
"repo.editor.name_your_file": "Dosyanızı isimlendirin…",
"repo.editor.filename_help": "Bölü ('/') işaretiyle ismini yazarak bir dizin ekleyebilirsiniz. Dizini silmek için girdi sahasının başına backspace yazmalısınız.",
"repo.editor.or": "veya",
@@ -1356,6 +1379,7 @@
"repo.projects.column.new": "Yeni Sütun",
"repo.projects.column.set_default": "Varsayılanı Ayarla",
"repo.projects.column.set_default_desc": "Bu sütunu kategorize edilmemiş konular ve değişiklik istekleri için varsayılan olarak ayarlayın",
+ "repo.projects.column.default_column_hint": "Bu projeye eklenen yeni konular bu sütuna eklenecektir",
"repo.projects.column.delete": "Sutün Sil",
"repo.projects.column.deletion_desc": "Bir proje sütununun silinmesi, ilgili tüm konuları varsayılan sütuna taşır. Devam edilsin mi?",
"repo.projects.column.color": "Renk",
@@ -1474,6 +1498,7 @@
"repo.issues.filter_sort.feweststars": "En az yıldızlılar",
"repo.issues.filter_sort.mostforks": "En çok çatallananlar",
"repo.issues.filter_sort.fewestforks": "En az çatallananlar",
+ "repo.issues.quick_goto": "Konuya Git",
"repo.issues.action_open": "Açık",
"repo.issues.action_close": "Kapat",
"repo.issues.action_label": "Etiket",
@@ -1686,6 +1711,7 @@
"repo.issues.review.content.empty": "İstenen değişiklik(ler)i belirten bir yorum bırakmanız gerekir.",
"repo.issues.review.reject": "%s değişiklik istedi",
"repo.issues.review.wait": "için %s inceleme isteği",
+ "repo.issues.review.codeowners_rules": "CODEOWNERS kuralları",
"repo.issues.review.add_review_request": "%s tarafından %s inceleme istedi",
"repo.issues.review.remove_review_request": "%s %s için inceleme isteği kaldırıldı",
"repo.issues.review.remove_review_request_self": "%s incelemeyi reddetti",
@@ -1721,17 +1747,20 @@
"repo.issues.reference_link": "Referans: %s",
"repo.compare.compare_base": "temel",
"repo.compare.compare_head": "karşılaştır",
+ "repo.compare.title": "Değişiklikleri karşılaştırma",
+ "repo.compare.description": "Değişiklikleri görmek veya yeni bir değişiklik isteği başlatmak için iki dal veya etiket seçin.",
"repo.pulls.desc": "Değişiklik isteklerini ve kod incelemelerini etkinleştir.",
"repo.pulls.new": "Yeni Değişiklik İsteği",
+ "repo.pulls.new.description": "Bu karşılaştırmadaki değişiklikleri başkalarıyla tartışın ve gözden geçirin.",
"repo.pulls.new.blocked_user": "Değişiklik isteği oluşturulamıyor, depo sahibi tarafından engellenmişsiniz.",
"repo.pulls.new.must_collaborator": "Değişiklik isteği oluşturmak için bir katkıcı olmalısınız.",
+ "repo.pulls.new.already_existed": "Bu dallar arasında halihazırda bir değişiklik isteği mevcut",
"repo.pulls.edit.already_changed": "Değişiklik isteğine yapılan değişiklikler kaydedilemiyor. İçerik başka kullanıcı tarafından değiştirilmiş gözüküyor. Diğerlerinin değişikliklerinin üzerine yazmamak için lütfen sayfayı yenileyin ve tekrar düzenlemeye çalışın.",
"repo.pulls.view": "Değişiklik İsteği Görüntüle",
"repo.pulls.compare_changes": "Yeni Değişiklik İsteği",
"repo.pulls.allow_edits_from_maintainers": "Bakımcıların düzenlemelerine izin ver",
"repo.pulls.allow_edits_from_maintainers_desc": "Ana dala yazma hakkı olan kullanıcılar bu dala da gönderebilirler",
"repo.pulls.allow_edits_from_maintainers_err": "Güncelleme başarısız oldu",
- "repo.pulls.compare_changes_desc": "Birleştirmek için hedef ve kaynak dalı seçin.",
"repo.pulls.has_viewed_file": "Görüldü",
"repo.pulls.has_changed_since_last_review": "Son incelemenizden sonra değişti",
"repo.pulls.viewed_files_label": "%[1]d / %[2]d dosya görüldü",
@@ -1757,6 +1786,8 @@
"repo.pulls.title_desc": "%[2]s içindeki %[1]d işlemeyi %[3]s ile birleştirmek istiyor",
"repo.pulls.merged_title_desc": "%[4]s %[2]s içindeki %[1]d işlemeyi %[3]s ile birleştirdi",
"repo.pulls.change_target_branch_at": "hedef dal %s adresinden %s%s adresine değiştirildi",
+ "repo.pulls.marked_as_work_in_progress_at": "değişiklik isteğini devam eden iş olarak işaretledi %s",
+ "repo.pulls.marked_as_ready_for_review_at": "değişiklik isteğini incelemeye hazır olarak işaretledi %s",
"repo.pulls.tab_conversation": "Sohbet",
"repo.pulls.tab_commits": "İşleme",
"repo.pulls.tab_files": "Değiştirilen Dosyalar",
@@ -1775,6 +1806,7 @@
"repo.pulls.remove_prefix": "%s ön ekini kaldır",
"repo.pulls.data_broken": "Bu değişiklik isteği, çatallama bilgilerinin eksik olması nedeniyle bozuldu.",
"repo.pulls.files_conflicted": "Bu değişiklik isteğinde, hedef dalla çakışan değişiklikler var.",
+ "repo.pulls.files_conflicted_no_listed_files": "(Çakışan dosya yok)",
"repo.pulls.is_checking": "Birleştirme çakışması denetleniyor…",
"repo.pulls.is_ancestor": "Bu dal zaten hedef dalda mevcut. Birleştirilecek bir şey yok.",
"repo.pulls.is_empty": "Bu daldaki değişiklikler zaten hedef dalda mevcut. Bu boş bir işleme olacaktır.",
@@ -1829,12 +1861,16 @@
"repo.pulls.status_checking": "Bazı denetlemeler beklemede",
"repo.pulls.status_checks_success": "Tüm denetlemeler başarılı oldu",
"repo.pulls.status_checks_warning": "Bazı kontroller uyarılar bildirdi",
- "repo.pulls.status_checks_failure": "Bazı kontroller başarısız oldu",
+ "repo.pulls.status_checks_failure_required": "Bazı gerekli denetlemeler başarısız oldu",
+ "repo.pulls.status_checks_failure_optional": "Bazı isteğe bağlı denetlemeler başarısız oldu",
"repo.pulls.status_checks_error": "Bazı kontroller hatalar bildirdi",
"repo.pulls.status_checks_requested": "Gerekli",
"repo.pulls.status_checks_details": "Ayrıntılar",
"repo.pulls.status_checks_hide_all": "Tüm denetlemeleri gizle",
"repo.pulls.status_checks_show_all": "Tüm denetlemeleri göster",
+ "repo.pulls.status_checks_approve_all": "Tüm iş akışlarını onayla",
+ "repo.pulls.status_checks_need_approvals": "%d iş akışı onay bekliyor",
+ "repo.pulls.status_checks_need_approvals_helper": "İş akışı ancak depo sahibinin onayı sonrasında çalışabilir.",
"repo.pulls.update_branch": "Dalı birleştirmeyle güncelle",
"repo.pulls.update_branch_rebase": "Dalı yeniden yapılandırmayla güncelle",
"repo.pulls.update_branch_success": "Dal güncellemesi başarıyla gerçekleştirildi",
@@ -1916,6 +1952,7 @@
"repo.signing.wont_sign.not_signed_in": "Oturum açmadınız.",
"repo.ext_wiki": "Harici Vikiye Erişim",
"repo.ext_wiki.desc": "Harici bir wiki'ye bağlantı.",
+ "repo.wiki": "Viki",
"repo.wiki.welcome": "Wiki'ye Hoşgeldiniz.",
"repo.wiki.welcome_desc": "Wiki, katkıcılarla belge yazmanıza ve paylaşmanıza olanak tanır.",
"repo.wiki.desc": "Katkıcılarla belgeler yazın ve paylaşın.",
@@ -2095,6 +2132,8 @@
"repo.settings.pulls.ignore_whitespace": "Çakışmalar için Boşlukları Gözardı Et",
"repo.settings.pulls.enable_autodetect_manual_merge": "Kendiliğinden algılamalı elle birleştirmeyi etkinleştir (Not: Bazı özel durumlarda yanlış kararlar olabilir)",
"repo.settings.pulls.allow_rebase_update": "Değişiklik isteği dalının yeniden yapılandırmayla güncellenmesine izin ver",
+ "repo.settings.pulls.default_target_branch": "Yeni değişiklik istekleri için varsayılan hedef dal",
+ "repo.settings.pulls.default_target_branch_default": "Varsayılan dal (%s)",
"repo.settings.pulls.default_delete_branch_after_merge": "Varsayılan olarak birleştirmeden sonra değişiklik isteği dalını sil",
"repo.settings.pulls.default_allow_edits_from_maintainers": "Bakımcıların düzenlemelerine izin ver",
"repo.settings.releases_desc": "Depo Sürümlerini Etkinleştir",
@@ -2284,6 +2323,9 @@
"repo.settings.event_package": "Paket",
"repo.settings.event_package_desc": "Bir depoda paket oluşturuldu veya silindi.",
"repo.settings.branch_filter": "Dal filtresi",
+ "repo.settings.branch_filter_desc_1": "İtme, dal oluşturma ve dal silme olayları için dal (ve referans adı) izin listesi, glob kalıbı olarak belirtilir. Boş veya * ise, tüm dallar ve etiketler için olaylar raporlanır.",
+ "repo.settings.branch_filter_desc_2": "Tam referans adlarını eşleştirmek için refs/heads/ veya refs/tags/ önekini kullanın.",
+ "repo.settings.branch_filter_desc_doc": "Sözdizimi için %[2]s belgelerine bakın.",
"repo.settings.authorization_header": "Yetkilendirme Başlığı",
"repo.settings.authorization_header_desc": "Mevcutsa isteklere yetkilendirme başlığı olarak eklenecektir. Örnekler: %s.",
"repo.settings.active": "Etkin",
@@ -2298,6 +2340,19 @@
"repo.settings.slack_domain": "Alan Adı",
"repo.settings.slack_channel": "Kanal",
"repo.settings.add_web_hook_desc": "%s web kancasını deponuza ekleyin.",
+ "repo.settings.web_hook_name_gitea": "Gitea",
+ "repo.settings.web_hook_name_gogs": "Gogs",
+ "repo.settings.web_hook_name_slack": "Slack",
+ "repo.settings.web_hook_name_discord": "Discord",
+ "repo.settings.web_hook_name_dingtalk": "DingTalk",
+ "repo.settings.web_hook_name_telegram": "Telegram",
+ "repo.settings.web_hook_name_matrix": "Matrix",
+ "repo.settings.web_hook_name_msteams": "Microsoft Teams",
+ "repo.settings.web_hook_name_feishu_or_larksuite": "Feishu / Lark Suite",
+ "repo.settings.web_hook_name_feishu": "Feishu",
+ "repo.settings.web_hook_name_larksuite": "Lark Suite",
+ "repo.settings.web_hook_name_wechatwork": "WeCom (Wechat Work)",
+ "repo.settings.web_hook_name_packagist": "Packagist",
"repo.settings.packagist_username": "Packagist kullanıcı adı",
"repo.settings.packagist_api_token": "API erişim anahtarı",
"repo.settings.packagist_package_url": "Packagist paket URL'si",
@@ -2391,9 +2446,10 @@
"repo.settings.block_outdated_branch_desc": "Baş dal taban dalın arkasındayken birleştirme mümkün olmayacaktır.",
"repo.settings.block_admin_merge_override": "Yöneticiler dal koruma kurallarına uymalıdır",
"repo.settings.block_admin_merge_override_desc": "Yöneticiler dal koruma kurallarına uymalıdır ve kurallardan kaçınamazlar.",
- "repo.settings.default_branch_desc": "Değişiklik istekleri ve kod işlemeleri için varsayılan bir depo dalı seçin:",
+ "repo.settings.default_branch_desc": "Kod işlemeleri için varsayılan bir depo dalı seçin.",
+ "repo.settings.default_target_branch_desc": "Değişiklik istekleri, Depo Gelişmiş Ayarları'nın Değişiklik İstekleri bölümünde ayarlanmışsa farklı varsayılan hedef dal kullanabilir.",
"repo.settings.merge_style_desc": "Biçimleri Birleştir",
- "repo.settings.default_merge_style_desc": "Değişiklik istekleri için varsayılan birleştirme tarzı",
+ "repo.settings.default_merge_style_desc": "Varsayılan birleştirme tarzı",
"repo.settings.choose_branch": "Bir dal seç…",
"repo.settings.no_protected_branch": "Korumalı dal yok.",
"repo.settings.edit_protected_branch": "Düzenle",
@@ -2443,6 +2499,7 @@
"repo.settings.unarchive.success": "Depo başarıyla arşivden çıkarıldı.",
"repo.settings.unarchive.error": "Depoyu arşivden çıkarmaya çalışırken bir hata oluştu. Daha fazla ayrıntı için günlüğe bakın.",
"repo.settings.update_avatar_success": "Depo resmi güncellendi.",
+ "repo.settings.lfs": "LFS",
"repo.settings.lfs_filelist": "Bu depoda barındırılan LFS dosyaları",
"repo.settings.lfs_no_lfs_files": "Bu depoda barındırılan herhangi bir LFS dosyası yok",
"repo.settings.lfs_findcommits": "İşleme bul",
@@ -2461,6 +2518,8 @@
"repo.settings.lfs_lock_file_no_exist": "Kilitli dosya varsayılan dalda mevcut değil",
"repo.settings.lfs_force_unlock": "Kilidi Açmaya Zorla",
"repo.settings.lfs_pointers.found": "Bulunan %d blob işaretçi(leri) - %d ilişkili, %d ilişkilendirilmemiş (%d mağazadan eksik)",
+ "repo.settings.lfs_pointers.sha": "Blob SHA",
+ "repo.settings.lfs_pointers.oid": "OID",
"repo.settings.lfs_pointers.inRepo": "Depoda",
"repo.settings.lfs_pointers.exists": "Mağazada var",
"repo.settings.lfs_pointers.accessible": "Kullanıcı tarafından erişilebilir",
@@ -2488,6 +2547,7 @@
"repo.diff.whitespace_ignore_at_eol": "Satır sonundaki boşluk değişiklikleri yoksay",
"repo.diff.stats_desc": " %d değiştirilmiş dosya ile %d ekleme ve %d silme",
"repo.diff.stats_desc_file": "%d değişiklik: %d ekleme ve %d silme",
+ "repo.diff.bin": "BIN",
"repo.diff.bin_not_shown": "İkili dosya gösterilmiyor.",
"repo.diff.view_file": "Dosyayı Görüntüle",
"repo.diff.file_before": "Önce",
@@ -2500,8 +2560,8 @@
"repo.diff.too_many_files": "Bu fark içinde çok fazla dosya değişikliği olduğu için bazı dosyalar gösterilmiyor",
"repo.diff.show_more": "Daha Fazla Göster",
"repo.diff.load": "Fark Yükle",
- "repo.diff.generated": "üretilen",
- "repo.diff.vendored": "sağlanmış",
+ "repo.diff.generated": "Üretilen",
+ "repo.diff.vendored": "Sağlanmış",
"repo.diff.comment.add_line_comment": "Satır yorum ekle",
"repo.diff.comment.placeholder": "Yorum Yap",
"repo.diff.comment.add_single_comment": "Bir yorum ekle",
@@ -2575,6 +2635,13 @@
"repo.release.add_tag": "Yalnızca Etiket Oluştur",
"repo.release.releases_for": "%s sürümleri",
"repo.release.tags_for": "%s etiketleri",
+ "repo.release.notes": "Sürüm notları",
+ "repo.release.generate_notes": "Sürüm notlarını oluştur",
+ "repo.release.generate_notes_desc": "Bu sürüm için birleştirilmiş değişiklik isteklerini ve değişiklik günlüğü bağlantısını otomatik olarak ekleyin.",
+ "repo.release.previous_tag": "Önceki etiket",
+ "repo.release.generate_notes_tag_not_found": "Bu depoda \"%s\" etiketi yok.",
+ "repo.release.generate_notes_target_not_found": "\"%s\" sürüm hedefi bulunamadı.",
+ "repo.release.generate_notes_missing_tag": "Sürüm notlarını oluşturmak için bir etiket ismi giriniz.",
"repo.branch.name": "Dal Adı",
"repo.branch.already_exists": "\"%s\" isimli bir dal zaten mevcut.",
"repo.branch.delete_head": "Sil",
@@ -2594,7 +2661,7 @@
"repo.branch.restore_success": "\"%s\" dalı geri yüklendi.",
"repo.branch.restore_failed": "\"%s\" dalı geri yüklenemedi.",
"repo.branch.protected_deletion_failed": "\"%s\" dalı korunuyor. Silinemez.",
- "repo.branch.default_deletion_failed": "\"%s\" dalı varsayılan daldır. Silinemez.",
+ "repo.branch.default_deletion_failed": "\"%s\" dalı varsayılan veya değişiklik isteği hedef dalıdır. Silinemez.",
"repo.branch.default_branch_not_exist": "Varsayılan dal \"%s\" mevcut değil.",
"repo.branch.restore": "\"%s\" Dalını Geri Yükle",
"repo.branch.download": "\"%s\" Dalını İndir",
@@ -2611,7 +2678,7 @@
"repo.branch.new_branch_from": "\"%s\" dalından yeni dal oluştur",
"repo.branch.renamed": "%s dalının adı %s olarak değiştirildi.",
"repo.branch.rename_default_or_protected_branch_error": "Varsayılan veya korunan dalların adını sadece yöneticiler değiştirebilir.",
- "repo.branch.rename_protected_branch_failed": "Bu dal glob tabanlı koruma kurallarıyla korunuyor.",
+ "repo.branch.rename_protected_branch_failed": "Dal koruma kuralları nedeniyle dalın adı değiştirilemedi.",
"repo.branch.commits_divergence_from": "İşleme farklılığı: %[3]s dalının %[1]d işleme gerisinde ve %[2]d işleme ilerisinde",
"repo.branch.commits_no_divergence": "%[1]s dalı ile aynı",
"repo.tag.create_tag": "%s etiketi oluştur",
@@ -2818,6 +2885,7 @@
"admin.dashboard.task.finished": "Görev: %[1]s %[2]s tarafından başlatıldı ve bitti",
"admin.dashboard.task.unknown": "Bilinmeyen görev: %[1]s",
"admin.dashboard.cron.started": "Cron Başlatıldı: %[1]s",
+ "admin.dashboard.cron.process": "Cron: %[1]s",
"admin.dashboard.cron.cancelled": "Cron: %[1]s iptal edildi: %[3]s",
"admin.dashboard.cron.error": "Cron Hatası: %s: %[3]s",
"admin.dashboard.cron.finished": "Cron: %[1]s bitti",
@@ -2839,6 +2907,7 @@
"admin.dashboard.git_gc_repos": "Tüm depolardaki atıkları temizle",
"admin.dashboard.resync_all_sshkeys": "'.ssh/authorized_keys' dosyasını Gitea SSH anahtarlarıyla güncelle",
"admin.dashboard.resync_all_sshprincipals": "'.ssh/authorized_principals' dosyasını Gitea SSH sorumlularıyla güncelleyin",
+ "admin.dashboard.resync_all_hooks": "Tüm depoların git kancalarını yeniden senkronize edin (pre-receive, update, post-receive, proc-receive, ...)",
"admin.dashboard.reinit_missing_repos": "Kayıtları bulunanlar için tüm eksik Git depolarını yeniden başlat",
"admin.dashboard.sync_external_users": "Harici kullanıcı verisini senkronize et",
"admin.dashboard.cleanup_hook_task_table": "Hook_task tablosunu temizle",
@@ -2894,6 +2963,7 @@
"admin.users.admin": "Yönetici",
"admin.users.restricted": "Kısıtlanmış",
"admin.users.reserved": "Rezerve",
+ "admin.users.bot": "Bot",
"admin.users.remote": "Uzak",
"admin.users.2fa": "2FD",
"admin.users.repos": "Depolar",
@@ -3017,6 +3087,7 @@
"admin.auths.attribute_mail": "E-posta Özelliği",
"admin.auths.attribute_ssh_public_key": "Açık SSH Anahtarı Özelliği",
"admin.auths.attribute_avatar": "Avatar Özelliği",
+ "admin.auths.ssh_keys_are_verified": "LDAP'taki SSH anahtarları doğrulanmış kabul ediliyor",
"admin.auths.attributes_in_bind": "Bağlı DN tabanındaki özellikleri çek",
"admin.auths.allow_deactivate_all": "Boş bir arama sonucunun tüm kullanıcıları devre dışı bırakmasına izin ver",
"admin.auths.use_paged_search": "Sayfalı Aramayı Kullan",
@@ -3150,6 +3221,7 @@
"admin.config.db_name": "İsim",
"admin.config.db_user": "Kullanıcı adı",
"admin.config.db_schema": "Şema",
+ "admin.config.db_ssl_mode": "SSL",
"admin.config.db_path": "Yol",
"admin.config.service_config": "Servis Yapılandırması",
"admin.config.register_email_confirm": "Kayıt Olmak İçin E-posta Onayı Gereksin",
@@ -3227,8 +3299,6 @@
"admin.config.git_gc_args": "GC Argümanları",
"admin.config.git_migrate_timeout": "Göç İşlemi Zaman Aşımı",
"admin.config.git_mirror_timeout": "Yansı Güncelleme Zaman Aşımı",
- "admin.config.git_clone_timeout": "Klonlama İşlemi Zaman Aşımı",
- "admin.config.git_pull_timeout": "Çekme İşlemi Zaman Aşımı",
"admin.config.git_gc_timeout": "GC İşlemi Zaman Aşımı",
"admin.config.log_config": "Log Yapılandırması",
"admin.config.logger_name_fmt": "Günlükçü: %s",
@@ -3433,6 +3503,7 @@
"packages.conda.registry": "Bu kütüğü .condarc dosyasında bir Conda deposu olarak ayarlayın:",
"packages.conda.install": "Conda ile paket kurmak için aşağıdaki komutu çalıştırın:",
"packages.container.details.type": "Görüntü Türü",
+ "packages.container.details.platform": "Platform",
"packages.container.pull": "Görüntüyü komut satırını kullanarak çekin:",
"packages.container.images": "Görüntüler",
"packages.container.digest": "Özet",
@@ -3635,6 +3706,7 @@
"actions.workflow.has_workflow_dispatch": "Bu iş akışının workflow_dispatch olay tetikleyicisi var.",
"actions.workflow.has_no_workflow_dispatch": "'%s' iş akışının workflow_dispatch olay tetikleyicisi yok.",
"actions.need_approval_desc": "Değişiklik isteği çatalında iş akışı çalıştırmak için onay gerekiyor.",
+ "actions.approve_all_success": "Tüm iş akışı çalıştırmaları başarıyla onaylandı.",
"actions.variables": "Değişkenler",
"actions.variables.management": "Değişken Yönetimi",
"actions.variables.creation": "Değişken Ekle",
@@ -3652,12 +3724,21 @@
"actions.variables.update.success": "Değişken düzenlendi.",
"actions.logs.always_auto_scroll": "Günlükleri her zaman otomatik kaydır",
"actions.logs.always_expand_running": "Çalıştırma günlüklerini her zaman genişlet",
+ "actions.general": "Genel",
+ "actions.general.enable_actions": "Eylemleri Etkinleştir",
+ "actions.general.collaborative_owners_management": "Ortak Sahiplerin Yönetimi",
+ "actions.general.collaborative_owners_management_help": "Ortak sahip, özel deposu bu deponun eylemlerine ve iş akışlarına erişimi olan bir kullanıcı veya kuruluştur.",
+ "actions.general.add_collaborative_owner": "Ortak Sahip Ekle",
+ "actions.general.collaborative_owner_not_exist": "Ortak sahip mevcut değil.",
+ "actions.general.remove_collaborative_owner": "Ortak Sahip Kaldır",
+ "actions.general.remove_collaborative_owner_desc": "Ortak sahip kaldırıldığında, bu sahibin depoları artık bu depodaki eylemlere erişemez. Devam etmek istiyor musunuz?",
"projects.deleted.display_name": "Silinmiş Proje",
"projects.type-1.display_name": "Kişisel Proje",
"projects.type-2.display_name": "Depo Projesi",
"projects.type-3.display_name": "Organizasyon Projesi",
"projects.enter_fullscreen": "Tam Ekran",
"projects.exit_fullscreen": "Tam Ekrandan Çık",
+ "git.filemode.changed_filemode": "%[1]s → %[2]s",
"git.filemode.directory": "Dizin",
"git.filemode.normal_file": "Normal dosya",
"git.filemode.executable_file": "Çalıştırılabilir dosya",
diff --git a/options/locale/locale_zh-CN.json b/options/locale/locale_zh-CN.json
index 2d031d2d58..861090cea7 100644
--- a/options/locale/locale_zh-CN.json
+++ b/options/locale/locale_zh-CN.json
@@ -148,6 +148,13 @@
"filter.private": "私有",
"no_results_found": "未找到结果",
"internal_error_skipped": "发生内部错误,但已跳过: %s",
+ "characters_spaces": "空格",
+ "characters_tabs": "制表符",
+ "text_indent_style": "缩进风格",
+ "text_indent_size": "缩进大小",
+ "text_line_wrap": "换行",
+ "text_line_nowrap": "无换行",
+ "text_line_wrap_mode": "换行模式",
"search.search": "搜索…",
"search.type_tooltip": "搜索类型",
"search.fuzzy": "模糊",
@@ -271,7 +278,7 @@
"install.smtp_port": "SMTP 端口",
"install.smtp_from": "邮件发件人",
"install.smtp_from_invalid": "「邮件发件人」地址无效",
- "install.smtp_from_helper": "请输入一个用于 Gitea 的邮箱地址,或者使用完整格式:「名称」。",
+ "install.smtp_from_helper": "请输入一个用于 Gitea 的邮箱地址,或者使用完整格式:\"名称\" <email@example.com>。",
"install.mailer_user": "SMTP 用户名",
"install.mailer_password": "SMTP 密码",
"install.register_confirm": "需要邮件确认注册",
@@ -751,6 +758,7 @@
"settings.add_email": "新增邮箱地址",
"settings.add_openid": "添加 OpenID URI",
"settings.add_email_confirmation_sent": "一封确认邮件已经发送至「%s」,请检查您的收件箱并在 %s 内完成确认注册操作。",
+ "settings.email_primary_not_found": "找不到选定的电子邮件地址。",
"settings.add_email_success": "新邮箱地址已添加。",
"settings.email_preference_set_success": "邮件首选项已成功设置。",
"settings.add_openid_success": "新的 OpenID 地址已添加。",
@@ -786,7 +794,7 @@
"settings.gpg_token": "令牌",
"settings.gpg_token_help": "您可以使用以下方式生成签名:",
"settings.gpg_token_signature": "GPG 增强签名",
- "settings.key_signature_gpg_placeholder": "以 '-----BEGIN PGP PUBLIC KEY BLOCK-----' 开头",
+ "settings.key_signature_gpg_placeholder": "以 '-----BEGIN PGP SIGNATURE-----' 开头",
"settings.verify_gpg_key_success": "GPG 密钥「%s」已验证。",
"settings.ssh_key_verified": "已验证的密钥",
"settings.ssh_key_verified_long": "密钥已通过令牌验证,可用于验证与该用户任何已激活邮箱地址匹配的提交。",
@@ -977,6 +985,7 @@
"repo.fork.blocked_user": "无法克隆仓库,因为您被仓库所有者屏蔽。",
"repo.use_template": "使用此模板",
"repo.open_with_editor": "用 %s 打开",
+ "repo.download_directory_as": "下载目录为 %s",
"repo.download_zip": "下载 ZIP",
"repo.download_tar": "下载 TAR.GZ",
"repo.download_bundle": "下载 BUNDLE",
@@ -1370,7 +1379,7 @@
"repo.projects.column.new": "创建列",
"repo.projects.column.set_default": "设为默认",
"repo.projects.column.set_default_desc": "设置此列为未分类问题和合并请求的默认值",
- "repo.projects.column.default_column_hint": "添加到此项目的新议题将被添加到此列",
+ "repo.projects.column.default_column_hint": "添加到此项目的新工单将被添加到此列",
"repo.projects.column.delete": "删除列",
"repo.projects.column.deletion_desc": "删除项目列会将所有相关问题移至默认列。是否继续?",
"repo.projects.column.color": "颜色",
@@ -1489,6 +1498,7 @@
"repo.issues.filter_sort.feweststars": "点赞由少到多",
"repo.issues.filter_sort.mostforks": "派生由多到少",
"repo.issues.filter_sort.fewestforks": "派生由少到多",
+ "repo.issues.quick_goto": "前往工单",
"repo.issues.action_open": "开启",
"repo.issues.action_close": "关闭",
"repo.issues.action_label": "标签",
@@ -1633,7 +1643,7 @@
"repo.issues.cancel_tracking": "取消",
"repo.issues.cancel_tracking_history": "取消时间跟踪 %s",
"repo.issues.del_time": "删除此时间跟踪日志",
- "repo.issues.add_time_history": "于 %[2]s 添加计时 %[1]",
+ "repo.issues.add_time_history": "于 %[2]s 添加计时 %[1]s",
"repo.issues.del_time_history": "已删除时间 %s",
"repo.issues.add_time_manually": "手动添加时间",
"repo.issues.add_time_hours": "小时",
@@ -1701,6 +1711,7 @@
"repo.issues.review.content.empty": "您需要留下一个注释,表明需要的更改。",
"repo.issues.review.reject": "请求变更 %s",
"repo.issues.review.wait": "已请求 %s 评审",
+ "repo.issues.review.codeowners_rules": "CODEOWNERS 规则",
"repo.issues.review.add_review_request": "于 %[2]s 请求 %[1]s 评审",
"repo.issues.review.remove_review_request": "取消对 %s 的评审请求 %s",
"repo.issues.review.remove_review_request_self": "拒绝评审 %s",
@@ -1736,8 +1747,11 @@
"repo.issues.reference_link": "引用:%s",
"repo.compare.compare_base": "基准分支",
"repo.compare.compare_head": "比较",
+ "repo.compare.title": "对比变更",
+ "repo.compare.description": "选择两个分支或标签以查看变更或发起一个新的合并请求。",
"repo.pulls.desc": "启用合并请求和代码评审。",
"repo.pulls.new": "创建合并请求",
+ "repo.pulls.new.description": "将此对比中的变更与其他版本一起进行讨论和评审。",
"repo.pulls.new.blocked_user": "无法创建合并请求,因为您已被仓库所有者屏蔽。",
"repo.pulls.new.must_collaborator": "您必须是仓库的协作者才能创建合并请求。",
"repo.pulls.new.already_existed": "这些分支之间的合并请求已存在",
@@ -1747,7 +1761,6 @@
"repo.pulls.allow_edits_from_maintainers": "允许维护者编辑",
"repo.pulls.allow_edits_from_maintainers_desc": "对基础分支有写入权限的用户也可以推送到此分支",
"repo.pulls.allow_edits_from_maintainers_err": "更新失败",
- "repo.pulls.compare_changes_desc": "选择合并的目标分支和源分支。",
"repo.pulls.has_viewed_file": "已查看",
"repo.pulls.has_changed_since_last_review": "自您上次审核以来已更改",
"repo.pulls.viewed_files_label": "%[1]d / %[2]d 文件已查看",
@@ -1773,6 +1786,8 @@
"repo.pulls.title_desc": "请求将 %[1]d 次代码提交从 %[2]s 合并至 %[3]s",
"repo.pulls.merged_title_desc": "于 %[4]s 将 %[1]d 次代码提交从 %[2]s合并至 %[3]s",
"repo.pulls.change_target_branch_at": "将目标分支从 %s 更改为 %s %s",
+ "repo.pulls.marked_as_work_in_progress_at": "已将合并请求标记为进行中 %s",
+ "repo.pulls.marked_as_ready_for_review_at": "已将合并请求标记为准备评审 %s",
"repo.pulls.tab_conversation": "对话内容",
"repo.pulls.tab_commits": "代码提交",
"repo.pulls.tab_files": "文件变动",
@@ -1791,6 +1806,7 @@
"repo.pulls.remove_prefix": "删除 %s 前缀",
"repo.pulls.data_broken": "此合并请求因为派生仓库信息缺失而中断。",
"repo.pulls.files_conflicted": "此合并请求有变更与目标分支冲突。",
+ "repo.pulls.files_conflicted_no_listed_files": "(未列出冲突文件)",
"repo.pulls.is_checking": "正在进行合并冲突检查…",
"repo.pulls.is_ancestor": "此分支已经包含在目标分支中,没有什么可以合并。",
"repo.pulls.is_empty": "此分支上的更改已经在目标分支上。这将是一个空提交。",
@@ -1845,7 +1861,8 @@
"repo.pulls.status_checking": "一些检查仍在等待运行",
"repo.pulls.status_checks_success": "所有检查均成功",
"repo.pulls.status_checks_warning": "一些检查报告了警告",
- "repo.pulls.status_checks_failure": "一些检查失败了",
+ "repo.pulls.status_checks_failure_required": "一些必要检查失败",
+ "repo.pulls.status_checks_failure_optional": "一些可选检查失败",
"repo.pulls.status_checks_error": "一些检查报告了错误",
"repo.pulls.status_checks_requested": "必须",
"repo.pulls.status_checks_details": "详情",
@@ -2115,6 +2132,8 @@
"repo.settings.pulls.ignore_whitespace": "忽略空白冲突",
"repo.settings.pulls.enable_autodetect_manual_merge": "启用自动检查手动合并(注意:在某些特殊情况下可能会出现误判)",
"repo.settings.pulls.allow_rebase_update": "允许通过变基更新合并请求分支",
+ "repo.settings.pulls.default_target_branch": "新合并请求的默认目标分支",
+ "repo.settings.pulls.default_target_branch_default": "默认分支(%s)",
"repo.settings.pulls.default_delete_branch_after_merge": "默认合并后删除合并请求分支",
"repo.settings.pulls.default_allow_edits_from_maintainers": "默认允许维护者编辑",
"repo.settings.releases_desc": "启用仓库发布",
@@ -2427,7 +2446,8 @@
"repo.settings.block_outdated_branch_desc": "当头部分支落后基础分支时,不能合并。",
"repo.settings.block_admin_merge_override": "管理员须遵守分支保护规则",
"repo.settings.block_admin_merge_override_desc": "管理员须遵守分支保护规则,不能规避该规则。",
- "repo.settings.default_branch_desc": "请选择一个默认的分支用于合并请求和提交:",
+ "repo.settings.default_branch_desc": "选择一个默认分支用于提交代码。",
+ "repo.settings.default_target_branch_desc": "如果在仓库高级设置的合并请求部分中进行了设置,则合并请求可以使用不同的默认目标分支。",
"repo.settings.merge_style_desc": "合并方式",
"repo.settings.default_merge_style_desc": "默认合并风格",
"repo.settings.choose_branch": "选择一个分支…",
@@ -2513,7 +2533,7 @@
"repo.diff.browse_source": "浏览代码",
"repo.diff.parent": "父节点",
"repo.diff.commit": "当前提交",
- "repo.diff.git-notes": "Notes",
+ "repo.diff.git-notes": "备注",
"repo.diff.data_not_available": "比较内容不可用",
"repo.diff.options_button": "Diff 选项",
"repo.diff.download_patch": "下载 Patch 文件",
@@ -2540,8 +2560,8 @@
"repo.diff.too_many_files": "某些文件未显示,因为此 diff 中更改的文件太多",
"repo.diff.show_more": "显示更多",
"repo.diff.load": "加载差异",
- "repo.diff.generated": "自动生成的",
- "repo.diff.vendored": "vendored",
+ "repo.diff.generated": "自动生成",
+ "repo.diff.vendored": "第三方依赖",
"repo.diff.comment.add_line_comment": "添加行内评论",
"repo.diff.comment.placeholder": "留下评论",
"repo.diff.comment.add_single_comment": "添加单条评论",
@@ -2641,7 +2661,7 @@
"repo.branch.restore_success": "分支「%s」已还原。",
"repo.branch.restore_failed": "分支「%s」还原失败。",
"repo.branch.protected_deletion_failed": "不能删除受保护的分支「%s」。",
- "repo.branch.default_deletion_failed": "不能删除默认分支「%s」。",
+ "repo.branch.default_deletion_failed": "分支「%s」是默认分支或合并请求目标分支,无法删除。",
"repo.branch.default_branch_not_exist": "默认分支「%s」不存在。",
"repo.branch.restore": "还原分支「%s」",
"repo.branch.download": "下载分支「%s」",
@@ -2665,7 +2685,7 @@
"repo.tag.create_tag_operation": "创建 Git 标签",
"repo.tag.confirm_create_tag": "创建 Git 标签",
"repo.tag.create_tag_from": "基于「%s」创建新 Git 标签",
- "repo.tag.create_success": "Git 标签「%s」已存在。",
+ "repo.tag.create_success": "Git 标签「%s」创建成功。",
"repo.topic.manage_topics": "管理主题",
"repo.topic.done": "保存",
"repo.topic.count_prompt": "您最多选择25个主题",
@@ -2926,7 +2946,7 @@
"admin.dashboard.delete_old_actions.started": "已开始从数据库中删除所有旧工作流记录。",
"admin.dashboard.update_checker": "更新检查器",
"admin.dashboard.delete_old_system_notices": "从数据库中删除所有旧系统通知",
- "admin.dashboard.gc_lfs": "垃圾回收 LFS 元数据",
+ "admin.dashboard.gc_lfs": "对 LFS 元数据进行垃圾回收",
"admin.dashboard.stop_zombie_tasks": "停止僵尸工作流任务",
"admin.dashboard.stop_endless_tasks": "停止无限循环的工作流任务",
"admin.dashboard.cancel_abandoned_jobs": "取消已放弃的工作流任务",
@@ -3067,6 +3087,7 @@
"admin.auths.attribute_mail": "电子邮箱属性",
"admin.auths.attribute_ssh_public_key": "SSH公钥属性",
"admin.auths.attribute_avatar": "头像属性",
+ "admin.auths.ssh_keys_are_verified": "LDAP 中的 SSH 密钥被视为已验证",
"admin.auths.attributes_in_bind": "从 Bind DN 中拉取属性信息",
"admin.auths.allow_deactivate_all": "允许在搜索结果为空时停用所有用户",
"admin.auths.use_paged_search": "使用分页搜索",
@@ -3239,7 +3260,7 @@
"admin.config.mailer_sendmail_path": "Sendmail 路径",
"admin.config.mailer_sendmail_args": "Sendmail 的额外参数",
"admin.config.mailer_sendmail_timeout": "Sendmail 超时",
- "admin.config.mailer_use_dummy": "Dummy",
+ "admin.config.mailer_use_dummy": "模拟",
"admin.config.test_email_placeholder": "电子邮箱(例如,test@example.com)",
"admin.config.send_test_mail": "发送测试邮件",
"admin.config.send_test_mail_submit": "发送",
@@ -3278,8 +3299,6 @@
"admin.config.git_gc_args": "GC 参数",
"admin.config.git_migrate_timeout": "迁移操作超时",
"admin.config.git_mirror_timeout": "镜像更新操作超时",
- "admin.config.git_clone_timeout": "克隆操作超时",
- "admin.config.git_pull_timeout": "拉取操作超时",
"admin.config.git_gc_timeout": "GC 操作超时",
"admin.config.log_config": "日志配置",
"admin.config.logger_name_fmt": "%s 日志",
@@ -3319,7 +3338,7 @@
"admin.monitor.queue.numberinqueue": "队列中的数量",
"admin.monitor.queue.review_add": "查看 / 添加工作者",
"admin.monitor.queue.settings.title": "池设置",
- "admin.monitor.queue.settings.desc": "因为工作者队列阻塞,池正在动态扩展。",
+ "admin.monitor.queue.settings.desc": "工作池会根据其工作队列阻塞情况动态增长。",
"admin.monitor.queue.settings.maxnumberworkers": "最大工作者数量",
"admin.monitor.queue.settings.maxnumberworkers.placeholder": "当前 %[1]d",
"admin.monitor.queue.settings.maxnumberworkers.error": "最大工作者数必须是数字",
@@ -3488,7 +3507,7 @@
"packages.container.pull": "从命令行拉取镜像:",
"packages.container.images": "镜像",
"packages.container.digest": "摘要",
- "packages.container.multi_arch": "OS / Arch",
+ "packages.container.multi_arch": "OS / 架构",
"packages.container.layers": "镜像层",
"packages.container.labels": "标签",
"packages.container.labels.key": "键",
diff --git a/package.json b/package.json
index a541828eb9..92c218cddb 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"type": "module",
- "packageManager": "pnpm@10.26.0",
+ "packageManager": "pnpm@10.30.0",
"engines": {
"node": ">= 22.6.0",
"pnpm": ">= 10.0.0"
@@ -8,118 +8,118 @@
"dependencies": {
"@citation-js/core": "0.7.21",
"@citation-js/plugin-bibtex": "0.7.21",
- "@citation-js/plugin-csl": "0.7.21",
- "@citation-js/plugin-software-formats": "0.6.1",
+ "@citation-js/plugin-csl": "0.7.22",
+ "@citation-js/plugin-software-formats": "0.6.2",
"@github/markdown-toolbar-element": "2.2.3",
"@github/paste-markdown": "1.5.3",
"@github/relative-time-element": "5.0.0",
- "@github/text-expander-element": "2.9.2",
+ "@github/text-expander-element": "2.9.4",
"@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
- "@primer/octicons": "19.21.1",
+ "@mermaid-js/layout-elk": "0.2.0",
+ "@primer/octicons": "19.22.0",
"@resvg/resvg-wasm": "2.6.2",
"@silverwind/vue3-calendar-heatmap": "2.1.1",
"@techknowlogick/license-checker-webpack-plugin": "0.3.0",
"add-asset-webpack-plugin": "3.1.1",
"ansi_up": "6.0.6",
- "asciinema-player": "3.13.5",
+ "asciinema-player": "3.14.15",
"camelcase-keys": "10.0.1",
"chart.js": "4.5.1",
"chartjs-adapter-dayjs-4": "1.0.4",
"chartjs-plugin-zoom": "2.2.0",
- "clippie": "4.1.9",
+ "clippie": "4.1.10",
"compare-versions": "6.1.1",
"cropperjs": "1.6.2",
- "css-loader": "7.1.2",
+ "css-loader": "7.1.4",
"dayjs": "1.11.19",
"dropzone": "6.0.0-beta.2",
"easymde": "2.20.0",
- "esbuild-loader": "4.4.0",
+ "esbuild-loader": "4.4.2",
"htmx.org": "2.0.8",
"idiomorph": "0.7.4",
- "jquery": "3.7.1",
- "katex": "0.16.27",
- "mermaid": "11.12.2",
- "mini-css-extract-plugin": "2.9.4",
+ "jquery": "4.0.0",
+ "js-yaml": "4.1.1",
+ "katex": "0.16.28",
+ "mermaid": "11.12.3",
+ "mini-css-extract-plugin": "2.10.0",
"monaco-editor": "0.55.1",
"monaco-editor-webpack-plugin": "7.1.1",
- "online-3d-viewer": "0.17.0",
+ "online-3d-viewer": "0.18.0",
"pdfobject": "2.3.1",
- "perfect-debounce": "2.0.0",
+ "perfect-debounce": "2.1.0",
"postcss": "8.5.6",
- "postcss-loader": "8.2.0",
- "sortablejs": "1.15.6",
- "swagger-ui-dist": "5.31.0",
+ "postcss-loader": "8.2.1",
+ "sortablejs": "1.15.7",
+ "swagger-ui-dist": "5.31.1",
"tailwindcss": "3.4.17",
"throttle-debounce": "5.0.2",
- "tinycolor2": "1.6.0",
+ "colord": "2.9.3",
"tippy.js": "6.3.7",
"toastify-js": "1.12.0",
"tributejs": "5.1.3",
"uint8-to-base64": "0.2.1",
"vanilla-colorful": "0.7.2",
- "vue": "3.5.25",
+ "vue": "3.5.28",
"vue-bar-graph": "2.2.0",
"vue-chartjs": "5.3.3",
"vue-loader": "17.4.2",
- "webpack": "5.104.0",
+ "webpack": "5.105.2",
"webpack-cli": "6.0.1",
"wrap-ansi": "9.0.2"
},
"devDependencies": {
- "@eslint-community/eslint-plugin-eslint-comments": "4.5.0",
+ "@eslint-community/eslint-plugin-eslint-comments": "4.6.0",
"@eslint/json": "0.14.0",
- "@playwright/test": "1.57.0",
- "@stylistic/eslint-plugin": "5.6.1",
- "@stylistic/stylelint-plugin": "4.0.0",
+ "@playwright/test": "1.58.2",
+ "@stylistic/eslint-plugin": "5.8.0",
+ "@stylistic/stylelint-plugin": "5.0.1",
"@types/codemirror": "5.60.17",
"@types/dropzone": "5.7.9",
"@types/jquery": "3.5.33",
- "@types/katex": "0.16.7",
+ "@types/js-yaml": "4.0.9",
+ "@types/katex": "0.16.8",
+ "@types/node": "25.2.3",
"@types/pdfobject": "2.2.5",
"@types/sortablejs": "1.15.9",
"@types/swagger-ui-dist": "3.30.6",
"@types/throttle-debounce": "5.0.2",
- "@types/tinycolor2": "1.4.6",
"@types/toastify-js": "1.12.4",
- "@typescript-eslint/parser": "8.50.0",
- "@vitejs/plugin-vue": "6.0.3",
- "@vitest/eslint-plugin": "1.5.2",
+ "@typescript-eslint/parser": "8.56.0",
+ "@vitejs/plugin-vue": "6.0.4",
+ "@vitest/eslint-plugin": "1.6.9",
"eslint": "9.39.2",
"eslint-import-resolver-typescript": "4.4.4",
"eslint-plugin-array-func": "5.1.0",
"eslint-plugin-github": "6.0.0",
"eslint-plugin-import-x": "4.16.1",
- "eslint-plugin-playwright": "2.4.0",
- "eslint-plugin-regexp": "2.10.0",
- "eslint-plugin-sonarjs": "3.0.5",
- "eslint-plugin-unicorn": "62.0.0",
- "eslint-plugin-vue": "10.6.2",
+ "eslint-plugin-playwright": "2.5.1",
+ "eslint-plugin-regexp": "3.0.0",
+ "eslint-plugin-sonarjs": "3.0.7",
+ "eslint-plugin-unicorn": "63.0.0",
+ "eslint-plugin-vue": "10.8.0",
"eslint-plugin-vue-scoped-css": "2.12.0",
- "eslint-plugin-wc": "3.0.2",
- "globals": "16.5.0",
- "happy-dom": "20.0.11",
+ "eslint-plugin-wc": "3.1.0",
+ "globals": "17.3.0",
+ "happy-dom": "20.6.1",
"jiti": "2.6.1",
"markdownlint-cli": "0.47.0",
- "material-icon-theme": "5.29.0",
+ "material-icon-theme": "5.31.0",
"nolyfill": "1.0.44",
- "postcss-html": "1.8.0",
- "spectral-cli-bundle": "1.0.3",
- "stylelint": "16.26.1",
- "stylelint-config-recommended": "17.0.0",
- "stylelint-declaration-block-no-ignored-properties": "2.8.0",
+ "postcss-html": "1.8.1",
+ "spectral-cli-bundle": "1.0.7",
+ "stylelint": "17.3.0",
+ "stylelint-config-recommended": "18.0.0",
+ "stylelint-declaration-block-no-ignored-properties": "3.0.0",
"stylelint-declaration-strict-value": "1.10.11",
- "stylelint-value-no-unknown-custom-properties": "6.0.1",
+ "stylelint-value-no-unknown-custom-properties": "6.1.1",
"svgo": "4.0.0",
"typescript": "5.9.3",
- "typescript-eslint": "8.50.0",
- "updates": "17.0.7",
- "vite-string-plugin": "1.4.9",
- "vitest": "4.0.16",
- "vue-tsc": "3.1.8"
+ "typescript-eslint": "8.56.0",
+ "updates": "17.5.7",
+ "vite-string-plugin": "2.0.1",
+ "vitest": "4.0.18",
+ "vue-tsc": "3.2.4"
},
- "browserslist": [
- "defaults"
- ],
"pnpm": {
"overrides": {
"array-includes": "npm:@nolyfill/array-includes@^1",
diff --git a/playwright.config.ts b/playwright.config.ts
index d1cd299e25..9e3396465a 100644
--- a/playwright.config.ts
+++ b/playwright.config.ts
@@ -2,7 +2,7 @@ import {devices} from '@playwright/test';
import {env} from 'node:process';
import type {PlaywrightTestConfig} from '@playwright/test';
-const BASE_URL = env.GITEA_URL?.replace?.(/\/$/g, '') || 'http://localhost:3000';
+const BASE_URL = env.GITEA_TEST_SERVER_URL?.replace?.(/\/$/g, '') || 'http://localhost:3000';
export default {
testDir: './tests/e2e/',
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 663d804d92..c8af82b03e 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -33,11 +33,11 @@ importers:
specifier: 0.7.21
version: 0.7.21(@citation-js/core@0.7.21)
'@citation-js/plugin-csl':
- specifier: 0.7.21
- version: 0.7.21(@citation-js/core@0.7.21)
+ specifier: 0.7.22
+ version: 0.7.22(@citation-js/core@0.7.21)
'@citation-js/plugin-software-formats':
- specifier: 0.6.1
- version: 0.6.1
+ specifier: 0.6.2
+ version: 0.6.2
'@github/markdown-toolbar-element':
specifier: 2.2.3
version: 2.2.3
@@ -48,32 +48,35 @@ importers:
specifier: 5.0.0
version: 5.0.0
'@github/text-expander-element':
- specifier: 2.9.2
- version: 2.9.2
+ specifier: 2.9.4
+ version: 2.9.4
'@mcaptcha/vanilla-glue':
specifier: 0.1.0-alpha-3
version: 0.1.0-alpha-3
+ '@mermaid-js/layout-elk':
+ specifier: 0.2.0
+ version: 0.2.0(mermaid@11.12.3)
'@primer/octicons':
- specifier: 19.21.1
- version: 19.21.1
+ specifier: 19.22.0
+ version: 19.22.0
'@resvg/resvg-wasm':
specifier: 2.6.2
version: 2.6.2
'@silverwind/vue3-calendar-heatmap':
specifier: 2.1.1
- version: 2.1.1(tippy.js@6.3.7)(vue@3.5.25(typescript@5.9.3))
+ version: 2.1.1(tippy.js@6.3.7)(vue@3.5.28(typescript@5.9.3))
'@techknowlogick/license-checker-webpack-plugin':
specifier: 0.3.0
- version: 0.3.0(webpack@5.104.0)
+ version: 0.3.0(webpack@5.105.2)
add-asset-webpack-plugin:
specifier: 3.1.1
- version: 3.1.1(webpack@5.104.0)
+ version: 3.1.1(webpack@5.105.2)
ansi_up:
specifier: 6.0.6
version: 6.0.6
asciinema-player:
- specifier: 3.13.5
- version: 3.13.5
+ specifier: 3.14.15
+ version: 3.14.15
camelcase-keys:
specifier: 10.0.1
version: 10.0.1
@@ -87,8 +90,11 @@ importers:
specifier: 2.2.0
version: 2.2.0(chart.js@4.5.1)
clippie:
- specifier: 4.1.9
- version: 4.1.9
+ specifier: 4.1.10
+ version: 4.1.10
+ colord:
+ specifier: 2.9.3
+ version: 2.9.3
compare-versions:
specifier: 6.1.1
version: 6.1.1
@@ -96,8 +102,8 @@ importers:
specifier: 1.6.2
version: 1.6.2
css-loader:
- specifier: 7.1.2
- version: 7.1.2(webpack@5.104.0)
+ specifier: 7.1.4
+ version: 7.1.4(webpack@5.105.2)
dayjs:
specifier: 1.11.19
version: 1.11.19
@@ -108,8 +114,8 @@ importers:
specifier: 2.20.0
version: 2.20.0
esbuild-loader:
- specifier: 4.4.0
- version: 4.4.0(webpack@5.104.0)
+ specifier: 4.4.2
+ version: 4.4.2(webpack@5.105.2)
htmx.org:
specifier: 2.0.8
version: 2.0.8
@@ -117,53 +123,53 @@ importers:
specifier: 0.7.4
version: 0.7.4
jquery:
- specifier: 3.7.1
- version: 3.7.1
+ specifier: 4.0.0
+ version: 4.0.0
+ js-yaml:
+ specifier: 4.1.1
+ version: 4.1.1
katex:
- specifier: 0.16.27
- version: 0.16.27
+ specifier: 0.16.28
+ version: 0.16.28
mermaid:
- specifier: 11.12.2
- version: 11.12.2
+ specifier: 11.12.3
+ version: 11.12.3
mini-css-extract-plugin:
- specifier: 2.9.4
- version: 2.9.4(webpack@5.104.0)
+ specifier: 2.10.0
+ version: 2.10.0(webpack@5.105.2)
monaco-editor:
specifier: 0.55.1
version: 0.55.1
monaco-editor-webpack-plugin:
specifier: 7.1.1
- version: 7.1.1(monaco-editor@0.55.1)(webpack@5.104.0)
+ version: 7.1.1(monaco-editor@0.55.1)(webpack@5.105.2)
online-3d-viewer:
- specifier: 0.17.0
- version: 0.17.0
+ specifier: 0.18.0
+ version: 0.18.0
pdfobject:
specifier: 2.3.1
version: 2.3.1
perfect-debounce:
- specifier: 2.0.0
- version: 2.0.0
+ specifier: 2.1.0
+ version: 2.1.0
postcss:
specifier: 8.5.6
version: 8.5.6
postcss-loader:
- specifier: 8.2.0
- version: 8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.0)
+ specifier: 8.2.1
+ version: 8.2.1(postcss@8.5.6)(typescript@5.9.3)(webpack@5.105.2)
sortablejs:
- specifier: 1.15.6
- version: 1.15.6
+ specifier: 1.15.7
+ version: 1.15.7
swagger-ui-dist:
- specifier: 5.31.0
- version: 5.31.0
+ specifier: 5.31.1
+ version: 5.31.1
tailwindcss:
specifier: 3.4.17
version: 3.4.17
throttle-debounce:
specifier: 5.0.2
version: 5.0.2
- tinycolor2:
- specifier: 1.6.0
- version: 1.6.0
tippy.js:
specifier: 6.3.7
version: 6.3.7
@@ -180,42 +186,42 @@ importers:
specifier: 0.7.2
version: 0.7.2
vue:
- specifier: 3.5.25
- version: 3.5.25(typescript@5.9.3)
+ specifier: 3.5.28
+ version: 3.5.28(typescript@5.9.3)
vue-bar-graph:
specifier: 2.2.0
version: 2.2.0(typescript@5.9.3)
vue-chartjs:
specifier: 5.3.3
- version: 5.3.3(chart.js@4.5.1)(vue@3.5.25(typescript@5.9.3))
+ version: 5.3.3(chart.js@4.5.1)(vue@3.5.28(typescript@5.9.3))
vue-loader:
specifier: 17.4.2
- version: 17.4.2(vue@3.5.25(typescript@5.9.3))(webpack@5.104.0)
+ version: 17.4.2(vue@3.5.28(typescript@5.9.3))(webpack@5.105.2)
webpack:
- specifier: 5.104.0
- version: 5.104.0(webpack-cli@6.0.1)
+ specifier: 5.105.2
+ version: 5.105.2(webpack-cli@6.0.1)
webpack-cli:
specifier: 6.0.1
- version: 6.0.1(webpack@5.104.0)
+ version: 6.0.1(webpack@5.105.2)
wrap-ansi:
specifier: 9.0.2
version: 9.0.2
devDependencies:
'@eslint-community/eslint-plugin-eslint-comments':
- specifier: 4.5.0
- version: 4.5.0(eslint@9.39.2(jiti@2.6.1))
+ specifier: 4.6.0
+ version: 4.6.0(eslint@9.39.2(jiti@2.6.1))
'@eslint/json':
specifier: 0.14.0
version: 0.14.0
'@playwright/test':
- specifier: 1.57.0
- version: 1.57.0
+ specifier: 1.58.2
+ version: 1.58.2
'@stylistic/eslint-plugin':
- specifier: 5.6.1
- version: 5.6.1(eslint@9.39.2(jiti@2.6.1))
+ specifier: 5.8.0
+ version: 5.8.0(eslint@9.39.2(jiti@2.6.1))
'@stylistic/stylelint-plugin':
- specifier: 4.0.0
- version: 4.0.0(stylelint@16.26.1(typescript@5.9.3))
+ specifier: 5.0.1
+ version: 5.0.1(stylelint@17.3.0(typescript@5.9.3))
'@types/codemirror':
specifier: 5.60.17
version: 5.60.17
@@ -225,9 +231,15 @@ importers:
'@types/jquery':
specifier: 3.5.33
version: 3.5.33
+ '@types/js-yaml':
+ specifier: 4.0.9
+ version: 4.0.9
'@types/katex':
- specifier: 0.16.7
- version: 0.16.7
+ specifier: 0.16.8
+ version: 0.16.8
+ '@types/node':
+ specifier: 25.2.3
+ version: 25.2.3
'@types/pdfobject':
specifier: 2.2.5
version: 2.2.5
@@ -240,27 +252,24 @@ importers:
'@types/throttle-debounce':
specifier: 5.0.2
version: 5.0.2
- '@types/tinycolor2':
- specifier: 1.4.6
- version: 1.4.6
'@types/toastify-js':
specifier: 1.12.4
version: 1.12.4
'@typescript-eslint/parser':
- specifier: 8.50.0
- version: 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ specifier: 8.56.0
+ version: 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
'@vitejs/plugin-vue':
- specifier: 6.0.3
- version: 6.0.3(vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))(vue@3.5.25(typescript@5.9.3))
+ specifier: 6.0.4
+ version: 6.0.4(vite@7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))(vue@3.5.28(typescript@5.9.3))
'@vitest/eslint-plugin':
- specifier: 1.5.2
- version: 1.5.2(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))
+ specifier: 1.6.9
+ version: 1.6.9(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.18(@types/node@25.2.3)(happy-dom@20.6.1)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))
eslint:
specifier: 9.39.2
version: 9.39.2(jiti@2.6.1)
eslint-import-resolver-typescript:
specifier: 4.4.4
- version: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1))
+ version: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-array-func:
specifier: 5.1.0
version: 5.1.0(eslint@9.39.2(jiti@2.6.1))
@@ -269,34 +278,34 @@ importers:
version: 6.0.0(@types/eslint@9.6.1)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-import-x:
specifier: 4.16.1
- version: 4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1))
+ version: 4.16.1(@typescript-eslint/utils@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-playwright:
- specifier: 2.4.0
- version: 2.4.0(eslint@9.39.2(jiti@2.6.1))
+ specifier: 2.5.1
+ version: 2.5.1(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-regexp:
- specifier: 2.10.0
- version: 2.10.0(eslint@9.39.2(jiti@2.6.1))
+ specifier: 3.0.0
+ version: 3.0.0(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-sonarjs:
- specifier: 3.0.5
- version: 3.0.5(eslint@9.39.2(jiti@2.6.1))
+ specifier: 3.0.7
+ version: 3.0.7(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-unicorn:
- specifier: 62.0.0
- version: 62.0.0(eslint@9.39.2(jiti@2.6.1))
+ specifier: 63.0.0
+ version: 63.0.0(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-vue:
- specifier: 10.6.2
- version: 10.6.2(@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@2.6.1)))(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1)))
+ specifier: 10.8.0
+ version: 10.8.0(@stylistic/eslint-plugin@5.8.0(eslint@9.39.2(jiti@2.6.1)))(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@9.39.2(jiti@2.6.1)))
eslint-plugin-vue-scoped-css:
specifier: 2.12.0
- version: 2.12.0(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1)))
+ version: 2.12.0(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@9.39.2(jiti@2.6.1)))
eslint-plugin-wc:
- specifier: 3.0.2
- version: 3.0.2(eslint@9.39.2(jiti@2.6.1))
+ specifier: 3.1.0
+ version: 3.1.0(eslint@9.39.2(jiti@2.6.1))
globals:
- specifier: 16.5.0
- version: 16.5.0
+ specifier: 17.3.0
+ version: 17.3.0
happy-dom:
- specifier: 20.0.11
- version: 20.0.11
+ specifier: 20.6.1
+ version: 20.6.1
jiti:
specifier: 2.6.1
version: 2.6.1
@@ -304,32 +313,32 @@ importers:
specifier: 0.47.0
version: 0.47.0
material-icon-theme:
- specifier: 5.29.0
- version: 5.29.0
+ specifier: 5.31.0
+ version: 5.31.0
nolyfill:
specifier: 1.0.44
version: 1.0.44
postcss-html:
- specifier: 1.8.0
- version: 1.8.0
+ specifier: 1.8.1
+ version: 1.8.1
spectral-cli-bundle:
- specifier: 1.0.3
- version: 1.0.3
+ specifier: 1.0.7
+ version: 1.0.7
stylelint:
- specifier: 16.26.1
- version: 16.26.1(typescript@5.9.3)
+ specifier: 17.3.0
+ version: 17.3.0(typescript@5.9.3)
stylelint-config-recommended:
- specifier: 17.0.0
- version: 17.0.0(stylelint@16.26.1(typescript@5.9.3))
+ specifier: 18.0.0
+ version: 18.0.0(stylelint@17.3.0(typescript@5.9.3))
stylelint-declaration-block-no-ignored-properties:
- specifier: 2.8.0
- version: 2.8.0(stylelint@16.26.1(typescript@5.9.3))
+ specifier: 3.0.0
+ version: 3.0.0(stylelint@17.3.0(typescript@5.9.3))
stylelint-declaration-strict-value:
specifier: 1.10.11
- version: 1.10.11(stylelint@16.26.1(typescript@5.9.3))
+ version: 1.10.11(stylelint@17.3.0(typescript@5.9.3))
stylelint-value-no-unknown-custom-properties:
- specifier: 6.0.1
- version: 6.0.1(stylelint@16.26.1(typescript@5.9.3))
+ specifier: 6.1.1
+ version: 6.1.1(stylelint@17.3.0(typescript@5.9.3))
svgo:
specifier: 4.0.0
version: 4.0.0
@@ -337,20 +346,20 @@ importers:
specifier: 5.9.3
version: 5.9.3
typescript-eslint:
- specifier: 8.50.0
- version: 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ specifier: 8.56.0
+ version: 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
updates:
- specifier: 17.0.7
- version: 17.0.7
+ specifier: 17.5.7
+ version: 17.5.7
vite-string-plugin:
- specifier: 1.4.9
- version: 1.4.9
+ specifier: 2.0.1
+ version: 2.0.1(vite@7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))
vitest:
- specifier: 4.0.16
- version: 4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2)
+ specifier: 4.0.18
+ version: 4.0.18(@types/node@25.2.3)(happy-dom@20.6.1)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)
vue-tsc:
- specifier: 3.1.8
- version: 3.1.8(typescript@5.9.3)
+ specifier: 3.2.4
+ version: 3.2.4(typescript@5.9.3)
packages:
@@ -361,8 +370,8 @@ packages:
'@antfu/install-pkg@1.1.0':
resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==}
- '@babel/code-frame@7.27.1':
- resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==}
+ '@babel/code-frame@7.29.0':
+ resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==}
engines: {node: '>=6.9.0'}
'@babel/helper-string-parser@7.27.1':
@@ -373,42 +382,42 @@ packages:
resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==}
engines: {node: '>=6.9.0'}
- '@babel/parser@7.28.5':
- resolution: {integrity: sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==}
+ '@babel/parser@7.29.0':
+ resolution: {integrity: sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==}
engines: {node: '>=6.0.0'}
hasBin: true
- '@babel/runtime@7.28.4':
- resolution: {integrity: sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==}
+ '@babel/runtime@7.28.6':
+ resolution: {integrity: sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==}
engines: {node: '>=6.9.0'}
- '@babel/types@7.28.5':
- resolution: {integrity: sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==}
+ '@babel/types@7.29.0':
+ resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==}
engines: {node: '>=6.9.0'}
- '@braintree/sanitize-url@7.1.1':
- resolution: {integrity: sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==}
+ '@braintree/sanitize-url@7.1.2':
+ resolution: {integrity: sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==}
- '@cacheable/memory@2.0.6':
- resolution: {integrity: sha512-7e8SScMocHxcAb8YhtkbMhGG+EKLRIficb1F5sjvhSYsWTZGxvg4KIDp8kgxnV2PUJ3ddPe6J9QESjKvBWRDkg==}
+ '@cacheable/memory@2.0.7':
+ resolution: {integrity: sha512-RbxnxAMf89Tp1dLhXMS7ceft/PGsDl1Ip7T20z5nZ+pwIAsQ1p2izPjVG69oCLv/jfQ7HDPHTWK0c9rcAWXN3A==}
- '@cacheable/utils@2.3.2':
- resolution: {integrity: sha512-8kGE2P+HjfY8FglaOiW+y8qxcaQAfAhVML+i66XJR3YX5FtyDqn6Txctr3K2FrbxLKixRRYYBWMbuGciOhYNDg==}
+ '@cacheable/utils@2.3.4':
+ resolution: {integrity: sha512-knwKUJEYgIfwShABS1BX6JyJJTglAFcEU7EXqzTdiGCXur4voqkiJkdgZIQtWNFhynzDWERcTYv/sETMu3uJWA==}
- '@chevrotain/cst-dts-gen@11.0.3':
- resolution: {integrity: sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==}
+ '@chevrotain/cst-dts-gen@11.1.1':
+ resolution: {integrity: sha512-fRHyv6/f542qQqiRGalrfJl/evD39mAvbJLCekPazhiextEatq1Jx1K/i9gSd5NNO0ds03ek0Cbo/4uVKmOBcw==}
- '@chevrotain/gast@11.0.3':
- resolution: {integrity: sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==}
+ '@chevrotain/gast@11.1.1':
+ resolution: {integrity: sha512-Ko/5vPEYy1vn5CbCjjvnSO4U7GgxyGm+dfUZZJIWTlQFkXkyym0jFYrWEU10hyCjrA7rQtiHtBr0EaZqvHFZvg==}
- '@chevrotain/regexp-to-ast@11.0.3':
- resolution: {integrity: sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==}
+ '@chevrotain/regexp-to-ast@11.1.1':
+ resolution: {integrity: sha512-ctRw1OKSXkOrR8VTvOxrQ5USEc4sNrfwXHa1NuTcR7wre4YbjPcKw+82C2uylg/TEwFRgwLmbhlln4qkmDyteg==}
- '@chevrotain/types@11.0.3':
- resolution: {integrity: sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==}
+ '@chevrotain/types@11.1.1':
+ resolution: {integrity: sha512-wb2ToxG8LkgPYnKe9FH8oGn3TMCBdnwiuNC5l5y+CtlaVRbCytU0kbVsk6CGrqTL4ZN4ksJa0TXOYbxpbthtqw==}
- '@chevrotain/utils@11.0.3':
- resolution: {integrity: sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==}
+ '@chevrotain/utils@11.1.1':
+ resolution: {integrity: sha512-71eTYMzYXYSFPrbg/ZwftSaSDld7UYlS8OQa3lNnn9jzNtpFbaReRRyghzqS7rI3CDaorqpPJJcXGHK+FE1TVQ==}
'@citation-js/core@0.7.21':
resolution: {integrity: sha512-Vobv2/Yfnn6C6BVO/pvj7madQ7Mfzl83/jAWwixbemGF6ZThhGMz8++FD9hWHyHXDMYuLGa6fK68c2VsolZmTA==}
@@ -428,407 +437,256 @@ packages:
peerDependencies:
'@citation-js/core': ^0.7.0
- '@citation-js/plugin-cff@0.6.1':
- resolution: {integrity: sha512-tLjTgsfzNOdQWGn5mNc2NAaydHnlRucSERoyAXLN7u0BQBfp7j5zwdxCmxcQD/N7hH3fpDKMG+qDzbqpJuKyNA==}
+ '@citation-js/plugin-cff@0.6.2':
+ resolution: {integrity: sha512-jvERDFbtQQOBb9s+E8VbRIYsEIb2YEbcLH3yVDxXK0xqBGQDE5m8JZAYUkENy4FmbaD979l0+xJTWAsYN1pV/w==}
engines: {node: '>=14.0.0'}
- '@citation-js/plugin-csl@0.7.21':
- resolution: {integrity: sha512-23ySPYCWDiU1JqhvqTYLn6+C11LkeJxTfyPKKXCK2wan9iF9ODvY7JYNl/OJgtwvu485dKfDpqqptWv6i71Meg==}
+ '@citation-js/plugin-csl@0.7.22':
+ resolution: {integrity: sha512-/rGdtbeP3nS4uZDdEbQUHT8PrUcIs0da2t+sWMKYXoOhXQqfw3oJJ7p4tUD+R8lptyIR5Eq20/DFk/kQDdLpYg==}
engines: {node: '>=16.0.0'}
peerDependencies:
'@citation-js/core': ^0.7.0
- '@citation-js/plugin-github@0.6.1':
- resolution: {integrity: sha512-1ZeSgQ5AoYsa8n2acVooUeRk76oA8rLszYNBjzj5z6MPa11BZlQJ9O+Gy4tHjlImvsENLbLPx5f8/V1VHXaCfQ==}
+ '@citation-js/plugin-github@0.6.2':
+ resolution: {integrity: sha512-NKq/1Ja060o4II1Z4p1+utwpvMsx+XIWdNiFvnJDfR2Z9E1xGETjByPpdobGBsteUTpJPEe9OVfF8Dee/Q7zLQ==}
engines: {node: '>=14.0.0'}
- '@citation-js/plugin-npm@0.6.1':
- resolution: {integrity: sha512-rojJA+l/p2KBpDoY+8n0YfNyQO1Aw03fQR5BN+gXD1LNAP1V+8wqvdPsaHnzPsrhrd4ZXDR7ch/Nk0yynPkJ3Q==}
+ '@citation-js/plugin-npm@0.6.2':
+ resolution: {integrity: sha512-mbQg/N9HM+gOqHJCdDZEElSW+h/oM94snKCl3llXuZ4MEH3tHraElS6CYRW/vW7s8KUTTHhgE62Q6ua5aRml8g==}
engines: {node: '>=14.0.0'}
- '@citation-js/plugin-software-formats@0.6.1':
- resolution: {integrity: sha512-BDF9rqi56K0hoTgYTVANCFVRSbWKC9V06Uap7oa8SjqCTgnHJAy8t/F3NxsyYPPG+zmRsLW9VNbcIsJOl0eu/w==}
+ '@citation-js/plugin-software-formats@0.6.2':
+ resolution: {integrity: sha512-x1IG0LBKglBU6SuiiKfvOtn7g7o7s+YhQhB44o7zrFaKEO8jkyQ5qMKtM5VFdCBL7teLfzZLjpjNkdJXtZ6XZw==}
engines: {node: '>=14.0.0'}
- '@citation-js/plugin-yaml@0.6.1':
- resolution: {integrity: sha512-XEVVks1cJTqRbjy+nmthfw/puR6NwRB3fyJWi1tX13UYXlkhP/h45nsv4zjgLLGekdcMHQvhad9MAYunOftGKA==}
+ '@citation-js/plugin-yaml@0.6.2':
+ resolution: {integrity: sha512-qw53Uy2fDekKAzNhv8pkAWpIccIxyKQ3nQuClMgzDPdyeWg34ElIs4bDub9ZZup15fy+X//2gP8k12RJqNo4lA==}
engines: {node: '>=14.0.0'}
- '@citation-js/plugin-zenodo@0.6.1':
- resolution: {integrity: sha512-bUybENHoZqJ6gheUqgkumjI+mu+fA2bg6VoniDmZTb7Qng9iEpi+IWEAR26/vBE0gK0EWrJjczyDW3HCwrhvVw==}
+ '@citation-js/plugin-zenodo@0.6.2':
+ resolution: {integrity: sha512-3XQOO3u4WXY/7AWZyQ+9SuBzS8bYTlJ+NF1uCgrZO64g36nK5iIc5YV9cBl2TL2QhHF6S36nvAsXsj5fX9FeHw==}
engines: {node: '>=14.0.0'}
- '@csstools/css-parser-algorithms@3.0.5':
- resolution: {integrity: sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==}
- engines: {node: '>=18'}
+ '@csstools/css-calc@3.1.1':
+ resolution: {integrity: sha512-HJ26Z/vmsZQqs/o3a6bgKslXGFAungXGbinULZO3eMsOyNJHeBBZfup5FiZInOghgoM4Hwnmw+OgbJCNg1wwUQ==}
+ engines: {node: '>=20.19.0'}
peerDependencies:
- '@csstools/css-tokenizer': ^3.0.4
+ '@csstools/css-parser-algorithms': ^4.0.0
+ '@csstools/css-tokenizer': ^4.0.0
- '@csstools/css-syntax-patches-for-csstree@1.0.21':
- resolution: {integrity: sha512-plP8N8zKfEZ26figX4Nvajx8DuzfuRpLTqglQ5d0chfnt35Qt3X+m6ASZ+rG0D0kxe/upDVNwSIVJP5n4FuNfw==}
- engines: {node: '>=18'}
-
- '@csstools/css-tokenizer@3.0.4':
- resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==}
- engines: {node: '>=18'}
-
- '@csstools/media-query-list-parser@4.0.3':
- resolution: {integrity: sha512-HAYH7d3TLRHDOUQK4mZKf9k9Ph/m8Akstg66ywKR4SFAigjs3yBiUeZtFxywiTm5moZMAp/5W/ZuFnNXXYLuuQ==}
- engines: {node: '>=18'}
+ '@csstools/css-parser-algorithms@4.0.0':
+ resolution: {integrity: sha512-+B87qS7fIG3L5h3qwJ/IFbjoVoOe/bpOdh9hAjXbvx0o8ImEmUsGXN0inFOnk2ChCFgqkkGFQ+TpM5rbhkKe4w==}
+ engines: {node: '>=20.19.0'}
peerDependencies:
- '@csstools/css-parser-algorithms': ^3.0.5
- '@csstools/css-tokenizer': ^3.0.4
+ '@csstools/css-tokenizer': ^4.0.0
- '@csstools/selector-specificity@5.0.0':
- resolution: {integrity: sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==}
- engines: {node: '>=18'}
+ '@csstools/css-syntax-patches-for-csstree@1.0.27':
+ resolution: {integrity: sha512-sxP33Jwg1bviSUXAV43cVYdmjt2TLnLXNqCWl9xmxHawWVjGz/kEbdkr7F9pxJNBN2Mh+dq0crgItbW6tQvyow==}
+
+ '@csstools/css-tokenizer@4.0.0':
+ resolution: {integrity: sha512-QxULHAm7cNu72w97JUNCBFODFaXpbDg+dP8b/oWFAZ2MTRppA3U00Y2L1HqaS4J6yBqxwa/Y3nMBaxVKbB/NsA==}
+ engines: {node: '>=20.19.0'}
+
+ '@csstools/media-query-list-parser@5.0.0':
+ resolution: {integrity: sha512-T9lXmZOfnam3eMERPsszjY5NK0jX8RmThmmm99FZ8b7z8yMaFZWKwLWGZuTwdO3ddRY5fy13GmmEYZXB4I98Eg==}
+ engines: {node: '>=20.19.0'}
peerDependencies:
- postcss-selector-parser: ^7.0.0
+ '@csstools/css-parser-algorithms': ^4.0.0
+ '@csstools/css-tokenizer': ^4.0.0
+
+ '@csstools/selector-resolve-nested@4.0.0':
+ resolution: {integrity: sha512-9vAPxmp+Dx3wQBIUwc1v7Mdisw1kbbaGqXUM8QLTgWg7SoPGYtXBsMXvsFs/0Bn5yoFhcktzxNZGNaUt0VjgjA==}
+ engines: {node: '>=20.19.0'}
+ peerDependencies:
+ postcss-selector-parser: ^7.1.1
+
+ '@csstools/selector-specificity@6.0.0':
+ resolution: {integrity: sha512-4sSgl78OtOXEX/2d++8A83zHNTgwCJMaR24FvsYL7Uf/VS8HZk9PTwR51elTbGqMuwH3szLvvOXEaVnqn0Z3zA==}
+ engines: {node: '>=20.19.0'}
+ peerDependencies:
+ postcss-selector-parser: ^7.1.1
'@discoveryjs/json-ext@0.6.3':
resolution: {integrity: sha512-4B4OijXeVNOPZlYA2oEwWOTkzyltLao+xbotHQeqN++Rv27Y6s818+n2Qkp8q+Fxhn0t/5lA5X1Mxktud8eayQ==}
engines: {node: '>=14.17.0'}
- '@dual-bundle/import-meta-resolve@4.2.1':
- resolution: {integrity: sha512-id+7YRUgoUX6CgV0DtuhirQWodeeA7Lf4i2x71JS/vtA5pRb/hIGWlw+G6MeXvsM+MXrz0VAydTGElX1rAfgPg==}
+ '@emnapi/core@1.8.1':
+ resolution: {integrity: sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==}
- '@emnapi/core@1.7.1':
- resolution: {integrity: sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg==}
-
- '@emnapi/runtime@1.7.1':
- resolution: {integrity: sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==}
+ '@emnapi/runtime@1.8.1':
+ resolution: {integrity: sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==}
'@emnapi/wasi-threads@1.1.0':
resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==}
- '@esbuild/aix-ppc64@0.25.12':
- resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==}
+ '@esbuild/aix-ppc64@0.27.3':
+ resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
- '@esbuild/aix-ppc64@0.27.2':
- resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==}
- engines: {node: '>=18'}
- cpu: [ppc64]
- os: [aix]
-
- '@esbuild/android-arm64@0.25.12':
- resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==}
+ '@esbuild/android-arm64@0.27.3':
+ resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
- '@esbuild/android-arm64@0.27.2':
- resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [android]
-
- '@esbuild/android-arm@0.25.12':
- resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==}
+ '@esbuild/android-arm@0.27.3':
+ resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
- '@esbuild/android-arm@0.27.2':
- resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==}
- engines: {node: '>=18'}
- cpu: [arm]
- os: [android]
-
- '@esbuild/android-x64@0.25.12':
- resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==}
+ '@esbuild/android-x64@0.27.3':
+ resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
- '@esbuild/android-x64@0.27.2':
- resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [android]
-
- '@esbuild/darwin-arm64@0.25.12':
- resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==}
+ '@esbuild/darwin-arm64@0.27.3':
+ resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
- '@esbuild/darwin-arm64@0.27.2':
- resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [darwin]
-
- '@esbuild/darwin-x64@0.25.12':
- resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==}
+ '@esbuild/darwin-x64@0.27.3':
+ resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
- '@esbuild/darwin-x64@0.27.2':
- resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [darwin]
-
- '@esbuild/freebsd-arm64@0.25.12':
- resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==}
+ '@esbuild/freebsd-arm64@0.27.3':
+ resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
- '@esbuild/freebsd-arm64@0.27.2':
- resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [freebsd]
-
- '@esbuild/freebsd-x64@0.25.12':
- resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==}
+ '@esbuild/freebsd-x64@0.27.3':
+ resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
- '@esbuild/freebsd-x64@0.27.2':
- resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [freebsd]
-
- '@esbuild/linux-arm64@0.25.12':
- resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==}
+ '@esbuild/linux-arm64@0.27.3':
+ resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
- '@esbuild/linux-arm64@0.27.2':
- resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [linux]
-
- '@esbuild/linux-arm@0.25.12':
- resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==}
+ '@esbuild/linux-arm@0.27.3':
+ resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
- '@esbuild/linux-arm@0.27.2':
- resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==}
- engines: {node: '>=18'}
- cpu: [arm]
- os: [linux]
-
- '@esbuild/linux-ia32@0.25.12':
- resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==}
+ '@esbuild/linux-ia32@0.27.3':
+ resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
- '@esbuild/linux-ia32@0.27.2':
- resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==}
- engines: {node: '>=18'}
- cpu: [ia32]
- os: [linux]
-
- '@esbuild/linux-loong64@0.25.12':
- resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==}
+ '@esbuild/linux-loong64@0.27.3':
+ resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
- '@esbuild/linux-loong64@0.27.2':
- resolution: {integrity: sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==}
- engines: {node: '>=18'}
- cpu: [loong64]
- os: [linux]
-
- '@esbuild/linux-mips64el@0.25.12':
- resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==}
+ '@esbuild/linux-mips64el@0.27.3':
+ resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
- '@esbuild/linux-mips64el@0.27.2':
- resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==}
- engines: {node: '>=18'}
- cpu: [mips64el]
- os: [linux]
-
- '@esbuild/linux-ppc64@0.25.12':
- resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==}
+ '@esbuild/linux-ppc64@0.27.3':
+ resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
- '@esbuild/linux-ppc64@0.27.2':
- resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==}
- engines: {node: '>=18'}
- cpu: [ppc64]
- os: [linux]
-
- '@esbuild/linux-riscv64@0.25.12':
- resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==}
+ '@esbuild/linux-riscv64@0.27.3':
+ resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
- '@esbuild/linux-riscv64@0.27.2':
- resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==}
- engines: {node: '>=18'}
- cpu: [riscv64]
- os: [linux]
-
- '@esbuild/linux-s390x@0.25.12':
- resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==}
+ '@esbuild/linux-s390x@0.27.3':
+ resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
- '@esbuild/linux-s390x@0.27.2':
- resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==}
- engines: {node: '>=18'}
- cpu: [s390x]
- os: [linux]
-
- '@esbuild/linux-x64@0.25.12':
- resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==}
+ '@esbuild/linux-x64@0.27.3':
+ resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
- '@esbuild/linux-x64@0.27.2':
- resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [linux]
-
- '@esbuild/netbsd-arm64@0.25.12':
- resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==}
+ '@esbuild/netbsd-arm64@0.27.3':
+ resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [netbsd]
- '@esbuild/netbsd-arm64@0.27.2':
- resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [netbsd]
-
- '@esbuild/netbsd-x64@0.25.12':
- resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==}
+ '@esbuild/netbsd-x64@0.27.3':
+ resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
- '@esbuild/netbsd-x64@0.27.2':
- resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [netbsd]
-
- '@esbuild/openbsd-arm64@0.25.12':
- resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==}
+ '@esbuild/openbsd-arm64@0.27.3':
+ resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
- '@esbuild/openbsd-arm64@0.27.2':
- resolution: {integrity: sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [openbsd]
-
- '@esbuild/openbsd-x64@0.25.12':
- resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==}
+ '@esbuild/openbsd-x64@0.27.3':
+ resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
- '@esbuild/openbsd-x64@0.27.2':
- resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [openbsd]
-
- '@esbuild/openharmony-arm64@0.25.12':
- resolution: {integrity: sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==}
+ '@esbuild/openharmony-arm64@0.27.3':
+ resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openharmony]
- '@esbuild/openharmony-arm64@0.27.2':
- resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [openharmony]
-
- '@esbuild/sunos-x64@0.25.12':
- resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==}
+ '@esbuild/sunos-x64@0.27.3':
+ resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
- '@esbuild/sunos-x64@0.27.2':
- resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [sunos]
-
- '@esbuild/win32-arm64@0.25.12':
- resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==}
+ '@esbuild/win32-arm64@0.27.3':
+ resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
- '@esbuild/win32-arm64@0.27.2':
- resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==}
- engines: {node: '>=18'}
- cpu: [arm64]
- os: [win32]
-
- '@esbuild/win32-ia32@0.25.12':
- resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==}
+ '@esbuild/win32-ia32@0.27.3':
+ resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
- '@esbuild/win32-ia32@0.27.2':
- resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==}
- engines: {node: '>=18'}
- cpu: [ia32]
- os: [win32]
-
- '@esbuild/win32-x64@0.25.12':
- resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==}
+ '@esbuild/win32-x64@0.27.3':
+ resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
- '@esbuild/win32-x64@0.27.2':
- resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==}
- engines: {node: '>=18'}
- cpu: [x64]
- os: [win32]
-
- '@eslint-community/eslint-plugin-eslint-comments@4.5.0':
- resolution: {integrity: sha512-MAhuTKlr4y/CE3WYX26raZjy+I/kS2PLKSzvfmDCGrBLTFHOYwqROZdr4XwPgXwX3K9rjzMr4pSmUWGnzsUyMg==}
+ '@eslint-community/eslint-plugin-eslint-comments@4.6.0':
+ resolution: {integrity: sha512-2EX2bBQq1ez++xz2o9tEeEQkyvfieWgUFMH4rtJJri2q0Azvhja3hZGXsjPXs31R4fQkZDtWzNDDK2zQn5UE5g==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0
- '@eslint-community/eslint-utils@4.9.0':
- resolution: {integrity: sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==}
+ '@eslint-community/eslint-utils@4.9.1':
+ resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
peerDependencies:
eslint: ^6.0.0 || ^7.0.0 || >=8.0.0
- '@eslint-community/regexpp@4.12.1':
- resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==}
- engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0}
-
'@eslint-community/regexpp@4.12.2':
resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==}
engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0}
@@ -889,8 +747,8 @@ packages:
'@github/relative-time-element@5.0.0':
resolution: {integrity: sha512-L/2r0DNR/rMbmHWcsdmhtOiy2gESoGOhItNFD4zJ3nZfHl79Dx3N18Vfx/pYr2lruMOdk1cJZb4wEumm+Dxm1w==}
- '@github/text-expander-element@2.9.2':
- resolution: {integrity: sha512-XY8EUMqM4GAloNxXNA1Py1ny+engWwYntbgsnpstQN4piaTI9rIlfYldyd0nnPXhxjGCVqHPmP6yg17Q0/n9Vg==}
+ '@github/text-expander-element@2.9.4':
+ resolution: {integrity: sha512-+zxSlek2r0NrbFmRfymVtYhES9YU033acc/mouXUkN2bs8DaYScPucvBhwg/5d0hsEb2rIykKnkA/2xxWSqCTw==}
'@humanfs/core@0.19.1':
resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==}
@@ -922,10 +780,14 @@ packages:
resolution: {integrity: sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==}
engines: {node: 20 || >=22}
- '@isaacs/brace-expansion@5.0.0':
- resolution: {integrity: sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==}
+ '@isaacs/brace-expansion@5.0.1':
+ resolution: {integrity: sha512-WMz71T1JS624nWj2n2fnYAuPovhv7EUhk69R6i9dsVyzxt5eM3bjwvgk9L+APE1TRscGysAVMANkB0jh0LQZrQ==}
engines: {node: 20 || >=22}
+ '@isaacs/cliui@9.0.0':
+ resolution: {integrity: sha512-AokJm4tuBHillT+FpMtxQ60n8ObyXBatq7jD2/JA9dxbDDokKQm8KMht5ibGzLVU9IJDIKK4TPKgMHEYMn3lMg==}
+ engines: {node: '>=18'}
+
'@jridgewell/gen-mapping@0.3.13':
resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==}
@@ -942,11 +804,11 @@ packages:
'@jridgewell/trace-mapping@0.3.31':
resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==}
- '@keyv/bigmap@1.3.0':
- resolution: {integrity: sha512-KT01GjzV6AQD5+IYrcpoYLkCu1Jod3nau1Z7EsEuViO3TZGRacSbO9MfHmbJ1WaOXFtWLxPVj169cn2WNKPkIg==}
+ '@keyv/bigmap@1.3.1':
+ resolution: {integrity: sha512-WbzE9sdmQtKy8vrNPa9BRnwZh5UF4s1KTmSK0KUVLo3eff5BlQNNWDnFOouNpKfPKDnms9xynJjsMYjMaT/aFQ==}
engines: {node: '>= 18'}
peerDependencies:
- keyv: ^5.5.4
+ keyv: ^5.6.0
'@keyv/serialize@1.1.1':
resolution: {integrity: sha512-dXn3FZhPv0US+7dtJsIi2R+c7qWYiReoEh5zUntWCf4oSpMNib8FDhSoed6m3QyZdx5hK7iLFkYk3rNxwt8vTA==}
@@ -960,8 +822,13 @@ packages:
'@mcaptcha/vanilla-glue@0.1.0-alpha-3':
resolution: {integrity: sha512-GT6TJBgmViGXcXiT5VOr+h/6iOnThSlZuCoOWncubyTZU9R3cgU5vWPkF7G6Ob6ee2CBe3yqBxxk24CFVGTVXw==}
- '@mermaid-js/parser@0.6.3':
- resolution: {integrity: sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA==}
+ '@mermaid-js/layout-elk@0.2.0':
+ resolution: {integrity: sha512-vjjYGnCCjYlIA/rR7M//eFi0rHM6dsMyN1JQKfckpt30DTC/esrw36hcrvA2FNPHaqh3Q/SyBWzddyaky8EtUQ==}
+ peerDependencies:
+ mermaid: ^11.0.2
+
+ '@mermaid-js/parser@1.0.0':
+ resolution: {integrity: sha512-vvK0Hi/VWndxoh03Mmz6wa1KDriSPjS2XMZL/1l19HFwygiObEEoEwSDxOqyLzzAI6J2PU3261JjTMTO7x+BPw==}
'@napi-rs/wasm-runtime@0.2.12':
resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==}
@@ -1045,131 +912,159 @@ packages:
resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==}
engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
- '@playwright/test@1.57.0':
- resolution: {integrity: sha512-6TyEnHgd6SArQO8UO2OMTxshln3QMWBtPGrOCgs3wVEmQmwyuNtB10IZMfmYDE0riwNR1cu4q+pPcxMVtaG3TA==}
+ '@playwright/test@1.58.2':
+ resolution: {integrity: sha512-akea+6bHYBBfA9uQqSYmlJXn61cTa+jbO87xVLCWbTqbWadRVmhxlXATaOjOgcBaWU4ePo0wB41KMFv3o35IXA==}
engines: {node: '>=18'}
hasBin: true
'@popperjs/core@2.11.8':
resolution: {integrity: sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==}
- '@primer/octicons@19.21.1':
- resolution: {integrity: sha512-7tgtBkCNcg75YJnckinzvES+uxysYQCe+CHSEnzr3VYgxttzKRvfmrnVogl3aEuHCQP4xhiE9k2lFDhYwGtTzQ==}
+ '@primer/octicons@19.22.0':
+ resolution: {integrity: sha512-nWoh9PlE6u7xbiZF3KcUm3ktLpN2rQPt11trwp/t4EsKuYRNVWVbBp1LkCBsvZq7ScckNKUURLigIU0wS1FQdw==}
'@resvg/resvg-wasm@2.6.2':
resolution: {integrity: sha512-FqALmHI8D4o6lk/LRWDnhw95z5eO+eAa6ORjVg09YRR7BkcM6oPHU9uyC0gtQG5vpFLvgpeU4+zEAz2H8APHNw==}
engines: {node: '>= 10'}
- '@rolldown/pluginutils@1.0.0-beta.53':
- resolution: {integrity: sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==}
+ '@rolldown/pluginutils@1.0.0-rc.2':
+ resolution: {integrity: sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==}
- '@rollup/rollup-android-arm-eabi@4.53.5':
- resolution: {integrity: sha512-iDGS/h7D8t7tvZ1t6+WPK04KD0MwzLZrG0se1hzBjSi5fyxlsiggoJHwh18PCFNn7tG43OWb6pdZ6Y+rMlmyNQ==}
+ '@rollup/rollup-android-arm-eabi@4.57.1':
+ resolution: {integrity: sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==}
cpu: [arm]
os: [android]
- '@rollup/rollup-android-arm64@4.53.5':
- resolution: {integrity: sha512-wrSAViWvZHBMMlWk6EJhvg8/rjxzyEhEdgfMMjREHEq11EtJ6IP6yfcCH57YAEca2Oe3FNCE9DSTgU70EIGmVw==}
+ '@rollup/rollup-android-arm64@4.57.1':
+ resolution: {integrity: sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==}
cpu: [arm64]
os: [android]
- '@rollup/rollup-darwin-arm64@4.53.5':
- resolution: {integrity: sha512-S87zZPBmRO6u1YXQLwpveZm4JfPpAa6oHBX7/ghSiGH3rz/KDgAu1rKdGutV+WUI6tKDMbaBJomhnT30Y2t4VQ==}
+ '@rollup/rollup-darwin-arm64@4.57.1':
+ resolution: {integrity: sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==}
cpu: [arm64]
os: [darwin]
- '@rollup/rollup-darwin-x64@4.53.5':
- resolution: {integrity: sha512-YTbnsAaHo6VrAczISxgpTva8EkfQus0VPEVJCEaboHtZRIb6h6j0BNxRBOwnDciFTZLDPW5r+ZBmhL/+YpTZgA==}
+ '@rollup/rollup-darwin-x64@4.57.1':
+ resolution: {integrity: sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==}
cpu: [x64]
os: [darwin]
- '@rollup/rollup-freebsd-arm64@4.53.5':
- resolution: {integrity: sha512-1T8eY2J8rKJWzaznV7zedfdhD1BqVs1iqILhmHDq/bqCUZsrMt+j8VCTHhP0vdfbHK3e1IQ7VYx3jlKqwlf+vw==}
+ '@rollup/rollup-freebsd-arm64@4.57.1':
+ resolution: {integrity: sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==}
cpu: [arm64]
os: [freebsd]
- '@rollup/rollup-freebsd-x64@4.53.5':
- resolution: {integrity: sha512-sHTiuXyBJApxRn+VFMaw1U+Qsz4kcNlxQ742snICYPrY+DDL8/ZbaC4DVIB7vgZmp3jiDaKA0WpBdP0aqPJoBQ==}
+ '@rollup/rollup-freebsd-x64@4.57.1':
+ resolution: {integrity: sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==}
cpu: [x64]
os: [freebsd]
- '@rollup/rollup-linux-arm-gnueabihf@4.53.5':
- resolution: {integrity: sha512-dV3T9MyAf0w8zPVLVBptVlzaXxka6xg1f16VAQmjg+4KMSTWDvhimI/Y6mp8oHwNrmnmVl9XxJ/w/mO4uIQONA==}
+ '@rollup/rollup-linux-arm-gnueabihf@4.57.1':
+ resolution: {integrity: sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==}
cpu: [arm]
os: [linux]
+ libc: [glibc]
- '@rollup/rollup-linux-arm-musleabihf@4.53.5':
- resolution: {integrity: sha512-wIGYC1x/hyjP+KAu9+ewDI+fi5XSNiUi9Bvg6KGAh2TsNMA3tSEs+Sh6jJ/r4BV/bx/CyWu2ue9kDnIdRyafcQ==}
+ '@rollup/rollup-linux-arm-musleabihf@4.57.1':
+ resolution: {integrity: sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==}
cpu: [arm]
os: [linux]
+ libc: [musl]
- '@rollup/rollup-linux-arm64-gnu@4.53.5':
- resolution: {integrity: sha512-Y+qVA0D9d0y2FRNiG9oM3Hut/DgODZbU9I8pLLPwAsU0tUKZ49cyV1tzmB/qRbSzGvY8lpgGkJuMyuhH7Ma+Vg==}
+ '@rollup/rollup-linux-arm64-gnu@4.57.1':
+ resolution: {integrity: sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==}
cpu: [arm64]
os: [linux]
+ libc: [glibc]
- '@rollup/rollup-linux-arm64-musl@4.53.5':
- resolution: {integrity: sha512-juaC4bEgJsyFVfqhtGLz8mbopaWD+WeSOYr5E16y+1of6KQjc0BpwZLuxkClqY1i8sco+MdyoXPNiCkQou09+g==}
+ '@rollup/rollup-linux-arm64-musl@4.57.1':
+ resolution: {integrity: sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==}
cpu: [arm64]
os: [linux]
+ libc: [musl]
- '@rollup/rollup-linux-loong64-gnu@4.53.5':
- resolution: {integrity: sha512-rIEC0hZ17A42iXtHX+EPJVL/CakHo+tT7W0pbzdAGuWOt2jxDFh7A/lRhsNHBcqL4T36+UiAgwO8pbmn3dE8wA==}
+ '@rollup/rollup-linux-loong64-gnu@4.57.1':
+ resolution: {integrity: sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==}
cpu: [loong64]
os: [linux]
+ libc: [glibc]
- '@rollup/rollup-linux-ppc64-gnu@4.53.5':
- resolution: {integrity: sha512-T7l409NhUE552RcAOcmJHj3xyZ2h7vMWzcwQI0hvn5tqHh3oSoclf9WgTl+0QqffWFG8MEVZZP1/OBglKZx52Q==}
+ '@rollup/rollup-linux-loong64-musl@4.57.1':
+ resolution: {integrity: sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==}
+ cpu: [loong64]
+ os: [linux]
+ libc: [musl]
+
+ '@rollup/rollup-linux-ppc64-gnu@4.57.1':
+ resolution: {integrity: sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==}
cpu: [ppc64]
os: [linux]
+ libc: [glibc]
- '@rollup/rollup-linux-riscv64-gnu@4.53.5':
- resolution: {integrity: sha512-7OK5/GhxbnrMcxIFoYfhV/TkknarkYC1hqUw1wU2xUN3TVRLNT5FmBv4KkheSG2xZ6IEbRAhTooTV2+R5Tk0lQ==}
+ '@rollup/rollup-linux-ppc64-musl@4.57.1':
+ resolution: {integrity: sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==}
+ cpu: [ppc64]
+ os: [linux]
+ libc: [musl]
+
+ '@rollup/rollup-linux-riscv64-gnu@4.57.1':
+ resolution: {integrity: sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==}
cpu: [riscv64]
os: [linux]
+ libc: [glibc]
- '@rollup/rollup-linux-riscv64-musl@4.53.5':
- resolution: {integrity: sha512-GwuDBE/PsXaTa76lO5eLJTyr2k8QkPipAyOrs4V/KJufHCZBJ495VCGJol35grx9xryk4V+2zd3Ri+3v7NPh+w==}
+ '@rollup/rollup-linux-riscv64-musl@4.57.1':
+ resolution: {integrity: sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==}
cpu: [riscv64]
os: [linux]
+ libc: [musl]
- '@rollup/rollup-linux-s390x-gnu@4.53.5':
- resolution: {integrity: sha512-IAE1Ziyr1qNfnmiQLHBURAD+eh/zH1pIeJjeShleII7Vj8kyEm2PF77o+lf3WTHDpNJcu4IXJxNO0Zluro8bOw==}
+ '@rollup/rollup-linux-s390x-gnu@4.57.1':
+ resolution: {integrity: sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==}
cpu: [s390x]
os: [linux]
+ libc: [glibc]
- '@rollup/rollup-linux-x64-gnu@4.53.5':
- resolution: {integrity: sha512-Pg6E+oP7GvZ4XwgRJBuSXZjcqpIW3yCBhK4BcsANvb47qMvAbCjR6E+1a/U2WXz1JJxp9/4Dno3/iSJLcm5auw==}
+ '@rollup/rollup-linux-x64-gnu@4.57.1':
+ resolution: {integrity: sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==}
cpu: [x64]
os: [linux]
+ libc: [glibc]
- '@rollup/rollup-linux-x64-musl@4.53.5':
- resolution: {integrity: sha512-txGtluxDKTxaMDzUduGP0wdfng24y1rygUMnmlUJ88fzCCULCLn7oE5kb2+tRB+MWq1QDZT6ObT5RrR8HFRKqg==}
+ '@rollup/rollup-linux-x64-musl@4.57.1':
+ resolution: {integrity: sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==}
cpu: [x64]
os: [linux]
+ libc: [musl]
- '@rollup/rollup-openharmony-arm64@4.53.5':
- resolution: {integrity: sha512-3DFiLPnTxiOQV993fMc+KO8zXHTcIjgaInrqlG8zDp1TlhYl6WgrOHuJkJQ6M8zHEcntSJsUp1XFZSY8C1DYbg==}
+ '@rollup/rollup-openbsd-x64@4.57.1':
+ resolution: {integrity: sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==}
+ cpu: [x64]
+ os: [openbsd]
+
+ '@rollup/rollup-openharmony-arm64@4.57.1':
+ resolution: {integrity: sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==}
cpu: [arm64]
os: [openharmony]
- '@rollup/rollup-win32-arm64-msvc@4.53.5':
- resolution: {integrity: sha512-nggc/wPpNTgjGg75hu+Q/3i32R00Lq1B6N1DO7MCU340MRKL3WZJMjA9U4K4gzy3dkZPXm9E1Nc81FItBVGRlA==}
+ '@rollup/rollup-win32-arm64-msvc@4.57.1':
+ resolution: {integrity: sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==}
cpu: [arm64]
os: [win32]
- '@rollup/rollup-win32-ia32-msvc@4.53.5':
- resolution: {integrity: sha512-U/54pTbdQpPLBdEzCT6NBCFAfSZMvmjr0twhnD9f4EIvlm9wy3jjQ38yQj1AGznrNO65EWQMgm/QUjuIVrYF9w==}
+ '@rollup/rollup-win32-ia32-msvc@4.57.1':
+ resolution: {integrity: sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==}
cpu: [ia32]
os: [win32]
- '@rollup/rollup-win32-x64-gnu@4.53.5':
- resolution: {integrity: sha512-2NqKgZSuLH9SXBBV2dWNRCZmocgSOx8OJSdpRaEcRlIfX8YrKxUT6z0F1NpvDVhOsl190UFTRh2F2WDWWCYp3A==}
+ '@rollup/rollup-win32-x64-gnu@4.57.1':
+ resolution: {integrity: sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==}
cpu: [x64]
os: [win32]
- '@rollup/rollup-win32-x64-msvc@4.53.5':
- resolution: {integrity: sha512-JRpZUhCfhZ4keB5v0fe02gQJy05GqboPOaxvjugW04RLSYYoB/9t2lx2u/tMs/Na/1NXfY8QYjgRljRpN+MjTQ==}
+ '@rollup/rollup-win32-x64-msvc@4.57.1':
+ resolution: {integrity: sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==}
cpu: [x64]
os: [win32]
@@ -1189,6 +1084,10 @@ packages:
'@simonwep/pickr@1.9.0':
resolution: {integrity: sha512-oEYvv15PyfZzjoAzvXYt3UyNGwzsrpFxLaZKzkOSd0WYBVwLd19iJerePDONxC1iF6+DpcswPdLIM2KzCJuYFg==}
+ '@sindresorhus/merge-streams@4.0.0':
+ resolution: {integrity: sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==}
+ engines: {node: '>=18'}
+
'@solid-primitives/refs@1.1.2':
resolution: {integrity: sha512-K7tf2thy7L+YJjdqXspXOg5xvNEOH8tgEWsp0+1mQk3obHBRD6hEjYZk7p7FlJphSZImS35je3UfmWuD7MhDfg==}
peerDependencies:
@@ -1207,17 +1106,17 @@ packages:
'@standard-schema/spec@1.1.0':
resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==}
- '@stylistic/eslint-plugin@5.6.1':
- resolution: {integrity: sha512-JCs+MqoXfXrRPGbGmho/zGS/jMcn3ieKl/A8YImqib76C8kjgZwq5uUFzc30lJkMvcchuRn6/v8IApLxli3Jyw==}
+ '@stylistic/eslint-plugin@5.8.0':
+ resolution: {integrity: sha512-WNPVF/FfBAjyi3OA7gok8swRiImNLKI4dmV3iK/GC/0xSJR7eCzBFsw9hLZVgb1+MYNLy7aDsjohxN1hA/FIfQ==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
eslint: '>=9.0.0'
- '@stylistic/stylelint-plugin@4.0.0':
- resolution: {integrity: sha512-CFwt3K4Y/7bygNCLCQ8Sy4Hzgbhxq3BsNW0FIuYxl17HD3ywptm54ocyeiLVRrk5jtz1Zwks7Xr9eiZt8SWHAw==}
- engines: {node: ^18.12 || >=20.9}
+ '@stylistic/stylelint-plugin@5.0.1':
+ resolution: {integrity: sha512-NaVwCNVZ2LyPA3TnUwvjO9c6P6VUjgRB8UP8SOW+cAOJBVqPPuOIDawsvvtql/LhkuR3JuTdGvr/RM3dUl8l2Q==}
+ engines: {node: '>=20.19.0'}
peerDependencies:
- stylelint: ^16.22.0
+ stylelint: ^17.0.0
'@swc/helpers@0.2.14':
resolution: {integrity: sha512-wpCQMhf5p5GhNg2MmGKXzUNwxe7zRiCsmqYsamez2beP7mKPCSiu+BjZcdN95yYSzO857kr0VfQewmGpS77nqA==}
@@ -1308,8 +1207,8 @@ packages:
'@types/d3-selection@3.0.11':
resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==}
- '@types/d3-shape@3.1.7':
- resolution: {integrity: sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==}
+ '@types/d3-shape@3.1.8':
+ resolution: {integrity: sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==}
'@types/d3-time-format@4.0.3':
resolution: {integrity: sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==}
@@ -1344,6 +1243,9 @@ packages:
'@types/eslint@9.6.1':
resolution: {integrity: sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==}
+ '@types/esrecurse@4.3.1':
+ resolution: {integrity: sha512-xJBAbDifo5hpffDBuHl0Y8ywswbiAp/Wi7Y/GtAgSlZyIABppyurxVueOPE8LUQOxdlgi6Zqce7uoEpqNTeiUw==}
+
'@types/estree@1.0.8':
resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
@@ -1356,14 +1258,17 @@ packages:
'@types/jquery@3.5.33':
resolution: {integrity: sha512-SeyVJXlCZpEki5F0ghuYe+L+PprQta6nRZqhONt9F13dWBtR/ftoaIbdRQ7cis7womE+X2LKhsDdDtkkDhJS6g==}
+ '@types/js-yaml@4.0.9':
+ resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==}
+
'@types/json-schema@7.0.15':
resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
'@types/json5@0.0.29':
resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==}
- '@types/katex@0.16.7':
- resolution: {integrity: sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==}
+ '@types/katex@0.16.8':
+ resolution: {integrity: sha512-trgaNyfU+Xh2Tc+ABIb44a5AYUpicB3uwirOioeOkNPPbmgRNtcWyDeeFRzjPZENO9Vq8gvVqfhaaXWLlevVwg==}
'@types/marked@4.3.2':
resolution: {integrity: sha512-a79Yc3TOk6dGdituy8hmTTJXjOkZ7zsFYV10L337ttq/rec8lRMDBpV7fL3uLx6TgbFCa5DU/h8FmIBQPSbU0w==}
@@ -1371,11 +1276,8 @@ packages:
'@types/ms@2.1.0':
resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==}
- '@types/node@20.19.27':
- resolution: {integrity: sha512-N2clP5pJhB2YnZJ3PIHFk5RkygRX5WO/5f0WC08tp0wd+sv0rsJk3MqWn3CbNmT2J505a5336jaQj4ph1AdMug==}
-
- '@types/node@25.0.3':
- resolution: {integrity: sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==}
+ '@types/node@25.2.3':
+ resolution: {integrity: sha512-m0jEgYlYz+mDJZ2+F4v8D1AyQb+QzsNqRuI7xg1VQX/KlKS0qT9r1Mo16yo5F/MtifXFgaofIFsdFMox2SxIbQ==}
'@types/pdfobject@2.2.5':
resolution: {integrity: sha512-7gD5tqc/RUDq0PyoLemL0vEHxBYi+zY0WVaFAx/Y0jBsXFgot1vB9No1GhDZGwRGJMCIZbgAb74QG9MTyTNU/g==}
@@ -1395,9 +1297,6 @@ packages:
'@types/throttle-debounce@5.0.2':
resolution: {integrity: sha512-pDzSNulqooSKvSNcksnV72nk8p7gRqN8As71Sp28nov1IgmPKWbOEIwAWvBME5pPTtaXJAvG3O4oc76HlQ4kqQ==}
- '@types/tinycolor2@1.4.6':
- resolution: {integrity: sha512-iEN8J0BoMnsWBqjVbWH/c0G0Hh7O21lpR2/+PrvAVgWdzL7eexIFm4JN/Wn10PTcmNdtS6U67r499mlWMXOxNw==}
-
'@types/toastify-js@1.12.4':
resolution: {integrity: sha512-zfZHU4tKffPCnZRe7pjv/eFKzTVHozKewFCKaCjZ4gFinKgJRz/t0bkZiMCXJxPhv/ZoeDGNOeRD09R0kQZ/nw==}
@@ -1410,63 +1309,66 @@ packages:
'@types/whatwg-mimetype@3.0.2':
resolution: {integrity: sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==}
- '@typescript-eslint/eslint-plugin@8.50.0':
- resolution: {integrity: sha512-O7QnmOXYKVtPrfYzMolrCTfkezCJS9+ljLdKW/+DCvRsc3UAz+sbH6Xcsv7p30+0OwUbeWfUDAQE0vpabZ3QLg==}
+ '@types/ws@8.18.1':
+ resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==}
+
+ '@typescript-eslint/eslint-plugin@8.56.0':
+ resolution: {integrity: sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
- '@typescript-eslint/parser': ^8.50.0
- eslint: ^8.57.0 || ^9.0.0
+ '@typescript-eslint/parser': ^8.56.0
+ eslint: ^8.57.0 || ^9.0.0 || ^10.0.0
typescript: '>=4.8.4 <6.0.0'
- '@typescript-eslint/parser@8.50.0':
- resolution: {integrity: sha512-6/cmF2piao+f6wSxUsJLZjck7OQsYyRtcOZS02k7XINSNlz93v6emM8WutDQSXnroG2xwYlEVHJI+cPA7CPM3Q==}
+ '@typescript-eslint/parser@8.56.0':
+ resolution: {integrity: sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
- eslint: ^8.57.0 || ^9.0.0
+ eslint: ^8.57.0 || ^9.0.0 || ^10.0.0
typescript: '>=4.8.4 <6.0.0'
- '@typescript-eslint/project-service@8.50.0':
- resolution: {integrity: sha512-Cg/nQcL1BcoTijEWyx4mkVC56r8dj44bFDvBdygifuS20f3OZCHmFbjF34DPSi07kwlFvqfv/xOLnJ5DquxSGQ==}
+ '@typescript-eslint/project-service@8.56.0':
+ resolution: {integrity: sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
typescript: '>=4.8.4 <6.0.0'
- '@typescript-eslint/scope-manager@8.50.0':
- resolution: {integrity: sha512-xCwfuCZjhIqy7+HKxBLrDVT5q/iq7XBVBXLn57RTIIpelLtEIZHXAF/Upa3+gaCpeV1NNS5Z9A+ID6jn50VD4A==}
+ '@typescript-eslint/scope-manager@8.56.0':
+ resolution: {integrity: sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
- '@typescript-eslint/tsconfig-utils@8.50.0':
- resolution: {integrity: sha512-vxd3G/ybKTSlm31MOA96gqvrRGv9RJ7LGtZCn2Vrc5htA0zCDvcMqUkifcjrWNNKXHUU3WCkYOzzVSFBd0wa2w==}
+ '@typescript-eslint/tsconfig-utils@8.56.0':
+ resolution: {integrity: sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
typescript: '>=4.8.4 <6.0.0'
- '@typescript-eslint/type-utils@8.50.0':
- resolution: {integrity: sha512-7OciHT2lKCewR0mFoBrvZJ4AXTMe/sYOe87289WAViOocEmDjjv8MvIOT2XESuKj9jp8u3SZYUSh89QA4S1kQw==}
+ '@typescript-eslint/type-utils@8.56.0':
+ resolution: {integrity: sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
- eslint: ^8.57.0 || ^9.0.0
+ eslint: ^8.57.0 || ^9.0.0 || ^10.0.0
typescript: '>=4.8.4 <6.0.0'
- '@typescript-eslint/types@8.50.0':
- resolution: {integrity: sha512-iX1mgmGrXdANhhITbpp2QQM2fGehBse9LbTf0sidWK6yg/NE+uhV5dfU1g6EYPlcReYmkE9QLPq/2irKAmtS9w==}
+ '@typescript-eslint/types@8.56.0':
+ resolution: {integrity: sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
- '@typescript-eslint/typescript-estree@8.50.0':
- resolution: {integrity: sha512-W7SVAGBR/IX7zm1t70Yujpbk+zdPq/u4soeFSknWFdXIFuWsBGBOUu/Tn/I6KHSKvSh91OiMuaSnYp3mtPt5IQ==}
+ '@typescript-eslint/typescript-estree@8.56.0':
+ resolution: {integrity: sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
typescript: '>=4.8.4 <6.0.0'
- '@typescript-eslint/utils@8.50.0':
- resolution: {integrity: sha512-87KgUXET09CRjGCi2Ejxy3PULXna63/bMYv72tCAlDJC3Yqwln0HiFJ3VJMst2+mEtNtZu5oFvX4qJGjKsnAgg==}
+ '@typescript-eslint/utils@8.56.0':
+ resolution: {integrity: sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
- eslint: ^8.57.0 || ^9.0.0
+ eslint: ^8.57.0 || ^9.0.0 || ^10.0.0
typescript: '>=4.8.4 <6.0.0'
- '@typescript-eslint/visitor-keys@8.50.0':
- resolution: {integrity: sha512-Xzmnb58+Db78gT/CCj/PVCvK+zxbnsw6F+O1oheYszJbBSdEjVhQi3C/Xttzxgi/GLmpvOggRs1RFpiJ8+c34Q==}
+ '@typescript-eslint/visitor-keys@8.56.0':
+ resolution: {integrity: sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
'@unrs/resolver-binding-android-arm-eabi@1.11.1':
@@ -1508,41 +1410,49 @@ packages:
resolution: {integrity: sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==}
cpu: [arm64]
os: [linux]
+ libc: [glibc]
'@unrs/resolver-binding-linux-arm64-musl@1.11.1':
resolution: {integrity: sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==}
cpu: [arm64]
os: [linux]
+ libc: [musl]
'@unrs/resolver-binding-linux-ppc64-gnu@1.11.1':
resolution: {integrity: sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==}
cpu: [ppc64]
os: [linux]
+ libc: [glibc]
'@unrs/resolver-binding-linux-riscv64-gnu@1.11.1':
resolution: {integrity: sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==}
cpu: [riscv64]
os: [linux]
+ libc: [glibc]
'@unrs/resolver-binding-linux-riscv64-musl@1.11.1':
resolution: {integrity: sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==}
cpu: [riscv64]
os: [linux]
+ libc: [musl]
'@unrs/resolver-binding-linux-s390x-gnu@1.11.1':
resolution: {integrity: sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==}
cpu: [s390x]
os: [linux]
+ libc: [glibc]
'@unrs/resolver-binding-linux-x64-gnu@1.11.1':
resolution: {integrity: sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==}
cpu: [x64]
os: [linux]
+ libc: [glibc]
'@unrs/resolver-binding-linux-x64-musl@1.11.1':
resolution: {integrity: sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==}
cpu: [x64]
os: [linux]
+ libc: [musl]
'@unrs/resolver-binding-wasm32-wasi@1.11.1':
resolution: {integrity: sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==}
@@ -1564,15 +1474,15 @@ packages:
cpu: [x64]
os: [win32]
- '@vitejs/plugin-vue@6.0.3':
- resolution: {integrity: sha512-TlGPkLFLVOY3T7fZrwdvKpjprR3s4fxRln0ORDo1VQ7HHyxJwTlrjKU3kpVWTlaAjIEuCTokmjkZnr8Tpc925w==}
+ '@vitejs/plugin-vue@6.0.4':
+ resolution: {integrity: sha512-uM5iXipgYIn13UUQCZNdWkYk+sysBeA97d5mHsAoAt1u/wpN3+zxOmsVJWosuzX+IMGRzeYUNytztrYznboIkQ==}
engines: {node: ^20.19.0 || >=22.12.0}
peerDependencies:
vite: ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0
vue: ^3.2.25
- '@vitest/eslint-plugin@1.5.2':
- resolution: {integrity: sha512-2t1F2iecXB/b1Ox4U137lhD3chihEE3dRVtu3qMD35tc6UqUjg1VGRJoS1AkFKwpT8zv8OQInzPQO06hrRkeqw==}
+ '@vitest/eslint-plugin@1.6.9':
+ resolution: {integrity: sha512-9WfPx1OwJ19QLCSRLkqVO7//1WcWnK3fE/3fJhKMAmDe8+9G4rB47xCNIIeCq3FdEzkIoLTfDlwDlPBaUTMhow==}
engines: {node: '>=18'}
peerDependencies:
eslint: '>=8.57.0'
@@ -1584,11 +1494,11 @@ packages:
vitest:
optional: true
- '@vitest/expect@4.0.16':
- resolution: {integrity: sha512-eshqULT2It7McaJkQGLkPjPjNph+uevROGuIMJdG3V+0BSR2w9u6J9Lwu+E8cK5TETlfou8GRijhafIMhXsimA==}
+ '@vitest/expect@4.0.18':
+ resolution: {integrity: sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==}
- '@vitest/mocker@4.0.16':
- resolution: {integrity: sha512-yb6k4AZxJTB+q9ycAvsoxGn+j/po0UaPgajllBgt1PzoMAAmJGYFdDk0uCcRcxb3BrME34I6u8gHZTQlkqSZpg==}
+ '@vitest/mocker@4.0.18':
+ resolution: {integrity: sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==}
peerDependencies:
msw: ^2.4.9
vite: ^6.0.0 || ^7.0.0-0
@@ -1598,66 +1508,61 @@ packages:
vite:
optional: true
- '@vitest/pretty-format@4.0.16':
- resolution: {integrity: sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==}
+ '@vitest/pretty-format@4.0.18':
+ resolution: {integrity: sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==}
- '@vitest/runner@4.0.16':
- resolution: {integrity: sha512-VWEDm5Wv9xEo80ctjORcTQRJ539EGPB3Pb9ApvVRAY1U/WkHXmmYISqU5E79uCwcW7xYUV38gwZD+RV755fu3Q==}
+ '@vitest/runner@4.0.18':
+ resolution: {integrity: sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==}
- '@vitest/snapshot@4.0.16':
- resolution: {integrity: sha512-sf6NcrYhYBsSYefxnry+DR8n3UV4xWZwWxYbCJUt2YdvtqzSPR7VfGrY0zsv090DAbjFZsi7ZaMi1KnSRyK1XA==}
+ '@vitest/snapshot@4.0.18':
+ resolution: {integrity: sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==}
- '@vitest/spy@4.0.16':
- resolution: {integrity: sha512-4jIOWjKP0ZUaEmJm00E0cOBLU+5WE0BpeNr3XN6TEF05ltro6NJqHWxXD0kA8/Zc8Nh23AT8WQxwNG+WeROupw==}
+ '@vitest/spy@4.0.18':
+ resolution: {integrity: sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==}
- '@vitest/utils@4.0.16':
- resolution: {integrity: sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==}
+ '@vitest/utils@4.0.18':
+ resolution: {integrity: sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==}
- '@volar/language-core@2.4.26':
- resolution: {integrity: sha512-hH0SMitMxnB43OZpyF1IFPS9bgb2I3bpCh76m2WEK7BE0A0EzpYsRp0CCH2xNKshr7kacU5TQBLYn4zj7CG60A==}
+ '@volar/language-core@2.4.27':
+ resolution: {integrity: sha512-DjmjBWZ4tJKxfNC1F6HyYERNHPYS7L7OPFyCrestykNdUZMFYzI9WTyvwPcaNaHlrEUwESHYsfEw3isInncZxQ==}
- '@volar/source-map@2.4.26':
- resolution: {integrity: sha512-JJw0Tt/kSFsIRmgTQF4JSt81AUSI1aEye5Zl65EeZ8H35JHnTvFGmpDOBn5iOxd48fyGE+ZvZBp5FcgAy/1Qhw==}
+ '@volar/source-map@2.4.27':
+ resolution: {integrity: sha512-ynlcBReMgOZj2i6po+qVswtDUeeBRCTgDurjMGShbm8WYZgJ0PA4RmtebBJ0BCYol1qPv3GQF6jK7C9qoVc7lg==}
- '@volar/typescript@2.4.26':
- resolution: {integrity: sha512-N87ecLD48Sp6zV9zID/5yuS1+5foj0DfuYGdQ6KHj/IbKvyKv1zNX6VCmnKYwtmHadEO6mFc2EKISiu3RDPAvA==}
+ '@volar/typescript@2.4.27':
+ resolution: {integrity: sha512-eWaYCcl/uAPInSK2Lze6IqVWaBu/itVqR5InXcHXFyles4zO++Mglt3oxdgj75BDcv1Knr9Y93nowS8U3wqhxg==}
- '@vue/compiler-core@3.5.25':
- resolution: {integrity: sha512-vay5/oQJdsNHmliWoZfHPoVZZRmnSWhug0BYT34njkYTPqClh3DNWLkZNJBVSjsNMrg0CCrBfoKkjZQPM/QVUw==}
+ '@vue/compiler-core@3.5.28':
+ resolution: {integrity: sha512-kviccYxTgoE8n6OCw96BNdYlBg2GOWfBuOW4Vqwrt7mSKWKwFVvI8egdTltqRgITGPsTFYtKYfxIG8ptX2PJHQ==}
- '@vue/compiler-dom@3.5.25':
- resolution: {integrity: sha512-4We0OAcMZsKgYoGlMjzYvaoErltdFI2/25wqanuTu+S4gismOTRTBPi4IASOjxWdzIwrYSjnqONfKvuqkXzE2Q==}
+ '@vue/compiler-dom@3.5.28':
+ resolution: {integrity: sha512-/1ZepxAb159jKR1btkefDP+J2xuWL5V3WtleRmxaT+K2Aqiek/Ab/+Ebrw2pPj0sdHO8ViAyyJWfhXXOP/+LQA==}
- '@vue/compiler-sfc@3.5.25':
- resolution: {integrity: sha512-PUgKp2rn8fFsI++lF2sO7gwO2d9Yj57Utr5yEsDf3GNaQcowCLKL7sf+LvVFvtJDXUp/03+dC6f2+LCv5aK1ag==}
+ '@vue/compiler-sfc@3.5.28':
+ resolution: {integrity: sha512-6TnKMiNkd6u6VeVDhZn/07KhEZuBSn43Wd2No5zaP5s3xm8IqFTHBj84HJah4UepSUJTro5SoqqlOY22FKY96g==}
- '@vue/compiler-ssr@3.5.25':
- resolution: {integrity: sha512-ritPSKLBcParnsKYi+GNtbdbrIE1mtuFEJ4U1sWeuOMlIziK5GtOL85t5RhsNy4uWIXPgk+OUdpnXiTdzn8o3A==}
+ '@vue/compiler-ssr@3.5.28':
+ resolution: {integrity: sha512-JCq//9w1qmC6UGLWJX7RXzrGpKkroubey/ZFqTpvEIDJEKGgntuDMqkuWiZvzTzTA5h2qZvFBFHY7fAAa9475g==}
- '@vue/language-core@3.1.8':
- resolution: {integrity: sha512-PfwAW7BLopqaJbneChNL6cUOTL3GL+0l8paYP5shhgY5toBNidWnMXWM+qDwL7MC9+zDtzCF2enT8r6VPu64iw==}
+ '@vue/language-core@3.2.4':
+ resolution: {integrity: sha512-bqBGuSG4KZM45KKTXzGtoCl9cWju5jsaBKaJJe3h5hRAAWpZUuj5G+L+eI01sPIkm4H6setKRlw7E85wLdDNew==}
+
+ '@vue/reactivity@3.5.28':
+ resolution: {integrity: sha512-gr5hEsxvn+RNyu9/9o1WtdYdwDjg5FgjUSBEkZWqgTKlo/fvwZ2+8W6AfKsc9YN2k/+iHYdS9vZYAhpi10kNaw==}
+
+ '@vue/runtime-core@3.5.28':
+ resolution: {integrity: sha512-POVHTdbgnrBBIpnbYU4y7pOMNlPn2QVxVzkvEA2pEgvzbelQq4ZOUxbp2oiyo+BOtiYlm8Q44wShHJoBvDPAjQ==}
+
+ '@vue/runtime-dom@3.5.28':
+ resolution: {integrity: sha512-4SXxSF8SXYMuhAIkT+eBRqOkWEfPu6nhccrzrkioA6l0boiq7sp18HCOov9qWJA5HML61kW8p/cB4MmBiG9dSA==}
+
+ '@vue/server-renderer@3.5.28':
+ resolution: {integrity: sha512-pf+5ECKGj8fX95bNincbzJ6yp6nyzuLDhYZCeFxUNp8EBrQpPpQaLX3nNCp49+UbgbPun3CeVE+5CXVV1Xydfg==}
peerDependencies:
- typescript: '*'
- peerDependenciesMeta:
- typescript:
- optional: true
+ vue: 3.5.28
- '@vue/reactivity@3.5.25':
- resolution: {integrity: sha512-5xfAypCQepv4Jog1U4zn8cZIcbKKFka3AgWHEFQeK65OW+Ys4XybP6z2kKgws4YB43KGpqp5D/K3go2UPPunLA==}
-
- '@vue/runtime-core@3.5.25':
- resolution: {integrity: sha512-Z751v203YWwYzy460bzsYQISDfPjHTl+6Zzwo/a3CsAf+0ccEjQ8c+0CdX1WsumRTHeywvyUFtW6KvNukT/smA==}
-
- '@vue/runtime-dom@3.5.25':
- resolution: {integrity: sha512-a4WrkYFbb19i9pjkz38zJBg8wa/rboNERq3+hRRb0dHiJh13c+6kAbgqCPfMaJ2gg4weWD3APZswASOfmKwamA==}
-
- '@vue/server-renderer@3.5.25':
- resolution: {integrity: sha512-UJaXR54vMG61i8XNIzTSf2Q7MOqZHpp8+x3XLGtE3+fL+nQd+k7O5+X3D/uWrnQXOdMw5VPih+Uremcw+u1woQ==}
- peerDependencies:
- vue: 3.5.25
-
- '@vue/shared@3.5.25':
- resolution: {integrity: sha512-AbOPdQQnAnzs58H2FrrDxYj/TJfmeS2jdfEEhgiKINy+bnOANmVizIEgq1r+C5zsbs6l1CCQxtcj71rwNQ4jWg==}
+ '@vue/shared@3.5.28':
+ resolution: {integrity: sha512-cfWa1fCGBxrvaHRhvV3Is0MgmrbSCxYTXCSCau2I0a1Xw1N1pHAvkWCiXPRAqjvToILvguNyEwjevUqAuBQWvQ==}
'@webassemblyjs/ast@1.14.1':
resolution: {integrity: sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==}
@@ -1776,11 +1681,11 @@ packages:
ajv@6.12.6:
resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==}
- ajv@8.17.1:
- resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==}
+ ajv@8.18.0:
+ resolution: {integrity: sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==}
- alien-signals@3.1.1:
- resolution: {integrity: sha512-ogkIWbVrLwKtHY6oOAXaYkAxP+cTH7V5FZ5+Tm4NZFd8VDZ6uNMDrfzqctTZ42eTMCSR3ne3otpcxmqSnFfPYA==}
+ alien-signals@3.1.2:
+ resolution: {integrity: sha512-d9dYqZTS90WLiU0I5c6DHj/HcKkF8ZyGN3G5x8wSbslulz70KOxaqCT0hQCo9KOyhVqzqGojvNdJXoTumZOtcw==}
ansi-regex@5.0.1:
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
@@ -1822,12 +1727,8 @@ packages:
resolution: {integrity: sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==}
engines: {node: '>=0.10.0'}
- array-union@2.1.0:
- resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==}
- engines: {node: '>=8'}
-
- asciinema-player@3.13.5:
- resolution: {integrity: sha512-mgpJc9g6I+k4Tz5qVUNd0H+GoYlhiUwvlay6vD6IXiuiWOWhBOjxbvqQ1bcI/HPTrOYxhTyxZuzHIXM36Tw60Q==}
+ asciinema-player@3.14.15:
+ resolution: {integrity: sha512-M+6n0GXMc9X4Oaz9qOr5pfQGcmnPOW8QIBzf67eZvtSpyqiKexMA03CpP4S9ZaDuUQNQIF2RSjHl6QLgJYvJ9g==}
assertion-error@2.0.1:
resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==}
@@ -1845,8 +1746,8 @@ packages:
engines: {node: '>= 4.5.0'}
hasBin: true
- axe-core@4.11.0:
- resolution: {integrity: sha512-ilYanEU8vxxBexpJd8cWM4ElSQq4QctCLKih0TSfjIfCQTeyH/6zVrmIJfLPrKTKJRbiG+cfnZbQIjAlJmF1jQ==}
+ axe-core@4.11.1:
+ resolution: {integrity: sha512-BASOg+YwO2C+346x3LZOeoovTIoTrRqEsqMa6fmfAV0P+U9mFr9NsyOEpiYvFjbc64NMrSswhV50WdXzdb/Z5A==}
engines: {node: '>=4'}
axobject-query@4.1.0:
@@ -1856,14 +1757,19 @@ packages:
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
- balanced-match@2.0.0:
- resolution: {integrity: sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==}
+ balanced-match@3.0.1:
+ resolution: {integrity: sha512-vjtV3hiLqYDNRoiAv0zC4QaGAMPomEoq83PRmYIofPswwZurCeWR5LByXm7SyoL0Zh5+2z0+HC7jG8gSZJUh0w==}
+ engines: {node: '>= 16'}
+
+ balanced-match@4.0.2:
+ resolution: {integrity: sha512-x0K50QvKQ97fdEz2kPehIerj+YTeptKF9hyYkKf6egnwmMWAkADiO0QCzSp0R5xN8FTZgYaBfSaue46Ej62nMg==}
+ engines: {node: 20 || >=22}
base64-js@1.5.1:
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
- baseline-browser-mapping@2.9.9:
- resolution: {integrity: sha512-V8fbOCSeOFvlDj7LLChUcqbZrdKD9RU/VR260piF1790vT0mfLSwGc/Qzxv3IqiTukOpNtItePa0HBpMAj7MDg==}
+ baseline-browser-mapping@2.9.19:
+ resolution: {integrity: sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==}
hasBin: true
big.js@5.2.2:
@@ -1882,6 +1788,10 @@ packages:
brace-expansion@2.0.2:
resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==}
+ brace-expansion@5.0.2:
+ resolution: {integrity: sha512-Pdk8c9poy+YhOgVWw1JNN22/HcivgKWwpxKq04M/jTmHyCZn12WPJebZxdjSa5TmBqISrUSgNYU3eRORljfCCw==}
+ engines: {node: 20 || >=22}
+
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
@@ -1909,8 +1819,8 @@ packages:
resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==}
engines: {node: '>= 0.8'}
- cacheable@2.3.1:
- resolution: {integrity: sha512-yr+FSHWn1ZUou5LkULX/S+jhfgfnLbuKQjE40tyEd4fxGZVMbBL5ifno0J0OauykS8UiCSgHi+DV/YD+rjFxFg==}
+ cacheable@2.3.2:
+ resolution: {integrity: sha512-w+ZuRNmex9c1TR9RcsxbfTKCjSL0rh1WA5SABbrWprIHeNBdmyQLSYonlDy9gpD+63XT8DgZ/wNh1Smvc9WnJA==}
callsites@3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
@@ -1928,11 +1838,11 @@ packages:
resolution: {integrity: sha512-8WB3Jcas3swSvjIeA2yvCJ+Miyz5l1ZmB6HFb9R1317dt9LCQoswg/BGrmAmkWVEszSrrg4RwmO46qIm2OEnSA==}
engines: {node: '>=16'}
- caniuse-lite@1.0.30001760:
- resolution: {integrity: sha512-7AAMPcueWELt1p3mi13HR/LHH0TJLT11cnwDJEs3xA4+CK/PLKeO9Kl1oru24htkyUKtkGCvAx4ohB0Ttry8Dw==}
+ caniuse-lite@1.0.30001770:
+ resolution: {integrity: sha512-x/2CLQ1jHENRbHg5PSId2sXq1CIO1CISvwWAj027ltMVG2UNgW+w9oH2+HzgEIRFembL8bUlXtfbBHR1fCg2xw==}
- chai@6.2.1:
- resolution: {integrity: sha512-p4Z49OGG5W/WBCPSS/dH3jQ73kD6tiMmUM+bckNK6Jr5JHMG3k9bg/BvKR8lKmtVBKmOiuVaV2ws8s9oSbwysg==}
+ chai@6.2.2:
+ resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==}
engines: {node: '>=18'}
chalk@4.1.2:
@@ -1972,8 +1882,8 @@ packages:
peerDependencies:
chevrotain: ^11.0.0
- chevrotain@11.0.3:
- resolution: {integrity: sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==}
+ chevrotain@11.1.1:
+ resolution: {integrity: sha512-f0yv5CPKaFxfsPTBzX7vGuim4oIC1/gcS7LUGdBSwl2dU6+FON6LVUksdOo1qJjoUvXNn45urgh8C+0a24pACQ==}
chokidar@3.6.0:
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
@@ -1986,8 +1896,8 @@ packages:
resolution: {integrity: sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==}
engines: {node: '>=6.0'}
- ci-info@4.3.1:
- resolution: {integrity: sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==}
+ ci-info@4.4.0:
+ resolution: {integrity: sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg==}
engines: {node: '>=8'}
citeproc@2.4.63:
@@ -1997,8 +1907,8 @@ packages:
resolution: {integrity: sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==}
engines: {node: '>=4'}
- clippie@4.1.9:
- resolution: {integrity: sha512-YaNJI8f2bPRVVfdKDUeqSPuQEztyOowee7DIc/DJ48qNJGq/SziipiWN6oWT6q9FR4QN0JzFDpP+fDtkSZyFHw==}
+ clippie@4.1.10:
+ resolution: {integrity: sha512-zUjK2fLH8/wju2lks5mH0u8wSRYCOJoHfT1KQ61+aCT5O1ouONnSrnKQ3BTKvIYLUYJarbLZo4FLHyce/SLF2g==}
clone-deep@4.0.1:
resolution: {integrity: sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==}
@@ -2007,8 +1917,8 @@ packages:
codemirror-spell-checker@1.1.2:
resolution: {integrity: sha512-2Tl6n0v+GJRsC9K3MLCdLaMOmvWL0uukajNJseorZJsslaxZyZMgENocPU8R0DyoTAiKsyqiemSOZo7kjGV0LQ==}
- codemirror@5.65.20:
- resolution: {integrity: sha512-i5dLDDxwkFCbhjvL2pNjShsojoL3XHyDwsGv1jqETUoW+lzpBKKqNTUWgQwVAOa0tUm4BwekT455ujafi8payA==}
+ codemirror@5.65.21:
+ resolution: {integrity: sha512-6teYk0bA0nR3QP0ihGMoxuKzpl5W80FpnHpBJpgy66NK3cZv5b/d/HY8PnRvfSsCG1MTfr92u2WUl+wT0E40mQ==}
color-convert@2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
@@ -2031,8 +1941,8 @@ packages:
resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==}
engines: {node: '>=18'}
- commander@14.0.2:
- resolution: {integrity: sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ==}
+ commander@14.0.3:
+ resolution: {integrity: sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw==}
engines: {node: '>=20'}
commander@2.20.3:
@@ -2050,8 +1960,8 @@ packages:
resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==}
engines: {node: '>= 12'}
- comment-parser@1.4.1:
- resolution: {integrity: sha512-buhp5kePrmda3vhc5B9t7pUQXAb2Tnd0qgpkIhPhkHXxJpiPJ11H0ZEU0oBpJ2QztSbzG/ZxMj/CHsYJqRHmyg==}
+ comment-parser@1.4.5:
+ resolution: {integrity: sha512-aRDkn3uyIlCFfk5NUA+VdwMmMsh8JGhc4hapfV4yxymHGQ3BVskMQfoXGpCo5IoBuQ9tS5iiVKhCpTcB4pW4qw==}
engines: {node: '>= 12.0.0'}
compare-versions@6.1.1:
@@ -2063,8 +1973,8 @@ packages:
confbox@0.1.8:
resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==}
- core-js-compat@3.47.0:
- resolution: {integrity: sha512-IGfuznZ/n7Kp9+nypamBhvwdwLsW6KC8IOaURw2doAK5e98AG3acVLdh0woOnEqCfUtS+Vu882JE4k/DAm3ItQ==}
+ core-js-compat@3.48.0:
+ resolution: {integrity: sha512-OM4cAF3D6VtH/WkLtWvyNC56EZVXsZdU3iqaMG2B4WvYrlqU831pc4UtG5yp0sE9z8Y02wVN7PjW5Zf9Gt0f1Q==}
core-js@3.32.2:
resolution: {integrity: sha512-pxXSw1mYZPDGvTQqEc5vgIb83jGQKFGYWY76z4a7weZXUolw3G+OvpZqSRcfYOoOVUQJYEPsWeQK8pKEnUtWxQ==}
@@ -2091,15 +2001,15 @@ packages:
resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
engines: {node: '>= 8'}
- css-functions-list@3.2.3:
- resolution: {integrity: sha512-IQOkD3hbR5KrN93MtcYuad6YPuTSUhntLHDuLEbFWE+ff2/XSZNdZG+LcbbIW5AXKg/WFIfYItIzVoHngHXZzA==}
- engines: {node: '>=12 || >=16'}
+ css-functions-list@3.3.3:
+ resolution: {integrity: sha512-8HFEBPKhOpJPEPu70wJJetjKta86Gw9+CCyCnB3sui2qQfOvRyqBy4IKLKKAwdMpWb2lHXWk9Wb4Z6AmaUT1Pg==}
+ engines: {node: '>=12'}
- css-loader@7.1.2:
- resolution: {integrity: sha512-6WvYYn7l/XEGN8Xu2vWFt9nVzrCn39vKyTEFf/ExEyoksJjjSZV/0/35XPlMbpnr6VGhZIUg5yJrL8tGfes/FA==}
+ css-loader@7.1.4:
+ resolution: {integrity: sha512-vv3J9tlOl04WjiMvHQI/9tmIrCxVrj6PFbHemBB1iihpeRbi/I4h033eoFIhwxBBqLhI0KYFS7yvynBFhIZfTw==}
engines: {node: '>= 18.12.0'}
peerDependencies:
- '@rspack/core': 0.x || 1.x
+ '@rspack/core': 0.x || ^1.0.0 || ^2.0.0-0
webpack: ^5.27.0
peerDependenciesMeta:
'@rspack/core':
@@ -2207,8 +2117,8 @@ packages:
resolution: {integrity: sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==}
engines: {node: '>=12'}
- d3-format@3.1.0:
- resolution: {integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==}
+ d3-format@3.1.2:
+ resolution: {integrity: sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==}
engines: {node: '>=12'}
d3-geo@3.1.1:
@@ -2316,8 +2226,8 @@ packages:
supports-color:
optional: true
- decode-named-character-reference@1.2.0:
- resolution: {integrity: sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==}
+ decode-named-character-reference@1.3.0:
+ resolution: {integrity: sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==}
decode-uri-component@0.2.2:
resolution: {integrity: sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==}
@@ -2347,10 +2257,6 @@ packages:
didyoumean@1.2.2:
resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==}
- dir-glob@3.0.1:
- resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==}
- engines: {node: '>=8'}
-
dlv@1.1.3:
resolution: {integrity: sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==}
@@ -2386,8 +2292,11 @@ packages:
easymde@2.20.0:
resolution: {integrity: sha512-V1Z5f92TfR42Na852OWnIZMbM7zotWQYTddNaLYZFVKj7APBbyZ3FYJ27gBw2grMW3R6Qdv9J8n5Ij7XRSIgXQ==}
- electron-to-chromium@1.5.267:
- resolution: {integrity: sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==}
+ electron-to-chromium@1.5.286:
+ resolution: {integrity: sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==}
+
+ elkjs@0.9.3:
+ resolution: {integrity: sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==}
emoji-regex@10.6.0:
resolution: {integrity: sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==}
@@ -2402,14 +2311,22 @@ packages:
resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==}
engines: {node: '>= 4'}
- enhanced-resolve@5.18.4:
- resolution: {integrity: sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q==}
+ enhanced-resolve@5.19.0:
+ resolution: {integrity: sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg==}
engines: {node: '>=10.13.0'}
entities@4.5.0:
resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==}
engines: {node: '>=0.12'}
+ entities@6.0.1:
+ resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==}
+ engines: {node: '>=0.12'}
+
+ entities@7.0.1:
+ resolution: {integrity: sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==}
+ engines: {node: '>=0.12'}
+
env-paths@2.2.1:
resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==}
engines: {node: '>=6'}
@@ -2428,18 +2345,13 @@ packages:
es-module-lexer@2.0.0:
resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==}
- esbuild-loader@4.4.0:
- resolution: {integrity: sha512-4J+hXTpTtEdzUNLoY8ReqDNJx2NoldfiljRCiKbeYUuZmVaiJeDqFgyAzz8uOopaekwRoCcqBFyEroGQLFVZ1g==}
+ esbuild-loader@4.4.2:
+ resolution: {integrity: sha512-8LdoT9sC7fzfvhxhsIAiWhzLJr9yT3ggmckXxsgvM07wgrRxhuT98XhLn3E7VczU5W5AFsPKv9DdWcZIubbWkQ==}
peerDependencies:
webpack: ^4.40.0 || ^5.0.0
- esbuild@0.25.12:
- resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==}
- engines: {node: '>=18'}
- hasBin: true
-
- esbuild@0.27.2:
- resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==}
+ esbuild@0.27.3:
+ resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==}
engines: {node: '>=18'}
hasBin: true
@@ -2579,14 +2491,14 @@ packages:
resolution: {integrity: sha512-brcKcxGnISN2CcVhXJ/kEQlNa0MEfGRtwKtWA16SkqXHKitaKIMrfemJKLKX1YqDU5C/5JY3PvZXd5jEW04e0Q==}
engines: {node: '>=5.0.0'}
- eslint-plugin-playwright@2.4.0:
- resolution: {integrity: sha512-MWNXfXlLfwXAjj4Z80PvCCFCXgCYy5OCHan57Z/beGrjkJ3maG1GanuGX8Ck6T6fagplBx2ZdkifxSfByftaTQ==}
+ eslint-plugin-playwright@2.5.1:
+ resolution: {integrity: sha512-q7oqVQTTfa3VXJQ8E+ln0QttPGrs/XmSO1FjOMzQYBMYF3btih4FIrhEYh34JF184GYDmq3lJ/n7CMa49OHBvA==}
engines: {node: '>=16.9.0'}
peerDependencies:
eslint: '>=8.40.0'
- eslint-plugin-prettier@5.5.4:
- resolution: {integrity: sha512-swNtI95SToIz05YINMA6Ox5R057IMAmWZ26GqPxusAp1TZzj+IdY9tXNWWD3vkF/wEqydCONcwjTFpxybBqZsg==}
+ eslint-plugin-prettier@5.5.5:
+ resolution: {integrity: sha512-hscXkbqUZ2sPithAuLm5MXL+Wph+U7wHngPBv9OMWwlP8iaflyxpjTYZkmdgB4/vPIhemRlBEoLrH7UC1n7aUw==}
engines: {node: ^14.18.0 || >=16.0.0}
peerDependencies:
'@types/eslint': '>=8.0.0'
@@ -2599,19 +2511,19 @@ packages:
eslint-config-prettier:
optional: true
- eslint-plugin-regexp@2.10.0:
- resolution: {integrity: sha512-ovzQT8ESVn5oOe5a7gIDPD5v9bCSjIFJu57sVPDqgPRXicQzOnYfFN21WoQBQF18vrhT5o7UMKFwJQVVjyJ0ng==}
- engines: {node: ^18 || >=20}
+ eslint-plugin-regexp@3.0.0:
+ resolution: {integrity: sha512-iW7hgAV8NOG6E2dz+VeKpq67YLQ9jaajOKYpoOSic2/q8y9BMdXBKkSR9gcMtbqEhNQzdW41E3wWzvhp8ExYwQ==}
+ engines: {node: ^20.19.0 || ^22.13.0 || >=24}
peerDependencies:
- eslint: '>=8.44.0'
+ eslint: '>=9.38.0'
- eslint-plugin-sonarjs@3.0.5:
- resolution: {integrity: sha512-dI62Ff3zMezUToi161hs2i1HX1ie8Ia2hO0jtNBfdgRBicAG4ydy2WPt0rMTrAe3ZrlqhpAO3w1jcQEdneYoFA==}
+ eslint-plugin-sonarjs@3.0.7:
+ resolution: {integrity: sha512-62jB20krIPvcwBLAyG3VVKa2ce2j2lL1yCb8Y0ylMRR/dLvCCTiQx8gQbXb+G81k1alPZ2/I3muZinqWQdBbzw==}
peerDependencies:
eslint: ^8.0.0 || ^9.0.0
- eslint-plugin-unicorn@62.0.0:
- resolution: {integrity: sha512-HIlIkGLkvf29YEiS/ImuDZQbP12gWyx5i3C6XrRxMvVdqMroCI9qoVYCoIl17ChN+U89pn9sVwLxhIWj5nEc7g==}
+ eslint-plugin-unicorn@63.0.0:
+ resolution: {integrity: sha512-Iqecl9118uQEXYh7adylgEmGfkn5es3/mlQTLLkd4pXkIk9CTGrAbeUux+YljSa2ohXCBmQQ0+Ej1kZaFgcfkA==}
engines: {node: ^20.10.0 || >=21.0.0}
peerDependencies:
eslint: '>=9.38.0'
@@ -2623,13 +2535,13 @@ packages:
eslint: '>=5.0.0'
vue-eslint-parser: '>=7.1.0'
- eslint-plugin-vue@10.6.2:
- resolution: {integrity: sha512-nA5yUs/B1KmKzvC42fyD0+l9Yd+LtEpVhWRbXuDj0e+ZURcTtyRbMDWUeJmTAh2wC6jC83raS63anNM2YT3NPw==}
+ eslint-plugin-vue@10.8.0:
+ resolution: {integrity: sha512-f1J/tcbnrpgC8suPN5AtdJ5MQjuXbSU9pGRSSYAuF3SHoiYCOdEX6O22pLaRyLHXvDcOe+O5ENgc1owQ587agA==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
'@stylistic/eslint-plugin': ^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0
'@typescript-eslint/parser': ^7.0.0 || ^8.0.0
- eslint: ^8.57.0 || ^9.0.0
+ eslint: ^8.57.0 || ^9.0.0 || ^10.0.0
vue-eslint-parser: ^10.0.0
peerDependenciesMeta:
'@stylistic/eslint-plugin':
@@ -2637,8 +2549,8 @@ packages:
'@typescript-eslint/parser':
optional: true
- eslint-plugin-wc@3.0.2:
- resolution: {integrity: sha512-siwTrxPTw6GU2JmP3faInw8nhi0ZCnKsiSRM3j7EAkZmBTGYdDAToeseLYsvPrc5Urp/vPz+g7Ewh7XcICLxww==}
+ eslint-plugin-wc@3.1.0:
+ resolution: {integrity: sha512-spvXHD2/GTTgYXxFB3xlMThnXGUeNJaiCwWuPGzjDOLXnVGLcQpDt0fyiN6yiLoaLs/yhsj+7G1FpBZKeigCSA==}
peerDependencies:
eslint: '>=8.40.0'
@@ -2654,6 +2566,10 @@ packages:
resolution: {integrity: sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
+ eslint-scope@9.1.0:
+ resolution: {integrity: sha512-CkWE42hOJsNj9FJRaoMX9waUFYhqY4jmyLFdAdzZr6VaCg3ynLYx4WnOdkaIifGfH4gsUcBTn4OZbHXkpLD0FQ==}
+ engines: {node: ^20.19.0 || ^22.13.0 || >=24}
+
eslint-visitor-keys@3.4.3:
resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==}
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
@@ -2662,6 +2578,10 @@ packages:
resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
+ eslint-visitor-keys@5.0.0:
+ resolution: {integrity: sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==}
+ engines: {node: ^20.19.0 || ^22.13.0 || >=24}
+
eslint@9.39.2:
resolution: {integrity: sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -2676,8 +2596,12 @@ packages:
resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
- esquery@1.6.0:
- resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==}
+ espree@11.1.0:
+ resolution: {integrity: sha512-WFWYhO1fV4iYkqOOvq8FbqIhr2pYfoDY0kCotMkDeNtGpiGGkZ1iov2u8ydjtgM8yF8rzK7oaTbw2NAzbAbehw==}
+ engines: {node: ^20.19.0 || ^22.13.0 || >=24}
+
+ esquery@1.7.0:
+ resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==}
engines: {node: '>=0.10'}
esrecurse@4.3.0:
@@ -2736,8 +2660,8 @@ packages:
resolution: {integrity: sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==}
engines: {node: '>= 4.9.1'}
- fastq@1.19.1:
- resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==}
+ fastq@1.20.1:
+ resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==}
fdir@6.5.0:
resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==}
@@ -2754,8 +2678,8 @@ packages:
fflate@0.8.2:
resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==}
- file-entry-cache@11.1.1:
- resolution: {integrity: sha512-TPVFSDE7q91Dlk1xpFLvFllf8r0HyOMOlnWy7Z2HBku5H3KhIeOGInexrIeg2D64DosVB/JXkrrk6N/7Wriq4A==}
+ file-entry-cache@11.1.2:
+ resolution: {integrity: sha512-N2WFfK12gmrK1c1GXOqiAJ1tc5YE+R53zvQ+t5P8S5XhnmKYVB5eZEiLNZKDSmoG8wqqbF9EXYBBW/nef19log==}
file-entry-cache@8.0.0:
resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==}
@@ -2781,8 +2705,8 @@ packages:
resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==}
engines: {node: '>=16'}
- flat-cache@6.1.19:
- resolution: {integrity: sha512-l/K33newPTZMTGAnnzaiqSl6NnH7Namh8jBNjrgjprWxGmZUuxx/sJNIRaijOh3n7q7ESbhNZC+pvVZMFdeU4A==}
+ flat-cache@6.1.20:
+ resolution: {integrity: sha512-AhHYqwvN62NVLp4lObVXGVluiABTHapoB57EyegZVmazN+hhGhLTn3uZbOofoTw4DSDvVCadzzyChXhOAvy8uQ==}
flat@5.0.2:
resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==}
@@ -2811,8 +2735,8 @@ packages:
resolution: {integrity: sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==}
engines: {node: '>=18'}
- get-tsconfig@4.13.0:
- resolution: {integrity: sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==}
+ get-tsconfig@4.13.6:
+ resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
@@ -2827,7 +2751,7 @@ packages:
glob@7.2.3:
resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==}
- deprecated: Glob versions prior to v9 are no longer supported
+ deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me
global-modules@2.0.0:
resolution: {integrity: sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==}
@@ -2845,9 +2769,13 @@ packages:
resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==}
engines: {node: '>=18'}
- globby@11.1.0:
- resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==}
- engines: {node: '>=10'}
+ globals@17.3.0:
+ resolution: {integrity: sha512-yMqGUQVVCkD4tqjOJf3TnrvaaHDMYp4VlUSObbkIiuCPe/ofdMBFIAcBbCSRFWOnos6qRiTVStDwqPLUclaxIw==}
+ engines: {node: '>=18'}
+
+ globby@16.1.0:
+ resolution: {integrity: sha512-+A4Hq7m7Ze592k9gZRy4gJ27DrXRNnC1vPjxTt1qQxEY8RxagBkBxivkCwg7FxSTG0iLLEMaUx13oOr0R2/qcQ==}
+ engines: {node: '>=20'}
globjoin@0.1.4:
resolution: {integrity: sha512-xYfnw62CKG8nLkZBfWbhWwDw02CHty86jfPcc2cr3ZfeuK9ysoVPPEUxf21bAD/rWAgk52SuBrLJlefNy8mvFg==}
@@ -2862,27 +2790,31 @@ packages:
resolution: {integrity: sha512-tSQXBXS/MWQOn/RKckawJ61vvsDpCom87JgxiYdGwHdOa0ht0vzUWDlfioofFCRU0L+6NGDt6XzbgoJvZkMeRQ==}
engines: {node: '>=0.8.0'}
- happy-dom@20.0.11:
- resolution: {integrity: sha512-QsCdAUHAmiDeKeaNojb1OHOPF7NjcWPBR7obdu3NwH2a/oyQaLg5d0aaCy/9My6CdPChYF07dvz5chaXBGaD4g==}
+ happy-dom@20.6.1:
+ resolution: {integrity: sha512-+0vhESXXhFwkdjZnJ5DlmJIfUYGgIEEjzIjB+aKJbFuqlvvKyOi+XkI1fYbgYR9QCxG5T08koxsQ6HrQfa5gCQ==}
engines: {node: '>=20.0.0'}
has-flag@4.0.0:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
+ has-flag@5.0.1:
+ resolution: {integrity: sha512-CsNUt5x9LUdx6hnk/E2SZLsDyvfqANZSUq4+D3D8RzDJ2M+HDTIkF60ibS1vHaK55vzgiZw1bEPFG9yH7l33wA==}
+ engines: {node: '>=12'}
+
hash-sum@2.0.0:
resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==}
- hashery@1.3.0:
- resolution: {integrity: sha512-fWltioiy5zsSAs9ouEnvhsVJeAXRybGCNNv0lvzpzNOSDbULXRy7ivFWwCCv4I5Am6kSo75hmbsCduOoc2/K4w==}
+ hashery@1.4.0:
+ resolution: {integrity: sha512-Wn2i1In6XFxl8Az55kkgnFRiAlIAushzh26PTjL2AKtQcEfXrcLa7Hn5QOWGZEf3LU057P9TwwZjFyxfS1VuvQ==}
engines: {node: '>=20'}
- hookified@1.14.0:
- resolution: {integrity: sha512-pi1ynXIMFx/uIIwpWJ/5CEtOHLGtnUB0WhGeeYT+fKcQ+WCQbm3/rrkAXnpfph++PgepNqPdTC2WTj8A6k6zoQ==}
+ hookified@1.15.1:
+ resolution: {integrity: sha512-MvG/clsADq1GPM2KGo2nyfaWVyn9naPiXrqIe4jYjXNZQt238kWyOGrsyc/DmRAQ+Re6yeo6yX/yoNCG5KAEVg==}
- html-tags@3.3.1:
- resolution: {integrity: sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==}
- engines: {node: '>=8'}
+ html-tags@5.1.0:
+ resolution: {integrity: sha512-n6l5uca7/y5joxZ3LUePhzmBFUJ+U2YWzhMa8XUTecSeSlQiZdF5XAd/Q3/WUl0VsXgUwWi8I7CNIwdI5WN1SQ==}
+ engines: {node: '>=20.10'}
htmlparser2@8.0.2:
resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==}
@@ -2923,6 +2855,9 @@ packages:
engines: {node: '>=8'}
hasBin: true
+ import-meta-resolve@4.2.0:
+ resolution: {integrity: sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg==}
+
imurmurhash@0.1.4:
resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==}
engines: {node: '>=0.8.19'}
@@ -3001,6 +2936,10 @@ packages:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
+ is-path-inside@4.0.0:
+ resolution: {integrity: sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==}
+ engines: {node: '>=12'}
+
is-plain-object@2.0.4:
resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==}
engines: {node: '>=0.10.0'}
@@ -3022,6 +2961,10 @@ packages:
resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==}
engines: {node: '>=0.10.0'}
+ jackspeak@4.2.3:
+ resolution: {integrity: sha512-ykkVRwrYvFm1nb2AJfKKYPr0emF6IiXDYUaFx4Zn9ZuIH7MrzEZ3sD5RlqGXNRpHtvUHJyOnCEFxOlNDtGo7wg==}
+ engines: {node: 20 || >=22}
+
jest-worker@27.5.1:
resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
engines: {node: '>= 10.13.0'}
@@ -3034,8 +2977,8 @@ packages:
resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==}
hasBin: true
- jquery@3.7.1:
- resolution: {integrity: sha512-m4avr8yL8kmFN8psrbFFFmB/If14iN5o9nw/NgnnM+kybDJpRsAynV2BsfpTYrTRysYUdADVD7CkUUizgkpLfg==}
+ jquery@4.0.0:
+ resolution: {integrity: sha512-TXCHVR3Lb6TZdtw1l3RTLf8RBWVGexdxL6AC8/e0xZKEpBflBsjh9/8LXw+dkNFuOyW9B7iB3O1sP7hS0Kiacg==}
js-levenshtein-esm@2.0.0:
resolution: {integrity: sha512-1n4LEPOL4wRXY8rOQcuA7Iuaphe5xCMayvufCzlLAi+hRsnBRDbSS6XPuV58CBVJxj5D9ApFLyjQ7KzFToyHBw==}
@@ -3050,9 +2993,9 @@ packages:
resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==}
hasBin: true
- jsdoc-type-pratt-parser@4.8.0:
- resolution: {integrity: sha512-iZ8Bdb84lWRuGHamRXFyML07r21pcwBrLkHEuHgEY5UbCouBwv7ECknDRKzsQIXMiqpPymqtIf8TC/shYKB5rw==}
- engines: {node: '>=12.0.0'}
+ jsdoc-type-pratt-parser@7.1.1:
+ resolution: {integrity: sha512-/2uqY7x6bsrpi3i9LVU6J89352C0rpMk0as8trXxCtvd4kPk1ke/Eyif6wqfSLvoNJqcDG9Vk4UsXgygzCt2xA==}
+ engines: {node: '>=20.0.0'}
jsesc@3.1.0:
resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==}
@@ -3101,15 +3044,15 @@ packages:
just-extend@5.1.1:
resolution: {integrity: sha512-b+z6yF1d4EOyDgylzQo5IminlUmzSeqR1hs/bzjBNjuGras4FXq/6TrzjxfN0j+TmI0ltJzTNlqXUMCniciwKQ==}
- katex@0.16.27:
- resolution: {integrity: sha512-aeQoDkuRWSqQN6nSvVCEFvfXdqo1OQiCmmW1kc9xSdjutPv7BGO7pqY9sQRJpMOGrEdfDgF2TfRXe5eUAD2Waw==}
+ katex@0.16.28:
+ resolution: {integrity: sha512-YHzO7721WbmAL6Ov1uzN/l5mY5WWWhJBSW+jq4tkfZfsxmo1hu6frS0EOswvjBUnWE6NtjEs48SFn5CQESRLZg==}
hasBin: true
keyv@4.5.4:
resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==}
- keyv@5.5.5:
- resolution: {integrity: sha512-FA5LmZVF1VziNc0bIdCSA1IoSVnDCqE8HJIZZv2/W8YmoAM50+tnUgJR/gQZwEeIMleuIOnRnHA/UaZRNeV4iQ==}
+ keyv@5.6.0:
+ resolution: {integrity: sha512-CYDD3SOtsHtyXeEORYRx2qBtpDJFjRTGXUtmNEMGyzYOKj1TE3tycdlho7kA1Ufx9OYWZzg52QFBGALTirzDSw==}
khroma@2.1.0:
resolution: {integrity: sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==}
@@ -3125,9 +3068,9 @@ packages:
known-css-properties@0.37.0:
resolution: {integrity: sha512-JCDrsP4Z1Sb9JwG0aJ8Eo2r7k4Ou5MwmThS/6lcIe1ICyb7UBJKGRIUUdqc2ASdE/42lgz6zFUnzAIhtXnBVrQ==}
- langium@3.3.1:
- resolution: {integrity: sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==}
- engines: {node: '>=16.0.0'}
+ langium@4.2.1:
+ resolution: {integrity: sha512-zu9QWmjpzJcomzdJQAHgDVhLGq5bLosVak1KVa40NzQHXfqr4eAHupvnPOVXEoLkg6Ocefvf/93d//SB7du4YQ==}
+ engines: {node: '>=20.10.0', npm: '>=10.2.3'}
language-subtag-registry@0.3.23:
resolution: {integrity: sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==}
@@ -3172,11 +3115,8 @@ packages:
resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
engines: {node: '>=10'}
- lodash-es@4.17.21:
- resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==}
-
- lodash-es@4.17.22:
- resolution: {integrity: sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==}
+ lodash-es@4.17.23:
+ resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==}
lodash.camelcase@4.3.0:
resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==}
@@ -3199,8 +3139,8 @@ packages:
lodash.upperfirst@4.3.1:
resolution: {integrity: sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==}
- lodash@4.17.21:
- resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
+ lodash@4.17.23:
+ resolution: {integrity: sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==}
magic-string@0.30.21:
resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
@@ -3209,8 +3149,8 @@ packages:
resolution: {integrity: sha512-K6K2NgKnTXimT3779/4KxSvobxOtMmx1LBZ3NwRxT/MDIR3Br/fQ4Q+WCX5QxjyUR8zg5+RV9Tbf2c5pAWTD2A==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
- markdown-it@14.1.0:
- resolution: {integrity: sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==}
+ markdown-it@14.1.1:
+ resolution: {integrity: sha512-BuU2qnTti9YKgK5N+IeMubp14ZUKUUw7yeJbkjtosvHiP0AZ5c8IAgEMk79D0eC8F23r4Ac/q8cAIFdm2FtyoA==}
hasBin: true
markdownlint-cli@0.47.0:
@@ -3237,12 +3177,12 @@ packages:
engines: {node: '>= 12'}
hasBin: true
- material-icon-theme@5.29.0:
- resolution: {integrity: sha512-Kr6D+NgLCWYJjsTjGuIOoKUFG/uomUpLREhyV/9g4qWJMNfm7b1BYYMglRIdQg1IiY7WKqyTws8Ufsad6oFLUA==}
+ material-icon-theme@5.31.0:
+ resolution: {integrity: sha512-PPeGSRa+8stQEKvCr2Xym9KIqf2SPwl1chc7cxbK+aY6ORpwOcowtARQEXstZBjQwXTE5GnfE0zg0MFFy+XPzA==}
engines: {vscode: ^1.55.0}
- mathml-tag-names@2.1.3:
- resolution: {integrity: sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg==}
+ mathml-tag-names@4.0.0:
+ resolution: {integrity: sha512-aa6AU2Pcx0VP/XWnh8IGL0SYSgQHDT6Ucror2j2mXeFAlN3ahaNs8EZtG1YiticMkSLj3Gt6VPFfZogt7G5iFQ==}
mdn-data@2.0.28:
resolution: {integrity: sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==}
@@ -3253,9 +3193,9 @@ packages:
mdurl@2.0.0:
resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==}
- meow@13.2.0:
- resolution: {integrity: sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==}
- engines: {node: '>=18'}
+ meow@14.0.0:
+ resolution: {integrity: sha512-JhC3R1f6dbspVtmF3vKjAWz1EVIvwFrGGPLSdU6rK79xBwHWTuHoLnRX/t1/zHS1Ch1Y2UtIrih7DAHuH9JFJA==}
+ engines: {node: '>=20'}
merge-stream@2.0.0:
resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==}
@@ -3264,8 +3204,8 @@ packages:
resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
engines: {node: '>= 8'}
- mermaid@11.12.2:
- resolution: {integrity: sha512-n34QPDPEKmaeCG4WDMGy0OT6PSyxKCfy2pJgShP+Qow2KLrvWjclwbc3yXfSIf4BanqWEhQEpngWwNp/XhZt6w==}
+ mermaid@11.12.3:
+ resolution: {integrity: sha512-wN5ZSgJQIC+CHJut9xaKWsknLxaFBwCPwPkGTSUYrTiHORWvpT8RxGk849HPnpUAQ+/9BPRqYb80jTpearrHzQ==}
micromark-core-commonmark@2.0.3:
resolution: {integrity: sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==}
@@ -3354,14 +3294,22 @@ packages:
resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
engines: {node: '>= 0.6'}
- mini-css-extract-plugin@2.9.4:
- resolution: {integrity: sha512-ZWYT7ln73Hptxqxk2DxPU9MmapXRhxkJD6tkSR04dnQxm8BGu2hzgKLugK5yySD97u/8yy7Ma7E76k9ZdvtjkQ==}
+ mini-css-extract-plugin@2.10.0:
+ resolution: {integrity: sha512-540P2c5dYnJlyJxTaSloliZexv8rji6rY8FhQN+WF/82iHQfA23j/xtJx97L+mXOML27EqksSek/g4eK7jaL3g==}
engines: {node: '>= 12.13.0'}
peerDependencies:
webpack: ^5.0.0
- minimatch@10.1.1:
- resolution: {integrity: sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==}
+ minimatch@10.1.2:
+ resolution: {integrity: sha512-fu656aJ0n2kcXwsnwnv9g24tkU5uSmOlTjd6WyyaKm2Z+h1qmY6bAjrcaIxF/BslFqbZ8UBtbJi7KgQOZD2PTw==}
+ engines: {node: 20 || >=22}
+
+ minimatch@10.1.3:
+ resolution: {integrity: sha512-IF6URNyBX7Z6XfvjpaNy5meRxPZiIf2OqtOoSLs+hLJ9pJAScnM1RjrFcbCaD85y42KcI+oZmKjFIJKYDFjQfg==}
+ engines: {node: 20 || >=22}
+
+ minimatch@10.2.1:
+ resolution: {integrity: sha512-MClCe8IL5nRRmawL6ib/eT4oLyeKMGCghibcDWK+J0hh0Q8kqSdia6BvbRMVk6mPa6WqUa5uR2oxt6C5jd533A==}
engines: {node: 20 || >=22}
minimatch@3.1.2:
@@ -3464,8 +3412,8 @@ packages:
once@1.4.0:
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
- online-3d-viewer@0.17.0:
- resolution: {integrity: sha512-CTymQf5hozDHCqgypWYTmwq6+moVyWSDZdCkSovGklipP1oQy7YCEupLvkmJjex27Sxeeyq2Q9GH3+cxKUwpvg==}
+ online-3d-viewer@0.18.0:
+ resolution: {integrity: sha512-y7ZlV/zkakNUyjqcXz6XecA7vXgLEUnaAey9tyx8o6/wcdV64RfjXAQOjGXGY2JOZoDi4Cg1ic9icSWMWAvRQA==}
optionator@0.9.4:
resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==}
@@ -3526,18 +3474,14 @@ packages:
path-parse@1.0.7:
resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
- path-type@4.0.0:
- resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==}
- engines: {node: '>=8'}
-
pathe@2.0.3:
resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==}
pdfobject@2.3.1:
resolution: {integrity: sha512-vluuGiSDmMGpOvWFGiUY4trNB8aGKLDVxIXuuGHjX0kK3bMxCANUVtLivctE7uejLBScWCnbVarKatFVvdwXaQ==}
- perfect-debounce@2.0.0:
- resolution: {integrity: sha512-fkEH/OBiKrqqI/yIgjR92lMfs2K8105zt/VT6+7eTjNwisrsh47CeIED9z58zI7DfKdH3uHAn25ziRZn3kgAow==}
+ perfect-debounce@2.1.0:
+ resolution: {integrity: sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==}
picocolors@1.1.1:
resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
@@ -3565,13 +3509,13 @@ packages:
pkg-types@1.3.1:
resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==}
- playwright-core@1.57.0:
- resolution: {integrity: sha512-agTcKlMw/mjBWOnD6kFZttAAGHgi/Nw0CZ2o6JqWSbMlI219lAFLZZCyqByTsvVAJq5XA5H8cA6PrvBRpBWEuQ==}
+ playwright-core@1.58.2:
+ resolution: {integrity: sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==}
engines: {node: '>=18'}
hasBin: true
- playwright@1.57.0:
- resolution: {integrity: sha512-ilYQj1s8sr2ppEJ2YVadYBN0Mb3mdo9J0wQ+UuDhzYqURwSoW4n1Xs5vs7ORwgDGmyEh33tRMeS8KhdkMoLXQw==}
+ playwright@1.58.2:
+ resolution: {integrity: sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==}
engines: {node: '>=18'}
hasBin: true
@@ -3585,8 +3529,8 @@ packages:
points-on-path@0.2.1:
resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==}
- postcss-html@1.8.0:
- resolution: {integrity: sha512-5mMeb1TgLWoRKxZ0Xh9RZDfwUUIqRrcxO2uXO+Ezl1N5lqpCiSU5Gk6+1kZediBfBHFtPCdopr2UZ2SgUsKcgQ==}
+ postcss-html@1.8.1:
+ resolution: {integrity: sha512-OLF6P7qctfAWayOhLpcVnTGqVeJzu2W3WpIYelfz2+JV5oGxfkcEvweN9U4XpeqE0P98dcD9ssusGwlF0TK0uQ==}
engines: {node: ^12 || >=14}
postcss-import@15.1.0:
@@ -3613,11 +3557,11 @@ packages:
ts-node:
optional: true
- postcss-loader@8.2.0:
- resolution: {integrity: sha512-tHX+RkpsXVcc7st4dSdDGliI+r4aAQDuv+v3vFYHixb6YgjreG5AG4SEB0kDK8u2s6htqEEpKlkhSBUTvWKYnA==}
+ postcss-loader@8.2.1:
+ resolution: {integrity: sha512-k98jtRzthjj3f76MYTs9JTpRqV1RaaMhEU0Lpw9OTmQZQdppg4B30VZ74BojuBHt3F4KyubHJoXCMUeM8Bqeow==}
engines: {node: '>= 18.12.0'}
peerDependencies:
- '@rspack/core': 0.x || 1.x
+ '@rspack/core': 0.x || ^1.0.0 || ^2.0.0-0
postcss: ^7.0.0 || ^8.0.1
webpack: ^5.0.0
peerDependenciesMeta:
@@ -3656,9 +3600,6 @@ packages:
peerDependencies:
postcss: ^8.2.14
- postcss-resolve-nested-selector@0.1.6:
- resolution: {integrity: sha512-0sglIs9Wmkzbr8lQwEyIzlDOOC9bGmfVKcJTaxv3vMmd3uo4o4DerC3En0bnmgceeql9BfC8hRkp7cg0fjdVqw==}
-
postcss-safe-parser@6.0.0:
resolution: {integrity: sha512-FARHN8pwH+WiS2OPCxJI8FuRJpTVnn6ZNFiqAM2aeW2LwTHWWmWgIyKC6cUo0L8aeKiF/14MNvnpls6R2PBeMQ==}
engines: {node: '>=12.0'}
@@ -3700,12 +3641,12 @@ packages:
resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
engines: {node: '>= 0.8.0'}
- prettier-linter-helpers@1.0.0:
- resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==}
+ prettier-linter-helpers@1.0.1:
+ resolution: {integrity: sha512-SxToR7P8Y2lWmv/kTzVLC1t/GDI2WGjMwNhLLE9qtH8Q13C+aEmuRlzDst4Up4s0Wc8sF2M+J57iB3cMLqftfg==}
engines: {node: '>=6.0.0'}
- prettier@3.7.4:
- resolution: {integrity: sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==}
+ prettier@3.8.1:
+ resolution: {integrity: sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==}
engines: {node: '>=14'}
hasBin: true
@@ -3717,8 +3658,8 @@ packages:
resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
engines: {node: '>=6'}
- qified@0.5.3:
- resolution: {integrity: sha512-kXuQdQTB6oN3KhI6V4acnBSZx8D2I4xzZvn9+wFLLFCoBNQY/sFnCW6c43OL7pOQ2HvGV4lnWIXNmgfp7cTWhQ==}
+ qified@0.6.0:
+ resolution: {integrity: sha512-tsSGN1x3h569ZSU1u6diwhltLyfUWDp3YbFHedapTmpBl0B3P6U3+Qptg7xu+v+1io1EwhdPyyRHYbEw0KN2FA==}
engines: {node: '>=20'}
queue-microtask@1.2.3:
@@ -3793,8 +3734,8 @@ packages:
robust-predicates@3.0.2:
resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==}
- rollup@4.53.5:
- resolution: {integrity: sha512-iTNAbFSlRpcHeeWu73ywU/8KuU/LZmNCSxp6fjQkJBD3ivUb8tpDrXhIxEzA05HlYMEwmtaUnb3RP+YNv162OQ==}
+ rollup@4.57.1:
+ resolution: {integrity: sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==}
engines: {node: '>=18.0.0', npm: '>=8.0.0'}
hasBin: true
@@ -3814,8 +3755,9 @@ packages:
sax@1.2.4:
resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==}
- sax@1.4.3:
- resolution: {integrity: sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==}
+ sax@1.4.4:
+ resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==}
+ engines: {node: '>=11.0.0'}
schema-utils@4.3.3:
resolution: {integrity: sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==}
@@ -3829,27 +3771,22 @@ packages:
resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==}
hasBin: true
- semver@7.7.2:
- resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==}
- engines: {node: '>=10'}
- hasBin: true
-
- semver@7.7.3:
- resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
+ semver@7.7.4:
+ resolution: {integrity: sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==}
engines: {node: '>=10'}
hasBin: true
serialize-javascript@6.0.2:
resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==}
- seroval-plugins@1.3.3:
- resolution: {integrity: sha512-16OL3NnUBw8JG1jBLUoZJsLnQq0n5Ua6aHalhJK4fMQkz1lqR7Osz1sA30trBtd9VUDc2NgkuRCn8+/pBwqZ+w==}
+ seroval-plugins@1.5.0:
+ resolution: {integrity: sha512-EAHqADIQondwRZIdeW2I636zgsODzoBDwb3PT/+7TLDWyw1Dy/Xv7iGUIEXXav7usHDE9HVhOU61irI3EnyyHA==}
engines: {node: '>=10'}
peerDependencies:
seroval: ^1.0
- seroval@1.3.2:
- resolution: {integrity: sha512-RbcPH1n5cfwKrru7v7+zrZvjLurgHhGyso3HTyGtRivGWgYjbOmGuivCQaORNELjNONoK35nj28EoWul9sb1zQ==}
+ seroval@1.5.0:
+ resolution: {integrity: sha512-OE4cvmJ1uSPrKorFIH9/w/Qwuvi/IMcGbv5RKgcJ/zjA/IohDLU6SVaxFN9FwajbP7nsX0dQqMDes1whk3y+yw==}
engines: {node: '>=10'}
shallow-clone@3.0.1:
@@ -3871,9 +3808,9 @@ packages:
resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==}
engines: {node: '>=14'}
- slash@3.0.0:
- resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
- engines: {node: '>=8'}
+ slash@5.1.0:
+ resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==}
+ engines: {node: '>=14.16'}
slice-ansi@4.0.0:
resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==}
@@ -3883,8 +3820,8 @@ packages:
resolution: {integrity: sha512-QlaZEqcAH3/RtNyet1IPIYPsEWAaYyXXv1Krsi+1L/QHppjX4Ifm8MQsBISz9vE8cHicIq3clogsheili5vhaQ==}
engines: {node: '>= 18'}
- solid-js@1.9.10:
- resolution: {integrity: sha512-Coz956cos/EPDlhs6+jsdTxKuJDPT7B5SVIWgABwROyxjY7Xbr8wkzD68Et+NxnV7DLJ3nJdAC2r9InuV/4Jew==}
+ solid-js@1.9.11:
+ resolution: {integrity: sha512-WEJtcc5mkh/BnHA6Yrg4whlF8g6QwpmXXRg4P2ztPmcKeHHlH4+djYecBLhSpecZY2RRECXYUwIc/C2r3yzQ4Q==}
solid-transition-group@0.2.3:
resolution: {integrity: sha512-iB72c9N5Kz9ykRqIXl0lQohOau4t0dhel9kjwFvx81UZJbVwaChMuBuyhiZmK24b8aKEK0w3uFM96ZxzcyZGdg==}
@@ -3892,8 +3829,8 @@ packages:
peerDependencies:
solid-js: ^1.6.12
- sortablejs@1.15.6:
- resolution: {integrity: sha512-aNfiuwMEpfBM/CN6LY0ibyhxPfPbyFeBTYJKCvzkJ2GkUpazIt3H+QIPAMHwqQ7tMKaHz1Qj+rJJCqljnf4p3A==}
+ sortablejs@1.15.7:
+ resolution: {integrity: sha512-Kk8wLQPlS+yi1ZEf48a4+fzHa4yxjC30M/Sr2AnQu+f/MPwvvX9XjZ6OWejiz8crBsLwSq8GHqaxaET7u6ux0A==}
source-list-map@2.0.1:
resolution: {integrity: sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==}
@@ -3938,8 +3875,8 @@ packages:
spdx-satisfies@5.0.1:
resolution: {integrity: sha512-Nwor6W6gzFp8XX4neaKQ7ChV4wmpSh2sSDemMFSzHxpTw460jxFYeOn+jq4ybnSSw/5sc3pjka9MQPouksQNpw==}
- spectral-cli-bundle@1.0.3:
- resolution: {integrity: sha512-LUsOK0XKl/C2IhlDwBlXz+7qU2rnGbSlu8nqSFB/K+TbPjjmqoCYjG82YFJCmEHurbthvTJ8WRP735vl+3rY2Q==}
+ spectral-cli-bundle@1.0.7:
+ resolution: {integrity: sha512-vIUC0nwv9tYxWV1xHdR3CTVDOEEtLKaDCcQpARZgO0Db7VmSpSWJ4xrnVPNSmO59hBtGwW2CVzHf0OimJBaKAA==}
engines: {node: '>=20'}
hasBin: true
@@ -3965,6 +3902,10 @@ packages:
resolution: {integrity: sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==}
engines: {node: '>=20'}
+ string-width@8.1.1:
+ resolution: {integrity: sha512-KpqHIdDL9KwYk22wEOg/VIqYbrnLeSApsKT/bSj6Ez7pn3CftUiLAv2Lccpq1ALcpLV9UX1Ppn92npZWu2w/aw==}
+ engines: {node: '>=20'}
+
strip-ansi@6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
@@ -3988,17 +3929,17 @@ packages:
style-search@0.1.0:
resolution: {integrity: sha512-Dj1Okke1C3uKKwQcetra4jSuk0DqbzbYtXipzFlFMZtowbF1x7BKJwB9AayVMyFARvU8EDrZdcax4At/452cAg==}
- stylelint-config-recommended@17.0.0:
- resolution: {integrity: sha512-WaMSdEiPfZTSFVoYmJbxorJfA610O0tlYuU2aEwY33UQhSPgFbClrVJYWvy3jGJx+XW37O+LyNLiZOEXhKhJmA==}
- engines: {node: '>=18.12.0'}
+ stylelint-config-recommended@18.0.0:
+ resolution: {integrity: sha512-mxgT2XY6YZ3HWWe3Di8umG6aBmWmHTblTgu/f10rqFXnyWxjKWwNdjSWkgkwCtxIKnqjSJzvFmPT5yabVIRxZg==}
+ engines: {node: '>=20.19.0'}
peerDependencies:
- stylelint: ^16.23.0
+ stylelint: ^17.0.0
- stylelint-declaration-block-no-ignored-properties@2.8.0:
- resolution: {integrity: sha512-Ws8Cav7Y+SPN0JsV407LrnNXWOrqGjxShf+37GBtnU/C58Syve9c0+I/xpLcFOosST3ternykn3Lp77f3ITnFw==}
- engines: {node: '>=6'}
+ stylelint-declaration-block-no-ignored-properties@3.0.0:
+ resolution: {integrity: sha512-3ml4NgSW6nkHQrk+/ounU7Qljfb7e7FayHzU7Mry6rF9X28RXyPLD2bNn4QVOO7t98d5EGCCVkNbHCZSx+bNUQ==}
+ engines: {node: '>=20.19.0'}
peerDependencies:
- stylelint: ^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0 || ^11.0.0 || ^12.0.0 || ^13.0.0 || ^14.0.0 || ^15.0.0 || ^16.0.0
+ stylelint: ^17.0.0
stylelint-declaration-strict-value@1.10.11:
resolution: {integrity: sha512-oVQvhZlFZAiDz9r2BPFZLtTGm1A2JVhdKObKAJoTjFfR4F/NpApC4bMBTxf4sZS76Na3njYKVOaAaKSZ4+FU+g==}
@@ -4006,15 +3947,15 @@ packages:
peerDependencies:
stylelint: '>=7 <=16'
- stylelint-value-no-unknown-custom-properties@6.0.1:
- resolution: {integrity: sha512-N60PTdaTknB35j6D4FhW0GL2LlBRV++bRpXMMldWMQZ240yFQaoltzlLY4lXXs7Z0J5mNUYZQ/gjyVtU2DhCMA==}
+ stylelint-value-no-unknown-custom-properties@6.1.1:
+ resolution: {integrity: sha512-eQ1zidKD5t9zMEaskjGUY4W47lH76qMlmsDSmCAPEwtaGzB4Ls7ORTfysC1D6hamp2zFC+vN1vpQ+GFz3Tw3lw==}
engines: {node: '>=18.12.0'}
peerDependencies:
stylelint: '>=16'
- stylelint@16.26.1:
- resolution: {integrity: sha512-v20V59/crfc8sVTAtge0mdafI3AdnzQ2KsWe6v523L4OA1bJO02S7MO2oyXDCS6iWb9ckIPnqAFVItqSBQr7jw==}
- engines: {node: '>=18.12.0'}
+ stylelint@17.3.0:
+ resolution: {integrity: sha512-1POV91lcEMhj6SLVaOeA0KlS9yattS+qq+cyWqP/nYzWco7K5jznpGH1ExngvPlTM9QF1Kjd2bmuzJu9TH2OcA==}
+ engines: {node: '>=20.19.0'}
hasBin: true
stylis@4.3.6:
@@ -4032,6 +3973,10 @@ packages:
superstruct@0.10.13:
resolution: {integrity: sha512-W4SitSZ9MOyMPbHreoZVEneSZyPEeNGbdfJo/7FkJyRs/M3wQRFzq+t3S/NBwlrFSWdx1ONLjLb9pB+UKe4IqQ==}
+ supports-color@10.2.2:
+ resolution: {integrity: sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==}
+ engines: {node: '>=18'}
+
supports-color@7.2.0:
resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
engines: {node: '>=8'}
@@ -4040,9 +3985,9 @@ packages:
resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==}
engines: {node: '>=10'}
- supports-hyperlinks@3.2.0:
- resolution: {integrity: sha512-zFObLMyZeEwzAoKCyu1B91U79K2t7ApXuQfo8OuxwXLDgcKxuwM+YvcbIhm6QWqz7mHUH1TVytR1PwVVjEuMig==}
- engines: {node: '>=14.18'}
+ supports-hyperlinks@4.4.0:
+ resolution: {integrity: sha512-UKbpT93hN5Nr9go5UY7bopIB9YQlMz9nm/ct4IXt/irb5YRkn9WaqrOBJGZ5Pwvsd5FQzSVeYlGdXoCAPQZrPg==}
+ engines: {node: '>=20'}
supports-preserve-symlinks-flag@1.0.0:
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
@@ -4062,15 +4007,15 @@ packages:
svgson@5.3.1:
resolution: {integrity: sha512-qdPgvUNWb40gWktBJnbJRelWcPzkLed/ShhnRsjbayXz8OtdPOzbil9jtiZdrYvSDumAz/VNQr6JaNfPx/gvPA==}
- swagger-ui-dist@5.31.0:
- resolution: {integrity: sha512-zSUTIck02fSga6rc0RZP3b7J7wgHXwLea8ZjgLA3Vgnb8QeOl3Wou2/j5QkzSGeoz6HusP/coYuJl33aQxQZpg==}
+ swagger-ui-dist@5.31.1:
+ resolution: {integrity: sha512-XdgQ8wkRGj1P0H0Vvo0TRMOQNz+8Q8J64/vcPOhxlaFx9eB3PYvHMXeyNrP46PXa9SUs/cg7OW/jm9U34KzUfA==}
sync-fetch@0.4.5:
resolution: {integrity: sha512-esiWJ7ixSKGpd9DJPBTC4ckChqdOjIwJfYhVHkcQ2Gnm41323p1TRmEI+esTQ9ppD+b5opps2OTEGTCGX5kF+g==}
engines: {node: '>=14'}
- synckit@0.11.11:
- resolution: {integrity: sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==}
+ synckit@0.11.12:
+ resolution: {integrity: sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==}
engines: {node: ^14.18.0 || >=16.0.0}
table@6.9.0:
@@ -4102,8 +4047,8 @@ packages:
uglify-js:
optional: true
- terser@5.44.1:
- resolution: {integrity: sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==}
+ terser@5.46.0:
+ resolution: {integrity: sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==}
engines: {node: '>=10'}
hasBin: true
@@ -4124,9 +4069,6 @@ packages:
tinybench@2.9.0:
resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==}
- tinycolor2@1.6.0:
- resolution: {integrity: sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==}
-
tinyexec@1.0.2:
resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==}
engines: {node: '>=18'}
@@ -4155,8 +4097,8 @@ packages:
tributejs@5.1.3:
resolution: {integrity: sha512-B5CXihaVzXw+1UHhNFyAwUTMDk1EfoLP5Tj1VhD9yybZ1I8DZJEv8tZ1l0RJo0t0tk9ZhR8eG5tEsaCvRigmdQ==}
- ts-api-utils@2.1.0:
- resolution: {integrity: sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==}
+ ts-api-utils@2.4.0:
+ resolution: {integrity: sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==}
engines: {node: '>=18.12'}
peerDependencies:
typescript: '>=4.8.4'
@@ -4182,11 +4124,11 @@ packages:
resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==}
engines: {node: '>=16'}
- typescript-eslint@8.50.0:
- resolution: {integrity: sha512-Q1/6yNUmCpH94fbgMUMg2/BSAr/6U7GBk61kZTv1/asghQOWOjTlp9K8mixS5NcJmm2creY+UFfGeW/+OcA64A==}
+ typescript-eslint@8.56.0:
+ resolution: {integrity: sha512-c7toRLrotJ9oixgdW7liukZpsnq5CZ7PuKztubGYlNppuTqhIoWfhgHo/7EU0v06gS2l/x0i2NEFK1qMIf0rIg==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
- eslint: ^8.57.0 || ^9.0.0
+ eslint: ^8.57.0 || ^9.0.0 || ^10.0.0
typescript: '>=4.8.4 <6.0.0'
typescript@5.9.3:
@@ -4200,18 +4142,19 @@ packages:
uc.micro@2.1.0:
resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==}
- ufo@1.6.1:
- resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==}
+ ufo@1.6.3:
+ resolution: {integrity: sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==}
uint8-to-base64@0.2.1:
resolution: {integrity: sha512-uO/84GaoDUfiAxpa8EksjVLE77A9Kc7ZTziN4zRpq4de9yLaLcZn3jx1/sVjyupsywcVX6RKWbqLe7gUNyzH+Q==}
- undici-types@6.21.0:
- resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
-
undici-types@7.16.0:
resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==}
+ unicorn-magic@0.4.0:
+ resolution: {integrity: sha512-wH590V9VNgYH9g3lH9wWjTrUoKsjLF6sGLjhR4sH1LWpLmCOH0Zf7PukhDA8BiS7KHe4oPNkcTHqYkj7SOGUOw==}
+ engines: {node: '>=20'}
+
unrs-resolver@1.11.1:
resolution: {integrity: sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==}
@@ -4221,8 +4164,8 @@ packages:
peerDependencies:
browserslist: '>= 4.21.0'
- updates@17.0.7:
- resolution: {integrity: sha512-VyFnSuoXC5qxpq2XVM2BaR0sjTXpDGYj6aTCNu92KvxtnpBVh1nPYJsSXKktgRGx4jsGXFjLhPsXndYpv/o8AA==}
+ updates@17.5.7:
+ resolution: {integrity: sha512-tL/MWd5iQ04J2CP9PYIjKxgM/ZM2J5CCq1M1yY4wUB/LNKU/7Sh0Ss5SnZ/aOzSlzQlWlOVgI/7ABJ/pAUp6dw==}
engines: {node: '>=22'}
hasBin: true
@@ -4239,11 +4182,13 @@ packages:
vanilla-colorful@0.7.2:
resolution: {integrity: sha512-z2YZusTFC6KnLERx1cgoIRX2CjPRP0W75N+3CC6gbvdX5Ch47rZkEMGO2Xnf+IEmi3RiFLxS18gayMA27iU7Kg==}
- vite-string-plugin@1.4.9:
- resolution: {integrity: sha512-mO7PVkMs8+FuTK9ZjBBCRSjabC9cobvUEbN2EjWtGJo6nu35SbW99bYesOh5Ho39ug/KSbT4VwM4GPC26Xk/mQ==}
+ vite-string-plugin@2.0.1:
+ resolution: {integrity: sha512-L5B86yQkYrqH5d966w1vI91B0d+0vmICgB6tqjINvtBIGU9qhFY7izqjytED/ApggFC4QTDWNjfF6nWMqY/fQg==}
+ peerDependencies:
+ vite: '*'
- vite@7.3.0:
- resolution: {integrity: sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==}
+ vite@7.3.1:
+ resolution: {integrity: sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==}
engines: {node: ^20.19.0 || >=22.12.0}
hasBin: true
peerDependencies:
@@ -4282,18 +4227,18 @@ packages:
yaml:
optional: true
- vitest@4.0.16:
- resolution: {integrity: sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==}
+ vitest@4.0.18:
+ resolution: {integrity: sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==}
engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0}
hasBin: true
peerDependencies:
'@edge-runtime/vm': '*'
'@opentelemetry/api': ^1.9.0
'@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0
- '@vitest/browser-playwright': 4.0.16
- '@vitest/browser-preview': 4.0.16
- '@vitest/browser-webdriverio': 4.0.16
- '@vitest/ui': 4.0.16
+ '@vitest/browser-playwright': 4.0.18
+ '@vitest/browser-preview': 4.0.18
+ '@vitest/browser-webdriverio': 4.0.18
+ '@vitest/ui': 4.0.18
happy-dom: '*'
jsdom: '*'
peerDependenciesMeta:
@@ -4333,9 +4278,6 @@ packages:
resolution: {integrity: sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==}
hasBin: true
- vscode-uri@3.0.8:
- resolution: {integrity: sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==}
-
vscode-uri@3.1.0:
resolution: {integrity: sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==}
@@ -4348,11 +4290,11 @@ packages:
chart.js: ^4.1.1
vue: ^3.0.0-0 || ^2.7.0
- vue-eslint-parser@10.2.0:
- resolution: {integrity: sha512-CydUvFOQKD928UzZhTp4pr2vWz1L+H99t7Pkln2QSPdvmURT0MoC4wUccfCnuEaihNsu9aYYyk+bep8rlfkUXw==}
+ vue-eslint-parser@10.4.0:
+ resolution: {integrity: sha512-Vxi9pJdbN3ZnVGLODVtZ7y4Y2kzAAE2Cm0CZ3ZDRvydVYxZ6VrnBhLikBsRS+dpwj4Jv4UCv21PTEwF5rQ9WXg==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
peerDependencies:
- eslint: ^8.57.0 || ^9.0.0
+ eslint: ^8.57.0 || ^9.0.0 || ^10.0.0
vue-loader@17.4.2:
resolution: {integrity: sha512-yTKOA4R/VN4jqjw4y5HrynFL8AK0Z3/Jt7eOJXEitsm0GMRHDBjCfCiuTiLP7OESvsZYo2pATCWhDqxC5ZrM6w==}
@@ -4366,22 +4308,22 @@ packages:
vue:
optional: true
- vue-tsc@3.1.8:
- resolution: {integrity: sha512-deKgwx6exIHeZwF601P1ktZKNF0bepaSN4jBU3AsbldPx9gylUc1JDxYppl82yxgkAgaz0Y0LCLOi+cXe9HMYA==}
+ vue-tsc@3.2.4:
+ resolution: {integrity: sha512-xj3YCvSLNDKt1iF9OcImWHhmYcihVu9p4b9s4PGR/qp6yhW+tZJaypGxHScRyOrdnHvaOeF+YkZOdKwbgGvp5g==}
hasBin: true
peerDependencies:
typescript: '>=5.0.0'
- vue@3.5.25:
- resolution: {integrity: sha512-YLVdgv2K13WJ6n+kD5owehKtEXwdwXuj2TTyJMsO7pSeKw2bfRNZGjhB7YzrpbMYj5b5QsUebHpOqR3R3ziy/g==}
+ vue@3.5.28:
+ resolution: {integrity: sha512-BRdrNfeoccSoIZeIhyPBfvWSLFP4q8J3u8Ju8Ug5vu3LdD+yTM13Sg4sKtljxozbnuMu1NB1X5HBHRYUzFocKg==}
peerDependencies:
typescript: '*'
peerDependenciesMeta:
typescript:
optional: true
- watchpack@2.4.4:
- resolution: {integrity: sha512-c5EGNOiyxxV5qmTtAB7rbiXxi1ooX1pQKMLX/MIabJjRA0SJBQOjKF+KSVfHkr9U1cADPon0mRiVe/riyaiDUA==}
+ watchpack@2.5.1:
+ resolution: {integrity: sha512-Zn5uXdcFNIA1+1Ei5McRd+iRzfhENPCe7LeABkJtNulSxjma+l7ltNx55BWZkRlwRnpOgHqxnjyaDgJnNXnqzg==}
engines: {node: '>=10.13.0'}
webidl-conversions@3.0.1:
@@ -4408,12 +4350,12 @@ packages:
webpack-sources@1.4.3:
resolution: {integrity: sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==}
- webpack-sources@3.3.3:
- resolution: {integrity: sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==}
+ webpack-sources@3.3.4:
+ resolution: {integrity: sha512-7tP1PdV4vF+lYPnkMR0jMY5/la2ub5Fc/8VQrrU+lXkiM6C4TjVfGw7iKfyhnTQOsD+6Q/iKw0eFciziRgD58Q==}
engines: {node: '>=10.13.0'}
- webpack@5.104.0:
- resolution: {integrity: sha512-5DeICTX8BVgNp6afSPYXAFjskIgWGlygQH58bcozPOXgo2r/6xx39Y1+cULZ3gTxUYQP88jmwLj2anu4Xaq84g==}
+ webpack@5.105.2:
+ resolution: {integrity: sha512-dRXm0a2qcHPUBEzVk8uph0xWSjV/xZxenQQbLwnwP7caQCYpqG1qddwlyEkIDkYn0K8tvmcrZ+bOrzoQ3HxCDw==}
engines: {node: '>=10.13.0'}
hasBin: true
peerDependencies:
@@ -4461,9 +4403,21 @@ packages:
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
- write-file-atomic@5.0.1:
- resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==}
- engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+ write-file-atomic@7.0.0:
+ resolution: {integrity: sha512-YnlPC6JqnZl6aO4uRc+dx5PHguiR9S6WeoLtpxNT9wIG+BDya7ZNE1q7KOjVgaA73hKhKLpVPgJ5QA9THQ5BRg==}
+ engines: {node: ^20.17.0 || >=22.9.0}
+
+ ws@8.19.0:
+ resolution: {integrity: sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==}
+ engines: {node: '>=10.0.0'}
+ peerDependencies:
+ bufferutil: ^4.0.1
+ utf-8-validate: '>=5.0.2'
+ peerDependenciesMeta:
+ bufferutil:
+ optional: true
+ utf-8-validate:
+ optional: true
xml-lexer@0.2.2:
resolution: {integrity: sha512-G0i98epIwiUEiKmMcavmVdhtymW+pCAohMRgybyIME9ygfVu8QheIi+YoQh3ngiThsT0SQzJT4R0sKDEv8Ou0w==}
@@ -4493,7 +4447,7 @@ snapshots:
package-manager-detector: 1.6.0
tinyexec: 1.0.2
- '@babel/code-frame@7.27.1':
+ '@babel/code-frame@7.29.0':
dependencies:
'@babel/helper-validator-identifier': 7.28.5
js-tokens: 4.0.0
@@ -4503,47 +4457,47 @@ snapshots:
'@babel/helper-validator-identifier@7.28.5': {}
- '@babel/parser@7.28.5':
+ '@babel/parser@7.29.0':
dependencies:
- '@babel/types': 7.28.5
+ '@babel/types': 7.29.0
- '@babel/runtime@7.28.4': {}
+ '@babel/runtime@7.28.6': {}
- '@babel/types@7.28.5':
+ '@babel/types@7.29.0':
dependencies:
'@babel/helper-string-parser': 7.27.1
'@babel/helper-validator-identifier': 7.28.5
- '@braintree/sanitize-url@7.1.1': {}
+ '@braintree/sanitize-url@7.1.2': {}
- '@cacheable/memory@2.0.6':
+ '@cacheable/memory@2.0.7':
dependencies:
- '@cacheable/utils': 2.3.2
- '@keyv/bigmap': 1.3.0(keyv@5.5.5)
- hookified: 1.14.0
- keyv: 5.5.5
+ '@cacheable/utils': 2.3.4
+ '@keyv/bigmap': 1.3.1(keyv@5.6.0)
+ hookified: 1.15.1
+ keyv: 5.6.0
- '@cacheable/utils@2.3.2':
+ '@cacheable/utils@2.3.4':
dependencies:
- hashery: 1.3.0
- keyv: 5.5.5
+ hashery: 1.4.0
+ keyv: 5.6.0
- '@chevrotain/cst-dts-gen@11.0.3':
+ '@chevrotain/cst-dts-gen@11.1.1':
dependencies:
- '@chevrotain/gast': 11.0.3
- '@chevrotain/types': 11.0.3
- lodash-es: 4.17.21
+ '@chevrotain/gast': 11.1.1
+ '@chevrotain/types': 11.1.1
+ lodash-es: 4.17.23
- '@chevrotain/gast@11.0.3':
+ '@chevrotain/gast@11.1.1':
dependencies:
- '@chevrotain/types': 11.0.3
- lodash-es: 4.17.21
+ '@chevrotain/types': 11.1.1
+ lodash-es: 4.17.23
- '@chevrotain/regexp-to-ast@11.0.3': {}
+ '@chevrotain/regexp-to-ast@11.1.1': {}
- '@chevrotain/types@11.0.3': {}
+ '@chevrotain/types@11.1.1': {}
- '@chevrotain/utils@11.0.3': {}
+ '@chevrotain/utils@11.1.1': {}
'@citation-js/core@0.7.21':
dependencies:
@@ -4565,72 +4519,79 @@ snapshots:
'@citation-js/name': 0.4.2
moo: 0.5.2
- '@citation-js/plugin-cff@0.6.1':
+ '@citation-js/plugin-cff@0.6.2':
dependencies:
'@citation-js/date': 0.5.1
- '@citation-js/plugin-yaml': 0.6.1
+ '@citation-js/plugin-yaml': 0.6.2
- '@citation-js/plugin-csl@0.7.21(@citation-js/core@0.7.21)':
+ '@citation-js/plugin-csl@0.7.22(@citation-js/core@0.7.21)':
dependencies:
'@citation-js/core': 0.7.21
'@citation-js/date': 0.5.1
citeproc: 2.4.63
- '@citation-js/plugin-github@0.6.1':
+ '@citation-js/plugin-github@0.6.2':
dependencies:
'@citation-js/date': 0.5.1
'@citation-js/name': 0.4.2
- '@citation-js/plugin-npm@0.6.1':
+ '@citation-js/plugin-npm@0.6.2':
dependencies:
'@citation-js/date': 0.5.1
'@citation-js/name': 0.4.2
- '@citation-js/plugin-software-formats@0.6.1':
+ '@citation-js/plugin-software-formats@0.6.2':
dependencies:
- '@citation-js/plugin-cff': 0.6.1
- '@citation-js/plugin-github': 0.6.1
- '@citation-js/plugin-npm': 0.6.1
- '@citation-js/plugin-yaml': 0.6.1
- '@citation-js/plugin-zenodo': 0.6.1
+ '@citation-js/plugin-cff': 0.6.2
+ '@citation-js/plugin-github': 0.6.2
+ '@citation-js/plugin-npm': 0.6.2
+ '@citation-js/plugin-yaml': 0.6.2
+ '@citation-js/plugin-zenodo': 0.6.2
- '@citation-js/plugin-yaml@0.6.1':
+ '@citation-js/plugin-yaml@0.6.2':
dependencies:
js-yaml: 4.1.1
- '@citation-js/plugin-zenodo@0.6.1':
+ '@citation-js/plugin-zenodo@0.6.2':
dependencies:
'@citation-js/date': 0.5.1
'@citation-js/name': 0.4.2
- '@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4)':
+ '@csstools/css-calc@3.1.1(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)':
dependencies:
- '@csstools/css-tokenizer': 3.0.4
+ '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0)
+ '@csstools/css-tokenizer': 4.0.0
- '@csstools/css-syntax-patches-for-csstree@1.0.21': {}
-
- '@csstools/css-tokenizer@3.0.4': {}
-
- '@csstools/media-query-list-parser@4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)':
+ '@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0)':
dependencies:
- '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4)
- '@csstools/css-tokenizer': 3.0.4
+ '@csstools/css-tokenizer': 4.0.0
- '@csstools/selector-specificity@5.0.0(postcss-selector-parser@7.1.1)':
+ '@csstools/css-syntax-patches-for-csstree@1.0.27': {}
+
+ '@csstools/css-tokenizer@4.0.0': {}
+
+ '@csstools/media-query-list-parser@5.0.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)':
+ dependencies:
+ '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0)
+ '@csstools/css-tokenizer': 4.0.0
+
+ '@csstools/selector-resolve-nested@4.0.0(postcss-selector-parser@7.1.1)':
+ dependencies:
+ postcss-selector-parser: 7.1.1
+
+ '@csstools/selector-specificity@6.0.0(postcss-selector-parser@7.1.1)':
dependencies:
postcss-selector-parser: 7.1.1
'@discoveryjs/json-ext@0.6.3': {}
- '@dual-bundle/import-meta-resolve@4.2.1': {}
-
- '@emnapi/core@1.7.1':
+ '@emnapi/core@1.8.1':
dependencies:
'@emnapi/wasi-threads': 1.1.0
tslib: 2.8.1
optional: true
- '@emnapi/runtime@1.7.1':
+ '@emnapi/runtime@1.8.1':
dependencies:
tslib: 2.8.1
optional: true
@@ -4640,175 +4601,95 @@ snapshots:
tslib: 2.8.1
optional: true
- '@esbuild/aix-ppc64@0.25.12':
+ '@esbuild/aix-ppc64@0.27.3':
optional: true
- '@esbuild/aix-ppc64@0.27.2':
+ '@esbuild/android-arm64@0.27.3':
optional: true
- '@esbuild/android-arm64@0.25.12':
+ '@esbuild/android-arm@0.27.3':
optional: true
- '@esbuild/android-arm64@0.27.2':
+ '@esbuild/android-x64@0.27.3':
optional: true
- '@esbuild/android-arm@0.25.12':
+ '@esbuild/darwin-arm64@0.27.3':
optional: true
- '@esbuild/android-arm@0.27.2':
+ '@esbuild/darwin-x64@0.27.3':
optional: true
- '@esbuild/android-x64@0.25.12':
+ '@esbuild/freebsd-arm64@0.27.3':
optional: true
- '@esbuild/android-x64@0.27.2':
+ '@esbuild/freebsd-x64@0.27.3':
optional: true
- '@esbuild/darwin-arm64@0.25.12':
+ '@esbuild/linux-arm64@0.27.3':
optional: true
- '@esbuild/darwin-arm64@0.27.2':
+ '@esbuild/linux-arm@0.27.3':
optional: true
- '@esbuild/darwin-x64@0.25.12':
+ '@esbuild/linux-ia32@0.27.3':
optional: true
- '@esbuild/darwin-x64@0.27.2':
+ '@esbuild/linux-loong64@0.27.3':
optional: true
- '@esbuild/freebsd-arm64@0.25.12':
+ '@esbuild/linux-mips64el@0.27.3':
optional: true
- '@esbuild/freebsd-arm64@0.27.2':
+ '@esbuild/linux-ppc64@0.27.3':
optional: true
- '@esbuild/freebsd-x64@0.25.12':
+ '@esbuild/linux-riscv64@0.27.3':
optional: true
- '@esbuild/freebsd-x64@0.27.2':
+ '@esbuild/linux-s390x@0.27.3':
optional: true
- '@esbuild/linux-arm64@0.25.12':
+ '@esbuild/linux-x64@0.27.3':
optional: true
- '@esbuild/linux-arm64@0.27.2':
+ '@esbuild/netbsd-arm64@0.27.3':
optional: true
- '@esbuild/linux-arm@0.25.12':
+ '@esbuild/netbsd-x64@0.27.3':
optional: true
- '@esbuild/linux-arm@0.27.2':
+ '@esbuild/openbsd-arm64@0.27.3':
optional: true
- '@esbuild/linux-ia32@0.25.12':
+ '@esbuild/openbsd-x64@0.27.3':
optional: true
- '@esbuild/linux-ia32@0.27.2':
+ '@esbuild/openharmony-arm64@0.27.3':
optional: true
- '@esbuild/linux-loong64@0.25.12':
+ '@esbuild/sunos-x64@0.27.3':
optional: true
- '@esbuild/linux-loong64@0.27.2':
+ '@esbuild/win32-arm64@0.27.3':
optional: true
- '@esbuild/linux-mips64el@0.25.12':
+ '@esbuild/win32-ia32@0.27.3':
optional: true
- '@esbuild/linux-mips64el@0.27.2':
+ '@esbuild/win32-x64@0.27.3':
optional: true
- '@esbuild/linux-ppc64@0.25.12':
- optional: true
-
- '@esbuild/linux-ppc64@0.27.2':
- optional: true
-
- '@esbuild/linux-riscv64@0.25.12':
- optional: true
-
- '@esbuild/linux-riscv64@0.27.2':
- optional: true
-
- '@esbuild/linux-s390x@0.25.12':
- optional: true
-
- '@esbuild/linux-s390x@0.27.2':
- optional: true
-
- '@esbuild/linux-x64@0.25.12':
- optional: true
-
- '@esbuild/linux-x64@0.27.2':
- optional: true
-
- '@esbuild/netbsd-arm64@0.25.12':
- optional: true
-
- '@esbuild/netbsd-arm64@0.27.2':
- optional: true
-
- '@esbuild/netbsd-x64@0.25.12':
- optional: true
-
- '@esbuild/netbsd-x64@0.27.2':
- optional: true
-
- '@esbuild/openbsd-arm64@0.25.12':
- optional: true
-
- '@esbuild/openbsd-arm64@0.27.2':
- optional: true
-
- '@esbuild/openbsd-x64@0.25.12':
- optional: true
-
- '@esbuild/openbsd-x64@0.27.2':
- optional: true
-
- '@esbuild/openharmony-arm64@0.25.12':
- optional: true
-
- '@esbuild/openharmony-arm64@0.27.2':
- optional: true
-
- '@esbuild/sunos-x64@0.25.12':
- optional: true
-
- '@esbuild/sunos-x64@0.27.2':
- optional: true
-
- '@esbuild/win32-arm64@0.25.12':
- optional: true
-
- '@esbuild/win32-arm64@0.27.2':
- optional: true
-
- '@esbuild/win32-ia32@0.25.12':
- optional: true
-
- '@esbuild/win32-ia32@0.27.2':
- optional: true
-
- '@esbuild/win32-x64@0.25.12':
- optional: true
-
- '@esbuild/win32-x64@0.27.2':
- optional: true
-
- '@eslint-community/eslint-plugin-eslint-comments@4.5.0(eslint@9.39.2(jiti@2.6.1))':
+ '@eslint-community/eslint-plugin-eslint-comments@4.6.0(eslint@9.39.2(jiti@2.6.1))':
dependencies:
escape-string-regexp: 4.0.0
eslint: 9.39.2(jiti@2.6.1)
- ignore: 5.3.2
+ ignore: 7.0.5
- '@eslint-community/eslint-utils@4.9.0(eslint@9.39.2(jiti@2.6.1))':
+ '@eslint-community/eslint-utils@4.9.1(eslint@9.39.2(jiti@2.6.1))':
dependencies:
eslint: 9.39.2(jiti@2.6.1)
eslint-visitor-keys: 3.4.3
- '@eslint-community/regexpp@4.12.1': {}
-
'@eslint-community/regexpp@4.12.2': {}
'@eslint/compat@1.4.1(eslint@9.39.2(jiti@2.6.1))':
@@ -4873,7 +4754,7 @@ snapshots:
'@github/relative-time-element@5.0.0': {}
- '@github/text-expander-element@2.9.2':
+ '@github/text-expander-element@2.9.4':
dependencies:
'@github/combobox-nav': 2.3.1
dom-input-range: 2.0.1
@@ -4901,10 +4782,12 @@ snapshots:
'@isaacs/balanced-match@4.0.1': {}
- '@isaacs/brace-expansion@5.0.0':
+ '@isaacs/brace-expansion@5.0.1':
dependencies:
'@isaacs/balanced-match': 4.0.1
+ '@isaacs/cliui@9.0.0': {}
+
'@jridgewell/gen-mapping@0.3.13':
dependencies:
'@jridgewell/sourcemap-codec': 1.5.5
@@ -4924,11 +4807,11 @@ snapshots:
'@jridgewell/resolve-uri': 3.1.2
'@jridgewell/sourcemap-codec': 1.5.5
- '@keyv/bigmap@1.3.0(keyv@5.5.5)':
+ '@keyv/bigmap@1.3.1(keyv@5.6.0)':
dependencies:
- hashery: 1.3.0
- hookified: 1.14.0
- keyv: 5.5.5
+ hashery: 1.4.0
+ hookified: 1.15.1
+ keyv: 5.6.0
'@keyv/serialize@1.1.1': {}
@@ -4940,14 +4823,20 @@ snapshots:
dependencies:
'@mcaptcha/core-glue': 0.1.0-alpha-5
- '@mermaid-js/parser@0.6.3':
+ '@mermaid-js/layout-elk@0.2.0(mermaid@11.12.3)':
dependencies:
- langium: 3.3.1
+ d3: 7.9.0
+ elkjs: 0.9.3
+ mermaid: 11.12.3
+
+ '@mermaid-js/parser@1.0.0':
+ dependencies:
+ langium: 4.2.1
'@napi-rs/wasm-runtime@0.2.12':
dependencies:
- '@emnapi/core': 1.7.1
- '@emnapi/runtime': 1.7.1
+ '@emnapi/core': 1.8.1
+ '@emnapi/runtime': 1.8.1
'@tybys/wasm-util': 0.10.1
optional: true
@@ -4961,7 +4850,7 @@ snapshots:
'@nodelib/fs.walk@1.2.8':
dependencies:
'@nodelib/fs.scandir': 2.1.5
- fastq: 1.19.1
+ fastq: 1.20.1
'@nolyfill/array-includes@1.0.44':
dependencies:
@@ -5017,148 +4906,159 @@ snapshots:
'@pkgr/core@0.2.9': {}
- '@playwright/test@1.57.0':
+ '@playwright/test@1.58.2':
dependencies:
- playwright: 1.57.0
+ playwright: 1.58.2
'@popperjs/core@2.11.8': {}
- '@primer/octicons@19.21.1':
+ '@primer/octicons@19.22.0':
dependencies:
object-assign: 4.1.1
'@resvg/resvg-wasm@2.6.2': {}
- '@rolldown/pluginutils@1.0.0-beta.53': {}
+ '@rolldown/pluginutils@1.0.0-rc.2': {}
- '@rollup/rollup-android-arm-eabi@4.53.5':
+ '@rollup/rollup-android-arm-eabi@4.57.1':
optional: true
- '@rollup/rollup-android-arm64@4.53.5':
+ '@rollup/rollup-android-arm64@4.57.1':
optional: true
- '@rollup/rollup-darwin-arm64@4.53.5':
+ '@rollup/rollup-darwin-arm64@4.57.1':
optional: true
- '@rollup/rollup-darwin-x64@4.53.5':
+ '@rollup/rollup-darwin-x64@4.57.1':
optional: true
- '@rollup/rollup-freebsd-arm64@4.53.5':
+ '@rollup/rollup-freebsd-arm64@4.57.1':
optional: true
- '@rollup/rollup-freebsd-x64@4.53.5':
+ '@rollup/rollup-freebsd-x64@4.57.1':
optional: true
- '@rollup/rollup-linux-arm-gnueabihf@4.53.5':
+ '@rollup/rollup-linux-arm-gnueabihf@4.57.1':
optional: true
- '@rollup/rollup-linux-arm-musleabihf@4.53.5':
+ '@rollup/rollup-linux-arm-musleabihf@4.57.1':
optional: true
- '@rollup/rollup-linux-arm64-gnu@4.53.5':
+ '@rollup/rollup-linux-arm64-gnu@4.57.1':
optional: true
- '@rollup/rollup-linux-arm64-musl@4.53.5':
+ '@rollup/rollup-linux-arm64-musl@4.57.1':
optional: true
- '@rollup/rollup-linux-loong64-gnu@4.53.5':
+ '@rollup/rollup-linux-loong64-gnu@4.57.1':
optional: true
- '@rollup/rollup-linux-ppc64-gnu@4.53.5':
+ '@rollup/rollup-linux-loong64-musl@4.57.1':
optional: true
- '@rollup/rollup-linux-riscv64-gnu@4.53.5':
+ '@rollup/rollup-linux-ppc64-gnu@4.57.1':
optional: true
- '@rollup/rollup-linux-riscv64-musl@4.53.5':
+ '@rollup/rollup-linux-ppc64-musl@4.57.1':
optional: true
- '@rollup/rollup-linux-s390x-gnu@4.53.5':
+ '@rollup/rollup-linux-riscv64-gnu@4.57.1':
optional: true
- '@rollup/rollup-linux-x64-gnu@4.53.5':
+ '@rollup/rollup-linux-riscv64-musl@4.57.1':
optional: true
- '@rollup/rollup-linux-x64-musl@4.53.5':
+ '@rollup/rollup-linux-s390x-gnu@4.57.1':
optional: true
- '@rollup/rollup-openharmony-arm64@4.53.5':
+ '@rollup/rollup-linux-x64-gnu@4.57.1':
optional: true
- '@rollup/rollup-win32-arm64-msvc@4.53.5':
+ '@rollup/rollup-linux-x64-musl@4.57.1':
optional: true
- '@rollup/rollup-win32-ia32-msvc@4.53.5':
+ '@rollup/rollup-openbsd-x64@4.57.1':
optional: true
- '@rollup/rollup-win32-x64-gnu@4.53.5':
+ '@rollup/rollup-openharmony-arm64@4.57.1':
optional: true
- '@rollup/rollup-win32-x64-msvc@4.53.5':
+ '@rollup/rollup-win32-arm64-msvc@4.57.1':
+ optional: true
+
+ '@rollup/rollup-win32-ia32-msvc@4.57.1':
+ optional: true
+
+ '@rollup/rollup-win32-x64-gnu@4.57.1':
+ optional: true
+
+ '@rollup/rollup-win32-x64-msvc@4.57.1':
optional: true
'@rtsao/scc@1.1.0': {}
'@scarf/scarf@1.4.0': {}
- '@silverwind/vue3-calendar-heatmap@2.1.1(tippy.js@6.3.7)(vue@3.5.25(typescript@5.9.3))':
+ '@silverwind/vue3-calendar-heatmap@2.1.1(tippy.js@6.3.7)(vue@3.5.28(typescript@5.9.3))':
dependencies:
tippy.js: 6.3.7
- vue: 3.5.25(typescript@5.9.3)
+ vue: 3.5.28(typescript@5.9.3)
'@simonwep/pickr@1.9.0':
dependencies:
core-js: 3.32.2
nanopop: 2.3.0
- '@solid-primitives/refs@1.1.2(solid-js@1.9.10)':
- dependencies:
- '@solid-primitives/utils': 6.3.2(solid-js@1.9.10)
- solid-js: 1.9.10
+ '@sindresorhus/merge-streams@4.0.0': {}
- '@solid-primitives/transition-group@1.1.2(solid-js@1.9.10)':
+ '@solid-primitives/refs@1.1.2(solid-js@1.9.11)':
dependencies:
- solid-js: 1.9.10
+ '@solid-primitives/utils': 6.3.2(solid-js@1.9.11)
+ solid-js: 1.9.11
- '@solid-primitives/utils@6.3.2(solid-js@1.9.10)':
+ '@solid-primitives/transition-group@1.1.2(solid-js@1.9.11)':
dependencies:
- solid-js: 1.9.10
+ solid-js: 1.9.11
+
+ '@solid-primitives/utils@6.3.2(solid-js@1.9.11)':
+ dependencies:
+ solid-js: 1.9.11
'@standard-schema/spec@1.1.0': {}
- '@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@2.6.1))':
+ '@stylistic/eslint-plugin@5.8.0(eslint@9.39.2(jiti@2.6.1))':
dependencies:
- '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1))
- '@typescript-eslint/types': 8.50.0
+ '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1))
+ '@typescript-eslint/types': 8.56.0
eslint: 9.39.2(jiti@2.6.1)
eslint-visitor-keys: 4.2.1
espree: 10.4.0
estraverse: 5.3.0
picomatch: 4.0.3
- '@stylistic/stylelint-plugin@4.0.0(stylelint@16.26.1(typescript@5.9.3))':
+ '@stylistic/stylelint-plugin@5.0.1(stylelint@17.3.0(typescript@5.9.3))':
dependencies:
- '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4)
- '@csstools/css-tokenizer': 3.0.4
- '@csstools/media-query-list-parser': 4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)
+ '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0)
+ '@csstools/css-tokenizer': 4.0.0
+ '@csstools/media-query-list-parser': 5.0.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)
postcss: 8.5.6
postcss-selector-parser: 7.1.1
postcss-value-parser: 4.2.0
style-search: 0.1.0
- stylelint: 16.26.1(typescript@5.9.3)
+ stylelint: 17.3.0(typescript@5.9.3)
'@swc/helpers@0.2.14': {}
- '@techknowlogick/license-checker-webpack-plugin@0.3.0(webpack@5.104.0)':
+ '@techknowlogick/license-checker-webpack-plugin@0.3.0(webpack@5.105.2)':
dependencies:
glob: 7.2.3
- lodash: 4.17.21
+ lodash: 4.17.23
minimatch: 3.1.2
semver: 6.3.1
spdx-expression-validate: 2.0.0
spdx-satisfies: 5.0.1
superstruct: 0.10.13
- webpack: 5.104.0(webpack-cli@6.0.1)
+ webpack: 5.105.2(webpack-cli@6.0.1)
webpack-sources: 1.4.3
wrap-ansi: 6.2.0
@@ -5241,7 +5141,7 @@ snapshots:
'@types/d3-selection@3.0.11': {}
- '@types/d3-shape@3.1.7':
+ '@types/d3-shape@3.1.8':
dependencies:
'@types/d3-path': 3.1.1
@@ -5286,7 +5186,7 @@ snapshots:
'@types/d3-scale': 4.0.9
'@types/d3-scale-chromatic': 3.1.0
'@types/d3-selection': 3.0.11
- '@types/d3-shape': 3.1.7
+ '@types/d3-shape': 3.1.8
'@types/d3-time': 3.0.4
'@types/d3-time-format': 4.0.3
'@types/d3-timer': 3.0.2
@@ -5313,6 +5213,8 @@ snapshots:
'@types/estree': 1.0.8
'@types/json-schema': 7.0.15
+ '@types/esrecurse@4.3.1': {}
+
'@types/estree@1.0.8': {}
'@types/geojson@7946.0.16': {}
@@ -5323,21 +5225,19 @@ snapshots:
dependencies:
'@types/sizzle': 2.3.10
+ '@types/js-yaml@4.0.9': {}
+
'@types/json-schema@7.0.15': {}
'@types/json5@0.0.29': {}
- '@types/katex@0.16.7': {}
+ '@types/katex@0.16.8': {}
'@types/marked@4.3.2': {}
'@types/ms@2.1.0': {}
- '@types/node@20.19.27':
- dependencies:
- undici-types: 6.21.0
-
- '@types/node@25.0.3':
+ '@types/node@25.2.3':
dependencies:
undici-types: 7.16.0
@@ -5355,8 +5255,6 @@ snapshots:
'@types/throttle-debounce@5.0.2': {}
- '@types/tinycolor2@1.4.6': {}
-
'@types/toastify-js@1.12.4': {}
'@types/trusted-types@2.0.7':
@@ -5366,96 +5264,100 @@ snapshots:
'@types/whatwg-mimetype@3.0.2': {}
- '@typescript-eslint/eslint-plugin@8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)':
+ '@types/ws@8.18.1':
+ dependencies:
+ '@types/node': 25.2.3
+
+ '@typescript-eslint/eslint-plugin@8.56.0(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
'@eslint-community/regexpp': 4.12.2
- '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
- '@typescript-eslint/scope-manager': 8.50.0
- '@typescript-eslint/type-utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
- '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
- '@typescript-eslint/visitor-keys': 8.50.0
+ '@typescript-eslint/parser': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/scope-manager': 8.56.0
+ '@typescript-eslint/type-utils': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/visitor-keys': 8.56.0
eslint: 9.39.2(jiti@2.6.1)
ignore: 7.0.5
natural-compare: 1.4.0
- ts-api-utils: 2.1.0(typescript@5.9.3)
+ ts-api-utils: 2.4.0(typescript@5.9.3)
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)':
+ '@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@typescript-eslint/scope-manager': 8.50.0
- '@typescript-eslint/types': 8.50.0
- '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3)
- '@typescript-eslint/visitor-keys': 8.50.0
+ '@typescript-eslint/scope-manager': 8.56.0
+ '@typescript-eslint/types': 8.56.0
+ '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3)
+ '@typescript-eslint/visitor-keys': 8.56.0
debug: 4.4.3
eslint: 9.39.2(jiti@2.6.1)
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/project-service@8.50.0(typescript@5.9.3)':
+ '@typescript-eslint/project-service@8.56.0(typescript@5.9.3)':
dependencies:
- '@typescript-eslint/tsconfig-utils': 8.50.0(typescript@5.9.3)
- '@typescript-eslint/types': 8.50.0
+ '@typescript-eslint/tsconfig-utils': 8.56.0(typescript@5.9.3)
+ '@typescript-eslint/types': 8.56.0
debug: 4.4.3
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/scope-manager@8.50.0':
+ '@typescript-eslint/scope-manager@8.56.0':
dependencies:
- '@typescript-eslint/types': 8.50.0
- '@typescript-eslint/visitor-keys': 8.50.0
+ '@typescript-eslint/types': 8.56.0
+ '@typescript-eslint/visitor-keys': 8.56.0
- '@typescript-eslint/tsconfig-utils@8.50.0(typescript@5.9.3)':
+ '@typescript-eslint/tsconfig-utils@8.56.0(typescript@5.9.3)':
dependencies:
typescript: 5.9.3
- '@typescript-eslint/type-utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)':
+ '@typescript-eslint/type-utils@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@typescript-eslint/types': 8.50.0
- '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3)
- '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/types': 8.56.0
+ '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
debug: 4.4.3
eslint: 9.39.2(jiti@2.6.1)
- ts-api-utils: 2.1.0(typescript@5.9.3)
+ ts-api-utils: 2.4.0(typescript@5.9.3)
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/types@8.50.0': {}
+ '@typescript-eslint/types@8.56.0': {}
- '@typescript-eslint/typescript-estree@8.50.0(typescript@5.9.3)':
+ '@typescript-eslint/typescript-estree@8.56.0(typescript@5.9.3)':
dependencies:
- '@typescript-eslint/project-service': 8.50.0(typescript@5.9.3)
- '@typescript-eslint/tsconfig-utils': 8.50.0(typescript@5.9.3)
- '@typescript-eslint/types': 8.50.0
- '@typescript-eslint/visitor-keys': 8.50.0
+ '@typescript-eslint/project-service': 8.56.0(typescript@5.9.3)
+ '@typescript-eslint/tsconfig-utils': 8.56.0(typescript@5.9.3)
+ '@typescript-eslint/types': 8.56.0
+ '@typescript-eslint/visitor-keys': 8.56.0
debug: 4.4.3
minimatch: 9.0.5
- semver: 7.7.3
+ semver: 7.7.4
tinyglobby: 0.2.15
- ts-api-utils: 2.1.0(typescript@5.9.3)
+ ts-api-utils: 2.4.0(typescript@5.9.3)
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)':
+ '@typescript-eslint/utils@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)':
dependencies:
- '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1))
- '@typescript-eslint/scope-manager': 8.50.0
- '@typescript-eslint/types': 8.50.0
- '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3)
+ '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1))
+ '@typescript-eslint/scope-manager': 8.56.0
+ '@typescript-eslint/types': 8.56.0
+ '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3)
eslint: 9.39.2(jiti@2.6.1)
typescript: 5.9.3
transitivePeerDependencies:
- supports-color
- '@typescript-eslint/visitor-keys@8.50.0':
+ '@typescript-eslint/visitor-keys@8.56.0':
dependencies:
- '@typescript-eslint/types': 8.50.0
- eslint-visitor-keys: 4.2.1
+ '@typescript-eslint/types': 8.56.0
+ eslint-visitor-keys: 5.0.0
'@unrs/resolver-binding-android-arm-eabi@1.11.1':
optional: true
@@ -5516,139 +5418,137 @@ snapshots:
'@unrs/resolver-binding-win32-x64-msvc@1.11.1':
optional: true
- '@vitejs/plugin-vue@6.0.3(vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))(vue@3.5.25(typescript@5.9.3))':
+ '@vitejs/plugin-vue@6.0.4(vite@7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))(vue@3.5.28(typescript@5.9.3))':
dependencies:
- '@rolldown/pluginutils': 1.0.0-beta.53
- vite: 7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2)
- vue: 3.5.25(typescript@5.9.3)
+ '@rolldown/pluginutils': 1.0.0-rc.2
+ vite: 7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)
+ vue: 3.5.28(typescript@5.9.3)
- '@vitest/eslint-plugin@1.5.2(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))':
+ '@vitest/eslint-plugin@1.6.9(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)(vitest@4.0.18(@types/node@25.2.3)(happy-dom@20.6.1)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))':
dependencies:
- '@typescript-eslint/scope-manager': 8.50.0
- '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/scope-manager': 8.56.0
+ '@typescript-eslint/utils': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
eslint: 9.39.2(jiti@2.6.1)
optionalDependencies:
typescript: 5.9.3
- vitest: 4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2)
+ vitest: 4.0.18(@types/node@25.2.3)(happy-dom@20.6.1)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)
transitivePeerDependencies:
- supports-color
- '@vitest/expect@4.0.16':
+ '@vitest/expect@4.0.18':
dependencies:
'@standard-schema/spec': 1.1.0
'@types/chai': 5.2.3
- '@vitest/spy': 4.0.16
- '@vitest/utils': 4.0.16
- chai: 6.2.1
+ '@vitest/spy': 4.0.18
+ '@vitest/utils': 4.0.18
+ chai: 6.2.2
tinyrainbow: 3.0.3
- '@vitest/mocker@4.0.16(vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))':
+ '@vitest/mocker@4.0.18(vite@7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))':
dependencies:
- '@vitest/spy': 4.0.16
+ '@vitest/spy': 4.0.18
estree-walker: 3.0.3
magic-string: 0.30.21
optionalDependencies:
- vite: 7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2)
+ vite: 7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)
- '@vitest/pretty-format@4.0.16':
+ '@vitest/pretty-format@4.0.18':
dependencies:
tinyrainbow: 3.0.3
- '@vitest/runner@4.0.16':
+ '@vitest/runner@4.0.18':
dependencies:
- '@vitest/utils': 4.0.16
+ '@vitest/utils': 4.0.18
pathe: 2.0.3
- '@vitest/snapshot@4.0.16':
+ '@vitest/snapshot@4.0.18':
dependencies:
- '@vitest/pretty-format': 4.0.16
+ '@vitest/pretty-format': 4.0.18
magic-string: 0.30.21
pathe: 2.0.3
- '@vitest/spy@4.0.16': {}
+ '@vitest/spy@4.0.18': {}
- '@vitest/utils@4.0.16':
+ '@vitest/utils@4.0.18':
dependencies:
- '@vitest/pretty-format': 4.0.16
+ '@vitest/pretty-format': 4.0.18
tinyrainbow: 3.0.3
- '@volar/language-core@2.4.26':
+ '@volar/language-core@2.4.27':
dependencies:
- '@volar/source-map': 2.4.26
+ '@volar/source-map': 2.4.27
- '@volar/source-map@2.4.26': {}
+ '@volar/source-map@2.4.27': {}
- '@volar/typescript@2.4.26':
+ '@volar/typescript@2.4.27':
dependencies:
- '@volar/language-core': 2.4.26
+ '@volar/language-core': 2.4.27
path-browserify: 1.0.1
vscode-uri: 3.1.0
- '@vue/compiler-core@3.5.25':
+ '@vue/compiler-core@3.5.28':
dependencies:
- '@babel/parser': 7.28.5
- '@vue/shared': 3.5.25
- entities: 4.5.0
+ '@babel/parser': 7.29.0
+ '@vue/shared': 3.5.28
+ entities: 7.0.1
estree-walker: 2.0.2
source-map-js: 1.2.1
- '@vue/compiler-dom@3.5.25':
+ '@vue/compiler-dom@3.5.28':
dependencies:
- '@vue/compiler-core': 3.5.25
- '@vue/shared': 3.5.25
+ '@vue/compiler-core': 3.5.28
+ '@vue/shared': 3.5.28
- '@vue/compiler-sfc@3.5.25':
+ '@vue/compiler-sfc@3.5.28':
dependencies:
- '@babel/parser': 7.28.5
- '@vue/compiler-core': 3.5.25
- '@vue/compiler-dom': 3.5.25
- '@vue/compiler-ssr': 3.5.25
- '@vue/shared': 3.5.25
+ '@babel/parser': 7.29.0
+ '@vue/compiler-core': 3.5.28
+ '@vue/compiler-dom': 3.5.28
+ '@vue/compiler-ssr': 3.5.28
+ '@vue/shared': 3.5.28
estree-walker: 2.0.2
magic-string: 0.30.21
postcss: 8.5.6
source-map-js: 1.2.1
- '@vue/compiler-ssr@3.5.25':
+ '@vue/compiler-ssr@3.5.28':
dependencies:
- '@vue/compiler-dom': 3.5.25
- '@vue/shared': 3.5.25
+ '@vue/compiler-dom': 3.5.28
+ '@vue/shared': 3.5.28
- '@vue/language-core@3.1.8(typescript@5.9.3)':
+ '@vue/language-core@3.2.4':
dependencies:
- '@volar/language-core': 2.4.26
- '@vue/compiler-dom': 3.5.25
- '@vue/shared': 3.5.25
- alien-signals: 3.1.1
+ '@volar/language-core': 2.4.27
+ '@vue/compiler-dom': 3.5.28
+ '@vue/shared': 3.5.28
+ alien-signals: 3.1.2
muggle-string: 0.4.1
path-browserify: 1.0.1
picomatch: 4.0.3
- optionalDependencies:
- typescript: 5.9.3
- '@vue/reactivity@3.5.25':
+ '@vue/reactivity@3.5.28':
dependencies:
- '@vue/shared': 3.5.25
+ '@vue/shared': 3.5.28
- '@vue/runtime-core@3.5.25':
+ '@vue/runtime-core@3.5.28':
dependencies:
- '@vue/reactivity': 3.5.25
- '@vue/shared': 3.5.25
+ '@vue/reactivity': 3.5.28
+ '@vue/shared': 3.5.28
- '@vue/runtime-dom@3.5.25':
+ '@vue/runtime-dom@3.5.28':
dependencies:
- '@vue/reactivity': 3.5.25
- '@vue/runtime-core': 3.5.25
- '@vue/shared': 3.5.25
+ '@vue/reactivity': 3.5.28
+ '@vue/runtime-core': 3.5.28
+ '@vue/shared': 3.5.28
csstype: 3.2.3
- '@vue/server-renderer@3.5.25(vue@3.5.25(typescript@5.9.3))':
+ '@vue/server-renderer@3.5.28(vue@3.5.28(typescript@5.9.3))':
dependencies:
- '@vue/compiler-ssr': 3.5.25
- '@vue/shared': 3.5.25
- vue: 3.5.25(typescript@5.9.3)
+ '@vue/compiler-ssr': 3.5.28
+ '@vue/shared': 3.5.28
+ vue: 3.5.28(typescript@5.9.3)
- '@vue/shared@3.5.25': {}
+ '@vue/shared@3.5.28': {}
'@webassemblyjs/ast@1.14.1':
dependencies:
@@ -5726,20 +5626,20 @@ snapshots:
'@webassemblyjs/ast': 1.14.1
'@xtuc/long': 4.2.2
- '@webpack-cli/configtest@3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)':
+ '@webpack-cli/configtest@3.0.1(webpack-cli@6.0.1)(webpack@5.105.2)':
dependencies:
- webpack: 5.104.0(webpack-cli@6.0.1)
- webpack-cli: 6.0.1(webpack@5.104.0)
+ webpack: 5.105.2(webpack-cli@6.0.1)
+ webpack-cli: 6.0.1(webpack@5.105.2)
- '@webpack-cli/info@3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)':
+ '@webpack-cli/info@3.0.1(webpack-cli@6.0.1)(webpack@5.105.2)':
dependencies:
- webpack: 5.104.0(webpack-cli@6.0.1)
- webpack-cli: 6.0.1(webpack@5.104.0)
+ webpack: 5.105.2(webpack-cli@6.0.1)
+ webpack-cli: 6.0.1(webpack@5.105.2)
- '@webpack-cli/serve@3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)':
+ '@webpack-cli/serve@3.0.1(webpack-cli@6.0.1)(webpack@5.105.2)':
dependencies:
- webpack: 5.104.0(webpack-cli@6.0.1)
- webpack-cli: 6.0.1(webpack@5.104.0)
+ webpack: 5.105.2(webpack-cli@6.0.1)
+ webpack-cli: 6.0.1(webpack@5.105.2)
'@xtuc/ieee754@1.2.0': {}
@@ -5755,17 +5655,17 @@ snapshots:
acorn@8.15.0: {}
- add-asset-webpack-plugin@3.1.1(webpack@5.104.0):
+ add-asset-webpack-plugin@3.1.1(webpack@5.105.2):
optionalDependencies:
- webpack: 5.104.0(webpack-cli@6.0.1)
+ webpack: 5.105.2(webpack-cli@6.0.1)
- ajv-formats@2.1.1(ajv@8.17.1):
+ ajv-formats@2.1.1(ajv@8.18.0):
optionalDependencies:
- ajv: 8.17.1
+ ajv: 8.18.0
- ajv-keywords@5.1.0(ajv@8.17.1):
+ ajv-keywords@5.1.0(ajv@8.18.0):
dependencies:
- ajv: 8.17.1
+ ajv: 8.18.0
fast-deep-equal: 3.1.3
ajv@6.12.6:
@@ -5775,14 +5675,14 @@ snapshots:
json-schema-traverse: 0.4.1
uri-js: 4.4.1
- ajv@8.17.1:
+ ajv@8.18.0:
dependencies:
fast-deep-equal: 3.1.3
fast-uri: 3.1.0
json-schema-traverse: 1.0.0
require-from-string: 2.0.2
- alien-signals@3.1.1: {}
+ alien-signals@3.1.2: {}
ansi-regex@5.0.1: {}
@@ -5811,13 +5711,11 @@ snapshots:
array-find-index@1.0.2: {}
- array-union@2.1.0: {}
-
- asciinema-player@3.13.5:
+ asciinema-player@3.14.15:
dependencies:
- '@babel/runtime': 7.28.4
- solid-js: 1.9.10
- solid-transition-group: 0.2.3(solid-js@1.9.10)
+ '@babel/runtime': 7.28.6
+ solid-js: 1.9.11
+ solid-transition-group: 0.2.3(solid-js@1.9.11)
assertion-error@2.0.1: {}
@@ -5827,17 +5725,21 @@ snapshots:
atob@2.1.2: {}
- axe-core@4.11.0: {}
+ axe-core@4.11.1: {}
axobject-query@4.1.0: {}
balanced-match@1.0.2: {}
- balanced-match@2.0.0: {}
+ balanced-match@3.0.1: {}
+
+ balanced-match@4.0.2:
+ dependencies:
+ jackspeak: 4.2.3
base64-js@1.5.1: {}
- baseline-browser-mapping@2.9.9: {}
+ baseline-browser-mapping@2.9.19: {}
big.js@5.2.2: {}
@@ -5854,15 +5756,19 @@ snapshots:
dependencies:
balanced-match: 1.0.2
+ brace-expansion@5.0.2:
+ dependencies:
+ balanced-match: 4.0.2
+
braces@3.0.3:
dependencies:
fill-range: 7.1.1
browserslist@4.28.1:
dependencies:
- baseline-browser-mapping: 2.9.9
- caniuse-lite: 1.0.30001760
- electron-to-chromium: 1.5.267
+ baseline-browser-mapping: 2.9.19
+ caniuse-lite: 1.0.30001770
+ electron-to-chromium: 1.5.286
node-releases: 2.0.27
update-browserslist-db: 1.2.3(browserslist@4.28.1)
@@ -5879,13 +5785,13 @@ snapshots:
bytes@3.1.2: {}
- cacheable@2.3.1:
+ cacheable@2.3.2:
dependencies:
- '@cacheable/memory': 2.0.6
- '@cacheable/utils': 2.3.2
- hookified: 1.14.0
- keyv: 5.5.5
- qified: 0.5.3
+ '@cacheable/memory': 2.0.7
+ '@cacheable/utils': 2.3.4
+ hookified: 1.15.1
+ keyv: 5.6.0
+ qified: 0.6.0
callsites@3.1.0: {}
@@ -5900,9 +5806,9 @@ snapshots:
camelcase@8.0.0: {}
- caniuse-lite@1.0.30001760: {}
+ caniuse-lite@1.0.30001770: {}
- chai@6.2.1: {}
+ chai@6.2.2: {}
chalk@4.1.2:
dependencies:
@@ -5932,19 +5838,19 @@ snapshots:
chart.js: 4.5.1
hammerjs: 2.0.8
- chevrotain-allstar@0.3.1(chevrotain@11.0.3):
+ chevrotain-allstar@0.3.1(chevrotain@11.1.1):
dependencies:
- chevrotain: 11.0.3
- lodash-es: 4.17.22
+ chevrotain: 11.1.1
+ lodash-es: 4.17.23
- chevrotain@11.0.3:
+ chevrotain@11.1.1:
dependencies:
- '@chevrotain/cst-dts-gen': 11.0.3
- '@chevrotain/gast': 11.0.3
- '@chevrotain/regexp-to-ast': 11.0.3
- '@chevrotain/types': 11.0.3
- '@chevrotain/utils': 11.0.3
- lodash-es: 4.17.21
+ '@chevrotain/cst-dts-gen': 11.1.1
+ '@chevrotain/gast': 11.1.1
+ '@chevrotain/regexp-to-ast': 11.1.1
+ '@chevrotain/types': 11.1.1
+ '@chevrotain/utils': 11.1.1
+ lodash-es: 4.17.23
chokidar@3.6.0:
dependencies:
@@ -5962,7 +5868,7 @@ snapshots:
chrome-trace-event@1.0.4: {}
- ci-info@4.3.1: {}
+ ci-info@4.4.0: {}
citeproc@2.4.63: {}
@@ -5970,7 +5876,7 @@ snapshots:
dependencies:
escape-string-regexp: 1.0.5
- clippie@4.1.9: {}
+ clippie@4.1.10: {}
clone-deep@4.0.1:
dependencies:
@@ -5982,7 +5888,7 @@ snapshots:
dependencies:
typo-js: 1.3.1
- codemirror@5.65.20: {}
+ codemirror@5.65.21: {}
color-convert@2.0.1:
dependencies:
@@ -5998,7 +5904,7 @@ snapshots:
commander@12.1.0: {}
- commander@14.0.2: {}
+ commander@14.0.3: {}
commander@2.20.3: {}
@@ -6008,7 +5914,7 @@ snapshots:
commander@8.3.0: {}
- comment-parser@1.4.1: {}
+ comment-parser@1.4.5: {}
compare-versions@6.1.1: {}
@@ -6016,7 +5922,7 @@ snapshots:
confbox@0.1.8: {}
- core-js-compat@3.47.0:
+ core-js-compat@3.48.0:
dependencies:
browserslist: 4.28.1
@@ -6047,9 +5953,9 @@ snapshots:
shebang-command: 2.0.0
which: 2.0.2
- css-functions-list@3.2.3: {}
+ css-functions-list@3.3.3: {}
- css-loader@7.1.2(webpack@5.104.0):
+ css-loader@7.1.4(webpack@5.105.2):
dependencies:
icss-utils: 5.1.0(postcss@8.5.6)
postcss: 8.5.6
@@ -6058,9 +5964,9 @@ snapshots:
postcss-modules-scope: 3.2.1(postcss@8.5.6)
postcss-modules-values: 4.0.0(postcss@8.5.6)
postcss-value-parser: 4.2.0
- semver: 7.7.3
+ semver: 7.7.4
optionalDependencies:
- webpack: 5.104.0(webpack-cli@6.0.1)
+ webpack: 5.105.2(webpack-cli@6.0.1)
css-select@5.2.2:
dependencies:
@@ -6165,7 +6071,7 @@ snapshots:
d3-quadtree: 3.0.1
d3-timer: 3.0.1
- d3-format@3.1.0: {}
+ d3-format@3.1.2: {}
d3-geo@3.1.1:
dependencies:
@@ -6200,7 +6106,7 @@ snapshots:
d3-scale@4.0.2:
dependencies:
d3-array: 3.2.4
- d3-format: 3.1.0
+ d3-format: 3.1.2
d3-interpolate: 3.0.1
d3-time: 3.1.0
d3-time-format: 4.1.0
@@ -6257,7 +6163,7 @@ snapshots:
d3-ease: 3.0.1
d3-fetch: 3.0.1
d3-force: 3.0.0
- d3-format: 3.1.0
+ d3-format: 3.1.2
d3-geo: 3.1.1
d3-hierarchy: 3.1.2
d3-interpolate: 3.0.1
@@ -6278,7 +6184,7 @@ snapshots:
dagre-d3-es@7.0.13:
dependencies:
d3: 7.9.0
- lodash-es: 4.17.22
+ lodash-es: 4.17.23
damerau-levenshtein@1.0.8: {}
@@ -6292,7 +6198,7 @@ snapshots:
dependencies:
ms: 2.1.3
- decode-named-character-reference@1.2.0:
+ decode-named-character-reference@1.3.0:
dependencies:
character-entities: 2.0.2
@@ -6319,10 +6225,6 @@ snapshots:
didyoumean@1.2.2: {}
- dir-glob@3.0.1:
- dependencies:
- path-type: 4.0.0
-
dlv@1.1.3: {}
doctrine@2.1.0:
@@ -6366,11 +6268,13 @@ snapshots:
dependencies:
'@types/codemirror': 5.60.17
'@types/marked': 4.3.2
- codemirror: 5.65.20
+ codemirror: 5.65.21
codemirror-spell-checker: 1.1.2
marked: 4.3.0
- electron-to-chromium@1.5.267: {}
+ electron-to-chromium@1.5.286: {}
+
+ elkjs@0.9.3: {}
emoji-regex@10.6.0: {}
@@ -6380,13 +6284,17 @@ snapshots:
emojis-list@3.0.0: {}
- enhanced-resolve@5.18.4:
+ enhanced-resolve@5.19.0:
dependencies:
graceful-fs: 4.2.11
tapable: 2.3.0
entities@4.5.0: {}
+ entities@6.0.1: {}
+
+ entities@7.0.1: {}
+
env-paths@2.2.1: {}
envinfo@7.21.0: {}
@@ -6399,71 +6307,42 @@ snapshots:
es-module-lexer@2.0.0: {}
- esbuild-loader@4.4.0(webpack@5.104.0):
+ esbuild-loader@4.4.2(webpack@5.105.2):
dependencies:
- esbuild: 0.25.12
- get-tsconfig: 4.13.0
+ esbuild: 0.27.3
+ get-tsconfig: 4.13.6
loader-utils: 2.0.4
- webpack: 5.104.0(webpack-cli@6.0.1)
+ webpack: 5.105.2(webpack-cli@6.0.1)
webpack-sources: 1.4.3
- esbuild@0.25.12:
+ esbuild@0.27.3:
optionalDependencies:
- '@esbuild/aix-ppc64': 0.25.12
- '@esbuild/android-arm': 0.25.12
- '@esbuild/android-arm64': 0.25.12
- '@esbuild/android-x64': 0.25.12
- '@esbuild/darwin-arm64': 0.25.12
- '@esbuild/darwin-x64': 0.25.12
- '@esbuild/freebsd-arm64': 0.25.12
- '@esbuild/freebsd-x64': 0.25.12
- '@esbuild/linux-arm': 0.25.12
- '@esbuild/linux-arm64': 0.25.12
- '@esbuild/linux-ia32': 0.25.12
- '@esbuild/linux-loong64': 0.25.12
- '@esbuild/linux-mips64el': 0.25.12
- '@esbuild/linux-ppc64': 0.25.12
- '@esbuild/linux-riscv64': 0.25.12
- '@esbuild/linux-s390x': 0.25.12
- '@esbuild/linux-x64': 0.25.12
- '@esbuild/netbsd-arm64': 0.25.12
- '@esbuild/netbsd-x64': 0.25.12
- '@esbuild/openbsd-arm64': 0.25.12
- '@esbuild/openbsd-x64': 0.25.12
- '@esbuild/openharmony-arm64': 0.25.12
- '@esbuild/sunos-x64': 0.25.12
- '@esbuild/win32-arm64': 0.25.12
- '@esbuild/win32-ia32': 0.25.12
- '@esbuild/win32-x64': 0.25.12
-
- esbuild@0.27.2:
- optionalDependencies:
- '@esbuild/aix-ppc64': 0.27.2
- '@esbuild/android-arm': 0.27.2
- '@esbuild/android-arm64': 0.27.2
- '@esbuild/android-x64': 0.27.2
- '@esbuild/darwin-arm64': 0.27.2
- '@esbuild/darwin-x64': 0.27.2
- '@esbuild/freebsd-arm64': 0.27.2
- '@esbuild/freebsd-x64': 0.27.2
- '@esbuild/linux-arm': 0.27.2
- '@esbuild/linux-arm64': 0.27.2
- '@esbuild/linux-ia32': 0.27.2
- '@esbuild/linux-loong64': 0.27.2
- '@esbuild/linux-mips64el': 0.27.2
- '@esbuild/linux-ppc64': 0.27.2
- '@esbuild/linux-riscv64': 0.27.2
- '@esbuild/linux-s390x': 0.27.2
- '@esbuild/linux-x64': 0.27.2
- '@esbuild/netbsd-arm64': 0.27.2
- '@esbuild/netbsd-x64': 0.27.2
- '@esbuild/openbsd-arm64': 0.27.2
- '@esbuild/openbsd-x64': 0.27.2
- '@esbuild/openharmony-arm64': 0.27.2
- '@esbuild/sunos-x64': 0.27.2
- '@esbuild/win32-arm64': 0.27.2
- '@esbuild/win32-ia32': 0.27.2
- '@esbuild/win32-x64': 0.27.2
+ '@esbuild/aix-ppc64': 0.27.3
+ '@esbuild/android-arm': 0.27.3
+ '@esbuild/android-arm64': 0.27.3
+ '@esbuild/android-x64': 0.27.3
+ '@esbuild/darwin-arm64': 0.27.3
+ '@esbuild/darwin-x64': 0.27.3
+ '@esbuild/freebsd-arm64': 0.27.3
+ '@esbuild/freebsd-x64': 0.27.3
+ '@esbuild/linux-arm': 0.27.3
+ '@esbuild/linux-arm64': 0.27.3
+ '@esbuild/linux-ia32': 0.27.3
+ '@esbuild/linux-loong64': 0.27.3
+ '@esbuild/linux-mips64el': 0.27.3
+ '@esbuild/linux-ppc64': 0.27.3
+ '@esbuild/linux-riscv64': 0.27.3
+ '@esbuild/linux-s390x': 0.27.3
+ '@esbuild/linux-x64': 0.27.3
+ '@esbuild/netbsd-arm64': 0.27.3
+ '@esbuild/netbsd-x64': 0.27.3
+ '@esbuild/openbsd-arm64': 0.27.3
+ '@esbuild/openbsd-x64': 0.27.3
+ '@esbuild/openharmony-arm64': 0.27.3
+ '@esbuild/sunos-x64': 0.27.3
+ '@esbuild/win32-arm64': 0.27.3
+ '@esbuild/win32-ia32': 0.27.3
+ '@esbuild/win32-x64': 0.27.3
escalade@3.2.0: {}
@@ -6474,7 +6353,7 @@ snapshots:
eslint-compat-utils@0.6.5(eslint@9.39.2(jiti@2.6.1)):
dependencies:
eslint: 9.39.2(jiti@2.6.1)
- semver: 7.7.3
+ semver: 7.7.4
eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)):
dependencies:
@@ -6482,7 +6361,7 @@ snapshots:
eslint-import-context@0.1.9(unrs-resolver@1.11.1):
dependencies:
- get-tsconfig: 4.13.0
+ get-tsconfig: 4.13.6
stable-hash-x: 0.2.0
optionalDependencies:
unrs-resolver: 1.11.1
@@ -6495,30 +6374,30 @@ snapshots:
transitivePeerDependencies:
- supports-color
- eslint-import-resolver-typescript@4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1)):
+ eslint-import-resolver-typescript@4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1)):
dependencies:
debug: 4.4.3
eslint: 9.39.2(jiti@2.6.1)
eslint-import-context: 0.1.9(unrs-resolver@1.11.1)
- get-tsconfig: 4.13.0
+ get-tsconfig: 4.13.6
is-bun-module: 2.0.0
stable-hash-x: 0.2.0
tinyglobby: 0.2.15
unrs-resolver: 1.11.1
optionalDependencies:
- eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1))
- eslint-plugin-import-x: 4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1))
+ eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1))
+ eslint-plugin-import-x: 4.16.1(@typescript-eslint/utils@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1))
transitivePeerDependencies:
- supports-color
- eslint-module-utils@2.12.1(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)):
+ eslint-module-utils@2.12.1(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)):
dependencies:
debug: 3.2.7
optionalDependencies:
- '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/parser': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
eslint: 9.39.2(jiti@2.6.1)
eslint-import-resolver-node: 0.3.9
- eslint-import-resolver-typescript: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1))
+ eslint-import-resolver-typescript: 4.4.4(eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)))(eslint-plugin-import@2.32.0)(eslint@9.39.2(jiti@2.6.1))
transitivePeerDependencies:
- supports-color
@@ -6551,8 +6430,8 @@ snapshots:
'@eslint/eslintrc': 3.3.3
'@eslint/js': 9.39.2
'@github/browserslist-config': 1.0.0
- '@typescript-eslint/eslint-plugin': 8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
- '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/eslint-plugin': 8.56.0(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/parser': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
aria-query: 5.3.2
eslint: 9.39.2(jiti@2.6.1)
eslint-config-prettier: 10.1.8(eslint@9.39.2(jiti@2.6.1))
@@ -6560,17 +6439,17 @@ snapshots:
eslint-plugin-eslint-comments: 3.2.0(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-filenames: 1.3.2(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-i18n-text: 1.0.1(eslint@9.39.2(jiti@2.6.1))
- eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1))
+ eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-jsx-a11y: 6.10.2(eslint@9.39.2(jiti@2.6.1))
eslint-plugin-no-only-tests: 3.3.0
- eslint-plugin-prettier: 5.5.4(@types/eslint@9.6.1)(eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(prettier@3.7.4)
+ eslint-plugin-prettier: 5.5.5(@types/eslint@9.6.1)(eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(prettier@3.8.1)
eslint-rule-documentation: 1.0.23
globals: 16.5.0
jsx-ast-utils: 3.3.5
- prettier: 3.7.4
+ prettier: 3.8.1
svg-element-attributes: 1.3.1
typescript: 5.9.3
- typescript-eslint: 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ typescript-eslint: 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
transitivePeerDependencies:
- '@types/eslint'
- eslint-import-resolver-typescript
@@ -6581,25 +6460,25 @@ snapshots:
dependencies:
eslint: 9.39.2(jiti@2.6.1)
- eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)):
+ eslint-plugin-import-x@4.16.1(@typescript-eslint/utils@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint@9.39.2(jiti@2.6.1)):
dependencies:
- '@typescript-eslint/types': 8.50.0
- comment-parser: 1.4.1
+ '@typescript-eslint/types': 8.56.0
+ comment-parser: 1.4.5
debug: 4.4.3
eslint: 9.39.2(jiti@2.6.1)
eslint-import-context: 0.1.9(unrs-resolver@1.11.1)
is-glob: 4.0.3
- minimatch: 10.1.1
- semver: 7.7.3
+ minimatch: 10.2.1
+ semver: 7.7.4
stable-hash-x: 0.2.0
unrs-resolver: 1.11.1
optionalDependencies:
- '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
eslint-import-resolver-node: 0.3.9
transitivePeerDependencies:
- supports-color
- eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)):
+ eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1)):
dependencies:
'@rtsao/scc': 1.1.0
array-includes: '@nolyfill/array-includes@1.0.44'
@@ -6610,7 +6489,7 @@ snapshots:
doctrine: 2.1.0
eslint: 9.39.2(jiti@2.6.1)
eslint-import-resolver-node: 0.3.9
- eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1))
+ eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@4.4.4)(eslint@9.39.2(jiti@2.6.1))
hasown: '@nolyfill/hasown@1.0.44'
is-core-module: '@nolyfill/is-core-module@1.0.39'
is-glob: 4.0.3
@@ -6622,7 +6501,7 @@ snapshots:
string.prototype.trimend: '@nolyfill/string.prototype.trimend@1.0.44'
tsconfig-paths: 3.15.0
optionalDependencies:
- '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/parser': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
transitivePeerDependencies:
- eslint-import-resolver-typescript
- eslint-import-resolver-webpack
@@ -6634,7 +6513,7 @@ snapshots:
array-includes: '@nolyfill/array-includes@1.0.44'
array.prototype.flatmap: '@nolyfill/array.prototype.flatmap@1.0.44'
ast-types-flow: 0.0.8
- axe-core: 4.11.0
+ axe-core: 4.11.1
axobject-query: 4.1.0
damerau-levenshtein: 1.0.8
emoji-regex: 9.2.2
@@ -6649,57 +6528,55 @@ snapshots:
eslint-plugin-no-only-tests@3.3.0: {}
- eslint-plugin-playwright@2.4.0(eslint@9.39.2(jiti@2.6.1)):
+ eslint-plugin-playwright@2.5.1(eslint@9.39.2(jiti@2.6.1)):
dependencies:
eslint: 9.39.2(jiti@2.6.1)
globals: 16.5.0
- eslint-plugin-prettier@5.5.4(@types/eslint@9.6.1)(eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(prettier@3.7.4):
+ eslint-plugin-prettier@5.5.5(@types/eslint@9.6.1)(eslint-config-prettier@10.1.8(eslint@9.39.2(jiti@2.6.1)))(eslint@9.39.2(jiti@2.6.1))(prettier@3.8.1):
dependencies:
eslint: 9.39.2(jiti@2.6.1)
- prettier: 3.7.4
- prettier-linter-helpers: 1.0.0
- synckit: 0.11.11
+ prettier: 3.8.1
+ prettier-linter-helpers: 1.0.1
+ synckit: 0.11.12
optionalDependencies:
'@types/eslint': 9.6.1
eslint-config-prettier: 10.1.8(eslint@9.39.2(jiti@2.6.1))
- eslint-plugin-regexp@2.10.0(eslint@9.39.2(jiti@2.6.1)):
+ eslint-plugin-regexp@3.0.0(eslint@9.39.2(jiti@2.6.1)):
dependencies:
- '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1))
'@eslint-community/regexpp': 4.12.2
- comment-parser: 1.4.1
+ comment-parser: 1.4.5
eslint: 9.39.2(jiti@2.6.1)
- jsdoc-type-pratt-parser: 4.8.0
+ jsdoc-type-pratt-parser: 7.1.1
refa: 0.12.1
regexp-ast-analysis: 0.7.1
scslre: 0.3.0
- eslint-plugin-sonarjs@3.0.5(eslint@9.39.2(jiti@2.6.1)):
+ eslint-plugin-sonarjs@3.0.7(eslint@9.39.2(jiti@2.6.1)):
dependencies:
- '@eslint-community/regexpp': 4.12.1
+ '@eslint-community/regexpp': 4.12.2
builtin-modules: 3.3.0
bytes: 3.1.2
eslint: 9.39.2(jiti@2.6.1)
functional-red-black-tree: 1.0.1
jsx-ast-utils-x: 0.1.0
lodash.merge: 4.6.2
- minimatch: 9.0.5
+ minimatch: 10.1.2
scslre: 0.3.0
- semver: 7.7.2
+ semver: 7.7.4
typescript: 5.9.3
- eslint-plugin-unicorn@62.0.0(eslint@9.39.2(jiti@2.6.1)):
+ eslint-plugin-unicorn@63.0.0(eslint@9.39.2(jiti@2.6.1)):
dependencies:
'@babel/helper-validator-identifier': 7.28.5
- '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1))
- '@eslint/plugin-kit': 0.4.1
+ '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1))
change-case: 5.4.4
- ci-info: 4.3.1
+ ci-info: 4.4.0
clean-regexp: 1.0.0
- core-js-compat: 3.47.0
+ core-js-compat: 3.48.0
eslint: 9.39.2(jiti@2.6.1)
- esquery: 1.6.0
find-up-simple: 1.0.1
globals: 16.5.0
indent-string: 5.0.0
@@ -6708,39 +6585,39 @@ snapshots:
pluralize: 8.0.0
regexp-tree: 0.1.27
regjsparser: 0.13.0
- semver: 7.7.3
+ semver: 7.7.4
strip-indent: 4.1.1
- eslint-plugin-vue-scoped-css@2.12.0(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1))):
+ eslint-plugin-vue-scoped-css@2.12.0(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@9.39.2(jiti@2.6.1))):
dependencies:
- '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1))
eslint: 9.39.2(jiti@2.6.1)
eslint-compat-utils: 0.6.5(eslint@9.39.2(jiti@2.6.1))
- lodash: 4.17.21
+ lodash: 4.17.23
postcss: 8.5.6
postcss-safe-parser: 6.0.0(postcss@8.5.6)
postcss-scss: 4.0.9(postcss@8.5.6)
postcss-selector-parser: 7.1.1
postcss-styl: 0.12.3
- vue-eslint-parser: 10.2.0(eslint@9.39.2(jiti@2.6.1))
+ vue-eslint-parser: 10.4.0(eslint@9.39.2(jiti@2.6.1))
transitivePeerDependencies:
- supports-color
- eslint-plugin-vue@10.6.2(@stylistic/eslint-plugin@5.6.1(eslint@9.39.2(jiti@2.6.1)))(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1))):
+ eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.8.0(eslint@9.39.2(jiti@2.6.1)))(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@9.39.2(jiti@2.6.1))):
dependencies:
- '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1))
eslint: 9.39.2(jiti@2.6.1)
natural-compare: 1.4.0
nth-check: 2.1.1
postcss-selector-parser: 7.1.1
- semver: 7.7.3
- vue-eslint-parser: 10.2.0(eslint@9.39.2(jiti@2.6.1))
+ semver: 7.7.4
+ vue-eslint-parser: 10.4.0(eslint@9.39.2(jiti@2.6.1))
xml-name-validator: 4.0.0
optionalDependencies:
- '@stylistic/eslint-plugin': 5.6.1(eslint@9.39.2(jiti@2.6.1))
- '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@stylistic/eslint-plugin': 5.8.0(eslint@9.39.2(jiti@2.6.1))
+ '@typescript-eslint/parser': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
- eslint-plugin-wc@3.0.2(eslint@9.39.2(jiti@2.6.1)):
+ eslint-plugin-wc@3.1.0(eslint@9.39.2(jiti@2.6.1)):
dependencies:
eslint: 9.39.2(jiti@2.6.1)
is-valid-element-name: 1.0.0
@@ -6758,13 +6635,22 @@ snapshots:
esrecurse: 4.3.0
estraverse: 5.3.0
+ eslint-scope@9.1.0:
+ dependencies:
+ '@types/esrecurse': 4.3.1
+ '@types/estree': 1.0.8
+ esrecurse: 4.3.0
+ estraverse: 5.3.0
+
eslint-visitor-keys@3.4.3: {}
eslint-visitor-keys@4.2.1: {}
+ eslint-visitor-keys@5.0.0: {}
+
eslint@9.39.2(jiti@2.6.1):
dependencies:
- '@eslint-community/eslint-utils': 4.9.0(eslint@9.39.2(jiti@2.6.1))
+ '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@2.6.1))
'@eslint-community/regexpp': 4.12.2
'@eslint/config-array': 0.21.1
'@eslint/config-helpers': 0.4.2
@@ -6784,7 +6670,7 @@ snapshots:
eslint-scope: 8.4.0
eslint-visitor-keys: 4.2.1
espree: 10.4.0
- esquery: 1.6.0
+ esquery: 1.7.0
esutils: 2.0.3
fast-deep-equal: 3.1.3
file-entry-cache: 8.0.0
@@ -6809,7 +6695,13 @@ snapshots:
acorn-jsx: 5.3.2(acorn@8.15.0)
eslint-visitor-keys: 4.2.1
- esquery@1.6.0:
+ espree@11.1.0:
+ dependencies:
+ acorn: 8.15.0
+ acorn-jsx: 5.3.2(acorn@8.15.0)
+ eslint-visitor-keys: 5.0.0
+
+ esquery@1.7.0:
dependencies:
estraverse: 5.3.0
@@ -6855,7 +6747,7 @@ snapshots:
fastest-levenshtein@1.0.16: {}
- fastq@1.19.1:
+ fastq@1.20.1:
dependencies:
reusify: 1.1.0
@@ -6871,9 +6763,9 @@ snapshots:
fflate@0.8.2: {}
- file-entry-cache@11.1.1:
+ file-entry-cache@11.1.2:
dependencies:
- flat-cache: 6.1.19
+ flat-cache: 6.1.20
file-entry-cache@8.0.0:
dependencies:
@@ -6900,11 +6792,11 @@ snapshots:
flatted: 3.3.3
keyv: 4.5.4
- flat-cache@6.1.19:
+ flat-cache@6.1.20:
dependencies:
- cacheable: 2.3.1
+ cacheable: 2.3.2
flatted: 3.3.3
- hookified: 1.14.0
+ hookified: 1.15.1
flat@5.0.2: {}
@@ -6922,7 +6814,7 @@ snapshots:
get-east-asian-width@1.4.0: {}
- get-tsconfig@4.13.0:
+ get-tsconfig@4.13.6:
dependencies:
resolve-pkg-maps: 1.0.0
@@ -6959,14 +6851,16 @@ snapshots:
globals@16.5.0: {}
- globby@11.1.0:
+ globals@17.3.0: {}
+
+ globby@16.1.0:
dependencies:
- array-union: 2.1.0
- dir-glob: 3.0.1
+ '@sindresorhus/merge-streams': 4.0.0
fast-glob: 3.3.3
- ignore: 5.3.2
- merge2: 1.4.1
- slash: 3.0.0
+ ignore: 7.0.5
+ is-path-inside: 4.0.0
+ slash: 5.1.0
+ unicorn-magic: 0.4.0
globjoin@0.1.4: {}
@@ -6976,23 +6870,31 @@ snapshots:
hammerjs@2.0.8: {}
- happy-dom@20.0.11:
+ happy-dom@20.6.1:
dependencies:
- '@types/node': 20.19.27
+ '@types/node': 25.2.3
'@types/whatwg-mimetype': 3.0.2
+ '@types/ws': 8.18.1
+ entities: 6.0.1
whatwg-mimetype: 3.0.0
+ ws: 8.19.0
+ transitivePeerDependencies:
+ - bufferutil
+ - utf-8-validate
has-flag@4.0.0: {}
+ has-flag@5.0.1: {}
+
hash-sum@2.0.0: {}
- hashery@1.3.0:
+ hashery@1.4.0:
dependencies:
- hookified: 1.14.0
+ hookified: 1.15.1
- hookified@1.14.0: {}
+ hookified@1.15.1: {}
- html-tags@3.3.1: {}
+ html-tags@5.1.0: {}
htmlparser2@8.0.2:
dependencies:
@@ -7029,6 +6931,8 @@ snapshots:
pkg-dir: 4.2.0
resolve-cwd: 3.0.0
+ import-meta-resolve@4.2.0: {}
+
imurmurhash@0.1.4: {}
indent-string@5.0.0: {}
@@ -7071,7 +6975,7 @@ snapshots:
is-bun-module@2.0.0:
dependencies:
- semver: 7.7.3
+ semver: 7.7.4
is-decimal@2.0.1: {}
@@ -7087,6 +6991,8 @@ snapshots:
is-number@7.0.0: {}
+ is-path-inside@4.0.0: {}
+
is-plain-object@2.0.4:
dependencies:
isobject: 3.0.1
@@ -7103,9 +7009,13 @@ snapshots:
isobject@3.0.1: {}
+ jackspeak@4.2.3:
+ dependencies:
+ '@isaacs/cliui': 9.0.0
+
jest-worker@27.5.1:
dependencies:
- '@types/node': 25.0.3
+ '@types/node': 25.2.3
merge-stream: 2.0.0
supports-color: 8.1.1
@@ -7113,7 +7023,7 @@ snapshots:
jiti@2.6.1: {}
- jquery@3.7.1: {}
+ jquery@4.0.0: {}
js-levenshtein-esm@2.0.0: {}
@@ -7125,7 +7035,7 @@ snapshots:
dependencies:
argparse: 2.0.1
- jsdoc-type-pratt-parser@4.8.0: {}
+ jsdoc-type-pratt-parser@7.1.1: {}
jsesc@3.1.0: {}
@@ -7160,7 +7070,7 @@ snapshots:
just-extend@5.1.1: {}
- katex@0.16.27:
+ katex@0.16.28:
dependencies:
commander: 8.3.0
@@ -7168,7 +7078,7 @@ snapshots:
dependencies:
json-buffer: 3.0.1
- keyv@5.5.5:
+ keyv@5.6.0:
dependencies:
'@keyv/serialize': 1.1.1
@@ -7182,13 +7092,13 @@ snapshots:
known-css-properties@0.37.0: {}
- langium@3.3.1:
+ langium@4.2.1:
dependencies:
- chevrotain: 11.0.3
- chevrotain-allstar: 0.3.1(chevrotain@11.0.3)
+ chevrotain: 11.1.1
+ chevrotain-allstar: 0.3.1(chevrotain@11.1.1)
vscode-languageserver: 9.0.1
vscode-languageserver-textdocument: 1.0.12
- vscode-uri: 3.0.8
+ vscode-uri: 3.1.0
language-subtag-registry@0.3.23: {}
@@ -7229,9 +7139,7 @@ snapshots:
dependencies:
p-locate: 5.0.0
- lodash-es@4.17.21: {}
-
- lodash-es@4.17.22: {}
+ lodash-es@4.17.23: {}
lodash.camelcase@4.3.0: {}
@@ -7247,7 +7155,7 @@ snapshots:
lodash.upperfirst@4.3.1: {}
- lodash@4.17.21: {}
+ lodash@4.17.23: {}
magic-string@0.30.21:
dependencies:
@@ -7255,7 +7163,7 @@ snapshots:
map-obj@5.0.2: {}
- markdown-it@14.1.0:
+ markdown-it@14.1.1:
dependencies:
argparse: 2.0.1
entities: 4.5.0
@@ -7266,15 +7174,15 @@ snapshots:
markdownlint-cli@0.47.0:
dependencies:
- commander: 14.0.2
+ commander: 14.0.3
deep-extend: 0.6.0
ignore: 7.0.5
js-yaml: 4.1.1
jsonc-parser: 3.3.1
jsonpointer: 5.0.1
- markdown-it: 14.1.0
+ markdown-it: 14.1.1
markdownlint: 0.40.0
- minimatch: 10.1.1
+ minimatch: 10.1.3
run-con: 1.3.2
smol-toml: 1.5.2
tinyglobby: 0.2.15
@@ -7301,14 +7209,14 @@ snapshots:
marked@4.3.0: {}
- material-icon-theme@5.29.0:
+ material-icon-theme@5.31.0:
dependencies:
chroma-js: 3.2.0
events: 3.3.0
fast-deep-equal: 3.1.3
svgson: 5.3.1
- mathml-tag-names@2.1.3: {}
+ mathml-tag-names@4.0.0: {}
mdn-data@2.0.28: {}
@@ -7316,17 +7224,17 @@ snapshots:
mdurl@2.0.0: {}
- meow@13.2.0: {}
+ meow@14.0.0: {}
merge-stream@2.0.0: {}
merge2@1.4.1: {}
- mermaid@11.12.2:
+ mermaid@11.12.3:
dependencies:
- '@braintree/sanitize-url': 7.1.1
+ '@braintree/sanitize-url': 7.1.2
'@iconify/utils': 3.1.0
- '@mermaid-js/parser': 0.6.3
+ '@mermaid-js/parser': 1.0.0
'@types/d3': 7.4.3
cytoscape: 3.33.1
cytoscape-cose-bilkent: 4.1.0(cytoscape@3.33.1)
@@ -7336,9 +7244,9 @@ snapshots:
dagre-d3-es: 7.0.13
dayjs: 1.11.19
dompurify: 3.3.1
- katex: 0.16.27
+ katex: 0.16.28
khroma: 2.1.0
- lodash-es: 4.17.22
+ lodash-es: 4.17.23
marked: 16.4.2
roughjs: 4.6.6
stylis: 4.3.6
@@ -7347,7 +7255,7 @@ snapshots:
micromark-core-commonmark@2.0.3:
dependencies:
- decode-named-character-reference: 1.2.0
+ decode-named-character-reference: 1.3.0
devlop: 1.1.0
micromark-factory-destination: 2.0.1
micromark-factory-label: 2.0.1
@@ -7402,9 +7310,9 @@ snapshots:
micromark-extension-math@3.1.0:
dependencies:
- '@types/katex': 0.16.7
+ '@types/katex': 0.16.8
devlop: 1.1.0
- katex: 0.16.27
+ katex: 0.16.28
micromark-factory-space: 2.0.1
micromark-util-character: 2.1.1
micromark-util-symbol: 2.0.1
@@ -7499,7 +7407,7 @@ snapshots:
dependencies:
'@types/debug': 4.1.12
debug: 4.4.3
- decode-named-character-reference: 1.2.0
+ decode-named-character-reference: 1.3.0
devlop: 1.1.0
micromark-core-commonmark: 2.0.3
micromark-factory-space: 2.0.1
@@ -7528,15 +7436,23 @@ snapshots:
dependencies:
mime-db: 1.52.0
- mini-css-extract-plugin@2.9.4(webpack@5.104.0):
+ mini-css-extract-plugin@2.10.0(webpack@5.105.2):
dependencies:
schema-utils: 4.3.3
tapable: 2.3.0
- webpack: 5.104.0(webpack-cli@6.0.1)
+ webpack: 5.105.2(webpack-cli@6.0.1)
- minimatch@10.1.1:
+ minimatch@10.1.2:
dependencies:
- '@isaacs/brace-expansion': 5.0.0
+ '@isaacs/brace-expansion': 5.0.1
+
+ minimatch@10.1.3:
+ dependencies:
+ brace-expansion: 5.0.2
+
+ minimatch@10.2.1:
+ dependencies:
+ brace-expansion: 5.0.2
minimatch@3.1.2:
dependencies:
@@ -7553,13 +7469,13 @@ snapshots:
acorn: 8.15.0
pathe: 2.0.3
pkg-types: 1.3.1
- ufo: 1.6.1
+ ufo: 1.6.3
- monaco-editor-webpack-plugin@7.1.1(monaco-editor@0.55.1)(webpack@5.104.0):
+ monaco-editor-webpack-plugin@7.1.1(monaco-editor@0.55.1)(webpack@5.105.2):
dependencies:
loader-utils: 2.0.4
monaco-editor: 0.55.1
- webpack: 5.104.0(webpack-cli@6.0.1)
+ webpack: 5.105.2(webpack-cli@6.0.1)
monaco-editor@0.55.1:
dependencies:
@@ -7616,7 +7532,7 @@ snapshots:
dependencies:
wrappy: 1.0.2
- online-3d-viewer@0.17.0:
+ online-3d-viewer@0.18.0:
dependencies:
'@simonwep/pickr': 1.9.0
fflate: 0.8.2
@@ -7660,14 +7576,14 @@ snapshots:
'@types/unist': 2.0.11
character-entities-legacy: 3.0.0
character-reference-invalid: 2.0.1
- decode-named-character-reference: 1.2.0
+ decode-named-character-reference: 1.3.0
is-alphanumerical: 2.0.1
is-decimal: 2.0.1
is-hexadecimal: 2.0.1
parse-json@5.2.0:
dependencies:
- '@babel/code-frame': 7.27.1
+ '@babel/code-frame': 7.29.0
error-ex: 1.3.4
json-parse-even-better-errors: 2.3.1
lines-and-columns: 1.2.4
@@ -7684,13 +7600,11 @@ snapshots:
path-parse@1.0.7: {}
- path-type@4.0.0: {}
-
pathe@2.0.3: {}
pdfobject@2.3.1: {}
- perfect-debounce@2.0.0: {}
+ perfect-debounce@2.1.0: {}
picocolors@1.1.1: {}
@@ -7712,11 +7626,11 @@ snapshots:
mlly: 1.8.0
pathe: 2.0.3
- playwright-core@1.57.0: {}
+ playwright-core@1.58.2: {}
- playwright@1.57.0:
+ playwright@1.58.2:
dependencies:
- playwright-core: 1.57.0
+ playwright-core: 1.58.2
optionalDependencies:
fsevents: 2.3.2
@@ -7729,7 +7643,7 @@ snapshots:
path-data-parser: 0.1.0
points-on-curve: 0.2.0
- postcss-html@1.8.0:
+ postcss-html@1.8.1:
dependencies:
htmlparser2: 8.0.2
js-tokens: 9.0.1
@@ -7755,14 +7669,14 @@ snapshots:
optionalDependencies:
postcss: 8.5.6
- postcss-loader@8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.0):
+ postcss-loader@8.2.1(postcss@8.5.6)(typescript@5.9.3)(webpack@5.105.2):
dependencies:
cosmiconfig: 9.0.0(typescript@5.9.3)
jiti: 2.6.1
postcss: 8.5.6
- semver: 7.7.3
+ semver: 7.7.4
optionalDependencies:
- webpack: 5.104.0(webpack-cli@6.0.1)
+ webpack: 5.105.2(webpack-cli@6.0.1)
transitivePeerDependencies:
- typescript
@@ -7792,8 +7706,6 @@ snapshots:
postcss: 8.5.6
postcss-selector-parser: 6.1.2
- postcss-resolve-nested-selector@0.1.6: {}
-
postcss-safe-parser@6.0.0(postcss@8.5.6):
dependencies:
postcss: 8.5.6
@@ -7836,19 +7748,19 @@ snapshots:
prelude-ls@1.2.1: {}
- prettier-linter-helpers@1.0.0:
+ prettier-linter-helpers@1.0.1:
dependencies:
fast-diff: 1.3.0
- prettier@3.7.4: {}
+ prettier@3.8.1: {}
punycode.js@2.3.1: {}
punycode@2.3.1: {}
- qified@0.5.3:
+ qified@0.6.0:
dependencies:
- hookified: 1.14.0
+ hookified: 1.15.1
queue-microtask@1.2.3: {}
@@ -7909,32 +7821,35 @@ snapshots:
robust-predicates@3.0.2: {}
- rollup@4.53.5:
+ rollup@4.57.1:
dependencies:
'@types/estree': 1.0.8
optionalDependencies:
- '@rollup/rollup-android-arm-eabi': 4.53.5
- '@rollup/rollup-android-arm64': 4.53.5
- '@rollup/rollup-darwin-arm64': 4.53.5
- '@rollup/rollup-darwin-x64': 4.53.5
- '@rollup/rollup-freebsd-arm64': 4.53.5
- '@rollup/rollup-freebsd-x64': 4.53.5
- '@rollup/rollup-linux-arm-gnueabihf': 4.53.5
- '@rollup/rollup-linux-arm-musleabihf': 4.53.5
- '@rollup/rollup-linux-arm64-gnu': 4.53.5
- '@rollup/rollup-linux-arm64-musl': 4.53.5
- '@rollup/rollup-linux-loong64-gnu': 4.53.5
- '@rollup/rollup-linux-ppc64-gnu': 4.53.5
- '@rollup/rollup-linux-riscv64-gnu': 4.53.5
- '@rollup/rollup-linux-riscv64-musl': 4.53.5
- '@rollup/rollup-linux-s390x-gnu': 4.53.5
- '@rollup/rollup-linux-x64-gnu': 4.53.5
- '@rollup/rollup-linux-x64-musl': 4.53.5
- '@rollup/rollup-openharmony-arm64': 4.53.5
- '@rollup/rollup-win32-arm64-msvc': 4.53.5
- '@rollup/rollup-win32-ia32-msvc': 4.53.5
- '@rollup/rollup-win32-x64-gnu': 4.53.5
- '@rollup/rollup-win32-x64-msvc': 4.53.5
+ '@rollup/rollup-android-arm-eabi': 4.57.1
+ '@rollup/rollup-android-arm64': 4.57.1
+ '@rollup/rollup-darwin-arm64': 4.57.1
+ '@rollup/rollup-darwin-x64': 4.57.1
+ '@rollup/rollup-freebsd-arm64': 4.57.1
+ '@rollup/rollup-freebsd-x64': 4.57.1
+ '@rollup/rollup-linux-arm-gnueabihf': 4.57.1
+ '@rollup/rollup-linux-arm-musleabihf': 4.57.1
+ '@rollup/rollup-linux-arm64-gnu': 4.57.1
+ '@rollup/rollup-linux-arm64-musl': 4.57.1
+ '@rollup/rollup-linux-loong64-gnu': 4.57.1
+ '@rollup/rollup-linux-loong64-musl': 4.57.1
+ '@rollup/rollup-linux-ppc64-gnu': 4.57.1
+ '@rollup/rollup-linux-ppc64-musl': 4.57.1
+ '@rollup/rollup-linux-riscv64-gnu': 4.57.1
+ '@rollup/rollup-linux-riscv64-musl': 4.57.1
+ '@rollup/rollup-linux-s390x-gnu': 4.57.1
+ '@rollup/rollup-linux-x64-gnu': 4.57.1
+ '@rollup/rollup-linux-x64-musl': 4.57.1
+ '@rollup/rollup-openbsd-x64': 4.57.1
+ '@rollup/rollup-openharmony-arm64': 4.57.1
+ '@rollup/rollup-win32-arm64-msvc': 4.57.1
+ '@rollup/rollup-win32-ia32-msvc': 4.57.1
+ '@rollup/rollup-win32-x64-gnu': 4.57.1
+ '@rollup/rollup-win32-x64-msvc': 4.57.1
fsevents: 2.3.3
roughjs@4.6.6:
@@ -7959,36 +7874,34 @@ snapshots:
sax@1.2.4: {}
- sax@1.4.3: {}
+ sax@1.4.4: {}
schema-utils@4.3.3:
dependencies:
'@types/json-schema': 7.0.15
- ajv: 8.17.1
- ajv-formats: 2.1.1(ajv@8.17.1)
- ajv-keywords: 5.1.0(ajv@8.17.1)
+ ajv: 8.18.0
+ ajv-formats: 2.1.1(ajv@8.18.0)
+ ajv-keywords: 5.1.0(ajv@8.18.0)
scslre@0.3.0:
dependencies:
- '@eslint-community/regexpp': 4.12.1
+ '@eslint-community/regexpp': 4.12.2
refa: 0.12.1
regexp-ast-analysis: 0.7.1
semver@6.3.1: {}
- semver@7.7.2: {}
-
- semver@7.7.3: {}
+ semver@7.7.4: {}
serialize-javascript@6.0.2:
dependencies:
randombytes: 2.1.0
- seroval-plugins@1.3.3(seroval@1.3.2):
+ seroval-plugins@1.5.0(seroval@1.5.0):
dependencies:
- seroval: 1.3.2
+ seroval: 1.5.0
- seroval@1.3.2: {}
+ seroval@1.5.0: {}
shallow-clone@3.0.1:
dependencies:
@@ -8004,7 +7917,7 @@ snapshots:
signal-exit@4.1.0: {}
- slash@3.0.0: {}
+ slash@5.1.0: {}
slice-ansi@4.0.0:
dependencies:
@@ -8014,19 +7927,19 @@ snapshots:
smol-toml@1.5.2: {}
- solid-js@1.9.10:
+ solid-js@1.9.11:
dependencies:
csstype: 3.2.3
- seroval: 1.3.2
- seroval-plugins: 1.3.3(seroval@1.3.2)
+ seroval: 1.5.0
+ seroval-plugins: 1.5.0(seroval@1.5.0)
- solid-transition-group@0.2.3(solid-js@1.9.10):
+ solid-transition-group@0.2.3(solid-js@1.9.11):
dependencies:
- '@solid-primitives/refs': 1.1.2(solid-js@1.9.10)
- '@solid-primitives/transition-group': 1.1.2(solid-js@1.9.10)
- solid-js: 1.9.10
+ '@solid-primitives/refs': 1.1.2(solid-js@1.9.11)
+ '@solid-primitives/transition-group': 1.1.2(solid-js@1.9.11)
+ solid-js: 1.9.11
- sortablejs@1.15.6: {}
+ sortablejs@1.15.7: {}
source-list-map@2.0.1: {}
@@ -8073,7 +7986,7 @@ snapshots:
spdx-expression-parse: 3.0.1
spdx-ranges: 2.1.1
- spectral-cli-bundle@1.0.3:
+ spectral-cli-bundle@1.0.7:
optionalDependencies:
fsevents: 2.3.3
@@ -8100,6 +8013,11 @@ snapshots:
get-east-asian-width: 1.4.0
strip-ansi: 7.1.2
+ string-width@8.1.1:
+ dependencies:
+ get-east-asian-width: 1.4.0
+ strip-ansi: 7.1.2
+
strip-ansi@6.0.1:
dependencies:
ansi-regex: 5.0.1
@@ -8116,65 +8034,65 @@ snapshots:
style-search@0.1.0: {}
- stylelint-config-recommended@17.0.0(stylelint@16.26.1(typescript@5.9.3)):
+ stylelint-config-recommended@18.0.0(stylelint@17.3.0(typescript@5.9.3)):
dependencies:
- stylelint: 16.26.1(typescript@5.9.3)
+ stylelint: 17.3.0(typescript@5.9.3)
- stylelint-declaration-block-no-ignored-properties@2.8.0(stylelint@16.26.1(typescript@5.9.3)):
+ stylelint-declaration-block-no-ignored-properties@3.0.0(stylelint@17.3.0(typescript@5.9.3)):
dependencies:
- stylelint: 16.26.1(typescript@5.9.3)
+ stylelint: 17.3.0(typescript@5.9.3)
- stylelint-declaration-strict-value@1.10.11(stylelint@16.26.1(typescript@5.9.3)):
+ stylelint-declaration-strict-value@1.10.11(stylelint@17.3.0(typescript@5.9.3)):
dependencies:
- stylelint: 16.26.1(typescript@5.9.3)
+ stylelint: 17.3.0(typescript@5.9.3)
- stylelint-value-no-unknown-custom-properties@6.0.1(stylelint@16.26.1(typescript@5.9.3)):
+ stylelint-value-no-unknown-custom-properties@6.1.1(stylelint@17.3.0(typescript@5.9.3)):
dependencies:
postcss-value-parser: 4.2.0
resolve: 1.22.11
- stylelint: 16.26.1(typescript@5.9.3)
+ stylelint: 17.3.0(typescript@5.9.3)
- stylelint@16.26.1(typescript@5.9.3):
+ stylelint@17.3.0(typescript@5.9.3):
dependencies:
- '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4)
- '@csstools/css-syntax-patches-for-csstree': 1.0.21
- '@csstools/css-tokenizer': 3.0.4
- '@csstools/media-query-list-parser': 4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)
- '@csstools/selector-specificity': 5.0.0(postcss-selector-parser@7.1.1)
- '@dual-bundle/import-meta-resolve': 4.2.1
- balanced-match: 2.0.0
+ '@csstools/css-calc': 3.1.1(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)
+ '@csstools/css-parser-algorithms': 4.0.0(@csstools/css-tokenizer@4.0.0)
+ '@csstools/css-syntax-patches-for-csstree': 1.0.27
+ '@csstools/css-tokenizer': 4.0.0
+ '@csstools/media-query-list-parser': 5.0.0(@csstools/css-parser-algorithms@4.0.0(@csstools/css-tokenizer@4.0.0))(@csstools/css-tokenizer@4.0.0)
+ '@csstools/selector-resolve-nested': 4.0.0(postcss-selector-parser@7.1.1)
+ '@csstools/selector-specificity': 6.0.0(postcss-selector-parser@7.1.1)
+ balanced-match: 3.0.1
colord: 2.9.3
cosmiconfig: 9.0.0(typescript@5.9.3)
- css-functions-list: 3.2.3
+ css-functions-list: 3.3.3
css-tree: 3.1.0
debug: 4.4.3
fast-glob: 3.3.3
fastest-levenshtein: 1.0.16
- file-entry-cache: 11.1.1
+ file-entry-cache: 11.1.2
global-modules: 2.0.0
- globby: 11.1.0
+ globby: 16.1.0
globjoin: 0.1.4
- html-tags: 3.3.1
+ html-tags: 5.1.0
ignore: 7.0.5
+ import-meta-resolve: 4.2.0
imurmurhash: 0.1.4
is-plain-object: 5.0.0
known-css-properties: 0.37.0
- mathml-tag-names: 2.1.3
- meow: 13.2.0
+ mathml-tag-names: 4.0.0
+ meow: 14.0.0
micromatch: 4.0.8
normalize-path: 3.0.0
picocolors: 1.1.1
postcss: 8.5.6
- postcss-resolve-nested-selector: 0.1.6
postcss-safe-parser: 7.0.1(postcss@8.5.6)
postcss-selector-parser: 7.1.1
postcss-value-parser: 4.2.0
- resolve-from: 5.0.0
- string-width: 4.2.3
- supports-hyperlinks: 3.2.0
+ string-width: 8.1.1
+ supports-hyperlinks: 4.4.0
svg-tags: 1.0.0
table: 6.9.0
- write-file-atomic: 5.0.1
+ write-file-atomic: 7.0.0
transitivePeerDependencies:
- supports-color
- typescript
@@ -8204,6 +8122,8 @@ snapshots:
superstruct@0.10.13: {}
+ supports-color@10.2.2: {}
+
supports-color@7.2.0:
dependencies:
has-flag: 4.0.0
@@ -8212,10 +8132,10 @@ snapshots:
dependencies:
has-flag: 4.0.0
- supports-hyperlinks@3.2.0:
+ supports-hyperlinks@4.4.0:
dependencies:
- has-flag: 4.0.0
- supports-color: 7.2.0
+ has-flag: 5.0.1
+ supports-color: 10.2.2
supports-preserve-symlinks-flag@1.0.0: {}
@@ -8231,14 +8151,14 @@ snapshots:
css-what: 6.2.2
csso: 5.0.5
picocolors: 1.1.1
- sax: 1.4.3
+ sax: 1.4.4
svgson@5.3.1:
dependencies:
deep-rename-keys: 0.2.1
xml-reader: 2.4.3
- swagger-ui-dist@5.31.0:
+ swagger-ui-dist@5.31.1:
dependencies:
'@scarf/scarf': 1.4.0
@@ -8249,13 +8169,13 @@ snapshots:
transitivePeerDependencies:
- encoding
- synckit@0.11.11:
+ synckit@0.11.12:
dependencies:
'@pkgr/core': 0.2.9
table@6.9.0:
dependencies:
- ajv: 8.17.1
+ ajv: 8.18.0
lodash.truncate: 4.4.2
slice-ansi: 4.0.0
string-width: 4.2.3
@@ -8290,16 +8210,16 @@ snapshots:
tapable@2.3.0: {}
- terser-webpack-plugin@5.3.16(webpack@5.104.0):
+ terser-webpack-plugin@5.3.16(webpack@5.105.2):
dependencies:
'@jridgewell/trace-mapping': 0.3.31
jest-worker: 27.5.1
schema-utils: 4.3.3
serialize-javascript: 6.0.2
- terser: 5.44.1
- webpack: 5.104.0(webpack-cli@6.0.1)
+ terser: 5.46.0
+ webpack: 5.105.2(webpack-cli@6.0.1)
- terser@5.44.1:
+ terser@5.46.0:
dependencies:
'@jridgewell/source-map': 0.3.11
acorn: 8.15.0
@@ -8320,8 +8240,6 @@ snapshots:
tinybench@2.9.0: {}
- tinycolor2@1.6.0: {}
-
tinyexec@1.0.2: {}
tinyglobby@0.2.15:
@@ -8345,7 +8263,7 @@ snapshots:
tributejs@5.1.3: {}
- ts-api-utils@2.1.0(typescript@5.9.3):
+ ts-api-utils@2.4.0(typescript@5.9.3):
dependencies:
typescript: 5.9.3
@@ -8369,12 +8287,12 @@ snapshots:
type-fest@4.41.0: {}
- typescript-eslint@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3):
+ typescript-eslint@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3):
dependencies:
- '@typescript-eslint/eslint-plugin': 8.50.0(@typescript-eslint/parser@8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
- '@typescript-eslint/parser': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
- '@typescript-eslint/typescript-estree': 8.50.0(typescript@5.9.3)
- '@typescript-eslint/utils': 8.50.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/eslint-plugin': 8.56.0(@typescript-eslint/parser@8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/parser': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
+ '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3)
+ '@typescript-eslint/utils': 8.56.0(eslint@9.39.2(jiti@2.6.1))(typescript@5.9.3)
eslint: 9.39.2(jiti@2.6.1)
typescript: 5.9.3
transitivePeerDependencies:
@@ -8386,14 +8304,14 @@ snapshots:
uc.micro@2.1.0: {}
- ufo@1.6.1: {}
+ ufo@1.6.3: {}
uint8-to-base64@0.2.1: {}
- undici-types@6.21.0: {}
-
undici-types@7.16.0: {}
+ unicorn-magic@0.4.0: {}
+
unrs-resolver@1.11.1:
dependencies:
napi-postinstall: 0.3.4
@@ -8424,7 +8342,7 @@ snapshots:
escalade: 3.2.0
picocolors: 1.1.1
- updates@17.0.7: {}
+ updates@17.5.7: {}
uri-js@4.4.1:
dependencies:
@@ -8436,33 +8354,35 @@ snapshots:
vanilla-colorful@0.7.2: {}
- vite-string-plugin@1.4.9: {}
-
- vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2):
+ vite-string-plugin@2.0.1(vite@7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)):
dependencies:
- esbuild: 0.27.2
+ vite: 7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)
+
+ vite@7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2):
+ dependencies:
+ esbuild: 0.27.3
fdir: 6.5.0(picomatch@4.0.3)
picomatch: 4.0.3
postcss: 8.5.6
- rollup: 4.53.5
+ rollup: 4.57.1
tinyglobby: 0.2.15
optionalDependencies:
- '@types/node': 25.0.3
+ '@types/node': 25.2.3
fsevents: 2.3.3
jiti: 2.6.1
stylus: 0.57.0
- terser: 5.44.1
+ terser: 5.46.0
yaml: 2.8.2
- vitest@4.0.16(@types/node@25.0.3)(happy-dom@20.0.11)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2):
+ vitest@4.0.18(@types/node@25.2.3)(happy-dom@20.6.1)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2):
dependencies:
- '@vitest/expect': 4.0.16
- '@vitest/mocker': 4.0.16(vite@7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2))
- '@vitest/pretty-format': 4.0.16
- '@vitest/runner': 4.0.16
- '@vitest/snapshot': 4.0.16
- '@vitest/spy': 4.0.16
- '@vitest/utils': 4.0.16
+ '@vitest/expect': 4.0.18
+ '@vitest/mocker': 4.0.18(vite@7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2))
+ '@vitest/pretty-format': 4.0.18
+ '@vitest/runner': 4.0.18
+ '@vitest/snapshot': 4.0.18
+ '@vitest/spy': 4.0.18
+ '@vitest/utils': 4.0.18
es-module-lexer: 1.7.0
expect-type: 1.3.0
magic-string: 0.30.21
@@ -8474,11 +8394,11 @@ snapshots:
tinyexec: 1.0.2
tinyglobby: 0.2.15
tinyrainbow: 3.0.3
- vite: 7.3.0(@types/node@25.0.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.44.1)(yaml@2.8.2)
+ vite: 7.3.1(@types/node@25.2.3)(jiti@2.6.1)(stylus@0.57.0)(terser@5.46.0)(yaml@2.8.2)
why-is-node-running: 2.3.0
optionalDependencies:
- '@types/node': 25.0.3
- happy-dom: 20.0.11
+ '@types/node': 25.2.3
+ happy-dom: 20.6.1
transitivePeerDependencies:
- jiti
- less
@@ -8507,71 +8427,69 @@ snapshots:
dependencies:
vscode-languageserver-protocol: 3.17.5
- vscode-uri@3.0.8: {}
-
vscode-uri@3.1.0: {}
vue-bar-graph@2.2.0(typescript@5.9.3):
dependencies:
- vue: 3.5.25(typescript@5.9.3)
+ vue: 3.5.28(typescript@5.9.3)
transitivePeerDependencies:
- typescript
- vue-chartjs@5.3.3(chart.js@4.5.1)(vue@3.5.25(typescript@5.9.3)):
+ vue-chartjs@5.3.3(chart.js@4.5.1)(vue@3.5.28(typescript@5.9.3)):
dependencies:
chart.js: 4.5.1
- vue: 3.5.25(typescript@5.9.3)
+ vue: 3.5.28(typescript@5.9.3)
- vue-eslint-parser@10.2.0(eslint@9.39.2(jiti@2.6.1)):
+ vue-eslint-parser@10.4.0(eslint@9.39.2(jiti@2.6.1)):
dependencies:
debug: 4.4.3
eslint: 9.39.2(jiti@2.6.1)
- eslint-scope: 8.4.0
- eslint-visitor-keys: 4.2.1
- espree: 10.4.0
- esquery: 1.6.0
- semver: 7.7.3
+ eslint-scope: 9.1.0
+ eslint-visitor-keys: 5.0.0
+ espree: 11.1.0
+ esquery: 1.7.0
+ semver: 7.7.4
transitivePeerDependencies:
- supports-color
- vue-loader@17.4.2(vue@3.5.25(typescript@5.9.3))(webpack@5.104.0):
+ vue-loader@17.4.2(vue@3.5.28(typescript@5.9.3))(webpack@5.105.2):
dependencies:
chalk: 4.1.2
hash-sum: 2.0.0
- watchpack: 2.4.4
- webpack: 5.104.0(webpack-cli@6.0.1)
+ watchpack: 2.5.1
+ webpack: 5.105.2(webpack-cli@6.0.1)
optionalDependencies:
- vue: 3.5.25(typescript@5.9.3)
+ vue: 3.5.28(typescript@5.9.3)
- vue-tsc@3.1.8(typescript@5.9.3):
+ vue-tsc@3.2.4(typescript@5.9.3):
dependencies:
- '@volar/typescript': 2.4.26
- '@vue/language-core': 3.1.8(typescript@5.9.3)
+ '@volar/typescript': 2.4.27
+ '@vue/language-core': 3.2.4
typescript: 5.9.3
- vue@3.5.25(typescript@5.9.3):
+ vue@3.5.28(typescript@5.9.3):
dependencies:
- '@vue/compiler-dom': 3.5.25
- '@vue/compiler-sfc': 3.5.25
- '@vue/runtime-dom': 3.5.25
- '@vue/server-renderer': 3.5.25(vue@3.5.25(typescript@5.9.3))
- '@vue/shared': 3.5.25
+ '@vue/compiler-dom': 3.5.28
+ '@vue/compiler-sfc': 3.5.28
+ '@vue/runtime-dom': 3.5.28
+ '@vue/server-renderer': 3.5.28(vue@3.5.28(typescript@5.9.3))
+ '@vue/shared': 3.5.28
optionalDependencies:
typescript: 5.9.3
- watchpack@2.4.4:
+ watchpack@2.5.1:
dependencies:
glob-to-regexp: 0.4.1
graceful-fs: 4.2.11
webidl-conversions@3.0.1: {}
- webpack-cli@6.0.1(webpack@5.104.0):
+ webpack-cli@6.0.1(webpack@5.105.2):
dependencies:
'@discoveryjs/json-ext': 0.6.3
- '@webpack-cli/configtest': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)
- '@webpack-cli/info': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)
- '@webpack-cli/serve': 3.0.1(webpack-cli@6.0.1)(webpack@5.104.0)
+ '@webpack-cli/configtest': 3.0.1(webpack-cli@6.0.1)(webpack@5.105.2)
+ '@webpack-cli/info': 3.0.1(webpack-cli@6.0.1)(webpack@5.105.2)
+ '@webpack-cli/serve': 3.0.1(webpack-cli@6.0.1)(webpack@5.105.2)
colorette: 2.0.20
commander: 12.1.0
cross-spawn: 7.0.6
@@ -8580,7 +8498,7 @@ snapshots:
import-local: 3.2.0
interpret: 3.1.1
rechoir: 0.8.0
- webpack: 5.104.0(webpack-cli@6.0.1)
+ webpack: 5.105.2(webpack-cli@6.0.1)
webpack-merge: 6.0.1
webpack-merge@6.0.1:
@@ -8594,9 +8512,9 @@ snapshots:
source-list-map: 2.0.1
source-map: 0.6.1
- webpack-sources@3.3.3: {}
+ webpack-sources@3.3.4: {}
- webpack@5.104.0(webpack-cli@6.0.1):
+ webpack@5.105.2(webpack-cli@6.0.1):
dependencies:
'@types/eslint-scope': 3.7.7
'@types/estree': 1.0.8
@@ -8608,7 +8526,7 @@ snapshots:
acorn-import-phases: 1.0.4(acorn@8.15.0)
browserslist: 4.28.1
chrome-trace-event: 1.0.4
- enhanced-resolve: 5.18.4
+ enhanced-resolve: 5.19.0
es-module-lexer: 2.0.0
eslint-scope: 5.1.1
events: 3.3.0
@@ -8620,11 +8538,11 @@ snapshots:
neo-async: 2.6.2
schema-utils: 4.3.3
tapable: 2.3.0
- terser-webpack-plugin: 5.3.16(webpack@5.104.0)
- watchpack: 2.4.4
- webpack-sources: 3.3.3
+ terser-webpack-plugin: 5.3.16(webpack@5.105.2)
+ watchpack: 2.5.1
+ webpack-sources: 3.3.4
optionalDependencies:
- webpack-cli: 6.0.1(webpack@5.104.0)
+ webpack-cli: 6.0.1(webpack@5.105.2)
transitivePeerDependencies:
- '@swc/core'
- esbuild
@@ -8668,11 +8586,13 @@ snapshots:
wrappy@1.0.2: {}
- write-file-atomic@5.0.1:
+ write-file-atomic@7.0.0:
dependencies:
imurmurhash: 0.1.4
signal-exit: 4.1.0
+ ws@8.19.0: {}
+
xml-lexer@0.2.2:
dependencies:
eventemitter3: 2.0.3
diff --git a/public/assets/img/svg/octicon-book-locked.svg b/public/assets/img/svg/octicon-book-locked.svg
new file mode 100644
index 0000000000..a72d10f96c
--- /dev/null
+++ b/public/assets/img/svg/octicon-book-locked.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/public/assets/img/svg/octicon-comment-locked.svg b/public/assets/img/svg/octicon-comment-locked.svg
new file mode 100644
index 0000000000..d8e9274715
--- /dev/null
+++ b/public/assets/img/svg/octicon-comment-locked.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/public/assets/img/svg/octicon-git-pull-request-locked.svg b/public/assets/img/svg/octicon-git-pull-request-locked.svg
new file mode 100644
index 0000000000..d9ea190491
--- /dev/null
+++ b/public/assets/img/svg/octicon-git-pull-request-locked.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/public/assets/img/svg/octicon-issue-locked.svg b/public/assets/img/svg/octicon-issue-locked.svg
new file mode 100644
index 0000000000..24af659c4f
--- /dev/null
+++ b/public/assets/img/svg/octicon-issue-locked.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/public/assets/img/svg/octicon-logo-github.svg b/public/assets/img/svg/octicon-logo-github.svg
index 02d92c9b13..8aae451ae5 100644
--- a/public/assets/img/svg/octicon-logo-github.svg
+++ b/public/assets/img/svg/octicon-logo-github.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/public/assets/img/svg/octicon-mark-github.svg b/public/assets/img/svg/octicon-mark-github.svg
index 9381053c06..6d6dc40886 100644
--- a/public/assets/img/svg/octicon-mark-github.svg
+++ b/public/assets/img/svg/octicon-mark-github.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 3657feb2ce..20a10d1915 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ requires-python = ">=3.10"
[dependency-groups]
dev = [
"djlint==1.36.4",
- "yamllint==1.37.1",
+ "yamllint==1.38.0",
]
[tool.djlint]
diff --git a/routers/api/packages/api.go b/routers/api/packages/api.go
index f6ee5958b5..71fee23c92 100644
--- a/routers/api/packages/api.go
+++ b/routers/api/packages/api.go
@@ -117,7 +117,7 @@ func CommonRoutes() *web.Router {
&auth.OAuth2{},
&auth.Basic{},
&nuget.Auth{},
- &conan.Auth{},
+ &Auth{},
&chef.Auth{},
})
@@ -537,7 +537,8 @@ func ContainerRoutes() *web.Router {
verifyAuth(r, []auth.Method{
&auth.Basic{},
- &container.Auth{},
+ // container auth requires a token, so container.Authenticate issues a Ghost user token for anonymous access
+ &Auth{AllowGhostUser: true},
})
// TODO: Content Discovery / References (not implemented yet)
diff --git a/routers/api/packages/conan/auth.go b/routers/api/packages/auth.go
similarity index 54%
rename from routers/api/packages/conan/auth.go
rename to routers/api/packages/auth.go
index bce3235a2e..28e5be1e4d 100644
--- a/routers/api/packages/conan/auth.go
+++ b/routers/api/packages/auth.go
@@ -1,7 +1,7 @@
-// Copyright 2022 The Gitea Authors. All rights reserved.
+// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
-package conan
+package packages
import (
"net/http"
@@ -14,10 +14,13 @@ import (
var _ auth.Method = &Auth{}
-type Auth struct{}
+// Auth is for conan and container
+type Auth struct {
+ AllowGhostUser bool
+}
func (a *Auth) Name() string {
- return "conan"
+ return "packages"
}
// Verify extracts the user from the Bearer token
@@ -29,13 +32,25 @@ func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataS
}
if packageMeta == nil || packageMeta.UserID == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
- u, err := user_model.GetUserByID(req.Context(), packageMeta.UserID)
- if err != nil {
- return nil, err
+ var u *user_model.User
+ switch packageMeta.UserID {
+ case user_model.GhostUserID:
+ if !a.AllowGhostUser {
+ return nil, nil //nolint:nilnil // the auth method is not applicable
+ }
+ u = user_model.NewGhostUser()
+ case user_model.ActionsUserID:
+ u = user_model.NewActionsUserWithTaskID(packageMeta.ActionsUserTaskID)
+ default:
+ u, err = user_model.GetUserByID(req.Context(), packageMeta.UserID)
+ if err != nil {
+ return nil, err
+ }
}
+
if packageMeta.Scope != "" {
store.GetData()["IsApiToken"] = true
store.GetData()["ApiTokenScope"] = packageMeta.Scope
diff --git a/routers/api/packages/chef/auth.go b/routers/api/packages/chef/auth.go
index c6808300a2..5f7dad9d2d 100644
--- a/routers/api/packages/chef/auth.go
+++ b/routers/api/packages/chef/auth.go
@@ -61,7 +61,7 @@ func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataS
return nil, err
}
if u == nil {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
pub, err := getUserPublicKey(req.Context(), u)
@@ -88,7 +88,7 @@ func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataS
func getUserFromRequest(req *http.Request) (*user_model.User, error) {
username := req.Header.Get("X-Ops-Userid")
if username == "" {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
return user_model.GetUserByName(req.Context(), username)
diff --git a/routers/api/packages/container/auth.go b/routers/api/packages/container/auth.go
deleted file mode 100644
index 19a931c405..0000000000
--- a/routers/api/packages/container/auth.go
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright 2022 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package container
-
-import (
- "net/http"
-
- user_model "code.gitea.io/gitea/models/user"
- "code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/services/auth"
- "code.gitea.io/gitea/services/packages"
-)
-
-var _ auth.Method = &Auth{}
-
-type Auth struct{}
-
-func (a *Auth) Name() string {
- return "container"
-}
-
-// Verify extracts the user from the Bearer token
-// If it's an anonymous session, a ghost user is returned
-func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataStore, sess auth.SessionStore) (*user_model.User, error) {
- packageMeta, err := packages.ParseAuthorizationRequest(req)
- if err != nil {
- log.Trace("ParseAuthorizationToken: %v", err)
- return nil, err
- }
-
- if packageMeta == nil || packageMeta.UserID == 0 {
- return nil, nil
- }
-
- u, err := user_model.GetPossibleUserByID(req.Context(), packageMeta.UserID)
- if err != nil {
- return nil, err
- }
-
- if packageMeta.Scope != "" {
- store.GetData()["IsApiToken"] = true
- store.GetData()["ApiTokenScope"] = packageMeta.Scope
- }
-
- return u, nil
-}
diff --git a/routers/api/packages/container/blob.go b/routers/api/packages/container/blob.go
index 4b7bcee9d0..da509bc7a7 100644
--- a/routers/api/packages/container/blob.go
+++ b/routers/api/packages/container/blob.go
@@ -26,9 +26,18 @@ import (
// saveAsPackageBlob creates a package blob from an upload
// The uploaded blob gets stored in a special upload version to link them to the package/image
-func saveAsPackageBlob(ctx context.Context, hsr packages_module.HashedSizeReader, pci *packages_service.PackageCreationInfo) (*packages_model.PackageBlob, error) { //nolint:unparam // PackageBlob is never used
+// There may be concurrent uploads of the same blob, so it needs a global lock per blob hash
+func saveAsPackageBlob(ctx context.Context, hsr packages_module.HashedSizeReader, pci *packages_service.PackageCreationInfo) (*packages_model.PackageBlob, error) { //nolint:unparam //returned PackageBlob is never used
pb := packages_service.NewPackageBlob(hsr)
+ err := globallock.LockAndDo(ctx, "container-blob:"+pb.HashSHA256, func(ctx context.Context) error {
+ var err error
+ pb, err = saveAsPackageBlobInternal(ctx, hsr, pci, pb)
+ return err
+ })
+ return pb, err
+}
+func saveAsPackageBlobInternal(ctx context.Context, hsr packages_module.HashedSizeReader, pci *packages_service.PackageCreationInfo, pb *packages_model.PackageBlob) (*packages_model.PackageBlob, error) {
exists := false
contentStore := packages_module.NewContentStore()
@@ -67,7 +76,7 @@ func saveAsPackageBlob(ctx context.Context, hsr packages_module.HashedSizeReader
return createFileForBlob(ctx, uploadVersion, pb)
})
if err != nil {
- if !exists {
+ if !exists && pb != nil { // pb can be nil if GetOrInsertBlob failed
if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil {
log.Error("Error deleting package blob from content store: %v", err)
}
diff --git a/routers/api/packages/nuget/auth.go b/routers/api/packages/nuget/auth.go
index ce7df0ce0a..8248453e74 100644
--- a/routers/api/packages/nuget/auth.go
+++ b/routers/api/packages/nuget/auth.go
@@ -6,43 +6,21 @@ package nuget
import (
"net/http"
- auth_model "code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
- "code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/services/auth"
)
var _ auth.Method = &Auth{}
-type Auth struct{}
+type Auth struct {
+ basicAuth auth.Basic
+}
func (a *Auth) Name() string {
return "nuget"
}
-// https://docs.microsoft.com/en-us/nuget/api/package-publish-resource#request-parameters
func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataStore, sess auth.SessionStore) (*user_model.User, error) {
- token, err := auth_model.GetAccessTokenBySHA(req.Context(), req.Header.Get("X-NuGet-ApiKey"))
- if err != nil {
- if !(auth_model.IsErrAccessTokenNotExist(err) || auth_model.IsErrAccessTokenEmpty(err)) {
- return nil, err
- }
- return nil, nil
- }
-
- u, err := user_model.GetUserByID(req.Context(), token.UID)
- if err != nil {
- return nil, err
- }
-
- token.UpdatedUnix = timeutil.TimeStampNow()
- if err := auth_model.UpdateAccessToken(req.Context(), token); err != nil {
- log.Error("UpdateAccessToken: %v", err)
- }
-
- store.GetData()["IsApiToken"] = true
- store.GetData()["ApiToken"] = token
-
- return u, nil
+ // ref: https://docs.microsoft.com/en-us/nuget/api/package-publish-resource#request-parameters
+ return a.basicAuth.VerifyAuthToken(req, w, store, sess, req.Header.Get("X-NuGet-ApiKey"))
}
diff --git a/routers/api/packages/swift/swift.go b/routers/api/packages/swift/swift.go
index d84f79a0a8..66c28c9772 100644
--- a/routers/api/packages/swift/swift.go
+++ b/routers/api/packages/swift/swift.go
@@ -300,7 +300,7 @@ func formFileOptionalReadCloser(ctx *context.Context, formKey string) (io.ReadCl
content := ctx.Req.FormValue(formKey)
if content == "" {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the content does not exist
}
return io.NopCloser(strings.NewReader(content)), nil
}
diff --git a/routers/api/v1/admin/adopt.go b/routers/api/v1/admin/adopt.go
index c2efed7490..92711409f0 100644
--- a/routers/api/v1/admin/adopt.go
+++ b/routers/api/v1/admin/adopt.go
@@ -8,7 +8,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
- "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/routers/api/v1/utils"
"code.gitea.io/gitea/services/context"
repo_service "code.gitea.io/gitea/services/repository"
@@ -99,12 +99,12 @@ func AdoptRepository(ctx *context.APIContext) {
ctx.APIErrorInternal(err)
return
}
- isDir, err := util.IsDir(repo_model.RepoPath(ctxUser.Name, repoName))
+ exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(repo_model.RelativePath(ctxUser.Name, repoName)))
if err != nil {
ctx.APIErrorInternal(err)
return
}
- if has || !isDir {
+ if has || !exist {
ctx.APIErrorNotFound()
return
}
@@ -161,12 +161,12 @@ func DeleteUnadoptedRepository(ctx *context.APIContext) {
ctx.APIErrorInternal(err)
return
}
- isDir, err := util.IsDir(repo_model.RepoPath(ctxUser.Name, repoName))
+ exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(repo_model.RelativePath(ctxUser.Name, repoName)))
if err != nil {
ctx.APIErrorInternal(err)
return
}
- if has || !isDir {
+ if has || !exist {
ctx.APIErrorNotFound()
return
}
diff --git a/routers/api/v1/admin/hooks.go b/routers/api/v1/admin/hooks.go
index a687541be5..6170e7343a 100644
--- a/routers/api/v1/admin/hooks.go
+++ b/routers/api/v1/admin/hooks.go
@@ -57,8 +57,13 @@ func ListHooks(ctx *context.APIContext) {
case "all":
isSystemWebhook = optional.None[bool]()
}
+ listOptions := utils.GetListOptions(ctx)
+ opts := &webhook.ListSystemWebhookOptions{
+ ListOptions: listOptions,
+ IsSystem: isSystemWebhook,
+ }
- sysHooks, err := webhook.GetSystemOrDefaultWebhooks(ctx, isSystemWebhook)
+ sysHooks, total, err := webhook.GetGlobalWebhooks(ctx, opts)
if err != nil {
ctx.APIErrorInternal(err)
return
@@ -72,6 +77,8 @@ func ListHooks(ctx *context.APIContext) {
}
hooks[i] = h
}
+ ctx.SetLinkHeader(int(total), listOptions.PageSize)
+ ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, hooks)
}
diff --git a/routers/api/v1/admin/runners.go b/routers/api/v1/admin/runners.go
index 736c421229..1e15b24076 100644
--- a/routers/api/v1/admin/runners.go
+++ b/routers/api/v1/admin/runners.go
@@ -14,7 +14,7 @@ import (
func GetRegistrationToken(ctx *context.APIContext) {
// swagger:operation GET /admin/runners/registration-token admin adminGetRunnerRegistrationToken
// ---
- // summary: Get an global actions runner registration token
+ // summary: Get a global actions runner registration token
// produces:
// - application/json
// parameters:
@@ -29,7 +29,7 @@ func GetRegistrationToken(ctx *context.APIContext) {
func CreateRegistrationToken(ctx *context.APIContext) {
// swagger:operation POST /admin/actions/runners/registration-token admin adminCreateRunnerRegistrationToken
// ---
- // summary: Get an global actions runner registration token
+ // summary: Get a global actions runner registration token
// produces:
// - application/json
// parameters:
@@ -57,11 +57,11 @@ func ListRunners(ctx *context.APIContext) {
shared.ListRunners(ctx, 0, 0)
}
-// GetRunner get an global runner
+// GetRunner get a global runner
func GetRunner(ctx *context.APIContext) {
// swagger:operation GET /admin/actions/runners/{runner_id} admin getAdminRunner
// ---
- // summary: Get an global runner
+ // summary: Get a global runner
// produces:
// - application/json
// parameters:
@@ -80,11 +80,11 @@ func GetRunner(ctx *context.APIContext) {
shared.GetRunner(ctx, 0, 0, ctx.PathParamInt64("runner_id"))
}
-// DeleteRunner delete an global runner
+// DeleteRunner delete a global runner
func DeleteRunner(ctx *context.APIContext) {
// swagger:operation DELETE /admin/actions/runners/{runner_id} admin deleteAdminRunner
// ---
- // summary: Delete an global runner
+ // summary: Delete a global runner
// produces:
// - application/json
// parameters:
diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go
index fcf9e73057..359d5af4c4 100644
--- a/routers/api/v1/api.go
+++ b/routers/api/v1/api.go
@@ -188,8 +188,7 @@ func repoAssignment() func(ctx *context.APIContext) {
repo.Owner = owner
ctx.Repo.Repository = repo
- if ctx.Doer != nil && ctx.Doer.ID == user_model.ActionsUserID {
- taskID := ctx.Data["ActionsTaskID"].(int64)
+ if taskID, ok := user_model.GetActionsUserTaskID(ctx.Doer); ok {
ctx.Repo.Permission, err = access_model.GetActionsUserRepoPermission(ctx, repo, ctx.Doer, taskID)
if err != nil {
ctx.APIErrorInternal(err)
@@ -349,11 +348,7 @@ func tokenRequiresScopes(requiredScopeCategories ...auth_model.AccessTokenScopeC
// Contexter middleware already checks token for user sign in process.
func reqToken() func(ctx *context.APIContext) {
return func(ctx *context.APIContext) {
- // If actions token is present
- if true == ctx.Data["IsActionsToken"] {
- return
- }
-
+ // if a real user is signed in, or the user is from an Actions task, we are good
if ctx.IsSigned {
return
}
@@ -1353,6 +1348,8 @@ func Routes() *web.Router {
m.Combo("").Get(repo.ListPullRequests).
Post(reqToken(), mustNotBeArchived, bind(api.CreatePullRequestOption{}), repo.CreatePullRequest)
m.Get("/pinned", repo.ListPinnedPullRequests)
+ m.Post("/comments/{id}/resolve", reqToken(), mustNotBeArchived, repo.ResolvePullReviewComment)
+ m.Post("/comments/{id}/unresolve", reqToken(), mustNotBeArchived, repo.UnresolvePullReviewComment)
m.Group("/{index}", func() {
m.Combo("").Get(repo.GetPullRequest).
Patch(reqToken(), bind(api.EditPullRequestOption{}), repo.EditPullRequest)
@@ -1384,19 +1381,19 @@ func Routes() *web.Router {
})
m.Get("/{base}/*", repo.GetPullRequestByBaseHead)
}, mustAllowPulls, reqRepoReader(unit.TypeCode), context.ReferencesGitRepo())
- m.Group("/statuses", func() {
+ m.Group("/statuses", func() { // "/statuses/{sha}" only accepts commit ID
m.Combo("/{sha}").Get(repo.GetCommitStatuses).
Post(reqToken(), reqRepoWriter(unit.TypeCode), bind(api.CreateStatusOption{}), repo.NewCommitStatus)
}, reqRepoReader(unit.TypeCode))
m.Group("/commits", func() {
m.Get("", context.ReferencesGitRepo(), repo.GetAllCommits)
- m.Group("/{ref}", func() {
- m.Get("/status", repo.GetCombinedCommitStatusByRef)
- m.Get("/statuses", repo.GetCommitStatusesByRef)
- }, context.ReferencesGitRepo())
- m.Group("/{sha}", func() {
- m.Get("/pull", repo.GetCommitPullRequest)
- }, context.ReferencesGitRepo())
+ m.PathGroup("/*", func(g *web.RouterPathGroup) {
+ // A mis-configured reverse proxy might decode the `%2F` to a slash ahead of time, so we need to support both formats (escaped, unescaped) here.
+ // It also matches GitHub's behavior
+ g.MatchPath("GET", "//status", repo.GetCombinedCommitStatusByRef)
+ g.MatchPath("GET", "//statuses", repo.GetCommitStatusesByRef)
+ g.MatchPath("GET", "//pull", repo.GetCommitPullRequest)
+ })
}, reqRepoReader(unit.TypeCode))
m.Group("/git", func() {
m.Group("/commits", func() {
diff --git a/routers/api/v1/misc/markup_test.go b/routers/api/v1/misc/markup_test.go
index 38a1a3be9e..4d61b287ae 100644
--- a/routers/api/v1/misc/markup_test.go
+++ b/routers/api/v1/misc/markup_test.go
@@ -173,8 +173,8 @@ Here are some links to the most important topics. You can find the full list of
`, http.StatusOK)
- testRenderMarkup(t, "file", false, "path/test.unknown", "## Test", "unsupported file to render: \"path/test.unknown\"\n", http.StatusUnprocessableEntity)
- testRenderMarkup(t, "unknown", false, "", "## Test", "Unknown mode: unknown\n", http.StatusUnprocessableEntity)
+ testRenderMarkup(t, "file", false, "path/test.unknown", "## Test", "unable to find a render\n", http.StatusUnprocessableEntity)
+ testRenderMarkup(t, "unknown", false, "", "## Test", "unsupported render mode: unknown\n", http.StatusUnprocessableEntity)
}
var simpleCases = []string{
diff --git a/routers/api/v1/notify/repo.go b/routers/api/v1/notify/repo.go
index e87054e26c..51695a52c8 100644
--- a/routers/api/v1/notify/repo.go
+++ b/routers/api/v1/notify/repo.go
@@ -125,8 +125,8 @@ func ListRepoNotifications(ctx *context.APIContext) {
return
}
+ ctx.SetLinkHeader(int(totalCount), opts.PageSize)
ctx.SetTotalCountHeader(totalCount)
-
ctx.JSON(http.StatusOK, convert.ToNotifications(ctx, nl))
}
diff --git a/routers/api/v1/notify/user.go b/routers/api/v1/notify/user.go
index 3ebb678835..82cedd418b 100644
--- a/routers/api/v1/notify/user.go
+++ b/routers/api/v1/notify/user.go
@@ -86,6 +86,7 @@ func ListNotifications(ctx *context.APIContext) {
return
}
+ ctx.SetLinkHeader(int(totalCount), opts.PageSize)
ctx.SetTotalCountHeader(totalCount)
ctx.JSON(http.StatusOK, convert.ToNotifications(ctx, nl))
}
diff --git a/routers/api/v1/org/action.go b/routers/api/v1/org/action.go
index 3ae5e60585..59d8d3f2b4 100644
--- a/routers/api/v1/org/action.go
+++ b/routers/api/v1/org/action.go
@@ -67,6 +67,7 @@ func (Action) ListActionsSecrets(ctx *context.APIContext) {
}
}
+ ctx.SetLinkHeader(int(count), opts.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiSecrets)
}
@@ -240,9 +241,10 @@ func (Action) ListVariables(ctx *context.APIContext) {
// "404":
// "$ref": "#/responses/notFound"
+ listOptions := utils.GetListOptions(ctx)
vars, count, err := db.FindAndCount[actions_model.ActionVariable](ctx, &actions_model.FindVariablesOpts{
OwnerID: ctx.Org.Organization.ID,
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
})
if err != nil {
ctx.APIErrorInternal(err)
@@ -259,7 +261,7 @@ func (Action) ListVariables(ctx *context.APIContext) {
Description: v.Description,
}
}
-
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, variables)
}
diff --git a/routers/api/v1/org/member.go b/routers/api/v1/org/member.go
index 1c12b0cc94..b72cafee0c 100644
--- a/routers/api/v1/org/member.go
+++ b/routers/api/v1/org/member.go
@@ -20,11 +20,12 @@ import (
// listMembers list an organization's members
func listMembers(ctx *context.APIContext, isMember bool) {
+ listOptions := utils.GetListOptions(ctx)
opts := &organization.FindOrgMembersOpts{
Doer: ctx.Doer,
IsDoerMember: isMember,
OrgID: ctx.Org.Organization.ID,
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
}
count, err := organization.CountOrgMembers(ctx, opts)
@@ -44,6 +45,7 @@ func listMembers(ctx *context.APIContext, isMember bool) {
apiMembers[i] = convert.ToUser(ctx, member, ctx.Doer)
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiMembers)
}
diff --git a/routers/api/v1/org/team.go b/routers/api/v1/org/team.go
index 1a1710750a..7d43db1e9b 100644
--- a/routers/api/v1/org/team.go
+++ b/routers/api/v1/org/team.go
@@ -54,8 +54,9 @@ func ListTeams(ctx *context.APIContext) {
// "404":
// "$ref": "#/responses/notFound"
+ listOptions := utils.GetListOptions(ctx)
teams, count, err := organization.SearchTeam(ctx, &organization.SearchTeamOptions{
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
OrgID: ctx.Org.Organization.ID,
})
if err != nil {
@@ -69,6 +70,7 @@ func ListTeams(ctx *context.APIContext) {
return
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiTeams)
}
@@ -93,8 +95,9 @@ func ListUserTeams(ctx *context.APIContext) {
// "200":
// "$ref": "#/responses/TeamList"
+ listOptions := utils.GetListOptions(ctx)
teams, count, err := organization.SearchTeam(ctx, &organization.SearchTeamOptions{
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
UserID: ctx.Doer.ID,
})
if err != nil {
@@ -108,6 +111,7 @@ func ListUserTeams(ctx *context.APIContext) {
return
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiTeams)
}
@@ -392,8 +396,9 @@ func GetTeamMembers(ctx *context.APIContext) {
return
}
+ listOptions := utils.GetListOptions(ctx)
teamMembers, err := organization.GetTeamMembers(ctx, &organization.SearchMembersOptions{
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
TeamID: ctx.Org.Team.ID,
})
if err != nil {
@@ -406,6 +411,7 @@ func GetTeamMembers(ctx *context.APIContext) {
members[i] = convert.ToUser(ctx, member, ctx.Doer)
}
+ ctx.SetLinkHeader(ctx.Org.Team.NumMembers, listOptions.PageSize)
ctx.SetTotalCountHeader(int64(ctx.Org.Team.NumMembers))
ctx.JSON(http.StatusOK, members)
}
@@ -559,8 +565,9 @@ func GetTeamRepos(ctx *context.APIContext) {
// "$ref": "#/responses/notFound"
team := ctx.Org.Team
+ listOptions := utils.GetListOptions(ctx)
teamRepos, err := repo_model.GetTeamRepositories(ctx, &repo_model.SearchTeamRepoOptions{
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
TeamID: team.ID,
})
if err != nil {
@@ -576,6 +583,7 @@ func GetTeamRepos(ctx *context.APIContext) {
}
repos[i] = convert.ToRepo(ctx, repo, permission)
}
+ ctx.SetLinkHeader(team.NumRepos, listOptions.PageSize)
ctx.SetTotalCountHeader(int64(team.NumRepos))
ctx.JSON(http.StatusOK, repos)
}
@@ -874,7 +882,7 @@ func ListTeamActivityFeeds(ctx *context.APIContext) {
ctx.APIErrorInternal(err)
return
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
-
ctx.JSON(http.StatusOK, convert.ToActivities(ctx, feeds, ctx.Doer))
}
diff --git a/routers/api/v1/repo/action.go b/routers/api/v1/repo/action.go
index 25aabe6dd2..03ce0d3aab 100644
--- a/routers/api/v1/repo/action.go
+++ b/routers/api/v1/repo/action.go
@@ -69,10 +69,11 @@ func (Action) ListActionsSecrets(ctx *context.APIContext) {
// "$ref": "#/responses/notFound"
repo := ctx.Repo.Repository
+ listOptions := utils.GetListOptions(ctx)
opts := &secret_model.FindSecretsOptions{
RepoID: repo.ID,
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
}
secrets, count, err := db.FindAndCount[secret_model.Secret](ctx, opts)
@@ -89,7 +90,7 @@ func (Action) ListActionsSecrets(ctx *context.APIContext) {
Created: v.CreatedUnix.AsTime(),
}
}
-
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiSecrets)
}
@@ -482,9 +483,11 @@ func (Action) ListVariables(ctx *context.APIContext) {
// "404":
// "$ref": "#/responses/notFound"
+ listOptions := utils.GetListOptions(ctx)
+
vars, count, err := db.FindAndCount[actions_model.ActionVariable](ctx, &actions_model.FindVariablesOpts{
RepoID: ctx.Repo.Repository.ID,
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
})
if err != nil {
ctx.APIErrorInternal(err)
@@ -502,6 +505,7 @@ func (Action) ListVariables(ctx *context.APIContext) {
}
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, variables)
}
@@ -807,9 +811,10 @@ func ListActionTasks(ctx *context.APIContext) {
// "$ref": "#/responses/conflict"
// "422":
// "$ref": "#/responses/validationError"
+ listOptions := utils.GetListOptions(ctx)
tasks, total, err := db.FindAndCount[actions_model.ActionTask](ctx, &actions_model.FindTaskOptions{
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
RepoID: ctx.Repo.Repository.ID,
})
if err != nil {
@@ -830,6 +835,8 @@ func ListActionTasks(ctx *context.APIContext) {
res.Entries[i] = convertedTask
}
+ ctx.SetLinkHeader(int(total), listOptions.PageSize)
+ ctx.SetTotalCountHeader(total) // Duplicates api response field but it's better to set it for consistency
ctx.JSON(http.StatusOK, &res)
}
diff --git a/routers/api/v1/repo/branch.go b/routers/api/v1/repo/branch.go
index 4624d7e738..82fd68bdec 100644
--- a/routers/api/v1/repo/branch.go
+++ b/routers/api/v1/repo/branch.go
@@ -155,7 +155,7 @@ func DeleteBranch(ctx *context.APIContext) {
case git.IsErrBranchNotExist(err):
ctx.APIErrorNotFound(err)
case errors.Is(err, repo_service.ErrBranchIsDefault):
- ctx.APIError(http.StatusForbidden, errors.New("can not delete default branch"))
+ ctx.APIError(http.StatusForbidden, errors.New("can not delete default or pull request target branch"))
case errors.Is(err, git_model.ErrBranchIsProtected):
ctx.APIError(http.StatusForbidden, errors.New("branch protected"))
default:
@@ -515,7 +515,7 @@ func RenameBranch(ctx *context.APIContext) {
case repo_model.IsErrUserDoesNotHaveAccessToRepo(err):
ctx.APIError(http.StatusForbidden, "User must be a repo or site admin to rename default or protected branches.")
case errors.Is(err, git_model.ErrBranchIsProtected):
- ctx.APIError(http.StatusForbidden, "Branch is protected by glob-based protection rules.")
+ ctx.APIError(http.StatusForbidden, "Failed to rename branch due to branch protection rules.")
default:
ctx.APIErrorInternal(err)
}
diff --git a/routers/api/v1/repo/compare.go b/routers/api/v1/repo/compare.go
index 6d427c8073..6285138c27 100644
--- a/routers/api/v1/repo/compare.go
+++ b/routers/api/v1/repo/compare.go
@@ -5,7 +5,6 @@ package repo
import (
"net/http"
- "strings"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/gitrepo"
@@ -52,18 +51,7 @@ func CompareDiff(ctx *context.APIContext) {
}
}
- infoPath := ctx.PathParam("*")
- infos := []string{ctx.Repo.Repository.DefaultBranch, ctx.Repo.Repository.DefaultBranch}
- if infoPath != "" {
- infos = strings.SplitN(infoPath, "...", 2)
- if len(infos) != 2 {
- if infos = strings.SplitN(infoPath, "..", 2); len(infos) != 2 {
- infos = []string{ctx.Repo.Repository.DefaultBranch, infoPath}
- }
- }
- }
-
- compareResult, closer := parseCompareInfo(ctx, api.CreatePullRequestOption{Base: infos[0], Head: infos[1]})
+ compareInfo, closer := parseCompareInfo(ctx, ctx.PathParam("*"))
if ctx.Written() {
return
}
@@ -72,10 +60,10 @@ func CompareDiff(ctx *context.APIContext) {
verification := ctx.FormString("verification") == "" || ctx.FormBool("verification")
files := ctx.FormString("files") == "" || ctx.FormBool("files")
- apiCommits := make([]*api.Commit, 0, len(compareResult.compareInfo.Commits))
+ apiCommits := make([]*api.Commit, 0, len(compareInfo.Commits))
userCache := make(map[string]*user_model.User)
- for i := 0; i < len(compareResult.compareInfo.Commits); i++ {
- apiCommit, err := convert.ToCommit(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, compareResult.compareInfo.Commits[i], userCache,
+ for i := 0; i < len(compareInfo.Commits); i++ {
+ apiCommit, err := convert.ToCommit(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, compareInfo.Commits[i], userCache,
convert.ToCommitOptions{
Stat: true,
Verification: verification,
@@ -89,7 +77,7 @@ func CompareDiff(ctx *context.APIContext) {
}
ctx.JSON(http.StatusOK, &api.Compare{
- TotalCommits: len(compareResult.compareInfo.Commits),
+ TotalCommits: len(compareInfo.Commits),
Commits: apiCommits,
})
}
diff --git a/routers/api/v1/repo/download.go b/routers/api/v1/repo/download.go
index ea5846d343..5ddda525f9 100644
--- a/routers/api/v1/repo/download.go
+++ b/routers/api/v1/repo/download.go
@@ -8,25 +8,35 @@ import (
"net/http"
repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
archiver_service "code.gitea.io/gitea/services/repository/archiver"
)
-func serveRepoArchive(ctx *context.APIContext, reqFileName string) {
- aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, reqFileName)
+func serveRepoArchive(ctx *context.APIContext, reqFileName string, paths []string) {
+ aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, reqFileName, paths)
if err != nil {
- if errors.Is(err, archiver_service.ErrUnknownArchiveFormat{}) {
+ if errors.Is(err, util.ErrInvalidArgument) {
ctx.APIError(http.StatusBadRequest, err)
- } else if errors.Is(err, archiver_service.RepoRefNotFoundError{}) {
+ } else if errors.Is(err, util.ErrNotExist) {
ctx.APIError(http.StatusNotFound, err)
} else {
ctx.APIErrorInternal(err)
}
return
}
- archiver_service.ServeRepoArchive(ctx.Base, aReq)
+ err = archiver_service.ServeRepoArchive(ctx.Base, aReq)
+ if err != nil {
+ if errors.Is(err, util.ErrInvalidArgument) {
+ ctx.APIError(http.StatusBadRequest, err)
+ } else {
+ ctx.APIErrorInternal(err)
+ }
+ }
}
+// DownloadArchive is the GitHub-compatible endpoint to download repository archives
+// TODO: The API document is missing: Add github compatible tarball download API endpoints (#32572)
func DownloadArchive(ctx *context.APIContext) {
var tp repo_model.ArchiveType
switch ballType := ctx.PathParam("ball_type"); ballType {
@@ -40,5 +50,5 @@ func DownloadArchive(ctx *context.APIContext) {
ctx.APIError(http.StatusBadRequest, "Unknown archive type: "+ballType)
return
}
- serveRepoArchive(ctx, ctx.PathParam("*")+"."+tp.String())
+ serveRepoArchive(ctx, ctx.PathParam("*")+"."+tp.String(), ctx.FormStrings("path"))
}
diff --git a/routers/api/v1/repo/file.go b/routers/api/v1/repo/file.go
index 27a0827a10..deb68963c2 100644
--- a/routers/api/v1/repo/file.go
+++ b/routers/api/v1/repo/file.go
@@ -273,13 +273,19 @@ func GetArchive(ctx *context.APIContext) {
// description: the git reference for download with attached archive format (e.g. master.zip)
// type: string
// required: true
+ // - name: path
+ // in: query
+ // type: array
+ // items:
+ // type: string
+ // description: subpath of the repository to download
+ // collectionFormat: multi
// responses:
// 200:
// description: success
// "404":
// "$ref": "#/responses/notFound"
-
- serveRepoArchive(ctx, ctx.PathParam("*"))
+ serveRepoArchive(ctx, ctx.PathParam("*"), ctx.FormStrings("path"))
}
// GetEditorconfig get editor config of a repository
diff --git a/routers/api/v1/repo/issue_comment.go b/routers/api/v1/repo/issue_comment.go
index 4db1e878b1..37b5836e1d 100644
--- a/routers/api/v1/repo/issue_comment.go
+++ b/routers/api/v1/repo/issue_comment.go
@@ -445,7 +445,7 @@ func GetIssueComment(ctx *context.APIContext) {
// "404":
// "$ref": "#/responses/notFound"
- comment, err := issues_model.GetCommentByID(ctx, ctx.PathParamInt64("id"))
+ comment, err := issues_model.GetCommentWithRepoID(ctx, ctx.Repo.Repository.ID, ctx.PathParamInt64("id"))
if err != nil {
if issues_model.IsErrCommentNotExist(err) {
ctx.APIErrorNotFound(err)
@@ -455,15 +455,6 @@ func GetIssueComment(ctx *context.APIContext) {
return
}
- if err = comment.LoadIssue(ctx); err != nil {
- ctx.APIErrorInternal(err)
- return
- }
- if comment.Issue.RepoID != ctx.Repo.Repository.ID {
- ctx.Status(http.StatusNotFound)
- return
- }
-
if !ctx.Repo.CanReadIssuesOrPulls(comment.Issue.IsPull) {
ctx.APIErrorNotFound()
return
@@ -579,7 +570,7 @@ func EditIssueCommentDeprecated(ctx *context.APIContext) {
}
func editIssueComment(ctx *context.APIContext, form api.EditIssueCommentOption) {
- comment, err := issues_model.GetCommentByID(ctx, ctx.PathParamInt64("id"))
+ comment, err := issues_model.GetCommentWithRepoID(ctx, ctx.Repo.Repository.ID, ctx.PathParamInt64("id"))
if err != nil {
if issues_model.IsErrCommentNotExist(err) {
ctx.APIErrorNotFound(err)
@@ -589,16 +580,6 @@ func editIssueComment(ctx *context.APIContext, form api.EditIssueCommentOption)
return
}
- if err := comment.LoadIssue(ctx); err != nil {
- ctx.APIErrorInternal(err)
- return
- }
-
- if comment.Issue.RepoID != ctx.Repo.Repository.ID {
- ctx.Status(http.StatusNotFound)
- return
- }
-
if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
ctx.Status(http.StatusForbidden)
return
@@ -698,7 +679,7 @@ func DeleteIssueCommentDeprecated(ctx *context.APIContext) {
}
func deleteIssueComment(ctx *context.APIContext) {
- comment, err := issues_model.GetCommentByID(ctx, ctx.PathParamInt64("id"))
+ comment, err := issues_model.GetCommentWithRepoID(ctx, ctx.Repo.Repository.ID, ctx.PathParamInt64("id"))
if err != nil {
if issues_model.IsErrCommentNotExist(err) {
ctx.APIErrorNotFound(err)
@@ -708,16 +689,6 @@ func deleteIssueComment(ctx *context.APIContext) {
return
}
- if err := comment.LoadIssue(ctx); err != nil {
- ctx.APIErrorInternal(err)
- return
- }
-
- if comment.Issue.RepoID != ctx.Repo.Repository.ID {
- ctx.Status(http.StatusNotFound)
- return
- }
-
if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
ctx.Status(http.StatusForbidden)
return
diff --git a/routers/api/v1/repo/issue_dependency.go b/routers/api/v1/repo/issue_dependency.go
index b34e325e5d..6c66e719eb 100644
--- a/routers/api/v1/repo/issue_dependency.go
+++ b/routers/api/v1/repo/issue_dependency.go
@@ -7,13 +7,13 @@ package repo
import (
"net/http"
- "code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/api/v1/utils"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
)
@@ -77,23 +77,14 @@ func GetIssueDependencies(ctx *context.APIContext) {
return
}
- page := max(ctx.FormInt("page"), 1)
- limit := ctx.FormInt("limit")
- if limit == 0 {
- limit = setting.API.DefaultPagingNum
- } else if limit > setting.API.MaxResponseItems {
- limit = setting.API.MaxResponseItems
- }
+ listOptions := utils.GetListOptions(ctx)
canWrite := ctx.Repo.Permission.CanWriteIssuesOrPulls(issue.IsPull)
- blockerIssues := make([]*issues_model.Issue, 0, limit)
+ blockerIssues := make([]*issues_model.Issue, 0, listOptions.PageSize)
// 2. Get the issues this issue depends on, i.e. the `<#b>`: ` <- <#b>`
- blockersInfo, err := issue.BlockedByDependencies(ctx, db.ListOptions{
- Page: page,
- PageSize: limit,
- })
+ blockersInfo, total, err := issue.BlockedByDependencies(ctx, listOptions)
if err != nil {
ctx.APIErrorInternal(err)
return
@@ -149,7 +140,8 @@ func GetIssueDependencies(ctx *context.APIContext) {
}
blockerIssues = append(blockerIssues, &blocker.Issue)
}
-
+ ctx.SetLinkHeader(int(total), listOptions.PageSize)
+ ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, convert.ToAPIIssueList(ctx, ctx.Doer, blockerIssues))
}
diff --git a/routers/api/v1/repo/issue_stopwatch.go b/routers/api/v1/repo/issue_stopwatch.go
index 0f28b9757d..f9fbff091d 100644
--- a/routers/api/v1/repo/issue_stopwatch.go
+++ b/routers/api/v1/repo/issue_stopwatch.go
@@ -224,7 +224,7 @@ func GetStopwatches(ctx *context.APIContext) {
return
}
- apiSWs, err := convert.ToStopWatches(ctx, sws)
+ apiSWs, err := convert.ToStopWatches(ctx, ctx.Doer, sws)
if err != nil {
ctx.APIErrorInternal(err)
return
diff --git a/routers/api/v1/repo/issue_tracked_time.go b/routers/api/v1/repo/issue_tracked_time.go
index 171da272cc..7c1e77ccf5 100644
--- a/routers/api/v1/repo/issue_tracked_time.go
+++ b/routers/api/v1/repo/issue_tracked_time.go
@@ -356,7 +356,7 @@ func DeleteTime(ctx *context.APIContext) {
return
}
- time, err := issues_model.GetTrackedTimeByID(ctx, ctx.PathParamInt64("id"))
+ time, err := issues_model.GetTrackedTimeByID(ctx, issue.ID, ctx.PathParamInt64("id"))
if err != nil {
if db.IsErrNotExist(err) {
ctx.APIErrorNotFound(err)
diff --git a/routers/api/v1/repo/migrate.go b/routers/api/v1/repo/migrate.go
index 17259dc724..9355177fce 100644
--- a/routers/api/v1/repo/migrate.go
+++ b/routers/api/v1/repo/migrate.go
@@ -140,6 +140,7 @@ func Migrate(ctx *context.APIContext) {
}
opts := migrations.MigrateOptions{
+ OriginalURL: form.CloneAddr,
CloneAddr: remoteAddr,
RepoName: form.RepoName,
Description: form.Description,
diff --git a/routers/api/v1/repo/pull.go b/routers/api/v1/repo/pull.go
index b422c36d29..e6f4dd62ce 100644
--- a/routers/api/v1/repo/pull.go
+++ b/routers/api/v1/repo/pull.go
@@ -25,9 +25,11 @@ import (
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/routers/api/v1/utils"
"code.gitea.io/gitea/routers/common"
@@ -36,6 +38,7 @@ import (
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/convert"
"code.gitea.io/gitea/services/forms"
+ git_service "code.gitea.io/gitea/services/git"
"code.gitea.io/gitea/services/gitdiff"
issue_service "code.gitea.io/gitea/services/issue"
notify_service "code.gitea.io/gitea/services/notify"
@@ -414,20 +417,20 @@ func CreatePullRequest(ctx *context.APIContext) {
)
// Get repo/branch information
- compareResult, closer := parseCompareInfo(ctx, form)
+ compareResult, closer := parseCompareInfo(ctx, form.Base+".."+form.Head)
if ctx.Written() {
return
}
defer closer()
- if !compareResult.baseRef.IsBranch() || !compareResult.headRef.IsBranch() {
+ if !compareResult.BaseRef.IsBranch() || !compareResult.HeadRef.IsBranch() {
ctx.APIError(http.StatusUnprocessableEntity, "Invalid PullRequest: base and head must be branches")
return
}
// Check if another PR exists with the same targets
- existingPr, err := issues_model.GetUnmergedPullRequest(ctx, compareResult.headRepo.ID, ctx.Repo.Repository.ID,
- compareResult.headRef.ShortName(), compareResult.baseRef.ShortName(),
+ existingPr, err := issues_model.GetUnmergedPullRequest(ctx, compareResult.HeadRepo.ID, ctx.Repo.Repository.ID,
+ compareResult.HeadRef.ShortName(), compareResult.BaseRef.ShortName(),
issues_model.PullRequestFlowGithub,
)
if err != nil {
@@ -494,6 +497,12 @@ func CreatePullRequest(ctx *context.APIContext) {
deadlineUnix = timeutil.TimeStamp(form.Deadline.Unix())
}
+ unitPullRequest, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypePullRequests)
+ if err != nil {
+ ctx.APIErrorInternal(err)
+ return
+ }
+
prIssue := &issues_model.Issue{
RepoID: repo.ID,
Title: form.Title,
@@ -505,16 +514,18 @@ func CreatePullRequest(ctx *context.APIContext) {
DeadlineUnix: deadlineUnix,
}
pr := &issues_model.PullRequest{
- HeadRepoID: compareResult.headRepo.ID,
+ HeadRepoID: compareResult.HeadRepo.ID,
BaseRepoID: repo.ID,
- HeadBranch: compareResult.headRef.ShortName(),
- BaseBranch: compareResult.baseRef.ShortName(),
- HeadRepo: compareResult.headRepo,
+ HeadBranch: compareResult.HeadRef.ShortName(),
+ BaseBranch: compareResult.BaseRef.ShortName(),
+ HeadRepo: compareResult.HeadRepo,
BaseRepo: repo,
- MergeBase: compareResult.compareInfo.MergeBase,
+ MergeBase: compareResult.MergeBase,
Type: issues_model.PullRequestGitea,
}
+ pr.AllowMaintainerEdit = optional.FromPtr(form.AllowMaintainerEdit).ValueOrDefault(unitPullRequest.PullRequestsConfig().DefaultAllowMaintainerEdit)
+
// Get all assignee IDs
assigneeIDs, err := issues_model.MakeIDsFromAPIAssigneesToAdd(ctx, form.Assignee, form.Assignees)
if err != nil {
@@ -1057,63 +1068,32 @@ func MergePullRequest(ctx *context.APIContext) {
ctx.Status(http.StatusOK)
}
-type parseCompareInfoResult struct {
- headRepo *repo_model.Repository
- headGitRepo *git.Repository
- compareInfo *pull_service.CompareInfo
- baseRef git.RefName
- headRef git.RefName
-}
-
// parseCompareInfo returns non-nil if it succeeds, it always writes to the context and returns nil if it fails
-func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) (result *parseCompareInfoResult, closer func()) {
- var err error
- // Get compared branches information
- // format: ...[:]
- // base<-head: master...head:feature
- // same repo: master...feature
+func parseCompareInfo(ctx *context.APIContext, compareParam string) (result *git_service.CompareInfo, closer func()) {
baseRepo := ctx.Repo.Repository
- baseRefToGuess := form.Base
+ compareReq := common.ParseCompareRouterParam(compareParam)
- headUser := ctx.Repo.Owner
- headRefToGuess := form.Head
- if headInfos := strings.Split(form.Head, ":"); len(headInfos) == 1 {
- // If there is no head repository, it means pull request between same repository.
- // Do nothing here because the head variables have been assigned above.
- } else if len(headInfos) == 2 {
- // There is a head repository (the head repository could also be the same base repo)
- headRefToGuess = headInfos[1]
- headUser, err = user_model.GetUserOrOrgByName(ctx, headInfos[0])
- if err != nil {
- if user_model.IsErrUserNotExist(err) {
- ctx.APIErrorNotFound("GetUserByName")
- } else {
- ctx.APIErrorInternal(err)
- }
- return nil, nil
- }
- } else {
- ctx.APIErrorNotFound()
+ // remove the check when we support compare with carets
+ if compareReq.BaseOriRefSuffix != "" {
+ ctx.APIError(http.StatusBadRequest, "Unsupported comparison syntax: ref with suffix")
return nil, nil
}
- isSameRepo := ctx.Repo.Owner.ID == headUser.ID
-
- var headRepo *repo_model.Repository
- if isSameRepo {
- headRepo = baseRepo
- } else {
- headRepo, err = common.FindHeadRepo(ctx, baseRepo, headUser.ID)
- if err != nil {
- ctx.APIErrorInternal(err)
- return nil, nil
- }
- if headRepo == nil {
- ctx.APIErrorNotFound("head repository not found")
- return nil, nil
- }
+ _, headRepo, err := common.GetHeadOwnerAndRepo(ctx, baseRepo, compareReq)
+ switch {
+ case errors.Is(err, util.ErrInvalidArgument):
+ ctx.APIError(http.StatusBadRequest, err.Error())
+ return nil, nil
+ case errors.Is(err, util.ErrNotExist):
+ ctx.APIErrorNotFound()
+ return nil, nil
+ case err != nil:
+ ctx.APIErrorInternal(err)
+ return nil, nil
}
+ isSameRepo := baseRepo.ID == headRepo.ID
+
var headGitRepo *git.Repository
if isSameRepo {
headGitRepo = ctx.Repo.GitRepo
@@ -1140,8 +1120,8 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
}
if !permBase.CanRead(unit.TypeCode) {
- log.Trace("Permission Denied: User %-v cannot create/read pull requests or cannot read code in Repo %-v\nUser in baseRepo has Permissions: %-+v", ctx.Doer, baseRepo, permBase)
- ctx.APIErrorNotFound("Can't read pulls or can't read UnitTypeCode")
+ log.Trace("Permission Denied: User %-v cannot read code in Repo %-v\nUser in baseRepo has Permissions: %-+v", ctx.Doer, baseRepo, permBase)
+ ctx.APIErrorNotFound("can't read baseRepo UnitTypeCode")
return nil, nil
}
@@ -1158,10 +1138,10 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
return nil, nil
}
- baseRef := ctx.Repo.GitRepo.UnstableGuessRefByShortName(baseRefToGuess)
- headRef := headGitRepo.UnstableGuessRefByShortName(headRefToGuess)
+ baseRef := ctx.Repo.GitRepo.UnstableGuessRefByShortName(util.IfZero(compareReq.BaseOriRef, baseRepo.GetPullRequestTargetBranch(ctx)))
+ headRef := headGitRepo.UnstableGuessRefByShortName(util.IfZero(compareReq.HeadOriRef, headRepo.DefaultBranch))
- log.Trace("Repo path: %q, base ref: %q->%q, head ref: %q->%q", ctx.Repo.Repository.RelativePath(), baseRefToGuess, baseRef, headRefToGuess, headRef)
+ log.Trace("Repo path: %q, base ref: %q->%q, head ref: %q->%q", ctx.Repo.Repository.RelativePath(), compareReq.BaseOriRef, baseRef, compareReq.HeadOriRef, headRef)
baseRefValid := baseRef.IsBranch() || baseRef.IsTag() || git.IsStringLikelyCommitID(git.ObjectFormatFromName(ctx.Repo.Repository.ObjectFormatName), baseRef.ShortName())
headRefValid := headRef.IsBranch() || headRef.IsTag() || git.IsStringLikelyCommitID(git.ObjectFormatFromName(headRepo.ObjectFormatName), headRef.ShortName())
@@ -1171,14 +1151,13 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
return nil, nil
}
- compareInfo, err := pull_service.GetCompareInfo(ctx, baseRepo, headRepo, headGitRepo, baseRef.ShortName(), headRef.ShortName(), false, false)
+ compareInfo, err := git_service.GetCompareInfo(ctx, baseRepo, headRepo, headGitRepo, baseRef, headRef, compareReq.DirectComparison(), false)
if err != nil {
ctx.APIErrorInternal(err)
return nil, nil
}
- result = &parseCompareInfoResult{headRepo: headRepo, headGitRepo: headGitRepo, compareInfo: compareInfo, baseRef: baseRef, headRef: headRef}
- return result, closer
+ return compareInfo, closer
}
// UpdatePullRequest merge PR's baseBranch into headBranch
@@ -1343,7 +1322,7 @@ func CancelScheduledAutoMerge(ctx *context.APIContext) {
}
if ctx.Doer.ID != autoMerge.DoerID {
- allowed, err := access_model.IsUserRepoAdmin(ctx, ctx.Repo.Repository, ctx.Doer)
+ allowed, err := pull_service.IsUserAllowedToMerge(ctx, pull, ctx.Repo.Permission, ctx.Doer)
if err != nil {
ctx.APIErrorInternal(err)
return
@@ -1422,7 +1401,7 @@ func GetPullRequestCommits(ctx *context.APIContext) {
return
}
- var prInfo *pull_service.CompareInfo
+ var compareInfo *git_service.CompareInfo
baseGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.BaseRepo)
if err != nil {
ctx.APIErrorInternal(err)
@@ -1431,19 +1410,18 @@ func GetPullRequestCommits(ctx *context.APIContext) {
defer closer.Close()
if pr.HasMerged {
- prInfo, err = pull_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, pr.MergeBase, pr.GetGitHeadRefName(), false, false)
+ compareInfo, err = git_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, git.RefName(pr.MergeBase), git.RefName(pr.GetGitHeadRefName()), false, false)
} else {
- prInfo, err = pull_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, pr.BaseBranch, pr.GetGitHeadRefName(), false, false)
+ compareInfo, err = git_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, git.RefNameFromBranch(pr.BaseBranch), git.RefName(pr.GetGitHeadRefName()), false, false)
}
if err != nil {
ctx.APIErrorInternal(err)
return
}
- commits := prInfo.Commits
listOptions := utils.GetListOptions(ctx)
- totalNumberOfCommits := len(commits)
+ totalNumberOfCommits := len(compareInfo.Commits)
totalNumberOfPages := int(math.Ceil(float64(totalNumberOfCommits) / float64(listOptions.PageSize)))
userCache := make(map[string]*user_model.User)
@@ -1458,7 +1436,7 @@ func GetPullRequestCommits(ctx *context.APIContext) {
apiCommits := make([]*api.Commit, 0, limit)
for i := start; i < start+limit; i++ {
- apiCommit, err := convert.ToCommit(ctx, ctx.Repo.Repository, baseGitRepo, commits[i], userCache,
+ apiCommit, err := convert.ToCommit(ctx, ctx.Repo.Repository, baseGitRepo, compareInfo.Commits[i], userCache,
convert.ToCommitOptions{
Stat: true,
Verification: verification,
@@ -1552,11 +1530,11 @@ func GetPullRequestFiles(ctx *context.APIContext) {
baseGitRepo := ctx.Repo.GitRepo
- var prInfo *pull_service.CompareInfo
+ var compareInfo *git_service.CompareInfo
if pr.HasMerged {
- prInfo, err = pull_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, pr.MergeBase, pr.GetGitHeadRefName(), true, false)
+ compareInfo, err = git_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, git.RefName(pr.MergeBase), git.RefName(pr.GetGitHeadRefName()), false, false)
} else {
- prInfo, err = pull_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, pr.BaseBranch, pr.GetGitHeadRefName(), true, false)
+ compareInfo, err = git_service.GetCompareInfo(ctx, pr.BaseRepo, pr.BaseRepo, baseGitRepo, git.RefNameFromBranch(pr.BaseBranch), git.RefName(pr.GetGitHeadRefName()), false, false)
}
if err != nil {
ctx.APIErrorInternal(err)
@@ -1569,7 +1547,7 @@ func GetPullRequestFiles(ctx *context.APIContext) {
return
}
- startCommitID := prInfo.MergeBase
+ startCommitID := compareInfo.MergeBase
endCommitID := headCommitID
maxLines := setting.Git.MaxGitDiffLines
diff --git a/routers/api/v1/repo/pull_review.go b/routers/api/v1/repo/pull_review.go
index 3c00193fac..d4b268c009 100644
--- a/routers/api/v1/repo/pull_review.go
+++ b/routers/api/v1/repo/pull_review.go
@@ -208,6 +208,126 @@ func GetPullReviewComments(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, apiComments)
}
+// ResolvePullReviewComment resolves a review comment in a pull request
+func ResolvePullReviewComment(ctx *context.APIContext) {
+ // swagger:operation POST /repos/{owner}/{repo}/pulls/comments/{id}/resolve repository repoResolvePullReviewComment
+ // ---
+ // summary: Resolve a pull request review comment
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // - name: id
+ // in: path
+ // description: id of the review comment
+ // type: integer
+ // format: int64
+ // required: true
+ // responses:
+ // "204":
+ // "$ref": "#/responses/empty"
+ // "400":
+ // "$ref": "#/responses/validationError"
+ // "403":
+ // "$ref": "#/responses/forbidden"
+ // "404":
+ // "$ref": "#/responses/notFound"
+ updatePullReviewCommentResolve(ctx, true)
+}
+
+// UnresolvePullReviewComment unresolves a review comment in a pull request
+func UnresolvePullReviewComment(ctx *context.APIContext) {
+ // swagger:operation POST /repos/{owner}/{repo}/pulls/comments/{id}/unresolve repository repoUnresolvePullReviewComment
+ // ---
+ // summary: Unresolve a pull request review comment
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // - name: id
+ // in: path
+ // description: id of the review comment
+ // type: integer
+ // format: int64
+ // required: true
+ // responses:
+ // "204":
+ // "$ref": "#/responses/empty"
+ // "400":
+ // "$ref": "#/responses/validationError"
+ // "403":
+ // "$ref": "#/responses/forbidden"
+ // "404":
+ // "$ref": "#/responses/notFound"
+ updatePullReviewCommentResolve(ctx, false)
+}
+
+func updatePullReviewCommentResolve(ctx *context.APIContext, isResolve bool) {
+ comment := getPullReviewCommentToResolve(ctx)
+ if comment == nil {
+ return
+ }
+
+ canMarkConv, err := issues_model.CanMarkConversation(ctx, comment.Issue, ctx.Doer)
+ if err != nil {
+ ctx.APIErrorInternal(err)
+ return
+ }
+ if !canMarkConv {
+ ctx.APIError(http.StatusForbidden, "user should have permission to resolve comment")
+ return
+ }
+
+ if err = issues_model.MarkConversation(ctx, comment, ctx.Doer, isResolve); err != nil {
+ ctx.APIErrorInternal(err)
+ return
+ }
+
+ ctx.Status(http.StatusNoContent)
+}
+
+func getPullReviewCommentToResolve(ctx *context.APIContext) *issues_model.Comment {
+ comment, err := issues_model.GetCommentWithRepoID(ctx, ctx.Repo.Repository.ID, ctx.PathParamInt64("id"))
+ if err != nil {
+ if issues_model.IsErrCommentNotExist(err) {
+ ctx.APIErrorNotFound("GetCommentByID", err)
+ } else {
+ ctx.APIErrorInternal(err)
+ }
+ return nil
+ }
+
+ if !comment.Issue.IsPull {
+ ctx.APIError(http.StatusBadRequest, "comment does not belong to a pull request")
+ return nil
+ }
+
+ if comment.Type != issues_model.CommentTypeCode {
+ ctx.APIError(http.StatusBadRequest, "comment is not a review comment")
+ return nil
+ }
+
+ return comment
+}
+
// DeletePullReview delete a specific review from a pull request
func DeletePullReview(ctx *context.APIContext) {
// swagger:operation DELETE /repos/{owner}/{repo}/pulls/{index}/reviews/{id} repository repoDeletePullReview
diff --git a/routers/api/v1/repo/release_attachment.go b/routers/api/v1/repo/release_attachment.go
index 43e97beb27..5f5423fafe 100644
--- a/routers/api/v1/repo/release_attachment.go
+++ b/routers/api/v1/repo/release_attachment.go
@@ -398,7 +398,6 @@ func DeleteReleaseAttachment(ctx *context.APIContext) {
ctx.APIErrorNotFound()
return
}
- // FIXME Should prove the existence of the given repo, but results in unnecessary database requests
if err := repo_model.DeleteAttachment(ctx, attach, true); err != nil {
ctx.APIErrorInternal(err)
diff --git a/routers/api/v1/repo/status.go b/routers/api/v1/repo/status.go
index d632d5b5e1..e69d4468de 100644
--- a/routers/api/v1/repo/status.go
+++ b/routers/api/v1/repo/status.go
@@ -257,8 +257,8 @@ func GetCombinedCommitStatusByRef(ctx *context.APIContext) {
}
repo := ctx.Repo.Repository
-
- statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, refCommit.Commit.ID.String(), utils.GetListOptions(ctx))
+ listOptions := utils.GetListOptions(ctx)
+ statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, refCommit.Commit.ID.String(), listOptions)
if err != nil {
ctx.APIErrorInternal(fmt.Errorf("GetLatestCommitStatus[%s, %s]: %w", repo.FullName(), refCommit.CommitID, err))
return
@@ -269,6 +269,7 @@ func GetCombinedCommitStatusByRef(ctx *context.APIContext) {
ctx.APIErrorInternal(fmt.Errorf("CountLatestCommitStatus[%s, %s]: %w", repo.FullName(), refCommit.CommitID, err))
return
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
combiStatus := convert.ToCombinedStatus(ctx, refCommit.Commit.ID.String(), statuses,
diff --git a/routers/api/v1/repo/wiki.go b/routers/api/v1/repo/wiki.go
index baf5e0189f..90dd08394e 100644
--- a/routers/api/v1/repo/wiki.go
+++ b/routers/api/v1/repo/wiki.go
@@ -333,6 +333,7 @@ func ListWikiPages(ctx *context.APIContext) {
pages = append(pages, wiki_service.ToWikiPageMetaData(wikiName, c, ctx.Repo.Repository))
}
+ ctx.SetLinkHeader(len(entries), limit)
ctx.SetTotalCountHeader(int64(len(entries)))
ctx.JSON(http.StatusOK, pages)
}
@@ -445,6 +446,7 @@ func ListPageRevisions(ctx *context.APIContext) {
return
}
+ // FIXME: SetLinkHeader missing
ctx.SetTotalCountHeader(commitsCount)
ctx.JSON(http.StatusOK, convert.ToWikiCommitList(commitsHistory, commitsCount))
}
diff --git a/routers/api/v1/shared/action.go b/routers/api/v1/shared/action.go
index c97e9419fd..108fca787b 100644
--- a/routers/api/v1/shared/action.go
+++ b/routers/api/v1/shared/action.go
@@ -32,11 +32,12 @@ func ListJobs(ctx *context.APIContext, ownerID, repoID, runID int64) {
if ownerID != 0 && repoID != 0 {
setting.PanicInDevOrTesting("ownerID and repoID should not be both set")
}
+ listOptions := utils.GetListOptions(ctx)
opts := actions_model.FindRunJobOptions{
OwnerID: ownerID,
RepoID: repoID,
RunID: runID,
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
}
for _, status := range ctx.FormStrings("status") {
values, err := convertToInternal(status)
@@ -78,7 +79,8 @@ func ListJobs(ctx *context.APIContext, ownerID, repoID, runID int64) {
}
res.Entries[i] = convertedWorkflowJob
}
-
+ ctx.SetLinkHeader(int(total), listOptions.PageSize)
+ ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, &res)
}
@@ -120,10 +122,11 @@ func ListRuns(ctx *context.APIContext, ownerID, repoID int64) {
if ownerID != 0 && repoID != 0 {
setting.PanicInDevOrTesting("ownerID and repoID should not be both set")
}
+ listOptions := utils.GetListOptions(ctx)
opts := actions_model.FindRunOptions{
OwnerID: ownerID,
RepoID: repoID,
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
}
if event := ctx.FormString("event"); event != "" {
@@ -182,6 +185,7 @@ func ListRuns(ctx *context.APIContext, ownerID, repoID int64) {
}
res.Entries[i] = convertedRun
}
-
+ ctx.SetLinkHeader(int(total), listOptions.PageSize)
+ ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, &res)
}
diff --git a/routers/api/v1/shared/block.go b/routers/api/v1/shared/block.go
index b22f8a74fd..19ad552e20 100644
--- a/routers/api/v1/shared/block.go
+++ b/routers/api/v1/shared/block.go
@@ -16,8 +16,9 @@ import (
)
func ListBlocks(ctx *context.APIContext, blocker *user_model.User) {
+ listOptions := utils.GetListOptions(ctx)
blocks, total, err := user_model.FindBlockings(ctx, &user_model.FindBlockingOptions{
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
BlockerID: blocker.ID,
})
if err != nil {
@@ -35,6 +36,7 @@ func ListBlocks(ctx *context.APIContext, blocker *user_model.User) {
users = append(users, convert.ToUser(ctx, b.Blockee, blocker))
}
+ ctx.SetLinkHeader(int(total), listOptions.PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, &users)
}
diff --git a/routers/api/v1/user/action.go b/routers/api/v1/user/action.go
index e934d02aa7..069d5e39b6 100644
--- a/routers/api/v1/user/action.go
+++ b/routers/api/v1/user/action.go
@@ -333,10 +333,10 @@ func ListVariables(ctx *context.APIContext) {
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
-
+ listOptions := utils.GetListOptions(ctx)
vars, count, err := db.FindAndCount[actions_model.ActionVariable](ctx, &actions_model.FindVariablesOpts{
OwnerID: ctx.Doer.ID,
- ListOptions: utils.GetListOptions(ctx),
+ ListOptions: listOptions,
})
if err != nil {
ctx.APIErrorInternal(err)
@@ -354,6 +354,7 @@ func ListVariables(ctx *context.APIContext) {
}
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, variables)
}
diff --git a/routers/api/v1/user/follower.go b/routers/api/v1/user/follower.go
index 339b994af4..48c624ced9 100644
--- a/routers/api/v1/user/follower.go
+++ b/routers/api/v1/user/follower.go
@@ -24,12 +24,14 @@ func responseAPIUsers(ctx *context.APIContext, users []*user_model.User) {
}
func listUserFollowers(ctx *context.APIContext, u *user_model.User) {
- users, count, err := user_model.GetUserFollowers(ctx, u, ctx.Doer, utils.GetListOptions(ctx))
+ listOptions := utils.GetListOptions(ctx)
+ users, count, err := user_model.GetUserFollowers(ctx, u, ctx.Doer, listOptions)
if err != nil {
ctx.APIErrorInternal(err)
return
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
responseAPIUsers(ctx, users)
}
@@ -88,12 +90,14 @@ func ListFollowers(ctx *context.APIContext) {
}
func listUserFollowing(ctx *context.APIContext, u *user_model.User) {
- users, count, err := user_model.GetUserFollowing(ctx, u, ctx.Doer, utils.GetListOptions(ctx))
+ listOptions := utils.GetListOptions(ctx)
+ users, count, err := user_model.GetUserFollowing(ctx, u, ctx.Doer, listOptions)
if err != nil {
ctx.APIErrorInternal(err)
return
}
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
responseAPIUsers(ctx, users)
}
diff --git a/routers/api/v1/user/key.go b/routers/api/v1/user/key.go
index aa69245e49..de0ac7b1e4 100644
--- a/routers/api/v1/user/key.go
+++ b/routers/api/v1/user/key.go
@@ -1,4 +1,5 @@
// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package user
@@ -53,11 +54,11 @@ func composePublicKeysAPILink() string {
func listPublicKeys(ctx *context.APIContext, user *user_model.User) {
var keys []*asymkey_model.PublicKey
var err error
- var count int
+ var count int64
fingerprint := ctx.FormString("fingerprint")
username := ctx.PathParam("username")
-
+ listOptions := utils.GetListOptions(ctx)
if fingerprint != "" {
var userID int64 // Unrestricted
// Querying not just listing
@@ -65,20 +66,18 @@ func listPublicKeys(ctx *context.APIContext, user *user_model.User) {
// Restrict to provided uid
userID = user.ID
}
- keys, err = db.Find[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
+ keys, count, err = db.FindAndCount[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
+ ListOptions: listOptions,
OwnerID: userID,
Fingerprint: fingerprint,
})
- count = len(keys)
} else {
- var total int64
// Use ListPublicKeys
- keys, total, err = db.FindAndCount[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
- ListOptions: utils.GetListOptions(ctx),
+ keys, count, err = db.FindAndCount[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
+ ListOptions: listOptions,
OwnerID: user.ID,
NotKeytype: asymkey_model.KeyTypePrincipal,
})
- count = int(total)
}
if err != nil {
@@ -95,7 +94,8 @@ func listPublicKeys(ctx *context.APIContext, user *user_model.User) {
}
}
- ctx.SetTotalCountHeader(int64(count))
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
+ ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, &apiKeys)
}
@@ -211,7 +211,7 @@ func CreateUserPublicKey(ctx *context.APIContext, form api.CreateKeyOption, uid
return
}
- key, err := asymkey_model.AddPublicKey(ctx, uid, form.Title, content, 0)
+ key, err := asymkey_model.AddPublicKey(ctx, uid, form.Title, content, 0, false)
if err != nil {
repo.HandleAddKeyError(ctx, err)
return
diff --git a/routers/api/v1/user/star.go b/routers/api/v1/user/star.go
index ee5d63063b..5c0d976527 100644
--- a/routers/api/v1/user/star.go
+++ b/routers/api/v1/user/star.go
@@ -76,6 +76,7 @@ func GetStarredRepos(ctx *context.APIContext) {
return
}
+ ctx.SetLinkHeader(ctx.ContextUser.NumStars, utils.GetListOptions(ctx).PageSize)
ctx.SetTotalCountHeader(int64(ctx.ContextUser.NumStars))
ctx.JSON(http.StatusOK, &repos)
}
@@ -107,6 +108,7 @@ func GetMyStarredRepos(ctx *context.APIContext) {
ctx.APIErrorInternal(err)
}
+ ctx.SetLinkHeader(ctx.Doer.NumStars, utils.GetListOptions(ctx).PageSize)
ctx.SetTotalCountHeader(int64(ctx.Doer.NumStars))
ctx.JSON(http.StatusOK, &repos)
}
diff --git a/routers/api/v1/user/watch.go b/routers/api/v1/user/watch.go
index 844eac2c67..1ce0f3f529 100644
--- a/routers/api/v1/user/watch.go
+++ b/routers/api/v1/user/watch.go
@@ -71,6 +71,7 @@ func GetWatchedRepos(ctx *context.APIContext) {
ctx.APIErrorInternal(err)
}
+ ctx.SetLinkHeader(int(total), utils.GetListOptions(ctx).PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, &repos)
}
@@ -99,7 +100,7 @@ func GetMyWatchedRepos(ctx *context.APIContext) {
if err != nil {
ctx.APIErrorInternal(err)
}
-
+ ctx.SetLinkHeader(int(total), utils.GetListOptions(ctx).PageSize)
ctx.SetTotalCountHeader(total)
ctx.JSON(http.StatusOK, &repos)
}
diff --git a/routers/api/v1/utils/hook.go b/routers/api/v1/utils/hook.go
index 6f598f14c8..9f0447a80b 100644
--- a/routers/api/v1/utils/hook.go
+++ b/routers/api/v1/utils/hook.go
@@ -23,8 +23,9 @@ import (
// ListOwnerHooks lists the webhooks of the provided owner
func ListOwnerHooks(ctx *context.APIContext, owner *user_model.User) {
+ listOptions := GetListOptions(ctx)
opts := &webhook.ListWebhookOptions{
- ListOptions: GetListOptions(ctx),
+ ListOptions: listOptions,
OwnerID: owner.ID,
}
@@ -42,7 +43,7 @@ func ListOwnerHooks(ctx *context.APIContext, owner *user_model.User) {
return
}
}
-
+ ctx.SetLinkHeader(int(count), listOptions.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, apiHooks)
}
diff --git a/routers/api/v1/utils/page.go b/routers/api/v1/utils/page.go
index 024ba7b8d9..2c7b64967a 100644
--- a/routers/api/v1/utils/page.go
+++ b/routers/api/v1/utils/page.go
@@ -12,7 +12,7 @@ import (
// GetListOptions returns list options using the page and limit parameters
func GetListOptions(ctx *context.APIContext) db.ListOptions {
return db.ListOptions{
- Page: ctx.FormInt("page"),
+ Page: max(ctx.FormInt("page"), 1),
PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
}
}
diff --git a/routers/common/blockexpensive.go b/routers/common/blockexpensive.go
index ebec0a2e5b..fec364351c 100644
--- a/routers/common/blockexpensive.go
+++ b/routers/common/blockexpensive.go
@@ -24,7 +24,7 @@ func BlockExpensive() func(next http.Handler) http.Handler {
ret := determineRequestPriority(reqctx.FromContext(req.Context()))
if !ret.SignedIn {
if ret.Expensive || ret.LongPolling {
- http.Redirect(w, req, setting.AppSubURL+"/user/login", http.StatusSeeOther)
+ http.Redirect(w, req, middleware.RedirectLinkUserLogin(req), http.StatusSeeOther)
return
}
}
diff --git a/routers/common/compare.go b/routers/common/compare.go
index be689bbdb5..7e917c4df8 100644
--- a/routers/common/compare.go
+++ b/routers/common/compare.go
@@ -5,22 +5,94 @@ package common
import (
"context"
+ "strings"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
- pull_service "code.gitea.io/gitea/services/pull"
+ "code.gitea.io/gitea/modules/util"
)
-// CompareInfo represents the collected results from ParseCompareInfo
-type CompareInfo struct {
- HeadUser *user_model.User
- HeadRepo *repo_model.Repository
- HeadGitRepo *git.Repository
- CompareInfo *pull_service.CompareInfo
- BaseBranch string
- HeadBranch string
- DirectComparison bool
+type CompareRouterReq struct {
+ BaseOriRef string
+ BaseOriRefSuffix string
+
+ CompareSeparator string
+
+ HeadOwner string
+ HeadRepoName string
+ HeadOriRef string
+}
+
+func (cr *CompareRouterReq) DirectComparison() bool {
+ // FIXME: the design of "DirectComparison" is wrong, it loses the information of `^`
+ // To correctly handle the comparison, developers should use `ci.CompareSeparator` directly, all "DirectComparison" related code should be rewritten.
+ return cr.CompareSeparator == ".."
+}
+
+func parseHead(head string) (headOwnerName, headRepoName, headRef string) {
+ paths := strings.SplitN(head, ":", 2)
+ if len(paths) == 1 {
+ return "", "", paths[0]
+ }
+ ownerRepo := strings.SplitN(paths[0], "/", 2)
+ if len(ownerRepo) == 1 {
+ return paths[0], "", paths[1]
+ }
+ return ownerRepo[0], ownerRepo[1], paths[1]
+}
+
+// ParseCompareRouterParam Get compare information from the router parameter.
+// A full compare url is of the form:
+//
+// 0. /{:baseOwner}/{:baseRepoName}/compare
+// 1. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headBranch}
+// 2. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}:{:headBranch}
+// 3. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}/{:headRepoName}:{:headBranch}
+// 4. /{:baseOwner}/{:baseRepoName}/compare/{:headBranch}
+// 5. /{:baseOwner}/{:baseRepoName}/compare/{:headOwner}:{:headBranch}
+// 6. /{:baseOwner}/{:baseRepoName}/compare/{:headOwner}/{:headRepoName}:{:headBranch}
+//
+// Here we obtain the infoPath "{:baseBranch}...[{:headOwner}/{:headRepoName}:]{:headBranch}" as ctx.PathParam("*")
+// with the :baseRepo in ctx.Repo.
+//
+// Note: Generally :headRepoName is not provided here - we are only passed :headOwner.
+//
+// How do we determine the :headRepo?
+//
+// 1. If :headOwner is not set then the :headRepo = :baseRepo
+// 2. If :headOwner is set - then look for the fork of :baseRepo owned by :headOwner
+// 3. But... :baseRepo could be a fork of :headOwner's repo - so check that
+// 4. Now, :baseRepo and :headRepos could be forks of the same repo - so check that
+//
+// format: <base branch>...[<head owner>[/<head repo>]:]<head branch>
+// base<-head: master...head:feature
+// same repo: master...feature
+func ParseCompareRouterParam(routerParam string) *CompareRouterReq {
+ if routerParam == "" {
+ return &CompareRouterReq{}
+ }
+
+ sep := "..."
+ basePart, headPart, ok := strings.Cut(routerParam, sep)
+ if !ok {
+ sep = ".."
+ basePart, headPart, ok = strings.Cut(routerParam, sep)
+ if !ok {
+ headOwnerName, headRepoName, headRef := parseHead(routerParam)
+ return &CompareRouterReq{
+ HeadOriRef: headRef,
+ HeadOwner: headOwnerName,
+ HeadRepoName: headRepoName,
+ CompareSeparator: "...",
+ }
+ }
+ }
+
+ ci := &CompareRouterReq{CompareSeparator: sep}
+ ci.BaseOriRef, ci.BaseOriRefSuffix = git.ParseRefSuffix(basePart)
+ ci.HeadOwner, ci.HeadRepoName, ci.HeadOriRef = parseHead(headPart)
+ return ci
}
// maxForkTraverseLevel defines the maximum levels to traverse when searching for the head repository.
@@ -45,9 +117,51 @@ func FindHeadRepo(ctx context.Context, baseRepo *repo_model.Repository, headUser
return findHeadRepoFromRootBase(ctx, baseRepo, headUserID, maxForkTraverseLevel)
}
+func GetHeadOwnerAndRepo(ctx context.Context, baseRepo *repo_model.Repository, compareReq *CompareRouterReq) (headOwner *user_model.User, headRepo *repo_model.Repository, err error) {
+ if compareReq.HeadOwner == "" {
+ if compareReq.HeadRepoName != "" { // unsupported syntax
+ return nil, nil, util.ErrorWrap(util.ErrInvalidArgument, "head owner must be specified when head repo name is given")
+ }
+
+ return baseRepo.Owner, baseRepo, nil
+ }
+
+ if compareReq.HeadOwner == baseRepo.Owner.Name {
+ headOwner = baseRepo.Owner
+ } else {
+ headOwner, err = user_model.GetUserOrOrgByName(ctx, compareReq.HeadOwner)
+ if err != nil {
+ return nil, nil, err
+ }
+ }
+ if compareReq.HeadRepoName == "" {
+ if headOwner.ID == baseRepo.OwnerID {
+ headRepo = baseRepo
+ } else {
+ headRepo, err = FindHeadRepo(ctx, baseRepo, headOwner.ID)
+ if err != nil {
+ return nil, nil, err
+ }
+ if headRepo == nil {
+ return nil, nil, util.ErrorWrap(util.ErrInvalidArgument, "the user %s does not have a fork of the base repository", headOwner.Name)
+ }
+ }
+ } else {
+ if compareReq.HeadOwner == baseRepo.Owner.Name && compareReq.HeadRepoName == baseRepo.Name {
+ headRepo = baseRepo
+ } else {
+ headRepo, err = repo_model.GetRepositoryByName(ctx, headOwner.ID, compareReq.HeadRepoName)
+ if err != nil {
+ return nil, nil, err
+ }
+ }
+ }
+ return headOwner, headRepo, nil
+}
+
func findHeadRepoFromRootBase(ctx context.Context, baseRepo *repo_model.Repository, headUserID int64, traverseLevel int) (*repo_model.Repository, error) {
if traverseLevel == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
// test if we are lucky
repo, err := repo_model.GetUserFork(ctx, baseRepo.ID, headUserID)
@@ -71,5 +185,5 @@ func findHeadRepoFromRootBase(ctx context.Context, baseRepo *repo_model.Reposito
return forked, nil
}
}
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
diff --git a/routers/common/compare_test.go b/routers/common/compare_test.go
new file mode 100644
index 0000000000..e4e24a03cf
--- /dev/null
+++ b/routers/common/compare_test.go
@@ -0,0 +1,105 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package common
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCompareRouterReq(t *testing.T) {
+ cases := []struct {
+ input string
+ CompareRouterReq *CompareRouterReq
+ }{
+ {
+ input: "",
+ CompareRouterReq: &CompareRouterReq{},
+ },
+ {
+ input: "v1.0...v1.1",
+ CompareRouterReq: &CompareRouterReq{
+ BaseOriRef: "v1.0",
+ CompareSeparator: "...",
+ HeadOriRef: "v1.1",
+ },
+ },
+ {
+ input: "main..develop",
+ CompareRouterReq: &CompareRouterReq{
+ BaseOriRef: "main",
+ CompareSeparator: "..",
+ HeadOriRef: "develop",
+ },
+ },
+ {
+ input: "main^...develop",
+ CompareRouterReq: &CompareRouterReq{
+ BaseOriRef: "main",
+ BaseOriRefSuffix: "^",
+ CompareSeparator: "...",
+ HeadOriRef: "develop",
+ },
+ },
+ {
+ input: "main^^^^^...develop",
+ CompareRouterReq: &CompareRouterReq{
+ BaseOriRef: "main",
+ BaseOriRefSuffix: "^^^^^",
+ CompareSeparator: "...",
+ HeadOriRef: "develop",
+ },
+ },
+ {
+ input: "develop",
+ CompareRouterReq: &CompareRouterReq{
+ CompareSeparator: "...",
+ HeadOriRef: "develop",
+ },
+ },
+ {
+ input: "teabot:feature1",
+ CompareRouterReq: &CompareRouterReq{
+ CompareSeparator: "...",
+ HeadOwner: "teabot",
+ HeadOriRef: "feature1",
+ },
+ },
+ {
+ input: "lunny/forked_repo:develop",
+ CompareRouterReq: &CompareRouterReq{
+ CompareSeparator: "...",
+ HeadOwner: "lunny",
+ HeadRepoName: "forked_repo",
+ HeadOriRef: "develop",
+ },
+ },
+ {
+ input: "main...lunny/forked_repo:develop",
+ CompareRouterReq: &CompareRouterReq{
+ BaseOriRef: "main",
+ CompareSeparator: "...",
+ HeadOwner: "lunny",
+ HeadRepoName: "forked_repo",
+ HeadOriRef: "develop",
+ },
+ },
+ {
+ input: "main^...lunny/forked_repo:develop",
+ CompareRouterReq: &CompareRouterReq{
+ BaseOriRef: "main",
+ BaseOriRefSuffix: "^",
+ CompareSeparator: "...",
+ HeadOwner: "lunny",
+ HeadRepoName: "forked_repo",
+ HeadOriRef: "develop",
+ },
+ },
+ }
+
+ for _, c := range cases {
+ assert.Equal(t, c.CompareRouterReq, ParseCompareRouterParam(c.input), "input: %s", c.input)
+ }
+}
diff --git a/routers/common/errpage.go b/routers/common/errpage.go
index 4caef92d14..4d24914bd2 100644
--- a/routers/common/errpage.go
+++ b/routers/common/errpage.go
@@ -4,8 +4,11 @@
package common
import (
+ "bytes"
"fmt"
+ "io"
"net/http"
+ "strings"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/httpcache"
@@ -19,6 +22,36 @@ import (
const tplStatus500 templates.TplName = "status/500"
+func renderServerErrorPage(w http.ResponseWriter, req *http.Request, respCode int, tmpl templates.TplName, ctxData map[string]any, plainMsg string) {
+ acceptsHTML := false
+ for _, part := range req.Header["Accept"] {
+ if strings.Contains(part, "text/html") {
+ acceptsHTML = true
+ break
+ }
+ }
+
+ httpcache.SetCacheControlInHeader(w.Header(), &httpcache.CacheControlOptions{NoTransform: true})
+ w.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions)
+
+ tmplCtx := context.NewTemplateContext(req.Context(), req)
+ tmplCtx["Locale"] = middleware.Locale(w, req)
+
+ w.WriteHeader(respCode)
+
+ outBuf := &bytes.Buffer{}
+ if acceptsHTML {
+ err := templates.PageRenderer().HTML(outBuf, respCode, tmpl, ctxData, tmplCtx)
+ if err != nil {
+ _, _ = w.Write([]byte("Internal server error but failed to render error page template, please collect error logs and report to Gitea issue tracker"))
+ return
+ }
+ } else {
+ outBuf.WriteString(plainMsg)
+ }
+ _, _ = io.Copy(w, outBuf)
+}
+
// RenderPanicErrorPage renders a 500 page, and it never panics
func RenderPanicErrorPage(w http.ResponseWriter, req *http.Request, err any) {
combinedErr := fmt.Sprintf("%v\n%s", err, log.Stack(2))
@@ -32,24 +65,14 @@ func RenderPanicErrorPage(w http.ResponseWriter, req *http.Request, err any) {
routing.UpdatePanicError(req.Context(), err)
- httpcache.SetCacheControlInHeader(w.Header(), &httpcache.CacheControlOptions{NoTransform: true})
- w.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions)
-
- tmplCtx := context.NewTemplateContext(req.Context(), req)
- tmplCtx["Locale"] = middleware.Locale(w, req)
+ plainMsg := "Internal Server Error"
ctxData := middleware.GetContextData(req.Context())
-
// This recovery handler could be called without Gitea's web context, so we shouldn't touch that context too much.
// Otherwise, the 500-page may cause new panics, eg: cache.GetContextWithData, it makes the developer&users couldn't find the original panic.
user, _ := ctxData[middleware.ContextDataKeySignedUser].(*user_model.User)
if !setting.IsProd || (user != nil && user.IsAdmin) {
- ctxData["ErrorMsg"] = "PANIC: " + combinedErr
- }
-
- err = templates.HTMLRenderer().HTML(w, http.StatusInternalServerError, tplStatus500, ctxData, tmplCtx)
- if err != nil {
- log.Error("Error occurs again when rendering error page: %v", err)
- w.WriteHeader(http.StatusInternalServerError)
- _, _ = w.Write([]byte("Internal server error, please collect error logs and report to Gitea issue tracker"))
+ plainMsg = "PANIC: " + combinedErr
+ ctxData["ErrorMsg"] = plainMsg
}
+ renderServerErrorPage(w, req, http.StatusInternalServerError, tplStatus500, ctxData, plainMsg)
}
diff --git a/routers/common/errpage_test.go b/routers/common/errpage_test.go
index 33aa6bb339..c50d45c296 100644
--- a/routers/common/errpage_test.go
+++ b/routers/common/errpage_test.go
@@ -18,19 +18,28 @@ import (
)
func TestRenderPanicErrorPage(t *testing.T) {
- w := httptest.NewRecorder()
- req := &http.Request{URL: &url.URL{}}
- req = req.WithContext(reqctx.NewRequestContextForTest(t.Context()))
- RenderPanicErrorPage(w, req, errors.New("fake panic error (for test only)"))
- respContent := w.Body.String()
- assert.Contains(t, respContent, `class="page-content status-page-500"`)
- assert.Contains(t, respContent, `</html>`)
- assert.Contains(t, respContent, `lang="en-US"`) // make sure the locale work
+ t.Run("HTML", func(t *testing.T) {
+ w := httptest.NewRecorder()
+ req := &http.Request{URL: &url.URL{}, Header: http.Header{"Accept": []string{"text/html"}}}
+ req = req.WithContext(reqctx.NewRequestContextForTest(t.Context()))
+ RenderPanicErrorPage(w, req, errors.New("fake panic error (for test only)"))
+ respContent := w.Body.String()
+ assert.Contains(t, respContent, `class="page-content status-page-500"`)
+ assert.Contains(t, respContent, `</html>`)
+ assert.Contains(t, respContent, `lang="en-US"`) // make sure the locale work
- // the 500 page doesn't have normal pages footer, it makes it easier to distinguish a normal page and a failed page.
- // especially when a sub-template causes page error, the HTTP response code is still 200,
- // the different "footer" is the only way to know whether a page is fully rendered without error.
- assert.False(t, test.IsNormalPageCompleted(respContent))
+ // the 500 page doesn't have normal pages footer, it makes it easier to distinguish a normal page and a failed page.
+ // especially when a sub-template causes page error, the HTTP response code is still 200,
+ // the different "footer" is the only way to know whether a page is fully rendered without error.
+ assert.False(t, test.IsNormalPageCompleted(respContent))
+ })
+ t.Run("Plain", func(t *testing.T) {
+ w := httptest.NewRecorder()
+ req := &http.Request{URL: &url.URL{}}
+ req = req.WithContext(reqctx.NewRequestContextForTest(t.Context()))
+ renderServiceUnavailable(w, req)
+ assert.Equal(t, "Service Unavailable", w.Body.String())
+ })
}
func TestMain(m *testing.M) {
diff --git a/routers/common/markup.go b/routers/common/markup.go
index 00b2dd07c6..e189bcdecf 100644
--- a/routers/common/markup.go
+++ b/routers/common/markup.go
@@ -14,6 +14,7 @@ import (
"code.gitea.io/gitea/models/renderhelper"
"code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/httplib"
+ "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/markup/markdown"
"code.gitea.io/gitea/modules/setting"
@@ -31,7 +32,7 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur
// and the urlPathContext is "/gitea/owner/repo/src/branch/features/feat-123/doc"
if mode == "" || mode == "markdown" {
- // raw markdown doesn't need any special handling
+ // raw Markdown doesn't need any special handling
baseLink := urlPathContext
if baseLink == "" {
baseLink = fmt.Sprintf("%s%s", httplib.GuessCurrentHostURL(ctx), urlPathContext)
@@ -39,7 +40,8 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur
rctx := renderhelper.NewRenderContextSimpleDocument(ctx, baseLink).WithUseAbsoluteLink(true).
WithMarkupType(markdown.MarkupName)
if err := markdown.RenderRaw(rctx, strings.NewReader(text), ctx.Resp); err != nil {
- ctx.HTTPError(http.StatusInternalServerError, err.Error())
+ log.Error("RenderMarkupRaw: %v", err)
+ ctx.HTTPError(http.StatusInternalServerError, "failed to render raw markup")
}
return
}
@@ -92,7 +94,7 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur
})
rctx = rctx.WithMarkupType("").WithRelativePath(filePath) // render the repo file content by its extension
default:
- ctx.HTTPError(http.StatusUnprocessableEntity, "Unknown mode: "+mode)
+ ctx.HTTPError(http.StatusUnprocessableEntity, "unsupported render mode: "+mode)
return
}
rctx = rctx.WithUseAbsoluteLink(true)
@@ -100,7 +102,8 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur
if errors.Is(err, util.ErrInvalidArgument) {
ctx.HTTPError(http.StatusUnprocessableEntity, err.Error())
} else {
- ctx.HTTPError(http.StatusInternalServerError, err.Error())
+ log.Error("RenderMarkup: %v", err)
+ ctx.HTTPError(http.StatusInternalServerError, "failed to render markup")
}
return
}
diff --git a/routers/common/qos.go b/routers/common/qos.go
index 0670ea0b4c..96f23b64fe 100644
--- a/routers/common/qos.go
+++ b/routers/common/qos.go
@@ -14,7 +14,6 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/web/middleware"
- giteacontext "code.gitea.io/gitea/services/context"
"github.com/bohde/codel"
"github.com/go-chi/chi/v5"
@@ -119,27 +118,6 @@ func requestPriority(ctx context.Context) Priority {
// renderServiceUnavailable will render an HTTP 503 Service
// Unavailable page, providing HTML if the client accepts it.
func renderServiceUnavailable(w http.ResponseWriter, req *http.Request) {
- acceptsHTML := false
- for _, part := range req.Header["Accept"] {
- if strings.Contains(part, "text/html") {
- acceptsHTML = true
- break
- }
- }
-
- // If the client doesn't accept HTML, then render a plain text response
- if !acceptsHTML {
- http.Error(w, "503 Service Unavailable", http.StatusServiceUnavailable)
- return
- }
-
- tmplCtx := giteacontext.NewTemplateContext(req.Context(), req)
- tmplCtx["Locale"] = middleware.Locale(w, req)
ctxData := middleware.GetContextData(req.Context())
- err := templates.HTMLRenderer().HTML(w, http.StatusServiceUnavailable, tplStatus503, ctxData, tmplCtx)
- if err != nil {
- log.Error("Error occurs again when rendering service unavailable page: %v", err)
- w.WriteHeader(http.StatusInternalServerError)
- _, _ = w.Write([]byte("Internal server error, please collect error logs and report to Gitea issue tracker"))
- }
+ renderServerErrorPage(w, req, http.StatusServiceUnavailable, tplStatus503, ctxData, "Service Unavailable")
}
diff --git a/routers/common/qos_test.go b/routers/common/qos_test.go
index 850a5f51db..17dc9cb30c 100644
--- a/routers/common/qos_test.go
+++ b/routers/common/qos_test.go
@@ -4,7 +4,6 @@
package common
import (
- "net/http"
"testing"
user_model "code.gitea.io/gitea/models/user"
@@ -62,30 +61,3 @@ func TestRequestPriority(t *testing.T) {
})
}
}
-
-func TestRenderServiceUnavailable(t *testing.T) {
- t.Run("HTML", func(t *testing.T) {
- ctx, resp := contexttest.MockContext(t, "")
- ctx.Req.Header.Set("Accept", "text/html")
-
- renderServiceUnavailable(resp, ctx.Req)
- assert.Equal(t, http.StatusServiceUnavailable, resp.Code)
- assert.Contains(t, resp.Header().Get("Content-Type"), "text/html")
-
- body := resp.Body.String()
- assert.Contains(t, body, `lang="en-US"`)
- assert.Contains(t, body, "503 Service Unavailable")
- })
-
- t.Run("plain", func(t *testing.T) {
- ctx, resp := contexttest.MockContext(t, "")
- ctx.Req.Header.Set("Accept", "text/plain")
-
- renderServiceUnavailable(resp, ctx.Req)
- assert.Equal(t, http.StatusServiceUnavailable, resp.Code)
- assert.Contains(t, resp.Header().Get("Content-Type"), "text/plain")
-
- body := resp.Body.String()
- assert.Contains(t, body, "503 Service Unavailable")
- })
-}
diff --git a/routers/init.go b/routers/init.go
index 859b00ebb2..82a5378263 100644
--- a/routers/init.go
+++ b/routers/init.go
@@ -15,7 +15,6 @@ import (
"code.gitea.io/gitea/modules/eventsource"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
- "code.gitea.io/gitea/modules/highlight"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/markup/external"
@@ -24,7 +23,6 @@ import (
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/svg"
"code.gitea.io/gitea/modules/system"
- "code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
@@ -132,7 +130,6 @@ func InitWebInstalled(ctx context.Context) {
mustInit(uinotification.Init)
mustInitCtx(ctx, archiver.Init)
- highlight.NewContext()
external.RegisterRenderers()
markup.Init(markup_service.FormalRenderHelperFuncs())
@@ -182,7 +179,6 @@ func InitWebInstalled(ctx context.Context) {
// NormalRoutes represents non install routes
func NormalRoutes() *web.Router {
- _ = templates.HTMLRenderer()
r := web.NewRouter()
r.Use(common.ProtocolMiddlewares()...)
diff --git a/routers/install/install.go b/routers/install/install.go
index c5acf968bd..399128b6ed 100644
--- a/routers/install/install.go
+++ b/routers/install/install.go
@@ -24,11 +24,9 @@ import (
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
- "code.gitea.io/gitea/modules/reqctx"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/timeutil"
- "code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/modules/user"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/modules/web/middleware"
@@ -37,8 +35,6 @@ import (
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/forms"
"code.gitea.io/gitea/services/versioned_migration"
-
- "gitea.com/go-chi/session"
)
const (
@@ -55,29 +51,13 @@ func getSupportedDbTypeNames() (dbTypeNames []map[string]string) {
return dbTypeNames
}
-// installContexter prepare for rendering installation page
func installContexter() func(next http.Handler) http.Handler {
- rnd := templates.HTMLRenderer()
- dbTypeNames := getSupportedDbTypeNames()
- envConfigKeys := setting.CollectEnvConfigKeys()
- return func(next http.Handler) http.Handler {
- return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
- base := context.NewBaseContext(resp, req)
- ctx := context.NewWebContext(base, rnd, session.GetSession(req))
- ctx.Data.MergeFrom(middleware.CommonTemplateContextData())
- ctx.Data.MergeFrom(reqctx.ContextData{
- "Title": ctx.Locale.Tr("install.install"),
- "PageIsInstall": true,
- "DbTypeNames": dbTypeNames,
- "EnvConfigKeys": envConfigKeys,
- "CustomConfFile": setting.CustomConf,
- "AllLangs": translation.AllLangs(),
-
- "PasswordHashAlgorithms": hash.RecommendedHashAlgorithms,
- })
- next.ServeHTTP(resp, ctx.Req)
- })
- }
+ return context.ContexterInstallPage(map[string]any{
+ "DbTypeNames": getSupportedDbTypeNames(),
+ "EnvConfigKeys": setting.CollectEnvConfigKeys(),
+ "CustomConfFile": setting.CustomConf,
+ "PasswordHashAlgorithms": hash.RecommendedHashAlgorithms,
+ })
}
// Install render installation page
diff --git a/routers/private/hook_pre_receive.go b/routers/private/hook_pre_receive.go
index 88e8b466f1..39955116c4 100644
--- a/routers/private/hook_pre_receive.go
+++ b/routers/private/hook_pre_receive.go
@@ -191,7 +191,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID string, r
// 2. Disallow force pushes to protected branches
if oldCommitID != objectFormat.EmptyObjectID().String() {
- output, err := gitrepo.RunCmdString(ctx,
+ output, _, err := gitrepo.RunCmdString(ctx,
repo,
gitcmd.NewCommand("rev-list", "--max-count=1").
AddDynamicArguments(oldCommitID, "^"+newCommitID).
diff --git a/routers/private/hook_verification.go b/routers/private/hook_verification.go
index 9c357f4b41..fd26ba89e2 100644
--- a/routers/private/hook_verification.go
+++ b/routers/private/hook_verification.go
@@ -5,9 +5,7 @@ package private
import (
"bufio"
- "context"
"io"
- "os"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
@@ -18,16 +16,6 @@ import (
// This file contains commit verification functions for refs passed across in hooks
func verifyCommits(oldCommitID, newCommitID string, repo *git.Repository, env []string) error {
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- log.Error("Unable to create os.Pipe for %s", repo.Path)
- return err
- }
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
-
var command *gitcmd.Command
objectFormat, _ := repo.GetObjectFormat()
if oldCommitID == objectFormat.EmptyObjectID().String() {
@@ -39,18 +27,14 @@ func verifyCommits(oldCommitID, newCommitID string, repo *git.Repository, env []
command = gitcmd.NewCommand("rev-list").AddDynamicArguments(oldCommitID + "..." + newCommitID)
}
// This is safe as force pushes are already forbidden
- err = command.WithEnv(env).
+ stdoutReader, stdoutReaderClose := command.MakeStdoutPipe()
+ defer stdoutReaderClose()
+
+ err := command.WithEnv(env).
WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
err := readAndVerifyCommitsFromShaReader(stdoutReader, repo, env)
- if err != nil {
- log.Error("readAndVerifyCommitsFromShaReader failed: %v", err)
- cancel()
- }
- _ = stdoutReader.Close()
- return err
+ return ctx.CancelPipeline(err)
}).
Run(repo.Ctx)
if err != nil && !isErrUnverifiedCommit(err) {
@@ -72,34 +56,21 @@ func readAndVerifyCommitsFromShaReader(input io.ReadCloser, repo *git.Repository
}
func readAndVerifyCommit(sha string, repo *git.Repository, env []string) error {
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- log.Error("Unable to create pipe for %s: %v", repo.Path, err)
- return err
- }
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
-
commitID := git.MustIDFromString(sha)
+ cmd := gitcmd.NewCommand("cat-file", "commit").AddDynamicArguments(sha)
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
- return gitcmd.NewCommand("cat-file", "commit").AddDynamicArguments(sha).
- WithEnv(env).
+ return cmd.WithEnv(env).
WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
commit, err := git.CommitFromReader(repo, commitID, stdoutReader)
if err != nil {
return err
}
verification := asymkey_service.ParseCommitWithSignature(ctx, commit)
if !verification.Verified {
- cancel()
- return &errUnverifiedCommit{
- commit.ID.String(),
- }
+ return ctx.CancelPipeline(&errUnverifiedCommit{commit.ID.String()})
}
return nil
}).
diff --git a/routers/private/manager.go b/routers/private/manager.go
index 00e52d6511..b84919d180 100644
--- a/routers/private/manager.go
+++ b/routers/private/manager.go
@@ -21,7 +21,7 @@ import (
// ReloadTemplates reloads all the templates
func ReloadTemplates(ctx *context.PrivateContext) {
- err := templates.ReloadHTMLTemplates()
+ err := templates.ReloadAllTemplates()
if err != nil {
ctx.JSON(http.StatusInternalServerError, private.Response{
UserMsg: fmt.Sprintf("Template error: %v", err),
diff --git a/routers/web/admin/auths.go b/routers/web/admin/auths.go
index fb1a7d9524..3407789f2f 100644
--- a/routers/web/admin/auths.go
+++ b/routers/web/admin/auths.go
@@ -136,6 +136,7 @@ func parseLDAPConfig(form forms.AuthenticationForm) *ldap.Source {
AttributesInBind: form.AttributesInBind,
AttributeSSHPublicKey: form.AttributeSSHPublicKey,
AttributeAvatar: form.AttributeAvatar,
+ SSHKeysAreVerified: form.SSHKeysAreVerified,
SearchPageSize: pageSize,
Filter: form.Filter,
GroupsEnabled: form.GroupsEnabled,
diff --git a/routers/web/admin/repos.go b/routers/web/admin/repos.go
index 1bc8abb88c..424219815c 100644
--- a/routers/web/admin/repos.go
+++ b/routers/web/admin/repos.go
@@ -11,10 +11,10 @@ import (
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
- "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/web/explore"
"code.gitea.io/gitea/services/context"
repo_service "code.gitea.io/gitea/services/repository"
@@ -134,12 +134,12 @@ func AdoptOrDeleteRepository(ctx *context.Context) {
ctx.ServerError("IsRepositoryExist", err)
return
}
- isDir, err := util.IsDir(repo_model.RepoPath(ctxUser.Name, repoName))
+ exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(repo_model.RelativePath(ctxUser.Name, repoName)))
if err != nil {
ctx.ServerError("IsDir", err)
return
}
- if has || !isDir {
+ if has || !exist {
// Fallthrough to failure mode
} else if action == "adopt" {
if _, err := repo_service.AdoptRepository(ctx, ctx.Doer, ctxUser, repo_service.CreateRepoOptions{
diff --git a/routers/web/auth/2fa.go b/routers/web/auth/2fa.go
index 1f087a7897..a19c9d7aca 100644
--- a/routers/web/auth/2fa.go
+++ b/routers/web/auth/2fa.go
@@ -26,7 +26,7 @@ var (
func TwoFactor(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("twofa")
- if CheckAutoLogin(ctx) {
+ if performAutoLogin(ctx) {
return
}
@@ -99,7 +99,7 @@ func TwoFactorPost(ctx *context.Context) {
func TwoFactorScratch(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("twofa_scratch")
- if CheckAutoLogin(ctx) {
+ if performAutoLogin(ctx) {
return
}
@@ -151,7 +151,7 @@ func TwoFactorScratchPost(ctx *context.Context) {
return
}
- handleSignInFull(ctx, u, remember, false)
+ handleSignInFull(ctx, u, remember)
if ctx.Written() {
return
}
diff --git a/routers/web/auth/auth.go b/routers/web/auth/auth.go
index d36fb5bab7..bc0939d92a 100644
--- a/routers/web/auth/auth.go
+++ b/routers/web/auth/auth.go
@@ -9,6 +9,7 @@ import (
"fmt"
"html/template"
"net/http"
+ "net/url"
"strings"
"code.gitea.io/gitea/models/auth"
@@ -126,20 +127,47 @@ func resetLocale(ctx *context.Context, u *user_model.User) error {
return nil
}
-func RedirectAfterLogin(ctx *context.Context) {
+func rememberAuthRedirectLink(ctx *context.Context) {
redirectTo := ctx.FormString("redirect_to")
if redirectTo == "" {
- redirectTo = ctx.GetSiteCookie("redirect_to")
+ if ref, err := url.Parse(ctx.Req.Referer()); err == nil && httplib.IsCurrentGiteaSiteURL(ctx, ctx.Req.Referer()) {
+ // the request paths starting with "/user/" are either:
+ // * auth related pages: don't redirect back to them
+ // * user settings pages: they have "require sign-in" protection already, no "referer redirect" would happen
+ skipRefererRedirect := strings.HasPrefix(ref.Path, setting.AppSubURL+"/user/")
+ if !skipRefererRedirect {
+ redirectTo = ref.RequestURI()
+ }
+ }
}
- middleware.DeleteRedirectToCookie(ctx.Resp)
- nextRedirectTo := setting.AppSubURL + string(setting.LandingPageURL)
- if setting.LandingPageURL == setting.LandingPageLogin {
- nextRedirectTo = setting.AppSubURL + "/" // do not cycle-redirect to the login page
+ if redirectTo != "" {
+ middleware.SetRedirectToCookie(ctx.Resp, redirectTo)
}
- ctx.RedirectToCurrentSite(redirectTo, nextRedirectTo)
}
-func CheckAutoLogin(ctx *context.Context) bool {
+func consumeAuthRedirectLink(ctx *context.Context) string {
+ redirects := []string{ctx.FormString("redirect_to"), middleware.GetRedirectToCookie(ctx.Req)}
+ middleware.DeleteRedirectToCookie(ctx.Resp)
+ if setting.LandingPageURL == setting.LandingPageLogin {
+ redirects = append(redirects, setting.AppSubURL+"/") // do not cycle-redirect to the login page
+ } else {
+ redirects = append(redirects, setting.AppSubURL+string(setting.LandingPageURL))
+ }
+ for _, link := range redirects {
+ if link != "" && httplib.IsCurrentGiteaSiteURL(ctx, link) {
+ return link
+ }
+ }
+ return setting.AppSubURL + "/"
+}
+
+func redirectAfterAuth(ctx *context.Context) {
+ ctx.RedirectToCurrentSite(consumeAuthRedirectLink(ctx))
+}
+
+func performAutoLogin(ctx *context.Context) bool {
+ rememberAuthRedirectLink(ctx)
+
isSucceed, err := autoSignIn(ctx) // try to auto-login
if err != nil {
if errors.Is(err, auth_service.ErrAuthTokenInvalidHash) {
@@ -150,13 +178,8 @@ func CheckAutoLogin(ctx *context.Context) bool {
return true
}
- redirectTo := ctx.FormString("redirect_to")
- if len(redirectTo) > 0 {
- middleware.SetRedirectToCookie(ctx.Resp, redirectTo)
- }
-
if isSucceed {
- RedirectAfterLogin(ctx)
+ redirectAfterAuth(ctx)
return true
}
@@ -181,11 +204,11 @@ func prepareSignInPageData(ctx *context.Context) {
// SignIn render sign in page
func SignIn(ctx *context.Context) {
- if CheckAutoLogin(ctx) {
+ if performAutoLogin(ctx) {
return
}
if ctx.IsSigned {
- RedirectAfterLogin(ctx)
+ redirectAfterAuth(ctx)
return
}
prepareSignInPageData(ctx)
@@ -295,19 +318,19 @@ func SignInPost(ctx *context.Context) {
// This handles the final part of the sign-in process of the user.
func handleSignIn(ctx *context.Context, u *user_model.User, remember bool) {
- redirect := handleSignInFull(ctx, u, remember, true)
+ handleSignInFull(ctx, u, remember)
if ctx.Written() {
return
}
- ctx.Redirect(redirect)
+ redirectAfterAuth(ctx)
}
-func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRedirect bool) string {
+func handleSignInFull(ctx *context.Context, u *user_model.User, remember bool) {
if remember {
nt, token, err := auth_service.CreateAuthTokenForUserID(ctx, u.ID)
if err != nil {
ctx.ServerError("CreateAuthTokenForUserID", err)
- return setting.AppSubURL + "/"
+ return
}
ctx.SetSiteCookie(setting.CookieRememberName, nt.ID+":"+token, setting.LogInRememberDays*timeutil.Day)
@@ -316,7 +339,7 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe
userHasTwoFactorAuth, err := auth.HasTwoFactorOrWebAuthn(ctx, u.ID)
if err != nil {
ctx.ServerError("HasTwoFactorOrWebAuthn", err)
- return setting.AppSubURL + "/"
+ return
}
if err := updateSession(ctx, []string{
@@ -335,7 +358,7 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe
session.KeyUserHasTwoFactorAuth: userHasTwoFactorAuth,
}); err != nil {
ctx.ServerError("RegenerateSession", err)
- return setting.AppSubURL + "/"
+ return
}
// Language setting of the user overwrites the one previously set
@@ -346,7 +369,7 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe
}
if err := user_service.UpdateUser(ctx, u, opts); err != nil {
ctx.ServerError("UpdateUser Language", fmt.Errorf("Error updating user language [user: %d, locale: %s]", u.ID, ctx.Locale.Language()))
- return setting.AppSubURL + "/"
+ return
}
}
@@ -359,21 +382,8 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe
// Register last login
if err := user_service.UpdateUser(ctx, u, &user_service.UpdateOptions{SetLastLogin: true}); err != nil {
ctx.ServerError("UpdateUser", err)
- return setting.AppSubURL + "/"
+ return
}
-
- if redirectTo := ctx.GetSiteCookie("redirect_to"); redirectTo != "" && httplib.IsCurrentGiteaSiteURL(ctx, redirectTo) {
- middleware.DeleteRedirectToCookie(ctx.Resp)
- if obeyRedirect {
- ctx.RedirectToCurrentSite(redirectTo)
- }
- return redirectTo
- }
-
- if obeyRedirect {
- ctx.Redirect(setting.AppSubURL + "/")
- }
- return setting.AppSubURL + "/"
}
// extractUserNameFromOAuth2 tries to extract a normalized username from the given OAuth2 user.
@@ -436,10 +446,7 @@ func SignUp(ctx *context.Context) {
// Show Disabled Registration message if DisableRegistration or AllowOnlyExternalRegistration options are true
ctx.Data["DisableRegistration"] = setting.Service.DisableRegistration || setting.Service.AllowOnlyExternalRegistration
- redirectTo := ctx.FormString("redirect_to")
- if len(redirectTo) > 0 {
- middleware.SetRedirectToCookie(ctx.Resp, redirectTo)
- }
+ rememberAuthRedirectLink(ctx)
ctx.HTML(http.StatusOK, tplSignUp)
}
@@ -817,13 +824,7 @@ func handleAccountActivation(ctx *context.Context, user *user_model.User) {
}
ctx.Flash.Success(ctx.Tr("auth.account_activated"))
- if redirectTo := ctx.GetSiteCookie("redirect_to"); len(redirectTo) > 0 {
- middleware.DeleteRedirectToCookie(ctx.Resp)
- ctx.RedirectToCurrentSite(redirectTo)
- return
- }
-
- ctx.Redirect(setting.AppSubURL + "/")
+ redirectAfterAuth(ctx)
}
// ActivateEmail render the activate email page
diff --git a/routers/web/auth/oauth.go b/routers/web/auth/oauth.go
index 5eab7ffeb4..b96ea17bc3 100644
--- a/routers/web/auth/oauth.go
+++ b/routers/web/auth/oauth.go
@@ -21,7 +21,6 @@ import (
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/session"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/web/middleware"
source_service "code.gitea.io/gitea/services/auth/source"
"code.gitea.io/gitea/services/auth/source/oauth2"
"code.gitea.io/gitea/services/context"
@@ -42,10 +41,7 @@ func SignInOAuth(ctx *context.Context) {
return
}
- redirectTo := ctx.FormString("redirect_to")
- if len(redirectTo) > 0 {
- middleware.SetRedirectToCookie(ctx.Resp, redirectTo)
- }
+ rememberAuthRedirectLink(ctx)
// try to do a direct callback flow, so we don't authenticate the user again but use the valid accesstoken to get the user
user, gothUser, err := oAuth2UserLoginCallback(ctx, authSource, ctx.Req, ctx.Resp)
@@ -398,13 +394,7 @@ func handleOAuth2SignIn(ctx *context.Context, authSource *auth.Source, u *user_m
return
}
- if redirectTo := ctx.GetSiteCookie("redirect_to"); len(redirectTo) > 0 {
- middleware.DeleteRedirectToCookie(ctx.Resp)
- ctx.RedirectToCurrentSite(redirectTo)
- return
- }
-
- ctx.Redirect(setting.AppSubURL + "/")
+ redirectAfterAuth(ctx)
return
}
diff --git a/routers/web/auth/oauth2_provider.go b/routers/web/auth/oauth2_provider.go
index 02e1a50285..e09469526e 100644
--- a/routers/web/auth/oauth2_provider.go
+++ b/routers/web/auth/oauth2_provider.go
@@ -230,8 +230,7 @@ func AuthorizeOAuth(ctx *context.Context) {
// pkce support
switch form.CodeChallengeMethod {
- case "S256":
- case "plain":
+ case "S256", "plain":
if err := ctx.Session.Set("CodeChallengeMethod", form.CodeChallengeMethod); err != nil {
handleAuthorizeError(ctx, AuthorizeError{
ErrorCode: ErrorCodeServerError,
diff --git a/routers/web/auth/oauth_signin_sync.go b/routers/web/auth/oauth_signin_sync.go
index 86d1966024..2f7a8eab58 100644
--- a/routers/web/auth/oauth_signin_sync.go
+++ b/routers/web/auth/oauth_signin_sync.go
@@ -86,7 +86,7 @@ func oauth2UpdateSSHPubIfNeed(ctx *context.Context, authSource *auth.Source, got
if err != nil {
return err
}
- if !asymkey_model.SynchronizePublicKeys(ctx, user, authSource, sshKeys) {
+ if !asymkey_model.SynchronizePublicKeys(ctx, user, authSource, sshKeys, false) {
return nil
}
return asymkey_service.RewriteAllPublicKeys(ctx)
diff --git a/routers/web/auth/openid.go b/routers/web/auth/openid.go
index 4ef4c96ccc..948e65366e 100644
--- a/routers/web/auth/openid.go
+++ b/routers/web/auth/openid.go
@@ -35,7 +35,7 @@ func SignInOpenID(ctx *context.Context) {
return
}
- if CheckAutoLogin(ctx) {
+ if performAutoLogin(ctx) {
return
}
diff --git a/routers/web/auth/password.go b/routers/web/auth/password.go
index 537ad4b994..61c6119470 100644
--- a/routers/web/auth/password.go
+++ b/routers/web/auth/password.go
@@ -16,7 +16,6 @@ import (
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/web"
- "code.gitea.io/gitea/modules/web/middleware"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/forms"
"code.gitea.io/gitea/services/mailer"
@@ -236,7 +235,7 @@ func ResetPasswdPost(ctx *context.Context) {
return
}
- handleSignInFull(ctx, u, remember, false)
+ handleSignInFull(ctx, u, remember)
if ctx.Written() {
return
}
@@ -308,11 +307,5 @@ func MustChangePasswordPost(ctx *context.Context) {
log.Trace("User updated password: %s", ctx.Doer.Name)
- if redirectTo := ctx.GetSiteCookie("redirect_to"); redirectTo != "" {
- middleware.DeleteRedirectToCookie(ctx.Resp)
- ctx.RedirectToCurrentSite(redirectTo)
- return
- }
-
- ctx.Redirect(setting.AppSubURL + "/")
+ redirectAfterAuth(ctx)
}
diff --git a/routers/web/auth/webauthn.go b/routers/web/auth/webauthn.go
index dacb6be225..cae726b8bf 100644
--- a/routers/web/auth/webauthn.go
+++ b/routers/web/auth/webauthn.go
@@ -26,7 +26,7 @@ var tplWebAuthn templates.TplName = "user/auth/webauthn"
func WebAuthn(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("twofa")
- if CheckAutoLogin(ctx) {
+ if performAutoLogin(ctx) {
return
}
@@ -156,12 +156,8 @@ func WebAuthnPasskeyLogin(ctx *context.Context) {
}
remember := false // TODO: implement remember me
- redirect := handleSignInFull(ctx, user, remember, false)
- if redirect == "" {
- redirect = setting.AppSubURL + "/"
- }
-
- ctx.JSONRedirect(redirect)
+ handleSignInFull(ctx, user, remember)
+ ctx.JSONRedirect(consumeAuthRedirectLink(ctx))
}
// WebAuthnLoginAssertion submits a WebAuthn challenge to the browser
@@ -274,11 +270,7 @@ func WebAuthnLoginAssertionPost(ctx *context.Context) {
}
remember := ctx.Session.Get("twofaRemember").(bool)
- redirect := handleSignInFull(ctx, user, remember, false)
- if redirect == "" {
- redirect = setting.AppSubURL + "/"
- }
+ handleSignInFull(ctx, user, remember)
_ = ctx.Session.Delete("twofaUid")
-
- ctx.JSONRedirect(redirect)
+ ctx.JSONRedirect(consumeAuthRedirectLink(ctx))
}
diff --git a/routers/web/devtest/mail_preview.go b/routers/web/devtest/mail_preview.go
index d6bade15d7..7b1787d52b 100644
--- a/routers/web/devtest/mail_preview.go
+++ b/routers/web/devtest/mail_preview.go
@@ -8,6 +8,7 @@ import (
"strings"
"code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/mailer"
@@ -34,17 +35,18 @@ func MailPreviewRender(ctx *context.Context) {
func prepareMailPreviewRender(ctx *context.Context, tmplName string) {
tmplSubject := mailer.LoadedTemplates().SubjectTemplates.Lookup(tmplName)
- if tmplSubject == nil {
- ctx.Data["RenderMailSubject"] = "default subject"
- } else {
+ // FIXME: MAIL-TEMPLATE-SUBJECT: only "issue" related messages support using subject from templates
+ subject := "(default subject)"
+ if tmplSubject != nil {
var buf strings.Builder
err := tmplSubject.Execute(&buf, nil)
if err != nil {
- ctx.Data["RenderMailSubject"] = err.Error()
+ subject = "ERROR: " + err.Error()
} else {
- ctx.Data["RenderMailSubject"] = buf.String()
+ subject = util.IfZero(buf.String(), subject)
}
}
+ ctx.Data["RenderMailSubject"] = subject
ctx.Data["RenderMailTemplateName"] = tmplName
}
diff --git a/routers/web/feed/convert.go b/routers/web/feed/convert.go
index 7c59132841..a5c379e01a 100644
--- a/routers/web/feed/convert.go
+++ b/routers/web/feed/convert.go
@@ -158,16 +158,16 @@ func feedActionsToFeedItems(ctx *context.Context, actions activities_model.Actio
if link.Href == "#" {
link.Href = srcLink
}
- titleExtra = ctx.Locale.Tr("action.mirror_sync_push", act.GetRepoAbsoluteLink(ctx), srcLink, act.GetBranch(), act.ShortRepoPath(ctx))
+ titleExtra = ctx.Locale.Tr("action.mirror_sync_push", act.GetRepoAbsoluteLink(ctx), srcLink, act.RefName, act.ShortRepoPath(ctx))
case activities_model.ActionMirrorSyncCreate:
srcLink := toSrcLink(ctx, act)
if link.Href == "#" {
link.Href = srcLink
}
- titleExtra = ctx.Locale.Tr("action.mirror_sync_create", act.GetRepoAbsoluteLink(ctx), srcLink, act.GetBranch(), act.ShortRepoPath(ctx))
+ titleExtra = ctx.Locale.Tr("action.mirror_sync_create", act.GetRepoAbsoluteLink(ctx), srcLink, act.RefName, act.ShortRepoPath(ctx))
case activities_model.ActionMirrorSyncDelete:
link.Href = act.GetRepoAbsoluteLink(ctx)
- titleExtra = ctx.Locale.Tr("action.mirror_sync_delete", act.GetRepoAbsoluteLink(ctx), act.GetBranch(), act.ShortRepoPath(ctx))
+ titleExtra = ctx.Locale.Tr("action.mirror_sync_delete", act.GetRepoAbsoluteLink(ctx), act.RefName, act.ShortRepoPath(ctx))
case activities_model.ActionApprovePullRequest:
pullLink := toPullLink(ctx, act)
titleExtra = ctx.Locale.Tr("action.approve_pull_request", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
diff --git a/routers/web/feed/profile_test.go b/routers/web/feed/profile_test.go
index a0f1509269..bf9492b57b 100644
--- a/routers/web/feed/profile_test.go
+++ b/routers/web/feed/profile_test.go
@@ -23,7 +23,6 @@ func TestCheckGetOrgFeedsAsOrgMember(t *testing.T) {
ctx, resp := contexttest.MockContext(t, "org3.atom")
ctx.ContextUser = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
contexttest.LoadUser(t, ctx, 2)
- ctx.IsSigned = true
feed.ShowUserFeedAtom(ctx)
assert.Contains(t, resp.Body.String(), "") // Should contain 1 private entry
})
@@ -31,7 +30,6 @@ func TestCheckGetOrgFeedsAsOrgMember(t *testing.T) {
ctx, resp := contexttest.MockContext(t, "org3.atom")
ctx.ContextUser = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
contexttest.LoadUser(t, ctx, 5)
- ctx.IsSigned = true
feed.ShowUserFeedAtom(ctx)
assert.NotContains(t, resp.Body.String(), "") // Should not contain any entries
})
diff --git a/routers/web/githttp.go b/routers/web/githttp.go
index ed3c56b07b..43d318c1a1 100644
--- a/routers/web/githttp.go
+++ b/routers/web/githttp.go
@@ -10,9 +10,12 @@ import (
)
func addOwnerRepoGitHTTPRouters(m *web.Router) {
+ // Some users want to use "web-based git client" to access Gitea's repositories,
+ // so the CORS handler and OPTIONS method are used.
m.Group("/{username}/{reponame}", func() {
m.Methods("POST,OPTIONS", "/git-upload-pack", repo.ServiceUploadPack)
m.Methods("POST,OPTIONS", "/git-receive-pack", repo.ServiceReceivePack)
+ m.Methods("POST,OPTIONS", "/git-upload-archive", repo.ServiceUploadArchive)
m.Methods("GET,OPTIONS", "/info/refs", repo.GetInfoRefs)
m.Methods("GET,OPTIONS", "/HEAD", repo.GetTextFile("HEAD"))
m.Methods("GET,OPTIONS", "/objects/info/alternates", repo.GetTextFile("objects/info/alternates"))
diff --git a/routers/web/healthcheck/check.go b/routers/web/healthcheck/check.go
index 85f47613f0..0eea1e1ff7 100644
--- a/routers/web/healthcheck/check.go
+++ b/routers/web/healthcheck/check.go
@@ -64,6 +64,15 @@ type componentStatus struct {
}
// Check is the health check API handler
+//
+// HINT: HEALTH-CHECK-ENDPOINT: there is no clear definition about what "health" means.
+// In most cases, end users don't need to check such endpoint, because even if database is down,
+// Gitea will recover after database is up again. Sysop should monitor database and cache status directly.
+//
+// And keep in mind: this health check should NEVER be used as a "restart" trigger, for example: Docker's "HEALTHCHECK".
+// * If Gitea is upgrading and migrating database, there will be a long time before this endpoint starts to return "pass" status.
+// In this case, if the checker restarts Gitea just because it doesn't get "pass" status in short time,
+// the instance will just be restarted again and again before the migration finishes and the situation just gets worse.
func Check(w http.ResponseWriter, r *http.Request) {
rsp := response{
Status: pass,
diff --git a/routers/web/org/projects.go b/routers/web/org/projects.go
index d524409c41..e01e615de6 100644
--- a/routers/web/org/projects.go
+++ b/routers/web/org/projects.go
@@ -13,7 +13,7 @@ import (
issues_model "code.gitea.io/gitea/models/issues"
org_model "code.gitea.io/gitea/models/organization"
project_model "code.gitea.io/gitea/models/project"
- attachment_model "code.gitea.io/gitea/models/repo"
+ repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/optional"
@@ -25,6 +25,8 @@ import (
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/forms"
project_service "code.gitea.io/gitea/services/projects"
+
+ "xorm.io/builder"
)
const (
@@ -205,22 +207,24 @@ func ChangeProjectStatus(ctx *context.Context) {
}
id := ctx.PathParamInt64("id")
- if err := project_model.ChangeProjectStatusByRepoIDAndID(ctx, 0, id, toClose); err != nil {
- ctx.NotFoundOrServerError("ChangeProjectStatusByRepoIDAndID", project_model.IsErrProjectNotExist, err)
- return
- }
- ctx.JSONRedirect(project_model.ProjectLinkForOrg(ctx.ContextUser, id))
-}
-
-// DeleteProject delete a project
-func DeleteProject(ctx *context.Context) {
- p, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id"))
+ project, err := project_model.GetProjectByIDAndOwner(ctx, id, ctx.ContextUser.ID)
if err != nil {
ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return
}
- if p.OwnerID != ctx.ContextUser.ID {
- ctx.NotFound(nil)
+
+ if err := project_model.ChangeProjectStatusByRepoIDAndID(ctx, 0, project.ID, toClose); err != nil {
+ ctx.NotFoundOrServerError("ChangeProjectStatusByRepoIDAndID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ ctx.JSONRedirect(project_model.ProjectLinkForOrg(ctx.ContextUser, project.ID))
+}
+
+// DeleteProject delete a project
+func DeleteProject(ctx *context.Context) {
+ p, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID)
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return
}
@@ -246,15 +250,11 @@ func RenderEditProject(ctx *context.Context) {
return
}
- p, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id"))
+ p, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID)
if err != nil {
ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return
}
- if p.OwnerID != ctx.ContextUser.ID {
- ctx.NotFound(nil)
- return
- }
ctx.Data["projectID"] = p.ID
ctx.Data["title"] = p.Title
@@ -288,15 +288,11 @@ func EditProjectPost(ctx *context.Context) {
return
}
- p, err := project_model.GetProjectByID(ctx, projectID)
+ p, err := project_model.GetProjectByIDAndOwner(ctx, projectID, ctx.ContextUser.ID)
if err != nil {
ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return
}
- if p.OwnerID != ctx.ContextUser.ID {
- ctx.NotFound(nil)
- return
- }
p.Title = form.Title
p.Description = form.Content
@@ -316,15 +312,12 @@ func EditProjectPost(ctx *context.Context) {
// ViewProject renders the project with board view for a project
func ViewProject(ctx *context.Context) {
- project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id"))
+ project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID)
if err != nil {
ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return
}
- if project.OwnerID != ctx.ContextUser.ID {
- ctx.NotFound(nil)
- return
- }
+
if err := project.LoadOwner(ctx); err != nil {
ctx.ServerError("LoadOwner", err)
return
@@ -341,12 +334,26 @@ func ViewProject(ctx *context.Context) {
return
}
assigneeID := ctx.FormString("assignee")
+ milestoneID := ctx.FormInt64("milestone")
+
+ // Prepare milestone IDs for filtering
+ var milestoneIDs []int64
+ if milestoneID > 0 {
+ milestoneIDs = []int64{milestoneID}
+ } else if milestoneID == db.NoConditionID {
+ milestoneIDs = []int64{db.NoConditionID}
+ }
opts := issues_model.IssuesOptions{
- LabelIDs: preparedLabelFilter.SelectedLabelIDs,
- AssigneeID: assigneeID,
- Owner: project.Owner,
- Doer: ctx.Doer,
+ LabelIDs: preparedLabelFilter.SelectedLabelIDs,
+ AssigneeID: assigneeID,
+ MilestoneIDs: milestoneIDs,
+ Owner: project.Owner,
+ }
+ if ctx.Doer != nil {
+ opts.Doer = ctx.Doer
+ } else {
+ opts.AllPublic = true
}
issuesMap, err := project_service.LoadIssuesFromProject(ctx, project, &opts)
@@ -359,10 +366,10 @@ func ViewProject(ctx *context.Context) {
}
if project.CardType != project_model.CardTypeTextOnly {
- issuesAttachmentMap := make(map[int64][]*attachment_model.Attachment)
+ issuesAttachmentMap := make(map[int64][]*repo_model.Attachment)
for _, issuesList := range issuesMap {
for _, issue := range issuesList {
- if issueAttachment, err := attachment_model.GetAttachmentsByIssueIDImagesLatest(ctx, issue.ID); err == nil {
+ if issueAttachment, err := repo_model.GetAttachmentsByIssueIDImagesLatest(ctx, issue.ID); err == nil {
issuesAttachmentMap[issue.ID] = issueAttachment
}
}
@@ -420,6 +427,42 @@ func ViewProject(ctx *context.Context) {
ctx.Data["Labels"] = labels
ctx.Data["NumLabels"] = len(labels)
+ // Get milestones for filtering
+ // For organization projects, we need to get milestones from all repos the user has access to
+ var milestones issues_model.MilestoneList
+ if project.RepoID > 0 {
+ // Repo-specific project
+ milestones, err = db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ RepoID: project.RepoID,
+ })
+ if err != nil {
+ ctx.ServerError("GetRepoMilestones", err)
+ return
+ }
+ } else {
+ // Organization-wide project - get milestones from all organization repos
+ // but only from repositories the current user can access.
+ // Use RepoCond with a subquery to avoid materializing all repo IDs in memory
+ // which can hit SQL parameter limits for orgs with many repos.
+ accessCond := repo_model.AccessibleRepositoryCondition(ctx.Doer, unit.TypeIssues)
+ repoCond := builder.And(
+ builder.Eq{"owner_id": project.OwnerID},
+ accessCond,
+ )
+ milestones, err = db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ RepoCond: repoCond,
+ })
+ if err != nil {
+ ctx.ServerError("GetOrgMilestones", err)
+ return
+ }
+ }
+
+ openMilestones, closedMilestones := milestones.SplitByOpenClosed()
+ ctx.Data["OpenMilestones"] = openMilestones
+ ctx.Data["ClosedMilestones"] = closedMilestones
+ ctx.Data["MilestoneID"] = milestoneID
+
// Get assignees.
assigneeUsers, err := org_model.GetOrgAssignees(ctx, project.OwnerID)
if err != nil {
@@ -455,28 +498,15 @@ func DeleteProjectColumn(ctx *context.Context) {
return
}
- project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id"))
+ project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID)
if err != nil {
ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return
}
- pb, err := project_model.GetColumn(ctx, ctx.PathParamInt64("columnID"))
+ _, err = project_model.GetColumnByIDAndProjectID(ctx, ctx.PathParamInt64("columnID"), project.ID)
if err != nil {
- ctx.ServerError("GetProjectColumn", err)
- return
- }
- if pb.ProjectID != ctx.PathParamInt64("id") {
- ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
- "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", pb.ID, project.ID),
- })
- return
- }
-
- if project.OwnerID != ctx.ContextUser.ID {
- ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
- "message": fmt.Sprintf("ProjectColumn[%d] is not in Owner[%d] as expected", pb.ID, ctx.ContextUser.ID),
- })
+ ctx.NotFoundOrServerError("GetColumnByIDAndProjectID", project_model.IsErrProjectColumnNotExist, err)
return
}
@@ -492,7 +522,7 @@ func DeleteProjectColumn(ctx *context.Context) {
func AddColumnToProjectPost(ctx *context.Context) {
form := web.GetForm(ctx).(*forms.EditProjectColumnForm)
- project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id"))
+ project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID)
if err != nil {
ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return
@@ -520,30 +550,18 @@ func CheckProjectColumnChangePermissions(ctx *context.Context) (*project_model.P
return nil, nil
}
- project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id"))
+ project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID)
if err != nil {
ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return nil, nil
}
- column, err := project_model.GetColumn(ctx, ctx.PathParamInt64("columnID"))
+ column, err := project_model.GetColumnByIDAndProjectID(ctx, ctx.PathParamInt64("columnID"), project.ID)
if err != nil {
- ctx.ServerError("GetProjectColumn", err)
- return nil, nil
- }
- if column.ProjectID != ctx.PathParamInt64("id") {
- ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
- "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", column.ID, project.ID),
- })
+ ctx.NotFoundOrServerError("GetColumnByIDAndProjectID", project_model.IsErrProjectColumnNotExist, err)
return nil, nil
}
- if project.OwnerID != ctx.ContextUser.ID {
- ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
- "message": fmt.Sprintf("ProjectColumn[%d] is not in Repository[%d] as expected", column.ID, project.ID),
- })
- return nil, nil
- }
return project, column
}
@@ -595,24 +613,15 @@ func MoveIssues(ctx *context.Context) {
return
}
- project, err := project_model.GetProjectByID(ctx, ctx.PathParamInt64("id"))
+ project, err := project_model.GetProjectByIDAndOwner(ctx, ctx.PathParamInt64("id"), ctx.ContextUser.ID)
if err != nil {
ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
return
}
- if project.OwnerID != ctx.ContextUser.ID {
- ctx.NotFound(nil)
- return
- }
- column, err := project_model.GetColumn(ctx, ctx.PathParamInt64("columnID"))
+ column, err := project_model.GetColumnByIDAndProjectID(ctx, ctx.PathParamInt64("columnID"), project.ID)
if err != nil {
- ctx.NotFoundOrServerError("GetProjectColumn", project_model.IsErrProjectColumnNotExist, err)
- return
- }
-
- if column.ProjectID != project.ID {
- ctx.NotFound(nil)
+ ctx.NotFoundOrServerError("GetColumnByIDAndProjectID", project_model.IsErrProjectColumnNotExist, err)
return
}
diff --git a/routers/web/org/projects_test.go b/routers/web/org/projects_test.go
index c3a769e621..63bcefb6e2 100644
--- a/routers/web/org/projects_test.go
+++ b/routers/web/org/projects_test.go
@@ -4,11 +4,14 @@
package org_test
import (
+ "net/http"
"testing"
"code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/routers/web/org"
"code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
"github.com/stretchr/testify/assert"
)
@@ -26,3 +29,30 @@ func TestCheckProjectColumnChangePermissions(t *testing.T) {
assert.NotNil(t, column)
assert.False(t, ctx.Written())
}
+
+func TestChangeProjectStatusRejectsForeignProjects(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ // project 4 is owned by user2 not user1
+ ctx, _ := contexttest.MockContext(t, "user1/-/projects/4/close")
+ contexttest.LoadUser(t, ctx, 1)
+ ctx.ContextUser = ctx.Doer
+ ctx.SetPathParam("action", "close")
+ ctx.SetPathParam("id", "4")
+
+ org.ChangeProjectStatus(ctx)
+
+ assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus())
+}
+
+func TestAddColumnToProjectPostRejectsForeignProjects(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user1/-/projects/4/columns/new")
+ contexttest.LoadUser(t, ctx, 1)
+ ctx.ContextUser = ctx.Doer
+ ctx.SetPathParam("id", "4")
+ web.SetForm(ctx, &forms.EditProjectColumnForm{Title: "foreign"})
+
+ org.AddColumnToProjectPost(ctx)
+
+ assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus())
+}
diff --git a/routers/web/projects/workflows.go b/routers/web/projects/workflows.go
index a1a732d60a..7dafd3fbee 100644
--- a/routers/web/projects/workflows.go
+++ b/routers/web/projects/workflows.go
@@ -49,7 +49,7 @@ func convertFormToFilters(ctx stdCtx.Context, project *project_model.Project, fo
if strValue, ok := value.(string); ok && strValue != "" {
strValueInt, _ := strconv.ParseInt(strValue, 10, 64)
if strValueInt > 0 {
- col, _ := project_model.GetColumnByProjectIDAndColumnID(ctx, project.ID, strValueInt)
+ col, _ := project_model.GetColumnByIDAndProjectID(ctx, strValueInt, project.ID)
if col == nil {
continue
}
@@ -83,7 +83,7 @@ func convertFormToActions(ctx stdCtx.Context, project *project_model.Project, fo
if colValue, ok := value.(string); ok {
colValueInt, _ := strconv.ParseInt(colValue, 10, 64)
if colValueInt > 0 {
- col, _ := project_model.GetColumnByProjectIDAndColumnID(ctx, project.ID, colValueInt)
+ col, _ := project_model.GetColumnByIDAndProjectID(ctx, colValueInt, project.ID)
if col == nil {
continue
}
diff --git a/routers/web/repo/actions/view.go b/routers/web/repo/actions/view.go
index cc70cd4e06..195df464b8 100644
--- a/routers/web/repo/actions/view.go
+++ b/routers/web/repo/actions/view.go
@@ -27,6 +27,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/routers/common"
@@ -302,7 +303,7 @@ func ViewPost(ctx *context_module.Context) {
resp.State.CurrentJob.Steps = make([]*ViewJobStep, 0) // marshal to '[]' instead fo 'null' in json
resp.Logs.StepsLog = make([]*ViewStepLog, 0) // marshal to '[]' instead fo 'null' in json
if task != nil {
- steps, logs, err := convertToViewModel(ctx, req.LogCursors, task)
+ steps, logs, err := convertToViewModel(ctx, ctx.Locale, req.LogCursors, task)
if err != nil {
ctx.ServerError("convertToViewModel", err)
return
@@ -314,7 +315,7 @@ func ViewPost(ctx *context_module.Context) {
ctx.JSON(http.StatusOK, resp)
}
-func convertToViewModel(ctx *context_module.Context, cursors []LogCursor, task *actions_model.ActionTask) ([]*ViewJobStep, []*ViewStepLog, error) {
+func convertToViewModel(ctx context.Context, locale translation.Locale, cursors []LogCursor, task *actions_model.ActionTask) ([]*ViewJobStep, []*ViewStepLog, error) {
var viewJobs []*ViewJobStep
var logs []*ViewStepLog
@@ -344,7 +345,7 @@ func convertToViewModel(ctx *context_module.Context, cursors []LogCursor, task *
Lines: []*ViewStepLogLine{
{
Index: 1,
- Message: ctx.Locale.TrString("actions.runs.expire_log_message"),
+ Message: locale.TrString("actions.runs.expire_log_message"),
// Timestamp doesn't mean anything when the log is expired.
// Set it to the task's updated time since it's probably the time when the log has expired.
Timestamp: float64(task.Updated.AsTime().UnixNano()) / float64(time.Second),
diff --git a/routers/web/repo/actions/view_test.go b/routers/web/repo/actions/view_test.go
new file mode 100644
index 0000000000..7296ea6849
--- /dev/null
+++ b/routers/web/repo/actions/view_test.go
@@ -0,0 +1,47 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/translation"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestConvertToViewModel(t *testing.T) {
+ task := &actions_model.ActionTask{
+ Status: actions_model.StatusSuccess,
+ Steps: []*actions_model.ActionTaskStep{
+ {Name: "Run step-name", Index: 0, Status: actions_model.StatusSuccess, LogLength: 1, Started: timeutil.TimeStamp(1), Stopped: timeutil.TimeStamp(5)},
+ },
+ Stopped: timeutil.TimeStamp(20),
+ }
+
+ viewJobSteps, _, err := convertToViewModel(t.Context(), translation.MockLocale{}, nil, task)
+ require.NoError(t, err)
+
+ expectedViewJobs := []*ViewJobStep{
+ {
+ Summary: "Set up job",
+ Duration: "0s",
+ Status: "success",
+ },
+ {
+ Summary: "Run step-name",
+ Duration: "4s",
+ Status: "success",
+ },
+ {
+ Summary: "Complete job",
+ Duration: "15s",
+ Status: "success",
+ },
+ }
+ assert.Equal(t, expectedViewJobs, viewJobSteps)
+}
diff --git a/routers/web/repo/attachment.go b/routers/web/repo/attachment.go
index 54200d8de8..bc14e42543 100644
--- a/routers/web/repo/attachment.go
+++ b/routers/web/repo/attachment.go
@@ -4,16 +4,16 @@
package repo
import (
- "fmt"
"net/http"
+ issues_model "code.gitea.io/gitea/models/issues"
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/httpcache"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
- "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers/common"
"code.gitea.io/gitea/services/attachment"
"code.gitea.io/gitea/services/context"
@@ -40,7 +40,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) {
file, header, err := ctx.Req.FormFile("file")
if err != nil {
- ctx.HTTPError(http.StatusInternalServerError, fmt.Sprintf("FormFile: %v", err))
+ ctx.ServerError("FormFile", err)
return
}
defer file.Close()
@@ -56,7 +56,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) {
ctx.HTTPError(http.StatusBadRequest, err.Error())
return
}
- ctx.HTTPError(http.StatusInternalServerError, fmt.Sprintf("NewAttachment: %v", err))
+ ctx.ServerError("UploadAttachmentGeneralSizeLimit", err)
return
}
@@ -74,13 +74,44 @@ func DeleteAttachment(ctx *context.Context) {
ctx.HTTPError(http.StatusBadRequest, err.Error())
return
}
- if !ctx.IsSigned || (ctx.Doer.ID != attach.UploaderID) {
+
+ if !ctx.IsSigned {
ctx.HTTPError(http.StatusForbidden)
return
}
+
+ if attach.RepoID != ctx.Repo.Repository.ID {
+ ctx.HTTPError(http.StatusBadRequest, "attachment does not belong to this repository")
+ return
+ }
+
+ if ctx.Doer.ID != attach.UploaderID {
+ if attach.IssueID > 0 {
+ issue, err := issues_model.GetIssueByID(ctx, attach.IssueID)
+ if err != nil {
+ ctx.ServerError("GetIssueByID", err)
+ return
+ }
+ if !ctx.Repo.Permission.CanWriteIssuesOrPulls(issue.IsPull) {
+ ctx.HTTPError(http.StatusForbidden)
+ return
+ }
+ } else if attach.ReleaseID > 0 {
+ if !ctx.Repo.Permission.CanWrite(unit.TypeReleases) {
+ ctx.HTTPError(http.StatusForbidden)
+ return
+ }
+ } else {
+ if !ctx.Repo.Permission.IsAdmin() && !ctx.Repo.Permission.IsOwner() {
+ ctx.HTTPError(http.StatusForbidden)
+ return
+ }
+ }
+ }
+
err = repo_model.DeleteAttachment(ctx, attach, true)
if err != nil {
- ctx.HTTPError(http.StatusInternalServerError, fmt.Sprintf("DeleteAttachment: %v", err))
+ ctx.ServerError("DeleteAttachment", err)
return
}
ctx.JSON(http.StatusOK, map[string]string{
@@ -100,23 +131,41 @@ func ServeAttachment(ctx *context.Context, uuid string) {
return
}
- repository, unitType, err := repo_service.LinkedRepository(ctx, attach)
- if err != nil {
- ctx.ServerError("LinkedRepository", err)
+	// Prevent serving an attachment that belongs to a different repository than the one in the URL.
+	// Legacy attachments created before the RepoID cutoff are exempt because their RepoID may be unset.
+ if attach.CreatedUnix > repo_model.LegacyAttachmentMissingRepoIDCutoff && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID != attach.RepoID {
+ ctx.HTTPError(http.StatusNotFound)
return
}
- if repository == nil { // If not linked
+ unitType, repoID, err := repo_service.GetAttachmentLinkedTypeAndRepoID(ctx, attach)
+ if err != nil {
+ ctx.ServerError("GetAttachmentLinkedTypeAndRepoID", err)
+ return
+ }
+
+ if unitType == unit.TypeInvalid { // unlinked attachment can only be accessed by the uploader
if !(ctx.IsSigned && attach.UploaderID == ctx.Doer.ID) { // We block if not the uploader
ctx.HTTPError(http.StatusNotFound)
return
}
- } else { // If we have the repository we check access
- perm, err := access_model.GetUserRepoPermission(ctx, repository, ctx.Doer)
- if err != nil {
- ctx.HTTPError(http.StatusInternalServerError, "GetUserRepoPermission", err.Error())
- return
+ } else { // If we have the linked type, we need to check access
+ var perm access_model.Permission
+ if ctx.Repo.Repository == nil {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ ctx.ServerError("GetRepositoryByID", err)
+ return
+ }
+ perm, err = access_model.GetUserRepoPermission(ctx, repo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ } else {
+ perm = ctx.Repo.Permission
}
+
if !perm.CanRead(unitType) {
ctx.HTTPError(http.StatusNotFound)
return
@@ -150,7 +199,7 @@ func ServeAttachment(ctx *context.Context, uuid string) {
}
defer fr.Close()
- common.ServeContentByReadSeeker(ctx.Base, attach.Name, util.ToPointer(attach.CreatedUnix.AsTime()), fr)
+	common.ServeContentByReadSeeker(ctx.Base, attach.Name, attach.CreatedUnix.AsTimePtr(), fr)
}
// GetAttachment serve attachments
diff --git a/routers/web/repo/blame.go b/routers/web/repo/blame.go
index ab3aecbbe7..4fb61bee6d 100644
--- a/routers/web/repo/blame.go
+++ b/routers/web/repo/blame.go
@@ -267,7 +267,7 @@ func renderBlame(ctx *context.Context, blameParts []*gitrepo.BlamePart, commitNa
bufContent := buf.Bytes()
bufContent = charset.ToUTF8(bufContent, charset.ConvertOpts{})
- highlighted, lexerName := highlight.Code(path.Base(ctx.Repo.TreePath), language, util.UnsafeBytesToString(bufContent))
+ highlighted, _, lexerDisplayName := highlight.RenderCodeSlowGuess(path.Base(ctx.Repo.TreePath), language, util.UnsafeBytesToString(bufContent))
unsafeLines := highlight.UnsafeSplitHighlightedLines(highlighted)
for i, br := range rows {
var line template.HTML
@@ -280,5 +280,5 @@ func renderBlame(ctx *context.Context, blameParts []*gitrepo.BlamePart, commitNa
ctx.Data["EscapeStatus"] = escapeStatus
ctx.Data["BlameRows"] = rows
- ctx.Data["LexerName"] = lexerName
+ ctx.Data["LexerName"] = lexerDisplayName
}
diff --git a/routers/web/repo/branch.go b/routers/web/repo/branch.go
index 2b0ba9072d..f563035600 100644
--- a/routers/web/repo/branch.go
+++ b/routers/web/repo/branch.go
@@ -41,6 +41,7 @@ func Branches(ctx *context.Context) {
ctx.Data["AllowsPulls"] = ctx.Repo.Repository.AllowsPulls(ctx)
ctx.Data["IsWriter"] = ctx.Repo.CanWrite(unit.TypeCode)
ctx.Data["IsMirror"] = ctx.Repo.Repository.IsMirror
+ // TODO: Can be replaced by ctx.Repo.PullRequestCtx.CanCreateNewPull()
ctx.Data["CanPull"] = ctx.Repo.CanWrite(unit.TypeCode) ||
(ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID))
ctx.Data["PageIsViewCode"] = true
diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go
index f702b2de16..27f5651ecb 100644
--- a/routers/web/repo/commit.go
+++ b/routers/web/repo/commit.go
@@ -279,7 +279,7 @@ func Diff(ctx *context.Context) {
diffBlobExcerptData := &gitdiff.DiffBlobExcerptData{
BaseLink: ctx.Repo.RepoLink + "/blob_excerpt",
- DiffStyle: ctx.FormString("style"),
+ DiffStyle: GetDiffViewStyle(ctx),
AfterCommitID: commitID,
}
gitRepo := ctx.Repo.GitRepo
diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go
index f66dabbf87..e034731e5c 100644
--- a/routers/web/repo/compare.go
+++ b/routers/web/repo/compare.go
@@ -4,11 +4,9 @@
package repo
import (
- "bufio"
gocontext "context"
"encoding/csv"
"errors"
- "fmt"
"io"
"net/http"
"net/url"
@@ -41,8 +39,8 @@ import (
"code.gitea.io/gitea/routers/common"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/context/upload"
+ git_service "code.gitea.io/gitea/services/git"
"code.gitea.io/gitea/services/gitdiff"
- pull_service "code.gitea.io/gitea/services/pull"
user_service "code.gitea.io/gitea/services/user"
)
@@ -149,9 +147,9 @@ func setCsvCompareContext(ctx *context.Context) {
if err != nil {
return nil, nil, err
}
-
+ var closer io.Closer = reader
csvReader, err := csv_module.CreateReaderAndDetermineDelimiter(ctx, charset.ToUTF8WithFallbackReader(reader, charset.ConvertOpts{}))
- return csvReader, reader, err
+ return csvReader, closer, err
}
baseReader, baseBlobCloser, err := csvReaderFromCommit(markup.NewRenderContext(ctx).WithRelativePath(diffFile.OldName), baseBlob)
@@ -192,146 +190,93 @@ func setCsvCompareContext(ctx *context.Context) {
}
// ParseCompareInfo parse compare info between two commit for preparing comparing references
-func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
+func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo {
baseRepo := ctx.Repo.Repository
- ci := &common.CompareInfo{}
-
fileOnly := ctx.FormBool("file-only")
- // Get compared branches information
- // A full compare url is of the form:
- //
- // 1. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headBranch}
- // 2. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}:{:headBranch}
- // 3. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}/{:headRepoName}:{:headBranch}
- // 4. /{:baseOwner}/{:baseRepoName}/compare/{:headBranch}
- // 5. /{:baseOwner}/{:baseRepoName}/compare/{:headOwner}:{:headBranch}
- // 6. /{:baseOwner}/{:baseRepoName}/compare/{:headOwner}/{:headRepoName}:{:headBranch}
- //
- // Here we obtain the infoPath "{:baseBranch}...[{:headOwner}/{:headRepoName}:]{:headBranch}" as ctx.PathParam("*")
- // with the :baseRepo in ctx.Repo.
- //
- // Note: Generally :headRepoName is not provided here - we are only passed :headOwner.
- //
- // How do we determine the :headRepo?
- //
- // 1. If :headOwner is not set then the :headRepo = :baseRepo
- // 2. If :headOwner is set - then look for the fork of :baseRepo owned by :headOwner
- // 3. But... :baseRepo could be a fork of :headOwner's repo - so check that
- // 4. Now, :baseRepo and :headRepos could be forks of the same repo - so check that
- //
- // format: ...[:]
- // base<-head: master...head:feature
- // same repo: master...feature
+ // 1 Parse compare router param
+ compareReq := common.ParseCompareRouterParam(ctx.PathParam("*"))
- var (
- isSameRepo bool
- infoPath string
- err error
- )
-
- infoPath = ctx.PathParam("*")
- var infos []string
- if infoPath == "" {
- infos = []string{baseRepo.DefaultBranch, baseRepo.DefaultBranch}
- } else {
- infos = strings.SplitN(infoPath, "...", 2)
- if len(infos) != 2 {
- if infos = strings.SplitN(infoPath, "..", 2); len(infos) == 2 {
- ci.DirectComparison = true
- ctx.Data["PageIsComparePull"] = false
- } else {
- infos = []string{baseRepo.DefaultBranch, infoPath}
- }
- }
- }
-
- ctx.Data["BaseName"] = baseRepo.OwnerName
- ci.BaseBranch = infos[0]
- ctx.Data["BaseBranch"] = ci.BaseBranch
-
- // If there is no head repository, it means compare between same repository.
- headInfos := strings.Split(infos[1], ":")
- if len(headInfos) == 1 {
- isSameRepo = true
- ci.HeadUser = ctx.Repo.Owner
- ci.HeadBranch = headInfos[0]
- } else if len(headInfos) == 2 {
- headInfosSplit := strings.Split(headInfos[0], "/")
- if len(headInfosSplit) == 1 {
- ci.HeadUser, err = user_model.GetUserOrOrgByName(ctx, headInfos[0])
- if err != nil {
- if user_model.IsErrUserNotExist(err) {
- ctx.NotFound(nil)
- } else {
- ctx.ServerError("GetUserByName", err)
- }
- return nil
- }
- ci.HeadBranch = headInfos[1]
- isSameRepo = ci.HeadUser.ID == ctx.Repo.Owner.ID
- if isSameRepo {
- ci.HeadRepo = baseRepo
- }
- } else {
- ci.HeadRepo, err = repo_model.GetRepositoryByOwnerAndName(ctx, headInfosSplit[0], headInfosSplit[1])
- if err != nil {
- if repo_model.IsErrRepoNotExist(err) {
- ctx.NotFound(nil)
- } else {
- ctx.ServerError("GetRepositoryByOwnerAndName", err)
- }
- return nil
- }
- if err := ci.HeadRepo.LoadOwner(ctx); err != nil {
- if user_model.IsErrUserNotExist(err) {
- ctx.NotFound(nil)
- } else {
- ctx.ServerError("GetUserByName", err)
- }
- return nil
- }
- ci.HeadBranch = headInfos[1]
- ci.HeadUser = ci.HeadRepo.Owner
- isSameRepo = ci.HeadRepo.ID == ctx.Repo.Repository.ID
- }
- } else {
- ctx.NotFound(nil)
+ // remove the check when we support compare with carets
+ if compareReq.BaseOriRefSuffix != "" {
+ ctx.HTTPError(http.StatusBadRequest, "Unsupported comparison syntax: ref with suffix")
return nil
}
- ctx.Data["HeadUser"] = ci.HeadUser
- ctx.Data["HeadBranch"] = ci.HeadBranch
- ctx.Repo.PullRequest.SameRepo = isSameRepo
- // Check if base branch is valid.
- baseIsCommit := ctx.Repo.GitRepo.IsCommitExist(ci.BaseBranch)
- baseIsBranch, _ := git_model.IsBranchExist(ctx, ctx.Repo.Repository.ID, ci.BaseBranch)
- baseIsTag := gitrepo.IsTagExist(ctx, ctx.Repo.Repository, ci.BaseBranch)
+ // 2 get repository and owner for head
+ headOwner, headRepo, err := common.GetHeadOwnerAndRepo(ctx, baseRepo, compareReq)
+ switch {
+ case errors.Is(err, util.ErrInvalidArgument):
+ ctx.HTTPError(http.StatusBadRequest, err.Error())
+ return nil
+ case errors.Is(err, util.ErrNotExist):
+ ctx.NotFound(nil)
+ return nil
+ case err != nil:
+ ctx.ServerError("GetHeadOwnerAndRepo", err)
+ return nil
+ }
- if !baseIsCommit && !baseIsBranch && !baseIsTag {
- // Check if baseBranch is short sha commit hash
- if baseCommit, _ := ctx.Repo.GitRepo.GetCommit(ci.BaseBranch); baseCommit != nil {
- ci.BaseBranch = baseCommit.ID.String()
- ctx.Data["BaseBranch"] = ci.BaseBranch
- baseIsCommit = true
- } else if ci.BaseBranch == ctx.Repo.GetObjectFormat().EmptyObjectID().String() {
- if isSameRepo {
- ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadBranch))
- } else {
- ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadRepo.FullName()) + ":" + util.PathEscapeSegments(ci.HeadBranch))
- }
+ isSameRepo := baseRepo.ID == headRepo.ID
+
+ // 3 permission check
+ // base repository's code unit read permission check has been done on web.go
+ permBase := ctx.Repo.Permission
+
+ // If we're not merging from the same repo:
+ if !isSameRepo {
+ // Assert ctx.Doer has permission to read headRepo's codes
+ permHead, err := access_model.GetUserRepoPermission(ctx, headRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
return nil
- } else {
+ }
+ if !permHead.CanRead(unit.TypeCode) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v",
+ ctx.Doer,
+ headRepo,
+ permHead)
+ }
ctx.NotFound(nil)
return nil
}
+ ctx.Data["CanWriteToHeadRepo"] = permHead.CanWrite(unit.TypeCode)
}
- ctx.Data["BaseIsCommit"] = baseIsCommit
- ctx.Data["BaseIsBranch"] = baseIsBranch
- ctx.Data["BaseIsTag"] = baseIsTag
+
+ // 4 get base and head refs
+ baseRefName := util.IfZero(compareReq.BaseOriRef, baseRepo.GetPullRequestTargetBranch(ctx))
+ headRefName := util.IfZero(compareReq.HeadOriRef, headRepo.DefaultBranch)
+
+ baseRef := ctx.Repo.GitRepo.UnstableGuessRefByShortName(baseRefName)
+ if baseRef == "" {
+ ctx.NotFound(nil)
+ return nil
+ }
+ var headGitRepo *git.Repository
+ if isSameRepo {
+ headGitRepo = ctx.Repo.GitRepo
+ } else {
+ headGitRepo, err = gitrepo.OpenRepository(ctx, headRepo)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil
+ }
+ defer headGitRepo.Close()
+ }
+ headRef := headGitRepo.UnstableGuessRefByShortName(headRefName)
+ if headRef == "" {
+ ctx.NotFound(nil)
+ return nil
+ }
+
+ ctx.Data["BaseName"] = baseRepo.OwnerName
+ ctx.Data["BaseBranch"] = baseRef.ShortName() // for legacy templates
+ ctx.Data["HeadUser"] = headOwner
+ ctx.Data["HeadBranch"] = headRef.ShortName() // for legacy templates
ctx.Data["IsPull"] = true
- // Now we have the repository that represents the base
+ context.InitRepoPullRequestCtx(ctx, baseRepo, headRepo)
// The current base and head repositories and branches may not
// actually be the intended branches that the user wants to
@@ -368,31 +313,31 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
}
}
- has := ci.HeadRepo != nil
+ has := headRepo != nil
// 3. If the base is a forked from "RootRepo" and the owner of
// the "RootRepo" is the :headUser - set headRepo to that
- if !has && rootRepo != nil && rootRepo.OwnerID == ci.HeadUser.ID {
- ci.HeadRepo = rootRepo
+ if !has && rootRepo != nil && rootRepo.OwnerID == headOwner.ID {
+ headRepo = rootRepo
has = true
}
// 4. If the ctx.Doer has their own fork of the baseRepo and the headUser is the ctx.Doer
// set the headRepo to the ownFork
- if !has && ownForkRepo != nil && ownForkRepo.OwnerID == ci.HeadUser.ID {
- ci.HeadRepo = ownForkRepo
+ if !has && ownForkRepo != nil && ownForkRepo.OwnerID == headOwner.ID {
+ headRepo = ownForkRepo
has = true
}
// 5. If the headOwner has a fork of the baseRepo - use that
if !has {
- ci.HeadRepo = repo_model.GetForkedRepo(ctx, ci.HeadUser.ID, baseRepo.ID)
- has = ci.HeadRepo != nil
+ headRepo = repo_model.GetForkedRepo(ctx, headOwner.ID, baseRepo.ID)
+ has = headRepo != nil
}
// 6. If the baseRepo is a fork and the headUser has a fork of that use that
if !has && baseRepo.IsFork {
- ci.HeadRepo = repo_model.GetForkedRepo(ctx, ci.HeadUser.ID, baseRepo.ForkID)
- has = ci.HeadRepo != nil
+ headRepo = repo_model.GetForkedRepo(ctx, headOwner.ID, baseRepo.ForkID)
+ has = headRepo != nil
}
// 7. Otherwise if we're not the same repo and haven't found a repo give up
@@ -400,70 +345,15 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
ctx.Data["PageIsComparePull"] = false
}
- // 8. Finally open the git repo
- if isSameRepo {
- ci.HeadRepo = ctx.Repo.Repository
- ci.HeadGitRepo = ctx.Repo.GitRepo
- } else if has {
- ci.HeadGitRepo, err = gitrepo.RepositoryFromRequestContextOrOpen(ctx, ci.HeadRepo)
- if err != nil {
- ctx.ServerError("RepositoryFromRequestContextOrOpen", err)
- return nil
- }
- } else {
- ctx.NotFound(nil)
- return nil
- }
-
- ctx.Data["HeadRepo"] = ci.HeadRepo
+ ctx.Data["HeadRepo"] = headRepo
ctx.Data["BaseCompareRepo"] = ctx.Repo.Repository
- // Now we need to assert that the ctx.Doer has permission to read
- // the baseRepo's code and pulls
- // (NOT headRepo's)
- permBase, err := access_model.GetUserRepoPermission(ctx, baseRepo, ctx.Doer)
- if err != nil {
- ctx.ServerError("GetUserRepoPermission", err)
- return nil
- }
- if !permBase.CanRead(unit.TypeCode) {
- if log.IsTrace() {
- log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in baseRepo has Permissions: %-+v",
- ctx.Doer,
- baseRepo,
- permBase)
- }
- ctx.NotFound(nil)
- return nil
- }
-
- // If we're not merging from the same repo:
- if !isSameRepo {
- // Assert ctx.Doer has permission to read headRepo's codes
- permHead, err := access_model.GetUserRepoPermission(ctx, ci.HeadRepo, ctx.Doer)
- if err != nil {
- ctx.ServerError("GetUserRepoPermission", err)
- return nil
- }
- if !permHead.CanRead(unit.TypeCode) {
- if log.IsTrace() {
- log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v",
- ctx.Doer,
- ci.HeadRepo,
- permHead)
- }
- ctx.NotFound(nil)
- return nil
- }
- ctx.Data["CanWriteToHeadRepo"] = permHead.CanWrite(unit.TypeCode)
- }
-
// If we have a rootRepo and it's different from:
// 1. the computed base
// 2. the computed head
// then get the branches of it
if rootRepo != nil &&
- rootRepo.ID != ci.HeadRepo.ID &&
+ rootRepo.ID != headRepo.ID &&
rootRepo.ID != baseRepo.ID {
canRead := access_model.CheckRepoUnitUser(ctx, rootRepo, ctx.Doer, unit.TypeCode)
if canRead {
@@ -487,7 +377,7 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
// 3. The rootRepo (if we have one)
// then get the branches from it.
if ownForkRepo != nil &&
- ownForkRepo.ID != ci.HeadRepo.ID &&
+ ownForkRepo.ID != headRepo.ID &&
ownForkRepo.ID != baseRepo.ID &&
(rootRepo == nil || ownForkRepo.ID != rootRepo.ID) {
canRead := access_model.CheckRepoUnitUser(ctx, ownForkRepo, ctx.Doer, unit.TypeCode)
@@ -505,28 +395,9 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
}
}
- // Check if head branch is valid.
- headIsCommit := ci.HeadGitRepo.IsCommitExist(ci.HeadBranch)
- headIsBranch, _ := git_model.IsBranchExist(ctx, ci.HeadRepo.ID, ci.HeadBranch)
- headIsTag := gitrepo.IsTagExist(ctx, ci.HeadRepo, ci.HeadBranch)
- if !headIsCommit && !headIsBranch && !headIsTag {
- // Check if headBranch is short sha commit hash
- if headCommit, _ := ci.HeadGitRepo.GetCommit(ci.HeadBranch); headCommit != nil {
- ci.HeadBranch = headCommit.ID.String()
- ctx.Data["HeadBranch"] = ci.HeadBranch
- headIsCommit = true
- } else {
- ctx.NotFound(nil)
- return nil
- }
- }
- ctx.Data["HeadIsCommit"] = headIsCommit
- ctx.Data["HeadIsBranch"] = headIsBranch
- ctx.Data["HeadIsTag"] = headIsTag
-
// Treat as pull request if both references are branches
if ctx.Data["PageIsComparePull"] == nil {
- ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch && permBase.CanReadIssuesOrPulls(true)
+ ctx.Data["PageIsComparePull"] = baseRef.IsBranch() && headRef.IsBranch() && permBase.CanReadIssuesOrPulls(true)
}
if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) {
@@ -540,41 +411,58 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
return nil
}
- baseBranchRef := ci.BaseBranch
- if baseIsBranch {
- baseBranchRef = git.BranchPrefix + ci.BaseBranch
- } else if baseIsTag {
- baseBranchRef = git.TagPrefix + ci.BaseBranch
- }
- headBranchRef := ci.HeadBranch
- if headIsBranch {
- headBranchRef = git.BranchPrefix + ci.HeadBranch
- } else if headIsTag {
- headBranchRef = git.TagPrefix + ci.HeadBranch
- }
-
- ci.CompareInfo, err = pull_service.GetCompareInfo(ctx, baseRepo, ci.HeadRepo, ci.HeadGitRepo, baseBranchRef, headBranchRef, ci.DirectComparison, fileOnly)
+ compareInfo, err := git_service.GetCompareInfo(ctx, baseRepo, headRepo, headGitRepo, baseRef, headRef, compareReq.DirectComparison(), fileOnly)
if err != nil {
ctx.ServerError("GetCompareInfo", err)
return nil
}
- if ci.DirectComparison {
- ctx.Data["BeforeCommitID"] = ci.CompareInfo.BaseCommitID
+ if compareReq.DirectComparison() {
+ ctx.Data["BeforeCommitID"] = compareInfo.BaseCommitID
} else {
- ctx.Data["BeforeCommitID"] = ci.CompareInfo.MergeBase
+ ctx.Data["BeforeCommitID"] = compareInfo.MergeBase
}
- return ci
+ return compareInfo
+}
+
+func prepareNewPullRequestTitleContent(ci *git_service.CompareInfo, commits []*git_model.SignCommitWithStatuses) (title, content string) {
+ title = ci.HeadRef.ShortName()
+
+ if len(commits) > 0 {
+ // the "commits" are from "ShowPrettyFormatLogToList", ordered from newest to oldest; take the oldest one here
+ c := commits[len(commits)-1]
+ title = strings.TrimSpace(c.UserCommit.Summary())
+ }
+
+ if len(commits) == 1 {
+ // FIXME: GIT-COMMIT-MESSAGE-ENCODING: try to convert the encoding for commit message explicitly, ideally it should be done by a git commit struct method
+ c := commits[0]
+ _, content, _ = strings.Cut(strings.TrimSpace(c.UserCommit.CommitMessage), "\n")
+ content = strings.TrimSpace(content)
+ content = string(charset.ToUTF8([]byte(content), charset.ConvertOpts{}))
+ }
+
+ var titleTrailer string
+ // TODO: 255 doesn't seem to be a good limit for title, just keep the old behavior
+ title, titleTrailer = util.EllipsisDisplayStringX(title, 255)
+ if titleTrailer != "" {
+ if content != "" {
+ content = titleTrailer + "\n\n" + content
+ } else {
+ content = titleTrailer + "\n"
+ }
+ }
+ return title, content
}
// PrepareCompareDiff renders compare diff page
func PrepareCompareDiff(
ctx *context.Context,
- ci *common.CompareInfo,
+ ci *git_service.CompareInfo,
whitespaceBehavior gitcmd.TrustedCmdArgs,
) (nothingToCompare bool) {
repo := ctx.Repo.Repository
- headCommitID := ci.CompareInfo.HeadCommitID
+ headCommitID := ci.HeadCommitID
ctx.Data["CommitRepoLink"] = ci.HeadRepo.Link()
ctx.Data["AfterCommitID"] = headCommitID
@@ -586,17 +474,15 @@ func PrepareCompareDiff(
ctx.Data["TitleQuery"] = newPrFormTitle
ctx.Data["BodyQuery"] = newPrFormBody
- if (headCommitID == ci.CompareInfo.MergeBase && !ci.DirectComparison) ||
- headCommitID == ci.CompareInfo.BaseCommitID {
+ if (headCommitID == ci.MergeBase && !ci.DirectComparison()) ||
+ headCommitID == ci.BaseCommitID {
ctx.Data["IsNothingToCompare"] = true
if unit, err := repo.GetUnit(ctx, unit.TypePullRequests); err == nil {
config := unit.PullRequestsConfig()
if !config.AutodetectManualMerge {
- allowEmptyPr := !(ci.BaseBranch == ci.HeadBranch && ctx.Repo.Repository.Name == ci.HeadRepo.Name)
- ctx.Data["AllowEmptyPr"] = allowEmptyPr
-
- return !allowEmptyPr
+ ctx.Data["AllowEmptyPr"] = !ci.IsSameRef()
+ return ci.IsSameRef()
}
ctx.Data["AllowEmptyPr"] = false
@@ -604,9 +490,9 @@ func PrepareCompareDiff(
return true
}
- beforeCommitID := ci.CompareInfo.MergeBase
- if ci.DirectComparison {
- beforeCommitID = ci.CompareInfo.BaseCommitID
+ beforeCommitID := ci.MergeBase
+ if ci.DirectComparison() {
+ beforeCommitID = ci.BaseCommitID
}
maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
@@ -626,7 +512,7 @@ func PrepareCompareDiff(
MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
MaxFiles: maxFiles,
WhitespaceBehavior: whitespaceBehavior,
- DirectComparison: ci.DirectComparison,
+ DirectComparison: ci.DirectComparison(),
}, ctx.FormStrings("files")...)
if err != nil {
ctx.ServerError("GetDiff", err)
@@ -641,7 +527,7 @@ func PrepareCompareDiff(
ctx.Data["Diff"] = diff
ctx.Data["DiffBlobExcerptData"] = &gitdiff.DiffBlobExcerptData{
BaseLink: ci.HeadRepo.Link() + "/blob_excerpt",
- DiffStyle: ctx.FormString("style"),
+ DiffStyle: GetDiffViewStyle(ctx),
AfterCommitID: headCommitID,
}
ctx.Data["DiffNotAvailable"] = diffShortStat.NumFiles == 0
@@ -674,7 +560,7 @@ func PrepareCompareDiff(
return false
}
- commits, err := processGitCommits(ctx, ci.CompareInfo.Commits)
+ commits, err := processGitCommits(ctx, ci.Commits)
if err != nil {
ctx.ServerError("processGitCommits", err)
return false
@@ -682,34 +568,11 @@ func PrepareCompareDiff(
ctx.Data["Commits"] = commits
ctx.Data["CommitCount"] = len(commits)
- title := ci.HeadBranch
- if len(commits) == 1 {
- c := commits[0]
- title = strings.TrimSpace(c.UserCommit.Summary())
-
- body := strings.Split(strings.TrimSpace(c.UserCommit.Message()), "\n")
- if len(body) > 1 {
- ctx.Data["content"] = strings.Join(body[1:], "\n")
- }
- }
-
- if len(title) > 255 {
- var trailer string
- title, trailer = util.EllipsisDisplayStringX(title, 255)
- if len(trailer) > 0 {
- if ctx.Data["content"] != nil {
- ctx.Data["content"] = fmt.Sprintf("%s\n\n%s", trailer, ctx.Data["content"])
- } else {
- ctx.Data["content"] = trailer + "\n"
- }
- }
- }
-
- ctx.Data["title"] = title
- ctx.Data["Username"] = ci.HeadUser.Name
+ ctx.Data["title"], ctx.Data["content"] = prepareNewPullRequestTitleContent(ci, commits)
+ ctx.Data["Username"] = ci.HeadRepo.OwnerName
ctx.Data["Reponame"] = ci.HeadRepo.Name
- setCompareContext(ctx, beforeCommit, headCommit, ci.HeadUser.Name, repo.Name)
+ setCompareContext(ctx, beforeCommit, headCommit, ci.HeadRepo.OwnerName, repo.Name)
return false
}
@@ -739,13 +602,7 @@ func CompareDiff(ctx *context.Context) {
ctx.Data["PageIsViewCode"] = true
ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
- ctx.Data["DirectComparison"] = ci.DirectComparison
- ctx.Data["OtherCompareSeparator"] = ".."
- ctx.Data["CompareSeparator"] = "..."
- if ci.DirectComparison {
- ctx.Data["CompareSeparator"] = ".."
- ctx.Data["OtherCompareSeparator"] = "..."
- }
+ ctx.Data["CompareInfo"] = ci
nothingToCompare := PrepareCompareDiff(ctx, ci, gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)))
if ctx.Written() {
@@ -790,7 +647,7 @@ func CompareDiff(ctx *context.Context) {
ctx.Data["HeadTags"] = headTags
if ctx.Data["PageIsComparePull"] == true {
- pr, err := issues_model.GetUnmergedPullRequest(ctx, ci.HeadRepo.ID, ctx.Repo.Repository.ID, ci.HeadBranch, ci.BaseBranch, issues_model.PullRequestFlowGithub)
+ pr, err := issues_model.GetUnmergedPullRequest(ctx, ci.HeadRepo.ID, ctx.Repo.Repository.ID, ci.HeadRef.ShortName(), ci.BaseRef.ShortName(), issues_model.PullRequestFlowGithub)
if err != nil {
if !issues_model.IsErrPullRequestNotExist(err) {
ctx.ServerError("GetUnmergedPullRequest", err)
@@ -822,11 +679,7 @@ func CompareDiff(ctx *context.Context) {
beforeCommitID := ctx.Data["BeforeCommitID"].(string)
afterCommitID := ctx.Data["AfterCommitID"].(string)
- separator := "..."
- if ci.DirectComparison {
- separator = ".."
- }
- ctx.Data["Title"] = "Comparing " + base.ShortSha(beforeCommitID) + separator + base.ShortSha(afterCommitID)
+ ctx.Data["Title"] = "Comparing " + base.ShortSha(beforeCommitID) + ci.CompareSeparator + base.ShortSha(afterCommitID)
ctx.Data["IsDiffCompare"] = true
@@ -896,19 +749,22 @@ func attachHiddenCommentIDs(section *gitdiff.DiffSection, lineComments map[int64
// ExcerptBlob render blob excerpt contents
func ExcerptBlob(ctx *context.Context) {
commitID := ctx.PathParam("sha")
- lastLeft := ctx.FormInt("last_left")
- lastRight := ctx.FormInt("last_right")
- idxLeft := ctx.FormInt("left")
- idxRight := ctx.FormInt("right")
- leftHunkSize := ctx.FormInt("left_hunk_size")
- rightHunkSize := ctx.FormInt("right_hunk_size")
- direction := ctx.FormString("direction")
+ opts := gitdiff.BlobExcerptOptions{
+ LastLeft: ctx.FormInt("last_left"),
+ LastRight: ctx.FormInt("last_right"),
+ LeftIndex: ctx.FormInt("left"),
+ RightIndex: ctx.FormInt("right"),
+ LeftHunkSize: ctx.FormInt("left_hunk_size"),
+ RightHunkSize: ctx.FormInt("right_hunk_size"),
+ Direction: ctx.FormString("direction"),
+ Language: ctx.FormString("filelang"),
+ }
filePath := ctx.FormString("path")
gitRepo := ctx.Repo.GitRepo
diffBlobExcerptData := &gitdiff.DiffBlobExcerptData{
BaseLink: ctx.Repo.RepoLink + "/blob_excerpt",
- DiffStyle: ctx.FormString("style"),
+ DiffStyle: GetDiffViewStyle(ctx),
AfterCommitID: commitID,
}
@@ -922,61 +778,27 @@ func ExcerptBlob(ctx *context.Context) {
diffBlobExcerptData.BaseLink = ctx.Repo.RepoLink + "/wiki/blob_excerpt"
}
- chunkSize := gitdiff.BlobExcerptChunkSize
commit, err := gitRepo.GetCommit(commitID)
if err != nil {
- ctx.HTTPError(http.StatusInternalServerError, "GetCommit")
+ ctx.ServerError("GetCommit", err)
return
}
- section := &gitdiff.DiffSection{
- FileName: filePath,
- }
- if direction == "up" && (idxLeft-lastLeft) > chunkSize {
- idxLeft -= chunkSize
- idxRight -= chunkSize
- leftHunkSize += chunkSize
- rightHunkSize += chunkSize
- section.Lines, err = getExcerptLines(commit, filePath, idxLeft-1, idxRight-1, chunkSize)
- } else if direction == "down" && (idxLeft-lastLeft) > chunkSize {
- section.Lines, err = getExcerptLines(commit, filePath, lastLeft, lastRight, chunkSize)
- lastLeft += chunkSize
- lastRight += chunkSize
- } else {
- offset := -1
- if direction == "down" {
- offset = 0
- }
- section.Lines, err = getExcerptLines(commit, filePath, lastLeft, lastRight, idxRight-lastRight+offset)
- leftHunkSize = 0
- rightHunkSize = 0
- idxLeft = lastLeft
- idxRight = lastRight
- }
+ blob, err := commit.Tree.GetBlobByPath(filePath)
if err != nil {
- ctx.HTTPError(http.StatusInternalServerError, "getExcerptLines")
+ ctx.ServerError("GetBlobByPath", err)
return
}
-
- newLineSection := &gitdiff.DiffLine{
- Type: gitdiff.DiffLineSection,
- SectionInfo: &gitdiff.DiffLineSectionInfo{
- Path: filePath,
- LastLeftIdx: lastLeft,
- LastRightIdx: lastRight,
- LeftIdx: idxLeft,
- RightIdx: idxRight,
- LeftHunkSize: leftHunkSize,
- RightHunkSize: rightHunkSize,
- },
+ reader, err := blob.DataAsync()
+ if err != nil {
+ ctx.ServerError("DataAsync", err)
+ return
}
- if newLineSection.GetExpandDirection() != "" {
- newLineSection.Content = fmt.Sprintf("@@ -%d,%d +%d,%d @@\n", idxLeft, leftHunkSize, idxRight, rightHunkSize)
- switch direction {
- case "up":
- section.Lines = append([]*gitdiff.DiffLine{newLineSection}, section.Lines...)
- case "down":
- section.Lines = append(section.Lines, newLineSection)
- }
+ defer reader.Close()
+
+ section, err := gitdiff.BuildBlobExcerptDiffSection(filePath, reader, opts)
+ if err != nil {
+ ctx.ServerError("BuildBlobExcerptDiffSection", err)
+ return
}
diffBlobExcerptData.PullIssueIndex = ctx.FormInt64("pull_issue_index")
@@ -1017,37 +839,3 @@ func ExcerptBlob(ctx *context.Context) {
ctx.HTML(http.StatusOK, tplBlobExcerpt)
}
-
-func getExcerptLines(commit *git.Commit, filePath string, idxLeft, idxRight, chunkSize int) ([]*gitdiff.DiffLine, error) {
- blob, err := commit.Tree.GetBlobByPath(filePath)
- if err != nil {
- return nil, err
- }
- reader, err := blob.DataAsync()
- if err != nil {
- return nil, err
- }
- defer reader.Close()
- scanner := bufio.NewScanner(reader)
- var diffLines []*gitdiff.DiffLine
- for line := 0; line < idxRight+chunkSize; line++ {
- if ok := scanner.Scan(); !ok {
- break
- }
- if line < idxRight {
- continue
- }
- lineText := scanner.Text()
- diffLine := &gitdiff.DiffLine{
- LeftIdx: idxLeft + (line - idxRight) + 1,
- RightIdx: line + 1,
- Type: gitdiff.DiffLinePlain,
- Content: " " + lineText,
- }
- diffLines = append(diffLines, diffLine)
- }
- if err = scanner.Err(); err != nil {
- return nil, fmt.Errorf("getExcerptLines scan: %w", err)
- }
- return diffLines, nil
-}
diff --git a/routers/web/repo/compare_test.go b/routers/web/repo/compare_test.go
index 61472dc71e..700aba8821 100644
--- a/routers/web/repo/compare_test.go
+++ b/routers/web/repo/compare_test.go
@@ -4,9 +4,16 @@
package repo
import (
+ "strings"
"testing"
+ "unicode/utf8"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ git_service "code.gitea.io/gitea/services/git"
"code.gitea.io/gitea/services/gitdiff"
"github.com/stretchr/testify/assert"
@@ -38,3 +45,47 @@ func TestAttachCommentsToLines(t *testing.T) {
assert.Equal(t, int64(300), section.Lines[1].Comments[0].ID)
assert.Equal(t, int64(301), section.Lines[1].Comments[1].ID)
}
+
+func TestNewPullRequestTitleContent(t *testing.T) {
+ ci := &git_service.CompareInfo{HeadRef: "refs/heads/head-branch"}
+
+ mockCommit := func(msg string) *git_model.SignCommitWithStatuses {
+ return &git_model.SignCommitWithStatuses{
+ SignCommit: &asymkey_model.SignCommit{
+ UserCommit: &user_model.UserCommit{
+ Commit: &git.Commit{
+ CommitMessage: msg,
+ },
+ },
+ },
+ }
+ }
+
+ title, content := prepareNewPullRequestTitleContent(ci, nil)
+ assert.Equal(t, "head-branch", title)
+ assert.Empty(t, content)
+
+ title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("title-only")})
+ assert.Equal(t, "title-only", title)
+ assert.Empty(t, content)
+
+ title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("title-" + strings.Repeat("a", 255))})
+ assert.Equal(t, "title-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa…", title)
+ assert.Equal(t, "…aaaaaaaaa\n", content)
+
+ title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("title\nbody")})
+ assert.Equal(t, "title", title)
+ assert.Equal(t, "body", content)
+
+ title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("a\xf0\xf0\xf0\nb\xf0\xf0\xf0")})
+ assert.Equal(t, "a?", title) // FIXME: GIT-COMMIT-MESSAGE-ENCODING: "title" doesn't use the same charset converting logic as "content"
+ assert.Equal(t, "b"+string(utf8.RuneError)+string(utf8.RuneError), content)
+
+ title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{
+ // ordered from newest to oldest
+ mockCommit("title2\nbody2"),
+ mockCommit("title1\nbody1"),
+ })
+ assert.Equal(t, "title1", title)
+ assert.Empty(t, content)
+}
diff --git a/routers/web/repo/editor.go b/routers/web/repo/editor.go
index 048c9f3d4a..171ccd7719 100644
--- a/routers/web/repo/editor.go
+++ b/routers/web/repo/editor.go
@@ -18,7 +18,6 @@ import (
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/util"
@@ -78,8 +77,6 @@ func prepareEditorPageFormOptions(ctx *context.Context, editorAction string) *co
ctx.Data["CommitFormOptions"] = commitFormOptions
// for online editor
- ctx.Data["PreviewableExtensions"] = strings.Join(markup.PreviewableExtensions(), ",")
- ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
ctx.Data["IsEditingFileOnly"] = ctx.FormString("return_uri") != ""
ctx.Data["ReturnURI"] = ctx.FormString("return_uri")
@@ -321,7 +318,7 @@ func EditFile(ctx *context.Context) {
}
}
- ctx.Data["EditorconfigJson"] = getContextRepoEditorConfig(ctx, ctx.Repo.TreePath)
+ ctx.Data["CodeEditorConfig"] = getCodeEditorConfig(ctx, ctx.Repo.TreePath)
ctx.HTML(http.StatusOK, tplEditFile)
}
diff --git a/routers/web/repo/editor_apply_patch.go b/routers/web/repo/editor_apply_patch.go
index 357c6f3a21..1a01bfd5cb 100644
--- a/routers/web/repo/editor_apply_patch.go
+++ b/routers/web/repo/editor_apply_patch.go
@@ -20,6 +20,7 @@ func NewDiffPatch(ctx *context.Context) {
}
ctx.Data["PageIsPatch"] = true
+ ctx.Data["CodeEditorConfig"] = CodeEditorConfig{} // not really editing a file, so no need to fill in the config
ctx.HTML(http.StatusOK, tplPatchFile)
}
diff --git a/routers/web/repo/editor_cherry_pick.go b/routers/web/repo/editor_cherry_pick.go
index ca0e19517a..605a35b100 100644
--- a/routers/web/repo/editor_cherry_pick.go
+++ b/routers/web/repo/editor_cherry_pick.go
@@ -67,7 +67,7 @@ func CherryPickPost(ctx *context.Context) {
if parsed.form.Revert {
err = gitrepo.GetReverseRawDiff(ctx, ctx.Repo.Repository, fromCommitID, buf)
} else {
- err = git.GetRawDiff(ctx.Repo.GitRepo, fromCommitID, "patch", buf)
+ err = git.GetRawDiff(ctx.Repo.GitRepo, fromCommitID, git.RawDiffPatch, buf)
}
if err == nil {
opts.Content = buf.String()
diff --git a/routers/web/repo/editor_preview.go b/routers/web/repo/editor_preview.go
index 14be5b72b6..ec1f41a013 100644
--- a/routers/web/repo/editor_preview.go
+++ b/routers/web/repo/editor_preview.go
@@ -6,12 +6,13 @@ package repo
import (
"net/http"
+ "code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/services/context"
files_service "code.gitea.io/gitea/services/repository/files"
)
func DiffPreviewPost(ctx *context.Context) {
- content := ctx.FormString("content")
+ newContent := ctx.FormString("content")
treePath := files_service.CleanGitTreePath(ctx.Repo.TreePath)
if treePath == "" {
ctx.HTTPError(http.StatusBadRequest, "file name to diff is invalid")
@@ -27,7 +28,12 @@ func DiffPreviewPost(ctx *context.Context) {
return
}
- diff, err := files_service.GetDiffPreview(ctx, ctx.Repo.Repository, ctx.Repo.BranchName, treePath, content)
+ oldContent, err := entry.Blob().GetBlobContent(setting.UI.MaxDisplayFileSize)
+ if err != nil {
+ ctx.ServerError("GetBlobContent", err)
+ return
+ }
+ diff, err := files_service.GetDiffPreview(ctx, ctx.Repo.Repository, ctx.Repo.BranchName, treePath, oldContent, newContent)
if err != nil {
ctx.ServerError("GetDiffPreview", err)
return
diff --git a/routers/web/repo/editor_util.go b/routers/web/repo/editor_util.go
index 07bcb474f0..aca732ac70 100644
--- a/routers/web/repo/editor_util.go
+++ b/routers/web/repo/editor_util.go
@@ -7,6 +7,7 @@ import (
"context"
"fmt"
"path"
+ "strconv"
"strings"
git_model "code.gitea.io/gitea/models/git"
@@ -14,9 +15,11 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
- "code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
context_service "code.gitea.io/gitea/services/context"
)
@@ -62,17 +65,33 @@ func getClosestParentWithFiles(gitRepo *git.Repository, branchName, originTreePa
return f(originTreePath, commit)
}
-// getContextRepoEditorConfig returns the editorconfig JSON string for given treePath or "null"
-func getContextRepoEditorConfig(ctx *context_service.Context, treePath string) string {
+// CodeEditorConfig is also used by frontend, defined in "codeeditor.ts"
+type CodeEditorConfig struct {
+ PreviewableExtensions []string `json:"previewable_extensions"`
+ LineWrapExtensions []string `json:"line_wrap_extensions"`
+ LineWrapOn bool `json:"line_wrap_on"`
+
+ IndentStyle string `json:"indent_style"`
+ IndentSize int `json:"indent_size"`
+ TabWidth int `json:"tab_width"`
+ TrimTrailingWhitespace *bool `json:"trim_trailing_whitespace,omitempty"`
+}
+
+func getCodeEditorConfig(ctx *context_service.Context, treePath string) (ret CodeEditorConfig) {
+ ret.PreviewableExtensions = markup.PreviewableExtensions()
+ ret.LineWrapExtensions = setting.Repository.Editor.LineWrapExtensions
+ ret.LineWrapOn = util.SliceContainsString(ret.LineWrapExtensions, path.Ext(treePath), true)
ec, _, err := ctx.Repo.GetEditorconfig()
if err == nil {
def, err := ec.GetDefinitionForFilename(treePath)
if err == nil {
- jsonStr, _ := json.Marshal(def)
- return string(jsonStr)
+ ret.IndentStyle = def.IndentStyle
+ ret.IndentSize, _ = strconv.Atoi(def.IndentSize)
+ ret.TabWidth = def.TabWidth
+ ret.TrimTrailingWhitespace = def.TrimTrailingWhitespace
}
}
- return "null"
+ return ret
}
// getParentTreeFields returns list of parent tree names and corresponding tree paths based on given treePath.
diff --git a/routers/web/repo/githttp.go b/routers/web/repo/githttp.go
index c7b53dcbfb..e922ed99fc 100644
--- a/routers/web/repo/githttp.go
+++ b/routers/web/repo/githttp.go
@@ -5,7 +5,6 @@
package repo
import (
- "bytes"
"compress/gzip"
"fmt"
"net/http"
@@ -23,6 +22,7 @@ import (
access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/gitrepo"
@@ -30,6 +30,7 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
repo_service "code.gitea.io/gitea/services/repository"
@@ -55,8 +56,9 @@ func CorsHandler() func(next http.Handler) http.Handler {
}
}
-// httpBase implementation git smart HTTP protocol
-func httpBase(ctx *context.Context) *serviceHandler {
+// httpBase does the common work for git http services,
+// including early response, authentication, repository lookup and permission check.
+func httpBase(ctx *context.Context, optGitService ...string) *serviceHandler {
username := ctx.PathParam("username")
reponame := strings.TrimSuffix(ctx.PathParam("reponame"), ".git")
@@ -65,20 +67,23 @@ func httpBase(ctx *context.Context) *serviceHandler {
return nil
}
+ var serviceType string
var isPull, receivePack bool
- service := ctx.FormString("service")
- if service == "git-receive-pack" ||
- strings.HasSuffix(ctx.Req.URL.Path, "git-receive-pack") {
- isPull = false
+ switch util.OptionalArg(optGitService) {
+ case "git-receive-pack":
+ serviceType = ServiceTypeReceivePack
receivePack = true
- } else if service == "git-upload-pack" ||
- strings.HasSuffix(ctx.Req.URL.Path, "git-upload-pack") {
+ case "git-upload-pack":
+ serviceType = ServiceTypeUploadPack
isPull = true
- } else if service == "git-upload-archive" ||
- strings.HasSuffix(ctx.Req.URL.Path, "git-upload-archive") {
+ case "git-upload-archive":
+ serviceType = ServiceTypeUploadArchive
isPull = true
- } else {
+ case "":
isPull = ctx.Req.Method == http.MethodHead || ctx.Req.Method == http.MethodGet
+ default: // unknown service
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
+ return nil
}
var accessMode perm.AccessMode
@@ -162,7 +167,7 @@ func httpBase(ctx *context.Context) *serviceHandler {
return nil
}
- if ctx.IsBasicAuth && ctx.Data["IsApiToken"] != true && ctx.Data["IsActionsToken"] != true {
+ if ctx.IsBasicAuth && ctx.Data["IsApiToken"] != true && !ctx.Doer.IsGiteaActions() {
_, err = auth_model.GetTwoFactorByUID(ctx, ctx.Doer.ID)
if err == nil {
// TODO: This response should be changed to "invalid credentials" for security reasons once the expectation behind it (creating an app token to authenticate) is properly documented
@@ -188,13 +193,12 @@ func httpBase(ctx *context.Context) *serviceHandler {
}
if repoExist {
- // Because of special ref "refs/for" .. , need delay write permission check
+ // Because of the special ref "refs/for" (AGit flow), the write permission check needs to be delayed
if git.DefaultFeatures().SupportProcReceive {
accessMode = perm.AccessModeRead
}
- if ctx.Data["IsActionsToken"] == true {
- taskID := ctx.Data["ActionsTaskID"].(int64)
+ if taskID, ok := user_model.GetActionsUserTaskID(ctx.Doer); ok {
p, err := access_model.GetActionsUserRepoPermission(ctx, repo, ctx.Doer, taskID)
if err != nil {
ctx.ServerError("GetActionsUserRepoPermission", err)
@@ -277,7 +281,6 @@ func httpBase(ctx *context.Context) *serviceHandler {
ctx.PlainText(http.StatusForbidden, "repository wiki is disabled")
return nil
}
- log.Error("Failed to get the wiki unit in %-v Error: %v", repo, err)
ctx.ServerError("GetUnit(UnitTypeWiki) for "+repo.FullName(), err)
return nil
}
@@ -285,9 +288,7 @@ func httpBase(ctx *context.Context) *serviceHandler {
environ = append(environ, repo_module.EnvRepoID+fmt.Sprintf("=%d", repo.ID))
- ctx.Req.URL.Path = strings.ToLower(ctx.Req.URL.Path) // blue: In case some repo name has upper case name
-
- return &serviceHandler{repo, isWiki, environ}
+ return &serviceHandler{serviceType, repo, isWiki, environ}
}
var (
@@ -330,6 +331,8 @@ func dummyInfoRefs(ctx *context.Context) {
}
type serviceHandler struct {
+ serviceType string
+
repo *repo_model.Repository
isWiki bool
environ []string
@@ -350,7 +353,7 @@ func setHeaderNoCache(ctx *context.Context) {
func setHeaderCacheForever(ctx *context.Context) {
now := time.Now().Unix()
- expires := now + 31536000
+ expires := now + 365*86400 // 365 days
ctx.Resp.Header().Set("Date", strconv.FormatInt(now, 10))
ctx.Resp.Header().Set("Expires", strconv.FormatInt(expires, 10))
ctx.Resp.Header().Set("Cache-Control", "public, max-age=31536000")
@@ -367,7 +370,7 @@ func isSlashRune(r rune) bool { return r == '/' || r == '\\' }
func (h *serviceHandler) sendFile(ctx *context.Context, contentType, file string) {
if containsParentDirectorySeparator(file) {
- log.Error("request file path contains invalid path: %v", file)
+ log.Debug("request file path contains invalid path: %v", file)
ctx.Resp.WriteHeader(http.StatusBadRequest)
return
}
@@ -380,38 +383,45 @@ func (h *serviceHandler) sendFile(ctx *context.Context, contentType, file string
// one or more key=value pairs separated by colons
var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`)
-func prepareGitCmdWithAllowedService(service string) (*gitcmd.Command, error) {
- if service == ServiceTypeReceivePack {
- return gitcmd.NewCommand(ServiceTypeReceivePack), nil
+func prepareGitCmdWithAllowedService(service string, allowedServices []string) *gitcmd.Command {
+ if !slices.Contains(allowedServices, service) {
+ return nil
}
- if service == ServiceTypeUploadPack {
- return gitcmd.NewCommand(ServiceTypeUploadPack), nil
+ switch service {
+ case ServiceTypeReceivePack:
+ return gitcmd.NewCommand(ServiceTypeReceivePack)
+ case ServiceTypeUploadPack:
+ return gitcmd.NewCommand(ServiceTypeUploadPack)
+ case ServiceTypeUploadArchive:
+ return gitcmd.NewCommand(ServiceTypeUploadArchive)
+ default:
+ return nil
}
- return nil, fmt.Errorf("service %q is not allowed", service)
}
-func serviceRPC(ctx *context.Context, h *serviceHandler, service string) {
- defer func() {
- if err := ctx.Req.Body.Close(); err != nil {
- log.Error("serviceRPC: Close: %v", err)
- }
- }()
+func serviceRPC(ctx *context.Context, service string) {
+ defer ctx.Req.Body.Close()
+ h := httpBase(ctx, "git-"+service)
+ if h == nil {
+ return
+ }
expectedContentType := fmt.Sprintf("application/x-git-%s-request", service)
if ctx.Req.Header.Get("Content-Type") != expectedContentType {
- log.Error("Content-Type (%q) doesn't match expected: %q", ctx.Req.Header.Get("Content-Type"), expectedContentType)
- // FIXME: why it's 401 if the content type is unexpected?
- ctx.Resp.WriteHeader(http.StatusUnauthorized)
+ log.Debug("Content-Type (%q) doesn't match expected: %q", ctx.Req.Header.Get("Content-Type"), expectedContentType)
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
return
}
- cmd, err := prepareGitCmdWithAllowedService(service)
- if err != nil {
- log.Error("Failed to prepareGitCmdWithService: %v", err)
- // FIXME: why it's 401 if the service type doesn't supported?
- ctx.Resp.WriteHeader(http.StatusUnauthorized)
+ cmd := prepareGitCmdWithAllowedService(service, []string{ServiceTypeUploadPack, ServiceTypeReceivePack, ServiceTypeUploadArchive})
+ if cmd == nil {
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
return
}
+ // git upload-archive does not have a "--stateless-rpc" option
+ if service == ServiceTypeUploadPack || service == ServiceTypeReceivePack {
+ cmd.AddArguments("--stateless-rpc")
+ }
ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-result", service))
@@ -419,10 +429,10 @@ func serviceRPC(ctx *context.Context, h *serviceHandler, service string) {
// Handle GZIP.
if ctx.Req.Header.Get("Content-Encoding") == "gzip" {
+ var err error
reqBody, err = gzip.NewReader(reqBody)
if err != nil {
- log.Error("Fail to create gzip reader: %v", err)
- ctx.Resp.WriteHeader(http.StatusInternalServerError)
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
return
}
}
@@ -434,49 +444,35 @@ func serviceRPC(ctx *context.Context, h *serviceHandler, service string) {
h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
}
- var stderr bytes.Buffer
- if err := gitrepo.RunCmd(ctx, h.getStorageRepo(), cmd.AddArguments("--stateless-rpc", ".").
+ if err := gitrepo.RunCmdWithStderr(ctx, h.getStorageRepo(), cmd.AddArguments(".").
WithEnv(append(os.Environ(), h.environ...)).
- WithStderr(&stderr).
- WithStdin(reqBody).
- WithStdout(ctx.Resp).
- WithUseContextTimeout(true)); err != nil {
- if !git.IsErrCanceledOrKilled(err) {
- log.Error("Fail to serve RPC(%s) in %s: %v - %s", service, h.getStorageRepo().RelativePath(), err, stderr.String())
+ WithStdinCopy(reqBody).
+ WithStdoutCopy(ctx.Resp),
+ ); err != nil {
+ if !gitcmd.IsErrorCanceledOrKilled(err) {
+ log.Error("Fail to serve RPC(%s) in %s: %v", service, h.getStorageRepo().RelativePath(), err)
}
- return
}
}
const (
- ServiceTypeUploadPack = "upload-pack"
- ServiceTypeReceivePack = "receive-pack"
+ ServiceTypeUploadPack = "upload-pack"
+ ServiceTypeReceivePack = "receive-pack"
+ ServiceTypeUploadArchive = "upload-archive"
)
// ServiceUploadPack implements Git Smart HTTP protocol
func ServiceUploadPack(ctx *context.Context) {
- h := httpBase(ctx)
- if h != nil {
- serviceRPC(ctx, h, ServiceTypeUploadPack)
- }
+ serviceRPC(ctx, ServiceTypeUploadPack)
}
// ServiceReceivePack implements Git Smart HTTP protocol
func ServiceReceivePack(ctx *context.Context) {
- h := httpBase(ctx)
- if h != nil {
- serviceRPC(ctx, h, ServiceTypeReceivePack)
- }
+ serviceRPC(ctx, ServiceTypeReceivePack)
}
-func getServiceType(ctx *context.Context) string {
- switch ctx.Req.FormValue("service") {
- case "git-" + ServiceTypeUploadPack:
- return ServiceTypeUploadPack
- case "git-" + ServiceTypeReceivePack:
- return ServiceTypeReceivePack
- }
- return ""
+func ServiceUploadArchive(ctx *context.Context) {
+ serviceRPC(ctx, ServiceTypeUploadArchive)
}
func packetWrite(str string) []byte {
@@ -489,36 +485,45 @@ func packetWrite(str string) []byte {
// GetInfoRefs implements Git dumb HTTP
func GetInfoRefs(ctx *context.Context) {
- h := httpBase(ctx)
+ h := httpBase(ctx, ctx.FormString("service")) // git http protocol: "?service=git-"
if h == nil {
return
}
setHeaderNoCache(ctx)
- service := getServiceType(ctx)
- cmd, err := prepareGitCmdWithAllowedService(service)
- if err == nil {
- if protocol := ctx.Req.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) {
- h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
- }
- h.environ = append(os.Environ(), h.environ...)
-
- refs, _, err := gitrepo.RunCmdBytes(ctx, h.getStorageRepo(), cmd.AddArguments("--stateless-rpc", "--advertise-refs", ".").
- WithEnv(h.environ))
- if err != nil {
- log.Error(fmt.Sprintf("%v - %s", err, string(refs)))
- }
-
- ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-advertisement", service))
- ctx.Resp.WriteHeader(http.StatusOK)
- _, _ = ctx.Resp.Write(packetWrite("# service=git-" + service + "\n"))
- _, _ = ctx.Resp.Write([]byte("0000"))
- _, _ = ctx.Resp.Write(refs)
- } else {
+ if h.serviceType == "" {
+ // it's said that some legacy git clients will send requests to "/info/refs" without the "service" parameter,
+ // although such clients should no longer exist these days. TODO: not quite sure why we need this UpdateServerInfo logic
if err := gitrepo.UpdateServerInfo(ctx, h.getStorageRepo()); err != nil {
- log.Error("Failed to update server info: %v", err)
+ ctx.ServerError("UpdateServerInfo", err)
+ return
}
h.sendFile(ctx, "text/plain; charset=utf-8", "info/refs")
+ return
}
+
+ cmd := prepareGitCmdWithAllowedService(h.serviceType, []string{ServiceTypeUploadPack, ServiceTypeReceivePack})
+ if cmd == nil {
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
+ return
+ }
+
+ if protocol := ctx.Req.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) {
+ h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
+ }
+ h.environ = append(os.Environ(), h.environ...)
+
+ cmd = cmd.AddArguments("--stateless-rpc", "--advertise-refs", ".").WithEnv(h.environ)
+ refs, _, err := gitrepo.RunCmdBytes(ctx, h.getStorageRepo(), cmd)
+ if err != nil {
+ ctx.ServerError("RunGitServiceAdvertiseRefs", err)
+ return
+ }
+
+ ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-advertisement", h.serviceType))
+ ctx.Resp.WriteHeader(http.StatusOK)
+ _, _ = ctx.Resp.Write(packetWrite("# service=git-" + h.serviceType + "\n"))
+ _, _ = ctx.Resp.Write([]byte("0000"))
+ _, _ = ctx.Resp.Write(refs)
}
// GetTextFile implements Git dumb HTTP
diff --git a/routers/web/repo/issue.go b/routers/web/repo/issue.go
index 54b7e5df2a..eaec3b5789 100644
--- a/routers/web/repo/issue.go
+++ b/routers/web/repo/issue.go
@@ -109,11 +109,6 @@ func MustAllowPulls(ctx *context.Context) {
ctx.NotFound(nil)
return
}
-
- // User can send pull request if owns a forked repository.
- if ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID) {
- ctx.Repo.PullRequest.Allowed = true
- }
}
func retrieveProjectsInternal(ctx *context.Context, repo *repo_model.Repository) (open, closed []*project_model.Project) {
diff --git a/routers/web/repo/issue_list.go b/routers/web/repo/issue_list.go
index da0ba6c407..ff4ff26685 100644
--- a/routers/web/repo/issue_list.go
+++ b/routers/web/repo/issue_list.go
@@ -462,14 +462,7 @@ func renderMilestones(ctx *context.Context) {
return
}
- openMilestones, closedMilestones := issues_model.MilestoneList{}, issues_model.MilestoneList{}
- for _, milestone := range milestones {
- if milestone.IsClosed {
- closedMilestones = append(closedMilestones, milestone)
- } else {
- openMilestones = append(openMilestones, milestone)
- }
- }
+ openMilestones, closedMilestones := issues_model.MilestoneList(milestones).SplitByOpenClosed()
ctx.Data["OpenMilestones"] = openMilestones
ctx.Data["ClosedMilestones"] = closedMilestones
}
diff --git a/routers/web/repo/issue_timetrack.go b/routers/web/repo/issue_timetrack.go
index 985bfd6698..b9ed059fde 100644
--- a/routers/web/repo/issue_timetrack.go
+++ b/routers/web/repo/issue_timetrack.go
@@ -60,7 +60,7 @@ func DeleteTime(c *context.Context) {
return
}
- t, err := issues_model.GetTrackedTimeByID(c, c.PathParamInt64("timeid"))
+ t, err := issues_model.GetTrackedTimeByID(c, issue.ID, c.PathParamInt64("timeid"))
if err != nil {
if db.IsErrNotExist(err) {
c.NotFound(err)
diff --git a/routers/web/repo/issue_view.go b/routers/web/repo/issue_view.go
index 803afbffe4..1354c2d6f9 100644
--- a/routers/web/repo/issue_view.go
+++ b/routers/web/repo/issue_view.go
@@ -7,7 +7,6 @@ import (
"fmt"
"math/big"
"net/http"
- "net/url"
"sort"
"strconv"
@@ -33,6 +32,7 @@ import (
"code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/templates/vars"
"code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web/middleware"
asymkey_service "code.gitea.io/gitea/services/asymkey"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/context/upload"
@@ -408,7 +408,7 @@ func ViewIssue(ctx *context.Context) {
}
ctx.Data["Reference"] = issue.Ref
- ctx.Data["SignInLink"] = setting.AppSubURL + "/user/login?redirect_to=" + url.QueryEscape(ctx.Data["Link"].(string))
+ ctx.Data["SignInLink"] = middleware.RedirectLinkUserLogin(ctx.Req)
ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
ctx.Data["HasProjectsWritePermission"] = ctx.Repo.CanWrite(unit.TypeProjects)
@@ -469,7 +469,7 @@ func prepareIssueViewSidebarDependency(ctx *context.Context, issue *issues_model
ctx.Data["AllowCrossRepositoryDependencies"] = setting.Service.AllowCrossRepositoryDependencies
// Get Dependencies
- blockedBy, err := issue.BlockedByDependencies(ctx, db.ListOptions{})
+ blockedBy, _, err := issue.BlockedByDependencies(ctx, db.ListOptions{})
if err != nil {
ctx.ServerError("BlockedByDependencies", err)
return
@@ -495,7 +495,7 @@ func preparePullViewSigning(ctx *context.Context, issue *issues_model.Issue) {
pull := issue.PullRequest
ctx.Data["WillSign"] = false
if ctx.Doer != nil {
- sign, key, _, err := asymkey_service.SignMerge(ctx, pull, ctx.Doer, ctx.Repo.GitRepo, pull.BaseBranch, pull.GetGitHeadRefName())
+ sign, key, _, err := asymkey_service.SignMerge(ctx, pull, ctx.Doer, ctx.Repo.GitRepo)
ctx.Data["WillSign"] = sign
ctx.Data["SigningKeyMergeDisplay"] = asymkey_model.GetDisplaySigningKey(key)
if err != nil {
diff --git a/routers/web/repo/middlewares.go b/routers/web/repo/middlewares.go
index 7518e6feae..c7c9da498b 100644
--- a/routers/web/repo/middlewares.go
+++ b/routers/web/repo/middlewares.go
@@ -7,8 +7,11 @@ import (
"strconv"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/gitdiff"
user_service "code.gitea.io/gitea/services/user"
)
@@ -28,36 +31,24 @@ func SetEditorconfigIfExists(ctx *context.Context) {
ctx.Data["Editorconfig"] = ec
}
+func GetDiffViewStyle(ctx *context.Context) string {
+ return util.Iif(ctx.Data["IsSplitStyle"] == true, gitdiff.DiffStyleSplit, gitdiff.DiffStyleUnified)
+}
+
// SetDiffViewStyle set diff style as render variable
func SetDiffViewStyle(ctx *context.Context) {
- queryStyle := ctx.FormString("style")
-
- if !ctx.IsSigned {
- ctx.Data["IsSplitStyle"] = queryStyle == "split"
- return
+ style := ctx.FormString("style")
+ if ctx.IsSigned {
+ style = util.IfZero(style, ctx.Doer.DiffViewStyle)
+ style = util.Iif(style == gitdiff.DiffStyleSplit, gitdiff.DiffStyleSplit, gitdiff.DiffStyleUnified)
+ if style != ctx.Doer.DiffViewStyle {
+ err := user_service.UpdateUser(ctx, ctx.Doer, &user_service.UpdateOptions{DiffViewStyle: optional.Some(style)})
+ if err != nil {
+ log.Error("UpdateUser DiffViewStyle: %v", err)
+ }
+ }
}
-
- var (
- userStyle = ctx.Doer.DiffViewStyle
- style string
- )
-
- if queryStyle == "unified" || queryStyle == "split" {
- style = queryStyle
- } else if userStyle == "unified" || userStyle == "split" {
- style = userStyle
- } else {
- style = "unified"
- }
-
ctx.Data["IsSplitStyle"] = style == "split"
-
- opts := &user_service.UpdateOptions{
- DiffViewStyle: optional.Some(style),
- }
- if err := user_service.UpdateUser(ctx, ctx.Doer, opts); err != nil {
- ctx.ServerError("UpdateUser", err)
- }
}
// SetWhitespaceBehavior set whitespace behavior as render variable
diff --git a/routers/web/repo/middlewares_test.go b/routers/web/repo/middlewares_test.go
new file mode 100644
index 0000000000..c6dc2e4615
--- /dev/null
+++ b/routers/web/repo/middlewares_test.go
@@ -0,0 +1,59 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/gitdiff"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestDiffViewStyle(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ t.Run("AnonymousUser", func(t *testing.T) {
+ ctx, _ := contexttest.MockContext(t, "/any")
+ SetDiffViewStyle(ctx)
+ assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx))
+
+ ctx, _ = contexttest.MockContext(t, "/any?style=split")
+ SetDiffViewStyle(ctx)
+ assert.Equal(t, gitdiff.DiffStyleSplit, GetDiffViewStyle(ctx))
+
+ ctx, _ = contexttest.MockContext(t, "/any")
+ SetDiffViewStyle(ctx)
+ assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx)) // at the moment, anonymous users don't have a saved preference
+ })
+
+ t.Run("SignedInUser", func(t *testing.T) {
+ ctx, _ := contexttest.MockContext(t, "/any")
+ contexttest.LoadUser(t, ctx, 2)
+ SetDiffViewStyle(ctx)
+ assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx))
+
+ ctx, _ = contexttest.MockContext(t, "/any?style=split")
+ contexttest.LoadUser(t, ctx, 2)
+ SetDiffViewStyle(ctx)
+ assert.Equal(t, gitdiff.DiffStyleSplit, GetDiffViewStyle(ctx))
+
+ ctx, _ = contexttest.MockContext(t, "/any")
+ contexttest.LoadUser(t, ctx, 2)
+ SetDiffViewStyle(ctx)
+ assert.Equal(t, gitdiff.DiffStyleSplit, GetDiffViewStyle(ctx))
+
+ ctx, _ = contexttest.MockContext(t, "/any?style=unified")
+ contexttest.LoadUser(t, ctx, 2)
+ SetDiffViewStyle(ctx)
+ assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx))
+
+ ctx, _ = contexttest.MockContext(t, "/any")
+ contexttest.LoadUser(t, ctx, 2)
+ SetDiffViewStyle(ctx)
+ assert.Equal(t, gitdiff.DiffStyleUnified, GetDiffViewStyle(ctx))
+ })
+}
diff --git a/routers/web/repo/projects.go b/routers/web/repo/projects.go
index 44bb02afd8..9f74d00211 100644
--- a/routers/web/repo/projects.go
+++ b/routers/web/repo/projects.go
@@ -312,13 +312,25 @@ func ViewProject(ctx *context.Context) {
}
preparedLabelFilter := issue.PrepareFilterIssueLabels(ctx, ctx.Repo.Repository.ID, ctx.Repo.Owner)
+ if ctx.Written() {
+ return
+ }
assigneeID := ctx.FormString("assignee")
+ milestoneID := ctx.FormInt64("milestone")
+
+ var milestoneIDs []int64
+ if milestoneID > 0 {
+ milestoneIDs = []int64{milestoneID}
+ } else if milestoneID == db.NoConditionID {
+ milestoneIDs = []int64{db.NoConditionID}
+ }
issuesMap, err := project_service.LoadIssuesFromProject(ctx, project, &issues_model.IssuesOptions{
- RepoIDs: []int64{ctx.Repo.Repository.ID},
- LabelIDs: preparedLabelFilter.SelectedLabelIDs,
- AssigneeID: assigneeID,
+ RepoIDs: []int64{ctx.Repo.Repository.ID},
+ LabelIDs: preparedLabelFilter.SelectedLabelIDs,
+ AssigneeID: assigneeID,
+ MilestoneIDs: milestoneIDs,
})
if err != nil {
ctx.ServerError("LoadIssuesOfColumns", err)
@@ -399,6 +411,12 @@ func ViewProject(ctx *context.Context) {
ctx.Data["Assignees"] = shared_user.MakeSelfOnTop(ctx.Doer, assigneeUsers)
ctx.Data["AssigneeID"] = assigneeID
+ renderMilestones(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["MilestoneID"] = milestoneID
+
rctx := renderhelper.NewRenderContextRepoComment(ctx, ctx.Repo.Repository)
project.RenderedContent, err = markdown.RenderString(rctx, project.Description)
if err != nil {
diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go
index 488389e204..d306927001 100644
--- a/routers/web/repo/pull.go
+++ b/routers/web/repo/pull.go
@@ -23,6 +23,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/commitstatus"
"code.gitea.io/gitea/modules/emoji"
"code.gitea.io/gitea/modules/fileicon"
"code.gitea.io/gitea/modules/git"
@@ -32,8 +33,10 @@ import (
"code.gitea.io/gitea/modules/graceful"
issue_template "code.gitea.io/gitea/modules/issue/template"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/routers/utils"
@@ -44,6 +47,7 @@ import (
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/context/upload"
"code.gitea.io/gitea/services/forms"
+ git_service "code.gitea.io/gitea/services/git"
"code.gitea.io/gitea/services/gitdiff"
notify_service "code.gitea.io/gitea/services/notify"
pull_service "code.gitea.io/gitea/services/pull"
@@ -235,7 +239,7 @@ func GetMergedBaseCommitID(ctx *context.Context, issue *issues_model.Issue) stri
}
if commitSHA != "" {
// Get immediate parent of the first commit in the patch, grab history back
- parentCommit, err = gitrepo.RunCmdString(ctx, ctx.Repo.Repository,
+ parentCommit, _, err = gitrepo.RunCmdString(ctx, ctx.Repo.Repository,
gitcmd.NewCommand("rev-list", "-1", "--skip=1").AddDynamicArguments(commitSHA))
if err == nil {
parentCommit = strings.TrimSpace(parentCommit)
@@ -256,7 +260,7 @@ func GetMergedBaseCommitID(ctx *context.Context, issue *issues_model.Issue) stri
return baseCommit
}
-func preparePullViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_service.CompareInfo {
+func preparePullViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git_service.CompareInfo {
if !issue.IsPull {
return nil
}
@@ -267,7 +271,7 @@ func preparePullViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *p
}
// prepareMergedViewPullInfo show meta information for a merged pull request view page
-func prepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_service.CompareInfo {
+func prepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git_service.CompareInfo {
pull := issue.PullRequest
setMergeTarget(ctx, pull)
@@ -275,8 +279,8 @@ func prepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue)
baseCommit := GetMergedBaseCommitID(ctx, issue)
- compareInfo, err := pull_service.GetCompareInfo(ctx, ctx.Repo.Repository, ctx.Repo.Repository, ctx.Repo.GitRepo,
- baseCommit, pull.GetGitHeadRefName(), false, false)
+ compareInfo, err := git_service.GetCompareInfo(ctx, ctx.Repo.Repository, ctx.Repo.Repository, ctx.Repo.GitRepo,
+ git.RefName(baseCommit), git.RefName(pull.GetGitHeadRefName()), false, false)
if err != nil {
if strings.Contains(err.Error(), "fatal: Not a valid object name") || strings.Contains(err.Error(), "unknown revision or path not in the working tree") {
ctx.Data["IsPullRequestBroken"] = true
@@ -318,10 +322,59 @@ type pullCommitStatusCheckData struct {
RequireApprovalRunCount int // number of workflow runs that require approval
CanApprove bool // whether the user can approve workflow runs
ApproveLink string // link to approve all checks
+ RequiredChecksState commitstatus.CommitStatusState
+ LatestCommitStatus *git_model.CommitStatus
+}
+
+func (d *pullCommitStatusCheckData) CommitStatusCheckPrompt(locale translation.Locale) string {
+ if d.RequiredChecksState.IsPending() || len(d.MissingRequiredChecks) > 0 {
+ return locale.TrString("repo.pulls.status_checking")
+ } else if d.RequiredChecksState.IsSuccess() {
+ if d.LatestCommitStatus != nil && d.LatestCommitStatus.State.IsFailure() {
+ return locale.TrString("repo.pulls.status_checks_failure_optional")
+ }
+ return locale.TrString("repo.pulls.status_checks_success")
+ } else if d.RequiredChecksState.IsWarning() {
+ return locale.TrString("repo.pulls.status_checks_warning")
+ } else if d.RequiredChecksState.IsFailure() {
+ return locale.TrString("repo.pulls.status_checks_failure_required")
+ } else if d.RequiredChecksState.IsError() {
+ return locale.TrString("repo.pulls.status_checks_error")
+ }
+ return locale.TrString("repo.pulls.status_checking")
+}
+
+func getViewPullHeadBranchInfo(ctx *context.Context, pull *issues_model.PullRequest, baseGitRepo *git.Repository) (headCommitID string, headCommitExists bool, err error) {
+ if pull.HeadRepo == nil {
+ return "", false, nil
+ }
+ headGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pull.HeadRepo)
+ if err != nil {
+ return "", false, util.Iif(errors.Is(err, util.ErrNotExist), nil, err)
+ }
+ defer closer.Close()
+
+ if pull.Flow == issues_model.PullRequestFlowGithub {
+ headCommitExists, _ = git_model.IsBranchExist(ctx, pull.HeadRepo.ID, pull.HeadBranch)
+ } else {
+ headCommitExists = gitrepo.IsReferenceExist(ctx, pull.BaseRepo, pull.GetGitHeadRefName())
+ }
+
+ if headCommitExists {
+ if pull.Flow != issues_model.PullRequestFlowGithub {
+ headCommitID, err = baseGitRepo.GetRefCommitID(pull.GetGitHeadRefName())
+ } else {
+ headCommitID, err = headGitRepo.GetBranchCommitID(pull.HeadBranch)
+ }
+ if err != nil {
+ return "", false, util.Iif(errors.Is(err, util.ErrNotExist), nil, err)
+ }
+ }
+ return headCommitID, headCommitExists, nil
}
// prepareViewPullInfo show meta information for a pull request preview page
-func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_service.CompareInfo {
+func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git_service.CompareInfo {
ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
repo := ctx.Repo.Repository
@@ -358,6 +411,8 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_
defer baseGitRepo.Close()
}
+ statusCheckData := &pullCommitStatusCheckData{}
+
if exist, _ := git_model.IsBranchExist(ctx, pull.BaseRepo.ID, pull.BaseBranch); !exist {
ctx.Data["BaseBranchNotExist"] = true
ctx.Data["IsPullRequestBroken"] = true
@@ -378,13 +433,14 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_
git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
}
+ statusCheckData.LatestCommitStatus = git_model.CalcCommitStatus(commitStatuses)
if len(commitStatuses) > 0 {
ctx.Data["LatestCommitStatuses"] = commitStatuses
- ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
+ ctx.Data["LatestCommitStatus"] = statusCheckData.LatestCommitStatus
}
- compareInfo, err := pull_service.GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo,
- pull.MergeBase, pull.GetGitHeadRefName(), false, false)
+ compareInfo, err := git_service.GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo,
+ git.RefName(pull.MergeBase), git.RefName(pull.GetGitHeadRefName()), false, false)
if err != nil {
if strings.Contains(err.Error(), "fatal: Not a valid object name") {
ctx.Data["IsPullRequestBroken"] = true
@@ -403,34 +459,10 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_
return compareInfo
}
- var headBranchExist bool
- var headBranchSha string
- // HeadRepo may be missing
- if pull.HeadRepo != nil {
- headGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pull.HeadRepo)
- if err != nil {
- ctx.ServerError("RepositoryFromContextOrOpen", err)
- return nil
- }
- defer closer.Close()
-
- if pull.Flow == issues_model.PullRequestFlowGithub {
- headBranchExist, _ = git_model.IsBranchExist(ctx, pull.HeadRepo.ID, pull.HeadBranch)
- } else {
- headBranchExist = gitrepo.IsReferenceExist(ctx, pull.BaseRepo, pull.GetGitHeadRefName())
- }
-
- if headBranchExist {
- if pull.Flow != issues_model.PullRequestFlowGithub {
- headBranchSha, err = baseGitRepo.GetRefCommitID(pull.GetGitHeadRefName())
- } else {
- headBranchSha, err = headGitRepo.GetBranchCommitID(pull.HeadBranch)
- }
- if err != nil {
- ctx.ServerError("GetBranchCommitID", err)
- return nil
- }
- }
+ headBranchSha, headBranchExist, err := getViewPullHeadBranchInfo(ctx, pull, baseGitRepo)
+ if err != nil {
+ ctx.ServerError("getViewPullHeadBranchInfo", err)
+ return nil
}
if headBranchExist {
@@ -465,10 +497,8 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_
return nil
}
- statusCheckData := &pullCommitStatusCheckData{
- ApproveLink: fmt.Sprintf("%s/actions/approve-all-checks?commit_id=%s", repo.Link(), sha),
- }
ctx.Data["StatusCheckData"] = statusCheckData
+ statusCheckData.ApproveLink = fmt.Sprintf("%s/actions/approve-all-checks?commit_id=%s", repo.Link(), sha)
commitStatuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
if err != nil {
@@ -493,9 +523,10 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_
statusCheckData.CanApprove = ctx.Repo.CanWrite(unit.TypeActions)
}
+ statusCheckData.LatestCommitStatus = git_model.CalcCommitStatus(commitStatuses)
if len(commitStatuses) > 0 {
ctx.Data["LatestCommitStatuses"] = commitStatuses
- ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
+ ctx.Data["LatestCommitStatus"] = statusCheckData.LatestCommitStatus
}
if pb != nil && pb.EnableStatusCheck {
@@ -532,7 +563,7 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_
}
return false
}
- ctx.Data["RequiredStatusCheckState"] = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pb.StatusCheckContexts)
+ statusCheckData.RequiredChecksState = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pb.StatusCheckContexts)
}
ctx.Data["HeadBranchMovedOn"] = headBranchSha != sha
@@ -550,8 +581,8 @@ func prepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *pull_
}
}
- compareInfo, err := pull_service.GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo,
- git.BranchPrefix+pull.BaseBranch, pull.GetGitHeadRefName(), false, false)
+ compareInfo, err := git_service.GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo,
+ git.RefNameFromBranch(pull.BaseBranch), git.RefName(pull.GetGitHeadRefName()), false, false)
if err != nil {
if strings.Contains(err.Error(), "fatal: Not a valid object name") {
ctx.Data["IsPullRequestBroken"] = true
@@ -863,7 +894,7 @@ func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) {
ctx.Data["DiffBlobExcerptData"] = &gitdiff.DiffBlobExcerptData{
BaseLink: ctx.Repo.RepoLink + "/blob_excerpt",
PullIssueIndex: pull.Index,
- DiffStyle: ctx.FormString("style"),
+ DiffStyle: GetDiffViewStyle(ctx),
AfterCommitID: afterCommitID,
}
ctx.Data["DiffNotAvailable"] = diffShortStat.NumFiles == 0
@@ -1135,11 +1166,9 @@ func MergePullRequest(ctx *context.Context) {
message += "\n\n" + form.MergeMessageField
}
- deleteBranchAfterMerge, err := pull_service.ShouldDeleteBranchAfterMerge(ctx, form.DeleteBranchAfterMerge, ctx.Repo.Repository, pr)
- if err != nil {
- ctx.ServerError("ShouldDeleteBranchAfterMerge", err)
- return
- }
+ // The UI always shows the checkbox (form.DeleteBranchAfterMerge is nil if the checkbox is not checked),
+ // so just use the user's choice; don't use pull_service.ShouldDeleteBranchAfterMerge to decide
+ deleteBranchAfterMerge := optional.FromPtr(form.DeleteBranchAfterMerge).Value()
if form.MergeWhenChecksSucceed {
// delete all scheduled auto merges
@@ -1265,6 +1294,28 @@ func CancelAutoMergePullRequest(ctx *context.Context) {
return
}
+ exist, autoMerge, err := pull_model.GetScheduledMergeByPullID(ctx, issue.PullRequest.ID)
+ if err != nil {
+ ctx.ServerError("GetScheduledMergeByPullID", err)
+ return
+ }
+ if !exist {
+ ctx.NotFound(nil)
+ return
+ }
+
+ if ctx.Doer.ID != autoMerge.DoerID {
+ allowed, err := pull_service.IsUserAllowedToMerge(ctx, issue.PullRequest, ctx.Repo.Permission, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("IsUserAllowedToMerge", err)
+ return
+ }
+ if !allowed {
+ ctx.HTTPError(http.StatusForbidden, "user has no permission to cancel the scheduled auto merge")
+ return
+ }
+ }
+
if err := automerge.RemoveScheduledAutoMerge(ctx, ctx.Doer, issue.PullRequest); err != nil {
if db.IsErrNotExist(err) {
ctx.Flash.Error(ctx.Tr("repo.pulls.auto_merge_not_scheduled"))
@@ -1341,7 +1392,7 @@ func CompareAndPullRequestPost(ctx *context.Context) {
}
// Check if a pull request already exists with the same head and base branch.
- pr, err := issues_model.GetUnmergedPullRequest(ctx, ci.HeadRepo.ID, repo.ID, ci.HeadBranch, ci.BaseBranch, issues_model.PullRequestFlowGithub)
+ pr, err := issues_model.GetUnmergedPullRequest(ctx, ci.HeadRepo.ID, repo.ID, ci.HeadRef.ShortName(), ci.BaseRef.ShortName(), issues_model.PullRequestFlowGithub)
if err != nil && !issues_model.IsErrPullRequestNotExist(err) {
ctx.ServerError("GetUnmergedPullRequest", err)
return
@@ -1371,11 +1422,11 @@ func CompareAndPullRequestPost(ctx *context.Context) {
pullRequest := &issues_model.PullRequest{
HeadRepoID: ci.HeadRepo.ID,
BaseRepoID: repo.ID,
- HeadBranch: ci.HeadBranch,
- BaseBranch: ci.BaseBranch,
+ HeadBranch: ci.HeadRef.ShortName(),
+ BaseBranch: ci.BaseRef.ShortName(),
HeadRepo: ci.HeadRepo,
BaseRepo: repo,
- MergeBase: ci.CompareInfo.MergeBase,
+ MergeBase: ci.MergeBase,
Type: issues_model.PullRequestGitea,
AllowMaintainerEdit: form.AllowMaintainerEdit,
}
@@ -1390,6 +1441,7 @@ func CompareAndPullRequestPost(ctx *context.Context) {
AssigneeIDs: assigneeIDs,
Reviewers: validateRet.Reviewers,
TeamReviewers: validateRet.TeamReviewers,
+ ProjectID: projectID,
}
if err := pull_service.NewPullRequest(ctx, prOpts); err != nil {
switch {
@@ -1441,15 +1493,6 @@ func CompareAndPullRequestPost(ctx *context.Context) {
return
}
- if projectID > 0 && ctx.Repo.CanWrite(unit.TypeProjects) {
- if err := issues_model.IssueAssignOrRemoveProject(ctx, pullIssue, ctx.Doer, projectID, 0); err != nil {
- if !errors.Is(err, util.ErrPermissionDenied) {
- ctx.ServerError("IssueAssignOrRemoveProject", err)
- return
- }
- }
- }
-
log.Trace("Pull request created: %d/%d", repo.ID, pullIssue.ID)
ctx.JSONRedirect(pullIssue.Link())
}
diff --git a/routers/web/repo/pull_review_test.go b/routers/web/repo/pull_review_test.go
index 42223c1d9c..1b28cad5b6 100644
--- a/routers/web/repo/pull_review_test.go
+++ b/routers/web/repo/pull_review_test.go
@@ -30,7 +30,7 @@ func TestRenderConversation(t *testing.T) {
run := func(name string, cb func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder)) {
t.Run(name, func(t *testing.T) {
- ctx, resp := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ ctx, resp := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()})
contexttest.LoadUser(t, ctx, pr.Issue.PosterID)
contexttest.LoadRepo(t, ctx, pr.BaseRepoID)
contexttest.LoadGitRepo(t, ctx)
diff --git a/routers/web/repo/render.go b/routers/web/repo/render.go
index d6447795bf..b1299c7047 100644
--- a/routers/web/repo/render.go
+++ b/routers/web/repo/render.go
@@ -32,24 +32,18 @@ func RenderFile(ctx *context.Context) {
return
}
- dataRc, err := blob.DataAsync()
+ blobReader, err := blob.DataAsync()
if err != nil {
ctx.ServerError("DataAsync", err)
return
}
- defer dataRc.Close()
-
- if markupType := markup.DetectMarkupTypeByFileName(blob.Name()); markupType == "" {
- http.Error(ctx.Resp, "Unsupported file type render", http.StatusBadRequest)
- return
- }
+ defer blobReader.Close()
rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
CurrentTreePath: path.Dir(ctx.Repo.TreePath),
}).WithRelativePath(ctx.Repo.TreePath).WithInStandalonePage(true)
-
- renderer, err := markup.FindRendererByContext(rctx)
+ renderer, rendererInput, err := rctx.DetectMarkupRendererByReader(blobReader)
if err != nil {
http.Error(ctx.Resp, "Unable to find renderer", http.StatusBadRequest)
return
@@ -71,7 +65,7 @@ func RenderFile(ctx *context.Context) {
ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
}
- err = markup.RenderWithRenderer(rctx, renderer, dataRc, ctx.Resp)
+ err = markup.RenderWithRenderer(rctx, renderer, rendererInput, ctx.Resp)
if err != nil {
log.Error("Failed to render file %q: %v", ctx.Repo.TreePath, err)
http.Error(ctx.Resp, "Failed to render file", http.StatusInternalServerError)
diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go
index 3a0976ffa0..bc2b0264c0 100644
--- a/routers/web/repo/repo.go
+++ b/routers/web/repo/repo.go
@@ -364,31 +364,39 @@ func RedirectDownload(ctx *context.Context) {
// Download an archive of a repository
func Download(ctx *context.Context) {
- aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("*"))
+ aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("*"), ctx.FormStrings("path"))
if err != nil {
- if errors.Is(err, archiver_service.ErrUnknownArchiveFormat{}) {
+ if errors.Is(err, util.ErrInvalidArgument) {
ctx.HTTPError(http.StatusBadRequest, err.Error())
- } else if errors.Is(err, archiver_service.RepoRefNotFoundError{}) {
+ } else if errors.Is(err, util.ErrNotExist) {
ctx.HTTPError(http.StatusNotFound, err.Error())
} else {
ctx.ServerError("archiver_service.NewRequest", err)
}
return
}
- archiver_service.ServeRepoArchive(ctx.Base, aReq)
+ err = archiver_service.ServeRepoArchive(ctx.Base, aReq)
+ if err != nil {
+ if errors.Is(err, util.ErrInvalidArgument) {
+ ctx.HTTPError(http.StatusBadRequest, err.Error())
+ } else {
+ ctx.ServerError("archiver_service.ServeRepoArchive", err)
+ }
+ }
}
// InitiateDownload will enqueue an archival request, as needed. It may submit
// a request that's already in-progress, but the archiver service will just
// kind of drop it on the floor if this is the case.
func InitiateDownload(ctx *context.Context) {
- if setting.Repository.StreamArchives {
+ paths := ctx.FormStrings("path")
+ if setting.Repository.StreamArchives || len(paths) > 0 {
ctx.JSON(http.StatusOK, map[string]any{
"complete": true,
})
return
}
- aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("*"))
+ aReq, err := archiver_service.NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("*"), paths)
if err != nil {
ctx.HTTPError(http.StatusBadRequest, "invalid archive request")
return
diff --git a/routers/web/repo/setting/git_hooks.go b/routers/web/repo/setting/git_hooks.go
index ba4b5e85b6..1f542a3f9f 100644
--- a/routers/web/repo/setting/git_hooks.go
+++ b/routers/web/repo/setting/git_hooks.go
@@ -7,6 +7,7 @@ import (
"net/http"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/routers/web/repo"
"code.gitea.io/gitea/services/context"
)
@@ -41,6 +42,7 @@ func GitHooksEdit(ctx *context.Context) {
return
}
ctx.Data["Hook"] = hook
+ ctx.Data["CodeEditorConfig"] = repo.CodeEditorConfig{} // not really editing a repo file, so no editor config
ctx.HTML(http.StatusOK, tplGithookEdit)
}
diff --git a/routers/web/repo/setting/lfs.go b/routers/web/repo/setting/lfs.go
index c7a19062d2..8a8015035f 100644
--- a/routers/web/repo/setting/lfs.go
+++ b/routers/web/repo/setting/lfs.go
@@ -407,7 +407,9 @@ func LFSPointerFiles(ctx *context.Context) {
err = func() error {
pointerChan := make(chan lfs.PointerBlob)
errChan := make(chan error, 1)
- go lfs.SearchPointerBlobs(ctx, ctx.Repo.GitRepo, pointerChan, errChan)
+ go func() {
+ errChan <- lfs.SearchPointerBlobs(ctx, ctx.Repo.GitRepo, pointerChan)
+ }()
numPointers := 0
var numAssociated, numNoExist, numAssociatable int
@@ -483,11 +485,6 @@ func LFSPointerFiles(ctx *context.Context) {
results = append(results, result)
}
- err, has := <-errChan
- if has {
- return err
- }
-
ctx.Data["Pointers"] = results
ctx.Data["NumPointers"] = numPointers
ctx.Data["NumAssociated"] = numAssociated
@@ -495,7 +492,8 @@ func LFSPointerFiles(ctx *context.Context) {
ctx.Data["NumNoExist"] = numNoExist
ctx.Data["NumNotAssociated"] = numPointers - numAssociated
- return nil
+ err := <-errChan
+ return err
}()
if err != nil {
ctx.ServerError("LFSPointerFiles", err)
diff --git a/routers/web/repo/setting/secrets.go b/routers/web/repo/setting/secrets.go
index c6e2d18249..cd32a7dbb7 100644
--- a/routers/web/repo/setting/secrets.go
+++ b/routers/web/repo/setting/secrets.go
@@ -46,7 +46,7 @@ func getSecretsCtx(ctx *context.Context) (*secretsCtx, error) {
if ctx.Data["PageIsOrgSettings"] == true {
if _, err := shared_user.RenderUserOrgHeader(ctx); err != nil {
ctx.ServerError("RenderUserOrgHeader", err)
- return nil, nil
+ return nil, nil //nolint:nilnil // error is already handled by ctx.ServerError
}
return &secretsCtx{
OwnerID: ctx.ContextUser.ID,
diff --git a/routers/web/repo/setting/setting.go b/routers/web/repo/setting/setting.go
index 0c73c1490f..f9e80a72e0 100644
--- a/routers/web/repo/setting/setting.go
+++ b/routers/web/repo/setting/setting.go
@@ -28,6 +28,7 @@ import (
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/validation"
"code.gitea.io/gitea/modules/web"
+ repo_router "code.gitea.io/gitea/routers/web/repo"
actions_service "code.gitea.io/gitea/services/actions"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/forms"
@@ -88,6 +89,11 @@ func SettingsCtxData(ctx *context.Context) {
return
}
ctx.Data["PushMirrors"] = pushMirrors
+
+ repo_router.PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
}
// Settings show a repository's settings page
@@ -622,6 +628,7 @@ func handleSettingsPostAdvanced(ctx *context.Context) {
DefaultDeleteBranchAfterMerge: form.DefaultDeleteBranchAfterMerge,
DefaultMergeStyle: repo_model.MergeStyle(form.PullsDefaultMergeStyle),
DefaultAllowMaintainerEdit: form.DefaultAllowMaintainerEdit,
+ DefaultTargetBranch: strings.TrimSpace(form.DefaultTargetBranch),
}))
} else if !unit_model.TypePullRequests.UnitGlobalDisabled() {
deleteUnitTypes = append(deleteUnitTypes, unit_model.TypePullRequests)
diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go
index 8e85cc3278..8aeb1a0af8 100644
--- a/routers/web/repo/view.go
+++ b/routers/web/repo/view.go
@@ -151,12 +151,7 @@ func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool {
return true
}
-func markupRender(ctx *context.Context, renderCtx *markup.RenderContext, input io.Reader) (escaped *charset.EscapeStatus, output template.HTML, err error) {
- renderer, err := markup.FindRendererByContext(renderCtx)
- if err != nil {
- return nil, "", err
- }
-
+func markupRenderToHTML(ctx *context.Context, renderCtx *markup.RenderContext, renderer markup.Renderer, input io.Reader) (escaped *charset.EscapeStatus, output template.HTML, err error) {
markupRd, markupWr := io.Pipe()
defer markupWr.Close()
diff --git a/routers/web/repo/view_file.go b/routers/web/repo/view_file.go
index 167cd5f927..44bc8543b0 100644
--- a/routers/web/repo/view_file.go
+++ b/routers/web/repo/view_file.go
@@ -21,9 +21,7 @@ import (
"code.gitea.io/gitea/modules/git/attribute"
"code.gitea.io/gitea/modules/highlight"
"code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/typesniffer"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
issue_service "code.gitea.io/gitea/services/issue"
@@ -60,14 +58,19 @@ func prepareFileViewLfsAttrs(ctx *context.Context) (*attribute.Attributes, bool)
return attrs, true
}
-func handleFileViewRenderMarkup(ctx *context.Context, filename string, sniffedType typesniffer.SniffedType, prefetchBuf []byte, utf8Reader io.Reader) bool {
- markupType := markup.DetectMarkupTypeByFileName(filename)
- if markupType == "" {
- markupType = markup.DetectRendererType(filename, sniffedType, prefetchBuf)
- }
- if markupType == "" {
- return false
+func handleFileViewRenderMarkup(ctx *context.Context, prefetchBuf []byte, utf8Reader io.Reader) bool {
+ rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
+ CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
+ CurrentTreePath: path.Dir(ctx.Repo.TreePath),
+ }).WithRelativePath(ctx.Repo.TreePath)
+
+ renderer := rctx.DetectMarkupRenderer(prefetchBuf)
+ if renderer == nil {
+ return false // not supported markup
}
+ metas := ctx.Repo.Repository.ComposeRepoFileMetas(ctx)
+ metas["RefTypeNameSubURL"] = ctx.Repo.RefTypeNameSubURL()
+ rctx.WithMetas(metas)
ctx.Data["HasSourceRenderedToggle"] = true
@@ -75,19 +78,10 @@ func handleFileViewRenderMarkup(ctx *context.Context, filename string, sniffedTy
return false
}
- ctx.Data["MarkupType"] = markupType
- metas := ctx.Repo.Repository.ComposeRepoFileMetas(ctx)
- metas["RefTypeNameSubURL"] = ctx.Repo.RefTypeNameSubURL()
- rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
- CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
- CurrentTreePath: path.Dir(ctx.Repo.TreePath),
- }).
- WithMarkupType(markupType).
- WithRelativePath(ctx.Repo.TreePath).
- WithMetas(metas)
+ ctx.Data["MarkupType"] = rctx.RenderOptions.MarkupType
var err error
- ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, utf8Reader)
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRenderToHTML(ctx, rctx, renderer, utf8Reader)
if err != nil {
ctx.ServerError("Render", err)
return true
@@ -95,7 +89,8 @@ func handleFileViewRenderMarkup(ctx *context.Context, filename string, sniffedTy
return true
}
-func handleFileViewRenderSource(ctx *context.Context, filename string, attrs *attribute.Attributes, fInfo *fileInfo, utf8Reader io.Reader) bool {
+func handleFileViewRenderSource(ctx *context.Context, attrs *attribute.Attributes, fInfo *fileInfo, utf8Reader io.Reader) bool {
+ filename := ctx.Repo.TreePath
if ctx.FormString("display") == "rendered" || !fInfo.st.IsRepresentableAsText() {
return false
}
@@ -124,11 +119,11 @@ func handleFileViewRenderSource(ctx *context.Context, filename string, attrs *at
}
language := attrs.GetLanguage().Value()
- fileContent, lexerName, err := highlight.File(filename, language, buf)
+ fileContent, lexerName, err := highlight.RenderFullFile(filename, language, buf)
ctx.Data["LexerName"] = lexerName
if err != nil {
- log.Error("highlight.File failed, fallback to plain text: %v", err)
- fileContent = highlight.PlainText(buf)
+ log.Error("highlight.RenderFullFile failed, fallback to plain text: %v", err)
+ fileContent = highlight.RenderPlainText(buf)
}
status := &charset.EscapeStatus{}
statuses := make([]*charset.EscapeStatus, len(fileContent))
@@ -246,10 +241,10 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) {
switch {
case fInfo.blobOrLfsSize >= setting.UI.MaxDisplayFileSize:
ctx.Data["IsFileTooLarge"] = true
- case handleFileViewRenderMarkup(ctx, entry.Name(), fInfo.st, buf, contentReader):
+ case handleFileViewRenderMarkup(ctx, buf, contentReader):
// it also sets ctx.Data["FileContent"] and more
ctx.Data["IsMarkup"] = true
- case handleFileViewRenderSource(ctx, entry.Name(), attrs, fInfo, contentReader):
+ case handleFileViewRenderSource(ctx, attrs, fInfo, contentReader):
// it also sets ctx.Data["FileContent"] and more
ctx.Data["IsDisplayingSource"] = true
case handleFileViewRenderImage(ctx, fInfo, buf):
diff --git a/routers/web/repo/view_readme.go b/routers/web/repo/view_readme.go
index f1fa5732f0..830709422e 100644
--- a/routers/web/repo/view_readme.go
+++ b/routers/web/repo/view_readme.go
@@ -18,7 +18,6 @@ import (
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
@@ -190,18 +189,15 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil
rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
- if markupType := markup.DetectMarkupTypeByFileName(readmeFile.Name()); markupType != "" {
+ rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
+ CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
+ CurrentTreePath: path.Dir(readmeFullPath),
+ }).WithRelativePath(readmeFullPath)
+ renderer := rctx.DetectMarkupRenderer(buf)
+ if renderer != nil {
ctx.Data["IsMarkup"] = true
- ctx.Data["MarkupType"] = markupType
-
- rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{
- CurrentRefPath: ctx.Repo.RefTypeNameSubURL(),
- CurrentTreePath: path.Dir(readmeFullPath),
- }).
- WithMarkupType(markupType).
- WithRelativePath(readmeFullPath)
-
- ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, rctx, rd)
+ ctx.Data["MarkupType"] = rctx.RenderOptions.MarkupType
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRenderToHTML(ctx, rctx, renderer, rd)
if err != nil {
log.Error("Render failed for %s in %-v: %v Falling back to rendering source", readmeFile.Name(), ctx.Repo.Repository, err)
delete(ctx.Data, "IsMarkup")
diff --git a/routers/web/repo/wiki.go b/routers/web/repo/wiki.go
index 921e17fb6a..5f775efb22 100644
--- a/routers/web/repo/wiki.go
+++ b/routers/web/repo/wiki.go
@@ -10,7 +10,7 @@ import (
"io"
"net/http"
"net/url"
- "path/filepath"
+ "path"
"strings"
"code.gitea.io/gitea/models/renderhelper"
@@ -277,12 +277,10 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
return nil, nil
}
- if rctx.SidebarTocNode != nil {
+ if rctx.TocShowInSection == markup.TocShowInSidebar && len(rctx.TocHeadingItems) > 0 {
sb := strings.Builder{}
- if err = markdown.SpecializedMarkdown(rctx).Renderer().Render(&sb, nil, rctx.SidebarTocNode); err != nil {
- log.Error("Failed to render wiki sidebar TOC: %v", err)
- }
- ctx.Data["WikiSidebarTocHTML"] = templates.SanitizeHTML(sb.String())
+ markup.RenderTocHeadingItems(rctx, map[string]string{"open": ""}, &sb)
+ ctx.Data["WikiSidebarTocHTML"] = template.HTML(sb.String())
}
if !isSideBar {
@@ -492,9 +490,9 @@ func Wiki(ctx *context.Context) {
}
wikiPath := entry.Name()
- if markup.DetectMarkupTypeByFileName(wikiPath) != markdown.MarkupName {
- ext := strings.ToUpper(filepath.Ext(wikiPath))
- ctx.Data["FormatWarning"] = ext + " rendering is not supported at the moment. Rendered as Markdown."
+ detectedRender := markup.DetectRendererTypeByFilename(wikiPath)
+ if detectedRender == nil || detectedRender.Name() != markdown.MarkupName {
+ ctx.Data["FormatWarning"] = "File extension " + path.Ext(wikiPath) + " is not supported at the moment. Rendered as Markdown."
}
// Get last change information.
lastCommit, err := wikiGitRepo.GetCommitByPath(wikiPath)
diff --git a/routers/web/shared/actions/runners.go b/routers/web/shared/actions/runners.go
index 648f8046a4..9dca366123 100644
--- a/routers/web/shared/actions/runners.go
+++ b/routers/web/shared/actions/runners.go
@@ -59,7 +59,7 @@ func getRunnersCtx(ctx *context.Context) (*runnersCtx, error) {
if ctx.Data["PageIsOrgSettings"] == true {
if _, err := shared_user.RenderUserOrgHeader(ctx); err != nil {
ctx.ServerError("RenderUserOrgHeader", err)
- return nil, nil
+ return nil, nil //nolint:nilnil // error is already handled by ctx.ServerError
}
return &runnersCtx{
RepoID: 0,
diff --git a/routers/web/shared/actions/variables.go b/routers/web/shared/actions/variables.go
index a43c2c2690..8a8c49f415 100644
--- a/routers/web/shared/actions/variables.go
+++ b/routers/web/shared/actions/variables.go
@@ -51,7 +51,7 @@ func getVariablesCtx(ctx *context.Context) (*variablesCtx, error) {
if ctx.Data["PageIsOrgSettings"] == true {
if _, err := shared_user.RenderUserOrgHeader(ctx); err != nil {
ctx.ServerError("RenderUserOrgHeader", err)
- return nil, nil
+ return nil, nil //nolint:nilnil // error is already handled by ctx.ServerError
}
return &variablesCtx{
OwnerID: ctx.ContextUser.ID,
diff --git a/routers/web/user/heatmap.go b/routers/web/user/heatmap.go
new file mode 100644
index 0000000000..e81739e5b8
--- /dev/null
+++ b/routers/web/user/heatmap.go
@@ -0,0 +1,66 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "net/http"
+ "net/url"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+func prepareHeatmapURL(ctx *context.Context) {
+ ctx.Data["EnableHeatmap"] = setting.Service.EnableUserHeatmap
+ if !setting.Service.EnableUserHeatmap {
+ return
+ }
+
+ if ctx.Org.Organization == nil {
+ // for individual user
+ ctx.Data["HeatmapURL"] = ctx.Doer.HomeLink() + "/-/heatmap"
+ return
+ }
+
+ // for org or team
+ heatmapURL := ctx.Org.Organization.OrganisationLink() + "/dashboard/-/heatmap"
+ if ctx.Org.Team != nil {
+ heatmapURL += "/" + url.PathEscape(ctx.Org.Team.LowerName)
+ }
+ ctx.Data["HeatmapURL"] = heatmapURL
+}
+
+func writeHeatmapJSON(ctx *context.Context, hdata []*activities_model.UserHeatmapData) {
+ data := make([][2]int64, len(hdata))
+ var total int64
+ for i, v := range hdata {
+ data[i] = [2]int64{int64(v.Timestamp), v.Contributions}
+ total += v.Contributions
+ }
+ ctx.JSON(http.StatusOK, map[string]any{
+ "heatmapData": data,
+ "totalContributions": total,
+ })
+}
+
+// DashboardHeatmap returns heatmap data as JSON, for the individual user, organization or team dashboard.
+func DashboardHeatmap(ctx *context.Context) {
+ if !setting.Service.EnableUserHeatmap {
+ ctx.NotFound(nil)
+ return
+ }
+ var data []*activities_model.UserHeatmapData
+ var err error
+ if ctx.Org.Organization == nil {
+ data, err = activities_model.GetUserHeatmapDataByUser(ctx, ctx.ContextUser, ctx.Doer)
+ } else {
+ data, err = activities_model.GetUserHeatmapDataByOrgTeam(ctx, ctx.Org.Organization, ctx.Org.Team, ctx.Doer)
+ }
+ if err != nil {
+ ctx.ServerError("GetUserHeatmapData", err)
+ return
+ }
+ writeHeatmapJSON(ctx, data)
+}
diff --git a/routers/web/user/home.go b/routers/web/user/home.go
index b53a3daedb..afdba9a75f 100644
--- a/routers/web/user/home.go
+++ b/routers/web/user/home.go
@@ -54,8 +54,8 @@ const (
tplProfile templates.TplName = "user/profile"
)
-// getDashboardContextUser finds out which context user dashboard is being viewed as .
-func getDashboardContextUser(ctx *context.Context) *user_model.User {
+// prepareDashboardContextUserOrgTeams finds out which context user dashboard is being viewed as .
+func prepareDashboardContextUserOrgTeams(ctx *context.Context) *user_model.User {
ctxUser := ctx.Doer
orgName := ctx.PathParam("org")
if len(orgName) > 0 {
@@ -76,7 +76,7 @@ func getDashboardContextUser(ctx *context.Context) *user_model.User {
// Dashboard render the dashboard page
func Dashboard(ctx *context.Context) {
- ctxUser := getDashboardContextUser(ctx)
+ ctxUser := prepareDashboardContextUserOrgTeams(ctx)
if ctx.Written() {
return
}
@@ -109,15 +109,7 @@ func Dashboard(ctx *context.Context) {
"uid": uid,
}
- if setting.Service.EnableUserHeatmap {
- data, err := activities_model.GetUserHeatmapDataByUserTeam(ctx, ctxUser, ctx.Org.Team, ctx.Doer)
- if err != nil {
- ctx.ServerError("GetUserHeatmapDataByUserTeam", err)
- return
- }
- ctx.Data["HeatmapData"] = data
- ctx.Data["HeatmapTotalContributions"] = activities_model.GetTotalContributionsInHeatmap(data)
- }
+ prepareHeatmapURL(ctx)
feeds, count, err := feed_service.GetFeedsForDashboard(ctx, activities_model.GetFeedsOptions{
RequestedUser: ctxUser,
@@ -156,7 +148,7 @@ func Milestones(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("milestones")
ctx.Data["PageIsMilestonesDashboard"] = true
- ctxUser := getDashboardContextUser(ctx)
+ ctxUser := prepareDashboardContextUserOrgTeams(ctx)
if ctx.Written() {
return
}
@@ -371,7 +363,7 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) {
// Return with NotFound or ServerError if unsuccessful.
// ----------------------------------------------------
- ctxUser := getDashboardContextUser(ctx)
+ ctxUser := prepareDashboardContextUserOrgTeams(ctx)
if ctx.Written() {
return
}
@@ -660,6 +652,8 @@ func ShowSSHKeys(ctx *context.Context) {
}
var buf bytes.Buffer
+ // "authorized_keys" file format: "#" followed by comment line per key
+ buf.WriteString("# Gitea isn't a key server. The keys are exported as the user uploaded and might not have been fully verified.\n")
for i := range keys {
buf.WriteString(keys[i].OmitEmail())
buf.WriteString("\n")
@@ -695,6 +689,8 @@ func ShowGPGKeys(ctx *context.Context) {
var buf bytes.Buffer
headers := make(map[string]string)
+ // https://www.rfc-editor.org/rfc/rfc4880
+ headers["Comment"] = "Gitea isn't a key server. The keys are exported as the user uploaded and might not have been fully verified."
if len(failedEntitiesID) > 0 { // If some key need re-import to be exported
headers["Note"] = "The keys with the following IDs couldn't be exported and need to be reuploaded " + strings.Join(failedEntitiesID, ", ")
} else if len(entities) == 0 {
diff --git a/routers/web/user/home_test.go b/routers/web/user/home_test.go
index c5b9e16c1e..5f3646769e 100644
--- a/routers/web/user/home_test.go
+++ b/routers/web/user/home_test.go
@@ -116,7 +116,7 @@ func TestMilestonesForSpecificRepo(t *testing.T) {
}
func TestDashboardPagination(t *testing.T) {
- ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()})
page := context.NewPagination(10, 3, 1, 3)
setting.AppSubURL = "/SubPath"
diff --git a/routers/web/user/notification.go b/routers/web/user/notification.go
index aaf9d435c0..cf61b0a2f2 100644
--- a/routers/web/user/notification.go
+++ b/routers/web/user/notification.go
@@ -15,6 +15,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unit"
"code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
@@ -128,7 +129,9 @@ func prepareUserNotificationsData(ctx *context.Context) {
ctx.Data["Notifications"] = notifications
ctx.Data["Link"] = setting.AppSubURL + "/notifications"
ctx.Data["SequenceNumber"] = ctx.FormString("sequence-number")
+
pager.AddParamFromRequest(ctx.Req)
+ pager.RemoveParam(container.SetOf("div-only", "sequence-number"))
ctx.Data["Page"] = pager
}
diff --git a/routers/web/user/profile.go b/routers/web/user/profile.go
index d7052914b6..f580055030 100644
--- a/routers/web/user/profile.go
+++ b/routers/web/user/profile.go
@@ -103,6 +103,7 @@ func prepareUserProfileTabData(ctx *context.Context, profileDbRepo *repo_model.R
repos []*repo_model.Repository
count int64
total int
+ curRows int
orderBy db.SearchOrderBy
)
@@ -161,21 +162,15 @@ func prepareUserProfileTabData(ctx *context.Context, profileDbRepo *repo_model.R
ctx.Data["Cards"] = following
total = int(numFollowing)
case "activity":
- // prepare heatmap data
- if setting.Service.EnableUserHeatmap {
- data, err := activities_model.GetUserHeatmapDataByUser(ctx, ctx.ContextUser, ctx.Doer)
- if err != nil {
- ctx.ServerError("GetUserHeatmapDataByUser", err)
- return
- }
- ctx.Data["HeatmapData"] = data
- ctx.Data["HeatmapTotalContributions"] = activities_model.GetTotalContributionsInHeatmap(data)
+ if setting.Service.EnableUserHeatmap && activities_model.ActivityReadable(ctx.ContextUser, ctx.Doer) {
+ ctx.Data["EnableHeatmap"] = true
+ ctx.Data["HeatmapURL"] = ctx.ContextUser.HomeLink() + "/-/heatmap"
}
date := ctx.FormString("date")
pagingNum = setting.UI.FeedPagingNum
showPrivate := ctx.IsSigned && (ctx.Doer.IsAdmin || ctx.Doer.ID == ctx.ContextUser.ID)
- items, count, err := feed_service.GetFeeds(ctx, activities_model.GetFeedsOptions{
+ items, feedCount, err := feed_service.GetFeedsForDashboard(ctx, activities_model.GetFeedsOptions{
RequestedUser: ctx.ContextUser,
Actor: ctx.Doer,
IncludePrivate: showPrivate,
@@ -193,8 +188,8 @@ func prepareUserProfileTabData(ctx *context.Context, profileDbRepo *repo_model.R
}
ctx.Data["Feeds"] = items
ctx.Data["Date"] = date
-
- total = int(count)
+ curRows = len(items)
+ total = feedCount
case "stars":
ctx.Data["PageIsProfileStarList"] = true
ctx.Data["ShowRepoOwnerOnList"] = true
@@ -316,6 +311,9 @@ func prepareUserProfileTabData(ctx *context.Context, profileDbRepo *repo_model.R
}
pager := context.NewPagination(total, pagingNum, page, 5)
+ if tab == "activity" {
+ pager.WithCurRows(curRows)
+ }
pager.AddParamFromRequest(ctx.Req)
ctx.Data["Page"] = pager
}
diff --git a/routers/web/user/setting/account.go b/routers/web/user/setting/account.go
index f6dc79a2c2..2a6c1f00bc 100644
--- a/routers/web/user/setting/account.go
+++ b/routers/web/user/setting/account.go
@@ -113,7 +113,12 @@ func EmailPost(ctx *context.Context) {
// Make email address primary.
if ctx.FormString("_method") == "PRIMARY" {
- if err := user_model.MakeActiveEmailPrimary(ctx, ctx.FormInt64("id")); err != nil {
+ if err := user_model.MakeActiveEmailPrimary(ctx, ctx.Doer.ID, ctx.FormInt64("id")); err != nil {
+ if user_model.IsErrEmailAddressNotExist(err) {
+ ctx.Flash.Error(ctx.Tr("settings.email_primary_not_found"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
ctx.ServerError("MakeEmailPrimary", err)
return
}
diff --git a/routers/web/user/setting/adopt.go b/routers/web/user/setting/adopt.go
index 171c1933d4..abf9d8c6db 100644
--- a/routers/web/user/setting/adopt.go
+++ b/routers/web/user/setting/adopt.go
@@ -4,12 +4,9 @@
package setting
import (
- "path/filepath"
-
repo_model "code.gitea.io/gitea/models/repo"
- user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/context"
repo_service "code.gitea.io/gitea/services/repository"
)
@@ -27,7 +24,6 @@ func AdoptOrDeleteRepository(ctx *context.Context) {
action := ctx.FormString("action")
ctxUser := ctx.Doer
- root := user_model.UserPath(ctxUser.LowerName)
// check not a repo
has, err := repo_model.IsRepositoryModelExist(ctx, ctxUser, dir)
@@ -36,12 +32,12 @@ func AdoptOrDeleteRepository(ctx *context.Context) {
return
}
- isDir, err := util.IsDir(filepath.Join(root, dir+".git"))
+ exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(repo_model.RelativePath(ctxUser.Name, dir)))
if err != nil {
ctx.ServerError("IsDir", err)
return
}
- if has || !isDir {
+ if has || !exist {
// Fallthrough to failure mode
} else if action == "adopt" && allowAdopt {
if _, err := repo_service.AdoptRepository(ctx, ctxUser, ctxUser, repo_service.CreateRepoOptions{
diff --git a/routers/web/user/setting/keys.go b/routers/web/user/setting/keys.go
index 13aa4a471b..999bb76683 100644
--- a/routers/web/user/setting/keys.go
+++ b/routers/web/user/setting/keys.go
@@ -187,7 +187,7 @@ func KeysPost(ctx *context.Context) {
return
}
- if _, err = asymkey_model.AddPublicKey(ctx, ctx.Doer.ID, form.Title, content, 0); err != nil {
+ if _, err = asymkey_model.AddPublicKey(ctx, ctx.Doer.ID, form.Title, content, 0, false); err != nil {
ctx.Data["HasSSHError"] = true
switch {
case asymkey_model.IsErrKeyAlreadyExist(err):
diff --git a/routers/web/user/setting/security/main_test.go b/routers/web/user/setting/security/main_test.go
new file mode 100644
index 0000000000..2a27cd6dbf
--- /dev/null
+++ b/routers/web/user/setting/security/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package security
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/user/setting/security/openid.go b/routers/web/user/setting/security/openid.go
index a23a98dd25..78db7650fe 100644
--- a/routers/web/user/setting/security/openid.go
+++ b/routers/web/user/setting/security/openid.go
@@ -4,12 +4,14 @@
package security
import (
+ "errors"
"net/http"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/auth/openid"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/services/context"
"code.gitea.io/gitea/services/forms"
@@ -116,7 +118,11 @@ func DeleteOpenID(ctx *context.Context) {
}
if err := user_model.DeleteUserOpenID(ctx, &user_model.UserOpenID{ID: ctx.FormInt64("id"), UID: ctx.Doer.ID}); err != nil {
- ctx.ServerError("DeleteUserOpenID", err)
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.HTTPError(http.StatusNotFound)
+ } else {
+ ctx.ServerError("DeleteUserOpenID", err)
+ }
return
}
log.Trace("OpenID address deleted: %s", ctx.Doer.Name)
@@ -132,8 +138,12 @@ func ToggleOpenIDVisibility(ctx *context.Context) {
return
}
- if err := user_model.ToggleUserOpenIDVisibility(ctx, ctx.FormInt64("id")); err != nil {
- ctx.ServerError("ToggleUserOpenIDVisibility", err)
+ if err := user_model.ToggleUserOpenIDVisibility(ctx, ctx.FormInt64("id"), ctx.Doer); err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.HTTPError(http.StatusNotFound)
+ } else {
+ ctx.ServerError("ToggleUserOpenIDVisibility", err)
+ }
return
}
diff --git a/routers/web/user/setting/security/openid_test.go b/routers/web/user/setting/security/openid_test.go
new file mode 100644
index 0000000000..860639ea1c
--- /dev/null
+++ b/routers/web/user/setting/security/openid_test.go
@@ -0,0 +1,36 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package security
+
+import (
+ "net/http"
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestDeleteOpenIDReturnsNotFoundForOtherUsersAddress(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "POST /user/settings/security")
+ contexttest.LoadUser(t, ctx, 2)
+ ctx.SetFormString("id", "1")
+
+ DeleteOpenID(ctx)
+
+ assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus())
+}
+
+func TestToggleOpenIDVisibilityReturnsNotFoundForOtherUsersAddress(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "POST /user/settings/security")
+ contexttest.LoadUser(t, ctx, 2)
+ ctx.SetFormString("id", "1")
+
+ ToggleOpenIDVisibility(ctx)
+
+ assert.Equal(t, http.StatusNotFound, ctx.Resp.WrittenStatus())
+}
diff --git a/routers/web/user/stop_watch.go b/routers/web/user/stop_watch.go
index 1d1cc61cc9..4bd8841573 100644
--- a/routers/web/user/stop_watch.go
+++ b/routers/web/user/stop_watch.go
@@ -29,7 +29,7 @@ func GetStopwatches(ctx *context.Context) {
return
}
- apiSWs, err := convert.ToStopWatches(ctx, sws)
+ apiSWs, err := convert.ToStopWatches(ctx, ctx.Doer, sws)
if err != nil {
ctx.HTTPError(http.StatusInternalServerError, err.Error())
return
diff --git a/routers/web/web.go b/routers/web/web.go
index 004e413840..717745bc8f 100644
--- a/routers/web/web.go
+++ b/routers/web/web.go
@@ -18,7 +18,6 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/structs"
- "code.gitea.io/gitea/modules/templates"
"code.gitea.io/gitea/modules/validation"
"code.gitea.io/gitea/modules/web"
"code.gitea.io/gitea/modules/web/middleware"
@@ -160,9 +159,7 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Cont
}
ctx.Data["Title"] = ctx.Tr("auth.must_change_password")
ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/change_password"
- if ctx.Req.URL.Path != "/user/events" {
- middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
- }
+ middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
ctx.Redirect(setting.AppSubURL + "/user/settings/change_password")
return
}
@@ -173,7 +170,7 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Cont
}
}
- // Redirect to dashboard (or alternate location) if user tries to visit any non-login page.
+ // When a signed-in user visits a page that requires sign-out (e.g.: "/user/login"), redirect to home (or alternate location)
if options.SignOutRequired && ctx.IsSigned && ctx.Req.URL.RequestURI() != "/" {
ctx.RedirectToCurrentSite(ctx.FormString("redirect_to"))
return
@@ -188,10 +185,7 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Cont
if options.SignInRequired {
if !ctx.IsSigned {
- if ctx.Req.URL.Path != "/user/events" {
- middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
- }
- ctx.Redirect(setting.AppSubURL + "/user/login")
+ ctx.Redirect(middleware.RedirectLinkUserLogin(ctx.Req))
return
} else if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm {
ctx.Data["Title"] = ctx.Tr("auth.active_your_account")
@@ -201,12 +195,8 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Cont
}
// Redirect to log in page if auto-signin info is provided and has not signed in.
- if !options.SignOutRequired && !ctx.IsSigned &&
- ctx.GetSiteCookie(setting.CookieRememberName) != "" {
- if ctx.Req.URL.Path != "/user/events" {
- middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
- }
- ctx.Redirect(setting.AppSubURL + "/user/login")
+ if !options.SignOutRequired && !ctx.IsSigned && ctx.GetSiteCookie(setting.CookieRememberName) != "" {
+ ctx.Redirect(middleware.RedirectLinkUserLogin(ctx.Req))
return
}
@@ -242,8 +232,6 @@ func Routes() *web.Router {
routes.Methods("GET, HEAD", "/apple-touch-icon-precomposed.png", misc.StaticRedirect("/assets/img/apple-touch-icon.png"))
routes.Methods("GET, HEAD", "/favicon.ico", misc.StaticRedirect("/assets/img/favicon.png"))
- _ = templates.HTMLRenderer()
-
var mid []any
if setting.EnableGzip {
@@ -901,6 +889,8 @@ func registerWebRoutes(m *web.Router) {
m.Group("/{org}", func() {
m.Get("/dashboard", user.Dashboard)
m.Get("/dashboard/{team}", user.Dashboard)
+ m.Get("/dashboard/-/heatmap", user.DashboardHeatmap)
+ m.Get("/dashboard/-/heatmap/{team}", user.DashboardHeatmap)
m.Get("/issues", user.Issues)
m.Get("/issues/{team}", user.Issues)
m.Get("/pulls", user.Pulls)
@@ -1037,6 +1027,7 @@ func registerWebRoutes(m *web.Router) {
}
m.Get("/repositories", org.Repositories)
+ m.Get("/heatmap", user.DashboardHeatmap)
m.Group("/projects", func() {
m.Group("", func() {
@@ -1558,7 +1549,7 @@ func registerWebRoutes(m *web.Router) {
m.Group("/{username}/{reponame}", func() {
m.Get("/{type:pulls}", repo.Issues)
m.Group("/{type:pulls}/{index}", func() {
- m.Get("", repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewIssue)
+ m.Get("", repo.SetEditorconfigIfExists, repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewIssue)
m.Get(".diff", repo.DownloadPullDiff)
m.Get(".patch", repo.DownloadPullPatch)
m.Get("/merge_box", repo.ViewPullMergeBox)
diff --git a/services/actions/commit_status.go b/services/actions/commit_status.go
index 089dfeb634..884b98e966 100644
--- a/services/actions/commit_status.go
+++ b/services/actions/commit_status.go
@@ -9,6 +9,7 @@ import (
"fmt"
"path"
"strconv"
+ "strings"
actions_model "code.gitea.io/gitea/models/actions"
"code.gitea.io/gitea/models/db"
@@ -114,6 +115,21 @@ func getCommitStatusEventNameAndCommitID(run *actions_model.ActionRun) (event, c
return "", "", errors.New("head of pull request is missing in event payload")
}
commitID = payload.PullRequest.Head.Sha
+ case // pull_request_review events share the same PullRequestPayload as pull_request
+ webhook_module.HookEventPullRequestReviewApproved,
+ webhook_module.HookEventPullRequestReviewRejected,
+ webhook_module.HookEventPullRequestReviewComment:
+ event = run.TriggerEvent
+ payload, err := run.GetPullRequestEventPayload()
+ if err != nil {
+ return "", "", fmt.Errorf("GetPullRequestEventPayload: %w", err)
+ }
+ if payload.PullRequest == nil {
+ return "", "", errors.New("pull request is missing in event payload")
+ } else if payload.PullRequest.Head == nil {
+ return "", "", errors.New("head of pull request is missing in event payload")
+ }
+ commitID = payload.PullRequest.Head.Sha
case webhook_module.HookEventRelease:
event = string(run.Event)
commitID = run.CommitSHA
@@ -129,6 +145,7 @@ func createCommitStatus(ctx context.Context, repo *repo_model.Repository, event,
runName = wfs[0].Name
}
ctxName := fmt.Sprintf("%s / %s (%s)", runName, job.Name, event)
+ ctxName = strings.TrimSpace(ctxName) // git_model.NewCommitStatus also trims spaces
state := toCommitStatus(job.Status)
if statuses, err := git_model.GetLatestCommitStatus(ctx, repo.ID, commitID, db.ListOptionsAll); err == nil {
for _, v := range statuses {
diff --git a/services/actions/context.go b/services/actions/context.go
index b6de429ccf..626ae6ee6b 100644
--- a/services/actions/context.go
+++ b/services/actions/context.go
@@ -104,7 +104,7 @@ type TaskNeed struct {
// FindTaskNeeds finds the `needs` for the task by the task's job
func FindTaskNeeds(ctx context.Context, job *actions_model.ActionRunJob) (map[string]*TaskNeed, error) {
if len(job.Needs) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil when the job has no needs
}
needs := container.SetOf(job.Needs...)
diff --git a/services/actions/init_test.go b/services/actions/init_test.go
index 2d33a4e5cc..e61b3759e1 100644
--- a/services/actions/init_test.go
+++ b/services/actions/init_test.go
@@ -18,7 +18,6 @@ import (
func TestMain(m *testing.M) {
unittest.MainTest(m)
- os.Exit(m.Run())
}
func TestInitToken(t *testing.T) {
diff --git a/services/actions/job_emitter.go b/services/actions/job_emitter.go
index 74a8a127ef..0e14c3cb17 100644
--- a/services/actions/job_emitter.go
+++ b/services/actions/job_emitter.go
@@ -114,7 +114,7 @@ func checkJobsByRunID(ctx context.Context, runID int64) error {
}
}
if runUpdated {
- NotifyWorkflowRunStatusUpdateWithReload(ctx, jobs[0])
+ NotifyWorkflowRunStatusUpdateWithReload(ctx, js[0])
}
}
return nil
@@ -123,7 +123,7 @@ func checkJobsByRunID(ctx context.Context, runID int64) error {
// findBlockedRunByConcurrency finds the blocked concurrent run in a repo and returns `nil, nil` when there is no blocked run.
func findBlockedRunByConcurrency(ctx context.Context, repoID int64, concurrencyGroup string) (*actions_model.ActionRun, error) {
if concurrencyGroup == "" {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that no blocked run exists
}
cRuns, cJobs, err := actions_model.GetConcurrentRunsAndJobs(ctx, repoID, concurrencyGroup, []actions_model.Status{actions_model.StatusBlocked})
if err != nil {
diff --git a/services/agit/agit.go b/services/agit/agit.go
index 15fc2e8fb5..fa2ddd9baf 100644
--- a/services/agit/agit.go
+++ b/services/agit/agit.go
@@ -229,7 +229,7 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git.
}
if !forcePush.Value() {
- output, err := gitrepo.RunCmdString(ctx, repo,
+ output, _, err := gitrepo.RunCmdString(ctx, repo,
gitcmd.NewCommand("rev-list", "--max-count=1").
AddDynamicArguments(oldCommitID, "^"+opts.NewCommitIDs[i]),
)
diff --git a/services/asymkey/commit_test.go b/services/asymkey/commit_test.go
index 6edba1e90a..2dd08b5dd4 100644
--- a/services/asymkey/commit_test.go
+++ b/services/asymkey/commit_test.go
@@ -31,7 +31,7 @@ func TestParseCommitWithSSHSignature(t *testing.T) {
// AAAEDWqPHTH51xb4hy1y1f1VeWL/2A9Q0b6atOyv5fx8x5prpPrMXSg9qTx04jPNPWRcHs
// utyxWjThIpzcaO68yWVnAAAAEXVzZXIyQGV4YW1wbGUuY29tAQIDBA==
// -----END OPENSSH PRIVATE KEY-----
- sshPubKey, err := asymkey_model.AddPublicKey(t.Context(), 999, "user-ssh-key-any-name", "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILpPrMXSg9qTx04jPNPWRcHsutyxWjThIpzcaO68yWVn", 0)
+ sshPubKey, err := asymkey_model.AddPublicKey(t.Context(), 999, "user-ssh-key-any-name", "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILpPrMXSg9qTx04jPNPWRcHsutyxWjThIpzcaO68yWVn", 0, false)
require.NoError(t, err)
_, err = db.GetEngine(t.Context()).ID(sshPubKey.ID).Cols("verified").Update(&asymkey_model.PublicKey{Verified: true})
require.NoError(t, err)
diff --git a/services/asymkey/sign.go b/services/asymkey/sign.go
index d778ff8918..cffefe08ae 100644
--- a/services/asymkey/sign.go
+++ b/services/asymkey/sign.go
@@ -271,13 +271,22 @@ Loop:
}
// SignMerge determines if we should sign a PR merge commit to the base repository
-func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, gitRepo *git.Repository, baseCommit, headCommit string) (bool, *git.SigningKey, *git.Signature, error) {
+func SignMerge(ctx context.Context, pr *issues_model.PullRequest, u *user_model.User, gitRepo *git.Repository) (bool, *git.SigningKey, *git.Signature, error) {
if err := pr.LoadBaseRepo(ctx); err != nil {
log.Error("Unable to get Base Repo for pull request")
return false, nil, nil, err
}
repo := pr.BaseRepo
+ baseCommit, err := gitRepo.GetCommit(pr.BaseBranch)
+ if err != nil {
+ return false, nil, nil, err
+ }
+ headCommit, err := gitRepo.GetCommit(pr.GetGitHeadRefName())
+ if err != nil {
+ return false, nil, nil, err
+ }
+
signingKey, signer := gitrepo.GetSigningKey(ctx)
if signingKey == nil {
return false, nil, nil, &ErrWontSign{noKey}
@@ -319,38 +328,26 @@ Loop:
return false, nil, nil, &ErrWontSign{approved}
}
case baseSigned:
- commit, err := gitRepo.GetCommit(baseCommit)
- if err != nil {
- return false, nil, nil, err
- }
- verification := ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, baseCommit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{baseSigned}
}
case headSigned:
- commit, err := gitRepo.GetCommit(headCommit)
- if err != nil {
- return false, nil, nil, err
- }
- verification := ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, headCommit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{headSigned}
}
case commitsSigned:
- commit, err := gitRepo.GetCommit(headCommit)
- if err != nil {
- return false, nil, nil, err
- }
- verification := ParseCommitWithSignature(ctx, commit)
+ verification := ParseCommitWithSignature(ctx, headCommit)
if !verification.Verified {
return false, nil, nil, &ErrWontSign{commitsSigned}
}
// need to work out merge-base
- mergeBaseCommit, _, err := gitRepo.GetMergeBase("", baseCommit, headCommit)
+ mergeBaseCommit, err := gitrepo.MergeBase(ctx, pr.BaseRepo, baseCommit.ID.String(), headCommit.ID.String())
if err != nil {
return false, nil, nil, err
}
- commitList, err := commit.CommitsBeforeUntil(mergeBaseCommit)
+ commitList, err := headCommit.CommitsBeforeUntil(mergeBaseCommit)
if err != nil {
return false, nil, nil, err
}
diff --git a/services/asymkey/ssh_key_test.go b/services/asymkey/ssh_key_test.go
index 3605bd1e64..b052050dc6 100644
--- a/services/asymkey/ssh_key_test.go
+++ b/services/asymkey/ssh_key_test.go
@@ -66,7 +66,7 @@ ssh-dss AAAAB3NzaC1kc3MAAACBAOChCC7lf6Uo9n7BmZ6M8St19PZf4Tn59NriyboW2x/DZuYAz3ib
for i, kase := range testCases {
s.ID = int64(i) + 20
- asymkey_model.AddPublicKeysBySource(t.Context(), user, s, []string{kase.keyString})
+ asymkey_model.AddPublicKeysBySource(t.Context(), user, s, []string{kase.keyString}, false)
keys, err := db.Find[asymkey_model.PublicKey](t.Context(), asymkey_model.FindPublicKeyOptions{
OwnerID: user.ID,
LoginSourceID: s.ID,
diff --git a/services/auth/auth_token.go b/services/auth/auth_token.go
index 6b59238c98..8897bbd19c 100644
--- a/services/auth/auth_token.go
+++ b/services/auth/auth_token.go
@@ -32,7 +32,7 @@ var (
func CheckAuthToken(ctx context.Context, value string) (*auth_model.AuthToken, error) {
if len(value) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
parts := strings.SplitN(value, ":", 2)
diff --git a/services/auth/basic.go b/services/auth/basic.go
index 501924b4df..3161d7f33d 100644
--- a/services/auth/basic.go
+++ b/services/auth/basic.go
@@ -40,25 +40,21 @@ func (b *Basic) Name() string {
return BasicMethodName
}
-// Verify extracts and validates Basic data (username and password/token) from the
-// "Authorization" header of the request and returns the corresponding user object for that
-// name/token on successful validation.
-// Returns nil if header is empty or validation fails.
-func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+func (b *Basic) parseAuthBasic(req *http.Request) (ret struct{ authToken, uname, passwd string }) {
// Basic authentication should only fire on API, Feed, Download, Archives or on Git or LFSPaths
// Not all feed (rss/atom) clients feature the ability to add cookies or headers, so we need to allow basic auth for feeds
detector := newAuthPathDetector(req)
if !detector.isAPIPath() && !detector.isFeedRequest(req) && !detector.isContainerPath() && !detector.isAttachmentDownload() && !detector.isArchivePath() && !detector.isGitRawOrAttachOrLFSPath() {
- return nil, nil
+ return ret
}
authHeader := req.Header.Get("Authorization")
if authHeader == "" {
- return nil, nil
+ return ret
}
parsed, ok := httpauth.ParseAuthorizationHeader(authHeader)
if !ok || parsed.BasicAuth == nil {
- return nil, nil
+ return ret
}
uname, passwd := parsed.BasicAuth.Username, parsed.BasicAuth.Password
@@ -73,7 +69,12 @@ func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore
} else {
log.Trace("Basic Authorization: Attempting login with username as token")
}
+ ret.authToken, ret.uname, ret.passwd = authToken, uname, passwd
+ return ret
+}
+// VerifyAuthToken verifies only the access token provided as a parameter; it is used by other auth methods that want to reuse the access token verification logic
+func (b *Basic) VerifyAuthToken(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore, authToken string) (*user_model.User, error) {
// get oauth2 token's user's ID
_, uid := GetOAuthAccessTokenScopeAndUserID(req.Context(), authToken)
if uid != 0 {
@@ -117,16 +118,29 @@ func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore
task, err := actions_model.GetRunningTaskByToken(req.Context(), authToken)
if err == nil && task != nil {
log.Trace("Basic Authorization: Valid AccessToken for task[%d]", task.ID)
-
store.GetData()["LoginMethod"] = ActionTokenMethodName
- store.GetData()["IsActionsToken"] = true
- store.GetData()["ActionsTaskID"] = task.ID
+ return user_model.NewActionsUserWithTaskID(task.ID), nil
+ }
+ return nil, nil //nolint:nilnil // the auth method is not applicable
+}
- return user_model.NewActionsUser(), nil
+// Verify extracts and validates Basic data (username and password/token) from the
+// "Authorization" header of the request and returns the corresponding user object for that
+// name/token on successful validation.
+// Returns nil if header is empty or validation fails.
+func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
+ parseBasicRet := b.parseAuthBasic(req)
+ authToken, uname, passwd := parseBasicRet.authToken, parseBasicRet.uname, parseBasicRet.passwd
+ if authToken == "" && uname == "" {
+ return nil, nil //nolint:nilnil // the auth method is not applicable
+ }
+ u, err := b.VerifyAuthToken(req, w, store, sess, authToken)
+ if u != nil || err != nil {
+ return u, err
}
if !setting.Service.EnableBasicAuth {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
log.Trace("Basic Authorization: Attempting SignIn for %s", uname)
diff --git a/services/auth/httpsign.go b/services/auth/httpsign.go
index 25e96ff32d..130207c0ea 100644
--- a/services/auth/httpsign.go
+++ b/services/auth/httpsign.go
@@ -42,7 +42,7 @@ func (h *HTTPSign) Name() string {
func (h *HTTPSign) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
sigHead := req.Header.Get("Signature")
if len(sigHead) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
var (
@@ -53,14 +53,14 @@ func (h *HTTPSign) Verify(req *http.Request, w http.ResponseWriter, store DataSt
if len(req.Header.Get("X-Ssh-Certificate")) != 0 {
// Handle Signature signed by SSH certificates
if len(setting.SSH.TrustedUserCAKeys) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
publicKey, err = VerifyCert(req)
if err != nil {
log.Debug("VerifyCert on request from %s: failed: %v", req.RemoteAddr, err)
log.Warn("Failed authentication attempt from %s", req.RemoteAddr)
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
} else {
// Handle Signature signed by Public Key
@@ -68,7 +68,7 @@ func (h *HTTPSign) Verify(req *http.Request, w http.ResponseWriter, store DataSt
if err != nil {
log.Debug("VerifyPubKey on request from %s: failed: %v", req.RemoteAddr, err)
log.Warn("Failed authentication attempt from %s", req.RemoteAddr)
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
}
diff --git a/services/auth/oauth2.go b/services/auth/oauth2.go
index 7df6f4638e..86903b0ce1 100644
--- a/services/auth/oauth2.go
+++ b/services/auth/oauth2.go
@@ -6,6 +6,7 @@ package auth
import (
"context"
+ "errors"
"net/http"
"strings"
"time"
@@ -17,14 +18,12 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/actions"
"code.gitea.io/gitea/services/oauth2_provider"
)
-// Ensure the struct implements the interface.
-var (
- _ Method = &OAuth2{}
-)
+var _ Method = &OAuth2{}
// GetOAuthAccessTokenScopeAndUserID returns access token scope and user id
func GetOAuthAccessTokenScopeAndUserID(ctx context.Context, accessToken string) (auth_model.AccessTokenScope, int64) {
@@ -106,18 +105,16 @@ func parseToken(req *http.Request) (string, bool) {
return "", false
}
-// userIDFromToken returns the user id corresponding to the OAuth token.
+// userFromToken returns the user corresponding to the OAuth token.
// It will set 'IsApiToken' to true if the token is an API token and
-// set 'ApiTokenScope' to the scope of the access token
-func (o *OAuth2) userIDFromToken(ctx context.Context, tokenSHA string, store DataStore) int64 {
+// set 'ApiTokenScope' to the scope of the access token (TODO: this behavior should be fixed, don't set ctx.Data)
+func (o *OAuth2) userFromToken(ctx context.Context, tokenSHA string, store DataStore) (*user_model.User, error) {
// Let's see if token is valid.
if strings.Contains(tokenSHA, ".") {
// First attempt to decode an actions JWT, returning the actions user
if taskID, err := actions.TokenToTaskID(tokenSHA); err == nil {
if CheckTaskIsRunning(ctx, taskID) {
- store.GetData()["IsActionsToken"] = true
- store.GetData()["ActionsTaskID"] = taskID
- return user_model.ActionsUserID
+ return user_model.NewActionsUserWithTaskID(taskID), nil
}
}
@@ -127,33 +124,27 @@ func (o *OAuth2) userIDFromToken(ctx context.Context, tokenSHA string, store Dat
store.GetData()["IsApiToken"] = true
store.GetData()["ApiTokenScope"] = accessTokenScope
}
- return uid
+ return user_model.GetUserByID(ctx, uid)
}
t, err := auth_model.GetAccessTokenBySHA(ctx, tokenSHA)
if err != nil {
if auth_model.IsErrAccessTokenNotExist(err) {
// check task token
- task, err := actions_model.GetRunningTaskByToken(ctx, tokenSHA)
- if err == nil && task != nil {
+ if task, err := actions_model.GetRunningTaskByToken(ctx, tokenSHA); err == nil {
log.Trace("Basic Authorization: Valid AccessToken for task[%d]", task.ID)
-
- store.GetData()["IsActionsToken"] = true
- store.GetData()["ActionsTaskID"] = task.ID
-
- return user_model.ActionsUserID
+ return user_model.NewActionsUserWithTaskID(task.ID), nil
}
- } else if !auth_model.IsErrAccessTokenNotExist(err) && !auth_model.IsErrAccessTokenEmpty(err) {
- log.Error("GetAccessTokenBySHA: %v", err)
}
- return 0
+ return nil, err
}
+
t.UpdatedUnix = timeutil.TimeStampNow()
if err = auth_model.UpdateAccessToken(ctx, t); err != nil {
log.Error("UpdateAccessToken: %v", err)
}
store.GetData()["IsApiToken"] = true
store.GetData()["ApiTokenScope"] = t.Scope
- return t.UID
+ return user_model.GetUserByID(ctx, t.UID)
}
// Verify extracts the user ID from the OAuth token in the query parameters
@@ -165,29 +156,17 @@ func (o *OAuth2) Verify(req *http.Request, w http.ResponseWriter, store DataStor
detector := newAuthPathDetector(req)
if !detector.isAPIPath() && !detector.isAttachmentDownload() && !detector.isAuthenticatedTokenRequest() &&
!detector.isGitRawOrAttachPath() && !detector.isArchivePath() {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
token, ok := parseToken(req)
if !ok {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
- id := o.userIDFromToken(req.Context(), token, store)
-
- if id <= 0 && id != -2 { // -2 means actions, so we need to allow it.
- return nil, user_model.ErrUserNotExist{}
+ user, err := o.userFromToken(req.Context(), token, store)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ log.Error("userFromToken: %v", err) // the callers might ignore the error, so log it here
}
- log.Trace("OAuth2 Authorization: Found token for user[%d]", id)
-
- user, err := user_model.GetPossibleUserByID(req.Context(), id)
- if err != nil {
- if !user_model.IsErrUserNotExist(err) {
- log.Error("GetUserByName: %v", err)
- }
- return nil, err
- }
-
- log.Trace("OAuth2 Authorization: Logged in user %-v", user)
- return user, nil
+ return user, err
}
diff --git a/services/auth/oauth2_test.go b/services/auth/oauth2_test.go
index f003742a94..308da846b8 100644
--- a/services/auth/oauth2_test.go
+++ b/services/auth/oauth2_test.go
@@ -12,23 +12,26 @@ import (
"code.gitea.io/gitea/services/actions"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func TestUserIDFromToken(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
t.Run("Actions JWT", func(t *testing.T) {
- const RunningTaskID = 47
+ const RunningTaskID int64 = 47
token, err := actions.CreateAuthorizationToken(RunningTaskID, 1, 2)
assert.NoError(t, err)
ds := make(reqctx.ContextData)
o := OAuth2{}
- uid := o.userIDFromToken(t.Context(), token, ds)
- assert.Equal(t, user_model.ActionsUserID, uid)
- assert.Equal(t, true, ds["IsActionsToken"])
- assert.Equal(t, ds["ActionsTaskID"], int64(RunningTaskID))
+ u, err := o.userFromToken(t.Context(), token, ds)
+ require.NoError(t, err)
+ assert.Equal(t, user_model.ActionsUserID, u.ID)
+ taskID, ok := user_model.GetActionsUserTaskID(u)
+ assert.True(t, ok)
+ assert.Equal(t, RunningTaskID, taskID)
})
}
diff --git a/services/auth/reverseproxy.go b/services/auth/reverseproxy.go
index d6664d738d..064b263a67 100644
--- a/services/auth/reverseproxy.go
+++ b/services/auth/reverseproxy.go
@@ -51,7 +51,7 @@ func (r *ReverseProxy) Name() string {
func (r *ReverseProxy) getUserFromAuthUser(req *http.Request) (*user_model.User, error) {
username := r.getUserName(req)
if len(username) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
log.Trace("ReverseProxy Authorization: Found username: %s", username)
@@ -111,7 +111,7 @@ func (r *ReverseProxy) Verify(req *http.Request, w http.ResponseWriter, store Da
if user == nil {
user = r.getUserFromAuthEmail(req)
if user == nil {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
}
diff --git a/services/auth/session.go b/services/auth/session.go
index 35d97e42da..5b6e4599b8 100644
--- a/services/auth/session.go
+++ b/services/auth/session.go
@@ -29,19 +29,19 @@ func (s *Session) Name() string {
// Returns nil if there is no user uid stored in the session.
func (s *Session) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) (*user_model.User, error) {
if sess == nil {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
// Get user ID
uid := sess.Get("uid")
if uid == nil {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
log.Trace("Session Authorization: Found user[%d]", uid)
id, ok := uid.(int64)
if !ok {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
// Get user object
@@ -52,7 +52,7 @@ func (s *Session) Verify(req *http.Request, w http.ResponseWriter, store DataSto
// Return the err as-is to keep current signed-in session, in case the err is something like context.Canceled. Otherwise non-existing user (nil, nil) will make the caller clear the signed-in session.
return nil, err
}
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
log.Trace("Session Authorization: Logged in user %-v", user)
diff --git a/services/auth/source/ldap/source.go b/services/auth/source/ldap/source.go
index 2362cad8aa..81d4b5446b 100644
--- a/services/auth/source/ldap/source.go
+++ b/services/auth/source/ldap/source.go
@@ -44,6 +44,7 @@ type Source struct {
AttributesInBind bool // fetch attributes in bind context (not user)
AttributeSSHPublicKey string // LDAP SSH Public Key attribute
AttributeAvatar string
+ SSHKeysAreVerified bool // true if SSH keys in LDAP are verified
SearchPageSize uint32 // Search with paging page size
Filter string // Query filter to validate entry
AdminFilter string // Query filter to check if user is admin
diff --git a/services/auth/source/ldap/source_authenticate.go b/services/auth/source/ldap/source_authenticate.go
index 4463bcc054..582841aebe 100644
--- a/services/auth/source/ldap/source_authenticate.go
+++ b/services/auth/source/ldap/source_authenticate.go
@@ -73,7 +73,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u
}
if user != nil {
- if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, user, source.AuthSource, sr.SSHPublicKey) {
+ if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, user, source.AuthSource, sr.SSHPublicKey, source.SSHKeysAreVerified) {
if err := asymkey_service.RewriteAllPublicKeys(ctx); err != nil {
return user, err
}
@@ -99,7 +99,7 @@ func (source *Source) Authenticate(ctx context.Context, user *user_model.User, u
return user, err
}
- if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(ctx, user, source.AuthSource, sr.SSHPublicKey) {
+ if isAttributeSSHPublicKeySet && asymkey_model.AddPublicKeysBySource(ctx, user, source.AuthSource, sr.SSHPublicKey, source.SSHKeysAreVerified) {
if err := asymkey_service.RewriteAllPublicKeys(ctx); err != nil {
return user, err
}
diff --git a/services/auth/source/ldap/source_sync.go b/services/auth/source/ldap/source_sync.go
index 7b401c5c96..0c5fdac674 100644
--- a/services/auth/source/ldap/source_sync.go
+++ b/services/auth/source/ldap/source_sync.go
@@ -135,7 +135,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error {
if err == nil && isAttributeSSHPublicKeySet {
log.Trace("SyncExternalUsers[%s]: Adding LDAP Public SSH Keys for user %s", source.AuthSource.Name, usr.Name)
- if asymkey_model.AddPublicKeysBySource(ctx, usr, source.AuthSource, su.SSHPublicKey) {
+ if asymkey_model.AddPublicKeysBySource(ctx, usr, source.AuthSource, su.SSHPublicKey, source.SSHKeysAreVerified) {
sshKeysNeedUpdate = true
}
}
@@ -145,7 +145,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error {
}
} else if updateExisting {
// Synchronize SSH Public Key if that attribute is set
- if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, usr, source.AuthSource, su.SSHPublicKey) {
+ if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(ctx, usr, source.AuthSource, su.SSHPublicKey, source.SSHKeysAreVerified) {
sshKeysNeedUpdate = true
}
diff --git a/services/auth/source/oauth2/providers_test.go b/services/auth/source/oauth2/providers_test.go
index 353816c71e..08c50b12a9 100644
--- a/services/auth/source/oauth2/providers_test.go
+++ b/services/auth/source/oauth2/providers_test.go
@@ -19,11 +19,11 @@ func (p *fakeProvider) Name() string {
func (p *fakeProvider) SetName(name string) {}
func (p *fakeProvider) BeginAuth(state string) (goth.Session, error) {
- return nil, nil
+ return nil, nil //nolint:nilnil // the fake test provider never begins a session
}
func (p *fakeProvider) UnmarshalSession(string) (goth.Session, error) {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
func (p *fakeProvider) FetchUser(goth.Session) (goth.User, error) {
diff --git a/services/auth/sspi.go b/services/auth/sspi.go
index 8cb39886c4..6450753935 100644
--- a/services/auth/sspi.go
+++ b/services/auth/sspi.go
@@ -63,7 +63,7 @@ func (s *SSPI) Verify(req *http.Request, w http.ResponseWriter, store DataStore,
return nil, sspiAuthErrInit
}
if !s.shouldAuthenticate(req) {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
cfg, err := s.getConfig(req.Context())
@@ -97,7 +97,7 @@ func (s *SSPI) Verify(req *http.Request, w http.ResponseWriter, store DataStore,
username := sanitizeUsername(userInfo.Username, cfg)
if len(username) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
log.Info("Authenticated as %s\n", username)
@@ -109,7 +109,7 @@ func (s *SSPI) Verify(req *http.Request, w http.ResponseWriter, store DataStore,
}
if !cfg.AutoCreateUsers {
log.Error("User '%s' not found", username)
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
user, err = s.newUser(req.Context(), username, cfg)
if err != nil {
diff --git a/services/context/api.go b/services/context/api.go
index 591efadf37..a104d13588 100644
--- a/services/context/api.go
+++ b/services/context/api.go
@@ -221,7 +221,7 @@ func APIContexter() func(http.Handler) http.Handler {
ctx := &APIContext{
Base: base,
Cache: cache.GetCache(),
- Repo: &Repository{PullRequest: &PullRequest{}},
+ Repo: &Repository{},
Org: &APIOrganization{},
}
diff --git a/services/context/base.go b/services/context/base.go
index 8bd66bed09..4baea95ccf 100644
--- a/services/context/base.go
+++ b/services/context/base.go
@@ -18,6 +18,7 @@ import (
"code.gitea.io/gitea/modules/reqctx"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/web/middleware"
)
@@ -147,10 +148,7 @@ func (b *Base) PlainText(status int, text string) {
// Redirect redirects the request
func (b *Base) Redirect(location string, status ...int) {
- code := http.StatusSeeOther
- if len(status) == 1 {
- code = status[0]
- }
+ code := util.OptionalArg(status, http.StatusSeeOther)
if !httplib.IsRelativeURL(location) {
// Some browsers (Safari) have buggy behavior for Cookie + Cache + External Redirection, eg: /my-path => https://other/path
diff --git a/services/context/captcha.go b/services/context/captcha.go
index 9272e7a65a..b4c3a92907 100644
--- a/services/context/captcha.go
+++ b/services/context/captcha.go
@@ -5,6 +5,7 @@ package context
import (
"fmt"
+ "image/color"
"sync"
"code.gitea.io/gitea/modules/cache"
@@ -29,6 +30,15 @@ func GetImageCaptcha() *captcha.Captcha {
imageCaptchaOnce.Do(func() {
cpt = captcha.NewCaptcha(captcha.Options{
SubURL: setting.AppSubURL,
+ // Use a color palette with high contrast colors suitable for both light and dark modes
+ // These colors provide good visibility and readability in both themes
+ ColorPalette: color.Palette{
+ color.RGBA{R: 234, G: 67, B: 53, A: 255}, // Bright red
+ color.RGBA{R: 66, G: 133, B: 244, A: 255}, // Medium blue
+ color.RGBA{R: 52, G: 168, B: 83, A: 255}, // Green
+ color.RGBA{R: 251, G: 188, B: 5, A: 255}, // Yellow/gold
+ color.RGBA{R: 171, G: 71, B: 188, A: 255}, // Purple
+ },
})
cpt.Store = cache.GetCache().ChiCache()
})
diff --git a/services/context/context.go b/services/context/context.go
index 420b2aefa8..394a78aa9d 100644
--- a/services/context/context.go
+++ b/services/context/context.go
@@ -17,6 +17,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/cache"
"code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/reqctx"
"code.gitea.io/gitea/modules/session"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/templates"
@@ -128,7 +129,7 @@ func NewWebContext(base *Base, render Render, session session.Store) *Context {
Cache: cache.GetCache(),
Link: setting.AppSubURL + strings.TrimSuffix(base.Req.URL.EscapedPath(), "/"),
- Repo: &Repository{PullRequest: &PullRequest{}},
+ Repo: &Repository{},
Org: &Organization{},
}
ctx.TemplateContext = NewTemplateContextForWeb(ctx)
@@ -137,15 +138,32 @@ func NewWebContext(base *Base, render Render, session session.Store) *Context {
return ctx
}
-// Contexter initializes a classic context for a request.
-func Contexter() func(next http.Handler) http.Handler {
- rnd := templates.HTMLRenderer()
+func ContexterInstallPage(data map[string]any) func(next http.Handler) http.Handler {
+ rnd := templates.PageRenderer()
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
+ base := NewBaseContext(resp, req)
+ ctx := NewWebContext(base, rnd, session.GetContextSession(req))
+ ctx.Data.MergeFrom(middleware.CommonTemplateContextData())
+ ctx.Data.MergeFrom(reqctx.ContextData{
+ "Title": ctx.Locale.Tr("install.install"),
+ "PageIsInstall": true,
+ "AllLangs": translation.AllLangs(),
+ })
+ ctx.Data.MergeFrom(data)
+ next.ServeHTTP(resp, ctx.Req)
+ })
+ }
+}
+
+// Contexter initializes a classic context for a request.
+func Contexter() func(next http.Handler) http.Handler {
+ rnd := templates.PageRenderer()
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
base := NewBaseContext(resp, req)
ctx := NewWebContext(base, rnd, session.GetContextSession(req))
ctx.Data.MergeFrom(middleware.CommonTemplateContextData())
- ctx.Data["CurrentURL"] = setting.AppSubURL + req.URL.RequestURI()
ctx.Data["Link"] = ctx.Link
// PageData is passed by reference, and it will be rendered to `window.config.pageData` in `head.tmpl` for JavaScript modules
diff --git a/services/context/package.go b/services/context/package.go
index 8b722932b1..0e9210515b 100644
--- a/services/context/package.go
+++ b/services/context/package.go
@@ -150,7 +150,7 @@ func determineAccessMode(ctx *Base, pkg *Package, doer *user_model.User) (perm.A
// PackageContexter initializes a package context for a request.
func PackageContexter() func(next http.Handler) http.Handler {
- renderer := templates.HTMLRenderer()
+ renderer := templates.PageRenderer()
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
base := NewBaseContext(resp, req)
diff --git a/services/context/pagination.go b/services/context/pagination.go
index 2a9805db05..21efab8b12 100644
--- a/services/context/pagination.go
+++ b/services/context/pagination.go
@@ -8,8 +8,10 @@ import (
"html/template"
"net/http"
"net/url"
+ "slices"
"strings"
+ "code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/paginator"
)
@@ -49,6 +51,14 @@ func (p *Pagination) AddParamFromRequest(req *http.Request) {
p.AddParamFromQuery(req.URL.Query())
}
+func (p *Pagination) RemoveParam(keys container.Set[string]) {
+ p.urlParams = slices.DeleteFunc(p.urlParams, func(s string) bool {
+ k, _, _ := strings.Cut(s, "=")
+ k, _ = url.QueryUnescape(k)
+ return keys.Contains(k)
+ })
+}
+
// GetParams returns the configured URL params
func (p *Pagination) GetParams() template.URL {
return template.URL(strings.Join(p.urlParams, "&"))
diff --git a/services/context/pagination_test.go b/services/context/pagination_test.go
new file mode 100644
index 0000000000..78359caa09
--- /dev/null
+++ b/services/context/pagination_test.go
@@ -0,0 +1,35 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package context
+
+import (
+ "net/url"
+ "testing"
+
+ "code.gitea.io/gitea/modules/container"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPagination(t *testing.T) {
+ p := NewPagination(1, 1, 1, 1)
+ params := url.Values{}
+ params.Add("k1", "11")
+ params.Add("k1", "12")
+ params.Add("k", "a")
+ params.Add("k", "b")
+ params.Add("k2", "21")
+ params.Add("k2", "22")
+ params.Add("foo", "bar")
+
+ p.AddParamFromQuery(params)
+ v, _ := url.ParseQuery(string(p.GetParams()))
+ assert.Equal(t, params, v)
+
+ p.RemoveParam(container.SetOf("k", "foo"))
+ params.Del("k")
+ params.Del("foo")
+ v, _ = url.ParseQuery(string(p.GetParams()))
+ assert.Equal(t, params, v)
+}
diff --git a/services/context/repo.go b/services/context/repo.go
index 3813335374..674da577b9 100644
--- a/services/context/repo.go
+++ b/services/context/repo.go
@@ -37,11 +37,46 @@ import (
"github.com/editorconfig/editorconfig-core-go/v2"
)
-// PullRequest contains information to make a pull request
-type PullRequest struct {
- BaseRepo *repo_model.Repository
- Allowed bool // it only used by the web tmpl: "PullRequestCtx.Allowed"
- SameRepo bool // it only used by the web tmpl: "PullRequestCtx.SameRepo"
+// PullRequestContext contains context information for making a new pull request
+type PullRequestContext struct {
+ ctx *Context
+
+ baseRepo, headRepo *repo_model.Repository
+
+ canCreateNewPull *bool
+ defaultTargetBranch *string
+}
+
+func (prc *PullRequestContext) SameRepo() bool {
+ return prc.baseRepo != nil && prc.headRepo != nil && prc.baseRepo.ID == prc.headRepo.ID
+}
+
+func (prc *PullRequestContext) CanCreateNewPull() bool {
+ if prc.canCreateNewPull != nil {
+ return *prc.canCreateNewPull
+ }
+ ctx := prc.ctx
+ // People who have push access or have forked repository can propose a new pull request.
+ can := prc.baseRepo.CanContentChange() &&
+ (ctx.Repo.CanWrite(unit_model.TypeCode) || (ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID)))
+ prc.canCreateNewPull = &can
+ return can
+}
+
+func (prc *PullRequestContext) MakeDefaultCompareLink(headBranch string) string {
+ return prc.baseRepo.Link() + "/compare/" +
+ util.PathEscapeSegments(prc.DefaultTargetBranch()) + "..." +
+ util.Iif(prc.SameRepo(), "", util.PathEscapeSegments(prc.headRepo.OwnerName)+":") +
+ util.PathEscapeSegments(headBranch)
+}
+
+func (prc *PullRequestContext) DefaultTargetBranch() string {
+ if prc.defaultTargetBranch != nil {
+ return *prc.defaultTargetBranch
+ }
+ branchName := prc.baseRepo.GetPullRequestTargetBranch(prc.ctx)
+ prc.defaultTargetBranch = &branchName
+ return branchName
}
// Repository contains information to operate a repository
@@ -64,7 +99,7 @@ type Repository struct {
CommitID string
CommitsCount int64
- PullRequest *PullRequest
+ PullRequestCtx *PullRequestContext
}
// CanWriteToBranch checks if the branch is writable by the user
@@ -418,6 +453,12 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) {
ctx.Data["IsEmptyRepo"] = ctx.Repo.Repository.IsEmpty
}
+func InitRepoPullRequestCtx(ctx *Context, base, head *repo_model.Repository) {
+ ctx.Repo.PullRequestCtx = &PullRequestContext{ctx: ctx}
+ ctx.Repo.PullRequestCtx.baseRepo, ctx.Repo.PullRequestCtx.headRepo = base, head
+ ctx.Data["PullRequestCtx"] = ctx.Repo.PullRequestCtx
+}
+
// RepoAssignment returns a middleware to handle repository assignment
func RepoAssignment(ctx *Context) {
if ctx.Data["Repository"] != nil {
@@ -666,28 +707,16 @@ func RepoAssignment(ctx *Context) {
ctx.Data["BranchesCount"] = branchesTotal
- // People who have push access or have forked repository can propose a new pull request.
- canPush := ctx.Repo.CanWrite(unit_model.TypeCode) ||
- (ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID))
- canCompare := false
-
- // Pull request is allowed if this is a fork repository
- // and base repository accepts pull requests.
+ // Pull request is allowed if this is a fork repository, and base repository accepts pull requests.
if repo.BaseRepo != nil && repo.BaseRepo.AllowsPulls(ctx) {
- canCompare = true
+ // TODO: this (and below) "BaseRepo" var is not clear and should be removed in the future
ctx.Data["BaseRepo"] = repo.BaseRepo
- ctx.Repo.PullRequest.BaseRepo = repo.BaseRepo
- ctx.Repo.PullRequest.Allowed = canPush
+ InitRepoPullRequestCtx(ctx, repo.BaseRepo, repo)
} else if repo.AllowsPulls(ctx) {
// Or, this is repository accepts pull requests between branches.
- canCompare = true
ctx.Data["BaseRepo"] = repo
- ctx.Repo.PullRequest.BaseRepo = repo
- ctx.Repo.PullRequest.Allowed = canPush
- ctx.Repo.PullRequest.SameRepo = true
+ InitRepoPullRequestCtx(ctx, repo, repo)
}
- ctx.Data["CanCompareOrPull"] = canCompare
- ctx.Data["PullRequestCtx"] = ctx.Repo.PullRequest
if ctx.Repo.Repository.Status == repo_model.RepositoryPendingTransfer {
repoTransfer, err := repo_model.GetPendingRepositoryTransfer(ctx, ctx.Repo.Repository)
diff --git a/services/contexttest/context_tests.go b/services/contexttest/context_tests.go
index 44d9f4a70f..701c25e442 100644
--- a/services/contexttest/context_tests.go
+++ b/services/contexttest/context_tests.go
@@ -143,8 +143,9 @@ func LoadRepoCommit(t *testing.T, ctx gocontext.Context) {
gitRepo, err := gitrepo.OpenRepository(ctx, repo.Repository)
require.NoError(t, err)
- defer gitRepo.Close()
-
+ t.Cleanup(func() {
+ gitRepo.Close()
+ })
if repo.RefFullName == "" {
repo.RefFullName = git_module.RefNameFromBranch(repo.Repository.DefaultBranch)
}
@@ -161,8 +162,10 @@ func LoadUser(t *testing.T, ctx gocontext.Context, userID int64) {
switch ctx := ctx.(type) {
case *context.Context:
ctx.Doer = doer
+ ctx.IsSigned = true
case *context.APIContext:
ctx.Doer = doer
+ ctx.IsSigned = true
default:
assert.FailNow(t, "context is not *context.Context or *context.APIContext")
}
@@ -189,7 +192,7 @@ func LoadGitRepo(t *testing.T, ctx gocontext.Context) {
type MockRender struct{}
func (tr *MockRender) TemplateLookup(tmpl string, _ gocontext.Context) (templates.TemplateExecutor, error) {
- return nil, nil
+ return nil, nil //nolint:nilnil // mock implementation returns nil to indicate no template found
}
func (tr *MockRender) HTML(w io.Writer, status int, _ templates.TplName, _ any, _ gocontext.Context) error {
diff --git a/services/convert/convert.go b/services/convert/convert.go
index c081aec771..e1cd30705e 100644
--- a/services/convert/convert.go
+++ b/services/convert/convert.go
@@ -349,20 +349,29 @@ func ToActionWorkflowJob(ctx context.Context, repo *repo_model.Repository, task
}
}
- runnerID = task.RunnerID
- if runner, ok, _ := db.GetByID[actions_model.ActionRunner](ctx, runnerID); ok {
- runnerName = runner.Name
- }
- for i, step := range task.Steps {
- stepStatus, stepConclusion := ToActionsStatus(job.Status)
- steps = append(steps, &api.ActionWorkflowStep{
- Name: step.Name,
- Number: int64(i),
- Status: stepStatus,
- Conclusion: stepConclusion,
- StartedAt: step.Started.AsTime().UTC(),
- CompletedAt: step.Stopped.AsTime().UTC(),
- })
+ if task != nil {
+ if task.Steps == nil {
+ task.Steps, err = actions_model.GetTaskStepsByTaskID(ctx, task.ID)
+ if err != nil {
+ return nil, err
+ }
+ task.Steps = util.SliceNilAsEmpty(task.Steps)
+ }
+ runnerID = task.RunnerID
+ if runner, ok, _ := db.GetByID[actions_model.ActionRunner](ctx, runnerID); ok {
+ runnerName = runner.Name
+ }
+ for i, step := range task.Steps {
+ stepStatus, stepConclusion := ToActionsStatus(job.Status)
+ steps = append(steps, &api.ActionWorkflowStep{
+ Name: step.Name,
+ Number: int64(i),
+ Status: stepStatus,
+ Conclusion: stepConclusion,
+ StartedAt: step.Started.AsTime().UTC(),
+ CompletedAt: step.Stopped.AsTime().UTC(),
+ })
+ }
}
}
@@ -383,7 +392,7 @@ func ToActionWorkflowJob(ctx context.Context, repo *repo_model.Repository, task
Conclusion: conclusion,
RunnerID: runnerID,
RunnerName: runnerName,
- Steps: steps,
+ Steps: util.SliceNilAsEmpty(steps),
CreatedAt: job.Created.AsTime().UTC(),
StartedAt: job.Started.AsTime().UTC(),
CompletedAt: job.Stopped.AsTime().UTC(),
diff --git a/services/convert/issue.go b/services/convert/issue.go
index e26412bcca..b396dd0737 100644
--- a/services/convert/issue.go
+++ b/services/convert/issue.go
@@ -10,6 +10,7 @@ import (
"strings"
issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/label"
@@ -163,11 +164,12 @@ func ToTrackedTime(ctx context.Context, doer *user_model.User, t *issues_model.T
}
// ToStopWatches convert Stopwatch list to api.StopWatches
-func ToStopWatches(ctx context.Context, sws []*issues_model.Stopwatch) (api.StopWatches, error) {
+func ToStopWatches(ctx context.Context, doer *user_model.User, sws []*issues_model.Stopwatch) (api.StopWatches, error) {
result := api.StopWatches(make([]api.StopWatch, 0, len(sws)))
issueCache := make(map[int64]*issues_model.Issue)
repoCache := make(map[int64]*repo_model.Repository)
+ permCache := make(map[int64]access_model.Permission)
var (
issue *issues_model.Issue
repo *repo_model.Repository
@@ -182,13 +184,30 @@ func ToStopWatches(ctx context.Context, sws []*issues_model.Stopwatch) (api.Stop
if err != nil {
return nil, err
}
+ issueCache[sw.IssueID] = issue
}
repo, ok = repoCache[issue.RepoID]
if !ok {
repo, err = repo_model.GetRepositoryByID(ctx, issue.RepoID)
if err != nil {
- return nil, err
+ log.Error("GetRepositoryByID(%d): %v", issue.RepoID, err)
+ continue
}
+ repoCache[issue.RepoID] = repo
+ }
+
+ // ADD: Check user permissions
+ perm, ok := permCache[repo.ID]
+ if !ok {
+ perm, err = access_model.GetUserRepoPermission(ctx, repo, doer)
+ if err != nil {
+ continue
+ }
+ permCache[repo.ID] = perm
+ }
+
+ if !perm.CanReadIssuesOrPulls(issue.IsPull) {
+ continue
}
result = append(result, api.StopWatch{
diff --git a/services/convert/issue_test.go b/services/convert/issue_test.go
index 4d780f3f00..a12a69288a 100644
--- a/services/convert/issue_test.go
+++ b/services/convert/issue_test.go
@@ -8,9 +8,11 @@ import (
"testing"
"time"
+ "code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil"
@@ -55,3 +57,29 @@ func TestMilestone_APIFormat(t *testing.T) {
Deadline: milestone.DeadlineUnix.AsTimePtr(),
}, *ToAPIMilestone(milestone))
}
+
+func TestToStopWatchesRespectsPermissions(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ ctx := t.Context()
+ publicSW := unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{ID: 1})
+ privateIssue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{RepoID: 3})
+ privateSW := &issues_model.Stopwatch{IssueID: privateIssue.ID, UserID: 5}
+ assert.NoError(t, db.Insert(ctx, privateSW))
+ assert.NotZero(t, privateSW.ID)
+
+ regularUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
+ adminUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ sws := []*issues_model.Stopwatch{publicSW, privateSW}
+
+ visible, err := ToStopWatches(ctx, regularUser, sws)
+ assert.NoError(t, err)
+ assert.Len(t, visible, 1)
+ assert.Equal(t, "repo1", visible[0].RepoName)
+
+ visibleAdmin, err := ToStopWatches(ctx, adminUser, sws)
+ assert.NoError(t, err)
+ assert.Len(t, visibleAdmin, 2)
+ assert.ElementsMatch(t, []string{"repo1", "repo3"}, []string{visibleAdmin[0].RepoName, visibleAdmin[1].RepoName})
+}
diff --git a/services/convert/notification.go b/services/convert/notification.go
index 69470638be..87166501a6 100644
--- a/services/convert/notification.go
+++ b/services/convert/notification.go
@@ -8,8 +8,8 @@ import (
"net/url"
activities_model "code.gitea.io/gitea/models/activities"
- "code.gitea.io/gitea/models/perm"
access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/modules/log"
api "code.gitea.io/gitea/modules/structs"
)
@@ -25,11 +25,17 @@ func ToNotificationThread(ctx context.Context, n *activities_model.Notification)
// since user only get notifications when he has access to use minimal access mode
if n.Repository != nil {
- result.Repository = ToRepo(ctx, n.Repository, access_model.Permission{AccessMode: perm.AccessModeRead})
-
- // This permission is not correct and we should not be reporting it
- for repository := result.Repository; repository != nil; repository = repository.Parent {
- repository.Permissions = nil
+ perm, err := access_model.GetUserRepoPermission(ctx, n.Repository, n.User)
+ if err != nil {
+ log.Error("GetUserRepoPermission failed: %v", err)
+ return result
+ }
+ if perm.HasAnyUnitAccessOrPublicAccess() { // if user has been revoked access to repo, do not show repo info
+ result.Repository = ToRepo(ctx, n.Repository, perm)
+ // This permission is not correct and we should not be reporting it
+ for repository := result.Repository; repository != nil; repository = repository.Parent {
+ repository.Permissions = nil
+ }
}
}
diff --git a/services/convert/notification_test.go b/services/convert/notification_test.go
new file mode 100644
index 0000000000..718a070819
--- /dev/null
+++ b/services/convert/notification_test.go
@@ -0,0 +1,57 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package convert
+
+import (
+ "testing"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestToNotificationThreadIncludesRepoForAccessibleUser(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ n := newRepoNotification(t, 1, 4)
+ thread := ToNotificationThread(t.Context(), n)
+
+ if assert.NotNil(t, thread.Repository) {
+ assert.Equal(t, n.Repository.FullName(), thread.Repository.FullName)
+ assert.Nil(t, thread.Repository.Permissions)
+ }
+}
+
+func TestToNotificationThreadOmitsRepoWhenAccessRevoked(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ n := newRepoNotification(t, 2, 4)
+ thread := ToNotificationThread(t.Context(), n)
+
+ assert.Nil(t, thread.Repository)
+}
+
+func newRepoNotification(t *testing.T, repoID, userID int64) *activities_model.Notification {
+ t.Helper()
+
+ ctx := t.Context()
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ assert.NoError(t, repo.LoadOwner(ctx))
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID})
+
+ return &activities_model.Notification{
+ ID: repoID*1000 + userID,
+ UserID: user.ID,
+ RepoID: repo.ID,
+ Status: activities_model.NotificationStatusUnread,
+ Source: activities_model.NotificationSourceRepository,
+ UpdatedUnix: timeutil.TimeStampNow(),
+ Repository: repo,
+ User: user,
+ }
+}
diff --git a/services/convert/pull_review.go b/services/convert/pull_review.go
index 574f34fa17..ba0102cf1f 100644
--- a/services/convert/pull_review.go
+++ b/services/convert/pull_review.go
@@ -92,34 +92,40 @@ func ToPullReviewCommentList(ctx context.Context, review *issues_model.Review, d
for _, lines := range review.CodeComments {
for _, comments := range lines {
for _, comment := range comments {
- apiComment := &api.PullReviewComment{
- ID: comment.ID,
- Body: comment.Content,
- Poster: ToUser(ctx, comment.Poster, doer),
- Resolver: ToUser(ctx, comment.ResolveDoer, doer),
- ReviewID: review.ID,
- Created: comment.CreatedUnix.AsTime(),
- Updated: comment.UpdatedUnix.AsTime(),
- Path: comment.TreePath,
- CommitID: comment.CommitSHA,
- OrigCommitID: comment.OldRef,
- DiffHunk: patch2diff(comment.Patch),
- HTMLURL: comment.HTMLURL(ctx),
- HTMLPullURL: review.Issue.HTMLURL(ctx),
- }
-
- if comment.Line < 0 {
- apiComment.OldLineNum = comment.UnsignedLine()
- } else {
- apiComment.LineNum = comment.UnsignedLine()
- }
- apiComments = append(apiComments, apiComment)
+ apiComments = append(apiComments, ToPullReviewComment(ctx, comment, doer))
}
}
}
return apiComments, nil
}
+// ToPullReviewComment convert a single code review comment to api format
+func ToPullReviewComment(ctx context.Context, comment *issues_model.Comment, doer *user_model.User) *api.PullReviewComment {
+ apiComment := &api.PullReviewComment{
+ ID: comment.ID,
+ Body: comment.Content,
+ Poster: ToUser(ctx, comment.Poster, doer),
+ Resolver: ToUser(ctx, comment.ResolveDoer, doer),
+ ReviewID: comment.ReviewID,
+ Created: comment.CreatedUnix.AsTime(),
+ Updated: comment.UpdatedUnix.AsTime(),
+ Path: comment.TreePath,
+ CommitID: comment.CommitSHA,
+ OrigCommitID: comment.OldRef,
+ DiffHunk: patch2diff(comment.Patch),
+ HTMLURL: comment.HTMLURL(ctx),
+ HTMLPullURL: comment.Issue.HTMLURL(ctx),
+ }
+
+ if comment.Line < 0 {
+ apiComment.OldLineNum = comment.UnsignedLine()
+ } else {
+ apiComment.LineNum = comment.UnsignedLine()
+ }
+
+ return apiComment
+}
+
func patch2diff(patch string) string {
split := strings.Split(patch, "\n@@")
if len(split) == 2 {
diff --git a/services/convert/repository.go b/services/convert/repository.go
index a364591bb8..150c952b15 100644
--- a/services/convert/repository.go
+++ b/services/convert/repository.go
@@ -34,7 +34,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR
permissionInRepo.SetUnitsWithDefaultAccessMode(repo.Units, permissionInRepo.AccessMode)
}
- // TODO: ideally we should pass "doer" into "ToRepo" to to make CloneLink could generate user-related links
+ // TODO: ideally we should pass "doer" into "ToRepo" to make CloneLink could generate user-related links
// And passing "doer" in will also fix other FIXMEs in this file.
cloneLink := repo.CloneLinkGeneral(ctx) // no doer at the moment
permission := &api.Permission{
@@ -103,6 +103,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR
defaultDeleteBranchAfterMerge := false
defaultMergeStyle := repo_model.MergeStyleMerge
defaultAllowMaintainerEdit := false
+ defaultTargetBranch := ""
if unit, err := repo.GetUnit(ctx, unit_model.TypePullRequests); err == nil {
config := unit.PullRequestsConfig()
hasPullRequests = true
@@ -118,6 +119,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR
defaultDeleteBranchAfterMerge = config.DefaultDeleteBranchAfterMerge
defaultMergeStyle = config.GetDefaultMergeStyle()
defaultAllowMaintainerEdit = config.DefaultAllowMaintainerEdit
+ defaultTargetBranch = config.DefaultTargetBranch
}
hasProjects := false
projectsMode := repo_model.ProjectsModeAll
@@ -127,20 +129,10 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR
projectsMode = config.ProjectsMode
}
- hasReleases := false
- if _, err := repo.GetUnit(ctx, unit_model.TypeReleases); err == nil {
- hasReleases = true
- }
-
- hasPackages := false
- if _, err := repo.GetUnit(ctx, unit_model.TypePackages); err == nil {
- hasPackages = true
- }
-
- hasActions := false
- if _, err := repo.GetUnit(ctx, unit_model.TypeActions); err == nil {
- hasActions = true
- }
+ hasCode := repo.UnitEnabled(ctx, unit_model.TypeCode)
+ hasReleases := repo.UnitEnabled(ctx, unit_model.TypeReleases)
+ hasPackages := repo.UnitEnabled(ctx, unit_model.TypePackages)
+ hasActions := repo.UnitEnabled(ctx, unit_model.TypeActions)
if err := repo.LoadOwner(ctx); err != nil {
return nil
@@ -221,6 +213,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR
Updated: repo.UpdatedUnix.AsTime(),
ArchivedAt: repo.ArchivedUnix.AsTime(),
Permissions: permission,
+ HasCode: hasCode,
HasIssues: hasIssues,
ExternalTracker: externalTracker,
InternalTracker: internalTracker,
@@ -244,6 +237,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR
DefaultDeleteBranchAfterMerge: defaultDeleteBranchAfterMerge,
DefaultMergeStyle: string(defaultMergeStyle),
DefaultAllowMaintainerEdit: defaultAllowMaintainerEdit,
+ DefaultTargetBranch: defaultTargetBranch,
AvatarURL: repo.AvatarLink(ctx),
Internal: !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePrivate,
MirrorInterval: mirrorInterval,
diff --git a/services/cron/tasks_basic.go b/services/cron/tasks_basic.go
index 48380b7b9a..c620959cc1 100644
--- a/services/cron/tasks_basic.go
+++ b/services/cron/tasks_basic.go
@@ -54,7 +54,7 @@ func registerRepoHealthCheck() {
RunAtStart: false,
Schedule: "@midnight",
},
- Timeout: time.Duration(setting.Git.Timeout.Default) * time.Second,
+ Timeout: time.Duration(setting.Git.Timeout.GC) * time.Second,
Args: []string{},
}, func(ctx context.Context, _ *user_model.User, config Config) error {
rhcConfig := config.(*RepoHealthCheckConfig)
diff --git a/services/doctor/heads.go b/services/doctor/heads.go
index bdadfa674c..4d34b18e18 100644
--- a/services/doctor/heads.go
+++ b/services/doctor/heads.go
@@ -20,10 +20,10 @@ func synchronizeRepoHeads(ctx context.Context, logger log.Logger, autofix bool)
numReposUpdated := 0
err := iterateRepositories(ctx, func(repo *repo_model.Repository) error {
numRepos++
- _, defaultBranchErr := gitrepo.RunCmdString(ctx, repo,
+ _, _, defaultBranchErr := gitrepo.RunCmdString(ctx, repo,
gitcmd.NewCommand("rev-parse").AddDashesAndList(repo.DefaultBranch))
- head, headErr := gitrepo.RunCmdString(ctx, repo,
+ head, _, headErr := gitrepo.RunCmdString(ctx, repo,
gitcmd.NewCommand("symbolic-ref", "--short", "HEAD"))
// what we expect: default branch is valid, and HEAD points to it
diff --git a/services/doctor/mergebase.go b/services/doctor/mergebase.go
index 852e37f415..a76ed8afb7 100644
--- a/services/doctor/mergebase.go
+++ b/services/doctor/mergebase.go
@@ -43,17 +43,17 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro
if !pr.HasMerged {
var err error
- pr.MergeBase, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitHeadRefName()))
+ pr.MergeBase, _, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("merge-base").AddDashesAndList(pr.BaseBranch, pr.GetGitHeadRefName()))
if err != nil {
var err2 error
- pr.MergeBase, err2 = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse").AddDynamicArguments(git.BranchPrefix+pr.BaseBranch))
+ pr.MergeBase, _, err2 = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse").AddDynamicArguments(git.BranchPrefix+pr.BaseBranch))
if err2 != nil {
logger.Warn("Unable to get merge base for PR ID %d, #%d onto %s in %s/%s. Error: %v & %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2)
return nil
}
}
} else {
- parentsString, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID))
+ parentsString, _, err := gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-list", "--parents", "-n", "1").AddDynamicArguments(pr.MergedCommitID))
if err != nil {
logger.Warn("Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
return nil
@@ -66,7 +66,7 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro
refs := append([]string{}, parents[1:]...)
refs = append(refs, pr.GetGitHeadRefName())
cmd := gitcmd.NewCommand("merge-base").AddDashesAndList(refs...)
- pr.MergeBase, err = gitrepo.RunCmdString(ctx, repo, cmd)
+ pr.MergeBase, _, err = gitrepo.RunCmdString(ctx, repo, cmd)
if err != nil {
logger.Warn("Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
return nil
diff --git a/services/forms/auth_form.go b/services/forms/auth_form.go
index 886110236c..95965b5f29 100644
--- a/services/forms/auth_form.go
+++ b/services/forms/auth_form.go
@@ -34,6 +34,7 @@ type AuthenticationForm struct {
AttributeMail string
AttributeSSHPublicKey string
AttributeAvatar string
+ SSHKeysAreVerified bool
AttributesInBind bool
UsePagedSearch bool
SearchPageSize int
diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go
index 2d33d2b42b..765a723968 100644
--- a/services/forms/repo_form.go
+++ b/services/forms/repo_form.go
@@ -143,6 +143,7 @@ type RepoSettingForm struct {
PullsAllowRebaseUpdate bool
DefaultDeleteBranchAfterMerge bool
DefaultAllowMaintainerEdit bool
+ DefaultTargetBranch string
EnableTimetracker bool
AllowOnlyContributorsToTrackTime bool
EnableIssueDependencies bool
@@ -405,13 +406,6 @@ func (f *NewPackagistHookForm) Validate(req *http.Request, errs binding.Errors)
return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
}
-// .___
-// | | ______ ________ __ ____
-// | |/ ___// ___/ | \_/ __ \
-// | |\___ \ \___ \| | /\ ___/
-// |___/____ >____ >____/ \___ >
-// \/ \/ \/
-
// CreateIssueForm form for creating issue
type CreateIssueForm struct {
Title string `binding:"Required;MaxSize(255)"`
diff --git a/services/git/compare.go b/services/git/compare.go
new file mode 100644
index 0000000000..251a035058
--- /dev/null
+++ b/services/git/compare.go
@@ -0,0 +1,107 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package git
+
+import (
+ "context"
+ "fmt"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// CompareInfo represents needed information for comparing references.
+type CompareInfo struct {
+ BaseRepo *repo_model.Repository
+ BaseRef git.RefName
+ BaseCommitID string
+ HeadRepo *repo_model.Repository
+ HeadGitRepo *git.Repository
+ HeadRef git.RefName
+ HeadCommitID string
+ CompareSeparator string
+ MergeBase string
+ Commits []*git.Commit
+ NumFiles int
+}
+
+func (ci *CompareInfo) IsSameRepository() bool {
+ return ci.BaseRepo.ID == ci.HeadRepo.ID
+}
+
+func (ci *CompareInfo) IsSameRef() bool {
+ return ci.IsSameRepository() && ci.BaseRef == ci.HeadRef
+}
+
+func (ci *CompareInfo) DirectComparison() bool {
+ // FIXME: the design of "DirectComparison" is wrong, it loses the information of `^`
+ // To correctly handle the comparison, developers should use `ci.CompareSeparator` directly, all "DirectComparison" related code should be rewritten.
+ return ci.CompareSeparator == ".."
+}
+
+// GetCompareInfo generates and returns compare information between base and head branches of repositories.
+func GetCompareInfo(ctx context.Context, baseRepo, headRepo *repo_model.Repository, headGitRepo *git.Repository, baseRef, headRef git.RefName, directComparison, fileOnly bool) (_ *CompareInfo, err error) {
+ compareInfo := &CompareInfo{
+ BaseRepo: baseRepo,
+ BaseRef: baseRef,
+ HeadRepo: headRepo,
+ HeadGitRepo: headGitRepo,
+ HeadRef: headRef,
+ CompareSeparator: util.Iif(directComparison, "..", "..."),
+ }
+
+ compareInfo.BaseCommitID, err = gitrepo.GetFullCommitID(ctx, baseRepo, baseRef.String())
+ if err != nil {
+ return nil, err
+ }
+ compareInfo.HeadCommitID, err = gitrepo.GetFullCommitID(ctx, headRepo, headRef.String())
+ if err != nil {
+ return nil, err
+ }
+
+ // if they are not the same repository, then we need to fetch the base commit into the head repository
+ // because we will use headGitRepo in the following code
+ if baseRepo.ID != headRepo.ID {
+ exist := headGitRepo.IsReferenceExist(compareInfo.BaseCommitID)
+ if !exist {
+ if err := gitrepo.FetchRemoteCommit(ctx, headRepo, baseRepo, compareInfo.BaseCommitID); err != nil {
+ return nil, fmt.Errorf("FetchRemoteCommit: %w", err)
+ }
+ }
+ }
+
+ if !directComparison {
+ compareInfo.MergeBase, err = gitrepo.MergeBase(ctx, headRepo, compareInfo.BaseCommitID, compareInfo.HeadCommitID)
+ if err != nil {
+ return nil, fmt.Errorf("MergeBase: %w", err)
+ }
+ } else {
+ compareInfo.MergeBase = compareInfo.BaseCommitID
+ }
+
+ // We have a common base - therefore we know that ... should work
+ if !fileOnly {
+ // In git log/rev-list, the "..." syntax represents the symmetric difference between two references,
+ // which is different from the meaning of "..." in git diff (where it implies diffing from the merge base).
+ // For listing PR commits, we must use merge-base..head to include only the commits introduced by the head branch.
+ // Otherwise, commits newly pushed to the base branch would also be included, which is incorrect.
+ compareInfo.Commits, err = headGitRepo.ShowPrettyFormatLogToList(ctx, compareInfo.MergeBase+".."+compareInfo.HeadCommitID)
+ if err != nil {
+ return nil, fmt.Errorf("ShowPrettyFormatLogToList: %w", err)
+ }
+ } else {
+ compareInfo.Commits = []*git.Commit{}
+ }
+
+ // Count number of changed files.
+ // This probably should be removed as we need to use shortstat elsewhere
+ // Now there is git diff --shortstat but this appears to be slower than simply iterating with --nameonly
+ compareInfo.NumFiles, err = headGitRepo.GetDiffNumChangedFiles(compareInfo.BaseCommitID, compareInfo.HeadCommitID, directComparison)
+ if err != nil {
+ return nil, err
+ }
+ return compareInfo, nil
+}
diff --git a/services/gitdiff/csv.go b/services/gitdiff/csv.go
index c10ee14490..3f62f15ca5 100644
--- a/services/gitdiff/csv.go
+++ b/services/gitdiff/csv.go
@@ -193,7 +193,7 @@ func createCsvDiff(diffFile *DiffFile, baseReader, headReader *csv.Reader) ([]*T
}
if aRow == nil && bRow == nil {
// No content
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the row has no content
}
aIndex := 0 // tracks where we are in the a2bColMap
diff --git a/services/gitdiff/git_diff_tree.go b/services/gitdiff/git_diff_tree.go
index 2a3c7c9445..b4f26210be 100644
--- a/services/gitdiff/git_diff_tree.go
+++ b/services/gitdiff/git_diff_tree.go
@@ -166,16 +166,6 @@ func parseGitDiffTreeLine(line string) (*DiffTreeRecord, error) {
return nil, fmt.Errorf("unparsable output for diff-tree --raw: `%s`, expected 5 space delimited values got %d)", line, len(fields))
}
- baseMode, err := git.ParseEntryMode(fields[0])
- if err != nil {
- return nil, err
- }
-
- headMode, err := git.ParseEntryMode(fields[1])
- if err != nil {
- return nil, err
- }
-
baseBlobID := fields[2]
headBlobID := fields[3]
@@ -201,8 +191,8 @@ func parseGitDiffTreeLine(line string) (*DiffTreeRecord, error) {
return &DiffTreeRecord{
Status: status,
Score: score,
- BaseMode: baseMode,
- HeadMode: headMode,
+ BaseMode: git.ParseEntryMode(fields[0]),
+ HeadMode: git.ParseEntryMode(fields[1]),
BaseBlobID: baseBlobID,
HeadBlobID: headBlobID,
BasePath: basePath,
diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go
index 17eb3d4280..7777cf4a1c 100644
--- a/services/gitdiff/gitdiff.go
+++ b/services/gitdiff/gitdiff.go
@@ -40,6 +40,7 @@ import (
"code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/modules/util"
+ "github.com/alecthomas/chroma/v2"
"github.com/sergi/go-diff/diffmatchpatch"
stdcharset "golang.org/x/net/html/charset"
"golang.org/x/text/encoding"
@@ -82,6 +83,8 @@ type DiffLine struct {
// DiffLineSectionInfo represents diff line section meta data
type DiffLineSectionInfo struct {
+ language *diffVarMutable[string]
+
Path string
// These line "idx" are 1-based line numbers
@@ -122,8 +125,14 @@ type DiffHTMLOperation struct {
// BlobExcerptChunkSize represent max lines of excerpt
const BlobExcerptChunkSize = 20
-// MaxDiffHighlightEntireFileSize is the maximum file size that will be highlighted with "entire file diff"
-const MaxDiffHighlightEntireFileSize = 1 * 1024 * 1024
+// Chroma seems extremely slow when highlighting large files, it might take dozens or hundreds of milliseconds.
+// When fully highlighting a diff with a lot of large files, it would take many seconds or even dozens of seconds.
+// So, don't highlight the entire file if it's too large, or highlighting takes too long.
+// When there is no full-file highlighting, the legacy "line-by-line" highlighting is still applied as the fallback.
+const (
+ MaxFullFileHighlightSizeLimit = 256 * 1024
+ MaxFullFileHighlightTimeLimit = 2 * time.Second
+)
// GetType returns the type of DiffLine.
func (d *DiffLine) GetType() int {
@@ -166,16 +175,19 @@ func (d *DiffLine) GetLineTypeMarker() string {
}
func (d *DiffLine) getBlobExcerptQuery() string {
- query := fmt.Sprintf(
+ language := ""
+ if d.SectionInfo.language != nil { // for normal cases, it can't be nil, this check is only for some tests
+ language = d.SectionInfo.language.value
+ }
+ return fmt.Sprintf(
"last_left=%d&last_right=%d&"+
"left=%d&right=%d&"+
"left_hunk_size=%d&right_hunk_size=%d&"+
- "path=%s",
+ "path=%s&filelang=%s",
d.SectionInfo.LastLeftIdx, d.SectionInfo.LastRightIdx,
d.SectionInfo.LeftIdx, d.SectionInfo.RightIdx,
d.SectionInfo.LeftHunkSize, d.SectionInfo.RightHunkSize,
- url.QueryEscape(d.SectionInfo.Path))
- return query
+ url.QueryEscape(d.SectionInfo.Path), url.QueryEscape(language))
}
func (d *DiffLine) GetExpandDirection() string {
@@ -200,6 +212,11 @@ type DiffBlobExcerptData struct {
AfterCommitID string
}
+const (
+ DiffStyleSplit = "split"
+ DiffStyleUnified = "unified"
+)
+
func (d *DiffLine) RenderBlobExcerptButtons(fileNameHash string, data *DiffBlobExcerptData) template.HTML {
dataHiddenCommentIDs := strings.Join(base.Int64sToStrings(d.SectionInfo.HiddenCommentIDs), ",")
anchor := fmt.Sprintf("diff-%sK%d", fileNameHash, d.SectionInfo.RightIdx)
@@ -262,11 +279,12 @@ func FillHiddenCommentIDsForDiffLine(line *DiffLine, lineComments map[int64][]*i
line.SectionInfo.HiddenCommentIDs = hiddenCommentIDs
}
-func getDiffLineSectionInfo(treePath, line string, lastLeftIdx, lastRightIdx int) *DiffLineSectionInfo {
+func newDiffLineSectionInfo(curFile *DiffFile, line string, lastLeftIdx, lastRightIdx int) *DiffLineSectionInfo {
leftLine, leftHunk, rightLine, rightHunk := git.ParseDiffHunkString(line)
return &DiffLineSectionInfo{
- Path: treePath,
+ Path: curFile.Name,
+ language: &curFile.language,
LastLeftIdx: lastLeftIdx,
LastRightIdx: lastRightIdx,
LeftIdx: leftLine,
@@ -286,7 +304,11 @@ func getLineContent(content string, locale translation.Locale) DiffInline {
// DiffSection represents a section of a DiffFile.
type DiffSection struct {
- file *DiffFile
+ language *diffVarMutable[string]
+ highlightedLeftLines *diffVarMutable[map[int]template.HTML]
+ highlightedRightLines *diffVarMutable[map[int]template.HTML]
+ highlightLexer *diffVarMutable[chroma.Lexer]
+
FileName string
Lines []*DiffLine
}
@@ -327,17 +349,19 @@ func (diffSection *DiffSection) getLineContentForRender(lineIdx int, diffLine *D
if setting.Git.DisableDiffHighlight {
return template.HTML(html.EscapeString(diffLine.Content[1:]))
}
- h, _ = highlight.Code(diffSection.FileName, fileLanguage, diffLine.Content[1:])
- return h
+ if diffSection.highlightLexer.value == nil {
+ diffSection.highlightLexer.value = highlight.DetectChromaLexerByFileName(diffSection.FileName, fileLanguage)
+ }
+ return highlight.RenderCodeByLexer(diffSection.highlightLexer.value, diffLine.Content[1:])
}
func (diffSection *DiffSection) getDiffLineForRender(diffLineType DiffLineType, leftLine, rightLine *DiffLine, locale translation.Locale) DiffInline {
var fileLanguage string
var highlightedLeftLines, highlightedRightLines map[int]template.HTML
// when a "diff section" is manually prepared by ExcerptBlob, it doesn't have "file" information
- if diffSection.file != nil {
- fileLanguage = diffSection.file.Language
- highlightedLeftLines, highlightedRightLines = diffSection.file.highlightedLeftLines, diffSection.file.highlightedRightLines
+ if diffSection.language != nil {
+ fileLanguage = diffSection.language.value
+ highlightedLeftLines, highlightedRightLines = diffSection.highlightedLeftLines.value, diffSection.highlightedRightLines.value
}
var lineHTML template.HTML
@@ -371,6 +395,12 @@ func (diffSection *DiffSection) getDiffLineForRender(diffLineType DiffLineType,
// GetComputedInlineDiffFor computes inline diff for the given line.
func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine, locale translation.Locale) DiffInline {
+ defer func() {
+ if err := recover(); err != nil {
+			// the logic in this function is complex, so catch any panic here because Go's template engine doesn't print the stack
+ log.Error("panic in GetComputedInlineDiffFor: %v\nStack: %s", err, log.Stack(2))
+ }
+ }()
// try to find equivalent diff line. ignore, otherwise
switch diffLine.Type {
case DiffLineSection:
@@ -388,33 +418,37 @@ func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine, loc
}
}
+// diffVarMutable is a wrapper to make a variable mutable to be shared across structs
+type diffVarMutable[T any] struct {
+ value T
+}
+
// DiffFile represents a file diff.
type DiffFile struct {
// only used internally to parse Ambiguous filenames
isAmbiguous bool
// basic fields (parsed from diff result)
- Name string
- NameHash string
- OldName string
- Addition int
- Deletion int
- Type DiffFileType
- Mode string
- OldMode string
- IsCreated bool
- IsDeleted bool
- IsBin bool
- IsLFSFile bool
- IsRenamed bool
- IsSubmodule bool
+ Name string
+ NameHash string
+ OldName string
+ Addition int
+ Deletion int
+ Type DiffFileType
+ EntryMode string
+ OldEntryMode string
+ IsCreated bool
+ IsDeleted bool
+ IsBin bool
+ IsLFSFile bool
+ IsRenamed bool
+ IsSubmodule bool
// basic fields but for render purpose only
Sections []*DiffSection
IsIncomplete bool
IsIncompleteLineTooLong bool
// will be filled by the extra loop in GitDiffForRender
- Language string
IsGenerated bool
IsVendored bool
SubmoduleDiffInfo *SubmoduleDiffInfo // IsSubmodule==true, then there must be a SubmoduleDiffInfo
@@ -426,9 +460,11 @@ type DiffFile struct {
IsViewed bool // User specific
HasChangedSinceLastReview bool // User specific
- // for render purpose only, will be filled by the extra loop in GitDiffForRender
- highlightedLeftLines map[int]template.HTML
- highlightedRightLines map[int]template.HTML
+ // for render purpose only, will be filled by the extra loop in GitDiffForRender, the maps of lines are 0-based
+ language diffVarMutable[string]
+ highlightRender diffVarMutable[chroma.Lexer] // cache render (atm: lexer) for current file, only detect once for line-by-line mode
+ highlightedLeftLines diffVarMutable[map[int]template.HTML]
+ highlightedRightLines diffVarMutable[map[int]template.HTML]
}
// GetType returns type of diff file.
@@ -465,6 +501,7 @@ func (diffFile *DiffFile) GetTailSectionAndLimitedContent(leftCommit, rightCommi
Type: DiffLineSection,
Content: " ",
SectionInfo: &DiffLineSectionInfo{
+ language: &diffFile.language,
Path: diffFile.Name,
LastLeftIdx: lastLine.LeftIdx,
LastRightIdx: lastLine.RightIdx,
@@ -496,21 +533,36 @@ func (diffFile *DiffFile) ShouldBeHidden() bool {
return diffFile.IsGenerated || diffFile.IsViewed
}
-func (diffFile *DiffFile) ModeTranslationKey(mode string) string {
- switch mode {
- case "040000":
- return "git.filemode.directory"
- case "100644":
- return "git.filemode.normal_file"
- case "100755":
- return "git.filemode.executable_file"
- case "120000":
- return "git.filemode.symbolic_link"
- case "160000":
- return "git.filemode.submodule"
- default:
- return mode
+func (diffFile *DiffFile) TranslateDiffEntryMode(locale translation.Locale) string {
+ entryModeTr := func(mode string) string {
+ entryMode := git.ParseEntryMode(mode)
+ switch {
+ case entryMode.IsDir():
+ return locale.TrString("git.filemode.directory")
+ case entryMode.IsRegular():
+ return locale.TrString("git.filemode.normal_file")
+ case entryMode.IsExecutable():
+ return locale.TrString("git.filemode.executable_file")
+ case entryMode.IsLink():
+ return locale.TrString("git.filemode.symbolic_link")
+ case entryMode.IsSubModule():
+ return locale.TrString("git.filemode.submodule")
+ default:
+ return mode
+ }
}
+
+ if diffFile.EntryMode != "" && diffFile.OldEntryMode != "" {
+ oldMode := entryModeTr(diffFile.OldEntryMode)
+ newMode := entryModeTr(diffFile.EntryMode)
+ return locale.TrString("git.filemode.changed_filemode", oldMode, newMode)
+ }
+ if diffFile.EntryMode != "" {
+ if entryMode := git.ParseEntryMode(diffFile.EntryMode); !entryMode.IsRegular() {
+ return entryModeTr(diffFile.EntryMode)
+ }
+ }
+ return ""
}
type limitByteWriter struct {
@@ -530,7 +582,7 @@ func getCommitFileLineCountAndLimitedContent(commit *git.Commit, filePath string
if err != nil {
return 0, nil
}
- w := &limitByteWriter{limit: MaxDiffHighlightEntireFileSize + 1}
+ w := &limitByteWriter{limit: MaxFullFileHighlightSizeLimit + 1}
lineCount, err = blob.GetBlobLineCount(w)
if err != nil {
return 0, nil
@@ -690,10 +742,10 @@ parsingLoop:
strings.HasPrefix(line, "new mode "):
if strings.HasPrefix(line, "old mode ") {
- curFile.OldMode = prepareValue(line, "old mode ")
+ curFile.OldEntryMode = prepareValue(line, "old mode ")
}
if strings.HasPrefix(line, "new mode ") {
- curFile.Mode = prepareValue(line, "new mode ")
+ curFile.EntryMode = prepareValue(line, "new mode ")
}
if strings.HasSuffix(line, " 160000\n") {
curFile.IsSubmodule, curFile.SubmoduleDiffInfo = true, &SubmoduleDiffInfo{}
@@ -728,7 +780,7 @@ parsingLoop:
curFile.Type = DiffFileAdd
curFile.IsCreated = true
if strings.HasPrefix(line, "new file mode ") {
- curFile.Mode = prepareValue(line, "new file mode ")
+ curFile.EntryMode = prepareValue(line, "new file mode ")
}
if strings.HasSuffix(line, " 160000\n") {
curFile.IsSubmodule, curFile.SubmoduleDiffInfo = true, &SubmoduleDiffInfo{}
@@ -888,6 +940,15 @@ func skipToNextDiffHead(input *bufio.Reader) (line string, err error) {
return line, err
}
+func newDiffSectionForDiffFile(curFile *DiffFile) *DiffSection {
+ return &DiffSection{
+ language: &curFile.language,
+ highlightLexer: &curFile.highlightRender,
+ highlightedLeftLines: &curFile.highlightedLeftLines,
+ highlightedRightLines: &curFile.highlightedRightLines,
+ }
+}
+
func parseHunks(ctx context.Context, curFile *DiffFile, maxLines, maxLineCharacters int, input *bufio.Reader) (lineBytes []byte, isFragment bool, err error) {
sb := strings.Builder{}
@@ -945,12 +1006,12 @@ func parseHunks(ctx context.Context, curFile *DiffFile, maxLines, maxLineCharact
line := sb.String()
// Create a new section to represent this hunk
- curSection = &DiffSection{file: curFile}
+ curSection = newDiffSectionForDiffFile(curFile)
lastLeftIdx = -1
curFile.Sections = append(curFile.Sections, curSection)
// FIXME: the "-1" can't be right, these "line idx" are all 1-based, maybe there are other bugs that covers this bug.
- lineSectionInfo := getDiffLineSectionInfo(curFile.Name, line, leftLine-1, rightLine-1)
+ lineSectionInfo := newDiffLineSectionInfo(curFile, line, leftLine-1, rightLine-1)
diffLine := &DiffLine{
Type: DiffLineSection,
Content: line,
@@ -985,7 +1046,7 @@ func parseHunks(ctx context.Context, curFile *DiffFile, maxLines, maxLineCharact
rightLine++
if curSection == nil {
// Create a new section to represent this hunk
- curSection = &DiffSection{file: curFile}
+ curSection = newDiffSectionForDiffFile(curFile)
curFile.Sections = append(curFile.Sections, curSection)
lastLeftIdx = -1
}
@@ -1018,7 +1079,7 @@ func parseHunks(ctx context.Context, curFile *DiffFile, maxLines, maxLineCharact
}
if curSection == nil {
// Create a new section to represent this hunk
- curSection = &DiffSection{file: curFile}
+ curSection = newDiffSectionForDiffFile(curFile)
curFile.Sections = append(curFile.Sections, curSection)
lastLeftIdx = -1
}
@@ -1045,7 +1106,7 @@ func parseHunks(ctx context.Context, curFile *DiffFile, maxLines, maxLineCharact
lastLeftIdx = -1
if curSection == nil {
// Create a new section to represent this hunk
- curSection = &DiffSection{file: curFile}
+ curSection = newDiffSectionForDiffFile(curFile)
curFile.Sections = append(curFile.Sections, curSection)
}
curSection.Lines = append(curSection.Lines, diffLine)
@@ -1244,23 +1305,14 @@ func getDiffBasic(ctx context.Context, gitRepo *git.Repository, opts *DiffOption
cmdCtx, cmdCancel := context.WithCancel(ctx)
defer cmdCancel()
- reader, writer := io.Pipe()
- defer func() {
- _ = reader.Close()
- _ = writer.Close()
- }()
-
+ reader, readerClose := cmdDiff.MakeStdoutPipe()
+ defer readerClose()
go func() {
- stderr := &bytes.Buffer{}
- if err := cmdDiff.WithTimeout(time.Duration(setting.Git.Timeout.Default) * time.Second).
+ if err := cmdDiff.
WithDir(repoPath).
- WithStdout(writer).
- WithStderr(stderr).
- Run(cmdCtx); err != nil && !git.IsErrCanceledOrKilled(err) {
- log.Error("error during GetDiff(git diff dir: %s): %v, stderr: %s", repoPath, err, stderr.String())
+ RunWithStderr(cmdCtx); err != nil && !gitcmd.IsErrorCanceledOrKilled(err) {
+ log.Error("error during GetDiff(git diff dir: %s): %v", repoPath, err)
}
-
- _ = writer.Close()
}()
diff, err := ParsePatch(cmdCtx, opts.MaxLines, opts.MaxLineCharacters, opts.MaxFiles, reader, parsePatchSkipToFile)
@@ -1284,6 +1336,8 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
return nil, err
}
+ startTime := time.Now()
+
checker, err := attribute.NewBatchChecker(gitRepo, opts.AfterCommitID, []string{attribute.LinguistVendored, attribute.LinguistGenerated, attribute.LinguistLanguage, attribute.GitlabLanguage, attribute.Diff})
if err != nil {
return nil, err
@@ -1299,7 +1353,7 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
isVendored, isGenerated = attrs.GetVendored(), attrs.GetGenerated()
language := attrs.GetLanguage()
if language.Has() {
- diffFile.Language = language.Value()
+ diffFile.language.value = language.Value()
}
attrDiff = attrs.Get(attribute.Diff).ToString()
}
@@ -1323,13 +1377,14 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
diffFile.Sections = append(diffFile.Sections, tailSection)
}
- shouldFullFileHighlight := !setting.Git.DisableDiffHighlight && attrDiff.Value() == ""
+ shouldFullFileHighlight := attrDiff.Value() == "" // only do highlight if no custom diff command
+ shouldFullFileHighlight = shouldFullFileHighlight && time.Since(startTime) < MaxFullFileHighlightTimeLimit
if shouldFullFileHighlight {
- if limitedContent.LeftContent != nil && limitedContent.LeftContent.buf.Len() < MaxDiffHighlightEntireFileSize {
- diffFile.highlightedLeftLines = highlightCodeLines(diffFile, true /* left */, limitedContent.LeftContent.buf.Bytes())
+ if limitedContent.LeftContent != nil {
+ diffFile.highlightedLeftLines.value = highlightCodeLinesForDiffFile(diffFile, true /* left */, limitedContent.LeftContent.buf.Bytes())
}
- if limitedContent.RightContent != nil && limitedContent.RightContent.buf.Len() < MaxDiffHighlightEntireFileSize {
- diffFile.highlightedRightLines = highlightCodeLines(diffFile, false /* right */, limitedContent.RightContent.buf.Bytes())
+ if limitedContent.RightContent != nil {
+ diffFile.highlightedRightLines.value = highlightCodeLinesForDiffFile(diffFile, false /* right */, limitedContent.RightContent.buf.Bytes())
}
}
}
@@ -1337,13 +1392,27 @@ func GetDiffForRender(ctx context.Context, repoLink string, gitRepo *git.Reposit
return diff, nil
}
-func highlightCodeLines(diffFile *DiffFile, isLeft bool, rawContent []byte) map[int]template.HTML {
+func FillDiffFileHighlightLinesByContent(diffFile *DiffFile, left, right []byte) {
+ diffFile.highlightedLeftLines.value = highlightCodeLinesForDiffFile(diffFile, true /* left */, left)
+ diffFile.highlightedRightLines.value = highlightCodeLinesForDiffFile(diffFile, false /* right */, right)
+}
+
+func highlightCodeLinesForDiffFile(diffFile *DiffFile, isLeft bool, rawContent []byte) map[int]template.HTML {
+ return highlightCodeLines(diffFile.Name, diffFile.language.value, diffFile.Sections, isLeft, rawContent)
+}
+
+func highlightCodeLines(name, lang string, sections []*DiffSection, isLeft bool, rawContent []byte) map[int]template.HTML {
+ if setting.Git.DisableDiffHighlight || len(rawContent) > MaxFullFileHighlightSizeLimit {
+ return nil
+ }
+
content := util.UnsafeBytesToString(charset.ToUTF8(rawContent, charset.ConvertOpts{}))
- highlightedNewContent, _ := highlight.Code(diffFile.Name, diffFile.Language, content)
+ lexer := highlight.DetectChromaLexerByFileName(name, lang)
+ highlightedNewContent := highlight.RenderCodeByLexer(lexer, content)
unsafeLines := highlight.UnsafeSplitHighlightedLines(highlightedNewContent)
lines := make(map[int]template.HTML, len(unsafeLines))
// only save the highlighted lines we need, but not the whole file, to save memory
- for _, sec := range diffFile.Sections {
+ for _, sec := range sections {
for _, ln := range sec.Lines {
lineIdx := ln.LeftIdx
if !isLeft {
diff --git a/services/gitdiff/gitdiff_excerpt.go b/services/gitdiff/gitdiff_excerpt.go
new file mode 100644
index 0000000000..4b1958fc11
--- /dev/null
+++ b/services/gitdiff/gitdiff_excerpt.go
@@ -0,0 +1,124 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "html/template"
+ "io"
+
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+type BlobExcerptOptions struct {
+ LastLeft int
+ LastRight int
+ LeftIndex int
+ RightIndex int
+ LeftHunkSize int
+ RightHunkSize int
+ Direction string
+ Language string
+}
+
+func fillExcerptLines(section *DiffSection, filePath string, reader io.Reader, lang string, idxLeft, idxRight, chunkSize int) error {
+ buf := &bytes.Buffer{}
+ scanner := bufio.NewScanner(reader)
+ var diffLines []*DiffLine
+ for line := 0; line < idxRight+chunkSize; line++ {
+ if ok := scanner.Scan(); !ok {
+ break
+ }
+ lineText := scanner.Text()
+ if buf.Len()+len(lineText) < int(setting.UI.MaxDisplayFileSize) {
+ buf.WriteString(lineText)
+ buf.WriteByte('\n')
+ }
+ if line < idxRight {
+ continue
+ }
+ diffLine := &DiffLine{
+ LeftIdx: idxLeft + (line - idxRight) + 1,
+ RightIdx: line + 1,
+ Type: DiffLinePlain,
+ Content: " " + lineText,
+ }
+ diffLines = append(diffLines, diffLine)
+ }
+ if err := scanner.Err(); err != nil {
+ return fmt.Errorf("fillExcerptLines scan: %w", err)
+ }
+ section.Lines = diffLines
+ // DiffLinePlain always uses right lines
+ section.highlightedRightLines.value = highlightCodeLines(filePath, lang, []*DiffSection{section}, false /* right */, buf.Bytes())
+ return nil
+}
+
+func BuildBlobExcerptDiffSection(filePath string, reader io.Reader, opts BlobExcerptOptions) (*DiffSection, error) {
+ lastLeft, lastRight, idxLeft, idxRight := opts.LastLeft, opts.LastRight, opts.LeftIndex, opts.RightIndex
+ leftHunkSize, rightHunkSize, direction := opts.LeftHunkSize, opts.RightHunkSize, opts.Direction
+ language := opts.Language
+
+ chunkSize := BlobExcerptChunkSize
+ section := &DiffSection{
+ language: &diffVarMutable[string]{value: language},
+ highlightLexer: &diffVarMutable[chroma.Lexer]{},
+ highlightedLeftLines: &diffVarMutable[map[int]template.HTML]{},
+ highlightedRightLines: &diffVarMutable[map[int]template.HTML]{},
+ FileName: filePath,
+ }
+ var err error
+ if direction == "up" && (idxLeft-lastLeft) > chunkSize {
+ idxLeft -= chunkSize
+ idxRight -= chunkSize
+ leftHunkSize += chunkSize
+ rightHunkSize += chunkSize
+ err = fillExcerptLines(section, filePath, reader, language, idxLeft-1, idxRight-1, chunkSize)
+ } else if direction == "down" && (idxLeft-lastLeft) > chunkSize {
+ err = fillExcerptLines(section, filePath, reader, language, lastLeft, lastRight, chunkSize)
+ lastLeft += chunkSize
+ lastRight += chunkSize
+ } else {
+ offset := -1
+ if direction == "down" {
+ offset = 0
+ }
+ err = fillExcerptLines(section, filePath, reader, language, lastLeft, lastRight, idxRight-lastRight+offset)
+ leftHunkSize = 0
+ rightHunkSize = 0
+ idxLeft = lastLeft
+ idxRight = lastRight
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ newLineSection := &DiffLine{
+ Type: DiffLineSection,
+ SectionInfo: &DiffLineSectionInfo{
+ language: &diffVarMutable[string]{value: opts.Language},
+ Path: filePath,
+ LastLeftIdx: lastLeft,
+ LastRightIdx: lastRight,
+ LeftIdx: idxLeft,
+ RightIdx: idxRight,
+ LeftHunkSize: leftHunkSize,
+ RightHunkSize: rightHunkSize,
+ },
+ }
+ if newLineSection.GetExpandDirection() != "" {
+ newLineSection.Content = fmt.Sprintf("@@ -%d,%d +%d,%d @@\n", idxLeft, leftHunkSize, idxRight, rightHunkSize)
+ switch direction {
+ case "up":
+ section.Lines = append([]*DiffLine{newLineSection}, section.Lines...)
+ case "down":
+ section.Lines = append(section.Lines, newLineSection)
+ }
+ }
+ return section, nil
+}
diff --git a/services/gitdiff/gitdiff_excerpt_test.go b/services/gitdiff/gitdiff_excerpt_test.go
new file mode 100644
index 0000000000..cb71e66462
--- /dev/null
+++ b/services/gitdiff/gitdiff_excerpt_test.go
@@ -0,0 +1,39 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package gitdiff
+
+import (
+ "bytes"
+ "strconv"
+ "testing"
+
+ "code.gitea.io/gitea/modules/translation"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestBuildBlobExcerptDiffSection(t *testing.T) {
+ data := &bytes.Buffer{}
+ for i := range 100 {
+ data.WriteString("a = " + strconv.Itoa(i+1) + "\n")
+ }
+
+ locale := translation.MockLocale{}
+ lineMiddle := 50
+ diffSection, err := BuildBlobExcerptDiffSection("a.py", bytes.NewReader(data.Bytes()), BlobExcerptOptions{
+ LeftIndex: lineMiddle,
+ RightIndex: lineMiddle,
+ LeftHunkSize: 10,
+ RightHunkSize: 10,
+ Direction: "up",
+ })
+ require.NoError(t, err)
+ assert.Len(t, diffSection.highlightedRightLines.value, BlobExcerptChunkSize)
+ assert.NotEmpty(t, diffSection.highlightedRightLines.value[lineMiddle-BlobExcerptChunkSize-1])
+ assert.NotEmpty(t, diffSection.highlightedRightLines.value[lineMiddle-2]) // 0-based
+
+ diffInline := diffSection.GetComputedInlineDiffFor(diffSection.Lines[1], locale)
+ assert.Equal(t, `a = 30`+"\n", string(diffInline.Content))
+}
diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go
index a94dad8b63..62b17c223c 100644
--- a/services/gitdiff/gitdiff_test.go
+++ b/services/gitdiff/gitdiff_test.go
@@ -1111,22 +1111,20 @@ func TestDiffLine_GetExpandDirection(t *testing.T) {
func TestHighlightCodeLines(t *testing.T) {
t.Run("CharsetDetecting", func(t *testing.T) {
diffFile := &DiffFile{
- Name: "a.c",
- Language: "c",
+ Name: "a.c",
Sections: []*DiffSection{
{
Lines: []*DiffLine{{LeftIdx: 1}},
},
},
}
- ret := highlightCodeLines(diffFile, true, []byte("// abc\xcc def\xcd")) // ISO-8859-1 bytes
+ ret := highlightCodeLinesForDiffFile(diffFile, true, []byte("// abc\xcc def\xcd")) // ISO-8859-1 bytes
assert.Equal(t, "// abcÌ defÍ\n", string(ret[0]))
})
t.Run("LeftLines", func(t *testing.T) {
diffFile := &DiffFile{
- Name: "a.c",
- Language: "c",
+ Name: "a.c",
Sections: []*DiffSection{
{
Lines: []*DiffLine{
@@ -1138,7 +1136,7 @@ func TestHighlightCodeLines(t *testing.T) {
},
}
const nl = "\n"
- ret := highlightCodeLines(diffFile, true, []byte("a\nb\n"))
+ ret := highlightCodeLinesForDiffFile(diffFile, true, []byte("a\nb\n"))
assert.Equal(t, map[int]template.HTML{
0: `a` + nl,
1: `b`,
diff --git a/services/gitdiff/highlightdiff.go b/services/gitdiff/highlightdiff.go
index e8be063e69..1de3963788 100644
--- a/services/gitdiff/highlightdiff.go
+++ b/services/gitdiff/highlightdiff.go
@@ -7,12 +7,45 @@ import (
"bytes"
"html/template"
"strings"
+ "unicode/utf8"
+
+ "code.gitea.io/gitea/modules/util"
"github.com/sergi/go-diff/diffmatchpatch"
)
-// token is a html tag or entity, eg: "", "", "<"
-func extractHTMLToken(s string) (before, token, after string, valid bool) {
+// extractDiffTokenRemainingFullTag tries to extract full tag with content from the remaining string
+// e.g. for input: "contentthe-rest...", it returns "content", "the-rest...", true
+func extractDiffTokenRemainingFullTag(s string) (token, after string, valid bool) {
+ pos := 0
+ for ; pos < len(s); pos++ {
+ c := s[pos]
+ if c == '<' {
+ break
+ }
+ // keep in mind: even if we'd like to relax this check,
+ // we should never ignore "&" because it is for HTML entity and can't be safely used in the diff algorithm,
+		// because a diff between "<" and ">" would generate a broken result.
+ isSymbolChar := 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9' || c == '_' || c == '-' || c == '.'
+ if !isSymbolChar {
+ return "", s, false
+ }
+ }
+ if pos+1 >= len(s) || s[pos+1] != '/' {
+ return "", s, false
+ }
+ pos2 := strings.IndexByte(s[pos:], '>')
+ if pos2 == -1 {
+ return "", s, false
+ }
+ return s[:pos+pos2+1], s[pos+pos2+1:], true
+}
+
+// Returned token:
+// * full tag with content: "<content>", it is used to optimize diff results to highlight the whole changed symbol
+// * opening/closing tag: "" or ""
+// * HTML entity: "<"
+func extractDiffToken(s string) (before, token, after string, valid bool) {
for pos1 := 0; pos1 < len(s); pos1++ {
switch s[pos1] {
case '<':
@@ -20,7 +53,15 @@ func extractHTMLToken(s string) (before, token, after string, valid bool) {
if pos2 == -1 {
return "", "", s, false
}
- return s[:pos1], s[pos1 : pos1+pos2+1], s[pos1+pos2+1:], true
+ before, token, after = s[:pos1], s[pos1:pos1+pos2+1], s[pos1+pos2+1:]
+
+ if !strings.HasPrefix(token, "") {
+ // try to extract full tag with content, e.g. `<content>`, to optimize diff results
+ if fullTokenRemaining, fullTokenAfter, ok := extractDiffTokenRemainingFullTag(after); ok {
+ return before, "<" + token + fullTokenRemaining + ">", fullTokenAfter, true
+ }
+ }
+ return before, token, after, true
case '&':
pos2 := strings.IndexByte(s[pos1:], ';')
if pos2 == -1 {
@@ -47,7 +88,9 @@ type highlightCodeDiff struct {
placeholderOverflowCount int
- lineWrapperTags []string
+ diffCodeAddedOpen rune
+ diffCodeRemovedOpen rune
+ diffCodeClose rune
}
func newHighlightCodeDiff() *highlightCodeDiff {
@@ -87,11 +130,26 @@ func (hcd *highlightCodeDiff) collectUsedRunes(code template.HTML) {
}
}
-func (hcd *highlightCodeDiff) diffLineWithHighlight(lineType DiffLineType, codeA, codeB template.HTML) template.HTML {
- return hcd.diffLineWithHighlightWrapper(nil, lineType, codeA, codeB)
+func (hcd *highlightCodeDiff) diffEqualPartIsSpaceOnly(s string) bool {
+ for _, r := range s {
+ if r >= hcd.placeholderBegin {
+ recovered := hcd.placeholderTokenMap[r]
+ if strings.HasPrefix(recovered, "<<") {
+ return false // a full tag with content, it can't be space-only
+ } else if strings.HasPrefix(recovered, "<") {
+ continue // a single opening/closing tag, skip the tag and continue to check the content
+ }
+ return false // otherwise, it must be an HTML entity, it can't be space-only
+ }
+ isSpace := r == ' ' || r == '\t' || r == '\n' || r == '\r'
+ if !isSpace {
+ return false
+ }
+ }
+ return true
}
-func (hcd *highlightCodeDiff) diffLineWithHighlightWrapper(lineWrapperTags []string, lineType DiffLineType, codeA, codeB template.HTML) template.HTML {
+func (hcd *highlightCodeDiff) diffLineWithHighlight(lineType DiffLineType, codeA, codeB template.HTML) template.HTML {
hcd.collectUsedRunes(codeA)
hcd.collectUsedRunes(codeB)
@@ -104,32 +162,44 @@ func (hcd *highlightCodeDiff) diffLineWithHighlightWrapper(lineWrapperTags []str
buf := bytes.NewBuffer(nil)
- // restore the line wrapper tags and , if necessary
- for _, tag := range lineWrapperTags {
- buf.WriteString(tag)
+ if hcd.diffCodeClose == 0 {
+ // tests can pre-set the placeholders
+ hcd.diffCodeAddedOpen = hcd.registerTokenAsPlaceholder(``)
+ hcd.diffCodeRemovedOpen = hcd.registerTokenAsPlaceholder(``)
+ hcd.diffCodeClose = hcd.registerTokenAsPlaceholder(``)
}
- addedCodePrefix := hcd.registerTokenAsPlaceholder(``)
- removedCodePrefix := hcd.registerTokenAsPlaceholder(``)
- codeTagSuffix := hcd.registerTokenAsPlaceholder(``)
+ equalPartSpaceOnly := true
+ for _, diff := range diffs {
+ if diff.Type != diffmatchpatch.DiffEqual {
+ continue
+ }
+ if equalPartSpaceOnly = hcd.diffEqualPartIsSpaceOnly(diff.Text); !equalPartSpaceOnly {
+ break
+ }
+ }
- if codeTagSuffix != 0 {
+ // only add "added"/"removed" tags when needed:
+ // * non-space contents appear in the DiffEqual parts (not a full-line add/del)
+ // * placeholder map still works (not exhausted, can get the closing tag placeholder)
+ addDiffTags := !equalPartSpaceOnly && hcd.diffCodeClose != 0
+ if addDiffTags {
for _, diff := range diffs {
switch {
case diff.Type == diffmatchpatch.DiffEqual:
buf.WriteString(diff.Text)
case diff.Type == diffmatchpatch.DiffInsert && lineType == DiffLineAdd:
- buf.WriteRune(addedCodePrefix)
+ buf.WriteRune(hcd.diffCodeAddedOpen)
buf.WriteString(diff.Text)
- buf.WriteRune(codeTagSuffix)
+ buf.WriteRune(hcd.diffCodeClose)
case diff.Type == diffmatchpatch.DiffDelete && lineType == DiffLineDel:
- buf.WriteRune(removedCodePrefix)
+ buf.WriteRune(hcd.diffCodeRemovedOpen)
buf.WriteString(diff.Text)
- buf.WriteRune(codeTagSuffix)
+ buf.WriteRune(hcd.diffCodeClose)
}
}
} else {
- // placeholder map space is exhausted
+ // the caller will still add added/removed backgrounds for the whole line
for _, diff := range diffs {
take := diff.Type == diffmatchpatch.DiffEqual || (diff.Type == diffmatchpatch.DiffInsert && lineType == DiffLineAdd) || (diff.Type == diffmatchpatch.DiffDelete && lineType == DiffLineDel)
if take {
@@ -137,19 +207,21 @@ func (hcd *highlightCodeDiff) diffLineWithHighlightWrapper(lineWrapperTags []str
}
}
}
- for range lineWrapperTags {
- buf.WriteString("")
- }
return hcd.recoverOneDiff(buf.String())
}
func (hcd *highlightCodeDiff) registerTokenAsPlaceholder(token string) rune {
+ recovered := token
+ if token[0] == '<' && token[1] != '<' {
+ // when recovering a single tag, only use the tag itself, ignore the trailing comment (for how the comment is generated, see the code in `convert` function)
+ recovered = token[:strings.IndexByte(token, '>')+1]
+ }
placeholder, ok := hcd.tokenPlaceholderMap[token]
if !ok {
placeholder = hcd.nextPlaceholder()
if placeholder != 0 {
hcd.tokenPlaceholderMap[token] = placeholder
- hcd.placeholderTokenMap[placeholder] = token
+ hcd.placeholderTokenMap[placeholder] = recovered
}
}
return placeholder
@@ -160,44 +232,42 @@ func (hcd *highlightCodeDiff) convertToPlaceholders(htmlContent template.HTML) s
var tagStack []string
res := strings.Builder{}
- firstRunForLineTags := hcd.lineWrapperTags == nil
-
+ htmlCode := string(htmlContent)
var beforeToken, token string
var valid bool
-
- htmlCode := string(htmlContent)
- // the standard chroma highlight HTML is " ... "
for {
- beforeToken, token, htmlCode, valid = extractHTMLToken(htmlCode)
+ beforeToken, token, htmlCode, valid = extractDiffToken(htmlCode)
if !valid || token == "" {
break
}
// write the content before the token into result string, and consume the token in the string
res.WriteString(beforeToken)
+ // the standard chroma highlight HTML is ` ... `
// the line wrapper tags should be removed before diff
if strings.HasPrefix(token, `")
continue
}
var tokenInMap string
- if strings.HasSuffix(token, "") { // for closing tag
+ if strings.HasPrefix(token, "") { // for closing tag
if len(tagStack) == 0 {
- break // invalid diff result, no opening tag but see closing tag
+ continue // no opening tag but see closing tag, skip it
}
// make sure the closing tag in map is related to the open tag, to make the diff algorithm can match the opening/closing tags
// the closing tag will be recorded in the map by key "" for ""
tokenInMap = token + ""
tagStack = tagStack[:len(tagStack)-1]
- } else if token[0] == '<' { // for opening tag
- tokenInMap = token
- tagStack = append(tagStack, token)
- } else if token[0] == '&' { // for html entity
+ } else if token[0] == '<' {
+ if token[1] == '<' {
+ // full tag `<content>`, recover to `content`
+ tokenInMap = token
+ } else {
+ // opening tag
+ tokenInMap = token
+ tagStack = append(tagStack, token)
+ }
+ } else if token[0] == '&' { // for HTML entity
tokenInMap = token
} // else: impossible
@@ -210,8 +280,13 @@ func (hcd *highlightCodeDiff) convertToPlaceholders(htmlContent template.HTML) s
// unfortunately, all private use runes has been exhausted, no more placeholder could be used, no more converting
// usually, the exhausting won't occur in real cases, the magnitude of used placeholders is not larger than that of the CSS classes outputted by chroma.
hcd.placeholderOverflowCount++
+ if strings.HasPrefix(token, "<<") {
+ pos1 := strings.IndexByte(token, '>')
+ pos2 := strings.LastIndexByte(token, '<')
+ res.WriteString(token[pos1+1 : pos2]) // recover to `content` from "<content>"
+ }
if strings.HasPrefix(token, "&") {
- // when the token is a html entity, something must be outputted even if there is no placeholder.
+ // when the token is an HTML entity, something must be outputted even if there is no placeholder.
res.WriteRune(0xFFFD) // replacement character TODO: how to handle this case more gracefully?
res.WriteString(token[1:]) // still output the entity code part, otherwise there will be no diff result.
}
@@ -223,43 +298,99 @@ func (hcd *highlightCodeDiff) convertToPlaceholders(htmlContent template.HTML) s
return res.String()
}
+// recoverOneRune tries to recover one rune
+// * if the rune is a placeholder, it will be recovered to the corresponding content
+// * otherwise it will be returned as is
+func (hcd *highlightCodeDiff) recoverOneRune(buf []byte) (r rune, runeLen int, isSingleTag bool, recovered string) {
+ r, runeLen = utf8.DecodeRune(buf)
+ token := hcd.placeholderTokenMap[r]
+ if token == "" {
+ return r, runeLen, false, "" // rune itself, not a placeholder
+ } else if token[0] == '<' {
+ if token[1] == '<' {
+ return 0, runeLen, false, token[1 : len(token)-1] // full tag `<content>`, recover to `content`
+ }
+ return r, runeLen, true, token // single tag
+ }
+ return 0, runeLen, false, token // HTML entity
+}
+
func (hcd *highlightCodeDiff) recoverOneDiff(str string) template.HTML {
sb := strings.Builder{}
var tagStack []string
+ var diffCodeOpenTag string
+ diffCodeCloseTag := hcd.placeholderTokenMap[hcd.diffCodeClose]
+ strBytes := util.UnsafeStringToBytes(str)
- for _, r := range str {
- token, ok := hcd.placeholderTokenMap[r]
- if !ok || token == "" {
- sb.WriteRune(r) // if the rune is not a placeholder, write it as it is
- continue
- }
- var tokenToRecover string
- if strings.HasPrefix(token, "") { // for closing tag
- // only get the tag itself, ignore the trailing comment (for how the comment is generated, see the code in `convert` function)
- tokenToRecover = token[:strings.IndexByte(token, '>')+1]
- if len(tagStack) == 0 {
- continue // if no opening tag in stack yet, skip the closing tag
+	// this loop is slightly longer than expected, for performance considerations
+ for idx := 0; idx < len(strBytes); {
+ // take a look at the next rune
+ r, runeLen, isSingleTag, recovered := hcd.recoverOneRune(strBytes[idx:])
+ idx += runeLen
+
+ // loop section 1: if it isn't a single tag, then try to find the following runes until the next single tag, and recover them together
+ if !isSingleTag {
+ if diffCodeOpenTag != "" {
+ // start the "added/removed diff tag" if the current token is in the diff part
+ sb.WriteString(diffCodeOpenTag)
}
- tagStack = tagStack[:len(tagStack)-1]
- } else if token[0] == '<' { // for opening tag
- tokenToRecover = token
- tagStack = append(tagStack, token)
- } else if token[0] == '&' { // for html entity
- tokenToRecover = token
- } // else: impossible
- sb.WriteString(tokenToRecover)
+ if recovered != "" {
+ sb.WriteString(recovered)
+ } else {
+ sb.WriteRune(r)
+ }
+ // inner loop to recover following runes until the next single tag
+ for idx < len(strBytes) {
+ r, runeLen, isSingleTag, recovered = hcd.recoverOneRune(strBytes[idx:])
+ idx += runeLen
+ if isSingleTag {
+ break
+ }
+ if recovered != "" {
+ sb.WriteString(recovered)
+ } else {
+ sb.WriteRune(r)
+ }
+ }
+ if diffCodeOpenTag != "" {
+ // end the "added/removed diff tag" if the current token is in the diff part
+ sb.WriteString(diffCodeCloseTag)
+ }
+ }
+
+ if !isSingleTag {
+ break // the inner loop has already consumed all remaining runes, no more single tag found
+ }
+
+ // loop section 2: for opening/closing HTML tags
+ placeholder := r
+ if recovered[1] != '/' { // opening tag
+ if placeholder == hcd.diffCodeAddedOpen || placeholder == hcd.diffCodeRemovedOpen {
+ diffCodeOpenTag = recovered
+ recovered = ""
+ } else {
+ tagStack = append(tagStack, recovered)
+ }
+ } else { // closing tag
+ if placeholder == hcd.diffCodeClose {
+ diffCodeOpenTag = "" // the highlighted diff is closed, no more diff
+ recovered = ""
+ } else if len(tagStack) != 0 {
+ tagStack = tagStack[:len(tagStack)-1]
+ } else {
+ recovered = ""
+ }
+ }
+ sb.WriteString(recovered)
}
- if len(tagStack) > 0 {
- // close all opening tags
- for i := len(tagStack) - 1; i >= 0; i-- {
- tagToClose := tagStack[i]
- // get the closing tag "" from "" or ""
- pos := strings.IndexAny(tagToClose, " >")
- if pos != -1 {
- sb.WriteString("" + tagToClose[1:pos] + ">")
- } // else: impossible. every tag was pushed into the stack by the code above and is valid HTML opening tag
- }
+ // close all opening tags
+ for i := len(tagStack) - 1; i >= 0; i-- {
+ tagToClose := tagStack[i]
+ // get the closing tag "" from "" or ""
+ pos := strings.IndexAny(tagToClose, " >")
+ // pos must be positive, because the tags were pushed by us
+ sb.WriteString("" + tagToClose[1:pos] + ">")
}
return template.HTML(sb.String())
}
diff --git a/services/gitdiff/highlightdiff_test.go b/services/gitdiff/highlightdiff_test.go
index 0df2e29d13..ea9a8829ed 100644
--- a/services/gitdiff/highlightdiff_test.go
+++ b/services/gitdiff/highlightdiff_test.go
@@ -9,28 +9,62 @@ import (
"strings"
"testing"
+ "code.gitea.io/gitea/modules/highlight"
+
"github.com/stretchr/testify/assert"
)
-func TestDiffWithHighlight(t *testing.T) {
- t.Run("DiffLineAddDel", func(t *testing.T) {
+func BenchmarkHighlightDiff(b *testing.B) {
+ for b.Loop() {
+ // still fast enough: BenchmarkHighlightDiff-12 1000000 1027 ns/op
+ // TODO: the real bottleneck is that "diffLineWithHighlight" is called twice when rendering "added" and "removed" lines by the caller
+ // Ideally the caller should cache the diff result, and then use the diff result to render "added" and "removed" lines separately
hcd := newHighlightCodeDiff()
codeA := template.HTML(`x foo y`)
codeB := template.HTML(`x bar y`)
- outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
- assert.Equal(t, `x foo y`, string(outDel))
- outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
- assert.Equal(t, `x bar y`, string(outAdd))
+ hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
+ }
+}
+
+func TestDiffWithHighlight(t *testing.T) {
+ t.Run("DiffLineAddDel", func(t *testing.T) {
+ t.Run("WithDiffTags", func(t *testing.T) {
+ hcd := newHighlightCodeDiff()
+ codeA := template.HTML(`x foo y`)
+ codeB := template.HTML(`x bar y`)
+ outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
+ assert.Equal(t, `x foo y`, string(outDel))
+ outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
+ assert.Equal(t, `x bar y`, string(outAdd))
+ })
+ t.Run("NoRedundantTags", func(t *testing.T) {
+ // the equal parts only contain spaces, in this case, don't use "added/removed" tags
+ // because the diff lines already have a background color to indicate the change
+ hcd := newHighlightCodeDiff()
+ codeA := template.HTML(" \tfoo ")
+ codeB := template.HTML(" bar \n")
+ outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
+ assert.Equal(t, string(codeA), string(outDel))
+ outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
+ assert.Equal(t, string(codeB), string(outAdd))
+ })
})
t.Run("CleanUp", func(t *testing.T) {
hcd := newHighlightCodeDiff()
- codeA := template.HTML(`this is a comment`)
- codeB := template.HTML(`this is updated comment`)
+ codeA := template.HTML(` this is a comment`)
+ codeB := template.HTML(` this is updated comment`)
outDel := hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
- assert.Equal(t, `this is a comment`, string(outDel))
+ assert.Equal(t, ` this is a comment`, string(outDel))
outAdd := hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
- assert.Equal(t, `this is updated comment`, string(outAdd))
+ assert.Equal(t, ` this is updated comment`, string(outAdd))
+
+ codeA = `line1` + "\n" + `line2`
+ codeB = `line1` + "\n" + `line!`
+ outDel = hcd.diffLineWithHighlight(DiffLineDel, codeA, codeB)
+ assert.Equal(t, `line1`+"\n"+`line2`, string(outDel))
+ outAdd = hcd.diffLineWithHighlight(DiffLineAdd, codeA, codeB)
+ assert.Equal(t, `line1`+"\n"+`line!`, string(outAdd))
})
t.Run("OpenCloseTags", func(t *testing.T) {
@@ -40,6 +74,55 @@ func TestDiffWithHighlight(t *testing.T) {
assert.Equal(t, "", string(hcd.recoverOneDiff("O")))
assert.Empty(t, string(hcd.recoverOneDiff("C")))
})
+
+ t.Run("ComplexDiff1", func(t *testing.T) {
+ oldCode, _, _ := highlight.RenderCodeSlowGuess("a.go", "Go", `xxx || yyy`)
+ newCode, _, _ := highlight.RenderCodeSlowGuess("a.go", "Go", `bot&xxx || bot&yyy`)
+ hcd := newHighlightCodeDiff()
+ out := hcd.diffLineWithHighlight(DiffLineAdd, oldCode, newCode)
+ assert.Equal(t, strings.ReplaceAll(`
+bot&
+xxx ||
+bot&
+yyy`, "\n", ""), string(out))
+ })
+
+ forceTokenAsPlaceholder := func(hcd *highlightCodeDiff, r rune, token string) rune {
+ // for testing purpose only
+ hcd.tokenPlaceholderMap[token] = r
+ hcd.placeholderTokenMap[r] = token
+ return r
+ }
+
+ t.Run("ComplexDiff2", func(t *testing.T) {
+		// When running "diffLineWithHighlight", the newly inserted "added-code" and "removed-code" tags may break the original layout.
+ // The newly inserted tags can appear in any position, because the "diff" algorithm can make outputs like:
+ // * Equal:
+ // * Insert: xxyy
+ // * Equal: zz
+		// Then the newly inserted tags will make this output; the tags mismatch.
+ // * xxyy zz
+ // So we need to fix it to:
+ // * xx yy zz
+ hcd := newHighlightCodeDiff()
+ hcd.diffCodeAddedOpen = forceTokenAsPlaceholder(hcd, '[', "")
+ hcd.diffCodeClose = forceTokenAsPlaceholder(hcd, ']', "")
+ forceTokenAsPlaceholder(hcd, '{', "")
+ forceTokenAsPlaceholder(hcd, '}', "")
+ assert.Equal(t, `aaxxyyzzbb`, string(hcd.recoverOneDiff("aa{xx[yy]zz}bb")))
+ assert.Equal(t, `aaxxyyzzbb`, string(hcd.recoverOneDiff("aa[xx{yy}zz]bb")))
+ assert.Equal(t, `aaxxyyzzbb`, string(hcd.recoverOneDiff("aa{xx[yy}zz]bb")))
+ assert.Equal(t, `aaxxyyzzbb`, string(hcd.recoverOneDiff("aa[xx{yy]zz}bb")))
+ assert.Equal(t, `aaxxyyzzbbcc`, string(hcd.recoverOneDiff("aa[xx{yy][zz}bb]cc")))
+
+ // And do a simple test for "diffCodeRemovedOpen", it shares the same logic as "diffCodeAddedOpen"
+ hcd = newHighlightCodeDiff()
+ hcd.diffCodeRemovedOpen = forceTokenAsPlaceholder(hcd, '[', "")
+ hcd.diffCodeClose = forceTokenAsPlaceholder(hcd, ']', "")
+ forceTokenAsPlaceholder(hcd, '{', "")
+ forceTokenAsPlaceholder(hcd, '}', "")
+ assert.Equal(t, `aaxxyyzzbbcc`, string(hcd.recoverOneDiff("aa[xx{yy][zz}bb]cc")))
+ })
}
func TestDiffWithHighlightPlaceholder(t *testing.T) {
@@ -64,6 +147,11 @@ func TestDiffWithHighlightPlaceholderExhausted(t *testing.T) {
assert.Equal(t, placeHolderAmp+"lt;", string(output))
output = hcd.diffLineWithHighlight(DiffLineAdd, `<`, `>`)
assert.Equal(t, placeHolderAmp+"gt;", string(output))
+
+ output = hcd.diffLineWithHighlight(DiffLineDel, `foo`, `bar`)
+ assert.Equal(t, "foo", string(output))
+ output = hcd.diffLineWithHighlight(DiffLineAdd, `foo`, `bar`)
+ assert.Equal(t, "bar", string(output))
}
func TestDiffWithHighlightTagMatch(t *testing.T) {
diff --git a/services/issue/assignee.go b/services/issue/assignee.go
index ba9c91e0ed..ae4b7138ee 100644
--- a/services/issue/assignee.go
+++ b/services/issue/assignee.go
@@ -70,7 +70,7 @@ func ReviewRequest(ctx context.Context, issue *issues_model.Issue, doer *user_mo
}
if isAdd {
- comment, err = issues_model.AddReviewRequest(ctx, issue, reviewer, doer)
+ comment, err = issues_model.AddReviewRequest(ctx, issue, reviewer, doer, false)
} else {
comment, err = issues_model.RemoveReviewRequest(ctx, issue, reviewer, doer)
}
@@ -224,7 +224,7 @@ func TeamReviewRequest(ctx context.Context, issue *issues_model.Issue, doer *use
return nil, err
}
if isAdd {
- comment, err = issues_model.AddTeamReviewRequest(ctx, issue, reviewer, doer)
+ comment, err = issues_model.AddTeamReviewRequest(ctx, issue, reviewer, doer, false)
} else {
comment, err = issues_model.RemoveTeamReviewRequest(ctx, issue, reviewer, doer)
}
@@ -234,7 +234,7 @@ func TeamReviewRequest(ctx context.Context, issue *issues_model.Issue, doer *use
}
if comment == nil || !isAdd {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil because no comment was created or it is a removal
}
return comment, teamReviewRequestNotify(ctx, issue, doer, reviewer, isAdd, comment)
diff --git a/services/issue/commit.go b/services/issue/commit.go
index 963d0359fd..6cc120697a 100644
--- a/services/issue/commit.go
+++ b/services/issue/commit.go
@@ -89,13 +89,31 @@ func issueAddTime(ctx context.Context, issue *issues_model.Issue, doer *user_mod
return err
}
+// isSelfReference checks if a commit is the merge commit of the PR it references.
+// This prevents creating self-referencing timeline entries when a PR merge commit
+// contains a reference to its own PR number in the commit message.
+func isSelfReference(ctx context.Context, issue *issues_model.Issue, commitSHA string) bool {
+ if !issue.IsPull {
+ return false
+ }
+
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ if !issues_model.IsErrPullRequestNotExist(err) {
+ log.Error("LoadPullRequest: %v", err)
+ }
+ return false
+ }
+
+ return issue.PullRequest.MergedCommitID == commitSHA
+}
+
// getIssueFromRef returns the issue referenced by a ref. Returns a nil *Issue
// if the provided ref references a non-existent issue.
func getIssueFromRef(ctx context.Context, repo *repo_model.Repository, index int64) (*issues_model.Issue, error) {
issue, err := issues_model.GetIssueByIndex(ctx, repo.ID, index)
if err != nil {
if issues_model.IsErrIssueNotExist(err) {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the object does not exist
}
return nil, err
}
@@ -158,6 +176,11 @@ func UpdateIssuesCommit(ctx context.Context, doer *user_model.User, repo *repo_m
continue
}
+ // Skip self-references: if this commit is the merge commit of the PR it references
+ if isSelfReference(ctx, refIssue, c.Sha1) {
+ continue
+ }
+
message := fmt.Sprintf(`%s`, html.EscapeString(repo.Link()), html.EscapeString(url.PathEscape(c.Sha1)), html.EscapeString(strings.SplitN(c.Message, "\n", 2)[0]))
if err = CreateRefComment(ctx, doer, refRepo, refIssue, message, c.Sha1); err != nil {
if errors.Is(err, user_model.ErrBlockedUser) {
diff --git a/services/issue/commit_test.go b/services/issue/commit_test.go
index d19702269a..5950762c44 100644
--- a/services/issue/commit_test.go
+++ b/services/issue/commit_test.go
@@ -298,3 +298,59 @@ func TestUpdateIssuesCommit_AnotherRepoNoPermission(t *testing.T) {
unittest.AssertNotExistsBean(t, issueBean, "is_closed=1")
unittest.CheckConsistencyFor(t, &activities_model.Action{})
}
+
+func TestUpdateIssuesCommit_SelfReference(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ // Test that a PR merge commit that references its own PR does not create a self-reference comment
+ // PR #2 (issue_id=2) has merged_commit_id: 1a8823cd1a9549fde083f992f6b9b87a7ab74fb3
+ pushCommits := []*repository.PushCommit{
+ {
+ Sha1: "1a8823cd1a9549fde083f992f6b9b87a7ab74fb3",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "Merge pull request 'issue2' (#2) from branch1 into master",
+ },
+ }
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ selfRefCommentBean := &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "1a8823cd1a9549fde083f992f6b9b87a7ab74fb3",
+ PosterID: user.ID,
+ IssueID: 2,
+ }
+
+ unittest.AssertNotExistsBean(t, selfRefCommentBean)
+ assert.NoError(t, UpdateIssuesCommit(t.Context(), user, repo, pushCommits, repo.DefaultBranch))
+ unittest.AssertNotExistsBean(t, selfRefCommentBean)
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+
+ // Test that normal commit references are still created
+ pushCommits2 := []*repository.PushCommit{
+ {
+ Sha1: "abcdef9876543210",
+ CommitterEmail: "user2@example.com",
+ CommitterName: "User Two",
+ AuthorEmail: "user2@example.com",
+ AuthorName: "User Two",
+ Message: "Fix bug, refs #1",
+ },
+ }
+
+ otherRefCommentBean := &issues_model.Comment{
+ Type: issues_model.CommentTypeCommitRef,
+ CommitSHA: "abcdef9876543210",
+ PosterID: user.ID,
+ IssueID: 1,
+ }
+
+ unittest.AssertNotExistsBean(t, otherRefCommentBean)
+ assert.NoError(t, UpdateIssuesCommit(t.Context(), user, repo, pushCommits2, repo.DefaultBranch))
+ unittest.AssertExistsAndLoadBean(t, otherRefCommentBean)
+ unittest.CheckConsistencyFor(t, &activities_model.Action{})
+}
diff --git a/services/issue/issue.go b/services/issue/issue.go
index 85e70d0761..9beb4c46ec 100644
--- a/services/issue/issue.go
+++ b/services/issue/issue.go
@@ -228,8 +228,8 @@ func AddAssigneeIfNotAssigned(ctx context.Context, issue *issues_model.Issue, do
return nil, err
}
if isAssigned {
- // nothing to to
- return nil, nil
+ // nothing to do
+ return nil, nil //nolint:nilnil // return nil because the user is already assigned
}
valid, err := access_model.CanBeAssigned(ctx, assignee, issue.Repo, issue.IsPull)
diff --git a/services/issue/pull.go b/services/issue/pull.go
index 8ee14c0a4b..2fcf3860d0 100644
--- a/services/issue/pull.go
+++ b/services/issue/pull.go
@@ -7,35 +7,16 @@ import (
"context"
"fmt"
"slices"
- "time"
issues_model "code.gitea.io/gitea/models/issues"
org_model "code.gitea.io/gitea/models/organization"
- repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
- "code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)
-func getMergeBase(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, pr *issues_model.PullRequest, baseBranch, headBranch string) (string, error) {
- // Add a temporary remote
- tmpRemote := fmt.Sprintf("mergebase-%d-%d", pr.ID, time.Now().UnixNano())
- if err := gitrepo.GitRemoteAdd(ctx, repo, tmpRemote, gitRepo.Path); err != nil {
- return "", fmt.Errorf("GitRemoteAdd: %w", err)
- }
- defer func() {
- if err := gitrepo.GitRemoteRemove(graceful.GetManager().ShutdownContext(), repo, tmpRemote); err != nil {
- log.Error("getMergeBase: GitRemoteRemove: %v", err)
- }
- }()
-
- mergeBase, _, err := gitRepo.GetMergeBase(tmpRemote, baseBranch, headBranch)
- return mergeBase, err
-}
-
type ReviewRequestNotifier struct {
Comment *issues_model.Comment
IsAdd bool
@@ -99,11 +80,10 @@ func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullReque
}
// get the mergebase
- mergeBase, err := getMergeBase(ctx, pr.BaseRepo, repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitHeadRefName())
+ mergeBase, err := gitrepo.MergeBase(ctx, pr.BaseRepo, git.BranchPrefix+pr.BaseBranch, pr.GetGitHeadRefName())
if err != nil {
return nil, err
}
-
// https://github.com/go-gitea/gitea/issues/29763, we need to get the files changed
// between the merge base and the head commit but not the base branch and the head commit
changedFiles, err := repo.GetFilesChangedBetween(mergeBase, pr.GetGitHeadRefName())
@@ -149,7 +129,7 @@ func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullReque
for _, u := range uniqUsers {
if u.ID != issue.Poster.ID && !contain(latestReivews, u) {
- comment, err := issues_model.AddReviewRequest(ctx, issue, u, issue.Poster)
+ comment, err := issues_model.AddReviewRequest(ctx, issue, u, issue.Poster, true)
if err != nil {
log.Warn("Failed add assignee user: %s to PR review: %s#%d, error: %s", u.Name, pr.BaseRepo.Name, pr.ID, err)
return nil, err
@@ -166,7 +146,7 @@ func PullRequestCodeOwnersReview(ctx context.Context, pr *issues_model.PullReque
}
for _, t := range uniqTeams {
- comment, err := issues_model.AddTeamReviewRequest(ctx, issue, t, issue.Poster)
+ comment, err := issues_model.AddTeamReviewRequest(ctx, issue, t, issue.Poster, true)
if err != nil {
log.Warn("Failed add assignee team: %s to PR review: %s#%d, error: %s", t.Name, pr.BaseRepo.Name, pr.ID, err)
return nil, err
diff --git a/services/lfs/locks.go b/services/lfs/locks.go
index 5bc3f6b95a..c2279edaf0 100644
--- a/services/lfs/locks.go
+++ b/services/lfs/locks.go
@@ -90,7 +90,7 @@ func GetListLockHandler(ctx *context.Context) {
})
return
}
- lock, err := git_model.GetLFSLockByID(ctx, v)
+ lock, err := git_model.GetLFSLockByIDAndRepo(ctx, v, repository.ID)
if err != nil && !git_model.IsErrLFSLockNotExist(err) {
log.Error("Unable to get lock with ID[%s]: Error: %v", v, err)
}
diff --git a/services/lfs/server.go b/services/lfs/server.go
index 81991de434..10b4dba222 100644
--- a/services/lfs/server.go
+++ b/services/lfs/server.go
@@ -11,10 +11,8 @@ import (
"errors"
"fmt"
"io"
- "maps"
"net/http"
"net/url"
- "path"
"regexp"
"strconv"
"strings"
@@ -28,6 +26,7 @@ import (
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/auth/httpauth"
+ "code.gitea.io/gitea/modules/httplib"
"code.gitea.io/gitea/modules/json"
lfs_module "code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
@@ -44,6 +43,7 @@ type requestContext struct {
Repo string
Authorization string
Method string
+ RepoGitURL string
}
// Claims is a JWT Token Claims
@@ -83,17 +83,17 @@ func GetLFSAuthTokenWithBearer(opts AuthTokenOptions) (string, error) {
// DownloadLink builds a URL to download the object.
func (rc *requestContext) DownloadLink(p lfs_module.Pointer) string {
- return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/objects", url.PathEscape(p.Oid))
+ return rc.RepoGitURL + "/info/lfs/objects/" + url.PathEscape(p.Oid)
}
// UploadLink builds a URL to upload the object.
func (rc *requestContext) UploadLink(p lfs_module.Pointer) string {
- return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/objects", url.PathEscape(p.Oid), strconv.FormatInt(p.Size, 10))
+ return rc.RepoGitURL + "/info/lfs/objects/" + url.PathEscape(p.Oid) + "/" + strconv.FormatInt(p.Size, 10)
}
// VerifyLink builds a URL for verifying the object.
func (rc *requestContext) VerifyLink(p lfs_module.Pointer) string {
- return setting.AppURL + path.Join(url.PathEscape(rc.User), url.PathEscape(rc.Repo+".git"), "info/lfs/verify")
+ return rc.RepoGitURL + "/info/lfs/verify"
}
// CheckAcceptMediaType checks if the client accepts the LFS media type.
@@ -421,11 +421,14 @@ func decodeJSON(req *http.Request, v any) error {
}
func getRequestContext(ctx *context.Context) *requestContext {
+ ownerName := ctx.PathParam("username")
+ repoName := strings.TrimSuffix(ctx.PathParam("reponame"), ".git")
return &requestContext{
- User: ctx.PathParam("username"),
- Repo: strings.TrimSuffix(ctx.PathParam("reponame"), ".git"),
+ User: ownerName,
+ Repo: repoName,
Authorization: ctx.Req.Header.Get("Authorization"),
Method: ctx.Req.Method,
+ RepoGitURL: httplib.GuessCurrentAppURL(ctx) + url.PathEscape(ownerName) + "/" + url.PathEscape(repoName+".git"),
}
}
@@ -483,40 +486,32 @@ func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, downloa
rep.Error = err
} else {
rep.Actions = make(map[string]*lfs_module.Link)
-
- header := make(map[string]string)
-
- if len(rc.Authorization) > 0 {
- header["Authorization"] = rc.Authorization
- }
-
if download {
var link *lfs_module.Link
if setting.LFS.Storage.ServeDirect() {
// If we have a signed url (S3, object storage), redirect to this directly.
u, err := storage.LFS.URL(pointer.RelativePath(), pointer.Oid, rc.Method, nil)
if u != nil && err == nil {
- // Presigned url does not need the Authorization header
- // https://github.com/go-gitea/gitea/issues/21525
- delete(header, "Authorization")
- link = &lfs_module.Link{Href: u.String(), Header: header}
+ link = lfs_module.NewLink(u.String()) // Presigned url does not need the Authorization header
}
}
if link == nil {
- link = &lfs_module.Link{Href: rc.DownloadLink(pointer), Header: header}
+ link = lfs_module.NewLink(rc.DownloadLink(pointer)).WithHeader("Authorization", rc.Authorization)
}
rep.Actions["download"] = link
}
if upload {
- rep.Actions["upload"] = &lfs_module.Link{Href: rc.UploadLink(pointer), Header: header}
+ // Set Transfer-Encoding header to enable chunked uploads. Required by git-lfs client to do chunked transfer.
+ // See: https://github.com/git-lfs/git-lfs/blob/main/tq/basic_upload.go#L58-59
+ rep.Actions["upload"] = lfs_module.NewLink(rc.UploadLink(pointer)).
+ WithHeader("Authorization", rc.Authorization).
+ WithHeader("Transfer-Encoding", "chunked")
- verifyHeader := make(map[string]string)
- maps.Copy(verifyHeader, header)
-
- // This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662
- verifyHeader["Accept"] = lfs_module.AcceptHeader
-
- rep.Actions["verify"] = &lfs_module.Link{Href: rc.VerifyLink(pointer), Header: verifyHeader}
+ // "Accept" header is the workaround for git-lfs < 2.8.0 (before 2019).
+ // This workaround could be removed in the future: https://github.com/git-lfs/git-lfs/issues/3662
+ rep.Actions["verify"] = lfs_module.NewLink(rc.VerifyLink(pointer)).
+ WithHeader("Authorization", rc.Authorization).
+ WithHeader("Accept", lfs_module.AcceptHeader)
}
}
return rep
@@ -546,8 +541,7 @@ func authenticate(ctx *context.Context, repository *repo_model.Repository, autho
accessMode = perm_model.AccessModeWrite
}
- if ctx.Data["IsActionsToken"] == true {
- taskID := ctx.Data["ActionsTaskID"].(int64)
+ if taskID, ok := user_model.GetActionsUserTaskID(ctx.Doer); ok {
perm, err := access_model.GetActionsUserRepoPermission(ctx, repository, ctx.Doer, taskID)
if err != nil {
log.Error("Unable to GetActionsUserRepoPermission for task[%d] Error: %v", taskID, err)
diff --git a/services/mailer/mail.go b/services/mailer/mail.go
index d81b6d10af..8f831f89ad 100644
--- a/services/mailer/mail.go
+++ b/services/mailer/mail.go
@@ -15,7 +15,6 @@ import (
"mime"
"regexp"
"strings"
- "sync/atomic"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
@@ -32,12 +31,10 @@ import (
const mailMaxSubjectRunes = 256 // There's no actual limit for subject in RFC 5322
-var loadedTemplates atomic.Pointer[templates.MailTemplates]
-
var subjectRemoveSpaces = regexp.MustCompile(`[\s]+`)
-func LoadedTemplates() *templates.MailTemplates {
- return loadedTemplates.Load()
+func LoadedTemplates() *templates.MailRender {
+ return templates.MailRenderer()
}
// SendTestMail sends a test mail
diff --git a/services/mailer/mail_issue_common.go b/services/mailer/mail_issue_common.go
index d65e310288..994df6707a 100644
--- a/services/mailer/mail_issue_common.go
+++ b/services/mailer/mail_issue_common.go
@@ -21,6 +21,7 @@ import (
"code.gitea.io/gitea/modules/markup/markdown"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/util"
incoming_payload "code.gitea.io/gitea/services/mailer/incoming/payload"
sender_service "code.gitea.io/gitea/services/mailer/sender"
"code.gitea.io/gitea/services/mailer/token"
@@ -122,9 +123,7 @@ func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang
var mailSubject bytes.Buffer
if err := LoadedTemplates().SubjectTemplates.ExecuteTemplate(&mailSubject, tplName, mailMeta); err == nil {
subject = sanitizeSubject(mailSubject.String())
- if subject == "" {
- subject = fallback
- }
+ subject = util.IfZero(subject, fallback)
} else {
log.Error("ExecuteTemplate [%s]: %v", tplName+"/subject", err)
}
@@ -203,7 +202,7 @@ func composeIssueCommentMessages(ctx context.Context, comment *mailComment, lang
msg.SetHeader("References", references...)
msg.SetHeader("List-Unsubscribe", listUnsubscribe...)
- for key, value := range generateAdditionalHeadersForIssue(comment, actType, recipient) {
+ for key, value := range generateAdditionalHeadersForIssue(ctx, comment, actType, recipient) {
msg.SetHeader(key, value)
}
@@ -261,14 +260,14 @@ func actionToTemplate(issue *issues_model.Issue, actionType activities_model.Act
}
template = "repo/" + typeName + "/" + name
- ok := LoadedTemplates().BodyTemplates.Lookup(template) != nil
+ ok := LoadedTemplates().BodyTemplates.HasTemplate(template)
if !ok && typeName != "issue" {
template = "repo/issue/" + name
- ok = LoadedTemplates().BodyTemplates.Lookup(template) != nil
+ ok = LoadedTemplates().BodyTemplates.HasTemplate(template)
}
if !ok {
template = "repo/" + typeName + "/default"
- ok = LoadedTemplates().BodyTemplates.Lookup(template) != nil
+ ok = LoadedTemplates().BodyTemplates.HasTemplate(template)
}
if !ok {
template = "repo/issue/default"
@@ -303,17 +302,17 @@ func generateMessageIDForIssue(issue *issues_model.Issue, comment *issues_model.
return fmt.Sprintf("<%s/%s/%d%s@%s>", issue.Repo.FullName(), path, issue.Index, extra, setting.Domain)
}
-func generateAdditionalHeadersForIssue(ctx *mailComment, reason string, recipient *user_model.User) map[string]string {
- repo := ctx.Issue.Repo
+func generateAdditionalHeadersForIssue(ctx context.Context, comment *mailComment, reason string, recipient *user_model.User) map[string]string {
+ repo := comment.Issue.Repo
- issueID := strconv.FormatInt(ctx.Issue.Index, 10)
+ issueID := strconv.FormatInt(comment.Issue.Index, 10)
headers := generateMetadataHeaders(repo)
- maps.Copy(headers, generateSenderRecipientHeaders(ctx.Doer, recipient))
+ maps.Copy(headers, generateSenderRecipientHeaders(comment.Doer, recipient))
maps.Copy(headers, generateReasonHeaders(reason))
headers["X-Gitea-Issue-ID"] = issueID
- headers["X-Gitea-Issue-Link"] = ctx.Issue.HTMLURL(context.TODO()) // FIXME: use proper context
+ headers["X-Gitea-Issue-Link"] = comment.Issue.HTMLURL(ctx)
headers["X-GitLab-Issue-IID"] = issueID
return headers
diff --git a/services/mailer/mail_release.go b/services/mailer/mail_release.go
index 248cf0ab90..1f940f33df 100644
--- a/services/mailer/mail_release.go
+++ b/services/mailer/mail_release.go
@@ -7,9 +7,12 @@ import (
"bytes"
"context"
"fmt"
+ "slices"
+ access_model "code.gitea.io/gitea/models/perm/access"
"code.gitea.io/gitea/models/renderhelper"
repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup/markdown"
@@ -44,6 +47,16 @@ func MailNewRelease(ctx context.Context, rel *repo_model.Release) {
return
}
+ if err := rel.LoadRepo(ctx); err != nil {
+ log.Error("rel.LoadRepo: %v", err)
+ return
+ }
+
+ // delete publisher or any users with no permission
+ recipients = slices.DeleteFunc(recipients, func(u *user_model.User) bool {
+ return u.ID == rel.PublisherID || !access_model.CheckRepoUnitUser(ctx, rel.Repo, u, unit.TypeReleases)
+ })
+
langMap := make(map[string][]*user_model.User)
for _, user := range recipients {
if user.ID != rel.PublisherID {
diff --git a/services/mailer/mail_release_test.go b/services/mailer/mail_release_test.go
new file mode 100644
index 0000000000..6fc8587f98
--- /dev/null
+++ b/services/mailer/mail_release_test.go
@@ -0,0 +1,64 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mailer
+
+import (
+ "testing"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+ sender_service "code.gitea.io/gitea/services/mailer/sender"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMailNewReleaseFiltersUnauthorizedWatchers(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ defer test.MockVariableValue(&setting.MailService)()
+ defer test.MockVariableValue(&setting.Domain)()
+ defer test.MockVariableValue(&setting.AppName)()
+ defer test.MockVariableValue(&setting.AppURL)()
+
+ setting.MailService = &setting.Mailer{
+ From: "Gitea",
+ FromEmail: "noreply@example.com",
+ }
+ setting.Domain = "example.com"
+ setting.AppName = "Gitea"
+ setting.AppURL = "https://example.com/"
+ defer mockMailTemplates(string(tplNewReleaseMail), "{{.Subject}}", "{{.Release.TagName}}
")()
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ require.True(t, repo.IsPrivate)
+
+ admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ unauthorized := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
+
+ assert.NoError(t, repo_model.WatchRepo(t.Context(), admin, repo, true))
+ assert.NoError(t, repo_model.WatchRepo(t.Context(), unauthorized, repo, true))
+
+ rel := unittest.AssertExistsAndLoadBean(t, &repo_model.Release{ID: 11})
+ rel.Repo = nil
+ rel.Publisher = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: rel.PublisherID})
+
+ var sent []*sender_service.Message
+ origSend := SendAsync
+ SendAsync = func(msgs ...*sender_service.Message) {
+ sent = append(sent, msgs...)
+ }
+ defer func() {
+ SendAsync = origSend
+ }()
+
+ MailNewRelease(t.Context(), rel)
+
+ require.Len(t, sent, 1)
+ assert.Equal(t, admin.EmailTo(), sent[0].To)
+ assert.NotEqual(t, unauthorized.EmailTo(), sent[0].To)
+}
diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go
index 8a4a589357..caa072725a 100644
--- a/services/mailer/mail_test.go
+++ b/services/mailer/mail_test.go
@@ -96,11 +96,8 @@ func prepareMailerBase64Test(t *testing.T) (doer *user_model.User, repo *repo_mo
return user, repo, issue, att1, att2
}
-func prepareMailTemplates(name, subjectTmpl, bodyTmpl string) {
- loadedTemplates.Store(&templates.MailTemplates{
- SubjectTemplates: texttmpl.Must(texttmpl.New(name).Parse(subjectTmpl)),
- BodyTemplates: template.Must(template.New(name).Parse(bodyTmpl)),
- })
+func mockMailTemplates(name, subjectTmpl, bodyTmpl string) func() {
+ return templates.MailRenderer().MockTemplate(name, subjectTmpl, bodyTmpl)
}
func TestComposeIssueComment(t *testing.T) {
@@ -112,10 +109,8 @@ func TestComposeIssueComment(t *testing.T) {
},
})
- setting.IncomingEmail.Enabled = true
- defer func() { setting.IncomingEmail.Enabled = false }()
-
- prepareMailTemplates("repo/issue/comment", subjectTpl, bodyTpl)
+ defer test.MockVariableValue(&setting.IncomingEmail.Enabled, true)()
+ defer mockMailTemplates("repo/issue/comment", subjectTpl, bodyTpl)()
recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}}
msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{
@@ -160,7 +155,7 @@ func TestComposeIssueComment(t *testing.T) {
func TestMailMentionsComment(t *testing.T) {
doer, _, issue, comment := prepareMailerTest(t)
comment.Poster = doer
- prepareMailTemplates("repo/issue/comment", subjectTpl, bodyTpl)
+ defer mockMailTemplates("repo/issue/comment", subjectTpl, bodyTpl)()
mails := 0
defer test.MockVariableValue(&SendAsync, func(msgs ...*sender_service.Message) {
@@ -175,7 +170,7 @@ func TestMailMentionsComment(t *testing.T) {
func TestComposeIssueMessage(t *testing.T) {
doer, _, issue, _ := prepareMailerTest(t)
- prepareMailTemplates("repo/issue/new", subjectTpl, bodyTpl)
+ defer mockMailTemplates("repo/issue/new", subjectTpl, bodyTpl)()
recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}}
msgs, err := composeIssueCommentMessages(t.Context(), &mailComment{
Issue: issue, Doer: doer, ActionType: activities_model.ActionCreateIssue,
@@ -204,14 +199,10 @@ func TestTemplateSelection(t *testing.T) {
doer, repo, issue, comment := prepareMailerTest(t)
recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}}
- prepareMailTemplates("repo/issue/default", "repo/issue/default/subject", "repo/issue/default/body")
-
- texttmpl.Must(LoadedTemplates().SubjectTemplates.New("repo/issue/new").Parse("repo/issue/new/subject"))
- texttmpl.Must(LoadedTemplates().SubjectTemplates.New("repo/pull/comment").Parse("repo/pull/comment/subject"))
- texttmpl.Must(LoadedTemplates().SubjectTemplates.New("repo/issue/close").Parse("")) // Must default to a fallback subject
- template.Must(LoadedTemplates().BodyTemplates.New("repo/issue/new").Parse("repo/issue/new/body"))
- template.Must(LoadedTemplates().BodyTemplates.New("repo/pull/comment").Parse("repo/pull/comment/body"))
- template.Must(LoadedTemplates().BodyTemplates.New("repo/issue/close").Parse("repo/issue/close/body"))
+ defer mockMailTemplates("repo/issue/default", "repo/issue/default/subject", "repo/issue/default/body")()
+ defer mockMailTemplates("repo/issue/new", "repo/issue/new/subject", "repo/issue/new/body")()
+ defer mockMailTemplates("repo/pull/comment", "repo/pull/comment/subject", "repo/pull/comment/body")()
+ defer mockMailTemplates("repo/issue/close", "", "repo/issue/close/body")() // Must default to a fallback subject
expect := func(t *testing.T, msg *sender_service.Message, expSubject, expBody string) {
subject := msg.ToMessage().GetGenHeader("Subject")
@@ -256,7 +247,7 @@ func TestTemplateServices(t *testing.T) {
expect := func(t *testing.T, issue *issues_model.Issue, comment *issues_model.Comment, doer *user_model.User,
actionType activities_model.ActionType, fromMention bool, tplSubject, tplBody, expSubject, expBody string,
) {
- prepareMailTemplates("repo/issue/default", tplSubject, tplBody)
+ defer mockMailTemplates("repo/issue/default", tplSubject, tplBody)()
recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}}
msg := testComposeIssueCommentMessage(t, &mailComment{
Issue: issue, Doer: doer, ActionType: actionType,
@@ -304,7 +295,7 @@ func TestGenerateAdditionalHeadersForIssue(t *testing.T) {
comment := &mailComment{Issue: issue, Doer: doer}
recipient := &user_model.User{Name: "test", Email: "test@gitea.com"}
- headers := generateAdditionalHeadersForIssue(comment, "dummy-reason", recipient)
+ headers := generateAdditionalHeadersForIssue(t.Context(), comment, "dummy-reason", recipient)
expected := map[string]string{
"List-ID": "user2/repo1 ",
@@ -523,7 +514,7 @@ func TestEmbedBase64Images(t *testing.T) {
att2ImgBase64 := fmt.Sprintf(`
`, att2Base64)
t.Run("ComposeMessage", func(t *testing.T) {
- prepareMailTemplates("repo/issue/new", subjectTpl, bodyTpl)
+ defer mockMailTemplates("repo/issue/new", subjectTpl, bodyTpl)()
issue.Content = fmt.Sprintf(`MSG-BEFORE MSG-AFTER`, att1.UUID)
require.NoError(t, issues_model.UpdateIssueCols(t.Context(), issue, "content"))
diff --git a/services/mailer/mail_workflow_run.go b/services/mailer/mail_workflow_run.go
index 3789102812..9efaa4182b 100644
--- a/services/mailer/mail_workflow_run.go
+++ b/services/mailer/mail_workflow_run.go
@@ -149,30 +149,31 @@ func composeAndSendActionsWorkflowRunStatusEmail(ctx context.Context, repo *repo
return nil
}
-func MailActionsTrigger(ctx context.Context, sender *user_model.User, repo *repo_model.Repository, run *actions_model.ActionRun) error {
+func MailActionsTrigger(ctx context.Context, recipient *user_model.User, repo *repo_model.Repository, run *actions_model.ActionRun) error {
if setting.MailService == nil {
return nil
}
if !run.Status.IsDone() || run.Status.IsSkipped() {
return nil
}
-
- recipients := make([]*user_model.User, 0)
-
- if !sender.IsGiteaActions() && !sender.IsGhost() && sender.IsMailable() {
- notifyPref, err := user_model.GetUserSetting(ctx, sender.ID,
- user_model.SettingsKeyEmailNotificationGiteaActions, user_model.SettingEmailNotificationGiteaActionsFailureOnly)
- if err != nil {
- return err
- }
- if notifyPref == user_model.SettingEmailNotificationGiteaActionsAll || !run.Status.IsSuccess() && notifyPref != user_model.SettingEmailNotificationGiteaActionsDisabled {
- recipients = append(recipients, sender)
- }
+ if !recipient.IsMailable() {
+ return nil
}
- if len(recipients) > 0 {
- log.Debug("MailActionsTrigger: Initiate email composition")
- return composeAndSendActionsWorkflowRunStatusEmail(ctx, repo, run, sender, recipients)
+ notifyPref, err := user_model.GetUserSetting(ctx, recipient.ID,
+ user_model.SettingsKeyEmailNotificationGiteaActions, user_model.SettingEmailNotificationGiteaActionsFailureOnly)
+ if err != nil {
+ return err
}
- return nil
+ // "disabled" never sends
+ if notifyPref == user_model.SettingEmailNotificationGiteaActionsDisabled {
+ return nil
+ }
+ // "failure-only" skips non-failure runs
+ if notifyPref != user_model.SettingEmailNotificationGiteaActionsAll && !run.Status.IsFailure() {
+ return nil
+ }
+
+ log.Debug("MailActionsTrigger: Initiate email composition")
+ return composeAndSendActionsWorkflowRunStatusEmail(ctx, repo, run, recipient, []*user_model.User{recipient})
}
diff --git a/services/mailer/mailer.go b/services/mailer/mailer.go
index db00aac4f1..05dd5d8588 100644
--- a/services/mailer/mailer.go
+++ b/services/mailer/mailer.go
@@ -43,7 +43,7 @@ func NewContext(ctx context.Context) {
sender = &sender_service.SMTPSender{}
}
- templates.LoadMailTemplates(ctx, &loadedTemplates)
+ _ = templates.MailRenderer()
mailQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "mail", func(items ...*sender_service.Message) []*sender_service.Message {
for _, msg := range items {
diff --git a/services/markup/renderhelper_codepreview_test.go b/services/markup/renderhelper_codepreview_test.go
index 6665f0d009..c84845e7ea 100644
--- a/services/markup/renderhelper_codepreview_test.go
+++ b/services/markup/renderhelper_codepreview_test.go
@@ -18,7 +18,7 @@ import (
func TestRenderHelperCodePreview(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
- ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()})
htm, err := renderRepoFileCodePreview(ctx, markup.RenderCodePreviewOptions{
FullURL: "http://full",
OwnerName: "user2",
@@ -46,7 +46,7 @@ func TestRenderHelperCodePreview(t *testing.T) {
`, string(htm))
- ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()})
htm, err = renderRepoFileCodePreview(ctx, markup.RenderCodePreviewOptions{
FullURL: "http://full",
OwnerName: "user2",
@@ -70,7 +70,7 @@ func TestRenderHelperCodePreview(t *testing.T) {
`, string(htm))
- ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()})
_, err = renderRepoFileCodePreview(ctx, markup.RenderCodePreviewOptions{
FullURL: "http://full",
OwnerName: "user15",
diff --git a/services/markup/renderhelper_issueicontitle_test.go b/services/markup/renderhelper_issueicontitle_test.go
index adce8401e0..25907f4b77 100644
--- a/services/markup/renderhelper_issueicontitle_test.go
+++ b/services/markup/renderhelper_issueicontitle_test.go
@@ -19,7 +19,7 @@ import (
func TestRenderHelperIssueIconTitle(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
- ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()})
ctx.Repo.Repository = unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 1})
htm, err := renderRepoIssueIconTitle(ctx, markup.RenderIssueIconTitleOptions{
LinkHref: "/link",
@@ -28,7 +28,7 @@ func TestRenderHelperIssueIconTitle(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, `octicon-issue-opened(16/text green) issue1 (#1)`, string(htm))
- ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()})
htm, err = renderRepoIssueIconTitle(ctx, markup.RenderIssueIconTitleOptions{
OwnerName: "user2",
RepoName: "repo1",
@@ -38,7 +38,7 @@ func TestRenderHelperIssueIconTitle(t *testing.T) {
assert.NoError(t, err)
assert.Equal(t, `octicon-issue-opened(16/text green) issue1 (user2/repo1#1)`, string(htm))
- ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ ctx, _ = contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.PageRenderer()})
_, err = renderRepoIssueIconTitle(ctx, markup.RenderIssueIconTitleOptions{
OwnerName: "user2",
RepoName: "repo2",
diff --git a/services/migrations/codebase_test.go b/services/migrations/codebase_test.go
index 6cd52e5e59..dabe7e1ac9 100644
--- a/services/migrations/codebase_test.go
+++ b/services/migrations/codebase_test.go
@@ -54,12 +54,12 @@ func TestCodebaseDownloadRepo(t *testing.T) {
assertMilestonesEqual(t, []*base.Milestone{
{
Title: "Milestone1",
- Deadline: timePtr(time.Date(2021, time.September, 16, 0, 0, 0, 0, time.UTC)),
+ Deadline: new(time.Date(2021, time.September, 16, 0, 0, 0, 0, time.UTC)),
},
{
Title: "Milestone2",
- Deadline: timePtr(time.Date(2021, time.September, 17, 0, 0, 0, 0, time.UTC)),
- Closed: timePtr(time.Date(2021, time.September, 17, 0, 0, 0, 0, time.UTC)),
+ Deadline: new(time.Date(2021, time.September, 17, 0, 0, 0, 0, time.UTC)),
+ Closed: new(time.Date(2021, time.September, 17, 0, 0, 0, 0, time.UTC)),
State: "closed",
},
}, milestones)
diff --git a/services/migrations/codecommit.go b/services/migrations/codecommit.go
index d08b2e6d4a..188280273f 100644
--- a/services/migrations/codecommit.go
+++ b/services/migrations/codecommit.go
@@ -14,7 +14,6 @@ import (
"code.gitea.io/gitea/modules/log"
base "code.gitea.io/gitea/modules/migration"
"code.gitea.io/gitea/modules/structs"
- "code.gitea.io/gitea/modules/util"
"github.com/aws/aws-sdk-go-v2/credentials"
"github.com/aws/aws-sdk-go-v2/service/codecommit"
@@ -87,7 +86,7 @@ type CodeCommitDownloader struct {
// GetRepoInfo returns a repository information
func (c *CodeCommitDownloader) GetRepoInfo(ctx context.Context) (*base.Repository, error) {
output, err := c.codeCommitClient.GetRepository(ctx, &codecommit.GetRepositoryInput{
- RepositoryName: util.ToPointer(c.repoName),
+ RepositoryName: new(c.repoName),
})
if err != nil {
return nil, err
@@ -119,7 +118,7 @@ func (c *CodeCommitDownloader) GetComments(ctx context.Context, commentable base
for {
resp, err := c.codeCommitClient.GetCommentsForPullRequest(ctx, &codecommit.GetCommentsForPullRequestInput{
NextToken: nextToken,
- PullRequestId: util.ToPointer(strconv.FormatInt(commentable.GetForeignIndex(), 10)),
+ PullRequestId: new(strconv.FormatInt(commentable.GetForeignIndex(), 10)),
})
if err != nil {
return nil, false, err
@@ -161,7 +160,7 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag
prs := make([]*base.PullRequest, 0, len(batch))
for _, id := range batch {
output, err := c.codeCommitClient.GetPullRequest(ctx, &codecommit.GetPullRequestInput{
- PullRequestId: util.ToPointer(id),
+ PullRequestId: new(id),
})
if err != nil {
return nil, false, err
@@ -241,7 +240,7 @@ func (c *CodeCommitDownloader) getAllPullRequestIDs(ctx context.Context) ([]stri
for {
output, err := c.codeCommitClient.ListPullRequests(ctx, &codecommit.ListPullRequestsInput{
- RepositoryName: util.ToPointer(c.repoName),
+ RepositoryName: new(c.repoName),
NextToken: nextToken,
})
if err != nil {
diff --git a/services/migrations/gitea_downloader.go b/services/migrations/gitea_downloader.go
index 5d48d2f003..242873e551 100644
--- a/services/migrations/gitea_downloader.go
+++ b/services/migrations/gitea_downloader.go
@@ -345,25 +345,43 @@ func (g *GiteaDownloader) GetReleases(ctx context.Context) ([]*base.Release, err
return releases, nil
}
-func (g *GiteaDownloader) getIssueReactions(index int64) ([]*base.Reaction, error) {
- var reactions []*base.Reaction
+func (g *GiteaDownloader) getIssueReactions(ctx context.Context, index int64) ([]*base.Reaction, error) {
if err := g.client.CheckServerVersionConstraint(">=1.11"); err != nil {
log.Info("GiteaDownloader: instance to old, skip getIssueReactions")
- return reactions, nil
- }
- rl, _, err := g.client.GetIssueReactions(g.repoOwner, g.repoName, index)
- if err != nil {
- return nil, err
+ return nil, nil
}
- for _, reaction := range rl {
- reactions = append(reactions, &base.Reaction{
- UserID: reaction.User.ID,
- UserName: reaction.User.UserName,
- Content: reaction.Reaction,
- })
+ allReactions := make([]*base.Reaction, 0, g.maxPerPage)
+
+ for i := 1; ; i++ {
+ // make sure gitea can shutdown gracefully
+ select {
+ case <-ctx.Done():
+ return nil, nil
+ default:
+ }
+
+ reactions, _, err := g.client.ListIssueReactions(g.repoOwner, g.repoName, index, gitea_sdk.ListIssueReactionsOptions{ListOptions: gitea_sdk.ListOptions{
+ PageSize: g.maxPerPage,
+ Page: i,
+ }})
+ if err != nil {
+ return nil, err
+ }
+
+ for _, reaction := range reactions {
+ allReactions = append(allReactions, &base.Reaction{
+ UserID: reaction.User.ID,
+ UserName: reaction.User.UserName,
+ Content: reaction.Reaction,
+ })
+ }
+
+ if !g.pagination || len(reactions) < g.maxPerPage {
+ break
+ }
}
- return reactions, nil
+ return allReactions, nil
}
func (g *GiteaDownloader) getCommentReactions(commentID int64) ([]*base.Reaction, error) {
@@ -388,7 +406,7 @@ func (g *GiteaDownloader) getCommentReactions(commentID int64) ([]*base.Reaction
}
// GetIssues returns issues according start and limit
-func (g *GiteaDownloader) GetIssues(_ context.Context, page, perPage int) ([]*base.Issue, bool, error) {
+func (g *GiteaDownloader) GetIssues(ctx context.Context, page, perPage int) ([]*base.Issue, bool, error) {
if perPage > g.maxPerPage {
perPage = g.maxPerPage
}
@@ -413,7 +431,7 @@ func (g *GiteaDownloader) GetIssues(_ context.Context, page, perPage int) ([]*ba
milestone = issue.Milestone.Title
}
- reactions, err := g.getIssueReactions(issue.Index)
+ reactions, err := g.getIssueReactions(ctx, issue.Index)
if err != nil {
WarnAndNotice("Unable to load reactions during migrating issue #%d in %s. Error: %v", issue.Index, g, err)
}
@@ -497,7 +515,7 @@ func (g *GiteaDownloader) GetComments(ctx context.Context, commentable base.Comm
}
// GetPullRequests returns pull requests according page and perPage
-func (g *GiteaDownloader) GetPullRequests(_ context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
+func (g *GiteaDownloader) GetPullRequests(ctx context.Context, page, perPage int) ([]*base.PullRequest, bool, error) {
if perPage > g.maxPerPage {
perPage = g.maxPerPage
}
@@ -546,7 +564,7 @@ func (g *GiteaDownloader) GetPullRequests(_ context.Context, page, perPage int)
mergeCommitSHA = *pr.MergedCommitID
}
- reactions, err := g.getIssueReactions(pr.Index)
+ reactions, err := g.getIssueReactions(ctx, pr.Index)
if err != nil {
WarnAndNotice("Unable to load reactions during migrating pull #%d in %s. Error: %v", pr.Index, g, err)
}
diff --git a/services/migrations/gitea_downloader_test.go b/services/migrations/gitea_downloader_test.go
index fb985ee9d5..cf727b44c7 100644
--- a/services/migrations/gitea_downloader_test.go
+++ b/services/migrations/gitea_downloader_test.go
@@ -17,10 +17,10 @@ import (
)
func TestGiteaDownloadRepo(t *testing.T) {
- // Skip tests if Gitea token is not found
- giteaToken := os.Getenv("GITEA_TOKEN")
+ // Skip tests if Gitea token is not found (TODO: this test seems stopped for long time because there is no token in CI secrets)
+ giteaToken := os.Getenv("GITEA_TEST_OFFICIAL_SITE_TOKEN")
if giteaToken == "" {
- t.Skip("skipped test because GITEA_TOKEN was not in the environment")
+ t.Skip("skipped test because GITEA_TEST_OFFICIAL_SITE_TOKEN was not in the environment")
}
resp, err := http.Get("https://gitea.com/gitea")
@@ -86,16 +86,16 @@ func TestGiteaDownloadRepo(t *testing.T) {
{
Title: "V2 Finalize",
Created: time.Unix(0, 0),
- Deadline: timePtr(time.Unix(1599263999, 0)),
- Updated: timePtr(time.Unix(0, 0)),
+ Deadline: new(time.Unix(1599263999, 0)),
+ Updated: new(time.Unix(0, 0)),
State: "open",
},
{
Title: "V1",
Description: "Generate Content",
Created: time.Unix(0, 0),
- Updated: timePtr(time.Unix(0, 0)),
- Closed: timePtr(time.Unix(1598985406, 0)),
+ Updated: new(time.Unix(0, 0)),
+ Closed: new(time.Unix(1598985406, 0)),
State: "closed",
},
}, milestones)
@@ -171,7 +171,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
Content: "laugh",
},
},
- Closed: timePtr(time.Date(2020, 9, 1, 15, 49, 34, 0, time.UTC)),
+ Closed: new(time.Date(2020, 9, 1, 15, 49, 34, 0, time.UTC)),
},
{
Number: 2,
@@ -190,7 +190,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
Color: "d4c5f9",
Description: "",
}},
- Closed: timePtr(time.Unix(1598969497, 0)),
+ Closed: new(time.Unix(1598969497, 0)),
},
}, issues)
@@ -237,7 +237,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
IsLocked: false,
Created: time.Unix(1598982759, 0),
Updated: time.Unix(1599023425, 0),
- Closed: timePtr(time.Unix(1598982934, 0)),
+ Closed: new(time.Unix(1598982934, 0)),
Assignees: []string{"techknowlogick"},
Base: base.PullRequestBranch{
CloneURL: "",
@@ -254,7 +254,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
OwnerName: "6543-forks",
},
Merged: true,
- MergedTime: timePtr(time.Unix(1598982934, 0)),
+ MergedTime: new(time.Unix(1598982934, 0)),
MergeCommitSHA: "827aa28a907853e5ddfa40c8f9bc52471a2685fd",
PatchURL: "https://gitea.com/gitea/test_repo/pulls/12.patch",
}, prs[1])
diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go
index 96c2655b3a..af9a0e0eaf 100644
--- a/services/migrations/gitea_uploader.go
+++ b/services/migrations/gitea_uploader.go
@@ -8,8 +8,6 @@ import (
"context"
"fmt"
"io"
- "os"
- "path/filepath"
"strconv"
"strings"
"time"
@@ -320,6 +318,7 @@ func (g *GiteaLocalUploader) CreateReleases(ctx context.Context, releases ...*ba
}
attach := repo_model.Attachment{
UUID: uuid.New().String(),
+ RepoID: g.repo.ID,
Name: asset.Name,
DownloadCount: int64(*asset.DownloadCount),
Size: int64(*asset.Size),
@@ -365,11 +364,12 @@ func (g *GiteaLocalUploader) CreateReleases(ctx context.Context, releases ...*ba
// SyncTags syncs releases with tags in the database
func (g *GiteaLocalUploader) SyncTags(ctx context.Context) error {
- return repo_module.SyncReleasesWithTags(ctx, g.repo, g.gitRepo)
+ _, err := repo_module.SyncReleasesWithTags(ctx, g.repo, g.gitRepo)
+ return err
}
func (g *GiteaLocalUploader) SyncBranches(ctx context.Context) error {
- _, err := repo_module.SyncRepoBranchesWithRepo(ctx, g.repo, g.gitRepo, g.doer.ID)
+ _, _, err := repo_module.SyncRepoBranchesWithRepo(ctx, g.repo, g.gitRepo, g.doer.ID)
return err
}
@@ -589,12 +589,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *ba
}
defer ret.Close()
- pullDir := filepath.Join(g.repo.RepoPath(), "pulls")
- if err = os.MkdirAll(pullDir, os.ModePerm); err != nil {
- return err
- }
-
- f, err := os.Create(filepath.Join(pullDir, fmt.Sprintf("%d.patch", pr.Number)))
+ f, err := gitrepo.CreateRepoFile(ctx, g.repo, fmt.Sprintf("pulls/%d.patch", pr.Number))
if err != nil {
return err
}
@@ -668,7 +663,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *ba
fetchArg = git.BranchPrefix + fetchArg
}
- _, err = gitrepo.RunCmdString(ctx, g.repo, gitcmd.NewCommand("fetch", "--no-tags").AddDashesAndList(remote, fetchArg))
+ _, _, err = gitrepo.RunCmdString(ctx, g.repo, gitcmd.NewCommand("fetch", "--no-tags").AddDashesAndList(remote, fetchArg))
if err != nil {
log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err)
return head, nil
@@ -703,7 +698,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(ctx context.Context, pr *ba
// The SHA is empty
log.Warn("Empty reference, no pull head for PR #%d in %s/%s", pr.Number, g.repoOwner, g.repoName)
} else {
- _, err = gitrepo.RunCmdString(ctx, g.repo, gitcmd.NewCommand("rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA))
+ _, _, err = gitrepo.RunCmdString(ctx, g.repo, gitcmd.NewCommand("rev-list", "--quiet", "-1").AddDynamicArguments(pr.Head.SHA))
if err != nil {
// Git update-ref remove bad references with a relative path
log.Warn("Deprecated local head %s for PR #%d in %s/%s, removing %s", pr.Head.SHA, pr.Number, g.repoOwner, g.repoName, pr.GetGitHeadRefName())
@@ -739,7 +734,7 @@ func (g *GiteaLocalUploader) newPullRequest(ctx context.Context, pr *base.PullRe
if pr.Base.Ref != "" && pr.Head.SHA != "" {
// A PR against a tag base does not make sense - therefore pr.Base.Ref must be a branch
// TODO: should we be checking for the refs/heads/ prefix on the pr.Base.Ref? (i.e. are these actually branches or refs)
- pr.Base.SHA, _, err = g.gitRepo.GetMergeBase("", git.BranchPrefix+pr.Base.Ref, pr.Head.SHA)
+ pr.Base.SHA, err = gitrepo.MergeBase(ctx, g.repo, git.BranchPrefix+pr.Base.Ref, pr.Head.SHA)
if err != nil {
log.Error("Cannot determine the merge base for PR #%d in %s/%s. Error: %v", pr.Number, g.repoOwner, g.repoName, err)
}
@@ -901,21 +896,10 @@ func (g *GiteaLocalUploader) CreateReviews(ctx context.Context, reviews ...*base
// SECURITY: The TreePath must be cleaned! use relative path
comment.TreePath = util.PathJoinRel(comment.TreePath)
- var patch string
- reader, writer := io.Pipe()
- defer func() {
- _ = reader.Close()
- _ = writer.Close()
- }()
- go func(comment *base.ReviewComment) {
- if err := git.GetRepoRawDiffForFile(g.gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, comment.TreePath, writer); err != nil {
- // We should ignore the error since the commit maybe removed when force push to the pull request
- log.Warn("GetRepoRawDiffForFile failed when migrating [%s, %s, %s, %s]: %v", g.gitRepo.Path, pr.MergeBase, headCommitID, comment.TreePath, err)
- }
- _ = writer.Close()
- }(comment)
-
- patch, _ = git.CutDiffAroundLine(reader, int64((&issues_model.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines)
+ patch, _ := git.GetFileDiffCutAroundLine(
+ g.gitRepo, pr.MergeBase, headCommitID, comment.TreePath,
+ int64((&issues_model.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines,
+ )
if comment.CreatedAt.IsZero() {
comment.CreatedAt = review.CreatedAt
diff --git a/services/migrations/github.go b/services/migrations/github.go
index ae7350c016..ce631dcd42 100644
--- a/services/migrations/github.go
+++ b/services/migrations/github.go
@@ -329,7 +329,6 @@ func (g *GithubDownloaderV3) convertGithubRelease(ctx context.Context, rel *gith
r.Assets = append(r.Assets, &base.ReleaseAsset{
ID: asset.GetID(),
Name: asset.GetName(),
- ContentType: asset.ContentType,
Size: asset.Size,
DownloadCount: asset.DownloadCount,
Created: asset.CreatedAt.Time,
diff --git a/services/migrations/github_test.go b/services/migrations/github_test.go
index 6d1a5378b9..198062f7cf 100644
--- a/services/migrations/github_test.go
+++ b/services/migrations/github_test.go
@@ -47,19 +47,19 @@ func TestGitHubDownloadRepo(t *testing.T) {
{
Title: "1.0.0",
Description: "Milestone 1.0.0",
- Deadline: timePtr(time.Date(2019, 11, 11, 8, 0, 0, 0, time.UTC)),
+ Deadline: new(time.Date(2019, 11, 11, 8, 0, 0, 0, time.UTC)),
Created: time.Date(2019, 11, 12, 19, 37, 8, 0, time.UTC),
- Updated: timePtr(time.Date(2019, 11, 12, 21, 56, 17, 0, time.UTC)),
- Closed: timePtr(time.Date(2019, 11, 12, 19, 45, 49, 0, time.UTC)),
+ Updated: new(time.Date(2019, 11, 12, 21, 56, 17, 0, time.UTC)),
+ Closed: new(time.Date(2019, 11, 12, 19, 45, 49, 0, time.UTC)),
State: "closed",
},
{
Title: "1.1.0",
Description: "Milestone 1.1.0",
- Deadline: timePtr(time.Date(2019, 11, 12, 8, 0, 0, 0, time.UTC)),
+ Deadline: new(time.Date(2019, 11, 12, 8, 0, 0, 0, time.UTC)),
Created: time.Date(2019, 11, 12, 19, 37, 25, 0, time.UTC),
- Updated: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
- Closed: timePtr(time.Date(2019, 11, 12, 19, 45, 46, 0, time.UTC)),
+ Updated: new(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
+ Closed: new(time.Date(2019, 11, 12, 19, 45, 46, 0, time.UTC)),
State: "closed",
},
}, milestones)
@@ -163,7 +163,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
Content: "+1",
},
},
- Closed: timePtr(time.Date(2019, 11, 12, 20, 22, 22, 0, time.UTC)),
+ Closed: new(time.Date(2019, 11, 12, 20, 22, 22, 0, time.UTC)),
},
{
Number: 2,
@@ -214,7 +214,7 @@ func TestGitHubDownloadRepo(t *testing.T) {
Content: "+1",
},
},
- Closed: timePtr(time.Date(2019, 11, 12, 21, 1, 31, 0, time.UTC)),
+ Closed: new(time.Date(2019, 11, 12, 21, 1, 31, 0, time.UTC)),
},
}, issues)
@@ -284,9 +284,9 @@ func TestGitHubDownloadRepo(t *testing.T) {
OwnerName: "go-gitea",
RepoName: "test_repo",
},
- Closed: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
+ Closed: new(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
Merged: true,
- MergedTime: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
+ MergedTime: new(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)),
MergeCommitSHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2",
ForeignIndex: 3,
},
diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go
index 260fa9cd5d..cbf974af2c 100644
--- a/services/migrations/gitlab.go
+++ b/services/migrations/gitlab.go
@@ -316,12 +316,11 @@ func (g *GitlabDownloader) convertGitlabRelease(ctx context.Context, rel *gitlab
httpClient := NewMigrationHTTPClient()
- for k, asset := range rel.Assets.Links {
+ for _, asset := range rel.Assets.Links {
assetID := asset.ID // Don't optimize this, for closure we need a local variable
r.Assets = append(r.Assets, &base.ReleaseAsset{
ID: int64(asset.ID),
Name: asset.Name,
- ContentType: &rel.Assets.Sources[k].Format,
Size: &zero,
DownloadCount: &zero,
DownloadFunc: func() (io.ReadCloser, error) {
diff --git a/services/migrations/gitlab_test.go b/services/migrations/gitlab_test.go
index fef1053ec8..9e4050289d 100644
--- a/services/migrations/gitlab_test.go
+++ b/services/migrations/gitlab_test.go
@@ -59,14 +59,14 @@ func TestGitlabDownloadRepo(t *testing.T) {
{
Title: "1.1.0",
Created: time.Date(2019, 11, 28, 8, 42, 44, 575000000, time.UTC),
- Updated: timePtr(time.Date(2019, 11, 28, 8, 42, 44, 575000000, time.UTC)),
+ Updated: new(time.Date(2019, 11, 28, 8, 42, 44, 575000000, time.UTC)),
State: "active",
},
{
Title: "1.0.0",
Created: time.Date(2019, 11, 28, 8, 42, 30, 301000000, time.UTC),
- Updated: timePtr(time.Date(2019, 11, 28, 15, 57, 52, 401000000, time.UTC)),
- Closed: timePtr(time.Date(2019, 11, 28, 15, 57, 52, 401000000, time.UTC)),
+ Updated: new(time.Date(2019, 11, 28, 15, 57, 52, 401000000, time.UTC)),
+ Closed: new(time.Date(2019, 11, 28, 15, 57, 52, 401000000, time.UTC)),
State: "closed",
},
}, milestones)
@@ -161,7 +161,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
Content: "open_mouth",
},
},
- Closed: timePtr(time.Date(2019, 11, 28, 8, 46, 23, 275000000, time.UTC)),
+ Closed: new(time.Date(2019, 11, 28, 8, 46, 23, 275000000, time.UTC)),
},
{
Number: 2,
@@ -210,7 +210,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
Content: "hearts",
},
},
- Closed: timePtr(time.Date(2019, 11, 28, 8, 45, 44, 959000000, time.UTC)),
+ Closed: new(time.Date(2019, 11, 28, 8, 45, 44, 959000000, time.UTC)),
},
}, issues)
diff --git a/services/migrations/main_test.go b/services/migrations/main_test.go
index d0ec6a3f8d..9893b4e7d1 100644
--- a/services/migrations/main_test.go
+++ b/services/migrations/main_test.go
@@ -18,10 +18,6 @@ func TestMain(m *testing.M) {
unittest.MainTest(m)
}
-func timePtr(t time.Time) *time.Time {
- return &t
-}
-
func assertTimeEqual(t *testing.T, expected, actual time.Time) {
assert.Equal(t, expected.UTC(), actual.UTC())
}
@@ -171,7 +167,6 @@ func assertReactionsEqual(t *testing.T, expected, actual []*base.Reaction) {
func assertReleaseAssetEqual(t *testing.T, expected, actual *base.ReleaseAsset) {
assert.Equal(t, expected.ID, actual.ID)
assert.Equal(t, expected.Name, actual.Name)
- assert.Equal(t, expected.ContentType, actual.ContentType)
assert.Equal(t, expected.Size, actual.Size)
assert.Equal(t, expected.DownloadCount, actual.DownloadCount)
assertTimeEqual(t, expected.Created, actual.Created)
diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go
index bf65e10454..99f8dba92f 100644
--- a/services/migrations/migrate.go
+++ b/services/migrations/migrate.go
@@ -131,8 +131,8 @@ func MigrateRepository(ctx context.Context, doer *user_model.User, ownerName str
if err1 := uploader.Rollback(); err1 != nil {
log.Error("rollback failed: %v", err1)
}
- if err2 := system_model.CreateRepositoryNotice(fmt.Sprintf("Migrate repository from %s failed: %v", opts.OriginalURL, err)); err2 != nil {
- log.Error("create respotiry notice failed: ", err2)
+ if err2 := system_model.CreateRepositoryNotice(fmt.Sprintf("Migrate repository (%s/%s) from %s failed: %v", ownerName, opts.RepoName, opts.OriginalURL, err)); err2 != nil {
+ log.Error("create repository notice failed: %v", err2)
}
return nil, err
}
diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go
index f9c40049db..9ce35f9eab 100644
--- a/services/mirror/mirror_pull.go
+++ b/services/mirror/mirror_pull.go
@@ -25,13 +25,11 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/migrations"
notify_service "code.gitea.io/gitea/services/notify"
repo_service "code.gitea.io/gitea/services/repository"
)
-// gitShortEmptySha Git short empty SHA
-const gitShortEmptySha = "0000000"
-
// UpdateAddress writes new address to Git repository and database
func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error {
u, err := giturl.ParseGitURL(addr)
@@ -72,159 +70,19 @@ func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error
return repo_model.UpdateRepositoryColsNoAutoTime(ctx, m.Repo, "original_url")
}
-// mirrorSyncResult contains information of a updated reference.
-// If the oldCommitID is "0000000", it means a new reference, the value of newCommitID is empty.
-// If the newCommitID is "0000000", it means the reference is deleted, the value of oldCommitID is empty.
-type mirrorSyncResult struct {
- refName git.RefName
- oldCommitID string
- newCommitID string
-}
-
-// parseRemoteUpdateOutput detects create, update and delete operations of references from upstream.
-// possible output example:
-/*
-// * [new tag] v0.1.8 -> v0.1.8
-// * [new branch] master -> origin/master
-// * [new ref] refs/pull/2/head -> refs/pull/2/head"
-// - [deleted] (none) -> origin/test // delete a branch
-// - [deleted] (none) -> 1 // delete a tag
-// 957a993..a87ba5f test -> origin/test
-// + f895a1e...957a993 test -> origin/test (forced update)
-*/
-// TODO: return whether it's a force update
-func parseRemoteUpdateOutput(output, remoteName string) []*mirrorSyncResult {
- results := make([]*mirrorSyncResult, 0, 3)
- lines := strings.Split(output, "\n")
- for i := range lines {
- // Make sure reference name is presented before continue
- idx := strings.Index(lines[i], "-> ")
- if idx == -1 {
- continue
- }
-
- refName := strings.TrimSpace(lines[i][idx+3:])
-
- switch {
- case strings.HasPrefix(lines[i], " * [new tag]"): // new tag
- results = append(results, &mirrorSyncResult{
- refName: git.RefNameFromTag(refName),
- oldCommitID: gitShortEmptySha,
- })
- case strings.HasPrefix(lines[i], " * [new branch]"): // new branch
- refName = strings.TrimPrefix(refName, remoteName+"/")
- results = append(results, &mirrorSyncResult{
- refName: git.RefNameFromBranch(refName),
- oldCommitID: gitShortEmptySha,
- })
- case strings.HasPrefix(lines[i], " * [new ref]"): // new reference
- results = append(results, &mirrorSyncResult{
- refName: git.RefName(refName),
- oldCommitID: gitShortEmptySha,
- })
- case strings.HasPrefix(lines[i], " - "): // Delete reference
- isTag := !strings.HasPrefix(refName, remoteName+"/")
- var refFullName git.RefName
- if strings.HasPrefix(refName, "refs/") {
- refFullName = git.RefName(refName)
- } else if isTag {
- refFullName = git.RefNameFromTag(refName)
- } else {
- refFullName = git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/"))
- }
- results = append(results, &mirrorSyncResult{
- refName: refFullName,
- newCommitID: gitShortEmptySha,
- })
- case strings.HasPrefix(lines[i], " + "): // Force update
- if idx := strings.Index(refName, " "); idx > -1 {
- refName = refName[:idx]
- }
- delimIdx := strings.Index(lines[i][3:], " ")
- if delimIdx == -1 {
- log.Error("SHA delimiter not found: %q", lines[i])
- continue
- }
- shas := strings.Split(lines[i][3:delimIdx+3], "...")
- if len(shas) != 2 {
- log.Error("Expect two SHAs but not what found: %q", lines[i])
- continue
- }
- var refFullName git.RefName
- if strings.HasPrefix(refName, "refs/") {
- refFullName = git.RefName(refName)
- } else {
- refFullName = git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/"))
- }
-
- results = append(results, &mirrorSyncResult{
- refName: refFullName,
- oldCommitID: shas[0],
- newCommitID: shas[1],
- })
- case strings.HasPrefix(lines[i], " "): // New commits of a reference
- delimIdx := strings.Index(lines[i][3:], " ")
- if delimIdx == -1 {
- log.Error("SHA delimiter not found: %q", lines[i])
- continue
- }
- shas := strings.Split(lines[i][3:delimIdx+3], "..")
- if len(shas) != 2 {
- log.Error("Expect two SHAs but not what found: %q", lines[i])
- continue
- }
- var refFullName git.RefName
- if strings.HasPrefix(refName, "refs/") {
- refFullName = git.RefName(refName)
- } else {
- refFullName = git.RefNameFromBranch(strings.TrimPrefix(refName, remoteName+"/"))
- }
-
- results = append(results, &mirrorSyncResult{
- refName: refFullName,
- oldCommitID: shas[0],
- newCommitID: shas[1],
- })
-
- default:
- log.Warn("parseRemoteUpdateOutput: unexpected update line %q", lines[i])
- }
- }
- return results
-}
-
-func pruneBrokenReferences(ctx context.Context,
- m *repo_model.Mirror,
- timeout time.Duration,
- stdoutBuilder, stderrBuilder *strings.Builder,
- isWiki bool,
-) error {
- wiki := ""
- var storageRepo gitrepo.Repository = m.Repo
- if isWiki {
- wiki = "Wiki "
- storageRepo = m.Repo.WikiStorageRepo()
- }
-
- stderrBuilder.Reset()
- stdoutBuilder.Reset()
-
- pruneErr := gitrepo.GitRemotePrune(ctx, storageRepo, m.GetRemoteName(), timeout, stdoutBuilder, stderrBuilder)
+func pruneBrokenReferences(ctx context.Context, m *repo_model.Mirror, gitRepo gitrepo.Repository, timeout time.Duration) error {
+ cmd := gitcmd.NewCommand("remote", "prune").AddDynamicArguments(m.GetRemoteName()).WithTimeout(timeout)
+ stdout, _, pruneErr := gitrepo.RunCmdString(ctx, gitRepo, cmd)
if pruneErr != nil {
- stdout := stdoutBuilder.String()
- stderr := stderrBuilder.String()
-
- // sanitize the output, since it may contain the remote address, which may
- // contain a password
- stderrMessage := util.SanitizeCredentialURLs(stderr)
+ // sanitize the output, since it may contain the remote address, which may contain a password
+ stderrMessage := util.SanitizeCredentialURLs(pruneErr.Stderr())
stdoutMessage := util.SanitizeCredentialURLs(stdout)
- log.Error("Failed to prune mirror repository %s%-v references:\nStdout: %s\nStderr: %s\nErr: %v", wiki, m.Repo, stdoutMessage, stderrMessage, pruneErr)
- desc := fmt.Sprintf("Failed to prune mirror repository %s'%s' references: %s", wiki, storageRepo.RelativePath(), stderrMessage)
+ log.Error("Failed to prune mirror repository %s references:\nStdout: %s\nStderr: %s\nErr: %v", gitRepo.RelativePath(), stdoutMessage, stderrMessage, pruneErr)
+ desc := fmt.Sprintf("Failed to prune mirror repository (%s) references: %s", m.Repo.FullName(), stderrMessage)
if err := system_model.CreateRepositoryNotice(desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
- // this if will only be reached on a successful prune so try to get the mirror again
}
return pruneErr
}
@@ -248,60 +106,46 @@ func checkRecoverableSyncError(stderrMessage string) bool {
}
// runSync returns true if sync finished without error.
-func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bool) {
- timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second
-
+func runSync(ctx context.Context, m *repo_model.Mirror) ([]*repo_module.SyncResult, bool) {
log.Trace("SyncMirrors [repo: %-v]: running git remote update...", m.Repo)
- // use fetch but not remote update because git fetch support --tags but remote update doesn't
- cmd := gitcmd.NewCommand("fetch")
- if m.EnablePrune {
- cmd.AddArguments("--prune")
- }
- cmd.AddArguments("--tags").AddDynamicArguments(m.GetRemoteName())
-
remoteURL, remoteErr := gitrepo.GitRemoteGetURL(ctx, m.Repo, m.GetRemoteName())
if remoteErr != nil {
log.Error("SyncMirrors [repo: %-v]: GetRemoteURL Error %v", m.Repo, remoteErr)
return nil, false
}
-
envs := proxy.EnvWithProxy(remoteURL.URL)
+ timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second
- stdoutBuilder := strings.Builder{}
- stderrBuilder := strings.Builder{}
- if err := gitrepo.RunCmd(ctx, m.Repo, cmd.WithTimeout(timeout).
- WithEnv(envs).
- WithStdout(&stdoutBuilder).
- WithStderr(&stderrBuilder)); err != nil {
- stdout := stdoutBuilder.String()
- stderr := stderrBuilder.String()
+ // use fetch but not remote update because git fetch support --tags but remote update doesn't
+ cmdFetch := func() *gitcmd.Command {
+ cmd := gitcmd.NewCommand("fetch", "--tags")
+ if m.EnablePrune {
+ cmd.AddArguments("--prune")
+ }
+ return cmd.AddDynamicArguments(m.GetRemoteName()).WithTimeout(timeout).WithEnv(envs)
+ }
+ var err error
+ fetchStdout, fetchStderr, err := gitrepo.RunCmdString(ctx, m.Repo, cmdFetch())
+ if err != nil {
// sanitize the output, since it may contain the remote address, which may contain a password
- stderrMessage := util.SanitizeCredentialURLs(stderr)
- stdoutMessage := util.SanitizeCredentialURLs(stdout)
+ stderrMessage := util.SanitizeCredentialURLs(fetchStderr)
+ stdoutMessage := util.SanitizeCredentialURLs(fetchStdout)
// Now check if the error is a resolve reference due to broken reference
- if checkRecoverableSyncError(stderr) {
+ if checkRecoverableSyncError(fetchStderr) {
log.Warn("SyncMirrors [repo: %-v]: failed to update mirror repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err)
err = nil
-
// Attempt prune
- pruneErr := pruneBrokenReferences(ctx, m, timeout, &stdoutBuilder, &stderrBuilder, false)
+ pruneErr := pruneBrokenReferences(ctx, m, m.Repo, timeout)
if pruneErr == nil {
// Successful prune - reattempt mirror
- stderrBuilder.Reset()
- stdoutBuilder.Reset()
- if err = gitrepo.RunCmd(ctx, m.Repo, cmd.WithTimeout(timeout).
- WithStdout(&stdoutBuilder).
- WithStderr(&stderrBuilder)); err != nil {
- stdout := stdoutBuilder.String()
- stderr := stderrBuilder.String()
-
- // sanitize the output, since it may contain the remote address, which may
- // contain a password
- stderrMessage = util.SanitizeCredentialURLs(stderr)
- stdoutMessage = util.SanitizeCredentialURLs(stdout)
+ fetchStdout, fetchStderr, err = gitrepo.RunCmdString(ctx, m.Repo, cmdFetch())
+ if err != nil {
+ // sanitize the output, since it may contain the remote address, which may contain a password
+ stderrMessage = util.SanitizeCredentialURLs(fetchStderr)
+ stdoutMessage = util.SanitizeCredentialURLs(fetchStdout)
}
}
}
@@ -309,15 +153,13 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
// If there is still an error (or there always was an error)
if err != nil {
log.Error("SyncMirrors [repo: %-v]: failed to update mirror repository:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err)
- desc := fmt.Sprintf("Failed to update mirror repository '%s': %s", m.Repo.RelativePath(), stderrMessage)
- if err = system_model.CreateRepositoryNotice(desc); err != nil {
+ desc := fmt.Sprintf("Failed to update mirror repository (%s): %s", m.Repo.FullName(), stderrMessage)
+ if err := system_model.CreateRepositoryNotice(desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
return nil, false
}
}
- output := stderrBuilder.String()
-
if err := gitrepo.WriteCommitGraph(ctx, m.Repo); err != nil {
log.Error("SyncMirrors [repo: %-v]: %v", m.Repo, err)
}
@@ -331,21 +173,24 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
if m.LFS && setting.LFS.StartServer {
log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo)
endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint)
- lfsClient := lfs.NewClient(endpoint, nil)
+ lfsClient := lfs.NewClient(endpoint, migrations.NewMigrationHTTPTransport())
if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil {
log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo.FullName(), err)
}
}
log.Trace("SyncMirrors [repo: %-v]: syncing branches...", m.Repo)
- if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, m.Repo, gitRepo, 0); err != nil {
+ _, results, err := repo_module.SyncRepoBranchesWithRepo(ctx, m.Repo, gitRepo, 0)
+ if err != nil {
log.Error("SyncMirrors [repo: %-v]: failed to synchronize branches: %v", m.Repo, err)
}
log.Trace("SyncMirrors [repo: %-v]: syncing releases with tags...", m.Repo)
- if err = repo_module.SyncReleasesWithTags(ctx, m.Repo, gitRepo); err != nil {
+ tagResults, err := repo_module.SyncReleasesWithTags(ctx, m.Repo, gitRepo)
+ if err != nil {
log.Error("SyncMirrors [repo: %-v]: failed to synchronize tags to releases: %v", m.Repo, err)
}
+ results = append(results, tagResults...)
gitRepo.Close()
log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo)
@@ -353,16 +198,16 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
log.Error("SyncMirrors [repo: %-v]: failed to update size for mirror repository: %v", m.Repo.FullName(), err)
}
+ cmdRemoteUpdatePrune := func() *gitcmd.Command {
+ return gitcmd.NewCommand("remote", "update", "--prune").
+ AddDynamicArguments(m.GetRemoteName()).WithTimeout(timeout).WithEnv(envs)
+ }
+
if repo_service.HasWiki(ctx, m.Repo) {
log.Trace("SyncMirrors [repo: %-v Wiki]: running git remote update...", m.Repo)
- stderrBuilder.Reset()
- stdoutBuilder.Reset()
-
- if err := gitrepo.GitRemoteUpdatePrune(ctx, m.Repo.WikiStorageRepo(), m.GetRemoteName(),
- timeout, &stdoutBuilder, &stderrBuilder); err != nil {
- stdout := stdoutBuilder.String()
- stderr := stderrBuilder.String()
-
+ // the result of "git remote update" is in stderr
+ stdout, stderr, err := gitrepo.RunCmdString(ctx, m.Repo.WikiStorageRepo(), cmdRemoteUpdatePrune())
+ if err != nil {
// sanitize the output, since it may contain the remote address, which may contain a password
stderrMessage := util.SanitizeCredentialURLs(stderr)
stdoutMessage := util.SanitizeCredentialURLs(stdout)
@@ -373,16 +218,11 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
err = nil
// Attempt prune
- pruneErr := pruneBrokenReferences(ctx, m, timeout, &stdoutBuilder, &stderrBuilder, true)
+ pruneErr := pruneBrokenReferences(ctx, m, m.Repo.WikiStorageRepo(), timeout)
if pruneErr == nil {
// Successful prune - reattempt mirror
- stderrBuilder.Reset()
- stdoutBuilder.Reset()
-
- if err = gitrepo.GitRemoteUpdatePrune(ctx, m.Repo.WikiStorageRepo(), m.GetRemoteName(),
- timeout, &stdoutBuilder, &stderrBuilder); err != nil {
- stdout := stdoutBuilder.String()
- stderr := stderrBuilder.String()
+ stdout, stderr, err = gitrepo.RunCmdString(ctx, m.Repo.WikiStorageRepo(), cmdRemoteUpdatePrune())
+ if err != nil {
stderrMessage = util.SanitizeCredentialURLs(stderr)
stdoutMessage = util.SanitizeCredentialURLs(stdout)
}
@@ -392,8 +232,8 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
// If there is still an error (or there always was an error)
if err != nil {
log.Error("SyncMirrors [repo: %-v Wiki]: failed to update mirror repository wiki:\nStdout: %s\nStderr: %s\nErr: %v", m.Repo, stdoutMessage, stderrMessage, err)
- desc := fmt.Sprintf("Failed to update mirror repository wiki '%s': %s", m.Repo.WikiStorageRepo().RelativePath(), stderrMessage)
- if err = system_model.CreateRepositoryNotice(desc); err != nil {
+ desc := fmt.Sprintf("Failed to update mirror repository wiki (%s): %s", m.Repo.FullName(), stderrMessage)
+ if err := system_model.CreateRepositoryNotice(desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
return nil, false
@@ -418,7 +258,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
}
m.UpdatedUnix = timeutil.TimeStampNow()
- return parseRemoteUpdateOutput(output, m.GetRemoteName()), true
+ return results, true
}
func getRepoPullMirrorLockKey(repoID int64) string {
@@ -487,42 +327,42 @@ func SyncPullMirror(ctx context.Context, repoID int64) bool {
for _, result := range results {
// Discard GitHub pull requests, i.e. refs/pull/*
- if result.refName.IsPull() {
+ if result.RefName.IsPull() {
continue
}
// Create reference
- if result.oldCommitID == gitShortEmptySha {
- commitID, err := gitRepo.GetRefCommitID(result.refName.String())
+ if result.OldCommitID == "" {
+ commitID, err := gitRepo.GetRefCommitID(result.RefName.String())
if err != nil {
- log.Error("SyncMirrors [repo: %-v]: unable to GetRefCommitID [ref_name: %s]: %v", m.Repo, result.refName, err)
+ log.Error("SyncMirrors [repo: %-v]: unable to GetRefCommitID [ref_name: %s]: %v", m.Repo, result.RefName, err)
continue
}
objectFormat := git.ObjectFormatFromName(m.Repo.ObjectFormatName)
notify_service.SyncPushCommits(ctx, m.Repo.MustOwner(ctx), m.Repo, &repo_module.PushUpdateOptions{
- RefFullName: result.refName,
+ RefFullName: result.RefName,
OldCommitID: objectFormat.EmptyObjectID().String(),
NewCommitID: commitID,
}, repo_module.NewPushCommits())
- notify_service.SyncCreateRef(ctx, m.Repo.MustOwner(ctx), m.Repo, result.refName, commitID)
+ notify_service.SyncCreateRef(ctx, m.Repo.MustOwner(ctx), m.Repo, result.RefName, commitID)
continue
}
// Delete reference
- if result.newCommitID == gitShortEmptySha {
- notify_service.SyncDeleteRef(ctx, m.Repo.MustOwner(ctx), m.Repo, result.refName)
+ if result.NewCommitID == "" {
+ notify_service.SyncDeleteRef(ctx, m.Repo.MustOwner(ctx), m.Repo, result.RefName)
continue
}
// Push commits
- oldCommitID, err := gitrepo.GetFullCommitID(ctx, repo, result.oldCommitID)
+ oldCommitID, err := gitrepo.GetFullCommitID(ctx, repo, result.OldCommitID)
if err != nil {
- log.Error("SyncMirrors [repo: %-v]: unable to get GetFullCommitID[%s]: %v", m.Repo, result.oldCommitID, err)
+ log.Error("SyncMirrors [repo: %-v]: unable to get GetFullCommitID[%s]: %v", m.Repo, result.OldCommitID, err)
continue
}
- newCommitID, err := gitrepo.GetFullCommitID(ctx, repo, result.newCommitID)
+ newCommitID, err := gitrepo.GetFullCommitID(ctx, repo, result.NewCommitID)
if err != nil {
- log.Error("SyncMirrors [repo: %-v]: unable to get GetFullCommitID [%s]: %v", m.Repo, result.newCommitID, err)
+ log.Error("SyncMirrors [repo: %-v]: unable to get GetFullCommitID [%s]: %v", m.Repo, result.NewCommitID, err)
continue
}
commits, err := gitRepo.CommitsBetweenIDs(newCommitID, oldCommitID)
@@ -546,7 +386,7 @@ func SyncPullMirror(ctx context.Context, repoID int64) bool {
theCommits.CompareURL = m.Repo.ComposeCompareURL(oldCommitID, newCommitID)
notify_service.SyncPushCommits(ctx, m.Repo.MustOwner(ctx), m.Repo, &repo_module.PushUpdateOptions{
- RefFullName: result.refName,
+ RefFullName: result.RefName,
OldCommitID: oldCommitID,
NewCommitID: newCommitID,
}, theCommits)
@@ -585,7 +425,7 @@ func SyncPullMirror(ctx context.Context, repoID int64) bool {
return true
}
-func checkAndUpdateEmptyRepository(ctx context.Context, m *repo_model.Mirror, results []*mirrorSyncResult) bool {
+func checkAndUpdateEmptyRepository(ctx context.Context, m *repo_model.Mirror, results []*repo_module.SyncResult) bool {
if !m.Repo.IsEmpty {
return true
}
@@ -599,11 +439,11 @@ func checkAndUpdateEmptyRepository(ctx context.Context, m *repo_model.Mirror, re
}
firstName := ""
for _, result := range results {
- if !result.refName.IsBranch() {
+ if !result.RefName.IsBranch() {
continue
}
- name := result.refName.BranchName()
+ name := result.RefName.BranchName()
if len(firstName) == 0 {
firstName = name
}
@@ -632,7 +472,7 @@ func checkAndUpdateEmptyRepository(ctx context.Context, m *repo_model.Mirror, re
// Update the is empty and default_branch columns
if err := repo_model.UpdateRepositoryColsWithAutoTime(ctx, m.Repo, "default_branch", "is_empty"); err != nil {
log.Error("Failed to update default branch of repository %-v. Error: %v", m.Repo, err)
- desc := fmt.Sprintf("Failed to update default branch of repository '%s': %v", m.Repo.RelativePath(), err)
+ desc := fmt.Sprintf("Failed to update default branch of repository (%s): %v", m.Repo.FullName(), err)
if err = system_model.CreateRepositoryNotice(desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
diff --git a/services/mirror/mirror_pull_test.go b/services/mirror/mirror_pull_test.go
index 97859be5b0..58b4da51a2 100644
--- a/services/mirror/mirror_pull_test.go
+++ b/services/mirror/mirror_pull_test.go
@@ -9,62 +9,6 @@ import (
"github.com/stretchr/testify/assert"
)
-func Test_parseRemoteUpdateOutput(t *testing.T) {
- output := `
- * [new tag] v0.1.8 -> v0.1.8
- * [new branch] master -> origin/master
- - [deleted] (none) -> origin/test1
- - [deleted] (none) -> tag1
- + f895a1e...957a993 test2 -> origin/test2 (forced update)
- 957a993..a87ba5f test3 -> origin/test3
- * [new ref] refs/pull/26595/head -> refs/pull/26595/head
- * [new ref] refs/pull/26595/merge -> refs/pull/26595/merge
- e0639e38fb..6db2410489 refs/pull/25873/head -> refs/pull/25873/head
- + 1c97ebc746...976d27d52f refs/pull/25873/merge -> refs/pull/25873/merge (forced update)
-`
- results := parseRemoteUpdateOutput(output, "origin")
- assert.Len(t, results, 10)
- assert.Equal(t, "refs/tags/v0.1.8", results[0].refName.String())
- assert.Equal(t, gitShortEmptySha, results[0].oldCommitID)
- assert.Empty(t, results[0].newCommitID)
-
- assert.Equal(t, "refs/heads/master", results[1].refName.String())
- assert.Equal(t, gitShortEmptySha, results[1].oldCommitID)
- assert.Empty(t, results[1].newCommitID)
-
- assert.Equal(t, "refs/heads/test1", results[2].refName.String())
- assert.Empty(t, results[2].oldCommitID)
- assert.Equal(t, gitShortEmptySha, results[2].newCommitID)
-
- assert.Equal(t, "refs/tags/tag1", results[3].refName.String())
- assert.Empty(t, results[3].oldCommitID)
- assert.Equal(t, gitShortEmptySha, results[3].newCommitID)
-
- assert.Equal(t, "refs/heads/test2", results[4].refName.String())
- assert.Equal(t, "f895a1e", results[4].oldCommitID)
- assert.Equal(t, "957a993", results[4].newCommitID)
-
- assert.Equal(t, "refs/heads/test3", results[5].refName.String())
- assert.Equal(t, "957a993", results[5].oldCommitID)
- assert.Equal(t, "a87ba5f", results[5].newCommitID)
-
- assert.Equal(t, "refs/pull/26595/head", results[6].refName.String())
- assert.Equal(t, gitShortEmptySha, results[6].oldCommitID)
- assert.Empty(t, results[6].newCommitID)
-
- assert.Equal(t, "refs/pull/26595/merge", results[7].refName.String())
- assert.Equal(t, gitShortEmptySha, results[7].oldCommitID)
- assert.Empty(t, results[7].newCommitID)
-
- assert.Equal(t, "refs/pull/25873/head", results[8].refName.String())
- assert.Equal(t, "e0639e38fb", results[8].oldCommitID)
- assert.Equal(t, "6db2410489", results[8].newCommitID)
-
- assert.Equal(t, "refs/pull/25873/merge", results[9].refName.String())
- assert.Equal(t, "1c97ebc746", results[9].oldCommitID)
- assert.Equal(t, "976d27d52f", results[9].newCommitID)
-}
-
func Test_checkRecoverableSyncError(t *testing.T) {
cases := []struct {
recoverable bool
diff --git a/services/mirror/mirror_push.go b/services/mirror/mirror_push.go
index bae189ba87..844e18684b 100644
--- a/services/mirror/mirror_push.go
+++ b/services/mirror/mirror_push.go
@@ -23,6 +23,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/migrations"
repo_service "code.gitea.io/gitea/services/repository"
)
@@ -144,7 +145,7 @@ func runPushSync(ctx context.Context, m *repo_model.PushMirror) error {
defer gitRepo.Close()
endpoint := lfs.DetermineEndpoint(remoteURL.String(), "")
- lfsClient := lfs.NewClient(endpoint, nil)
+ lfsClient := lfs.NewClient(endpoint, migrations.NewMigrationHTTPTransport())
if err := pushAllLFSObjects(ctx, gitRepo, lfsClient); err != nil {
return util.SanitizeErrorCredentialURLs(err)
}
@@ -192,7 +193,9 @@ func pushAllLFSObjects(ctx context.Context, gitRepo *git.Repository, lfsClient l
pointerChan := make(chan lfs.PointerBlob)
errChan := make(chan error, 1)
- go lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan, errChan)
+ go func() {
+ errChan <- lfs.SearchPointerBlobs(ctx, gitRepo, pointerChan)
+ }()
uploadObjects := func(pointers []lfs.Pointer) error {
err := lfsClient.Upload(ctx, pointers, func(p lfs.Pointer, objectError error) (io.ReadCloser, error) {
@@ -242,13 +245,12 @@ func pushAllLFSObjects(ctx context.Context, gitRepo *git.Repository, lfsClient l
}
}
- err, has := <-errChan
- if has {
+ err := <-errChan
+ if err != nil {
log.Error("Error enumerating LFS objects for repository: %v", err)
- return err
}
- return nil
+ return err
}
func syncPushMirrorWithSyncOnCommit(ctx context.Context, repoID int64) {
diff --git a/services/org/team_test.go b/services/org/team_test.go
index a5e01e7a54..5cb588b7dd 100644
--- a/services/org/team_test.go
+++ b/services/org/team_test.go
@@ -8,6 +8,7 @@ import (
"strings"
"testing"
+ issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/organization"
"code.gitea.io/gitea/models/perm"
access_model "code.gitea.io/gitea/models/perm/access"
@@ -62,6 +63,36 @@ func TestTeam_RemoveMember(t *testing.T) {
assert.True(t, organization.IsErrLastOrgOwner(err))
}
+func TestRemoveTeamMemberRemovesSubscriptionsAndStopwatches(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ ctx := t.Context()
+ team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 2})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3})
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{RepoID: repo.ID})
+
+ assert.NoError(t, repo_model.WatchRepo(ctx, user, repo, true))
+ assert.NoError(t, issues_model.CreateOrUpdateIssueWatch(ctx, user.ID, issue.ID, true))
+ ok, err := issues_model.CreateIssueStopwatch(ctx, user, issue)
+ assert.NoError(t, err)
+ assert.True(t, ok)
+
+ assert.NoError(t, RemoveTeamMember(ctx, team, user))
+
+ watch, err := repo_model.GetWatch(ctx, user.ID, repo.ID)
+ assert.NoError(t, err)
+ assert.False(t, repo_model.IsWatchMode(watch.Mode))
+
+ _, exists, err := issues_model.GetIssueWatch(ctx, user.ID, issue.ID)
+ assert.NoError(t, err)
+ assert.False(t, exists)
+
+ hasStopwatch, _, _, err := issues_model.HasUserStopwatch(ctx, user.ID)
+ assert.NoError(t, err)
+ assert.False(t, hasStopwatch)
+}
+
func TestNewTeam(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
diff --git a/services/packages/auth.go b/services/packages/auth.go
index 6e87643e29..dd1f68a7ee 100644
--- a/services/packages/auth.go
+++ b/services/packages/auth.go
@@ -23,21 +23,24 @@ type packageClaims struct {
PackageMeta
}
type PackageMeta struct {
- UserID int64
- Scope auth_model.AccessTokenScope
+ UserID int64
+ Scope auth_model.AccessTokenScope
+ ActionsUserTaskID int64
}
func CreateAuthorizationToken(u *user_model.User, packageScope auth_model.AccessTokenScope) (string, error) {
now := time.Now()
+ actionsUserTaskID, _ := user_model.GetActionsUserTaskID(u)
claims := packageClaims{
RegisteredClaims: jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
NotBefore: jwt.NewNumericDate(now),
},
PackageMeta: PackageMeta{
- UserID: u.ID,
- Scope: packageScope,
+ UserID: u.ID,
+ Scope: packageScope,
+ ActionsUserTaskID: actionsUserTaskID,
},
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
@@ -53,7 +56,7 @@ func CreateAuthorizationToken(u *user_model.User, packageScope auth_model.Access
func ParseAuthorizationRequest(req *http.Request) (*PackageMeta, error) {
h := req.Header.Get("Authorization")
if h == "" {
- return nil, nil
+ return nil, nil //nolint:nilnil // the auth method is not applicable
}
parts := strings.SplitN(h, " ", 2)
diff --git a/services/packages/cargo/index.go b/services/packages/cargo/index.go
index ebcaa3e56d..580d84ebc2 100644
--- a/services/packages/cargo/index.go
+++ b/services/packages/cargo/index.go
@@ -152,7 +152,7 @@ func BuildPackageIndex(ctx context.Context, p *packages_model.Package) (*bytes.B
return nil, fmt.Errorf("SearchVersions[%s]: %w", p.Name, err)
}
if len(pvs) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the package has no versions
}
pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
diff --git a/services/packages/container/blob_uploader.go b/services/packages/container/blob_uploader.go
index 27bc4a5421..17139c3706 100644
--- a/services/packages/container/blob_uploader.go
+++ b/services/packages/container/blob_uploader.go
@@ -63,10 +63,10 @@ func NewBlobUploader(ctx context.Context, id string) (*BlobUploader, error) {
}
return &BlobUploader{
- model,
- hash,
- f,
- false,
+ PackageBlobUpload: model,
+ MultiHasher: hash,
+ file: f,
+ reading: false,
}, nil
}
diff --git a/services/packages/debian/repository.go b/services/packages/debian/repository.go
index 34b52b45cf..910f93b034 100644
--- a/services/packages/debian/repository.go
+++ b/services/packages/debian/repository.go
@@ -7,6 +7,7 @@ import (
"bytes"
"compress/gzip"
"context"
+ "crypto"
"errors"
"fmt"
"io"
@@ -67,7 +68,14 @@ func GetOrCreateKeyPair(ctx context.Context, ownerID int64) (string, string, err
}
func generateKeypair() (string, string, error) {
- e, err := openpgp.NewEntity("", "Debian Registry", "", nil)
+ // Repository signing keys are long-lived and there is currently no rotation mechanism, choose stronger algorithms
+ cfg := &packet.Config{
+ RSABits: 4096,
+ DefaultHash: crypto.SHA256,
+ DefaultCipher: packet.CipherAES256,
+ }
+
+ e, err := openpgp.NewEntity("", "Automatically generated Debian Registry Key; created "+time.Now().UTC().Format(time.RFC3339), "", cfg)
if err != nil {
return "", "", err
}
diff --git a/services/projects/workflow_notifier.go b/services/projects/workflow_notifier.go
index 5c5bbb4f9e..20f96d26b8 100644
--- a/services/projects/workflow_notifier.go
+++ b/services/projects/workflow_notifier.go
@@ -355,9 +355,9 @@ func executeWorkflowActions(ctx context.Context, workflow *project_model.Workflo
log.Error("Invalid column ID: %s", action.Value)
continue
}
- column, err := project_model.GetColumnByProjectIDAndColumnID(ctx, issue.Project.ID, columnID)
+ column, err := project_model.GetColumnByIDAndProjectID(ctx, columnID, issue.Project.ID)
if err != nil {
- log.Error("GetColumnByProjectIDAndColumnID: %v", err)
+ log.Error("GetColumnByIDAndProjectID: %v", err)
continue
}
if err := MoveIssueToAnotherColumn(ctx, user_model.NewProjectWorkflowsUser(), issue, column); err != nil {
diff --git a/services/pull/check.go b/services/pull/check.go
index 691ce9da9f..f6e8433cf2 100644
--- a/services/pull/check.go
+++ b/services/pull/check.go
@@ -238,7 +238,7 @@ func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer
}
defer closer.Close()
- sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, gitRepo, pr.BaseBranch, pr.GetGitHeadRefName())
+ sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, gitRepo)
return sign, err
}
@@ -246,7 +246,7 @@ func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer
// markPullRequestAsMergeable checks if pull request is possible to leaving checking status,
// and set to be either conflict or mergeable.
func markPullRequestAsMergeable(ctx context.Context, pr *issues_model.PullRequest) {
- // If the status has not been changed to conflict by testPullRequestTmpRepoBranchMergeable then we are mergeable
+ // If the status has not been changed to conflict by the conflict checking functions then we are mergeable
if pr.Status == issues_model.PullRequestStatusChecking {
pr.Status = issues_model.PullRequestStatusMergeable
}
@@ -287,12 +287,11 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com
prHeadRef := pr.GetGitHeadRefName()
// Check if the pull request is merged into BaseBranch
- if _, err := gitrepo.RunCmdString(ctx, pr.BaseRepo,
- gitcmd.NewCommand("merge-base", "--is-ancestor").
- AddDynamicArguments(prHeadRef, pr.BaseBranch)); err != nil {
- if strings.Contains(err.Error(), "exit status 1") {
+ cmd := gitcmd.NewCommand("merge-base", "--is-ancestor").AddDynamicArguments(prHeadRef, pr.BaseBranch)
+ if err := gitrepo.RunCmdWithStderr(ctx, pr.BaseRepo, cmd); err != nil {
+ if gitcmd.IsErrorExitCode(err, 1) {
// prHeadRef is not an ancestor of the base branch
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil to indicate that the PR head is not merged
}
// Errors are signaled by a non-zero status that is not 1
return nil, fmt.Errorf("%-v git merge-base --is-ancestor: %w", pr, err)
@@ -315,7 +314,7 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com
objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
// Get the commit from BaseBranch where the pull request got merged
- mergeCommit, err := gitrepo.RunCmdString(ctx, pr.BaseRepo,
+ mergeCommit, _, err := gitrepo.RunCmdString(ctx, pr.BaseRepo,
gitcmd.NewCommand("rev-list", "--ancestry-path", "--merges", "--reverse").
AddDynamicArguments(prHeadCommitID+".."+pr.BaseBranch))
if err != nil {
@@ -443,8 +442,8 @@ func checkPullRequestMergeable(id int64) {
return
}
- if err := testPullRequestBranchMergeable(pr); err != nil {
- log.Error("testPullRequestTmpRepoBranchMergeable[%-v]: %v", pr, err)
+ if err := checkPullRequestBranchMergeable(ctx, pr); err != nil {
+ log.Error("checkPullRequestBranchMergeable[%-v]: %v", pr, err)
pr.Status = issues_model.PullRequestStatusError
if err := pr.UpdateCols(ctx, "status"); err != nil {
log.Error("update pr [%-v] status to PullRequestStatusError failed: %v", pr, err)
diff --git a/services/pull/comment.go b/services/pull/comment.go
index f24e8128e9..6c10bf2aa8 100644
--- a/services/pull/comment.go
+++ b/services/pull/comment.go
@@ -49,7 +49,7 @@ func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldC
// CreatePushPullComment create push code to pull base comment
func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string, isForcePush bool) (comment *issues_model.Comment, err error) {
if pr.HasMerged || oldCommitID == "" || newCommitID == "" {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil because no comment needs to be created
}
opts := &issues_model.CreateCommentOptions{
@@ -71,7 +71,7 @@ func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *iss
}
// It maybe an empty pull request. Only non-empty pull request need to create push comment
if len(data.CommitIDs) == 0 {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil because no comment needs to be created
}
}
diff --git a/services/pull/commit_status.go b/services/pull/commit_status.go
index 25860fc1a8..656bcc50af 100644
--- a/services/pull/commit_status.go
+++ b/services/pull/commit_status.go
@@ -6,6 +6,8 @@ package pull
import (
"context"
+ "errors"
+ "fmt"
"code.gitea.io/gitea/models/db"
git_model "code.gitea.io/gitea/models/git"
@@ -14,8 +16,6 @@ import (
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/glob"
"code.gitea.io/gitea/modules/log"
-
- "github.com/pkg/errors"
)
// MergeRequiredContextsCommitStatus returns a commit status state for given required contexts
@@ -69,7 +69,7 @@ func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus,
func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) (bool, error) {
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
- return false, errors.Wrap(err, "GetLatestCommitStatus")
+ return false, fmt.Errorf("GetLatestCommitStatus: %w", err)
}
if pb == nil || !pb.EnableStatusCheck {
return true, nil
@@ -86,19 +86,19 @@ func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) (
func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullRequest) (commitstatus.CommitStatusState, error) {
// Ensure HeadRepo is loaded
if err := pr.LoadHeadRepo(ctx); err != nil {
- return "", errors.Wrap(err, "LoadHeadRepo")
+ return "", fmt.Errorf("LoadHeadRepo: %w", err)
}
// check if all required status checks are successful
headGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.HeadRepo)
if err != nil {
- return "", errors.Wrap(err, "OpenRepository")
+ return "", fmt.Errorf("OpenRepository: %w", err)
}
defer closer.Close()
if pr.Flow == issues_model.PullRequestFlowGithub {
if exist, err := git_model.IsBranchExist(ctx, pr.HeadRepo.ID, pr.HeadBranch); err != nil {
- return "", errors.Wrap(err, "IsBranchExist")
+ return "", fmt.Errorf("IsBranchExist: %w", err)
} else if !exist {
return "", errors.New("Head branch does not exist, can not merge")
}
@@ -118,17 +118,17 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullR
}
if err := pr.LoadBaseRepo(ctx); err != nil {
- return "", errors.Wrap(err, "LoadBaseRepo")
+ return "", fmt.Errorf("LoadBaseRepo: %w", err)
}
commitStatuses, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
if err != nil {
- return "", errors.Wrap(err, "GetLatestCommitStatus")
+ return "", fmt.Errorf("GetLatestCommitStatus: %w", err)
}
pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
if err != nil {
- return "", errors.Wrap(err, "LoadProtectedBranch")
+ return "", fmt.Errorf("LoadProtectedBranch: %w", err)
}
var requiredContexts []string
if pb != nil {
diff --git a/services/pull/compare.go b/services/pull/compare.go
deleted file mode 100644
index fbdb17cfdd..0000000000
--- a/services/pull/compare.go
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2025 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package pull
-
-import (
- "context"
- "fmt"
- "strconv"
- "time"
-
- repo_model "code.gitea.io/gitea/models/repo"
- "code.gitea.io/gitea/modules/git"
- "code.gitea.io/gitea/modules/gitrepo"
- "code.gitea.io/gitea/modules/graceful"
- logger "code.gitea.io/gitea/modules/log"
-)
-
-// CompareInfo represents needed information for comparing references.
-type CompareInfo struct {
- MergeBase string
- BaseCommitID string
- HeadCommitID string
- Commits []*git.Commit
- NumFiles int
-}
-
-// GetCompareInfo generates and returns compare information between base and head branches of repositories.
-func GetCompareInfo(ctx context.Context, baseRepo, headRepo *repo_model.Repository, headGitRepo *git.Repository, baseBranch, headBranch string, directComparison, fileOnly bool) (_ *CompareInfo, err error) {
- var (
- remoteBranch string
- tmpRemote string
- )
-
- // We don't need a temporary remote for same repository.
- if baseRepo.ID != headRepo.ID {
- // Add a temporary remote
- tmpRemote = strconv.FormatInt(time.Now().UnixNano(), 10)
- if err = gitrepo.GitRemoteAdd(ctx, headRepo, tmpRemote, baseRepo.RepoPath()); err != nil {
- return nil, fmt.Errorf("GitRemoteAdd: %w", err)
- }
- defer func() {
- if err := gitrepo.GitRemoteRemove(graceful.GetManager().ShutdownContext(), headRepo, tmpRemote); err != nil {
- logger.Error("GetPullRequestInfo: GitRemoteRemove: %v", err)
- }
- }()
- }
-
- compareInfo := new(CompareInfo)
-
- compareInfo.HeadCommitID, err = gitrepo.GetFullCommitID(ctx, headRepo, headBranch)
- if err != nil {
- compareInfo.HeadCommitID = headBranch
- }
-
- compareInfo.MergeBase, remoteBranch, err = headGitRepo.GetMergeBase(tmpRemote, baseBranch, headBranch)
- if err == nil {
- compareInfo.BaseCommitID, err = gitrepo.GetFullCommitID(ctx, headRepo, remoteBranch)
- if err != nil {
- compareInfo.BaseCommitID = remoteBranch
- }
- separator := "..."
- baseCommitID := compareInfo.MergeBase
- if directComparison {
- separator = ".."
- baseCommitID = compareInfo.BaseCommitID
- }
-
- // We have a common base - therefore we know that ... should work
- if !fileOnly {
- compareInfo.Commits, err = headGitRepo.ShowPrettyFormatLogToList(ctx, baseCommitID+separator+headBranch)
- if err != nil {
- return nil, fmt.Errorf("ShowPrettyFormatLogToList: %w", err)
- }
- } else {
- compareInfo.Commits = []*git.Commit{}
- }
- } else {
- compareInfo.Commits = []*git.Commit{}
- compareInfo.MergeBase, err = gitrepo.GetFullCommitID(ctx, headRepo, remoteBranch)
- if err != nil {
- compareInfo.MergeBase = remoteBranch
- }
- compareInfo.BaseCommitID = compareInfo.MergeBase
- }
-
- // Count number of changed files.
- // This probably should be removed as we need to use shortstat elsewhere
- // Now there is git diff --shortstat but this appears to be slower than simply iterating with --nameonly
- compareInfo.NumFiles, err = headGitRepo.GetDiffNumChangedFiles(remoteBranch, headBranch, directComparison)
- if err != nil {
- return nil, err
- }
- return compareInfo, nil
-}
diff --git a/services/pull/lfs.go b/services/pull/lfs.go
index eb2a08ed8d..094b563b92 100644
--- a/services/pull/lfs.go
+++ b/services/pull/lfs.go
@@ -7,15 +7,19 @@ package pull
import (
"bufio"
"context"
+ "errors"
"io"
"strconv"
- "sync"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/git/pipeline"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+
+ "golang.org/x/sync/errgroup"
)
// LFSPush pushes lfs objects referred to in new commits in the head repository from the base repository
@@ -26,81 +30,82 @@ func LFSPush(ctx context.Context, tmpBasePath, mergeHeadSHA, mergeBaseSHA string
// ensure only blobs and <=1k size then pass in to git cat-file --batch
// to read each sha and check each as a pointer
// Then if they are lfs -> add them to the baseRepo
- revListReader, revListWriter := io.Pipe()
- shasToCheckReader, shasToCheckWriter := io.Pipe()
- catFileCheckReader, catFileCheckWriter := io.Pipe()
- shasToBatchReader, shasToBatchWriter := io.Pipe()
- catFileBatchReader, catFileBatchWriter := io.Pipe()
- errChan := make(chan error, 1)
- wg := sync.WaitGroup{}
- wg.Add(6)
- // Create the go-routines in reverse order.
+
+ cmd1RevList, cmd3BatchCheck, cmd5BatchContent := gitcmd.NewCommand(), gitcmd.NewCommand(), gitcmd.NewCommand()
+ cmd1RevListOut, cmd1RevListClose := cmd1RevList.MakeStdoutPipe()
+ defer cmd1RevListClose()
+
+ cmd3BatchCheckIn, cmd3BatchCheckOut, cmd3BatchCheckClose := cmd3BatchCheck.MakeStdinStdoutPipe()
+ defer cmd3BatchCheckClose()
+
+ cmd5BatchContentIn, cmd5BatchContentOut, cmd5BatchContentClose := cmd5BatchContent.MakeStdinStdoutPipe()
+ defer cmd5BatchContentClose()
+
+ // Create the go-routines in reverse order (update: the order is not needed any more, the pipes are properly prepared)
+ wg := &errgroup.Group{}
// 6. Take the output of cat-file --batch and check if each file in turn
// to see if they're pointers to files in the LFS store associated with
// the head repo and add them to the base repo if so
- go createLFSMetaObjectsFromCatFileBatch(ctx, catFileBatchReader, &wg, pr)
+ wg.Go(func() error {
+ return createLFSMetaObjectsFromCatFileBatch(ctx, cmd5BatchContentOut, pr)
+ })
// 5. Take the shas of the blobs and batch read them
- go pipeline.CatFileBatch(ctx, shasToBatchReader, catFileBatchWriter, &wg, tmpBasePath)
+ wg.Go(func() error {
+ return pipeline.CatFileBatch(ctx, cmd5BatchContent, tmpBasePath)
+ })
// 4. From the provided objects restrict to blobs <=1k
- go pipeline.BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader, shasToBatchWriter, &wg)
+ wg.Go(func() error {
+ return pipeline.BlobsLessThan1024FromCatFileBatchCheck(cmd3BatchCheckOut, cmd5BatchContentIn)
+ })
// 3. Run batch-check on the objects retrieved from rev-list
- go pipeline.CatFileBatchCheck(ctx, shasToCheckReader, catFileCheckWriter, &wg, tmpBasePath)
+ wg.Go(func() error {
+ return pipeline.CatFileBatchCheck(ctx, cmd3BatchCheck, tmpBasePath)
+ })
// 2. Check each object retrieved rejecting those without names as they will be commits or trees
- go pipeline.BlobsFromRevListObjects(revListReader, shasToCheckWriter, &wg)
+ wg.Go(func() error {
+ return pipeline.BlobsFromRevListObjects(cmd1RevListOut, cmd3BatchCheckIn)
+ })
// 1. Run rev-list objects from mergeHead to mergeBase
- go pipeline.RevListObjects(ctx, revListWriter, &wg, tmpBasePath, mergeHeadSHA, mergeBaseSHA, errChan)
+ wg.Go(func() error {
+ return pipeline.RevListObjects(ctx, cmd1RevList, tmpBasePath, mergeHeadSHA, mergeBaseSHA)
+ })
- wg.Wait()
- select {
- case err, has := <-errChan:
- if has {
- return err
- }
- default:
- }
- return nil
+ return wg.Wait()
}
-func createLFSMetaObjectsFromCatFileBatch(ctx context.Context, catFileBatchReader *io.PipeReader, wg *sync.WaitGroup, pr *issues_model.PullRequest) {
- defer wg.Done()
+func createLFSMetaObjectsFromCatFileBatch(ctx context.Context, catFileBatchReader io.ReadCloser, pr *issues_model.PullRequest) error {
defer catFileBatchReader.Close()
contentStore := lfs.NewContentStore()
-
bufferedReader := bufio.NewReader(catFileBatchReader)
buf := make([]byte, 1025)
for {
// File descriptor line: sha
_, err := bufferedReader.ReadString(' ')
if err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return util.Iif(errors.Is(err, io.EOF), nil, err)
}
// Throw away the blob
if _, err := bufferedReader.ReadString(' '); err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
sizeStr, err := bufferedReader.ReadString('\n')
if err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
size, err := strconv.Atoi(sizeStr[:len(sizeStr)-1])
if err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
pointerBuf := buf[:size+1]
if _, err := io.ReadFull(bufferedReader, pointerBuf); err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
pointerBuf = pointerBuf[:size]
// Now we need to check if the pointerBuf is an LFS pointer
@@ -120,15 +125,13 @@ func createLFSMetaObjectsFromCatFileBatch(ctx context.Context, catFileBatchReade
log.Warn("During merge of: %d in %-v, there is a pointer to LFS Oid: %s which although present in the LFS store is not associated with the head repo %-v", pr.Index, pr.BaseRepo, pointer.Oid, pr.HeadRepo)
continue
}
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
// OK we have a pointer that is associated with the head repo
// and is actually a file in the LFS
// Therefore it should be associated with the base repo
if _, err := git_model.NewLFSMetaObject(ctx, pr.BaseRepoID, pointer); err != nil {
- _ = catFileBatchReader.CloseWithError(err)
- break
+ return err
}
}
}
diff --git a/services/pull/merge.go b/services/pull/merge.go
index 88e30c6832..4925302797 100644
--- a/services/pull/merge.go
+++ b/services/pull/merge.go
@@ -366,11 +366,11 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
}
- mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+baseBranch)
+ mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+tmpRepoBaseBranch)
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for origin/%s: %w", pr.BaseBranch, err)
}
- mergeCommitID, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, baseBranch)
+ mergeCommitID, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, tmpRepoBaseBranch)
if err != nil {
return "", fmt.Errorf("Failed to get full commit id for the new merge: %w", err)
}
@@ -407,32 +407,30 @@ func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *use
)
mergeCtx.env = append(mergeCtx.env, repo_module.EnvPushTrigger+"="+string(pushTrigger))
- pushCmd := gitcmd.NewCommand("push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
+ pushCmd := gitcmd.NewCommand("push", "origin").AddDynamicArguments(tmpRepoBaseBranch + ":" + git.BranchPrefix + pr.BaseBranch)
// Push back to upstream.
// This cause an api call to "/api/internal/hook/post-receive/...",
// If it's merge, all db transaction and operations should be there but not here to prevent deadlock.
- if err := mergeCtx.PrepareGitCmd(pushCmd).Run(ctx); err != nil {
- if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
+ if err := mergeCtx.PrepareGitCmd(pushCmd).RunWithStderr(ctx); err != nil {
+ if strings.Contains(err.Stderr(), "non-fast-forward") {
return "", &git.ErrPushOutOfDate{
StdOut: mergeCtx.outbuf.String(),
- StdErr: mergeCtx.errbuf.String(),
+ StdErr: err.Stderr(),
Err: err,
}
- } else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
+ } else if strings.Contains(err.Stderr(), "! [remote rejected]") {
err := &git.ErrPushRejected{
StdOut: mergeCtx.outbuf.String(),
- StdErr: mergeCtx.errbuf.String(),
+ StdErr: err.Stderr(),
Err: err,
}
err.GenerateMessage()
return "", err
}
- return "", fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
+ return "", fmt.Errorf("git push: %s", err.Stderr())
}
mergeCtx.outbuf.Reset()
- mergeCtx.errbuf.Reset()
-
return mergeCommitID, nil
}
@@ -446,9 +444,8 @@ func commitAndSignNoAuthor(ctx *mergeContext, message string) error {
}
cmdCommit.AddOptionFormat("-S%s", ctx.signKey.KeyID)
}
- if err := ctx.PrepareGitCmd(cmdCommit).Run(ctx); err != nil {
- log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
- return fmt.Errorf("git commit %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ if err := ctx.PrepareGitCmd(cmdCommit).RunWithStderr(ctx); err != nil {
+ return fmt.Errorf("git commit %v: %w\n%s", ctx.pr, err, ctx.outbuf.String())
}
return nil
}
@@ -507,39 +504,37 @@ func (err ErrMergeDivergingFastForwardOnly) Error() string {
}
func runMergeCommand(ctx *mergeContext, mergeStyle repo_model.MergeStyle, cmd *gitcmd.Command) error {
- if err := ctx.PrepareGitCmd(cmd).Run(ctx); err != nil {
+ if err := ctx.PrepareGitCmd(cmd).RunWithStderr(ctx); err != nil {
// Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict
if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil {
// We have a merge conflict error
- log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
return ErrMergeConflicts{
Style: mergeStyle,
StdOut: ctx.outbuf.String(),
- StdErr: ctx.errbuf.String(),
+ StdErr: err.Stderr(),
Err: err,
}
- } else if strings.Contains(ctx.errbuf.String(), "refusing to merge unrelated histories") {
- log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ } else if strings.Contains(err.Stderr(), "refusing to merge unrelated histories") {
+ log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
return ErrMergeUnrelatedHistories{
Style: mergeStyle,
StdOut: ctx.outbuf.String(),
- StdErr: ctx.errbuf.String(),
+ StdErr: err.Stderr(),
Err: err,
}
- } else if mergeStyle == repo_model.MergeStyleFastForwardOnly && strings.Contains(ctx.errbuf.String(), "Not possible to fast-forward, aborting") {
- log.Debug("MergeDivergingFastForwardOnly %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ } else if mergeStyle == repo_model.MergeStyleFastForwardOnly && strings.Contains(err.Stderr(), "Not possible to fast-forward, aborting") {
+ log.Debug("MergeDivergingFastForwardOnly %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
return ErrMergeDivergingFastForwardOnly{
StdOut: ctx.outbuf.String(),
- StdErr: ctx.errbuf.String(),
+ StdErr: err.Stderr(),
Err: err,
}
}
- log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
- return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
+ return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
}
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
-
return nil
}
@@ -722,7 +717,7 @@ func SetMerged(ctx context.Context, pr *issues_model.PullRequest, mergedCommitID
return false, fmt.Errorf("ChangeIssueStatus: %w", err)
}
- // We need to save all of the data used to compute this merge as it may have already been changed by testPullRequestBranchMergeable. FIXME: need to set some state to prevent testPullRequestBranchMergeable from running whilst we are merging.
+ // We need to save all of the data used to compute this merge as it may have already been changed by checkPullRequestBranchMergeable. FIXME: need to set some state to prevent checkPullRequestBranchMergeable from running whilst we are merging.
if cnt, err := db.GetEngine(ctx).Where("id = ?", pr.ID).
And("has_merged = ?", false).
Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix, conflicted_files").
diff --git a/services/pull/merge_ff_only.go b/services/pull/merge_ff_only.go
index 22a560e29c..2ed09a7161 100644
--- a/services/pull/merge_ff_only.go
+++ b/services/pull/merge_ff_only.go
@@ -11,7 +11,7 @@ import (
// doMergeStyleFastForwardOnly merges the tracking into the current HEAD - which is assumed to be staging branch (equal to the pr.BaseBranch)
func doMergeStyleFastForwardOnly(ctx *mergeContext) error {
- cmd := gitcmd.NewCommand("merge", "--ff-only").AddDynamicArguments(trackingBranch)
+ cmd := gitcmd.NewCommand("merge", "--ff-only").AddDynamicArguments(tmpRepoTrackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleFastForwardOnly, cmd); err != nil {
log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
return err
diff --git a/services/pull/merge_merge.go b/services/pull/merge_merge.go
index bc94400f21..2bc1fb9a34 100644
--- a/services/pull/merge_merge.go
+++ b/services/pull/merge_merge.go
@@ -11,7 +11,7 @@ import (
// doMergeStyleMerge merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch)
func doMergeStyleMerge(ctx *mergeContext, message string) error {
- cmd := gitcmd.NewCommand("merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
+ cmd := gitcmd.NewCommand("merge", "--no-ff", "--no-commit").AddDynamicArguments(tmpRepoTrackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleMerge, cmd); err != nil {
log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
return err
diff --git a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go
index 6f752c351d..1131a23d85 100644
--- a/services/pull/merge_prepare.go
+++ b/services/pull/merge_prepare.go
@@ -5,7 +5,6 @@ package pull
import (
"bufio"
- "bytes"
"context"
"fmt"
"io"
@@ -19,7 +18,9 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
asymkey_service "code.gitea.io/gitea/services/asymkey"
)
@@ -37,12 +38,10 @@ type mergeContext struct {
// Do NOT use it with gitcmd.RunStd*() functions, otherwise it will panic
func (ctx *mergeContext) PrepareGitCmd(cmd *gitcmd.Command) *gitcmd.Command {
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
return cmd.WithEnv(ctx.env).
WithDir(ctx.tmpBasePath).
WithParentCallerInfo().
- WithStdout(ctx.outbuf).
- WithStderr(ctx.errbuf)
+ WithStdoutBuffer(ctx.outbuf)
}
// ErrSHADoesNotMatch represents a "SHADoesNotMatch" kind of error.
@@ -77,7 +76,7 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque
if expectedHeadCommitID != "" {
trackingCommitID, _, err := gitcmd.NewCommand("show-ref", "--hash").
- AddDynamicArguments(git.BranchPrefix + trackingBranch).
+ AddDynamicArguments(git.BranchPrefix + tmpRepoTrackingBranch).
WithEnv(mergeCtx.env).
WithDir(mergeCtx.tmpBasePath).
RunStdString(ctx)
@@ -96,7 +95,6 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque
}
mergeCtx.outbuf.Reset()
- mergeCtx.errbuf.Reset()
if err := prepareTemporaryRepoForMerge(mergeCtx); err != nil {
defer cancel()
return nil, nil, err
@@ -105,7 +103,7 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque
mergeCtx.sig = doer.NewGitSig()
mergeCtx.committer = mergeCtx.sig
- gitRepo, err := git.OpenRepository(ctx, mergeCtx.tmpBasePath)
+ gitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
if err != nil {
defer cancel()
return nil, nil, fmt.Errorf("failed to open temp git repo for pr[%d]: %w", mergeCtx.pr.ID, err)
@@ -113,7 +111,7 @@ func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullReque
defer gitRepo.Close()
// Determine if we should sign
- sign, key, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, gitRepo, "HEAD", trackingBranch)
+ sign, key, signer, _ := asymkey_service.SignMerge(ctx, pr, doer, gitRepo)
if sign {
mergeCtx.signKey = key
if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
@@ -154,8 +152,8 @@ func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
}
defer sparseCheckoutListFile.Close() // we will close it earlier but we need to ensure it is closed if there is an error
- if err := getDiffTree(ctx, ctx.tmpBasePath, baseBranch, trackingBranch, sparseCheckoutListFile); err != nil {
- log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, baseBranch, trackingBranch, err)
+ if err := getDiffTree(ctx, ctx.tmpBasePath, tmpRepoBaseBranch, tmpRepoTrackingBranch, sparseCheckoutListFile); err != nil {
+ log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, tmpRepoBaseBranch, tmpRepoTrackingBranch, err)
return fmt.Errorf("getDiffTree: %w", err)
}
@@ -166,13 +164,11 @@ func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
setConfig := func(key, value string) error {
if err := ctx.PrepareGitCmd(gitcmd.NewCommand("config", "--local").AddDynamicArguments(key, value)).
- Run(ctx); err != nil {
- log.Error("git config [%s -> %q]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
- return fmt.Errorf("git config [%s -> %q]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
+ RunWithStderr(ctx); err != nil {
+ log.Error("git config [%s -> %q]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), err.Stderr())
+ return fmt.Errorf("git config [%s -> %q]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), err.Stderr())
}
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
-
return nil
}
@@ -199,63 +195,34 @@ func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
// Read base branch index
if err := ctx.PrepareGitCmd(gitcmd.NewCommand("read-tree", "HEAD")).
- Run(ctx); err != nil {
- log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
- return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
+ RunWithStderr(ctx); err != nil {
+ log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), err.Stderr())
+ return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), err.Stderr())
}
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
-
return nil
}
// getDiffTree returns a string containing all the files that were changed between headBranch and baseBranch
// the filenames are escaped so as to fit the format required for .git/info/sparse-checkout
func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, out io.Writer) error {
- diffOutReader, diffOutWriter, err := os.Pipe()
- if err != nil {
- log.Error("Unable to create os.Pipe for %s", repoPath)
- return err
- }
- defer func() {
- _ = diffOutReader.Close()
- _ = diffOutWriter.Close()
- }()
-
- scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
- if atEOF && len(data) == 0 {
- return 0, nil, nil
- }
- if i := bytes.IndexByte(data, '\x00'); i >= 0 {
- return i + 1, data[0:i], nil
- }
- if atEOF {
- return len(data), data, nil
- }
- return 0, nil, nil
- }
-
- err = gitcmd.NewCommand("diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root").
- AddDynamicArguments(baseBranch, headBranch).
+ cmd := gitcmd.NewCommand("diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root")
+ diffOutReader, diffOutReaderClose := cmd.MakeStdoutPipe()
+ defer diffOutReaderClose()
+ err := cmd.AddDynamicArguments(baseBranch, headBranch).
WithDir(repoPath).
- WithStdout(diffOutWriter).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- // Close the writer end of the pipe to begin processing
- _ = diffOutWriter.Close()
- defer func() {
- // Close the reader on return to terminate the git command if necessary
- _ = diffOutReader.Close()
- }()
-
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
// Now scan the output from the command
scanner := bufio.NewScanner(diffOutReader)
- scanner.Split(scanNullTerminatedStrings)
+ scanner.Split(util.BufioScannerSplit(0))
for scanner.Scan() {
- filepath := scanner.Text()
+ treePath := scanner.Text()
// escape '*', '?', '[', spaces and '!' prefix
- filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`)
+ treePath = escapedSymbols.ReplaceAllString(treePath, `\$1`)
// no necessary to escape the first '#' symbol because the first symbol is '/'
- fmt.Fprintf(out, "/%s\n", filepath)
+ if _, err := fmt.Fprintf(out, "/%s\n", treePath); err != nil {
+ return err
+ }
}
return scanner.Err()
}).
@@ -286,16 +253,15 @@ func (err ErrRebaseConflicts) Error() string {
// if there is a conflict it will return an ErrRebaseConflicts
func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error {
// Checkout head branch
- if err := ctx.PrepareGitCmd(gitcmd.NewCommand("checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch)).
- Run(ctx); err != nil {
- return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ if err := ctx.PrepareGitCmd(gitcmd.NewCommand("checkout", "-b").AddDynamicArguments(tmpRepoStagingBranch, tmpRepoTrackingBranch)).
+ RunWithStderr(ctx); err != nil {
+ return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
}
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
// Rebase before merging
- if err := ctx.PrepareGitCmd(gitcmd.NewCommand("rebase").AddDynamicArguments(baseBranch)).
- Run(ctx); err != nil {
+ if err := ctx.PrepareGitCmd(gitcmd.NewCommand("rebase").AddDynamicArguments(tmpRepoBaseBranch)).
+ RunWithStderr(ctx); err != nil {
// Rebase will leave a REBASE_HEAD file in .git if there is a conflict
if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil {
var commitSha string
@@ -309,7 +275,7 @@ func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle)
commitShaBytes, readErr := os.ReadFile(failingCommitPath)
if readErr != nil {
// Abandon this attempt to handle the error
- return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
}
commitSha = strings.TrimSpace(string(commitShaBytes))
ok = true
@@ -318,20 +284,19 @@ func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle)
}
if !ok {
log.Error("Unable to determine failing commit sha for failing rebase in temp repo for %-v. Cannot cast as ErrRebaseConflicts.", ctx.pr)
- return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
}
- log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), ctx.errbuf.String())
+ log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), err.Stderr())
return ErrRebaseConflicts{
CommitSHA: commitSha,
Style: mergeStyle,
StdOut: ctx.outbuf.String(),
- StdErr: ctx.errbuf.String(),
+ StdErr: err.Stderr(),
Err: err,
}
}
- return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
}
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
return nil
}
diff --git a/services/pull/merge_rebase.go b/services/pull/merge_rebase.go
index 0fa4fd00f6..9dbe67a6c6 100644
--- a/services/pull/merge_rebase.go
+++ b/services/pull/merge_rebase.go
@@ -43,7 +43,7 @@ func doMergeRebaseFastForward(ctx *mergeContext) error {
return fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
}
- cmd := gitcmd.NewCommand("merge", "--ff-only").AddDynamicArguments(stagingBranch)
+ cmd := gitcmd.NewCommand("merge", "--ff-only").AddDynamicArguments(tmpRepoStagingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleRebase, cmd); err != nil {
log.Error("Unable to merge staging into base: %v", err)
return err
@@ -88,7 +88,7 @@ func doMergeRebaseFastForward(ctx *mergeContext) error {
// Perform rebase merge with merge commit.
func doMergeRebaseMergeCommit(ctx *mergeContext, message string) error {
- cmd := gitcmd.NewCommand("merge").AddArguments("--no-ff", "--no-commit").AddDynamicArguments(stagingBranch)
+ cmd := gitcmd.NewCommand("merge").AddArguments("--no-ff", "--no-commit").AddDynamicArguments(tmpRepoStagingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleRebaseMerge, cmd); err != nil {
log.Error("Unable to merge staging into base: %v", err)
@@ -109,14 +109,12 @@ func doMergeStyleRebase(ctx *mergeContext, mergeStyle repo_model.MergeStyle, mes
}
// Checkout base branch again
- if err := ctx.PrepareGitCmd(gitcmd.NewCommand("checkout").AddDynamicArguments(baseBranch)).
- Run(ctx); err != nil {
- log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
- return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ if err := ctx.PrepareGitCmd(gitcmd.NewCommand("checkout").AddDynamicArguments(tmpRepoBaseBranch)).
+ RunWithStderr(ctx); err != nil {
+ log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
+ return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
}
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
-
if mergeStyle == repo_model.MergeStyleRebase {
return doMergeRebaseFastForward(ctx)
}
diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go
index b5f2a4deff..6c101c8e89 100644
--- a/services/pull/merge_squash.go
+++ b/services/pull/merge_squash.go
@@ -32,9 +32,9 @@ func getAuthorSignatureSquash(ctx *mergeContext) (*git.Signature, error) {
}
defer gitRepo.Close()
- commits, err := gitRepo.CommitsBetweenIDs(trackingBranch, "HEAD")
+ commits, err := gitRepo.CommitsBetweenIDs(tmpRepoTrackingBranch, "HEAD")
if err != nil {
- log.Error("%-v Unable to get commits between: %s %s: %v", ctx.pr, "HEAD", trackingBranch, err)
+ log.Error("%-v Unable to get commits between: %s %s: %v", ctx.pr, "HEAD", tmpRepoTrackingBranch, err)
return nil, err
}
@@ -58,7 +58,7 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error {
return fmt.Errorf("getAuthorSignatureSquash: %w", err)
}
- cmdMerge := gitcmd.NewCommand("merge", "--squash").AddDynamicArguments(trackingBranch)
+ cmdMerge := gitcmd.NewCommand("merge", "--squash").AddDynamicArguments(tmpRepoTrackingBranch)
if err := runMergeCommand(ctx, repo_model.MergeStyleSquash, cmdMerge); err != nil {
log.Error("%-v Unable to merge --squash tracking into base: %v", ctx.pr, err)
return err
@@ -81,11 +81,10 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error {
}
cmdCommit.AddOptionFormat("-S%s", ctx.signKey.KeyID)
}
- if err := ctx.PrepareGitCmd(cmdCommit).Run(ctx); err != nil {
- log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
- return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String())
+ if err := ctx.PrepareGitCmd(cmdCommit).RunWithStderr(ctx); err != nil {
+ log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), err.Stderr())
+ return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), err.Stderr())
}
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
return nil
}
diff --git a/services/pull/merge_tree.go b/services/pull/merge_tree.go
new file mode 100644
index 0000000000..18d59fabd1
--- /dev/null
+++ b/services/pull/merge_tree.go
@@ -0,0 +1,144 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "errors"
+ "fmt"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/git/gitcmd"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// checkConflictsMergeTree uses git merge-tree to check for conflicts and if none are found checks if the patch is empty
+// return true if there are conflicts otherwise return false
+// pr.Status and pr.ConflictedFiles will be updated as necessary
+func checkConflictsMergeTree(ctx context.Context, pr *issues_model.PullRequest, baseCommitID string) (bool, error) {
+ treeHash, conflict, conflictFiles, err := gitrepo.MergeTree(ctx, pr.BaseRepo, baseCommitID, pr.HeadCommitID, pr.MergeBase)
+ if err != nil {
+ return false, fmt.Errorf("MergeTree: %w", err)
+ }
+ if conflict {
+ pr.Status = issues_model.PullRequestStatusConflict
+ // sometimes git merge-tree will detect conflicts but not list any conflicted files
+ // so that pr.ConflictedFiles will be empty
+ pr.ConflictedFiles = conflictFiles
+
+ log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
+ return true, nil
+ }
+
+ // Detect whether the pull request introduces changes by comparing the merged tree (treeHash)
+ // against the current base commit (baseCommitID) using `git diff-tree`. The command returns exit code 0
+ // if there is no diff between these trees (empty patch) and exit code 1 if there is a diff.
+ gitErr := gitrepo.RunCmd(ctx, pr.BaseRepo, gitcmd.NewCommand("diff-tree", "-r", "--quiet").
+ AddDynamicArguments(treeHash, baseCommitID))
+ switch {
+ case gitErr == nil:
+ log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID)
+ pr.Status = issues_model.PullRequestStatusEmpty
+ case gitcmd.IsErrorExitCode(gitErr, 1):
+ pr.Status = issues_model.PullRequestStatusMergeable
+ default:
+ return false, fmt.Errorf("run diff-tree exit abnormally: %w", gitErr)
+ }
+ return false, nil
+}
+
+func checkPullRequestMergeableByMergeTree(ctx context.Context, pr *issues_model.PullRequest) error {
+ // 1. Get head commit
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ return err
+ }
+ headGitRepo, err := gitrepo.OpenRepository(ctx, pr.HeadRepo)
+ if err != nil {
+ return fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer headGitRepo.Close()
+
+ // 2. Get/open base repository
+ var baseGitRepo *git.Repository
+ if pr.IsSameRepo() {
+ baseGitRepo = headGitRepo
+ } else {
+ baseGitRepo, err = gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ return fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer baseGitRepo.Close()
+ }
+
+ // 3. Get head commit id
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ pr.HeadCommitID, err = headGitRepo.GetRefCommitID(git.BranchPrefix + pr.HeadBranch)
+ if err != nil {
+ return fmt.Errorf("GetBranchCommitID: can't find commit ID for head: %w", err)
+ }
+ } else {
+ if pr.ID > 0 {
+ pr.HeadCommitID, err = baseGitRepo.GetRefCommitID(pr.GetGitHeadRefName())
+ if err != nil {
+ return fmt.Errorf("GetRefCommitID: can't find commit ID for head: %w", err)
+ }
+ } else if pr.HeadCommitID == "" { // for new pull request with agit, the head commit id must be provided
+ return errors.New("head commit ID is empty for pull request Agit flow")
+ }
+ }
+
+ // 4. fetch head commit id into the current repository
+ // it will be checked in 2 weeks by default from git if the pull request created failure.
+ if !pr.IsSameRepo() {
+ if !baseGitRepo.IsReferenceExist(pr.HeadCommitID) {
+ if err := gitrepo.FetchRemoteCommit(ctx, pr.BaseRepo, pr.HeadRepo, pr.HeadCommitID); err != nil {
+ return fmt.Errorf("FetchRemoteCommit: %w", err)
+ }
+ }
+ }
+
+ // 5. update merge base
+ baseCommitID, err := baseGitRepo.GetRefCommitID(git.BranchPrefix + pr.BaseBranch)
+ if err != nil {
+ return fmt.Errorf("GetBranchCommitID: can't find commit ID for base: %w", err)
+ }
+
+ pr.MergeBase, err = gitrepo.MergeBase(ctx, pr.BaseRepo, baseCommitID, pr.HeadCommitID)
+ if err != nil {
+ // if there is no merge base, then it's empty, still need to allow the pull request to be created
+ // not quite right (e.g.: why not reset the fields like below), but no interest to do more investigation at the moment
+ log.Error("MergeBase: unable to find merge base between %s and %s: %v", baseCommitID, pr.HeadCommitID, err)
+ pr.Status = issues_model.PullRequestStatusEmpty
+ return nil
+ }
+
+ // reset conflicted files and changed protected files
+ pr.ConflictedFiles = nil
+ pr.ChangedProtectedFiles = nil
+
+ // 6. if base == head, then it's an ancestor
+ if pr.HeadCommitID == pr.MergeBase {
+ pr.Status = issues_model.PullRequestStatusAncestor
+ return nil
+ }
+
+ // 7. Check for conflicts
+ conflicted, err := checkConflictsMergeTree(ctx, pr, baseCommitID)
+ if err != nil {
+ log.Error("checkConflictsMergeTree: %v", err)
+ pr.Status = issues_model.PullRequestStatusError
+ return fmt.Errorf("checkConflictsMergeTree: %w", err)
+ }
+ if conflicted || pr.Status == issues_model.PullRequestStatusEmpty {
+ return nil
+ }
+
+ // 8. Check for protected files changes
+ if err = checkPullFilesProtection(ctx, pr, baseGitRepo, pr.HeadCommitID); err != nil {
+ return fmt.Errorf("checkPullFilesProtection: %w", err)
+ }
+ return nil
+}
diff --git a/services/pull/merge_tree_test.go b/services/pull/merge_tree_test.go
new file mode 100644
index 0000000000..6fa2cf7022
--- /dev/null
+++ b/services/pull/merge_tree_test.go
@@ -0,0 +1,154 @@
+// Copyright 2026 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "testing"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git/gitcmd"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func testPullRequestMergeCheck(t *testing.T,
+ targetFunc func(ctx context.Context, pr *issues_model.PullRequest) error,
+ pr *issues_model.PullRequest,
+ expectedStatus issues_model.PullRequestStatus,
+ expectedConflictedFiles []string,
+ expectedChangedProtectedFiles []string,
+) {
+ assert.NoError(t, pr.LoadIssue(t.Context()))
+ assert.NoError(t, pr.LoadBaseRepo(t.Context()))
+ assert.NoError(t, pr.LoadHeadRepo(t.Context()))
+ pr.Status = issues_model.PullRequestStatusChecking
+ pr.ConflictedFiles = []string{"unrelated-conflicted-file"}
+ pr.ChangedProtectedFiles = []string{"unrelated-protected-file"}
+ pr.MergeBase = ""
+ pr.HeadCommitID = ""
+ err := targetFunc(t.Context(), pr)
+ require.NoError(t, err)
+ assert.Equal(t, expectedStatus, pr.Status)
+ assert.Equal(t, expectedConflictedFiles, pr.ConflictedFiles)
+ assert.Equal(t, expectedChangedProtectedFiles, pr.ChangedProtectedFiles)
+ assert.NotEmpty(t, pr.MergeBase)
+ assert.NotEmpty(t, pr.HeadCommitID)
+}
+
+func TestPullRequestMergeable(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ t.Run("NoConflict-MergeTree", func(t *testing.T) {
+ testPullRequestMergeCheck(t, checkPullRequestMergeableByMergeTree, pr, issues_model.PullRequestStatusMergeable, nil, nil)
+ })
+ t.Run("NoConflict-TmpRepo", func(t *testing.T) {
+ testPullRequestMergeCheck(t, checkPullRequestMergeableByTmpRepo, pr, issues_model.PullRequestStatusMergeable, nil, nil)
+ })
+
+ pr.BaseBranch, pr.HeadBranch = "test-merge-tree-conflict-base", "test-merge-tree-conflict-head"
+ conflictFiles := createConflictBranches(t, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.HeadBranch)
+ t.Run("Conflict-MergeTree", func(t *testing.T) {
+ testPullRequestMergeCheck(t, checkPullRequestMergeableByMergeTree, pr, issues_model.PullRequestStatusConflict, conflictFiles, nil)
+ })
+ t.Run("Conflict-TmpRepo", func(t *testing.T) {
+ testPullRequestMergeCheck(t, checkPullRequestMergeableByTmpRepo, pr, issues_model.PullRequestStatusConflict, conflictFiles, nil)
+ })
+
+ pr.BaseBranch, pr.HeadBranch = "test-merge-tree-empty-base", "test-merge-tree-empty-head"
+ createEmptyBranches(t, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.HeadBranch)
+ t.Run("Empty-MergeTree", func(t *testing.T) {
+ testPullRequestMergeCheck(t, checkPullRequestMergeableByMergeTree, pr, issues_model.PullRequestStatusEmpty, nil, nil)
+ })
+ t.Run("Empty-TmpRepo", func(t *testing.T) {
+ testPullRequestMergeCheck(t, checkPullRequestMergeableByTmpRepo, pr, issues_model.PullRequestStatusEmpty, nil, nil)
+ })
+}
+
+func createConflictBranches(t *testing.T, repoPath, baseBranch, headBranch string) []string {
+ conflictFile := "conflict.txt"
+ stdin := fmt.Sprintf(
+ `reset refs/heads/%[1]s
+from refs/heads/master
+
+commit refs/heads/%[1]s
+mark :1
+committer Test 0 +0000
+data 17
+add conflict file
+M 100644 inline %[3]s
+data 4
+base
+
+commit refs/heads/%[1]s
+mark :2
+committer Test 0 +0000
+data 11
+base change
+from :1
+M 100644 inline %[3]s
+data 11
+base change
+
+reset refs/heads/%[2]s
+from :1
+
+commit refs/heads/%[2]s
+mark :3
+committer Test 0 +0000
+data 11
+head change
+from :1
+M 100644 inline %[3]s
+data 11
+head change
+`, baseBranch, headBranch, conflictFile)
+ err := gitcmd.NewCommand("fast-import").WithDir(repoPath).WithStdinBytes([]byte(stdin)).RunWithStderr(t.Context())
+ require.NoError(t, err)
+ return []string{conflictFile}
+}
+
+func createEmptyBranches(t *testing.T, repoPath, baseBranch, headBranch string) {
+ emptyFile := "empty.txt"
+ stdin := fmt.Sprintf(`reset refs/heads/%[1]s
+from refs/heads/master
+
+commit refs/heads/%[1]s
+mark :1
+committer Test 0 +0000
+data 14
+add empty file
+M 100644 inline %[3]s
+data 4
+base
+
+reset refs/heads/%[2]s
+from :1
+
+commit refs/heads/%[2]s
+mark :2
+committer Test 0 +0000
+data 17
+change empty file
+from :1
+M 100644 inline %[3]s
+data 6
+change
+
+commit refs/heads/%[2]s
+mark :3
+committer Test 0 +0000
+data 17
+revert empty file
+from :2
+M 100644 inline %[3]s
+data 4
+base
+`, baseBranch, headBranch, emptyFile)
+ err := gitcmd.NewCommand("fast-import").WithDir(repoPath).WithStdinBytes([]byte(stdin)).RunWithStderr(t.Context())
+ require.NoError(t, err)
+}
diff --git a/services/pull/patch.go b/services/pull/patch.go
index d82fe3e225..30f07f8931 100644
--- a/services/pull/patch.go
+++ b/services/pull/patch.go
@@ -21,7 +21,6 @@ import (
"code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/glob"
- "code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/setting"
@@ -67,10 +66,18 @@ var patchErrorSuffices = []string{
": does not exist in index",
}
-func testPullRequestBranchMergeable(pr *issues_model.PullRequest) error {
- ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("testPullRequestBranchMergeable: %s", pr))
+func checkPullRequestBranchMergeable(ctx context.Context, pr *issues_model.PullRequest) error {
+ ctx, _, finished := process.GetManager().AddContext(ctx, fmt.Sprintf("checkPullRequestBranchMergeable: %s", pr))
defer finished()
+ if git.DefaultFeatures().SupportGitMergeTree {
+ return checkPullRequestMergeableByMergeTree(ctx, pr)
+ }
+
+ return checkPullRequestMergeableByTmpRepo(ctx, pr)
+}
+
+func checkPullRequestMergeableByTmpRepo(ctx context.Context, pr *issues_model.PullRequest) error {
prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
if err != nil {
if !git_model.IsErrBranchNotExist(err) {
@@ -80,10 +87,6 @@ func testPullRequestBranchMergeable(pr *issues_model.PullRequest) error {
}
defer cancel()
- return testPullRequestTmpRepoBranchMergeable(ctx, prCtx, pr)
-}
-
-func testPullRequestTmpRepoBranchMergeable(ctx context.Context, prCtx *prTmpRepoContext, pr *issues_model.PullRequest) error {
gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
if err != nil {
return fmt.Errorf("OpenRepository: %w", err)
@@ -91,16 +94,16 @@ func testPullRequestTmpRepoBranchMergeable(ctx context.Context, prCtx *prTmpRepo
defer gitRepo.Close()
// 1. update merge base
- pr.MergeBase, _, err = gitcmd.NewCommand("merge-base", "--", "base", "tracking").WithDir(prCtx.tmpBasePath).RunStdString(ctx)
+ pr.MergeBase, _, err = gitcmd.NewCommand("merge-base", "--", tmpRepoBaseBranch, tmpRepoTrackingBranch).WithDir(prCtx.tmpBasePath).RunStdString(ctx)
if err != nil {
var err2 error
- pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base")
+ pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + tmpRepoBaseBranch)
if err2 != nil {
return fmt.Errorf("GetMergeBase: %v and can't find commit ID for base: %w", err, err2)
}
}
pr.MergeBase = strings.TrimSpace(pr.MergeBase)
- if pr.HeadCommitID, err = gitRepo.GetRefCommitID(git.BranchPrefix + "tracking"); err != nil {
+ if pr.HeadCommitID, err = gitRepo.GetRefCommitID(git.BranchPrefix + tmpRepoTrackingBranch); err != nil {
return fmt.Errorf("GetBranchCommitID: can't find commit ID for head: %w", err)
}
@@ -110,17 +113,19 @@ func testPullRequestTmpRepoBranchMergeable(ctx context.Context, prCtx *prTmpRepo
}
// 2. Check for conflicts
- if conflicts, err := checkConflicts(ctx, pr, gitRepo, prCtx.tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
+ conflicts, err := checkConflictsByTmpRepo(ctx, pr, gitRepo, prCtx.tmpBasePath)
+ if err != nil {
return err
}
- // 3. Check for protected files changes
- if err = checkPullFilesProtection(ctx, pr, gitRepo); err != nil {
- return fmt.Errorf("pr.CheckPullFilesProtection(): %v", err)
+ pr.ChangedProtectedFiles = nil
+ if conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
+ return nil
}
- if len(pr.ChangedProtectedFiles) > 0 {
- log.Trace("Found %d protected files changed", len(pr.ChangedProtectedFiles))
+ // 3. Check for protected files changes
+ if err = checkPullFilesProtection(ctx, pr, gitRepo, tmpRepoTrackingBranch); err != nil {
+ return fmt.Errorf("pr.CheckPullFilesProtection(): %w", err)
}
pr.Status = issues_model.PullRequestStatusMergeable
@@ -249,7 +254,7 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo
defer cancel()
// First we use read-tree to do a simple three-way merge
- if _, _, err := gitcmd.NewCommand("read-tree", "-m").AddDynamicArguments(base, ours, theirs).WithDir(gitPath).RunStdString(ctx); err != nil {
+ if err := gitcmd.NewCommand("read-tree", "-m").AddDynamicArguments(base, ours, theirs).WithDir(gitPath).RunWithStderr(ctx); err != nil {
log.Error("Unable to run read-tree -m! Error: %v", err)
return false, nil, fmt.Errorf("unable to run read-tree -m! Error: %w", err)
}
@@ -307,14 +312,14 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo
return conflict, conflictedFiles, nil
}
-func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, tmpBasePath string) (bool, error) {
- // 1. checkConflicts resets the conflict status - therefore - reset the conflict status
+func checkConflictsByTmpRepo(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, tmpBasePath string) (bool, error) {
+ // 1. checkConflictsByTmpRepo resets the conflict status - therefore - reset the conflict status
pr.ConflictedFiles = nil
// 2. AttemptThreeWayMerge first - this is much quicker than plain patch to base
description := fmt.Sprintf("PR[%d] %s/%s#%d", pr.ID, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, pr.Index)
conflict, conflictFiles, err := AttemptThreeWayMerge(ctx,
- tmpBasePath, gitRepo, pr.MergeBase, "base", "tracking", description)
+ tmpBasePath, gitRepo, pr.MergeBase, tmpRepoBaseBranch, tmpRepoTrackingBranch, description)
if err != nil {
return false, err
}
@@ -329,7 +334,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
return false, fmt.Errorf("unable to write unconflicted tree: %w\n`git ls-files -u`:\n%s", err, lsfiles)
}
treeHash = strings.TrimSpace(treeHash)
- baseTree, err := gitRepo.GetTree("base")
+ baseTree, err := gitRepo.GetTree(tmpRepoBaseBranch)
if err != nil {
return false, err
}
@@ -379,10 +384,10 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
return false, nil
}
- log.Trace("PullRequest[%d].testPullRequestTmpRepoBranchMergeable (patchPath): %s", pr.ID, patchPath)
+ log.Trace("PullRequest[%d].checkPullRequestMergeableByTmpRepo (patchPath): %s", pr.ID, patchPath)
// 4. Read the base branch in to the index of the temporary repository
- _, _, err = gitcmd.NewCommand("read-tree", "base").WithDir(tmpBasePath).RunStdString(ctx)
+ _, _, err = gitcmd.NewCommand("read-tree", tmpRepoBaseBranch).WithDir(tmpBasePath).RunStdString(ctx)
if err != nil {
return false, fmt.Errorf("git read-tree %s: %w", pr.BaseBranch, err)
}
@@ -413,30 +418,15 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
// in memory - which is very wasteful.
// - alternatively we can do the equivalent of:
// `git apply --check ... | grep ...`
- // meaning we don't store all of the conflicts unnecessarily.
- stderrReader, stderrWriter, err := os.Pipe()
- if err != nil {
- log.Error("Unable to open stderr pipe: %v", err)
- return false, fmt.Errorf("unable to open stderr pipe: %w", err)
- }
- defer func() {
- _ = stderrReader.Close()
- _ = stderrWriter.Close()
- }()
+ // meaning we don't store all the conflicts unnecessarily.
+ stderrReader, stderrReaderClose := cmdApply.MakeStderrPipe()
+ defer stderrReaderClose()
// 8. Run the check command
conflict = false
err = cmdApply.
WithDir(tmpBasePath).
- WithStderr(stderrWriter).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- // Close the writer end of the pipe to begin processing
- _ = stderrWriter.Close()
- defer func() {
- // Close the reader on return to terminate the git command if necessary
- _ = stderrReader.Close()
- }()
-
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
const prefix = "error: patch failed:"
const errorPrefix = "error: "
const threewayFailed = "Failed to perform three-way merge..."
@@ -449,7 +439,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
scanner := bufio.NewScanner(stderrReader)
for scanner.Scan() {
line := scanner.Text()
- log.Trace("PullRequest[%d].testPullRequestTmpRepoBranchMergeable: stderr: %s", pr.ID, line)
+ log.Trace("PullRequest[%d].checkPullRequestMergeableByTmpRepo: stderr: %s", pr.ID, line)
if strings.HasPrefix(line, prefix) {
conflict = true
filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0])
@@ -474,8 +464,8 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
conflicts.Add(filepath)
}
}
- // only list 10 conflicted files
- if len(conflicts) >= 10 {
+ // only list part of conflicted files
+ if len(conflicts) >= gitrepo.MaxConflictedDetectFiles {
break
}
}
@@ -491,7 +481,7 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
}).
Run(gitRepo.Ctx)
- // 9. Check if the found conflictedfiles is non-zero, "err" could be non-nil, so we should ignore it if we found conflicts.
+ // 9. Check if the found conflicted files is non-zero, "err" could be non-nil, so we should ignore it if we found conflicts.
// Note: `"err" could be non-nil` is due that if enable 3-way merge, it doesn't return any error on found conflicts.
if len(pr.ConflictedFiles) > 0 {
if conflict {
@@ -585,7 +575,7 @@ func CheckUnprotectedFiles(repo *git.Repository, branchName, oldCommitID, newCom
}
// checkPullFilesProtection check if pr changed protected files and save results
-func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) error {
+func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, headRef string) error {
if pr.Status == issues_model.PullRequestStatusEmpty {
pr.ChangedProtectedFiles = nil
return nil
@@ -601,9 +591,12 @@ func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest,
return nil
}
- pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.HeadBranch, pr.MergeBase, "tracking", pb.GetProtectedFilePatterns(), 10, os.Environ())
+ pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.HeadBranch, pr.MergeBase, headRef, pb.GetProtectedFilePatterns(), 10, os.Environ())
if err != nil && !IsErrFilePathProtected(err) {
return err
}
+ if len(pr.ChangedProtectedFiles) > 0 {
+ log.Trace("Found %d protected files changed in PR %s#%d", len(pr.ChangedProtectedFiles), pr.BaseRepo.FullName(), pr.Index)
+ }
return nil
}
diff --git a/services/pull/patch_unmerged.go b/services/pull/patch_unmerged.go
index 0491680313..78a31a8704 100644
--- a/services/pull/patch_unmerged.go
+++ b/services/pull/patch_unmerged.go
@@ -9,7 +9,6 @@ import (
"context"
"fmt"
"io"
- "os"
"strconv"
"strings"
@@ -60,27 +59,11 @@ func readUnmergedLsFileLines(ctx context.Context, tmpBasePath string, outputChan
close(outputChan)
}()
- lsFilesReader, lsFilesWriter, err := os.Pipe()
- if err != nil {
- log.Error("Unable to open stderr pipe: %v", err)
- outputChan <- &lsFileLine{err: fmt.Errorf("unable to open stderr pipe: %w", err)}
- return
- }
- defer func() {
- _ = lsFilesWriter.Close()
- _ = lsFilesReader.Close()
- }()
-
- stderr := &strings.Builder{}
- err = gitcmd.NewCommand("ls-files", "-u", "-z").
- WithDir(tmpBasePath).
- WithStdout(lsFilesWriter).
- WithStderr(stderr).
- WithPipelineFunc(func(_ context.Context, _ context.CancelFunc) error {
- _ = lsFilesWriter.Close()
- defer func() {
- _ = lsFilesReader.Close()
- }()
+ cmd := gitcmd.NewCommand("ls-files", "-u", "-z")
+ lsFilesReader, lsFilesReaderClose := cmd.MakeStdoutPipe()
+ defer lsFilesReaderClose()
+ err := cmd.WithDir(tmpBasePath).
+ WithPipelineFunc(func(gitcmd.Context) error {
bufferedReader := bufio.NewReader(lsFilesReader)
for {
@@ -113,9 +96,9 @@ func readUnmergedLsFileLines(ctx context.Context, tmpBasePath string, outputChan
outputChan <- toemit
}
}).
- Run(ctx)
+ RunWithStderr(ctx)
if err != nil {
- outputChan <- &lsFileLine{err: fmt.Errorf("git ls-files -u -z: %w", gitcmd.ConcatenateError(err, stderr.String()))}
+ outputChan <- &lsFileLine{err: fmt.Errorf("git ls-files -u -z: %w", err)}
}
}
diff --git a/services/pull/pull.go b/services/pull/pull.go
index ecc0b2c7ce..285e489078 100644
--- a/services/pull/pull.go
+++ b/services/pull/pull.go
@@ -4,12 +4,10 @@
package pull
import (
- "bytes"
"context"
"errors"
"fmt"
"io"
- "os"
"regexp"
"strings"
"time"
@@ -34,6 +32,7 @@ import (
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
+ git_service "code.gitea.io/gitea/services/git"
issue_service "code.gitea.io/gitea/services/issue"
notify_service "code.gitea.io/gitea/services/notify"
)
@@ -51,6 +50,7 @@ type NewPullRequestOptions struct {
AssigneeIDs []int64
Reviewers []*user_model.User
TeamReviewers []*organization.Team
+ ProjectID int64
}
// NewPullRequest creates new pull request with labels for repository.
@@ -66,11 +66,13 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
// user should be a collaborator or a member of the organization for base repo
canCreate := issue.Poster.IsAdmin || pr.Flow == issues_model.PullRequestFlowAGit
+ canAssignProject := canCreate
if !canCreate {
canCreate, err := repo_model.IsOwnerMemberCollaborator(ctx, repo, issue.Poster.ID)
if err != nil {
return err
}
+ canAssignProject = canCreate
if !canCreate {
// or user should have write permission in the head repo
@@ -84,19 +86,11 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
if !perm.CanWrite(unit.TypeCode) {
return issues_model.ErrMustCollaborator
}
+ canAssignProject = perm.CanWrite(unit.TypeProjects)
}
}
- prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
- if err != nil {
- if !git_model.IsErrBranchNotExist(err) {
- log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
- }
- return err
- }
- defer cancel()
-
- if err := testPullRequestTmpRepoBranchMergeable(ctx, prCtx, pr); err != nil {
+ if err := checkPullRequestBranchMergeable(ctx, pr); err != nil {
return err
}
@@ -116,9 +110,16 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
assigneeCommentMap[assigneeID] = comment
}
+ if opts.ProjectID > 0 && canAssignProject {
+ if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, issue.Poster, opts.ProjectID, 0); err != nil {
+ return err
+ }
+ }
+
pr.Issue = issue
issue.PullRequest = pr
+ var err error
if pr.Flow == issues_model.PullRequestFlowGithub {
err = PushToBaseRepo(ctx, pr)
} else {
@@ -159,6 +160,9 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error {
// Request reviews, these should be requested before other notifications because they will add request reviews record
// on database
permDoer, err := access_model.GetUserRepoPermission(ctx, repo, issue.Poster)
+ if err != nil {
+ return err
+ }
for _, reviewer := range opts.Reviewers {
if _, err = issue_service.ReviewRequest(ctx, pr.Issue, issue.Poster, &permDoer, reviewer, true); err != nil {
return err
@@ -292,7 +296,7 @@ func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer
pr.BaseBranch = targetBranch
// Refresh patch
- if err := testPullRequestBranchMergeable(pr); err != nil {
+ if err := checkPullRequestBranchMergeable(ctx, pr); err != nil {
return err
}
@@ -510,40 +514,23 @@ func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest,
}
defer cancel()
- tmpRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
- if err != nil {
- return false, "", fmt.Errorf("OpenRepository: %w", err)
- }
- defer tmpRepo.Close()
-
- // Find the merge-base
- mergeBase, _, err = tmpRepo.GetMergeBase("", "base", "tracking")
+ mergeBase, err = gitrepo.MergeBase(ctx, pr.BaseRepo, pr.BaseBranch, pr.GetGitHeadRefName())
if err != nil {
return false, "", fmt.Errorf("GetMergeBase: %w", err)
}
cmd := gitcmd.NewCommand("diff", "--name-only", "-z").AddDynamicArguments(newCommitID, oldCommitID, mergeBase)
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- return false, mergeBase, fmt.Errorf("unable to open pipe for to run diff: %w", err)
- }
- stderr := new(bytes.Buffer)
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
if err := cmd.WithDir(prCtx.tmpBasePath).
- WithStdout(stdoutWriter).
- WithStderr(stderr).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
- defer func() {
- _ = stdoutReader.Close()
- }()
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
return util.IsEmptyReader(stdoutReader)
}).
- Run(ctx); err != nil {
- if err == util.ErrNotEmpty {
+ RunWithStderr(ctx); err != nil {
+ if errors.Is(err, util.ErrNotEmpty) {
return true, mergeBase, nil
}
- err = gitcmd.ConcatenateError(err, stderr.String())
log.Error("Unable to run diff on %s %s %s in tempRepo for PR[%d]%s/%s...%s/%s: Error: %v",
newCommitID, oldCommitID, mergeBase,
@@ -1066,14 +1053,14 @@ func GetPullCommits(ctx context.Context, baseGitRepo *git.Repository, doer *user
if pull.HasMerged {
baseBranch = pull.MergeBase
}
- prInfo, err := GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo, baseBranch, pull.GetGitHeadRefName(), true, false)
+ compareInfo, err := git_service.GetCompareInfo(ctx, pull.BaseRepo, pull.BaseRepo, baseGitRepo, git.RefNameFromBranch(baseBranch), git.RefName(pull.GetGitHeadRefName()), false, false)
if err != nil {
return nil, "", err
}
- commits := make([]CommitInfo, 0, len(prInfo.Commits))
+ commits := make([]CommitInfo, 0, len(compareInfo.Commits))
- for _, commit := range prInfo.Commits {
+ for _, commit := range compareInfo.Commits {
var committerOrAuthorName string
var commitTime time.Time
if commit.Author != nil {
diff --git a/services/pull/review.go b/services/pull/review.go
index 9aeeb4c31d..261cf234b3 100644
--- a/services/pull/review.go
+++ b/services/pull/review.go
@@ -8,8 +8,6 @@ import (
"context"
"errors"
"fmt"
- "io"
- "regexp"
"strings"
"code.gitea.io/gitea/models/db"
@@ -17,6 +15,7 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/optional"
@@ -26,7 +25,15 @@ import (
notify_service "code.gitea.io/gitea/services/notify"
)
-var notEnoughLines = regexp.MustCompile(`fatal: file .* has only \d+ lines?`)
+func isErrBlameNotFoundOrNotEnoughLines(err error) bool {
+ stdErr, ok := gitcmd.ErrorAsStderr(err)
+ if !ok {
+ return false
+ }
+ notFound := strings.HasPrefix(stdErr, "fatal: no such path")
+ notEnoughLines := strings.HasPrefix(stdErr, "fatal: file ") && strings.Contains(stdErr, " has only ") && strings.Contains(stdErr, " lines?")
+ return notFound || notEnoughLines
+}
// ErrDismissRequestOnClosedPR represents an error when an user tries to dismiss a review associated to a closed or merged PR.
type ErrDismissRequestOnClosedPR struct{}
@@ -67,7 +74,7 @@ func lineBlame(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Re
func checkInvalidation(ctx context.Context, c *issues_model.Comment, repo *repo_model.Repository, gitRepo *git.Repository, branch string) error {
// FIXME differentiate between previous and proposed line
commit, err := lineBlame(ctx, repo, gitRepo, branch, c.TreePath, uint(c.UnsignedLine()))
- if err != nil && (strings.Contains(err.Error(), "fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
+ if isErrBlameNotFoundOrNotEnoughLines(err) {
c.Invalidated = true
return issues_model.UpdateCommentInvalidate(ctx, c)
}
@@ -251,7 +258,7 @@ func createCodeComment(ctx context.Context, doer *user_model.User, repo *repo_mo
commit, err := lineBlame(ctx, pr.BaseRepo, gitRepo, head, treePath, uint(line))
if err == nil {
commitID = commit.ID.String()
- } else if !(strings.Contains(err.Error(), "exit status 128 - fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
+ } else if !isErrBlameNotFoundOrNotEnoughLines(err) {
return nil, fmt.Errorf("LineBlame[%s, %s, %s, %d]: %w", pr.GetGitHeadRefName(), gitRepo.Path, treePath, line, err)
}
}
@@ -266,22 +273,12 @@ func createCodeComment(ctx context.Context, doer *user_model.User, repo *repo_mo
if len(commitID) == 0 {
commitID = headCommitID
}
- reader, writer := io.Pipe()
- defer func() {
- _ = reader.Close()
- _ = writer.Close()
- }()
- go func() {
- if err := git.GetRepoRawDiffForFile(gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, treePath, writer); err != nil {
- _ = writer.CloseWithError(fmt.Errorf("GetRawDiffForLine[%s, %s, %s, %s]: %w", gitRepo.Path, pr.MergeBase, headCommitID, treePath, err))
- return
- }
- _ = writer.Close()
- }()
- patch, err = git.CutDiffAroundLine(reader, int64((&issues_model.Comment{Line: line}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines)
+ patch, err = git.GetFileDiffCutAroundLine(
+ gitRepo, pr.MergeBase, headCommitID, treePath,
+ int64((&issues_model.Comment{Line: line}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines,
+ )
if err != nil {
- log.Error("Error whilst generating patch: %v", err)
return nil, err
}
@@ -468,7 +465,7 @@ func DismissReview(ctx context.Context, reviewID, repoID int64, message string,
}
if !isDismiss {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil because this is not a dismiss action
}
if err := review.Issue.LoadAttributes(ctx); err != nil {
diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go
index 113d1cb49e..d0da870241 100644
--- a/services/pull/temp_repo.go
+++ b/services/pull/temp_repo.go
@@ -5,11 +5,11 @@
package pull
import (
+ "bytes"
"context"
"fmt"
"os"
"path/filepath"
- "strings"
git_model "code.gitea.io/gitea/models/git"
issues_model "code.gitea.io/gitea/models/issues"
@@ -23,17 +23,16 @@ import (
// Temporary repos created here use standard branch names to help simplify
// merging code
const (
- baseBranch = "base" // equivalent to pr.BaseBranch
- trackingBranch = "tracking" // equivalent to pr.HeadBranch
- stagingBranch = "staging" // this is used for a working branch
+ tmpRepoBaseBranch = "base" // equivalent to pr.BaseBranch
+ tmpRepoTrackingBranch = "tracking" // equivalent to pr.HeadBranch
+ tmpRepoStagingBranch = "staging" // this is used for a working branch
)
type prTmpRepoContext struct {
context.Context
tmpBasePath string
pr *issues_model.PullRequest
- outbuf *strings.Builder // we keep these around to help reduce needless buffer recreation,
- errbuf *strings.Builder // any use should be preceded by a Reset and preferably after use
+ outbuf *bytes.Buffer // we keep these around to help reduce needless buffer recreation, any use should be preceded by a Reset and preferably after use
}
// PrepareGitCmd prepares a git command with the correct directory, environment, and output buffers
@@ -41,10 +40,7 @@ type prTmpRepoContext struct {
// Do NOT use it with gitcmd.RunStd*() functions, otherwise it will panic
func (ctx *prTmpRepoContext) PrepareGitCmd(cmd *gitcmd.Command) *gitcmd.Command {
ctx.outbuf.Reset()
- ctx.errbuf.Reset()
- return cmd.WithDir(ctx.tmpBasePath).
- WithStdout(ctx.outbuf).
- WithStderr(ctx.errbuf)
+ return cmd.WithDir(ctx.tmpBasePath).WithStdoutBuffer(ctx.outbuf)
}
// createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch
@@ -86,8 +82,7 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
Context: ctx,
tmpBasePath: tmpBasePath,
pr: pr,
- outbuf: &strings.Builder{},
- errbuf: &strings.Builder{},
+ outbuf: &bytes.Buffer{},
}
baseRepoPath := pr.BaseRepo.RepoPath()
@@ -100,7 +95,6 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
}
remoteRepoName := "head_repo"
- baseBranch := "base"
fetchArgs := gitcmd.TrustedCmdArgs{"--no-tags"}
if git.DefaultFeatures().CheckVersionAtLeast("2.25.0") {
@@ -133,25 +127,25 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
}
if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath)).
- Run(ctx); err != nil {
- log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ RunWithStderr(ctx); err != nil {
+ log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), err.Stderr())
cancel()
- return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), err.Stderr())
}
if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("fetch", "origin").AddArguments(fetchArgs...).
- AddDashesAndList(git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+baseBranch, git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+"original_"+baseBranch)).
- Run(ctx); err != nil {
- log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ AddDashesAndList(git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+tmpRepoBaseBranch, git.BranchPrefix+pr.BaseBranch+":"+git.BranchPrefix+"original_"+tmpRepoBaseBranch)).
+ RunWithStderr(ctx); err != nil {
+ log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), err.Stderr())
cancel()
- return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), err.Stderr())
}
- if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch)).
- Run(ctx); err != nil {
- log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+tmpRepoBaseBranch)).
+ RunWithStderr(ctx); err != nil {
+ log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), err.Stderr())
cancel()
- return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), err.Stderr())
}
if err := addCacheRepo(tmpBasePath, headRepoPath); err != nil {
@@ -161,13 +155,12 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
}
if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath)).
- Run(ctx); err != nil {
- log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ RunWithStderr(ctx); err != nil {
+ log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), err.Stderr())
cancel()
- return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), err.Stderr())
}
- trackingBranch := "tracking"
objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
// Fetch head branch
var headBranch string
@@ -178,19 +171,17 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
} else {
headBranch = pr.GetGitHeadRefName()
}
- if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch)).
- Run(ctx); err != nil {
+ if err := prCtx.PrepareGitCmd(gitcmd.NewCommand("fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+tmpRepoTrackingBranch)).
+ RunWithStderr(ctx); err != nil {
cancel()
if exist, _ := git_model.IsBranchExist(ctx, pr.HeadRepo.ID, pr.HeadBranch); !exist {
return nil, nil, git_model.ErrBranchNotExist{
BranchName: pr.HeadBranch,
}
}
- log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
- return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), err.Stderr())
+ return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), err.Stderr())
}
prCtx.outbuf.Reset()
- prCtx.errbuf.Reset()
-
return prCtx, cancel, nil
}
diff --git a/services/pull/update_rebase.go b/services/pull/update_rebase.go
index 6a70c03467..6b90e5d776 100644
--- a/services/pull/update_rebase.go
+++ b/services/pull/update_rebase.go
@@ -28,7 +28,7 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques
defer cancel()
// Determine the old merge-base before the rebase - we use this for LFS push later on
- oldMergeBase, _, _ := gitcmd.NewCommand("merge-base").AddDashesAndList(baseBranch, trackingBranch).
+ oldMergeBase, _, _ := gitcmd.NewCommand("merge-base").AddDashesAndList(tmpRepoBaseBranch, tmpRepoTrackingBranch).
WithDir(mergeCtx.tmpBasePath).RunStdString(ctx)
oldMergeBase = strings.TrimSpace(oldMergeBase)
@@ -42,11 +42,11 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques
// It's questionable about where this should go - either after or before the push
// I think in the interests of data safety - failures to push to the lfs should prevent
// the push as you can always re-rebase.
- if err := LFSPush(ctx, mergeCtx.tmpBasePath, baseBranch, oldMergeBase, &issues_model.PullRequest{
+ if err := LFSPush(ctx, mergeCtx.tmpBasePath, tmpRepoBaseBranch, oldMergeBase, &issues_model.PullRequest{
HeadRepoID: pr.BaseRepoID,
BaseRepoID: pr.HeadRepoID,
}); err != nil {
- log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", baseBranch, oldMergeBase, pr, err)
+ log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", tmpRepoBaseBranch, oldMergeBase, pr, err)
return err
}
}
@@ -65,13 +65,12 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques
}
pushCmd := gitcmd.NewCommand("push", "-f", "head_repo").
- AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)
+ AddDynamicArguments(tmpRepoStagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)
// Push back to the head repository.
// TODO: this cause an api call to "/api/internal/hook/post-receive/...",
// that prevents us from doint the whole merge in one db transaction
mergeCtx.outbuf.Reset()
- mergeCtx.errbuf.Reset()
if err := pushCmd.
WithEnv(repo_module.FullPushingEnvironment(
@@ -83,28 +82,25 @@ func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullReques
pr.Index,
)).
WithDir(mergeCtx.tmpBasePath).
- WithStdout(mergeCtx.outbuf).
- WithStderr(mergeCtx.errbuf).
- Run(ctx); err != nil {
- if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
+ WithStdoutBuffer(mergeCtx.outbuf).
+ RunWithStderr(ctx); err != nil {
+ if strings.Contains(err.Stderr(), "non-fast-forward") {
return &git.ErrPushOutOfDate{
StdOut: mergeCtx.outbuf.String(),
- StdErr: mergeCtx.errbuf.String(),
+ StdErr: err.Stderr(),
Err: err,
}
- } else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
+ } else if strings.Contains(err.Stderr(), "! [remote rejected]") {
err := &git.ErrPushRejected{
StdOut: mergeCtx.outbuf.String(),
- StdErr: mergeCtx.errbuf.String(),
+ StdErr: err.Stderr(),
Err: err,
}
err.GenerateMessage()
return err
}
- return fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
+ return fmt.Errorf("git push: %s", err.Stderr())
}
mergeCtx.outbuf.Reset()
- mergeCtx.errbuf.Reset()
-
return nil
}
diff --git a/services/release/release.go b/services/release/release.go
index a0d3736b44..a482501164 100644
--- a/services/release/release.go
+++ b/services/release/release.go
@@ -371,7 +371,7 @@ func DeleteReleaseByID(ctx context.Context, repo *repo_model.Repository, rel *re
}
}
- if stdout, err := gitrepo.RunCmdString(ctx, repo,
+ if stdout, _, err := gitrepo.RunCmdString(ctx, repo,
gitcmd.NewCommand("tag", "-d").AddDashesAndList(rel.TagName),
); err != nil && !strings.Contains(err.Error(), "not found") {
log.Error("DeleteReleaseByID (git tag -d): %d in %v Failed:\nStdout: %s\nError: %v", rel.ID, repo, stdout, err)
diff --git a/services/repository/adopt.go b/services/repository/adopt.go
index 8d8e59b053..64e7f3f02b 100644
--- a/services/repository/adopt.go
+++ b/services/repository/adopt.go
@@ -23,7 +23,6 @@ import (
"code.gitea.io/gitea/modules/optional"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/util"
notify_service "code.gitea.io/gitea/services/notify"
)
@@ -75,7 +74,7 @@ func AdoptRepository(ctx context.Context, doer, owner *user_model.User, opts Cre
// WARNING: Don't override all later err with local variables
defer func() {
if err != nil {
- // we can not use the ctx because it maybe canceled or timeout
+ // we can not use `ctx` because it may be canceled or timed out
if errDel := deleteFailedAdoptRepository(repo.ID); errDel != nil {
log.Error("Failed to delete repository %s that could not be adopted: %v", repo.FullName(), errDel)
}
@@ -148,11 +147,11 @@ func adoptRepository(ctx context.Context, repo *repo_model.Repository, defaultBr
}
defer gitRepo.Close()
- if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, 0); err != nil {
+ if _, _, err = repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, 0); err != nil {
return fmt.Errorf("SyncRepoBranchesWithRepo: %w", err)
}
- if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
+ if _, err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
return fmt.Errorf("SyncReleasesWithTags: %w", err)
}
@@ -214,13 +213,13 @@ func DeleteUnadoptedRepository(ctx context.Context, doer, u *user_model.User, re
return err
}
- repoPath := repo_model.RepoPath(u.Name, repoName)
- isExist, err := util.IsExist(repoPath)
+ relativePath := repo_model.RelativePath(u.Name, repoName)
+ exist, err := gitrepo.IsRepositoryExist(ctx, repo_model.StorageRepo(relativePath))
if err != nil {
- log.Error("Unable to check if %s exists. Error: %v", repoPath, err)
+ log.Error("Unable to check if %s exists. Error: %v", relativePath, err)
return err
}
- if !isExist {
+ if !exist {
return repo_model.ErrRepoNotExist{
OwnerName: u.Name,
Name: repoName,
@@ -236,7 +235,7 @@ func DeleteUnadoptedRepository(ctx context.Context, doer, u *user_model.User, re
}
}
- return util.RemoveAll(repoPath)
+ return gitrepo.DeleteRepository(ctx, repo_model.StorageRepo(relativePath))
}
type unadoptedRepositories struct {
diff --git a/services/repository/archiver/archiver.go b/services/repository/archiver/archiver.go
index b2ca74871c..07214d0bfa 100644
--- a/services/repository/archiver/archiver.go
+++ b/services/repository/archiver/archiver.go
@@ -8,7 +8,6 @@ import (
"errors"
"fmt"
"io"
- "net/http"
"os"
"strings"
"time"
@@ -16,6 +15,7 @@ import (
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/httplib"
@@ -24,6 +24,7 @@ import (
"code.gitea.io/gitea/modules/queue"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
gitea_context "code.gitea.io/gitea/services/context"
)
@@ -36,58 +37,31 @@ type ArchiveRequest struct {
Repo *repo_model.Repository
Type repo_model.ArchiveType
CommitID string
+ Paths []string
archiveRefShortName string // the ref short name to download the archive, for example: "master", "v1.0.0", "commit id"
}
-// ErrUnknownArchiveFormat request archive format is not supported
-type ErrUnknownArchiveFormat struct {
- RequestNameType string
-}
-
-// Error implements error
-func (err ErrUnknownArchiveFormat) Error() string {
- return "unknown format: " + err.RequestNameType
-}
-
-// Is implements error
-func (ErrUnknownArchiveFormat) Is(err error) bool {
- _, ok := err.(ErrUnknownArchiveFormat)
- return ok
-}
-
-// RepoRefNotFoundError is returned when a requested reference (commit, tag) was not found.
-type RepoRefNotFoundError struct {
- RefShortName string
-}
-
-// Error implements error.
-func (e RepoRefNotFoundError) Error() string {
- return "unrecognized repository reference: " + e.RefShortName
-}
-
-func (e RepoRefNotFoundError) Is(err error) bool {
- _, ok := err.(RepoRefNotFoundError)
- return ok
-}
-
// NewRequest creates an archival request, based on the URI. The
// resulting ArchiveRequest is suitable for being passed to Await()
// if it's determined that the request still needs to be satisfied.
-func NewRequest(repo *repo_model.Repository, gitRepo *git.Repository, archiveRefExt string) (*ArchiveRequest, error) {
+func NewRequest(repo *repo_model.Repository, gitRepo *git.Repository, archiveRefExt string, paths []string) (*ArchiveRequest, error) {
// here the archiveRefShortName is not a clear ref, it could be a tag, branch or commit id
archiveRefShortName, archiveType := repo_model.SplitArchiveNameType(archiveRefExt)
if archiveType == repo_model.ArchiveUnknown {
- return nil, ErrUnknownArchiveFormat{archiveRefExt}
+ return nil, util.NewInvalidArgumentErrorf("unknown format: %s", archiveRefExt)
+ }
+ if archiveType == repo_model.ArchiveBundle && len(paths) != 0 {
+ return nil, util.NewInvalidArgumentErrorf("cannot specify paths when requesting a bundle")
}
// Get corresponding commit.
commitID, err := gitRepo.ConvertToGitID(archiveRefShortName)
if err != nil {
- return nil, RepoRefNotFoundError{RefShortName: archiveRefShortName}
+ return nil, util.NewNotExistErrorf("unrecognized repository reference: %s", archiveRefShortName)
}
- r := &ArchiveRequest{Repo: repo, archiveRefShortName: archiveRefShortName, Type: archiveType}
+ r := &ArchiveRequest{Repo: repo, archiveRefShortName: archiveRefShortName, Type: archiveType, Paths: paths}
r.CommitID = commitID.String()
return r, nil
}
@@ -159,6 +133,7 @@ func (aReq *ArchiveRequest) Stream(ctx context.Context, w io.Writer) error {
w,
setting.Repository.PrefixArchiveFiles,
aReq.CommitID,
+ aReq.Paths,
)
}
@@ -181,7 +156,7 @@ func doArchive(ctx context.Context, r *ArchiveRequest) (*repo_model.RepoArchiver
// FIXME: If another process are generating it, we think it's not ready and just return
// Or we should wait until the archive generated.
if archiver.Status == repo_model.ArchiverGenerating {
- return nil, nil
+ return nil, nil //nolint:nilnil // return nil because the archive is still being generated
}
} else {
archiver = &repo_model.RepoArchiver{
@@ -339,7 +314,7 @@ func DeleteRepositoryArchives(ctx context.Context) error {
return storage.Clean(storage.RepoArchives)
}
-func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) {
+func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) error {
// Add nix format link header so tarballs lock correctly:
// https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
ctx.Resp.Header().Add("Link", fmt.Sprintf(`<%s/archive/%s.%s?rev=%s>; rel="immutable"`,
@@ -350,20 +325,22 @@ func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) {
))
downloadName := archiveReq.Repo.Name + "-" + archiveReq.GetArchiveName()
- if setting.Repository.StreamArchives {
+ if setting.Repository.StreamArchives || len(archiveReq.Paths) > 0 {
+ // the header must be set before starting streaming even an error would occur,
+ // because errors may happen in git command and such cases aren't in our control.
httplib.ServeSetHeaders(ctx.Resp, &httplib.ServeHeaderOptions{Filename: downloadName})
if err := archiveReq.Stream(ctx, ctx.Resp); err != nil && !ctx.Written() {
- log.Error("Archive %v streaming failed: %v", archiveReq, err)
- ctx.HTTPError(http.StatusInternalServerError)
+ if gitcmd.StderrHasPrefix(err, "fatal: pathspec") {
+ return util.NewInvalidArgumentErrorf("path doesn't exist or is invalid")
+ }
+ return fmt.Errorf("archive repo %s: failed to stream: %w", archiveReq.Repo.FullName(), err)
}
- return
+ return nil
}
archiver, err := archiveReq.Await(ctx)
if err != nil {
- log.Error("Archive %v await failed: %v", archiveReq, err)
- ctx.HTTPError(http.StatusInternalServerError)
- return
+ return fmt.Errorf("archive repo %s: failed to await: %w", archiveReq.Repo.FullName(), err)
}
rPath := archiver.RelativePath()
@@ -372,15 +349,13 @@ func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) {
u, err := storage.RepoArchives.URL(rPath, downloadName, ctx.Req.Method, nil)
if u != nil && err == nil {
ctx.Redirect(u.String())
- return
+ return nil
}
}
fr, err := storage.RepoArchives.Open(rPath)
if err != nil {
- log.Error("Archive %v open file failed: %v", archiveReq, err)
- ctx.HTTPError(http.StatusInternalServerError)
- return
+ return fmt.Errorf("archive repo %s: failed to open archive file: %w", archiveReq.Repo.FullName(), err)
}
defer fr.Close()
@@ -388,4 +363,5 @@ func ServeRepoArchive(ctx *gitea_context.Base, archiveReq *ArchiveRequest) {
Filename: downloadName,
LastModified: archiver.CreatedUnix.AsLocalTime(),
})
+ return nil
}
diff --git a/services/repository/archiver/archiver_test.go b/services/repository/archiver/archiver_test.go
index ae5232f5a1..6cc1856a9c 100644
--- a/services/repository/archiver/archiver_test.go
+++ b/services/repository/archiver/archiver_test.go
@@ -8,11 +8,13 @@ import (
"time"
"code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/contexttest"
_ "code.gitea.io/gitea/models/actions"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
func TestMain(m *testing.M) {
@@ -29,47 +31,47 @@ func TestArchive_Basic(t *testing.T) {
contexttest.LoadGitRepo(t, ctx)
defer ctx.Repo.GitRepo.Close()
- bogusReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip")
+ bogusReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip", nil)
assert.NoError(t, err)
assert.NotNil(t, bogusReq)
assert.Equal(t, firstCommit+".zip", bogusReq.GetArchiveName())
// Check a series of bogus requests.
// Step 1, valid commit with a bad extension.
- bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".unknown")
+ bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".unknown", nil)
assert.Error(t, err)
assert.Nil(t, bogusReq)
// Step 2, missing commit.
- bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "dbffff.zip")
+ bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "dbffff.zip", nil)
assert.Error(t, err)
assert.Nil(t, bogusReq)
// Step 3, doesn't look like branch/tag/commit.
- bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "db.zip")
+ bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "db.zip", nil)
assert.Error(t, err)
assert.Nil(t, bogusReq)
- bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "master.zip")
+ bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "master.zip", nil)
assert.NoError(t, err)
assert.NotNil(t, bogusReq)
assert.Equal(t, "master.zip", bogusReq.GetArchiveName())
- bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "test/archive.zip")
+ bogusReq, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, "test/archive.zip", nil)
assert.NoError(t, err)
assert.NotNil(t, bogusReq)
assert.Equal(t, "test-archive.zip", bogusReq.GetArchiveName())
// Now two valid requests, firstCommit with valid extensions.
- zipReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip")
+ zipReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip", nil)
assert.NoError(t, err)
assert.NotNil(t, zipReq)
- tgzReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".tar.gz")
+ tgzReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".tar.gz", nil)
assert.NoError(t, err)
assert.NotNil(t, tgzReq)
- secondReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, secondCommit+".bundle")
+ secondReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, secondCommit+".bundle", nil)
assert.NoError(t, err)
assert.NotNil(t, secondReq)
@@ -89,7 +91,7 @@ func TestArchive_Basic(t *testing.T) {
// Sleep two seconds to make sure the queue doesn't change.
time.Sleep(2 * time.Second)
- zipReq2, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip")
+ zipReq2, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip", nil)
assert.NoError(t, err)
// This zipReq should match what's sitting in the queue, as we haven't
// let it release yet. From the consumer's point of view, this looks like
@@ -104,12 +106,12 @@ func TestArchive_Basic(t *testing.T) {
// Now we'll submit a request and TimedWaitForCompletion twice, before and
// after we release it. We should trigger both the timeout and non-timeout
// cases.
- timedReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, secondCommit+".tar.gz")
+ timedReq, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, secondCommit+".tar.gz", nil)
assert.NoError(t, err)
assert.NotNil(t, timedReq)
doArchive(t.Context(), timedReq)
- zipReq2, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip")
+ zipReq2, err = NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".zip", nil)
assert.NoError(t, err)
// Now, we're guaranteed to have released the original zipReq from the queue.
// Ensure that we don't get handed back the released entry somehow, but they
@@ -124,9 +126,13 @@ func TestArchive_Basic(t *testing.T) {
// Ideally, the extension would match what we originally requested.
assert.NotEqual(t, zipReq.GetArchiveName(), tgzReq.GetArchiveName())
assert.NotEqual(t, zipReq.GetArchiveName(), secondReq.GetArchiveName())
-}
-func TestErrUnknownArchiveFormat(t *testing.T) {
- err := ErrUnknownArchiveFormat{RequestNameType: "xxx"}
- assert.ErrorIs(t, err, ErrUnknownArchiveFormat{})
+ t.Run("BadPath", func(t *testing.T) {
+ badRequest, err := NewRequest(ctx.Repo.Repository, ctx.Repo.GitRepo, firstCommit+".tar.gz", []string{"not-a-path"})
+ require.NoError(t, err)
+ err = ServeRepoArchive(ctx.Base, badRequest)
+ require.Error(t, err)
+ assert.ErrorIs(t, err, util.ErrInvalidArgument)
+ assert.ErrorContains(t, err, "path doesn't exist or is invalid")
+ })
}
diff --git a/services/repository/branch.go b/services/repository/branch.go
index 142073eabe..b3310b2e68 100644
--- a/services/repository/branch.go
+++ b/services/repository/branch.go
@@ -264,12 +264,12 @@ func checkBranchName(ctx context.Context, repo *repo_model.Repository, name stri
return git_model.ErrBranchAlreadyExists{
BranchName: name,
}
- // If branchRefName like a/b but we want to create a branch named a then we have a conflict
+ // If branchRefName like "a/b" but we want to create a branch named a then we have a conflict
case strings.HasPrefix(branchRefName, name+"/"):
return git_model.ErrBranchNameConflict{
BranchName: branchRefName,
}
- // Conversely if branchRefName like a but we want to create a branch named a/b then we also have a conflict
+ // Conversely if branchRefName like "a" but we want to create a branch named "a/b" then we also have a conflict
case strings.HasPrefix(name, branchRefName+"/"):
return git_model.ErrBranchNameConflict{
BranchName: branchRefName,
@@ -281,7 +281,6 @@ func checkBranchName(ctx context.Context, repo *repo_model.Repository, name stri
}
return nil
})
-
return err
}
@@ -443,6 +442,15 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, doer *user_m
}
}
+ // We also need to check if "to" matches with a protected branch rule.
+ rule, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, to)
+ if err != nil {
+ return "", err
+ }
+ if rule != nil && !rule.CanUserPush(ctx, doer) {
+ return "", git_model.ErrBranchIsProtected
+ }
+
if err := git_model.RenameBranch(ctx, repo, from, to, func(ctx context.Context, isDefault bool) error {
err2 := gitrepo.RenameBranch(ctx, repo, from, to)
if err2 != nil {
@@ -539,10 +547,11 @@ func UpdateBranch(ctx context.Context, repo *repo_model.Repository, gitRepo *git
return gitrepo.Push(ctx, repo, repo, pushOpts)
}
-var ErrBranchIsDefault = util.ErrorWrap(util.ErrPermissionDenied, "branch is default")
+var ErrBranchIsDefault = util.ErrorWrap(util.ErrPermissionDenied, "branch is default or pull request target")
func CanDeleteBranch(ctx context.Context, repo *repo_model.Repository, branchName string, doer *user_model.User) error {
- if branchName == repo.DefaultBranch {
+ unitPRConfig := repo.MustGetUnit(ctx, unit.TypePullRequests).PullRequestsConfig()
+ if branchName == repo.DefaultBranch || branchName == unitPRConfig.DefaultTargetBranch {
return ErrBranchIsDefault
}
diff --git a/services/repository/check.go b/services/repository/check.go
index 57d627c63d..4ae49d81f6 100644
--- a/services/repository/check.go
+++ b/services/repository/check.go
@@ -88,10 +88,10 @@ func GitGcRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Du
command := gitcmd.NewCommand("gc").AddArguments(args...)
var stdout string
var err error
- stdout, err = gitrepo.RunCmdString(ctx, repo, command)
+ stdout, _, err = gitrepo.RunCmdString(ctx, repo, command)
if err != nil {
log.Error("Repository garbage collection failed for %-v. Stdout: %s\nError: %v", repo, stdout, err)
- desc := fmt.Sprintf("Repository garbage collection failed for %s. Stdout: %s\nError: %v", repo.RelativePath(), stdout, err)
+ desc := fmt.Sprintf("Repository garbage collection failed (%s). Stdout: %s\nError: %v", repo.FullName(), stdout, err)
if err := system_model.CreateRepositoryNotice(desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
@@ -101,7 +101,7 @@ func GitGcRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Du
// Now update the size of the repository
if err := repo_module.UpdateRepoSize(ctx, repo); err != nil {
log.Error("Updating size as part of garbage collection failed for %-v. Stdout: %s\nError: %v", repo, stdout, err)
- desc := fmt.Sprintf("Updating size as part of garbage collection failed for %s. Stdout: %s\nError: %v", repo.RelativePath(), stdout, err)
+ desc := fmt.Sprintf("Updating size as part of garbage collection failed (%s). Stdout: %s\nError: %v", repo.FullName(), stdout, err)
if err := system_model.CreateRepositoryNotice(desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
@@ -163,7 +163,7 @@ func DeleteMissingRepositories(ctx context.Context, doer *user_model.User) error
log.Trace("Deleting %d/%d...", repo.OwnerID, repo.ID)
if err := DeleteRepositoryDirectly(ctx, repo.ID); err != nil {
log.Error("Failed to DeleteRepository %-v: Error: %v", repo, err)
- if err2 := system_model.CreateRepositoryNotice("Failed to DeleteRepository %s [%d]: Error: %v", repo.FullName(), repo.ID, err); err2 != nil {
+ if err2 := system_model.CreateRepositoryNotice("Failed to DeleteRepository (%s) [%d]: Error: %v", repo.FullName(), repo.ID, err); err2 != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
}
@@ -191,7 +191,7 @@ func ReinitMissingRepositories(ctx context.Context) error {
log.Trace("Initializing %d/%d...", repo.OwnerID, repo.ID)
if err := gitrepo.InitRepository(ctx, repo, repo.ObjectFormatName); err != nil {
log.Error("Unable (re)initialize repository %d at %s. Error: %v", repo.ID, repo.RelativePath(), err)
- if err2 := system_model.CreateRepositoryNotice("InitRepository [%d]: %v", repo.ID, err); err2 != nil {
+ if err2 := system_model.CreateRepositoryNotice("InitRepository (%s) [%d]: %v", repo.FullName(), repo.ID, err); err2 != nil {
log.Error("CreateRepositoryNotice: %v", err2)
}
}
diff --git a/services/repository/collaboration.go b/services/repository/collaboration.go
index 53b3c2e203..cb56d90ae2 100644
--- a/services/repository/collaboration.go
+++ b/services/repository/collaboration.go
@@ -120,6 +120,11 @@ func ReconsiderWatches(ctx context.Context, repo *repo_model.Repository, user *u
return err
}
+ // Remove all stopwatches a user has running in the repository
+ if err := issues_model.RemoveStopwatchesByRepoID(ctx, user.ID, repo.ID); err != nil {
+ return err
+ }
+
// Remove all IssueWatches a user has subscribed to in the repository
return issues_model.RemoveIssueWatchersByRepoID(ctx, user.ID, repo.ID)
}
diff --git a/services/repository/collaboration_test.go b/services/repository/collaboration_test.go
index 5e33c50366..56d9d72e0a 100644
--- a/services/repository/collaboration_test.go
+++ b/services/repository/collaboration_test.go
@@ -6,7 +6,10 @@ package repository
import (
"testing"
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
@@ -32,8 +35,8 @@ func TestRepository_AddCollaborator(t *testing.T) {
func TestRepository_DeleteCollaboration(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
- user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
- repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 4})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 22})
assert.NoError(t, repo.LoadOwner(t.Context()))
assert.NoError(t, DeleteCollaboration(t.Context(), repo, user))
@@ -44,3 +47,50 @@ func TestRepository_DeleteCollaboration(t *testing.T) {
unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: repo.ID})
}
+
+func TestRepository_DeleteCollaborationRemovesSubscriptionsAndStopwatches(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ ctx := t.Context()
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 22})
+ assert.NoError(t, repo.LoadOwner(ctx))
+ assert.NoError(t, repo_model.WatchRepo(ctx, user, repo, true))
+
+ hasAccess, err := access_model.HasAnyUnitAccess(ctx, user.ID, repo)
+ assert.NoError(t, err)
+ assert.True(t, hasAccess)
+
+ issueCount, err := db.GetEngine(ctx).Where("repo_id=?", repo.ID).Count(new(issues_model.Issue))
+ assert.NoError(t, err)
+ tempIssue := &issues_model.Issue{
+ RepoID: repo.ID,
+ Index: issueCount + 1,
+ PosterID: repo.OwnerID,
+ Title: "temp issue",
+ Content: "temp",
+ }
+ assert.NoError(t, db.Insert(ctx, tempIssue))
+ assert.NoError(t, issues_model.CreateOrUpdateIssueWatch(ctx, user.ID, tempIssue.ID, true))
+ ok, err := issues_model.CreateIssueStopwatch(ctx, user, tempIssue)
+ assert.NoError(t, err)
+ assert.True(t, ok)
+
+ assert.NoError(t, DeleteCollaboration(ctx, repo, user))
+
+ hasAccess, err = access_model.HasAnyUnitAccess(ctx, user.ID, repo)
+ assert.NoError(t, err)
+ assert.False(t, hasAccess)
+
+ watch, err := repo_model.GetWatch(ctx, user.ID, repo.ID)
+ assert.NoError(t, err)
+ assert.False(t, repo_model.IsWatchMode(watch.Mode))
+
+ _, exists, err := issues_model.GetIssueWatch(ctx, user.ID, tempIssue.ID)
+ assert.NoError(t, err)
+ assert.False(t, exists)
+
+ hasStopwatch, _, _, err := issues_model.HasUserStopwatch(ctx, user.ID)
+ assert.NoError(t, err)
+ assert.False(t, hasStopwatch)
+}
diff --git a/services/repository/contributors_graph.go b/services/repository/contributors_graph.go
index 2c5c7c604f..c9cc0dcd0b 100644
--- a/services/repository/contributors_graph.go
+++ b/services/repository/contributors_graph.go
@@ -8,7 +8,6 @@ import (
"context"
"errors"
"fmt"
- "os"
"strconv"
"strings"
"sync"
@@ -117,26 +116,17 @@ func getExtendedCommitStats(repo *git.Repository, revision string /*, limit int
if err != nil {
return nil, err
}
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- return nil, err
- }
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
gitCmd := gitcmd.NewCommand("log", "--shortstat", "--no-merges", "--pretty=format:---%n%aN%n%aE%n%as", "--reverse")
// AddOptionFormat("--max-count=%d", limit)
gitCmd.AddDynamicArguments(baseCommit.ID.String())
+ stdoutReader, stdoutReaderClose := gitCmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+
var extendedCommitStats []*ExtendedCommitStats
- stderr := new(strings.Builder)
err = gitCmd.WithDir(repo.Path).
- WithStdout(stdoutWriter).
- WithStderr(stderr).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
scanner := bufio.NewScanner(stdoutReader)
for scanner.Scan() {
@@ -188,12 +178,11 @@ func getExtendedCommitStats(repo *git.Repository, revision string /*, limit int
}
extendedCommitStats = append(extendedCommitStats, res)
}
- _ = stdoutReader.Close()
return nil
}).
- Run(repo.Ctx)
+ RunWithStderr(repo.Ctx)
if err != nil {
- return nil, fmt.Errorf("Failed to get ContributorsCommitStats for repository.\nError: %w\nStderr: %s", err, stderr)
+ return nil, fmt.Errorf("ContributorsCommitStats: %w", err)
}
return extendedCommitStats, nil
diff --git a/services/repository/create.go b/services/repository/create.go
index 7439fc8f08..cbdc9cca76 100644
--- a/services/repository/create.go
+++ b/services/repository/create.go
@@ -265,8 +265,8 @@ func CreateRepositoryDirectly(ctx context.Context, doer, owner *user_model.User,
// WARNING: Don't override all later err with local variables
defer func() {
if err != nil {
- // we can not use the ctx because it maybe canceled or timeout
- cleanupRepository(repo.ID)
+ // we can not use `ctx` because it may be canceled or timed out
+ cleanupRepository(repo)
}
}()
@@ -315,7 +315,7 @@ func CreateRepositoryDirectly(ctx context.Context, doer, owner *user_model.User,
licenses = append(licenses, opts.License)
var stdout string
- stdout, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse", "HEAD"))
+ stdout, _, err = gitrepo.RunCmdString(ctx, repo, gitcmd.NewCommand("rev-parse", "HEAD"))
if err != nil {
log.Error("CreateRepository(git rev-parse HEAD) in %v: Stdout: %s\nError: %v", repo, stdout, err)
return nil, fmt.Errorf("CreateRepository(git rev-parse HEAD): %w", err)
@@ -461,11 +461,11 @@ func createRepositoryInDB(ctx context.Context, doer, u *user_model.User, repo *r
return nil
}
-func cleanupRepository(repoID int64) {
- if errDelete := DeleteRepositoryDirectly(graceful.GetManager().ShutdownContext(), repoID); errDelete != nil {
+func cleanupRepository(repo *repo_model.Repository) {
+ ctx := graceful.GetManager().ShutdownContext()
+ if errDelete := DeleteRepositoryDirectly(ctx, repo.ID); errDelete != nil {
log.Error("cleanupRepository failed: %v", errDelete)
- // add system notice
- if err := system_model.CreateRepositoryNotice("DeleteRepositoryDirectly failed when cleanup repository: %v", errDelete); err != nil {
+ if err := system_model.CreateRepositoryNotice("DeleteRepositoryDirectly failed when cleanup repository (%s): %v", repo.FullName(), errDelete); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
}
@@ -476,7 +476,7 @@ func updateGitRepoAfterCreate(ctx context.Context, repo *repo_model.Repository)
return fmt.Errorf("checkDaemonExportOK: %w", err)
}
- if stdout, err := gitrepo.RunCmdString(ctx, repo,
+ if stdout, _, err := gitrepo.RunCmdString(ctx, repo,
gitcmd.NewCommand("update-server-info")); err != nil {
log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err)
return fmt.Errorf("CreateRepository(git update-server-info): %w", err)
diff --git a/services/repository/delete.go b/services/repository/delete.go
index 040280c8a8..aa16a968f7 100644
--- a/services/repository/delete.go
+++ b/services/repository/delete.go
@@ -309,7 +309,7 @@ func DeleteRepositoryDirectly(ctx context.Context, repoID int64, ignoreOrgTeams
// Remove repository files.
if err := gitrepo.DeleteRepository(ctx, repo); err != nil {
- desc := fmt.Sprintf("Delete repository files [%s]: %v", repo.FullName(), err)
+ desc := fmt.Sprintf("Delete repository files (%s): %v", repo.FullName(), err)
if err = system_model.CreateNotice(graceful.GetManager().ShutdownContext(), system_model.NoticeRepository, desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
}
@@ -317,7 +317,7 @@ func DeleteRepositoryDirectly(ctx context.Context, repoID int64, ignoreOrgTeams
// Remove wiki files if it exists.
if err := gitrepo.DeleteRepository(ctx, repo.WikiStorageRepo()); err != nil {
- desc := fmt.Sprintf("Delete wiki repository files [%s]: %v", repo.FullName(), err)
+ desc := fmt.Sprintf("Delete wiki repository files (%s): %v", repo.FullName(), err)
// Note we use the db.DefaultContext here rather than passing in a context as the context may be cancelled
if err = system_model.CreateNotice(graceful.GetManager().ShutdownContext(), system_model.NoticeRepository, desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
diff --git a/services/repository/files/content.go b/services/repository/files/content.go
index d32d3041c2..fc0e00a1a7 100644
--- a/services/repository/files/content.go
+++ b/services/repository/files/content.go
@@ -173,18 +173,18 @@ func getFileContentsByEntryInternal(ctx context.Context, repo *repo_model.Reposi
}
if opts.IncludeCommitMetadata {
- contentsResponse.LastCommitSHA = util.ToPointer(lastCommit.ID.String())
+ contentsResponse.LastCommitSHA = new(lastCommit.ID.String())
// GitHub doesn't have these fields in the response, but we could follow other similar APIs to name them
// https://docs.github.com/en/rest/commits/commits?apiVersion=2022-11-28#list-commits
if lastCommit.Committer != nil {
- contentsResponse.LastCommitterDate = util.ToPointer(lastCommit.Committer.When)
+ contentsResponse.LastCommitterDate = new(lastCommit.Committer.When)
}
if lastCommit.Author != nil {
- contentsResponse.LastAuthorDate = util.ToPointer(lastCommit.Author.When)
+ contentsResponse.LastAuthorDate = new(lastCommit.Author.When)
}
}
if opts.IncludeCommitMessage {
- contentsResponse.LastCommitMessage = util.ToPointer(lastCommit.Message())
+ contentsResponse.LastCommitMessage = new(lastCommit.Message())
}
}
@@ -281,7 +281,7 @@ func GetBlobBySHA(repo *repo_model.Repository, gitRepo *git.Repository, sha stri
return nil, err
}
- ret.Encoding, ret.Content = util.ToPointer("base64"), &content
+ ret.Encoding, ret.Content = new("base64"), &content
if originContent != nil {
ret.LfsOid, ret.LfsSize = parsePossibleLfsPointerBuffer(strings.NewReader(originContent.String()))
}
diff --git a/services/repository/files/content_test.go b/services/repository/files/content_test.go
index d72f918074..dda5572ad0 100644
--- a/services/repository/files/content_test.go
+++ b/services/repository/files/content_test.go
@@ -8,7 +8,6 @@ import (
"code.gitea.io/gitea/models/unittest"
api "code.gitea.io/gitea/modules/structs"
- "code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/contexttest"
_ "code.gitea.io/gitea/models/actions"
@@ -37,8 +36,8 @@ func TestGetContents(t *testing.T) {
ctx.SetPathParam("sha", sha)
gbr, err := GetBlobBySHA(ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.PathParam("sha"))
expectedGBR := &api.GitBlobResponse{
- Content: util.ToPointer("dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK"),
- Encoding: util.ToPointer("base64"),
+ Content: new("dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK"),
+ Encoding: new("base64"),
URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/65f1bf27bc3bf70f64657658635e66094edbcb4d",
SHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
Size: 180,
diff --git a/services/repository/files/diff.go b/services/repository/files/diff.go
index 50d01f9d7c..aa4b55a307 100644
--- a/services/repository/files/diff.go
+++ b/services/repository/files/diff.go
@@ -12,7 +12,7 @@ import (
)
// GetDiffPreview produces and returns diff result of a file which is not yet committed.
-func GetDiffPreview(ctx context.Context, repo *repo_model.Repository, branch, treePath, content string) (*gitdiff.Diff, error) {
+func GetDiffPreview(ctx context.Context, repo *repo_model.Repository, branch, treePath, oldContent, newContent string) (*gitdiff.Diff, error) {
if branch == "" {
branch = repo.DefaultBranch
}
@@ -29,7 +29,7 @@ func GetDiffPreview(ctx context.Context, repo *repo_model.Repository, branch, tr
}
// Add the object to the database
- objectHash, err := t.HashObjectAndWrite(ctx, strings.NewReader(content))
+ objectHash, err := t.HashObjectAndWrite(ctx, strings.NewReader(newContent))
if err != nil {
return nil, err
}
@@ -38,5 +38,5 @@ func GetDiffPreview(ctx context.Context, repo *repo_model.Repository, branch, tr
if err := t.AddObjectToIndex(ctx, "100644", objectHash, treePath); err != nil {
return nil, err
}
- return t.DiffIndex(ctx)
+ return t.DiffIndex(ctx, oldContent, newContent)
}
diff --git a/services/repository/files/diff_test.go b/services/repository/files/diff_test.go
index ae702e4189..5295879621 100644
--- a/services/repository/files/diff_test.go
+++ b/services/repository/files/diff_test.go
@@ -27,8 +27,30 @@ func TestGetDiffPreview(t *testing.T) {
branch := ctx.Repo.Repository.DefaultBranch
treePath := "README.md"
+ oldContent := "# repo1\n\nDescription for repo1"
content := "# repo1\n\nDescription for repo1\nthis is a new line"
+ t.Run("Errors", func(t *testing.T) {
+ t.Run("empty repo", func(t *testing.T) {
+ diff, err := GetDiffPreview(ctx, &repo_model.Repository{}, branch, treePath, oldContent, content)
+ assert.Nil(t, diff)
+ assert.EqualError(t, err, "repository does not exist [id: 0, uid: 0, owner_name: , name: ]")
+ })
+
+ t.Run("bad branch", func(t *testing.T) {
+ badBranch := "bad_branch"
+ diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, badBranch, treePath, oldContent, content)
+ assert.Nil(t, diff)
+ assert.EqualError(t, err, "branch does not exist [name: "+badBranch+"]")
+ })
+
+ t.Run("empty treePath", func(t *testing.T) {
+ diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, branch, "", oldContent, content)
+ assert.Nil(t, diff)
+ assert.EqualError(t, err, "path is invalid [path: ]")
+ })
+ })
+
expectedDiff := &gitdiff.Diff{
Files: []*gitdiff.DiffFile{
{
@@ -112,56 +134,22 @@ func TestGetDiffPreview(t *testing.T) {
}
t.Run("with given branch", func(t *testing.T) {
- diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, branch, treePath, content)
+ diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, branch, treePath, oldContent, content)
assert.NoError(t, err)
expectedBs, err := json.Marshal(expectedDiff)
assert.NoError(t, err)
bs, err := json.Marshal(diff)
assert.NoError(t, err)
- assert.Equal(t, string(expectedBs), string(bs))
+ assert.JSONEq(t, string(expectedBs), string(bs))
})
t.Run("empty branch, same results", func(t *testing.T) {
- diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, "", treePath, content)
+ diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, "", treePath, oldContent, content)
assert.NoError(t, err)
expectedBs, err := json.Marshal(expectedDiff)
assert.NoError(t, err)
bs, err := json.Marshal(diff)
assert.NoError(t, err)
- assert.Equal(t, expectedBs, bs)
- })
-}
-
-func TestGetDiffPreviewErrors(t *testing.T) {
- unittest.PrepareTestEnv(t)
- ctx, _ := contexttest.MockContext(t, "user2/repo1")
- ctx.SetPathParam("id", "1")
- contexttest.LoadRepo(t, ctx, 1)
- contexttest.LoadRepoCommit(t, ctx)
- contexttest.LoadUser(t, ctx, 2)
- contexttest.LoadGitRepo(t, ctx)
- defer ctx.Repo.GitRepo.Close()
-
- branch := ctx.Repo.Repository.DefaultBranch
- treePath := "README.md"
- content := "# repo1\n\nDescription for repo1\nthis is a new line"
-
- t.Run("empty repo", func(t *testing.T) {
- diff, err := GetDiffPreview(ctx, &repo_model.Repository{}, branch, treePath, content)
- assert.Nil(t, diff)
- assert.EqualError(t, err, "repository does not exist [id: 0, uid: 0, owner_name: , name: ]")
- })
-
- t.Run("bad branch", func(t *testing.T) {
- badBranch := "bad_branch"
- diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, badBranch, treePath, content)
- assert.Nil(t, diff)
- assert.EqualError(t, err, "branch does not exist [name: "+badBranch+"]")
- })
-
- t.Run("empty treePath", func(t *testing.T) {
- diff, err := GetDiffPreview(ctx, ctx.Repo.Repository, branch, "", content)
- assert.Nil(t, diff)
- assert.EqualError(t, err, "path is invalid [path: ]")
+ assert.JSONEq(t, string(expectedBs), string(bs))
})
}
diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go
index ee567ecd37..10f923f2e1 100644
--- a/services/repository/files/patch.go
+++ b/services/repository/files/patch.go
@@ -164,20 +164,15 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user
}
}
- stdout := &strings.Builder{}
- stderr := &strings.Builder{}
-
cmdApply := gitcmd.NewCommand("apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary")
if git.DefaultFeatures().CheckVersionAtLeast("2.32") {
cmdApply.AddArguments("-3")
}
if err := cmdApply.WithDir(t.basePath).
- WithStdout(stdout).
- WithStderr(stderr).
- WithStdin(strings.NewReader(opts.Content)).
- Run(ctx); err != nil {
- return nil, fmt.Errorf("Error: Stdout: %s\nStderr: %s\nErr: %w", stdout.String(), stderr.String(), err)
+ WithStdinBytes([]byte(opts.Content)).
+ RunWithStderr(ctx); err != nil {
+ return nil, fmt.Errorf("git apply error: %w", err)
}
// Now write the tree
diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go
index b605236c03..68d1df24b7 100644
--- a/services/repository/files/temp_repo.go
+++ b/services/repository/files/temp_repo.go
@@ -98,7 +98,7 @@ func (t *TemporaryUploadRepository) Init(ctx context.Context, objectFormatName s
// SetDefaultIndex sets the git index to our HEAD
func (t *TemporaryUploadRepository) SetDefaultIndex(ctx context.Context) error {
- if _, _, err := gitcmd.NewCommand("read-tree", "HEAD").WithDir(t.basePath).RunStdString(ctx); err != nil {
+ if err := gitcmd.NewCommand("read-tree", "HEAD").WithDir(t.basePath).RunWithStderr(ctx); err != nil {
return fmt.Errorf("SetDefaultIndex: %w", err)
}
return nil
@@ -106,7 +106,7 @@ func (t *TemporaryUploadRepository) SetDefaultIndex(ctx context.Context) error {
// RefreshIndex looks at the current index and checks to see if merges or updates are needed by checking stat() information.
func (t *TemporaryUploadRepository) RefreshIndex(ctx context.Context) error {
- if _, _, err := gitcmd.NewCommand("update-index", "--refresh").WithDir(t.basePath).RunStdString(ctx); err != nil {
+ if err := gitcmd.NewCommand("update-index", "--refresh").WithDir(t.basePath).RunWithStderr(ctx); err != nil {
return fmt.Errorf("RefreshIndex: %w", err)
}
return nil
@@ -115,16 +115,11 @@ func (t *TemporaryUploadRepository) RefreshIndex(ctx context.Context) error {
// LsFiles checks if the given filename arguments are in the index
func (t *TemporaryUploadRepository) LsFiles(ctx context.Context, filenames ...string) ([]string, error) {
stdOut := new(bytes.Buffer)
- stdErr := new(bytes.Buffer)
-
if err := gitcmd.NewCommand("ls-files", "-z").AddDashesAndList(filenames...).
WithDir(t.basePath).
- WithStdout(stdOut).
- WithStderr(stdErr).
- Run(ctx); err != nil {
- log.Error("Unable to run git ls-files for temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String())
- err = fmt.Errorf("Unable to run git ls-files for temporary repo of: %s Error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String())
- return nil, err
+ WithStdoutBuffer(stdOut).
+ RunWithStderr(ctx); err != nil {
+ return nil, fmt.Errorf("unable to run git ls-files for temporary repo of: %s, error: %w", t.repo.FullName(), err)
}
fileList := make([]string, 0, len(filenames))
@@ -149,8 +144,6 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, fi
if err != nil {
return fmt.Errorf("unable to get object format for temporary repo: %q, error: %w", t.repo.FullName(), err)
}
- stdOut := new(bytes.Buffer)
- stdErr := new(bytes.Buffer)
stdIn := new(bytes.Buffer)
for _, file := range filenames {
if file != "" {
@@ -162,11 +155,9 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, fi
if err := gitcmd.NewCommand("update-index", "--remove", "-z", "--index-info").
WithDir(t.basePath).
- WithStdout(stdOut).
- WithStderr(stdErr).
- WithStdin(stdIn).
- Run(ctx); err != nil {
- return fmt.Errorf("unable to update-index for temporary repo: %q, error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String())
+ WithStdinBytes(stdIn.Bytes()).
+ RunWithStderr(ctx); err != nil {
+ return fmt.Errorf("unable to update-index for temporary repo: %q, error: %w", t.repo.FullName(), err)
}
return nil
}
@@ -174,16 +165,12 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(ctx context.Context, fi
// HashObjectAndWrite writes the provided content to the object db and returns its hash
func (t *TemporaryUploadRepository) HashObjectAndWrite(ctx context.Context, content io.Reader) (string, error) {
stdOut := new(bytes.Buffer)
- stdErr := new(bytes.Buffer)
-
if err := gitcmd.NewCommand("hash-object", "-w", "--stdin").
WithDir(t.basePath).
- WithStdout(stdOut).
- WithStderr(stdErr).
- WithStdin(content).
- Run(ctx); err != nil {
- log.Error("Unable to hash-object to temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String())
- return "", fmt.Errorf("Unable to hash-object to temporary repo: %s Error: %w\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String())
+ WithStdoutBuffer(stdOut).
+ WithStdinCopy(content).
+ RunWithStderr(ctx); err != nil {
+ return "", fmt.Errorf("unable to hash-object to temporary repo: %s, error: %w", t.repo.FullName(), err)
}
return strings.TrimSpace(stdOut.String()), nil
@@ -191,17 +178,15 @@ func (t *TemporaryUploadRepository) HashObjectAndWrite(ctx context.Context, cont
// AddObjectToIndex adds the provided object hash to the index with the provided mode and path
func (t *TemporaryUploadRepository) AddObjectToIndex(ctx context.Context, mode, objectHash, objectPath string) error {
- if _, _, err := gitcmd.NewCommand("update-index", "--add", "--replace", "--cacheinfo").
- AddDynamicArguments(mode, objectHash, objectPath).WithDir(t.basePath).RunStdString(ctx); err != nil {
- stderr := err.Error()
- if matched, _ := regexp.MatchString(".*Invalid path '.*", stderr); matched {
+ if err := gitcmd.NewCommand("update-index", "--add", "--replace", "--cacheinfo").
+ AddDynamicArguments(mode, objectHash, objectPath).WithDir(t.basePath).RunWithStderr(ctx); err != nil {
+ if matched, _ := regexp.MatchString(".*Invalid path '.*", err.Stderr()); matched {
return ErrFilePathInvalid{
Message: objectPath,
Path: objectPath,
}
}
- log.Error("Unable to add object to index: %s %s %s in temporary repo %s(%s) Error: %v", mode, objectHash, objectPath, t.repo.FullName(), t.basePath, err)
- return fmt.Errorf("Unable to add object to index at %s in temporary repo %s Error: %w", objectPath, t.repo.FullName(), err)
+ return fmt.Errorf("unable to add object to index at %s in temporary repo %s, error: %w", objectPath, t.repo.FullName(), err)
}
return nil
}
@@ -274,7 +259,7 @@ func (t *TemporaryUploadRepository) CommitTree(ctx context.Context, opts *Commit
authorDate := opts.AuthorTime
committerDate := opts.CommitterTime
if authorDate == nil && committerDate == nil {
- authorDate = util.ToPointer(time.Now())
+ authorDate = util.ToPointer(time.Now())
committerDate = authorDate
} else if authorDate == nil {
authorDate = committerDate
@@ -342,18 +327,13 @@ func (t *TemporaryUploadRepository) CommitTree(ctx context.Context, opts *Commit
)
stdout := new(bytes.Buffer)
- stderr := new(bytes.Buffer)
if err := cmdCommitTree.
WithEnv(env).
WithDir(t.basePath).
- WithStdout(stdout).
- WithStderr(stderr).
- WithStdin(messageBytes).
- Run(ctx); err != nil {
- log.Error("Unable to commit-tree in temporary repo: %s (%s) Error: %v\nStdout: %s\nStderr: %s",
- t.repo.FullName(), t.basePath, err, stdout, stderr)
- return "", fmt.Errorf("Unable to commit-tree in temporary repo: %s Error: %w\nStdout: %s\nStderr: %s",
- t.repo.FullName(), err, stdout, stderr)
+ WithStdoutBuffer(stdout).
+ WithStdinBytes(messageBytes.Bytes()).
+ RunWithStderr(ctx); err != nil {
+ return "", fmt.Errorf("unable to commit-tree in temporary repo: %s, error: %w", t.repo.FullName(), err)
}
return strings.TrimSpace(stdout.String()), nil
}
@@ -381,40 +361,31 @@ func (t *TemporaryUploadRepository) Push(ctx context.Context, doer *user_model.U
}
// DiffIndex returns a Diff of the current index to the head
-func (t *TemporaryUploadRepository) DiffIndex(ctx context.Context) (*gitdiff.Diff, error) {
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- return nil, fmt.Errorf("unable to open stdout pipe: %w", err)
- }
- defer func() {
- _ = stdoutReader.Close()
- _ = stdoutWriter.Close()
- }()
- stderr := new(bytes.Buffer)
+func (t *TemporaryUploadRepository) DiffIndex(ctx context.Context, oldContent, newContent string) (*gitdiff.Diff, error) {
var diff *gitdiff.Diff
- err = gitcmd.NewCommand("diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD").
- WithTimeout(30 * time.Second).
+ cmd := gitcmd.NewCommand("diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD")
+ stdoutReader, stdoutReaderClose := cmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
+
+ err := cmd.WithTimeout(30 * time.Second).
WithDir(t.basePath).
- WithStdout(stdoutWriter).
- WithStderr(stderr).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
- defer cancel()
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
var diffErr error
diff, diffErr = gitdiff.ParsePatch(ctx, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, stdoutReader, "")
- _ = stdoutReader.Close()
if diffErr != nil {
// if the diffErr is not nil, it will be returned as the error of "Run()"
return fmt.Errorf("ParsePatch: %w", diffErr)
}
return nil
}).
- Run(ctx)
- if err != nil && !git.IsErrCanceledOrKilled(err) {
- log.Error("Unable to diff-index in temporary repo %s (%s). Error: %v\nStderr: %s", t.repo.FullName(), t.basePath, err, stderr)
+ RunWithStderr(ctx)
+ if err != nil && !gitcmd.IsErrorCanceledOrKilled(err) {
return nil, fmt.Errorf("unable to run diff-index pipeline in temporary repo: %w", err)
}
+ if len(diff.Files) > 0 {
+ gitdiff.FillDiffFileHighlightLinesByContent(diff.Files[0], util.UnsafeStringToBytes(oldContent), util.UnsafeStringToBytes(newContent))
+ }
return diff, nil
}
diff --git a/services/repository/files/tree_test.go b/services/repository/files/tree_test.go
index e7511b3eed..b85f65f431 100644
--- a/services/repository/files/tree_test.go
+++ b/services/repository/files/tree_test.go
@@ -56,6 +56,7 @@ func TestGetTreeBySHA(t *testing.T) {
func TestGetTreeViewNodes(t *testing.T) {
unittest.PrepareTestEnv(t)
+
ctx, _ := contexttest.MockContext(t, "user2/repo1")
ctx.Repo.RefFullName = git.RefNameFromBranch("sub-home-md-img-check")
contexttest.LoadRepo(t, ctx, 1)
@@ -69,11 +70,13 @@ func TestGetTreeViewNodes(t *testing.T) {
mockIconForFile := func(id string) template.HTML {
return template.HTML(``)
}
- mockIconForFolder := func(id string) template.HTML {
- return template.HTML(``)
+ mockIconForFolder := func() template.HTML {
+ // With basic theme (default for folders), we get octicon icons without IDs
+ return template.HTML(`octicon-file-directory-fill(16/)`)
}
- mockOpenIconForFolder := func(id string) template.HTML {
- return template.HTML(``)
+ mockOpenIconForFolder := func() template.HTML {
+ // With basic theme (default for folders), we get octicon icons without IDs
+ return template.HTML(`octicon-file-directory-open-fill(16/)`)
}
treeNodes, err := GetTreeViewNodes(ctx, curRepoLink, renderedIconPool, ctx.Repo.Commit, "", "")
assert.NoError(t, err)
@@ -82,8 +85,8 @@ func TestGetTreeViewNodes(t *testing.T) {
EntryName: "docs",
EntryMode: "tree",
FullPath: "docs",
- EntryIcon: mockIconForFolder(`svg-mfi-folder-docs`),
- EntryIconOpen: mockOpenIconForFolder(`svg-mfi-folder-docs`),
+ EntryIcon: mockIconForFolder(),
+ EntryIconOpen: mockOpenIconForFolder(),
},
}, treeNodes)
@@ -94,8 +97,8 @@ func TestGetTreeViewNodes(t *testing.T) {
EntryName: "docs",
EntryMode: "tree",
FullPath: "docs",
- EntryIcon: mockIconForFolder(`svg-mfi-folder-docs`),
- EntryIconOpen: mockOpenIconForFolder(`svg-mfi-folder-docs`),
+ EntryIcon: mockIconForFolder(),
+ EntryIconOpen: mockOpenIconForFolder(),
Children: []*TreeViewNode{
{
EntryName: "README.md",
diff --git a/services/repository/files/update.go b/services/repository/files/update.go
index bd992d06de..3523b2d342 100644
--- a/services/repository/files/update.go
+++ b/services/repository/files/update.go
@@ -510,7 +510,7 @@ func modifyFile(ctx context.Context, t *TemporaryUploadRepository, file *ChangeR
}
if writeObjectRet.LfsContent == nil {
- return nil, nil // No LFS pointer, so nothing to do
+ return nil, nil //nolint:nilnil // No LFS pointer, so nothing to do
}
defer writeObjectRet.LfsContent.Close()
diff --git a/services/repository/fork.go b/services/repository/fork.go
index f92af65605..8cf41d5654 100644
--- a/services/repository/fork.go
+++ b/services/repository/fork.go
@@ -123,8 +123,8 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork
// WARNING: Don't override all later err with local variables
defer func() {
if err != nil {
- // we can not use the ctx because it maybe canceled or timeout
- cleanupRepository(repo.ID)
+ // we can not use `ctx` because it may be canceled or timed out
+ cleanupRepository(repo)
}
}()
@@ -177,10 +177,10 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork
}
defer gitRepo.Close()
- if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, doer.ID); err != nil {
+ if _, _, err = repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, doer.ID); err != nil {
return nil, fmt.Errorf("SyncRepoBranchesWithRepo: %w", err)
}
- if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
+ if _, err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
return nil, fmt.Errorf("Sync releases from git tags failed: %v", err)
}
diff --git a/services/repository/generate.go b/services/repository/generate.go
index b2913cd110..bc37bc7bfe 100644
--- a/services/repository/generate.go
+++ b/services/repository/generate.go
@@ -21,7 +21,6 @@ import (
git_model "code.gitea.io/gitea/models/git"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
- "code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/glob"
"code.gitea.io/gitea/modules/log"
@@ -216,19 +215,6 @@ func processGiteaTemplateFile(ctx context.Context, tmpDir string, templateRepo,
}
func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *repo_model.Repository, tmpDir string) error {
- commitTimeStr := time.Now().Format(time.RFC3339)
- authorSig := repo.Owner.NewGitSig()
-
- // Because this may call hooks we should pass in the environment
- env := append(os.Environ(),
- "GIT_AUTHOR_NAME="+authorSig.Name,
- "GIT_AUTHOR_EMAIL="+authorSig.Email,
- "GIT_AUTHOR_DATE="+commitTimeStr,
- "GIT_COMMITTER_NAME="+authorSig.Name,
- "GIT_COMMITTER_EMAIL="+authorSig.Email,
- "GIT_COMMITTER_DATE="+commitTimeStr,
- )
-
// Clone to temporary path and do the init commit.
if err := gitrepo.CloneRepoToLocal(ctx, templateRepo, tmpDir, git.CloneRepoOptions{
Depth: 1,
@@ -264,15 +250,6 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r
return err
}
- if stdout, _, err := gitcmd.NewCommand("remote", "add", "origin").
- AddDynamicArguments(repo.RepoPath()).
- WithDir(tmpDir).
- WithEnv(env).
- RunStdString(ctx); err != nil {
- log.Error("Unable to add %v as remote origin to temporary repo to %s: stdout %s\nError: %v", repo, tmpDir, stdout, err)
- return fmt.Errorf("git remote add: %w", err)
- }
-
if err = git.AddTemplateSubmoduleIndexes(ctx, tmpDir, submodules); err != nil {
return fmt.Errorf("failed to add submodules: %v", err)
}
diff --git a/services/repository/gitgraph/graph.go b/services/repository/gitgraph/graph.go
index f89d9a095a..8d9bec47f8 100644
--- a/services/repository/gitgraph/graph.go
+++ b/services/repository/gitgraph/graph.go
@@ -6,9 +6,6 @@ package gitgraph
import (
"bufio"
"bytes"
- "context"
- "os"
- "strings"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
@@ -45,22 +42,14 @@ func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bo
}
graph := NewGraph()
- stderr := new(strings.Builder)
- stdoutReader, stdoutWriter, err := os.Pipe()
- if err != nil {
- return nil, err
- }
commitsToSkip := setting.UI.GraphMaxCommitNum * (page - 1)
- scanner := bufio.NewScanner(stdoutReader)
-
+ stdoutReader, stdoutReaderClose := graphCmd.MakeStdoutPipe()
+ defer stdoutReaderClose()
if err := graphCmd.
WithDir(r.Path).
- WithStdout(stdoutWriter).
- WithStderr(stderr).
- WithPipelineFunc(func(ctx context.Context, cancel context.CancelFunc) error {
- _ = stdoutWriter.Close()
- defer stdoutReader.Close()
+ WithPipelineFunc(func(ctx gitcmd.Context) error {
+ scanner := bufio.NewScanner(stdoutReader)
parser := &Parser{}
parser.firstInUse = -1
parser.maxAllowedColors = maxAllowedColors
@@ -92,8 +81,7 @@ func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bo
line := scanner.Bytes()
if bytes.IndexByte(line, '*') >= 0 {
if err := parser.AddLineToGraph(graph, row, line); err != nil {
- cancel()
- return err
+ return ctx.CancelPipeline(err)
}
break
}
@@ -104,13 +92,12 @@ func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bo
row++
line := scanner.Bytes()
if err := parser.AddLineToGraph(graph, row, line); err != nil {
- cancel()
- return err
+ return ctx.CancelPipeline(err)
}
}
return scanner.Err()
}).
- Run(r.Ctx); err != nil {
+ RunWithStderr(r.Ctx); err != nil {
return graph, err
}
return graph, nil
diff --git a/services/repository/init.go b/services/repository/init.go
index 51cc113d63..6aeb5ec644 100644
--- a/services/repository/init.go
+++ b/services/repository/init.go
@@ -11,7 +11,9 @@ import (
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/git/gitcmd"
+ "code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
@@ -71,12 +73,12 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi
defaultBranch = setting.Repository.DefaultBranch
}
- if stdout, _, err := gitcmd.NewCommand("push", "origin").
- AddDynamicArguments("HEAD:" + defaultBranch).
- WithDir(tmpPath).
- WithEnv(repo_module.InternalPushingEnvironment(u, repo)).
- RunStdString(ctx); err != nil {
- log.Error("Failed to push back to HEAD: Stdout: %s\nError: %v", stdout, err)
+ if err := gitrepo.PushFromLocal(ctx, tmpPath, repo, git.PushOptions{
+ LocalRefName: "HEAD",
+ Branch: defaultBranch,
+ Env: repo_module.InternalPushingEnvironment(u, repo),
+ }); err != nil {
+ log.Error("Failed to push back to HEAD: %v", err)
return fmt.Errorf("git push: %w", err)
}
diff --git a/services/repository/lfs.go b/services/repository/lfs.go
index 4d48881b87..5ef2dbdac4 100644
--- a/services/repository/lfs.go
+++ b/services/repository/lfs.go
@@ -123,10 +123,8 @@ func GarbageCollectLFSMetaObjectsForRepo(ctx context.Context, repo *repo_model.R
//
// It is likely that a week is potentially excessive but it should definitely be enough that any
// unassociated LFS object is genuinely unassociated.
- OlderThan: timeutil.TimeStamp(opts.OlderThan.Unix()),
- UpdatedLessRecentlyThan: timeutil.TimeStamp(opts.UpdatedLessRecentlyThan.Unix()),
- OrderByUpdated: true,
- LoopFunctionAlwaysUpdates: true,
+ OlderThan: timeutil.TimeStamp(opts.OlderThan.Unix()),
+ UpdatedLessRecentlyThan: timeutil.TimeStamp(opts.UpdatedLessRecentlyThan.Unix()),
})
if err == errStop {
diff --git a/services/repository/lfs_test.go b/services/repository/lfs_test.go
index 7fb202f42d..1335d48cb1 100644
--- a/services/repository/lfs_test.go
+++ b/services/repository/lfs_test.go
@@ -14,6 +14,7 @@ import (
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/test"
repo_service "code.gitea.io/gitea/services/repository"
"github.com/stretchr/testify/assert"
@@ -22,7 +23,8 @@ import (
func TestGarbageCollectLFSMetaObjects(t *testing.T) {
unittest.PrepareTestEnv(t)
- setting.LFS.StartServer = true
+ defer test.MockVariableValue(&setting.LFS.StartServer, true)()
+
err := storage.Init()
assert.NoError(t, err)
@@ -46,6 +48,32 @@ func TestGarbageCollectLFSMetaObjects(t *testing.T) {
assert.ErrorIs(t, err, git_model.ErrLFSObjectNotExist)
}
+func TestGarbageCollectLFSMetaObjectsForRepoAutoFix(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ defer test.MockVariableValue(&setting.LFS.StartServer, true)()
+
+ err := storage.Init()
+ assert.NoError(t, err)
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ // add lfs object
+ lfsContent := []byte("gitea2")
+ lfsOid := storeObjectInRepo(t, repo.ID, &lfsContent)
+
+ err = repo_service.GarbageCollectLFSMetaObjectsForRepo(t.Context(), repo, repo_service.GarbageCollectLFSMetaObjectsOptions{
+ LogDetail: func(string, ...any) {},
+ AutoFix: true,
+ OlderThan: time.Now().Add(24 * time.Hour * 7),
+ UpdatedLessRecentlyThan: time.Now().Add(24 * time.Hour * 3),
+ })
+ assert.NoError(t, err)
+
+ _, err = git_model.GetLFSMetaObjectByOid(t.Context(), repo.ID, lfsOid)
+ assert.ErrorIs(t, err, git_model.ErrLFSObjectNotExist)
+}
+
func storeObjectInRepo(t *testing.T, repositoryID int64, content *[]byte) string {
pointer, err := lfs.GeneratePointer(bytes.NewReader(*content))
assert.NoError(t, err)
diff --git a/services/repository/migrate.go b/services/repository/migrate.go
index 8f515326ad..a51791ed29 100644
--- a/services/repository/migrate.go
+++ b/services/repository/migrate.go
@@ -145,7 +145,7 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
}
}
- if _, err := repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, u.ID); err != nil {
+ if _, _, err := repo_module.SyncRepoBranchesWithRepo(ctx, repo, gitRepo, u.ID); err != nil {
return repo, fmt.Errorf("SyncRepoBranchesWithRepo: %v", err)
}
@@ -153,7 +153,7 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
// otherwise, the releases sync will be done out of this function
if !opts.Releases {
repo.IsMirror = opts.Mirror
- if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
+ if _, err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
log.Error("Failed to synchronize tags to releases for repository: %v", err)
}
}
@@ -225,7 +225,7 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
// this is necessary for sync local tags from remote
configName := fmt.Sprintf("remote.%s.fetch", mirrorModel.GetRemoteName())
- if stdout, err := gitrepo.RunCmdString(ctx, repo,
+ if stdout, _, err := gitrepo.RunCmdString(ctx, repo,
gitcmd.NewCommand("config").
AddOptionValues("--add", configName, `+refs/tags/*:refs/tags/*`)); err != nil {
log.Error("MigrateRepositoryGitData(git config --add +refs/tags/*:refs/tags/*) in %v: Stdout: %s\nError: %v", repo, stdout, err)
diff --git a/services/repository/repository.go b/services/repository/repository.go
index 4d07cb0e38..ae64f0116a 100644
--- a/services/repository/repository.go
+++ b/services/repository/repository.go
@@ -194,6 +194,10 @@ func MakeRepoPrivate(ctx context.Context, repo *repo_model.Repository) (err erro
return err
}
+ if err = repo_model.ClearRepoWatches(ctx, repo.ID); err != nil {
+ return err
+ }
+
// Create/Remove git-daemon-export-ok for git-daemon...
if err := CheckDaemonExportOK(ctx, repo); err != nil {
return err
@@ -217,28 +221,28 @@ func MakeRepoPrivate(ctx context.Context, repo *repo_model.Repository) (err erro
})
}
-// LinkedRepository returns the linked repo if any
-func LinkedRepository(ctx context.Context, a *repo_model.Attachment) (*repo_model.Repository, unit.Type, error) {
+// GetAttachmentLinkedTypeAndRepoID returns the linked type and repository id of attachment if any
+func GetAttachmentLinkedTypeAndRepoID(ctx context.Context, a *repo_model.Attachment) (unit.Type, int64, error) {
if a.IssueID != 0 {
iss, err := issues_model.GetIssueByID(ctx, a.IssueID)
if err != nil {
- return nil, unit.TypeIssues, err
+ return unit.TypeIssues, 0, err
}
- repo, err := repo_model.GetRepositoryByID(ctx, iss.RepoID)
unitType := unit.TypeIssues
if iss.IsPull {
unitType = unit.TypePullRequests
}
- return repo, unitType, err
- } else if a.ReleaseID != 0 {
+ return unitType, iss.RepoID, nil
+ }
+
+ if a.ReleaseID != 0 {
rel, err := repo_model.GetReleaseByID(ctx, a.ReleaseID)
if err != nil {
- return nil, unit.TypeReleases, err
+ return unit.TypeReleases, 0, err
}
- repo, err := repo_model.GetRepositoryByID(ctx, rel.RepoID)
- return repo, unit.TypeReleases, err
+ return unit.TypeReleases, rel.RepoID, nil
}
- return nil, -1, nil
+ return unit.TypeInvalid, 0, nil
}
// CheckDaemonExportOK creates/removes git-daemon-export-ok for git-daemon...
diff --git a/services/repository/repository_test.go b/services/repository/repository_test.go
index 5673a4a161..2d860f1b9d 100644
--- a/services/repository/repository_test.go
+++ b/services/repository/repository_test.go
@@ -13,31 +13,30 @@ import (
"code.gitea.io/gitea/models/unittest"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
-func TestLinkedRepository(t *testing.T) {
+func TestAttachLinkedTypeAndRepoID(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
testCases := []struct {
name string
attachID int64
- expectedRepo *repo_model.Repository
expectedUnitType unit.Type
+ expectedRepoID int64
}{
- {"LinkedIssue", 1, &repo_model.Repository{ID: 1}, unit.TypeIssues},
- {"LinkedComment", 3, &repo_model.Repository{ID: 1}, unit.TypePullRequests},
- {"LinkedRelease", 9, &repo_model.Repository{ID: 1}, unit.TypeReleases},
- {"Notlinked", 10, nil, -1},
+ {"LinkedIssue", 1, unit.TypeIssues, 1},
+ {"LinkedComment", 3, unit.TypePullRequests, 1},
+ {"LinkedRelease", 9, unit.TypeReleases, 1},
+ {"Notlinked", 10, unit.TypeInvalid, 0},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
attach, err := repo_model.GetAttachmentByID(t.Context(), tc.attachID)
assert.NoError(t, err)
- repo, unitType, err := LinkedRepository(t.Context(), attach)
+ unitType, repoID, err := GetAttachmentLinkedTypeAndRepoID(t.Context(), attach)
assert.NoError(t, err)
- if tc.expectedRepo != nil {
- assert.Equal(t, tc.expectedRepo.ID, repo.ID)
- }
assert.Equal(t, tc.expectedUnitType, unitType)
+ assert.Equal(t, tc.expectedRepoID, repoID)
})
}
}
@@ -70,3 +69,24 @@ func TestRepository_HasWiki(t *testing.T) {
repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
assert.False(t, HasWiki(t.Context(), repo2))
}
+
+func TestMakeRepoPrivateClearsWatches(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ repo.IsPrivate = false
+
+ watchers, err := repo_model.GetRepoWatchersIDs(t.Context(), repo.ID)
+ require.NoError(t, err)
+ require.NotEmpty(t, watchers)
+
+ assert.NoError(t, MakeRepoPrivate(t.Context(), repo))
+
+ watchers, err = repo_model.GetRepoWatchersIDs(t.Context(), repo.ID)
+ assert.NoError(t, err)
+ assert.Empty(t, watchers)
+
+ updatedRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo.ID})
+ assert.True(t, updatedRepo.IsPrivate)
+ assert.Zero(t, updatedRepo.NumWatches)
+}
diff --git a/services/repository/template.go b/services/repository/template.go
index 7444bf7b60..96033cb98d 100644
--- a/services/repository/template.go
+++ b/services/repository/template.go
@@ -100,8 +100,8 @@ func GenerateRepository(ctx context.Context, doer, owner *user_model.User, templ
// last - clean up the repository if something goes wrong
defer func() {
if err != nil {
- // we can not use the ctx because it maybe canceled or timeout
- cleanupRepository(generateRepo.ID)
+ // we can not use `ctx` because it may be canceled or timed out
+ cleanupRepository(generateRepo)
}
}()
diff --git a/services/repository/transfer.go b/services/repository/transfer.go
index af477fc7f1..a601ee6f16 100644
--- a/services/repository/transfer.go
+++ b/services/repository/transfer.go
@@ -6,7 +6,6 @@ package repository
import (
"context"
"fmt"
- "os"
"strings"
"code.gitea.io/gitea/models/db"
@@ -291,12 +290,8 @@ func transferOwnership(ctx context.Context, doer *user_model.User, newOwnerName
}
// Rename remote repository to new path and delete local copy.
- dir := user_model.UserPath(newOwner.Name)
- if err := os.MkdirAll(dir, os.ModePerm); err != nil {
- return fmt.Errorf("Failed to create dir %s: %w", dir, err)
- }
-
- if err := util.Rename(repo_model.RepoPath(oldOwner.Name, repo.Name), repo_model.RepoPath(newOwner.Name, repo.Name)); err != nil {
+ oldRelativePath, newRelativePath := repo_model.RelativePath(oldOwner.Name, repo.Name), repo_model.RelativePath(newOwner.Name, repo.Name)
+ if err := gitrepo.RenameRepository(ctx, repo_model.StorageRepo(oldRelativePath), repo_model.StorageRepo(newRelativePath)); err != nil {
return fmt.Errorf("rename repository directory: %w", err)
}
repoRenamed = true
diff --git a/services/user/user.go b/services/user/user.go
index 8e42fa3ccd..9b8bcf83c0 100644
--- a/services/user/user.go
+++ b/services/user/user.go
@@ -239,6 +239,11 @@ func DeleteUser(ctx context.Context, u *user_model.User, purge bool) error {
if err := deleteUser(ctx, u, purge); err != nil {
return fmt.Errorf("DeleteUser: %w", err)
}
+
+ // Finally delete any unlinked attachments, this will also delete the attached files
+ if err := deleteUserUnlinkedAttachments(ctx, u); err != nil {
+ return fmt.Errorf("deleteUserUnlinkedAttachments: %w", err)
+ }
return nil
}); err != nil {
return err
@@ -269,6 +274,19 @@ func DeleteUser(ctx context.Context, u *user_model.User, purge bool) error {
return nil
}
+func deleteUserUnlinkedAttachments(ctx context.Context, u *user_model.User) error {
+ attachments, err := repo_model.GetUnlinkedAttachmentsByUserID(ctx, u.ID)
+ if err != nil {
+ return fmt.Errorf("GetUnlinkedAttachmentsByUserID: %w", err)
+ }
+ for _, attach := range attachments {
+ if err := repo_model.DeleteAttachment(ctx, attach, true); err != nil {
+ return fmt.Errorf("DeleteAttachment ID[%d]: %w", attach.ID, err)
+ }
+ }
+ return nil
+}
+
// DeleteInactiveUsers deletes all inactive users and their email addresses.
func DeleteInactiveUsers(ctx context.Context, olderThan time.Duration) error {
inactiveUsers, err := user_model.GetInactiveUsers(ctx, olderThan)
diff --git a/services/user/user_test.go b/services/user/user_test.go
index 25e8ee7b2f..4d8d448dcd 100644
--- a/services/user/user_test.go
+++ b/services/user/user_test.go
@@ -63,6 +63,24 @@ func TestDeleteUser(t *testing.T) {
assert.Error(t, DeleteUser(t.Context(), org, false))
}
+func TestDeleteUserUnlinkedAttachments(t *testing.T) {
+ t.Run("DeleteExisting", func(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 8})
+ unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: 10})
+
+ assert.NoError(t, deleteUserUnlinkedAttachments(t.Context(), user))
+ unittest.AssertNotExistsBean(t, &repo_model.Attachment{ID: 10})
+ })
+
+ t.Run("NoUnlinkedAttachments", func(t *testing.T) {
+ assert.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ assert.NoError(t, deleteUserUnlinkedAttachments(t.Context(), user))
+ })
+}
+
func TestPurgeUser(t *testing.T) {
test := func(userID int64) {
assert.NoError(t, unittest.PrepareTestDatabase())
diff --git a/services/webhook/general.go b/services/webhook/general.go
index be457e46f5..3186f53d74 100644
--- a/services/webhook/general.go
+++ b/services/webhook/general.go
@@ -317,7 +317,7 @@ func getStatusPayloadInfo(p *api.CommitStatusPayload, linkFormatter linkFormatte
text = fmt.Sprintf("Commit Status changed: %s - %s", refLink, p.Description)
color = greenColor
if withSender {
- if user_model.IsGiteaActionsUserName(p.Sender.UserName) {
+ if user_model.GetSystemUserByName(p.Sender.UserName) != nil {
text += " by " + p.Sender.FullName
} else {
text += " by " + linkFormatter(setting.AppURL+url.PathEscape(p.Sender.UserName), p.Sender.UserName)
diff --git a/services/webtheme/webtheme.go b/services/webtheme/webtheme.go
index 57d63f4e07..a0beec2902 100644
--- a/services/webtheme/webtheme.go
+++ b/services/webtheme/webtheme.go
@@ -16,10 +16,14 @@ import (
"code.gitea.io/gitea/modules/util"
)
+type themeCollection struct {
+ themeList []*ThemeMetaInfo
+ themeMap map[string]*ThemeMetaInfo
+}
+
var (
- availableThemes []*ThemeMetaInfo
- availableThemeMap map[string]*ThemeMetaInfo
- themeOnce sync.Once
+ themeMu sync.RWMutex
+ availableThemes *themeCollection
)
const (
@@ -129,23 +133,13 @@ func parseThemeMetaInfo(fileName, cssContent string) *ThemeMetaInfo {
return themeInfo
}
-func initThemes() {
- availableThemes = nil
- defer func() {
- availableThemeMap = map[string]*ThemeMetaInfo{}
- for _, theme := range availableThemes {
- availableThemeMap[theme.InternalName] = theme
- }
- if availableThemeMap[setting.UI.DefaultTheme] == nil {
- setting.LogStartupProblem(1, log.ERROR, "Default theme %q is not available, please correct the '[ui].DEFAULT_THEME' setting in the config file", setting.UI.DefaultTheme)
- }
- }()
- cssFiles, err := public.AssetFS().ListFiles("/assets/css")
+func loadThemesFromAssets() (themeList []*ThemeMetaInfo, themeMap map[string]*ThemeMetaInfo) {
+ cssFiles, err := public.AssetFS().ListFiles("assets/css")
if err != nil {
log.Error("Failed to list themes: %v", err)
- availableThemes = []*ThemeMetaInfo{defaultThemeMetaInfoByInternalName(setting.UI.DefaultTheme)}
- return
+ return nil, nil
}
+
var foundThemes []*ThemeMetaInfo
for _, fileName := range cssFiles {
if strings.HasPrefix(fileName, fileNamePrefix) && strings.HasSuffix(fileName, fileNameSuffix) {
@@ -157,39 +151,84 @@ func initThemes() {
foundThemes = append(foundThemes, parseThemeMetaInfo(fileName, util.UnsafeBytesToString(content)))
}
}
+
+ themeList = foundThemes
if len(setting.UI.Themes) > 0 {
+ themeList = nil // only allow the themes specified in the setting
allowedThemes := container.SetOf(setting.UI.Themes...)
for _, theme := range foundThemes {
if allowedThemes.Contains(theme.InternalName) {
- availableThemes = append(availableThemes, theme)
+ themeList = append(themeList, theme)
}
}
- } else {
- availableThemes = foundThemes
}
- sort.Slice(availableThemes, func(i, j int) bool {
- if availableThemes[i].InternalName == setting.UI.DefaultTheme {
+
+ sort.Slice(themeList, func(i, j int) bool {
+ if themeList[i].InternalName == setting.UI.DefaultTheme {
return true
}
- if availableThemes[i].ColorblindType != availableThemes[j].ColorblindType {
- return availableThemes[i].ColorblindType < availableThemes[j].ColorblindType
+ if themeList[i].ColorblindType != themeList[j].ColorblindType {
+ return themeList[i].ColorblindType < themeList[j].ColorblindType
}
- return availableThemes[i].DisplayName < availableThemes[j].DisplayName
+ return themeList[i].DisplayName < themeList[j].DisplayName
})
- if len(availableThemes) == 0 {
- setting.LogStartupProblem(1, log.ERROR, "No theme candidate in asset files, but Gitea requires there should be at least one usable theme")
- availableThemes = []*ThemeMetaInfo{defaultThemeMetaInfoByInternalName(setting.UI.DefaultTheme)}
+
+ themeMap = map[string]*ThemeMetaInfo{}
+ for _, theme := range themeList {
+ themeMap[theme.InternalName] = theme
}
+ return themeList, themeMap
+}
+
+func getAvailableThemes() (themeList []*ThemeMetaInfo, themeMap map[string]*ThemeMetaInfo) {
+ themeMu.RLock()
+ if availableThemes != nil {
+ themeList, themeMap = availableThemes.themeList, availableThemes.themeMap
+ }
+ themeMu.RUnlock()
+ if len(themeList) != 0 {
+ return themeList, themeMap
+ }
+
+ themeMu.Lock()
+ defer themeMu.Unlock()
+ // no need to double-check "availableThemes.themeList" since the loading isn't really slow, to keep code simple
+ themeList, themeMap = loadThemesFromAssets()
+ hasAvailableThemes := len(themeList) > 0
+ if !hasAvailableThemes {
+ defaultTheme := defaultThemeMetaInfoByInternalName(setting.UI.DefaultTheme)
+ themeList = []*ThemeMetaInfo{defaultTheme}
+ themeMap = map[string]*ThemeMetaInfo{setting.UI.DefaultTheme: defaultTheme}
+ }
+
+ if setting.IsProd {
+ if !hasAvailableThemes {
+ setting.LogStartupProblem(1, log.ERROR, "No theme candidate in asset files, but Gitea requires there should be at least one usable theme")
+ }
+ if themeMap[setting.UI.DefaultTheme] == nil {
+ setting.LogStartupProblem(1, log.ERROR, "Default theme %q is not available, please correct the '[ui].DEFAULT_THEME' setting in the config file", setting.UI.DefaultTheme)
+ }
+ availableThemes = &themeCollection{themeList, themeMap}
+ return themeList, themeMap
+ }
+
+ // In dev mode, only store the loaded themes if the list is not empty, in case the frontend is still being built.
+ // TBH, there still could be a data-race that the themes are only partially built then the list is incomplete for first time loading.
+ // Such edge case can be handled by checking whether the loaded themes are the same in a period or there is a flag file, but it is an over-kill, so, no.
+ if hasAvailableThemes {
+ availableThemes = &themeCollection{themeList, themeMap}
+ }
+ return themeList, themeMap
}
func GetAvailableThemes() []*ThemeMetaInfo {
- themeOnce.Do(initThemes)
- return availableThemes
+ themes, _ := getAvailableThemes()
+ return themes
}
func GetThemeMetaInfo(internalName string) *ThemeMetaInfo {
- themeOnce.Do(initThemes)
- return availableThemeMap[internalName]
+ _, themeMap := getAvailableThemes()
+ return themeMap[internalName]
}
// GuaranteeGetThemeMetaInfo guarantees to return a non-nil ThemeMetaInfo,
diff --git a/services/wiki/wiki.go b/services/wiki/wiki.go
index f4115038cb..a025f26051 100644
--- a/services/wiki/wiki.go
+++ b/services/wiki/wiki.go
@@ -170,7 +170,7 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model
// FIXME: The wiki doesn't have lfs support at present - if this changes need to check attributes here
- objectHash, err := gitRepo.HashObject(strings.NewReader(content))
+ objectHash, err := gitRepo.HashObjectBytes([]byte(content))
if err != nil {
log.Error("HashObject failed: %v", err)
return err
@@ -369,7 +369,7 @@ func DeleteWiki(ctx context.Context, repo *repo_model.Repository) error {
}
if err := gitrepo.DeleteRepository(ctx, repo.WikiStorageRepo()); err != nil {
- desc := fmt.Sprintf("Delete wiki repository files [%s]: %v", repo.FullName(), err)
+ desc := fmt.Sprintf("Delete wiki repository files (%s): %v", repo.FullName(), err)
// Note we use the db.DefaultContext here rather than passing in a context as the context may be cancelled
if err = system_model.CreateNotice(graceful.GetManager().ShutdownContext(), system_model.NoticeRepository, desc); err != nil {
log.Error("CreateRepositoryNotice: %v", err)
diff --git a/stylelint.config.ts b/stylelint.config.js
similarity index 95%
rename from stylelint.config.ts
rename to stylelint.config.js
index 8a5b87e17c..42edf76f43 100644
--- a/stylelint.config.ts
+++ b/stylelint.config.js
@@ -1,5 +1,6 @@
+// @ts-check
+// TODO: Move to .ts after https://github.com/stylelint/stylelint/issues/8893 is fixed
import {fileURLToPath} from 'node:url';
-import type {Config} from 'stylelint';
const cssVarFiles = [
fileURLToPath(new URL('web_src/css/base.css', import.meta.url)),
@@ -7,6 +8,7 @@ const cssVarFiles = [
fileURLToPath(new URL('web_src/css/themes/theme-gitea-dark.css', import.meta.url)),
];
+/** @type {import('stylelint').Config} */
export default {
extends: 'stylelint-config-recommended',
reportUnscopedDisables: true,
@@ -57,14 +59,14 @@ export default {
'@stylistic/block-opening-brace-space-before': 'always',
'@stylistic/color-hex-case': 'lower',
'@stylistic/declaration-bang-space-after': 'never',
- '@stylistic/declaration-bang-space-before': null,
+ '@stylistic/declaration-bang-space-before': 'always',
'@stylistic/declaration-block-semicolon-newline-after': null,
'@stylistic/declaration-block-semicolon-newline-before': null,
'@stylistic/declaration-block-semicolon-space-after': null,
'@stylistic/declaration-block-semicolon-space-before': 'never',
'@stylistic/declaration-block-trailing-semicolon': null,
'@stylistic/declaration-colon-newline-after': null,
- '@stylistic/declaration-colon-space-after': null,
+ '@stylistic/declaration-colon-space-after': 'always',
'@stylistic/declaration-colon-space-before': 'never',
'@stylistic/function-comma-newline-after': null,
'@stylistic/function-comma-newline-before': null,
@@ -101,7 +103,7 @@ export default {
'@stylistic/selector-attribute-operator-space-before': null,
'@stylistic/selector-combinator-space-after': null,
'@stylistic/selector-combinator-space-before': null,
- '@stylistic/selector-descendant-combinator-no-non-space': null,
+ '@stylistic/selector-descendant-combinator-no-non-space': true,
'@stylistic/selector-list-comma-newline-after': null,
'@stylistic/selector-list-comma-newline-before': null,
'@stylistic/selector-list-comma-space-after': 'always-single-line',
@@ -146,4 +148,4 @@ export default {
'shorthand-property-no-redundant-values': true,
'value-no-vendor-prefix': [true, {ignoreValues: ['box', 'inline-box']}],
},
-} satisfies Config;
+};
diff --git a/tailwind.config.ts b/tailwind.config.ts
index 8693208e13..7aaea687eb 100644
--- a/tailwind.config.ts
+++ b/tailwind.config.ts
@@ -80,10 +80,10 @@ export default {
semibold: 'var(--font-weight-semibold)',
bold: 'var(--font-weight-bold)',
},
- fontSize: { // not using `rem` units because our root is currently 14px
- 'xs': '12px',
- 'sm': '14px',
- 'base': '16px',
+ fontSize: { // rarely used, but "text-base" (matching body's 1em=14px) is useful to reset font-size in a header container
+ 'xs': '11px',
+ 'sm': '12px',
+ 'base': '14px',
'lg': '18px',
'xl': '20px',
'2xl': '24px',
diff --git a/templates/admin/auth/edit.tmpl b/templates/admin/auth/edit.tmpl
index d29a52b76b..56f9e1b9cd 100644
--- a/templates/admin/auth/edit.tmpl
+++ b/templates/admin/auth/edit.tmpl
@@ -112,6 +112,12 @@
+
+
+
+
+
+
diff --git a/templates/admin/auth/list.tmpl b/templates/admin/auth/list.tmpl
index a1e72b742f..7296e95558 100644
--- a/templates/admin/auth/list.tmpl
+++ b/templates/admin/auth/list.tmpl
@@ -7,7 +7,7 @@
-
+
| ID |
diff --git a/templates/admin/auth/source/ldap.tmpl b/templates/admin/auth/source/ldap.tmpl
index 9754aed55a..e5852daa3d 100644
--- a/templates/admin/auth/source/ldap.tmpl
+++ b/templates/admin/auth/source/ldap.tmpl
@@ -80,6 +80,12 @@
+
+
+
+
+
+
diff --git a/templates/admin/config.tmpl b/templates/admin/config.tmpl
index 57631fd9c6..728746713c 100644
--- a/templates/admin/config.tmpl
+++ b/templates/admin/config.tmpl
@@ -307,10 +307,6 @@
{{.Git.Timeout.Migrate}} {{ctx.Locale.Tr "tool.raw_seconds"}}
{{ctx.Locale.Tr "admin.config.git_mirror_timeout"}}
{{.Git.Timeout.Mirror}} {{ctx.Locale.Tr "tool.raw_seconds"}}
-
{{ctx.Locale.Tr "admin.config.git_clone_timeout"}}
-
{{.Git.Timeout.Clone}} {{ctx.Locale.Tr "tool.raw_seconds"}}
-
{{ctx.Locale.Tr "admin.config.git_pull_timeout"}}
-
{{.Git.Timeout.Pull}} {{ctx.Locale.Tr "tool.raw_seconds"}}
{{ctx.Locale.Tr "admin.config.git_gc_timeout"}}
{{.Git.Timeout.GC}} {{ctx.Locale.Tr "tool.raw_seconds"}}
diff --git a/templates/admin/cron.tmpl b/templates/admin/cron.tmpl
index 4d01ce51eb..8e44614d97 100644
--- a/templates/admin/cron.tmpl
+++ b/templates/admin/cron.tmpl
@@ -5,7 +5,7 @@