0
0
mirror of https://github.com/go-gitea/gitea.git synced 2026-05-15 10:43:21 +02:00

Merge branch 'go-gitea:main' into feat/google-groups-retrieving

This commit is contained in:
Andy Mrichko 2026-05-03 18:37:43 +03:00 committed by GitHub
commit 4f118e6e08
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
344 changed files with 40474 additions and 4067 deletions

View File

@ -40,9 +40,7 @@ cpu.out
*.log
/gitea
/gitea-vet
/debug
/integrations.test
/bin
/dist
@ -54,12 +52,6 @@ cpu.out
/indexers
/log
/tests/integration/gitea-integration-*
/tests/integration/indexers-*
/tests/e2e/gitea-e2e-*
/tests/e2e/indexers-*
/tests/e2e/reports
/tests/e2e/test-artifacts
/tests/e2e/test-snapshots
/tests/*.ini
/node_modules
/yarn.lock

View File

@ -18,7 +18,7 @@ indent_style = tab
[templates/custom/*.tmpl]
insert_final_newline = false
[templates/swagger/v1_json.tmpl]
[templates/swagger/*_json.tmpl]
indent_style = space
insert_final_newline = false

View File

@ -0,0 +1,29 @@
name: docker-dryrun
description: Composite action that performs the container build steps for a single platform.
inputs:
platform:
description: "The target platform: linux/amd64, linux/arm64, linux/riscv64."
required: true
runs:
using: composite
steps:
- uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- name: Build regular image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: .
platforms: ${{ inputs.platform }}
push: false
file: Dockerfile
cache-from: type=registry,ref=ghcr.io/go-gitea/gitea:buildcache-rootful
- name: Build rootless image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: .
platforms: ${{ inputs.platform }}
push: false
file: Dockerfile.rootless
cache-from: type=registry,ref=ghcr.io/go-gitea/gitea:buildcache-rootless

View File

@ -27,11 +27,12 @@ concurrency:
group: cache-seeder
cancel-in-progress: true
permissions:
contents: read
jobs:
gobuild:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@ -49,15 +50,13 @@ jobs:
lint:
runs-on: ubuntu-latest
permissions:
contents: read
strategy:
fail-fast: false
matrix:
include:
- { job: lint-backend, tags: "bindata sqlite sqlite_unlock_notify", target: "lint-backend" }
- { job: lint-go-windows, tags: "bindata sqlite sqlite_unlock_notify", target: "lint-go-windows" }
- { job: lint-go-gogit, tags: "bindata sqlite sqlite_unlock_notify gogit", target: "lint-go" }
- { job: lint-go-gogit, tags: "bindata gogit sqlite sqlite_unlock_notify", target: "lint-go" }
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0

View File

@ -11,13 +11,14 @@ concurrency:
env:
RENOVATE_VERSION: 43.141.5 # renovate: datasource=docker depName=ghcr.io/renovatebot/renovate
permissions:
contents: read
jobs:
cron-renovate:
runs-on: ubuntu-latest
if: github.repository == 'go-gitea/gitea' # prevent running on forks
timeout-minutes: 30
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: renovatebot/github-action@83ec54fee49ab67d9cd201084c1ff325b4b462e4 # v46.1.10

View File

@ -15,6 +15,8 @@ on:
value: ${{ jobs.detect.outputs.templates }}
docker:
value: ${{ jobs.detect.outputs.docker }}
dockerfile:
value: ${{ jobs.detect.outputs.dockerfile }}
swagger:
value: ${{ jobs.detect.outputs.swagger }}
yaml:
@ -24,12 +26,13 @@ on:
e2e:
value: ${{ jobs.detect.outputs.e2e }}
permissions:
contents: read
jobs:
detect:
runs-on: ubuntu-latest
timeout-minutes: 3
permissions:
contents: read
outputs:
backend: ${{ steps.changes.outputs.backend }}
frontend: ${{ steps.changes.outputs.frontend }}
@ -37,6 +40,7 @@ jobs:
actions: ${{ steps.changes.outputs.actions }}
templates: ${{ steps.changes.outputs.templates }}
docker: ${{ steps.changes.outputs.docker }}
dockerfile: ${{ steps.changes.outputs.dockerfile }}
swagger: ${{ steps.changes.outputs.swagger }}
yaml: ${{ steps.changes.outputs.yaml }}
json: ${{ steps.changes.outputs.json }}
@ -94,6 +98,10 @@ jobs:
- "docker/**"
- "Makefile"
dockerfile:
- "Dockerfile"
- "Dockerfile.rootless"
swagger:
- "templates/swagger/v1_json.tmpl"
- "templates/swagger/v1_input.json"

View File

@ -7,18 +7,17 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
permissions:
contents: read
jobs:
files-changed:
uses: ./.github/workflows/files-changed.yml
permissions:
contents: read
lint-backend:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@ -35,93 +34,40 @@ jobs:
env:
TAGS: bindata sqlite sqlite_unlock_notify
lint-templates:
if: needs.files-changed.outputs.templates == 'true'
lint-on-demand:
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
- run: uv python install 3.14
- uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: 24
cache: pnpm
cache-dependency-path: pnpm-lock.yaml
- run: make deps-py
- run: make deps-frontend
- run: make lint-templates
lint-yaml:
if: needs.files-changed.outputs.yaml == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
- run: uv python install 3.14
- run: make deps-py
- run: make lint-yaml
lint-json:
if: needs.files-changed.outputs.json == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: 24
cache: pnpm
cache-dependency-path: pnpm-lock.yaml
- run: make deps-frontend
- run: make lint-json
lint-swagger:
if: needs.files-changed.outputs.swagger == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: 24
cache: pnpm
cache-dependency-path: pnpm-lock.yaml
- run: make deps-frontend
- run: make lint-swagger
lint-spell:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.actions == 'true' || needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.templates == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
with:
go-version-file: go.mod
check-latest: true
cache: false
- uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: 24
cache: pnpm
cache-dependency-path: pnpm-lock.yaml
- run: make lint-spell
- if: needs.files-changed.outputs.templates == 'true' || needs.files-changed.outputs.yaml == 'true'
uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0
- if: needs.files-changed.outputs.templates == 'true' || needs.files-changed.outputs.yaml == 'true'
run: uv python install 3.14 && make deps-py lint-templates lint-yaml
- if: needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.swagger == 'true' || needs.files-changed.outputs.json == 'true'
run: make deps-frontend lint-md lint-swagger lint-json
- if: needs.files-changed.outputs.actions == 'true'
run: make lint-actions
lint-go-windows:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@ -144,8 +90,6 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@ -166,8 +110,6 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@ -186,8 +128,6 @@ jobs:
if: needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
@ -206,8 +146,6 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@ -217,13 +155,12 @@ jobs:
cache: false
- uses: ./.github/actions/go-cache
with:
cache-name: backend
# no frontend build here as backend should be able to build
# even without any frontend files
- run: make deps-backend
- run: go build -o gitea_no_gcc # test if build succeeds without the sqlite tag
cache-name: compliance-backend
- run: make deps-backend generate-go
# no frontend build here as backend should be able to build, even without any frontend files
# CGO is not used when cross-compile, so these steps also test if the code is compatible with CGO disabled
- name: build-backend-arm64
run: make backend # test cross compile
run: go build -o gitea_linux_arm64
env:
GOOS: linux
GOARCH: arm64
@ -235,38 +172,7 @@ jobs:
GOARCH: amd64
TAGS: bindata gogit
- name: build-backend-386
run: go build -o gitea_linux_386 # test if compatible with 32 bit
run: go build -o gitea_linux_386
env:
GOOS: linux
GOARCH: 386
docs:
if: needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
with:
node-version: 24
cache: pnpm
cache-dependency-path: pnpm-lock.yaml
- run: make deps-frontend
- run: make lint-md
actions:
if: needs.files-changed.outputs.actions == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
with:
go-version-file: go.mod
check-latest: true
- run: make lint-actions

View File

@ -7,18 +7,17 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
permissions:
contents: read
jobs:
files-changed:
uses: ./.github/workflows/files-changed.yml
permissions:
contents: read
test-pgsql:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
services:
pgsql:
image: postgres:14
@ -58,21 +57,18 @@ jobs:
env:
TAGS: bindata
- name: run migration tests
run: make test-pgsql-migration
run: GITEA_TEST_DATABASE=pgsql make test-migration
- name: run tests
run: make test-pgsql
run: GITEA_TEST_DATABASE=pgsql make test-integration
timeout-minutes: 50
env:
TAGS: bindata gogit
TEST_TAGS: gogit
TEST_LDAP: 1
test-sqlite:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@ -84,30 +80,32 @@ jobs:
with:
cache-name: sqlite
- run: make deps-backend
- run: GOEXPERIMENT='' make backend
- run: make backend
env:
TAGS: bindata gogit sqlite sqlite_unlock_notify
GOEXPERIMENT:
- name: run migration tests
run: make test-sqlite-migration
run: GITEA_TEST_DATABASE=sqlite make test-migration
env:
TAGS: bindata gogit
- name: run tests
run: GOEXPERIMENT='' make test-sqlite
run: GITEA_TEST_DATABASE=sqlite make test-integration
timeout-minutes: 50
env:
TAGS: bindata gogit sqlite sqlite_unlock_notify
RACE_ENABLED: true
TEST_TAGS: gogit sqlite sqlite_unlock_notify
TAGS: bindata gogit
GOEXPERIMENT:
GOTEST_FLAGS: -race -timeout=40m
test-unit:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
services:
elasticsearch:
image: elasticsearch:7.5.0
image: docker.elastic.co/elasticsearch/elasticsearch:8.19.14
env:
discovery.type: single-node
xpack.security.enabled: false
ports:
- "9200:9200"
meilisearch:
@ -156,24 +154,21 @@ jobs:
- name: unit-tests
run: make test-backend test-check
env:
TAGS: bindata
RACE_ENABLED: true
GOTESTFLAGS: -timeout=20m
TAGS: bindata sqlite sqlite_unlock_notify
GOTEST_FLAGS: -race -timeout=20m
GITHUB_READ_TOKEN: ${{ secrets.GITHUB_READ_TOKEN }}
- name: unit-tests-gogit
run: GOEXPERIMENT='' make test-backend test-check
run: make test-backend test-check
env:
TAGS: bindata gogit
RACE_ENABLED: true
GOTESTFLAGS: -timeout=20m
TAGS: bindata gogit sqlite sqlite_unlock_notify
GOEXPERIMENT:
GOTEST_FLAGS: -race -timeout=20m
GITHUB_READ_TOKEN: ${{ secrets.GITHUB_READ_TOKEN }}
test-mysql:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
services:
mysql:
# the bitnami mysql image has more options than the official one, it's easier to customize
@ -186,9 +181,10 @@ jobs:
options: >-
--mount type=tmpfs,destination=/bitnami/mysql/data
elasticsearch:
image: elasticsearch:7.5.0
image: docker.elastic.co/elasticsearch/elasticsearch:8.19.14
env:
discovery.type: single-node
xpack.security.enabled: false
ports:
- "9200:9200"
smtpimap:
@ -215,10 +211,9 @@ jobs:
env:
TAGS: bindata
- name: run migration tests
run: make test-mysql-migration
run: GITEA_TEST_DATABASE=mysql make test-migration
- name: run tests
# run: make integration-test-coverage (at the moment, no coverage is really handled)
run: make test-mysql
run: GITEA_TEST_DATABASE=mysql make test-integration
env:
TAGS: bindata
TEST_INDEXER_CODE_ES_URL: "http://elastic:changeme@elasticsearch:9200"
@ -227,8 +222,6 @@ jobs:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
services:
mssql:
image: mcr.microsoft.com/mssql/server:2019-latest
@ -258,9 +251,9 @@ jobs:
- run: make backend
env:
TAGS: bindata
- run: make test-mssql-migration
- run: GITEA_TEST_DATABASE=mssql make test-migration
- name: run tests
run: make test-mssql
run: GITEA_TEST_DATABASE=mssql make test-integration
timeout-minutes: 50
env:
TAGS: bindata

View File

@ -7,34 +7,41 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
permissions:
contents: read
jobs:
files-changed:
uses: ./.github/workflows/files-changed.yml
permissions:
contents: read
container:
# QEMU-based build is slow (40-50 minutes), so run arm64 and riscv64 when dockerfile changes.
# Run amd64 when any docker-related files change, which is fast (4 minutes).
container-amd64:
if: needs.files-changed.outputs.docker == 'true'
needs: files-changed
needs: [files-changed]
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- name: Build regular container image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
- uses: ./.github/actions/docker-dryrun
with:
context: .
platforms: linux/amd64,linux/arm64,linux/riscv64
push: false
cache-from: type=registry,ref=ghcr.io/go-gitea/gitea:buildcache-rootful
- name: Build rootless container image
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
platform: linux/amd64
container-arm64:
if: needs.files-changed.outputs.dockerfile == 'true'
needs: [files-changed]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: ./.github/actions/docker-dryrun
with:
context: .
push: false
platforms: linux/amd64,linux/arm64,linux/riscv64
file: Dockerfile.rootless
cache-from: type=registry,ref=ghcr.io/go-gitea/gitea:buildcache-rootless
platform: linux/arm64
container-riscv64:
if: needs.files-changed.outputs.dockerfile == 'true'
needs: [files-changed]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: ./.github/actions/docker-dryrun
with:
platform: linux/riscv64

View File

@ -7,18 +7,17 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
permissions:
contents: read
jobs:
files-changed:
uses: ./.github/workflows/files-changed.yml
permissions:
contents: read
test-e2e:
if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.e2e == 'true'
needs: files-changed
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
@ -39,10 +38,13 @@ jobs:
- run: make deps-frontend
- run: make frontend
- run: make deps-backend
- run: make gitea-e2e
- run: make backend
env:
TAGS: bindata sqlite sqlite_unlock_notify
- run: make playwright
- run: make test-e2e
timeout-minutes: 10
env:
TAGS: bindata sqlite sqlite_unlock_notify
FORCE_COLOR: 1
GITEA_TEST_E2E_DEBUG: 1

3
.gitignore vendored
View File

@ -55,10 +55,7 @@ cpu.out
*.log.*.gz
/gitea
/gitea-e2e
/gitea-vet
/debug
/integrations.test
/bin
/dist

View File

@ -139,11 +139,11 @@ Here's how to run the test suite:
- run tests (we suggest running them on Linux)
| Command | Action | |
| :------------------------------------------ | :------------------------------------------------------- | ------------------------------------------- |
|``make test[\#SpecificTestName]`` | run unit test(s) | |
|``make test-sqlite[\#SpecificTestName]`` | run [integration](tests/integration) test(s) for SQLite | [More details](tests/integration/README.md) |
|``make test-e2e`` | run [end-to-end](tests/e2e) test(s) using Playwright | |
| Command | Action | |
|:----------------------------------------------|:-----------------------------------------------------| ------------------------------------------- |
| ``make test-backend[\#SpecificTestName]`` | run unit test(s) | |
| ``make test-integration[\#SpecificTestName]`` | run [integration](tests/integration) test(s) | [More details](tests/integration/README.md) |
| ``make test-e2e`` | run [end-to-end](tests/e2e) test(s) using Playwright | |
- E2E test environment variables

325
Makefile
View File

@ -7,10 +7,9 @@ export GOEXPERIMENT ?= jsonv2
GO ?= go
SHASUM ?= shasum -a 256
HAS_GO := $(shell hash $(GO) > /dev/null 2>&1 && echo yes)
COMMA := ,
XGO_VERSION := go-1.25.x
XGO_VERSION := go-1.26.x
AIR_PACKAGE ?= github.com/air-verse/air@v1 # renovate: datasource=go
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3 # renovate: datasource=go
@ -22,15 +21,28 @@ XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1 # renovate: datasource=go
ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1.7.11 # renovate: datasource=go
DOCKER_IMAGE ?= gitea/gitea
DOCKER_TAG ?= latest
DOCKER_REF := $(DOCKER_IMAGE):$(DOCKER_TAG)
HAS_GO := $(shell hash $(GO) > /dev/null 2>&1 && echo yes)
ifeq ($(HAS_GO), yes)
CGO_EXTRA_CFLAGS := -DSQLITE_MAX_VARIABLE_NUMBER=32766
CGO_CFLAGS ?= $(shell $(GO) env CGO_CFLAGS) $(CGO_EXTRA_CFLAGS)
endif
MAKE_EVIDENCE_DIR := .make_evidence
# Use sqlite as default database if running tests, only do so for local tests, not in CI.
# CI should explicitly set the database to avoid unexpected results.
ifneq ($(findstring test-,$(MAKECMDGOALS)),)
ifeq ($(CI),)
GITEA_TEST_DATABASE ?= sqlite
endif
endif
TAGS ?=
ifeq ($(GITEA_TEST_DATABASE),sqlite)
TAGS += sqlite sqlite_unlock_notify
endif
TAGS_EVIDENCE := $(MAKE_EVIDENCE_DIR)/tags
CGO_ENABLED ?= 0
ifneq (,$(findstring sqlite,$(TAGS))$(findstring pam,$(TAGS)))
CGO_ENABLED = 1
@ -49,15 +61,16 @@ else ifeq ($(patsubst Windows%,Windows,$(OS)),Windows)
IS_WINDOWS := yes
endif
endif
# GOFLAGS and EXTRA_GOFLAGS are for the 'go build' command only
ifeq ($(IS_WINDOWS),yes)
GOFLAGS := -v -buildmode=exe
EXECUTABLE ?= gitea.exe
EXECUTABLE_E2E ?= gitea-e2e.exe
else
GOFLAGS := -v
EXECUTABLE ?= gitea
EXECUTABLE_E2E ?= gitea-e2e
endif
EXTRA_GOFLAGS ?=
ifeq ($(shell sed --version 2>/dev/null | grep -q GNU && echo gnu),gnu)
SED_INPLACE := sed -i
@ -65,15 +78,8 @@ else
SED_INPLACE := sed -i ''
endif
EXTRA_GOFLAGS ?=
MAKE_EVIDENCE_DIR := .make_evidence
GOTESTFLAGS ?=
ifeq ($(RACE_ENABLED),true)
GOFLAGS += -race
GOTESTFLAGS += -race
endif
# GOTEST_FLAGS is for unit test and integration test
GOTEST_FLAGS ?= -timeout 40m
STORED_VERSION_FILE := VERSION
@ -126,12 +132,6 @@ AIR_TMP_DIR := .air
GO_LICENSE_FILE := assets/go-licenses.json
TAGS ?=
TAGS_SPLIT := $(subst $(COMMA), ,$(TAGS))
TAGS_EVIDENCE := $(MAKE_EVIDENCE_DIR)/tags
TEST_TAGS ?= $(TAGS_SPLIT) sqlite sqlite_unlock_notify
TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(DIST) $(MAKE_EVIDENCE_DIR) $(AIR_TMP_DIR)
GO_DIRS := build cmd models modules routers services tests tools
@ -151,6 +151,7 @@ ESLINT_CONCURRENCY ?= 2
SWAGGER_SPEC := templates/swagger/v1_json.tmpl
SWAGGER_SPEC_INPUT := templates/swagger/v1_input.json
SWAGGER_EXCLUDE := code.gitea.io/sdk
OPENAPI3_SPEC := templates/swagger/v1_openapi3_json.tmpl
TEST_MYSQL_HOST ?= mysql:3306
TEST_MYSQL_DBNAME ?= testgitea
@ -163,13 +164,19 @@ TEST_PGSQL_PASSWORD ?= postgres
TEST_PGSQL_SCHEMA ?= gtestschema
TEST_MINIO_ENDPOINT ?= minio:9000
TEST_MSSQL_HOST ?= mssql:1433
TEST_MSSQL_DBNAME ?= gitea
TEST_MSSQL_DBNAME ?= testgitea
TEST_MSSQL_USERNAME ?= sa
TEST_MSSQL_PASSWORD ?= MwantsaSecurePassword1
# Include local Makefile
# Makefile.local is listed in .gitignore
sinclude Makefile.local
ifneq ("$(wildcard Makefile.local)","")
include Makefile.local
endif
$(foreach v, $(filter TEST_%, $(.VARIABLES)), $(eval MAKEFILE_VARS+=$v=$($v)))
$(foreach v, $(filter GITEA_TEST_%, $(.VARIABLES)), $(eval MAKEFILE_VARS+=$v=$($v)))
export MAKEFILE_VARS
.PHONY: all
all: build
@ -178,15 +185,8 @@ all: build
help: Makefile ## print Makefile help information.
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m[TARGETS] default target: build\033[0m\n\n\033[35mTargets:\033[0m\n"} /^[0-9A-Za-z._-]+:.*?##/ { printf " \033[36m%-45s\033[0m %s\n", $$1, $$2 }' Makefile #$(MAKEFILE_LIST)
@printf " \033[36m%-46s\033[0m %s\n" "test-e2e" "test end to end using playwright"
@printf " \033[36m%-46s\033[0m %s\n" "test[#TestSpecificName]" "run unit test"
@printf " \033[36m%-46s\033[0m %s\n" "test-sqlite[#TestSpecificName]" "run integration test for sqlite"
.PHONY: git-check
git-check:
@if git lfs >/dev/null 2>&1 ; then : ; else \
echo "Gitea requires git with lfs support to run tests." ; \
exit 1; \
fi
@printf " \033[36m%-46s\033[0m %s\n" "test-backend[#TestSpecificName]" "run unit test (sqlite only)"
@printf " \033[36m%-46s\033[0m %s\n" "test-integration[#TestSpecificName]" "run integration test for GITEA_TEST_DATABASE (sqlite, mysql, pgsql, mssql)"
.PHONY: clean-all
clean-all: clean ## delete backend, frontend and integration files
@ -194,14 +194,8 @@ clean-all: clean ## delete backend, frontend and integration files
.PHONY: clean
clean: ## delete backend and integration files
rm -rf $(EXECUTABLE) $(EXECUTABLE_E2E) $(DIST) $(BINDATA_DEST_WILDCARD) \
integrations*.test \
tests/integration/gitea-integration-* \
tests/integration/indexers-* \
tests/sqlite.ini tests/mysql.ini tests/pgsql.ini tests/mssql.ini man/ \
tests/e2e/gitea-e2e-*/ \
tests/e2e/indexers-*/ \
tests/e2e/reports/ tests/e2e/test-artifacts/ tests/e2e/test-snapshots/
rm -f $(EXECUTABLE) test-*.test tests/*.ini
rm -rf $(DIST) $(BINDATA_DEST_WILDCARD) man tests/integration/gitea-integration-*
.PHONY: fmt
fmt: ## format the Go and template code
@ -233,7 +227,7 @@ TAGS_PREREQ := $(TAGS_EVIDENCE)
endif
.PHONY: generate-swagger
generate-swagger: $(SWAGGER_SPEC) ## generate the swagger spec from code comments
generate-swagger: $(SWAGGER_SPEC) $(OPENAPI3_SPEC) ## generate the swagger spec from code comments
$(SWAGGER_SPEC): $(GO_SOURCES) $(SWAGGER_SPEC_INPUT)
$(GO) run $(SWAGGER_PACKAGE) generate spec --exclude "$(SWAGGER_EXCLUDE)" --input "$(SWAGGER_SPEC_INPUT)" --output './$(SWAGGER_SPEC)'
@ -255,6 +249,21 @@ swagger-validate: ## check if the swagger spec is valid
$(GO) run $(SWAGGER_PACKAGE) validate './$(SWAGGER_SPEC)'
@$(SED_INPLACE) -E -e 's|"basePath":( *)"/(.*)"|"basePath":\1"\2"|g' './$(SWAGGER_SPEC)' # remove the prefix slash from basePath
.PHONY: generate-openapi3
generate-openapi3: $(OPENAPI3_SPEC) ## generate the OpenAPI 3.0 spec from the Swagger 2.0 spec
$(OPENAPI3_SPEC): $(SWAGGER_SPEC) build/generate-openapi.go $(wildcard build/openapi3gen/*.go)
$(GO) run build/generate-openapi.go
.PHONY: openapi3-check
openapi3-check: generate-openapi3
@diff=$$(git diff --color=always '$(OPENAPI3_SPEC)'); \
if [ -n "$$diff" ]; then \
echo "Please run 'make generate-openapi3' and commit the result:"; \
printf "%s" "$${diff}"; \
exit 1; \
fi
.PHONY: checks
checks: checks-frontend checks-backend ## run various consistency checks
@ -262,10 +271,10 @@ checks: checks-frontend checks-backend ## run various consistency checks
checks-frontend: lockfile-check svg-check ## check frontend files
.PHONY: checks-backend
checks-backend: tidy-check swagger-check fmt-check swagger-validate security-check ## check backend files
checks-backend: tidy-check swagger-check openapi3-check fmt-check swagger-validate security-check ## check backend files
.PHONY: lint
lint: lint-frontend lint-backend lint-spell ## lint everything
lint: lint-frontend lint-backend lint-templates lint-swagger lint-spell lint-md lint-actions lint-json lint-yaml ## lint everything
.PHONY: lint-fix
lint-fix: lint-frontend-fix lint-backend-fix lint-spell-fix ## lint everything and fix issues
@ -373,13 +382,10 @@ watch-frontend: node_modules ## start vite dev server for frontend
watch-backend: ## watch backend files and continuously rebuild
GITEA_RUN_MODE=dev $(GO) run $(AIR_PACKAGE) -c .air.toml
.PHONY: test
test: test-frontend test-backend ## test everything
.PHONY: test-backend
test-backend: ## test backend files
@echo "Running go test with $(GOTESTFLAGS) -tags '$(TEST_TAGS)'..."
@$(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' $(GO_TEST_PACKAGES)
@echo "Running go test with $(GOTEST_FLAGS) -tags '$(TAGS)'..."
@$(GO) test $(GOTEST_FLAGS) -tags='$(TAGS)' $(GO_TEST_PACKAGES)
.PHONY: test-frontend
test-frontend: node_modules ## test frontend files
@ -397,10 +403,10 @@ test-check:
exit 1; \
fi
.PHONY: test\#%
test\#%:
@echo "Running go test with -tags '$(TEST_TAGS)'..."
@$(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -run $(subst .,/,$*) $(GO_TEST_PACKAGES)
.PHONY: test-backend\#%
test-backend\#%:
@echo "Running go test with -tags '$(TAGS)'..."
@$(GO) test $(GOTEST_FLAGS) -tags='$(TAGS)' -run $(subst .,/,$*) $(GO_TEST_PACKAGES)
.PHONY: coverage
coverage:
@ -410,8 +416,8 @@ coverage:
.PHONY: unit-test-coverage
unit-test-coverage:
@echo "Running unit-test-coverage $(GOTESTFLAGS) -tags '$(TEST_TAGS)'..."
@$(GO) test $(GOTESTFLAGS) -timeout=20m -tags='$(TEST_TAGS)' -cover -coverprofile coverage.out $(GO_TEST_PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1
@echo "Running unit-test-coverage $(GOTEST_FLAGS) -tags '$(TAGS)'..."
@$(GO) test $(GOTEST_FLAGS) -tags='$(TAGS)' -cover -coverprofile coverage.out $(GO_TEST_PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1
.PHONY: tidy
tidy: ## run go mod tidy
@ -438,83 +444,33 @@ go-licenses: $(GO_LICENSE_FILE) ## regenerate go licenses
$(GO_LICENSE_FILE): go.mod go.sum
GO=$(GO) $(GO) run build/generate-go-licenses.go $(GO_LICENSE_FILE)
generate-ini-sqlite:
sed -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-sqlite|g' \
-e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \
tests/sqlite.ini.tmpl > tests/sqlite.ini
.PHONY: test-integration
test-integration:
@# Use a compiled binary: testlogger forwards gitea logs to t.Log, so `go test -v`
@# would flood output per passing test. testcache can't help these tests anyway —
@# they mutate the work directory, so cache inputs change between runs.
$(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' -c code.gitea.io/gitea/tests/integration -o ./test-integration-$(GITEA_TEST_DATABASE).test
./test-integration-$(GITEA_TEST_DATABASE).test
.PHONY: test-sqlite
test-sqlite: integrations.sqlite.test generate-ini-sqlite
GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test
.PHONY: test-integration\#%
test-integration\#%:
$(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' -run $(subst .,/,$*) code.gitea.io/gitea/tests/integration
.PHONY: test-sqlite\#%
test-sqlite\#%: integrations.sqlite.test generate-ini-sqlite
GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.run $(subst .,/,$*)
.PHONY: test-migration
test-migration: migrations.integration.test migrations.individual.test
.PHONY: test-sqlite-migration
test-sqlite-migration: migrations.sqlite.test migrations.individual.sqlite.test
.PHONY: migrations.integration.test
migrations.integration.test:
$(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' code.gitea.io/gitea/tests/integration/migration-test
generate-ini-mysql:
sed -e 's|{{TEST_MYSQL_HOST}}|${TEST_MYSQL_HOST}|g' \
-e 's|{{TEST_MYSQL_DBNAME}}|${TEST_MYSQL_DBNAME}|g' \
-e 's|{{TEST_MYSQL_USERNAME}}|${TEST_MYSQL_USERNAME}|g' \
-e 's|{{TEST_MYSQL_PASSWORD}}|${TEST_MYSQL_PASSWORD}|g' \
-e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-mysql|g' \
-e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \
tests/mysql.ini.tmpl > tests/mysql.ini
.PHONY: migrations.individual.test
migrations.individual.test:
@# tests of multiple packages use the same database, don't run in parallel
$(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: test-mysql
test-mysql: integrations.mysql.test generate-ini-mysql
GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test
.PHONY: test-mysql\#%
test-mysql\#%: integrations.mysql.test generate-ini-mysql
GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test -test.run $(subst .,/,$*)
.PHONY: test-mysql-migration
test-mysql-migration: migrations.mysql.test migrations.individual.mysql.test
generate-ini-pgsql:
sed -e 's|{{TEST_PGSQL_HOST}}|${TEST_PGSQL_HOST}|g' \
-e 's|{{TEST_PGSQL_DBNAME}}|${TEST_PGSQL_DBNAME}|g' \
-e 's|{{TEST_PGSQL_USERNAME}}|${TEST_PGSQL_USERNAME}|g' \
-e 's|{{TEST_PGSQL_PASSWORD}}|${TEST_PGSQL_PASSWORD}|g' \
-e 's|{{TEST_PGSQL_SCHEMA}}|${TEST_PGSQL_SCHEMA}|g' \
-e 's|{{TEST_MINIO_ENDPOINT}}|${TEST_MINIO_ENDPOINT}|g' \
-e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-pgsql|g' \
-e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \
tests/pgsql.ini.tmpl > tests/pgsql.ini
.PHONY: test-pgsql
test-pgsql: integrations.pgsql.test generate-ini-pgsql
GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test
.PHONY: test-pgsql\#%
test-pgsql\#%: integrations.pgsql.test generate-ini-pgsql
GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test -test.run $(subst .,/,$*)
.PHONY: test-pgsql-migration
test-pgsql-migration: migrations.pgsql.test migrations.individual.pgsql.test
generate-ini-mssql:
sed -e 's|{{TEST_MSSQL_HOST}}|${TEST_MSSQL_HOST}|g' \
-e 's|{{TEST_MSSQL_DBNAME}}|${TEST_MSSQL_DBNAME}|g' \
-e 's|{{TEST_MSSQL_USERNAME}}|${TEST_MSSQL_USERNAME}|g' \
-e 's|{{TEST_MSSQL_PASSWORD}}|${TEST_MSSQL_PASSWORD}|g' \
-e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-mssql|g' \
-e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \
tests/mssql.ini.tmpl > tests/mssql.ini
.PHONY: test-mssql
test-mssql: integrations.mssql.test generate-ini-mssql
GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test
.PHONY: test-mssql\#%
test-mssql\#%: integrations.mssql.test generate-ini-mssql
GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test -test.run $(subst .,/,$*)
.PHONY: test-mssql-migration
test-mssql-migration: migrations.mssql.test migrations.individual.mssql.test
.PHONY: migrations.individual.test\#%
migrations.individual.test\#%:
$(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' code.gitea.io/gitea/models/migrations/$*
.PHONY: playwright
playwright: deps-frontend
@ -522,109 +478,8 @@ playwright: deps-frontend
@pnpm exec playwright install $(if $(GITHUB_ACTIONS),,--with-deps) chromium firefox $(PLAYWRIGHT_FLAGS)
.PHONY: test-e2e
test-e2e: playwright $(EXECUTABLE_E2E)
@EXECUTABLE=$(EXECUTABLE_E2E) ./tools/test-e2e.sh $(GITEA_TEST_E2E_FLAGS)
.PHONY: bench-sqlite
bench-sqlite: integrations.sqlite.test generate-ini-sqlite
GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
.PHONY: bench-mysql
bench-mysql: integrations.mysql.test generate-ini-mysql
GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
.PHONY: bench-mssql
bench-mssql: integrations.mssql.test generate-ini-mssql
GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
.PHONY: bench-pgsql
bench-pgsql: integrations.pgsql.test generate-ini-pgsql
GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench .
.PHONY: integration-test-coverage
integration-test-coverage: integrations.cover.test generate-ini-mysql
GITEA_TEST_CONF=tests/mysql.ini ./integrations.cover.test -test.coverprofile=integration.coverage.out
.PHONY: integration-test-coverage-sqlite
integration-test-coverage-sqlite: integrations.cover.sqlite.test generate-ini-sqlite
GITEA_TEST_CONF=tests/sqlite.ini ./integrations.cover.sqlite.test -test.coverprofile=integration.coverage.out
integrations.mysql.test: git-check $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.mysql.test
integrations.pgsql.test: git-check $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.pgsql.test
integrations.mssql.test: git-check $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.mssql.test
integrations.sqlite.test: git-check $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.sqlite.test -tags '$(TEST_TAGS)'
integrations.cover.test: git-check $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -coverpkg $(shell echo $(GO_TEST_PACKAGES) | tr ' ' ',') -o integrations.cover.test
integrations.cover.sqlite.test: git-check $(GO_SOURCES)
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -coverpkg $(shell echo $(GO_TEST_PACKAGES) | tr ' ' ',') -o integrations.cover.sqlite.test -tags '$(TEST_TAGS)'
.PHONY: migrations.mysql.test
migrations.mysql.test: $(GO_SOURCES) generate-ini-mysql
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.mysql.test
GITEA_TEST_CONF=tests/mysql.ini ./migrations.mysql.test
.PHONY: migrations.pgsql.test
migrations.pgsql.test: $(GO_SOURCES) generate-ini-pgsql
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.pgsql.test
GITEA_TEST_CONF=tests/pgsql.ini ./migrations.pgsql.test
.PHONY: migrations.mssql.test
migrations.mssql.test: $(GO_SOURCES) generate-ini-mssql
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.mssql.test
GITEA_TEST_CONF=tests/mssql.ini ./migrations.mssql.test
.PHONY: migrations.sqlite.test
migrations.sqlite.test: $(GO_SOURCES) generate-ini-sqlite
$(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.sqlite.test -tags '$(TEST_TAGS)'
GITEA_TEST_CONF=tests/sqlite.ini ./migrations.sqlite.test
.PHONY: migrations.individual.mysql.test
migrations.individual.mysql.test: $(GO_SOURCES) generate-ini-mysql
GITEA_TEST_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: migrations.individual.sqlite.test\#%
migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite
GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
.PHONY: migrations.individual.pgsql.test
migrations.individual.pgsql.test: $(GO_SOURCES) generate-ini-pgsql
GITEA_TEST_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: migrations.individual.pgsql.test\#%
migrations.individual.pgsql.test\#%: $(GO_SOURCES) generate-ini-pgsql
GITEA_TEST_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
.PHONY: migrations.individual.mssql.test
migrations.individual.mssql.test: $(GO_SOURCES) generate-ini-mssql
GITEA_TEST_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: migrations.individual.mssql.test\#%
migrations.individual.mssql.test\#%: $(GO_SOURCES) generate-ini-mssql
GITEA_TEST_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
.PHONY: migrations.individual.sqlite.test
migrations.individual.sqlite.test: $(GO_SOURCES) generate-ini-sqlite
GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES)
.PHONY: migrations.individual.sqlite.test\#%
migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite
GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$*
.PHONY: check
check: test
.PHONY: install $(TAGS_PREREQ)
install: $(wildcard *.go)
CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)'
test-e2e: playwright frontend backend
@EXECUTABLE=$(EXECUTABLE) ./tools/test-e2e.sh $(GITEA_TEST_E2E_FLAGS)
.PHONY: build
build: frontend backend ## build everything
@ -657,9 +512,6 @@ ifneq ($(and $(STATIC),$(findstring pam,$(TAGS))),)
endif
CGO_ENABLED="$(CGO_ENABLED)" CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@
$(EXECUTABLE_E2E): $(GO_SOURCES) $(FRONTEND_DEST)
CGO_ENABLED=1 $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TEST_TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@
.PHONY: release
release: frontend generate release-windows release-linux release-darwin release-freebsd release-copy release-compress vendor release-sources release-check
@ -817,11 +669,6 @@ generate-manpage: ## generate manpage
@gzip -9 man/man1/gitea.1 && echo man/man1/gitea.1.gz created
@#TODO A small script that formats config-cheat-sheet.en-us.md nicely for use as a config man page
.PHONY: docker
docker:
docker build --disable-content-trust=false -t $(DOCKER_REF) .
# support also build args docker build --build-arg GITEA_VERSION=v1.2.3 --build-arg TAGS="bindata sqlite sqlite_unlock_notify" .
# Disable parallel execution because it would break some targets that don't
# specify exact dependencies like 'backend' which does currently not depend
# on 'frontend' to enable Node.js-less builds from source tarballs.

File diff suppressed because one or more lines are too long

97
build/generate-openapi.go Normal file
View File

@ -0,0 +1,97 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
// generate-openapi converts Gitea's Swagger 2.0 spec into an OpenAPI 3.0 spec.
//
// Gitea generates a Swagger 2.0 spec from code annotations (make generate-swagger).
// This tool converts it to OAS3 so that SDK generators and tools that require
// OAS3 (e.g. progenitor for Rust) can consume it directly. The conversion also
// deduplicates inline enum definitions into named schema components, producing
// cleaner SDK output with proper enum types instead of anonymous strings.
//
// Run: go run build/generate-openapi.go
// Output: templates/swagger/v1_openapi3_json.tmpl
//go:build ignore
package main
import (
"encoding/json"
"fmt"
"log"
"os"
"regexp"
"sort"
"strings"
"code.gitea.io/gitea/build/openapi3gen"
"github.com/getkin/kin-openapi/openapi3"
)
const (
swaggerSpecPath = "templates/swagger/v1_json.tmpl"
openapi3OutPath = "templates/swagger/v1_openapi3_json.tmpl"
appSubUrlVar = "{{.SwaggerAppSubUrl}}"
appVerVar = "{{.SwaggerAppVer}}"
appSubUrlPlaceholder = "GITEA_APP_SUB_URL_PLACEHOLDER"
appVerPlaceholder = "0.0.0-gitea-placeholder"
)
var (
appSubUrlRe = regexp.MustCompile(regexp.QuoteMeta(appSubUrlVar))
appVerRe = regexp.MustCompile(regexp.QuoteMeta(appVerVar))
enumScanDirs = []string{
"modules/structs",
"modules/commitstatus",
}
)
// main drives the Swagger 2.0 → OpenAPI 3.0 conversion: it scans the source
// tree for swagger:enum annotations, reads the generated Swagger template,
// swaps Gitea's template variables for parse-safe placeholders, converts the
// spec, and writes the result back out as a template.
func main() {
	astEnumMap, err := openapi3gen.ScanSwaggerEnumTypes(enumScanDirs)
	if err != nil {
		log.Fatalf("scanning swagger:enum annotations: %v", err)
	}

	// Report the discovered enum types on stderr, sorted for stable output.
	typeNames := make([]string, 0, len(astEnumMap))
	for _, typeName := range astEnumMap {
		typeNames = append(typeNames, typeName)
	}
	sort.Strings(typeNames)
	fmt.Fprintf(os.Stderr, "discovered %d swagger:enum types: %s\n", len(typeNames), strings.Join(typeNames, ", "))

	raw, err := os.ReadFile(swaggerSpecPath)
	if err != nil {
		log.Fatalf("reading swagger spec: %v", err)
	}

	// The spec template contains Go-template variables that are not valid
	// JSON; substitute placeholders before handing the bytes to the parser.
	sanitized := appSubUrlRe.ReplaceAll(raw, []byte(appSubUrlPlaceholder))
	sanitized = appVerRe.ReplaceAll(sanitized, []byte(appVerPlaceholder))

	oas3, err := openapi3gen.Convert(sanitized, astEnumMap)
	if err != nil {
		log.Fatalf("converting to openapi 3.0: %v", err)
	}
	oas3.Servers = openapi3.Servers{
		{URL: appSubUrlPlaceholder + "/api/v1"},
	}

	encoded, err := json.MarshalIndent(oas3, "", " ")
	if err != nil {
		log.Fatalf("marshaling openapi 3.0: %v", err)
	}

	// Restore the template variables in the marshaled output.
	rendered := strings.ReplaceAll(string(encoded), appSubUrlPlaceholder, appSubUrlVar)
	rendered = strings.ReplaceAll(rendered, appVerPlaceholder, appVerVar)
	rendered = strings.TrimSpace(rendered)

	if err := os.WriteFile(openapi3OutPath, []byte(rendered), 0o644); err != nil {
		log.Fatalf("writing openapi 3.0 spec: %v", err)
	}
	fmt.Printf("Generated %s\n", openapi3OutPath)
}

View File

@ -0,0 +1,281 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package openapi3gen
import (
	"fmt"
	"regexp"
	"sort"
	"strings"

	"code.gitea.io/gitea/modules/json"

	"github.com/getkin/kin-openapi/openapi2"
	"github.com/getkin/kin-openapi/openapi2conv"
	"github.com/getkin/kin-openapi/openapi3"
)
// rxDeprecated matches "deprecated" as a word at the start of a description
// or preceded by whitespace/punctuation that indicates a leading marker (e.g.
// "Deprecated: true", "deprecated (use X instead)"). Rejects negated phrases
// like "not deprecated" or "previously deprecated, now supported".
var rxDeprecated = regexp.MustCompile(`(?i)(?:^|[\n.;])\s*deprecated\b`)
// Convert parses a Swagger 2.0 spec and returns an OAS3 spec, applying
// Gitea-specific post-processing: file-schema fixups, URI formats,
// deprecated flags, and shared-enum extraction.
//
// astEnumMap is a value-set-key → Go-type-name map (built by
// ScanSwaggerEnumTypes). If a shared enum in the spec has no entry in the
// map, Convert returns an error — no fallback naming.
func Convert(swaggerJSON []byte, astEnumMap map[string]string) (*openapi3.T, error) {
	var spec2 openapi2.T
	if err := json.Unmarshal(swaggerJSON, &spec2); err != nil {
		return nil, fmt.Errorf("parsing swagger 2.0: %w", err)
	}

	doc, err := openapi2conv.ToV3(&spec2)
	if err != nil {
		return nil, fmt.Errorf("converting to openapi 3.0: %w", err)
	}

	// Post-process in a fixed order: structural fixes first, then metadata,
	// then the enum extraction that rewrites property references in place.
	fixFileSchemas(doc)
	addURIFormats(doc)
	addDeprecatedFlags(doc)
	if err := extractSharedEnums(doc, astEnumMap); err != nil {
		return nil, err
	}
	return doc, nil
}
// fixFileSchemas walks every operation in the document and rewrites legacy
// "type: file" schemas found in response and request-body content.
func fixFileSchemas(doc *openapi3.T) {
	for _, pathItem := range doc.Paths.Map() {
		ops := []*openapi3.Operation{
			pathItem.Get, pathItem.Post, pathItem.Put, pathItem.Patch,
			pathItem.Delete, pathItem.Head, pathItem.Options, pathItem.Trace,
		}
		for _, op := range ops {
			if op == nil {
				continue
			}
			for _, resp := range op.Responses.Map() {
				if resp.Value == nil {
					continue
				}
				for _, mt := range resp.Value.Content {
					fixSchema(mt.Schema)
				}
			}
			if rb := op.RequestBody; rb != nil && rb.Value != nil {
				for _, mt := range rb.Value.Content {
					fixSchema(mt.Schema)
				}
			}
		}
	}
}
// fixSchema rewrites any "type: file" schemas to the OAS3 equivalent
// (type: string, format: binary), recursing into Properties, Items, and
// AllOf/OneOf/AnyOf/Not branches. $ref nodes are skipped so shared schemas
// are rewritten exactly once when visited through their declaration.
func fixSchema(ref *openapi3.SchemaRef) {
	if ref == nil || ref.Value == nil || ref.Ref != "" {
		return
	}
	schema := ref.Value
	if schema.Type.Is("file") {
		schema.Type = &openapi3.Types{"string"}
		schema.Format = "binary"
	}
	for _, propRef := range schema.Properties {
		fixSchema(propRef)
	}
	fixSchema(schema.Items)
	fixSchema(schema.Not)
	// The three composition branches get identical treatment.
	for _, group := range []openapi3.SchemaRefs{schema.AllOf, schema.OneOf, schema.AnyOf} {
		for _, sub := range group {
			fixSchema(sub)
		}
	}
}
// addURIFormats sets format: uri on string properties whose names indicate
// they hold URLs. This information is lost in Swagger 2.0 but is valuable
// for code generators.
func addURIFormats(doc *openapi3.T) {
	if doc.Components == nil {
		return
	}
	for _, schemaRef := range doc.Components.Schemas {
		if schemaRef.Value == nil {
			continue
		}
		for propName, propRef := range schemaRef.Value.Properties {
			if propRef == nil || propRef.Value == nil || propRef.Ref != "" {
				continue
			}
			if !isURLProperty(propName) {
				continue
			}
			// Only untyped-format strings are upgraded; never clobber an
			// existing format.
			if prop := propRef.Value; prop.Type.Is("string") && prop.Format == "" {
				prop.Format = "uri"
			}
		}
	}
}
// isURLProperty reports whether a property name conventionally holds a URL.
// Any name ending in "_url" qualifies (this already covers html_url,
// clone_url, avatar_url, …), as does the bare name "url". The original
// switch listed "html_url" and "clone_url" explicitly, but those cases were
// unreachable — the suffix check subsumes them.
func isURLProperty(name string) bool {
	return name == "url" || strings.HasSuffix(name, "_url")
}
// addDeprecatedFlags sets deprecated: true on schema properties whose
// description starts with a "deprecated" marker (e.g. "Deprecated: true"
// or "deprecated (use X instead)"). Does not match negated phrases.
func addDeprecatedFlags(doc *openapi3.T) {
	if doc.Components == nil {
		return
	}
	for _, schemaRef := range doc.Components.Schemas {
		if schemaRef.Value == nil {
			continue
		}
		for _, propRef := range schemaRef.Value.Properties {
			if propRef == nil || propRef.Value == nil || propRef.Ref != "" {
				continue
			}
			prop := propRef.Value
			if rxDeprecated.MatchString(prop.Description) {
				prop.Deprecated = true
			}
		}
	}
}
// enumUsage records one occurrence of an inline string enum inside a
// component schema property, so identical value sets can be grouped and
// later rewritten to share a single named schema.
type enumUsage struct {
	schemaName string              // owning component schema name
	propName   string              // property that declares the enum
	propRef    *openapi3.SchemaRef // pointer into the doc; rewritten in place
	inItems    bool                // true when the enum sits on an array property's items
}
// extractSharedEnums finds identical enum arrays used by multiple schema
// properties, creates a standalone named schema for each, and replaces
// the inline enums with $ref pointers.
//
// If the derived enum name collides with an existing component schema, or
// no // swagger:enum annotation matches the value set, generation aborts
// with an actionable error — there are no silent fallbacks.
func extractSharedEnums(doc *openapi3.T, astEnumMap map[string]string) error {
	if doc.Components == nil {
		return nil
	}
	// Phase 1: group every inline string enum — both direct property enums
	// and array-item enums — by its canonical value-set key.
	enumGroups := map[string][]enumUsage{}
	for schemaName, schemaRef := range doc.Components.Schemas {
		if schemaRef.Value == nil {
			continue
		}
		for propName, propRef := range schemaRef.Value.Properties {
			if propRef == nil || propRef.Value == nil || propRef.Ref != "" {
				continue
			}
			// Direct enum on a string property. Single-value "enums" are
			// ignored (>1 required) — not worth extracting.
			if len(propRef.Value.Enum) > 1 && propRef.Value.Type.Is("string") {
				key := EnumKey(propRef.Value.Enum)
				enumGroups[key] = append(enumGroups[key], enumUsage{schemaName, propName, propRef, false})
			}
			// Enum on the items of an array-of-strings property.
			if propRef.Value.Type.Is("array") && propRef.Value.Items != nil &&
				propRef.Value.Items.Value != nil && propRef.Value.Items.Ref == "" &&
				len(propRef.Value.Items.Value.Enum) > 1 && propRef.Value.Items.Value.Type.Is("string") {
				key := EnumKey(propRef.Value.Items.Value.Enum)
				enumGroups[key] = append(enumGroups[key], enumUsage{schemaName, propName, propRef, true})
			}
		}
	}
	// Phase 2: for each value set used by 2+ properties, create a named
	// component schema and rewire every usage to reference it.
	for key, usages := range enumGroups {
		if len(usages) < 2 {
			continue
		}
		enumName, err := deriveEnumName(key, usages, astEnumMap)
		if err != nil {
			return err
		}
		if _, exists := doc.Components.Schemas[enumName]; exists {
			return fmt.Errorf("enum name collision: %s already exists as a component schema", enumName)
		}
		// Take the value list from the first usage — all usages share the
		// same canonical key, hence the same value set.
		var enumValues []any
		if usages[0].inItems {
			enumValues = usages[0].propRef.Value.Items.Value.Enum
		} else {
			enumValues = usages[0].propRef.Value.Enum
		}
		doc.Components.Schemas[enumName] = &openapi3.SchemaRef{
			Value: &openapi3.Schema{
				Type: &openapi3.Types{"string"},
				Enum: enumValues,
			},
		}
		ref := "#/components/schemas/" + enumName
		for _, usage := range usages {
			if usage.inItems {
				usage.propRef.Value.Items = &openapi3.SchemaRef{Ref: ref}
			} else {
				old := usage.propRef.Value
				// A bare $ref cannot carry sibling metadata in OAS3, so wrap
				// in allOf when the property has a description, deprecation
				// flag, or format that must be preserved.
				if old.Description == "" && !old.Deprecated && old.Format == "" {
					usage.propRef.Ref = ref
					usage.propRef.Value = nil
				} else {
					usage.propRef.Value = &openapi3.Schema{
						AllOf: openapi3.SchemaRefs{
							{Ref: ref},
						},
						Description: old.Description,
						Deprecated:  old.Deprecated,
						Format:      old.Format,
					}
				}
			}
		}
	}
	return nil
}
// deriveEnumName looks up a shared enum's Go type name from astEnumMap by
// value-set key. If no annotation matches, returns an error identifying the
// offending properties and the fix.
func deriveEnumName(key string, usages []enumUsage, astEnumMap map[string]string) (string, error) {
	if name, ok := astEnumMap[key]; ok {
		return name, nil
	}
	// Deduplicate "Schema.prop" pairs, then sort: Go map iteration order is
	// randomized, so without the sort the error message (and hence CI logs)
	// would differ between runs for the same failure.
	props := map[string]bool{}
	for _, u := range usages {
		props[fmt.Sprintf("%s.%s", u.schemaName, u.propName)] = true
	}
	propList := make([]string, 0, len(props))
	for p := range props {
		propList = append(propList, p)
	}
	sort.Strings(propList)
	return "", fmt.Errorf(
		"no swagger:enum annotation matches value-set %q used by %d properties: %v; "+
			"fix by adding a named string type with // swagger:enum to modules/structs or modules/commitstatus",
		key, len(usages), propList,
	)
}

View File

@ -0,0 +1,170 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package openapi3gen
import (
"strings"
"testing"
"github.com/getkin/kin-openapi/openapi3"
)
// TestDeriveEnumName_hit checks that a value set present in the AST map
// resolves to its annotated Go type name without error.
func TestDeriveEnumName_hit(t *testing.T) {
	key := EnumKey([]any{"red", "green", "blue"})
	usages := []enumUsage{{schemaName: "Paint", propName: "color"}}
	name, err := deriveEnumName(key, usages, map[string]string{key: "Color"})
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if name != "Color" {
		t.Fatalf("got %q, want %q", name, "Color")
	}
}
// TestDeriveEnumName_miss verifies that an unknown value set produces an
// error that names the offending property and points at the swagger:enum fix.
func TestDeriveEnumName_miss(t *testing.T) {
	key := EnumKey([]any{"x", "y"})
	usages := []enumUsage{{schemaName: "Thing", propName: "kind"}}
	_, err := deriveEnumName(key, usages, map[string]string{})
	if err == nil {
		t.Fatal("expected miss error, got nil")
	}
	msg := err.Error()
	if !strings.Contains(msg, "Thing.kind") {
		t.Fatalf("error %q should list the missing usage", msg)
	}
	if !strings.Contains(msg, "swagger:enum") {
		t.Fatalf("error %q should hint at the fix", msg)
	}
}
// TestExtractSharedEnums_usesASTMap verifies that two schemas sharing an
// identical inline enum are collapsed into one named component schema whose
// name comes from the AST-derived map.
func TestExtractSharedEnums_usesASTMap(t *testing.T) {
	// Two schemas, A and B, each with the same three-value "color" enum.
	doc := &openapi3.T{
		Components: &openapi3.Components{
			Schemas: openapi3.Schemas{
				"A": {Value: &openapi3.Schema{
					Type: &openapi3.Types{"object"},
					Properties: openapi3.Schemas{
						"color": {Value: &openapi3.Schema{
							Type: &openapi3.Types{"string"},
							Enum: []any{"red", "green", "blue"},
						}},
					},
				}},
				"B": {Value: &openapi3.Schema{
					Type: &openapi3.Types{"object"},
					Properties: openapi3.Schemas{
						"color": {Value: &openapi3.Schema{
							Type: &openapi3.Types{"string"},
							Enum: []any{"red", "green", "blue"},
						}},
					},
				}},
			},
		},
	}
	astMap := map[string]string{EnumKey([]any{"red", "green", "blue"}): "Color"}
	if err := extractSharedEnums(doc, astMap); err != nil {
		t.Fatalf("extractSharedEnums: %v", err)
	}
	if _, ok := doc.Components.Schemas["Color"]; !ok {
		t.Fatalf("expected Color schema to be extracted")
	}
}
// TestFixFileSchemas_recursesIntoNested verifies that legacy "type: file"
// schemas are rewritten to string/binary wherever they occur in a request
// body: as a direct property, in array items, and inside each of the
// allOf/oneOf/anyOf/not composition branches.
func TestFixFileSchemas_recursesIntoNested(t *testing.T) {
	// fileType builds a fresh Swagger-2-style file schema for each position.
	fileType := func() *openapi3.SchemaRef {
		return &openapi3.SchemaRef{Value: &openapi3.Schema{Type: &openapi3.Types{"file"}}}
	}
	doc := &openapi3.T{
		Paths: openapi3.NewPaths(),
	}
	doc.Paths.Set("/upload", &openapi3.PathItem{
		Post: &openapi3.Operation{
			RequestBody: &openapi3.RequestBodyRef{
				Value: &openapi3.RequestBody{
					Content: openapi3.Content{
						"multipart/form-data": {
							Schema: &openapi3.SchemaRef{Value: &openapi3.Schema{
								Type: &openapi3.Types{"object"},
								Properties: openapi3.Schemas{
									"attachment": fileType(),
									"items": {Value: &openapi3.Schema{
										Type:  &openapi3.Types{"array"},
										Items: fileType(),
									}},
									"alt": {Value: &openapi3.Schema{
										AllOf: openapi3.SchemaRefs{fileType()},
									}},
									"one": {Value: &openapi3.Schema{
										OneOf: openapi3.SchemaRefs{fileType()},
									}},
									"any": {Value: &openapi3.Schema{
										AnyOf: openapi3.SchemaRefs{fileType()},
									}},
									"not": {Value: &openapi3.Schema{
										Not: fileType(),
									}},
								},
							}},
						},
					},
				},
			},
			Responses: openapi3.NewResponses(),
		},
	})
	fixFileSchemas(doc)
	// Every position must now be string/binary.
	props := doc.Paths.Value("/upload").Post.RequestBody.Value.Content["multipart/form-data"].Schema.Value.Properties
	if !props["attachment"].Value.Type.Is("string") || props["attachment"].Value.Format != "binary" {
		t.Errorf("nested property not fixed: %+v", props["attachment"].Value)
	}
	if !props["items"].Value.Items.Value.Type.Is("string") || props["items"].Value.Items.Value.Format != "binary" {
		t.Errorf("array items not fixed: %+v", props["items"].Value.Items.Value)
	}
	if !props["alt"].Value.AllOf[0].Value.Type.Is("string") || props["alt"].Value.AllOf[0].Value.Format != "binary" {
		t.Errorf("allOf branch not fixed: %+v", props["alt"].Value.AllOf[0].Value)
	}
	if !props["one"].Value.OneOf[0].Value.Type.Is("string") || props["one"].Value.OneOf[0].Value.Format != "binary" {
		t.Errorf("oneOf branch not fixed: %+v", props["one"].Value.OneOf[0].Value)
	}
	if !props["any"].Value.AnyOf[0].Value.Type.Is("string") || props["any"].Value.AnyOf[0].Value.Format != "binary" {
		t.Errorf("anyOf branch not fixed: %+v", props["any"].Value.AnyOf[0].Value)
	}
	if !props["not"].Value.Not.Value.Type.Is("string") || props["not"].Value.Not.Value.Format != "binary" {
		t.Errorf("not branch not fixed: %+v", props["not"].Value.Not.Value)
	}
}
// TestExtractSharedEnums_missReturnsError verifies that a shared enum with
// no matching swagger:enum annotation aborts extraction with an error
// instead of inventing a fallback name.
func TestExtractSharedEnums_missReturnsError(t *testing.T) {
	doc := &openapi3.T{
		Components: &openapi3.Components{
			Schemas: openapi3.Schemas{
				"A": {Value: &openapi3.Schema{
					Type: &openapi3.Types{"object"},
					Properties: openapi3.Schemas{
						"color": {Value: &openapi3.Schema{
							Type: &openapi3.Types{"string"},
							Enum: []any{"red", "green"},
						}},
					},
				}},
				"B": {Value: &openapi3.Schema{
					Type: &openapi3.Types{"object"},
					Properties: openapi3.Schemas{
						"color": {Value: &openapi3.Schema{
							Type: &openapi3.Types{"string"},
							Enum: []any{"red", "green"},
						}},
					},
				}},
			},
		},
	}
	// Empty AST map → no annotation can match the shared value set.
	if err := extractSharedEnums(doc, map[string]string{}); err == nil {
		t.Fatal("expected miss error")
	}
}

View File

@ -0,0 +1,188 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
// Package openapi3gen converts Gitea's Swagger 2.0 spec to an OpenAPI 3.0
// spec. It discovers Go enum type names by scanning swagger:enum annotations
// in the source tree, then names extracted shared-enum schemas accordingly.
package openapi3gen
import (
"fmt"
"go/ast"
"go/parser"
"go/token"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
)
// EnumKey returns a canonical key for a set of enum values: values are
// stringified, sorted, and joined with "|". Used to match enum value sets
// across spec properties and scanned Go type declarations.
func EnumKey(values []any) string {
	parts := make([]string, 0, len(values))
	for _, value := range values {
		parts = append(parts, fmt.Sprintf("%v", value))
	}
	sort.Strings(parts)
	return strings.Join(parts, "|")
}
var rxSwaggerEnum = regexp.MustCompile(`swagger:enum\s+(\w+)`)
// ScanSwaggerEnumTypes walks .go files under each dir and returns a map from
// a canonical value-set key (see EnumKey) to the Go type name declared with
// // swagger:enum TypeName.
//
// Returns an error on parse failure, on an annotation for a type whose
// constants can't be extracted, or on value-set collisions between two
// different enum types.
func ScanSwaggerEnumTypes(dirs []string) (map[string]string, error) {
	fset := token.NewFileSet()
	parsed := []*ast.File{}
	for _, dir := range dirs {
		entries, err := os.ReadDir(dir)
		if err != nil {
			return nil, fmt.Errorf("reading %s: %w", dir, err)
		}
		for _, entry := range entries {
			// Non-recursive by design: only top-level .go files (excluding
			// tests) in each listed directory are scanned.
			if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") {
				continue
			}
			if strings.HasSuffix(entry.Name(), "_test.go") {
				continue
			}
			path := filepath.Join(dir, entry.Name())
			file, err := parser.ParseFile(fset, path, nil, parser.ParseComments)
			if err != nil {
				return nil, fmt.Errorf("%s: %w", path, err)
			}
			parsed = append(parsed, file)
		}
	}
	enumTypes := map[string]string{}  // typeName → "" (presence marker)
	enumValues := map[string][]any{} // typeName → values
	// Pass 1: collect every // swagger:enum TypeName declaration.
	for _, file := range parsed {
		for _, decl := range file.Decls {
			gd, ok := decl.(*ast.GenDecl)
			if !ok || gd.Tok != token.TYPE {
				continue
			}
			if err := collectEnumType(gd, enumTypes); err != nil {
				return nil, fmt.Errorf("%s: %w", fset.Position(gd.Pos()).Filename, err)
			}
		}
	}
	// Pass 2: collect const values; now every annotated type is visible.
	// Two passes are needed because a type and its consts may live in
	// different files, and files are visited in directory-listing order.
	for _, file := range parsed {
		for _, decl := range file.Decls {
			gd, ok := decl.(*ast.GenDecl)
			if !ok || gd.Tok != token.CONST {
				continue
			}
			collectEnumValues(gd, enumTypes, enumValues)
		}
	}
	result := map[string]string{}
	for typeName := range enumTypes {
		values, ok := enumValues[typeName]
		if !ok || len(values) == 0 {
			return nil, fmt.Errorf("swagger:enum %s has no const block with typed string values", typeName)
		}
		key := EnumKey(values)
		// Two distinct types with identical value sets would be
		// indistinguishable by key — refuse rather than silently pick one.
		if existing, ok := result[key]; ok && existing != typeName {
			return nil, fmt.Errorf("swagger:enum value-set collision: %s and %s both use %q", existing, typeName, key)
		}
		result[key] = typeName
	}
	return result, nil
}
// collectEnumType scans a `type` GenDecl for // swagger:enum annotations,
// handling both the lone form (`// swagger:enum Foo\n type Foo string`)
// where the comment group is attached to the GenDecl, and the grouped form:
//
//	type (
//		// swagger:enum Foo
//		Foo string
//	)
//
// where the comment group is attached to each TypeSpec. Caveat: Go's parser
// only attaches a CommentGroup when it is immediately adjacent to the decl.
// A blank line (not a `//` continuation line) between the comment and the
// declaration drops the Doc, so annotations MUST sit directly above their
// type. All current annotated files obey this — the rule is noted here so
// a future edit that inserts a blank line fails fast rather than silently.
func collectEnumType(gd *ast.GenDecl, enumTypes map[string]string) error {
	// Lone form: the comment group hangs off the GenDecl itself.
	if err := registerEnumAnnotation(gd.Doc, gd.Specs, enumTypes); err != nil {
		return err
	}
	// Grouped form: each TypeSpec may carry its own doc comment.
	for _, spec := range gd.Specs {
		typeSpec, ok := spec.(*ast.TypeSpec)
		if !ok || typeSpec.Doc == nil {
			continue
		}
		if err := registerEnumAnnotation(typeSpec.Doc, []ast.Spec{typeSpec}, enumTypes); err != nil {
			return err
		}
	}
	return nil
}
// registerEnumAnnotation records the type named by a // swagger:enum
// annotation in doc, provided a TypeSpec with exactly that name exists among
// specs. A name that matches no spec (e.g. a typo) is reported as an error.
func registerEnumAnnotation(doc *ast.CommentGroup, specs []ast.Spec, enumTypes map[string]string) error {
	if doc == nil {
		return nil
	}
	m := rxSwaggerEnum.FindStringSubmatch(doc.Text())
	if m == nil {
		return nil
	}
	wanted := m[1]
	for _, spec := range specs {
		if ts, ok := spec.(*ast.TypeSpec); ok && ts.Name.Name == wanted {
			enumTypes[wanted] = ""
			return nil
		}
	}
	return fmt.Errorf("swagger:enum %s: no type declaration with that name in the same decl group; check for a typo", wanted)
}
func collectEnumValues(gd *ast.GenDecl, enumTypes map[string]string, enumValues map[string][]any) {
for _, spec := range gd.Specs {
vs, ok := spec.(*ast.ValueSpec)
if !ok || vs.Type == nil {
continue
}
ident, ok := vs.Type.(*ast.Ident)
if !ok {
continue
}
if _, isEnum := enumTypes[ident.Name]; !isEnum {
continue
}
for _, val := range vs.Values {
lit, ok := val.(*ast.BasicLit)
if !ok || lit.Kind != token.STRING {
continue
}
unquoted, err := strconv.Unquote(lit.Value)
if err != nil {
continue
}
enumValues[ident.Name] = append(enumValues[ident.Name], unquoted)
}
}
}

View File

@ -0,0 +1,239 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package openapi3gen
import (
"os"
"path/filepath"
"strings"
"testing"
)
// TestEnumKey_sortsAndJoins checks that values are sorted before joining.
func TestEnumKey_sortsAndJoins(t *testing.T) {
	if got := EnumKey([]any{"b", "a", "c"}); got != "a|b|c" {
		t.Fatalf("EnumKey = %q, want %q", got, "a|b|c")
	}
}
// TestEnumKey_handlesNonStringValues checks that non-string values are
// stringified (via %v) before sorting and joining.
func TestEnumKey_handlesNonStringValues(t *testing.T) {
	if got := EnumKey([]any{2, 1, 3}); got != "1|2|3" {
		t.Fatalf("EnumKey = %q, want %q", got, "1|2|3")
	}
}
// TestScanSwaggerEnumTypes_basic verifies the happy path: one annotated type
// with a typed const block maps its sorted value-set key to the type name.
func TestScanSwaggerEnumTypes_basic(t *testing.T) {
	dir := t.TempDir()
	src := `package fixture
// Color is a primary color.
// swagger:enum Color
type Color string
const (
ColorRed Color = "red"
ColorGreen Color = "green"
ColorBlue Color = "blue"
)
`
	if err := os.WriteFile(filepath.Join(dir, "color.go"), []byte(src), 0o644); err != nil {
		t.Fatal(err)
	}
	got, err := ScanSwaggerEnumTypes([]string{dir})
	if err != nil {
		t.Fatalf("ScanSwaggerEnumTypes: %v", err)
	}
	// The key is order-independent: EnumKey sorts values internally.
	wantKey := EnumKey([]any{"red", "green", "blue"})
	if got[wantKey] != "Color" {
		t.Fatalf("map[%q] = %q, want %q", wantKey, got[wantKey], "Color")
	}
}
// TestScanSwaggerEnumTypes_orphanAnnotation verifies that an annotation whose
// name matches no type in the same decl group is reported as an error — this
// catches typos like "Sttype" for "StateType".
func TestScanSwaggerEnumTypes_orphanAnnotation(t *testing.T) {
	dir := t.TempDir()
	src := `package fixture
// swagger:enum Sttype
type StateType string
const (
StateOpen StateType = "open"
)
`
	if err := os.WriteFile(filepath.Join(dir, "typo.go"), []byte(src), 0o644); err != nil {
		t.Fatal(err)
	}
	_, err := ScanSwaggerEnumTypes([]string{dir})
	if err == nil {
		t.Fatal("expected error for annotation referencing a non-matching type name")
	}
	if !strings.Contains(err.Error(), "Sttype") {
		t.Fatalf("error %q should mention the typo'd name Sttype", err.Error())
	}
}
// TestScanSwaggerEnumTypes_collision verifies that two annotated types with
// identical value sets are rejected, since their canonical keys would be
// indistinguishable.
func TestScanSwaggerEnumTypes_collision(t *testing.T) {
	dir := t.TempDir()
	src := `package fixture
// swagger:enum Alpha
type Alpha string
const (
AlphaX Alpha = "x"
AlphaY Alpha = "y"
)
// swagger:enum Beta
type Beta string
const (
BetaX Beta = "x"
BetaY Beta = "y"
)
`
	if err := os.WriteFile(filepath.Join(dir, "dup.go"), []byte(src), 0o644); err != nil {
		t.Fatal(err)
	}
	_, err := ScanSwaggerEnumTypes([]string{dir})
	if err == nil {
		t.Fatal("expected collision error, got nil")
	}
	msg := err.Error()
	if !strings.Contains(msg, "Alpha") || !strings.Contains(msg, "Beta") {
		t.Fatalf("error %q should mention both Alpha and Beta", msg)
	}
}
// TestScanSwaggerEnumTypes_parseFailure ensures a syntactically invalid source
// file surfaces as an error instead of being silently skipped.
func TestScanSwaggerEnumTypes_parseFailure(t *testing.T) {
	tmp := t.TempDir()
	broken := []byte("package fixture\nfunc Foo() {")
	if err := os.WriteFile(filepath.Join(tmp, "bad.go"), broken, 0o644); err != nil {
		t.Fatal(err)
	}
	if _, err := ScanSwaggerEnumTypes([]string{tmp}); err == nil {
		t.Fatal("expected parse error, got nil")
	}
}
// TestScanSwaggerEnumTypes_annotationWithoutConsts ensures an annotated type
// without any typed const values is reported as an error naming the type.
func TestScanSwaggerEnumTypes_annotationWithoutConsts(t *testing.T) {
	tmp := t.TempDir()
	source := `package fixture
// swagger:enum Lonely
type Lonely string
`
	if err := os.WriteFile(filepath.Join(tmp, "lonely.go"), []byte(source), 0o644); err != nil {
		t.Fatal(err)
	}
	if _, err := ScanSwaggerEnumTypes([]string{tmp}); err == nil {
		t.Fatal("expected error for annotation without consts")
	} else if !strings.Contains(err.Error(), "Lonely") {
		t.Fatalf("error %q should mention Lonely", err.Error())
	}
}
// TestScanSwaggerEnumTypes_constsAndTypeInDifferentFiles checks that consts
// living in a different file than their annotated type are still associated.
func TestScanSwaggerEnumTypes_constsAndTypeInDifferentFiles(t *testing.T) {
	tmp := t.TempDir()
	// Name ordering: `a_consts.go` < `b_type.go`, so readdir returns consts first.
	// Old single-pass scanner would miss the values; two-pass must not.
	fixtures := map[string]string{
		"a_consts.go": `package fixture
const (
HueA Hue = "a"
HueB Hue = "b"
)
`,
		"b_type.go": `package fixture
// swagger:enum Hue
type Hue string
`,
	}
	for name, content := range fixtures {
		if err := os.WriteFile(filepath.Join(tmp, name), []byte(content), 0o644); err != nil {
			t.Fatal(err)
		}
	}
	result, err := ScanSwaggerEnumTypes([]string{tmp})
	if err != nil {
		t.Fatalf("ScanSwaggerEnumTypes: %v", err)
	}
	key := EnumKey([]any{"a", "b"})
	if result[key] != "Hue" {
		t.Fatalf("map[%q] = %q, want %q", key, result[key], "Hue")
	}
}
// TestScanSwaggerEnumTypes_constsBeforeType checks that const declarations
// placed before the annotated type in the same file are still collected.
func TestScanSwaggerEnumTypes_constsBeforeType(t *testing.T) {
	tmp := t.TempDir()
	source := `package fixture
const (
ShadeDark Shade = "dark"
ShadeLight Shade = "light"
)
// swagger:enum Shade
type Shade string
`
	if err := os.WriteFile(filepath.Join(tmp, "shade.go"), []byte(source), 0o644); err != nil {
		t.Fatal(err)
	}
	result, err := ScanSwaggerEnumTypes([]string{tmp})
	if err != nil {
		t.Fatalf("ScanSwaggerEnumTypes: %v", err)
	}
	key := EnumKey([]any{"dark", "light"})
	if result[key] != "Shade" {
		t.Fatalf("map[%q] = %q, want %q", key, result[key], "Shade")
	}
}
// TestScanSwaggerEnumTypes_groupedTypeDecl checks that annotations attached to
// individual specs inside a grouped `type (...)` declaration are honored.
func TestScanSwaggerEnumTypes_groupedTypeDecl(t *testing.T) {
	tmp := t.TempDir()
	source := `package fixture
type (
// swagger:enum Color
Color string
// swagger:enum Shade
Shade string
)
const (
ColorRed Color = "red"
ColorBlue Color = "blue"
)
const (
ShadeDark Shade = "dark"
ShadeLight Shade = "light"
)
`
	if err := os.WriteFile(filepath.Join(tmp, "grouped.go"), []byte(source), 0o644); err != nil {
		t.Fatal(err)
	}
	result, err := ScanSwaggerEnumTypes([]string{tmp})
	if err != nil {
		t.Fatalf("ScanSwaggerEnumTypes: %v", err)
	}
	ck := EnumKey([]any{"red", "blue"})
	sk := EnumKey([]any{"dark", "light"})
	if result[ck] != "Color" {
		t.Fatalf("Color: map[%q] = %q, want %q", ck, result[ck], "Color")
	}
	if result[sk] != "Shade" {
		t.Fatalf("Shade: map[%q] = %q, want %q", sk, result[sk], "Shade")
	}
}

View File

@ -203,8 +203,8 @@ func runDump(ctx context.Context, cmd *cli.Command) error {
}
}()
targetDBType := cmd.String("database")
if len(targetDBType) > 0 && targetDBType != setting.Database.Type.String() {
targetDBType := setting.DatabaseType(cmd.String("database"))
if targetDBType != "" && targetDBType != setting.Database.Type {
log.Info("Dumping database %s => %s...", setting.Database.Type, targetDBType)
} else {
log.Info("Dumping database...")

View File

@ -134,7 +134,7 @@ func PrepareConsoleLoggerLevel(defaultLevel log.Level) func(context.Context, *cl
if globalBool(c, "debug") || globalBool(c, "verbose") {
level = log.TRACE
}
log.SetConsoleLogger(log.DEFAULT, "console-default", level)
log.SetupStderrLogger(log.DEFAULT, "console-stderr", level)
return ctx, nil
}
}

View File

@ -1169,6 +1169,11 @@ LEVEL = Info
;; Retarget child pull requests to the parent pull request branch target on merge of parent pull request. It only works on merged PRs where the head and base branch target the same repo.
;RETARGET_CHILDREN_ON_MERGE = true
;;
;; Default source for the pull request title when opening a new PR.
;; "first-commit" uses the oldest commit's summary.
;; "auto" uses commit's summary if the PR only has one commit, normalizes the branch name if multiple commits.
;DEFAULT_TITLE_SOURCE = first-commit
;;
;; Delay mergeable check until page view or API access, for pull requests that have not been updated in the specified days when their base branches get updated.
;; Use "-1" to always check all pull requests (old behavior). Use "0" to always delay the checks.
;DELAY_CHECK_FOR_INACTIVE_DAYS = 7
@ -1519,7 +1524,7 @@ LEVEL = Info
;; Issue Indexer settings
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; Issue indexer type, currently support: bleve, db, elasticsearch or meilisearch default is bleve
;; Issue indexer type, currently support: bleve, db, elasticsearch (also compatible with OpenSearch) or meilisearch default is bleve
;ISSUE_INDEXER_TYPE = bleve
;;
;; Issue indexer storage path, available when ISSUE_INDEXER_TYPE is bleve
@ -1546,7 +1551,7 @@ LEVEL = Info
;; If empty then it defaults to `sources` only, as if you'd like to disable fully please see REPO_INDEXER_ENABLED.
;REPO_INDEXER_REPO_TYPES = sources,forks,mirrors,templates
;;
;; Code search engine type, could be `bleve` or `elasticsearch`.
;; Code search engine type, could be `bleve` or `elasticsearch` (also compatible with OpenSearch).
;REPO_INDEXER_TYPE = bleve
;;
;; Index file used for code search. available when `REPO_INDEXER_TYPE` is bleve

11
go.mod
View File

@ -46,6 +46,7 @@ require (
github.com/ethantkoenig/rupture v1.0.1
github.com/felixge/fgprof v0.9.5
github.com/fsnotify/fsnotify v1.9.0
github.com/getkin/kin-openapi v0.134.0
github.com/gliderlabs/ssh v0.3.8
github.com/go-chi/chi/v5 v5.2.5
github.com/go-chi/cors v1.2.2
@ -86,7 +87,6 @@ require (
github.com/msteinert/pam/v2 v2.1.0
github.com/nektos/act v0.2.63
github.com/niklasfasching/go-org v1.9.1
github.com/olivere/elastic/v7 v7.0.32
github.com/opencontainers/go-digest v1.0.0
github.com/opencontainers/image-spec v1.1.1
github.com/pquerna/otp v1.5.0
@ -192,6 +192,8 @@ require (
github.com/go-fed/httpsig v1.1.1-0.20201223112313-55836744818e // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-ini/ini v1.67.0 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
github.com/go-webauthn/x v0.2.3 // indirect
github.com/goccy/go-json v0.10.6 // indirect
@ -219,7 +221,7 @@ require (
github.com/klauspost/crc32 v1.3.0 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/libdns/libdns v1.1.1 // indirect
github.com/mailru/easyjson v0.9.2 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/markbates/going v1.0.3 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-runewidth v0.0.21 // indirect
@ -232,15 +234,19 @@ require (
github.com/minio/minlz v1.1.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 // indirect
github.com/mschoch/smat v0.2.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/nwaples/rardecode/v2 v2.2.2 // indirect
github.com/oasdiff/yaml v0.0.0-20260313112342-a3ea61cb4d4c // indirect
github.com/oasdiff/yaml3 v0.0.0-20260224194419-61cd415a242b // indirect
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 // indirect
github.com/olekukonko/errors v1.2.0 // indirect
github.com/olekukonko/ll v0.1.8 // indirect
github.com/olekukonko/tablewriter v1.1.4 // indirect
github.com/onsi/ginkgo v1.16.5 // indirect
github.com/perimeterx/marshmallow v1.1.5 // indirect
github.com/philhofer/fwd v1.2.0 // indirect
github.com/pierrec/lz4/v4 v4.1.26 // indirect
github.com/pjbgf/sha1cd v0.5.0 // indirect
@ -260,6 +266,7 @@ require (
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
github.com/tinylib/msgp v1.6.4 // indirect
github.com/unknwon/com v1.0.1 // indirect
github.com/woodsbury/decimal128 v1.3.0 // indirect
github.com/x448/float16 v0.8.4 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect

28
go.sum
View File

@ -267,14 +267,14 @@ github.com/fatih/color v1.19.0 h1:Zp3PiM21/9Ld6FzSKyL5c/BULoe/ONr9KlbYVOfG8+w=
github.com/fatih/color v1.19.0/go.mod h1:zNk67I0ZUT1bEGsSGyCZYZNrHuTkJJB+r6Q9VuMi0LE=
github.com/felixge/fgprof v0.9.5 h1:8+vR6yu2vvSKn08urWyEuxx75NWPEvybbkBirEpsbVY=
github.com/felixge/fgprof v0.9.5/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM=
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/fxamacker/cbor/v2 v2.9.1 h1:2rWm8B193Ll4VdjsJY28jxs70IdDsHRWgQYAI80+rMQ=
github.com/fxamacker/cbor/v2 v2.9.1/go.mod h1:vM4b+DJCtHn+zz7h3FFp/hDAI9WNWCsZj23V5ytsSxQ=
github.com/getkin/kin-openapi v0.134.0 h1:/L5+1+kfe6dXh8Ot/wqiTgUkjOIEJiC0bbYVziHB8rU=
github.com/getkin/kin-openapi v0.134.0/go.mod h1:wK6ZLG/VgoETO9pcLJ/VmAtIcl/DNlMayNTb716EUxE=
github.com/git-lfs/pktline v0.0.0-20230103162542-ca444d533ef1 h1:mtDjlmloH7ytdblogrMz1/8Hqua1y8B4ID+bh3rvod0=
github.com/git-lfs/pktline v0.0.0-20230103162542-ca444d533ef1/go.mod h1:fenKRzpXDjNpsIBhuhUzvjCKlDjKam0boRAenTE0Q6A=
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
@ -310,6 +310,10 @@ github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZR
github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08=
github.com/go-ldap/ldap/v3 v3.4.13 h1:+x1nG9h+MZN7h/lUi5Q3UZ0fJ1GyDQYbPvbuH38baDQ=
github.com/go-ldap/ldap/v3 v3.4.13/go.mod h1:LxsGZV6vbaK0sIvYfsv47rfh4ca0JXokCoKjZxsszv0=
github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ=
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
github.com/go-redis/redis/v7 v7.4.1 h1:PASvf36gyUpr2zdOUS/9Zqc80GbM+9BDyiJSJDDOrTI=
@ -501,9 +505,8 @@ github.com/lib/pq v1.12.3/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA=
github.com/libdns/libdns v1.1.1 h1:wPrHrXILoSHKWJKGd0EiAVmiJbFShguILTg9leS/P/U=
github.com/libdns/libdns v1.1.1/go.mod h1:4Bj9+5CQiNMVGf87wjX4CY3HQJypUHRuLvlsfsZqLWQ=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mailru/easyjson v0.9.2 h1:dX8U45hQsZpxd80nLvDGihsQ/OxlvTkVUXH2r/8cb2M=
github.com/mailru/easyjson v0.9.2/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU=
github.com/markbates/going v1.0.3 h1:mY45T5TvW+Xz5A6jY7lf4+NLg9D8+iuStIHyR7M8qsE=
github.com/markbates/going v1.0.3/go.mod h1:fQiT6v6yQar9UD6bd/D4Z5Afbk9J6BBVBtLiyY4gp2o=
github.com/markbates/goth v1.82.0 h1:8j/c34AjBSTNzO7zTsOyP5IYCQCMBTRBHAbBt/PI0bQ=
@ -547,6 +550,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8=
github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 h1:j2kD3MT1z4PXCiUllUJF9mWUESr9TWKS7iEKsQ/IipM=
github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450/go.mod h1:skjdDftzkFALcuGzYSklqYd8gvat6F1gZJ4YPVbkZpM=
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
@ -565,6 +570,10 @@ github.com/nwaples/rardecode/v2 v2.2.2/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsR
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
github.com/oasdiff/yaml v0.0.0-20260313112342-a3ea61cb4d4c h1:7ACFcSaQsrWtrH4WHHfUqE1C+f8r2uv8KGaW0jTNjus=
github.com/oasdiff/yaml v0.0.0-20260313112342-a3ea61cb4d4c/go.mod h1:JKox4Gszkxt57kj27u7rvi7IFoIULvCZHUsBTUmQM/s=
github.com/oasdiff/yaml3 v0.0.0-20260224194419-61cd415a242b h1:vivRhVUAa9t1q0Db4ZmezBP8pWQWnXHFokZj0AOea2g=
github.com/oasdiff/yaml3 v0.0.0-20260224194419-61cd415a242b/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o=
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 h1:zrbMGy9YXpIeTnGj4EljqMiZsIcE09mmF8XsD5AYOJc=
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6/go.mod h1:rEKTHC9roVVicUIfZK7DYrdIoM0EOr8mK1Hj5s3JjH0=
github.com/olekukonko/errors v1.2.0 h1:10Zcn4GeV59t/EGqJc8fUjtFT/FuUh5bTMzZ1XwmCRo=
@ -573,8 +582,6 @@ github.com/olekukonko/ll v0.1.8 h1:ysHCJRGHYKzmBSdz9w5AySztx7lG8SQY+naTGYUbsz8=
github.com/olekukonko/ll v0.1.8/go.mod h1:RPRC6UcscfFZgjo1nulkfMH5IM0QAYim0LfnMvUuozw=
github.com/olekukonko/tablewriter v1.1.4 h1:ORUMI3dXbMnRlRggJX3+q7OzQFDdvgbN9nVWj1drm6I=
github.com/olekukonko/tablewriter v1.1.4/go.mod h1:+kedxuyTtgoZLwif3P1Em4hARJs+mVnzKxmsCL/C5RY=
github.com/olivere/elastic/v7 v7.0.32 h1:R7CXvbu8Eq+WlsLgxmKVKPox0oOwAE/2T9Si5BnvK6E=
github.com/olivere/elastic/v7 v7.0.32/go.mod h1:c7PVmLe3Fxq77PIfY/bZmxY/TAamBhCzZ8xDOE09a9k=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
@ -591,6 +598,8 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s=
github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw=
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=
github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
@ -653,9 +662,8 @@ github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w
github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g=
github.com/skeema/knownhosts v1.3.2 h1:EDL9mgf4NzwMXCTfaxSD/o/a5fxDw/xL9nkU28JjdBg=
github.com/skeema/knownhosts v1.3.2/go.mod h1:bEg3iQAuw+jyiw+484wwFJoKSLwcfd7fqRy+N0QTiow=
github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304 h1:Jpy1PXuP99tXNrhbq2BaPz9B+jNAvH1JPQQpG/9GCXY=
github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v1.1.1 h1:T/YLemO5Yp7KPzS+lVtu+WsHn8yoSwTfItdAd1r3cck=
github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbdyOsSH/XohnWpXOlq9NBD5sGAB2FciQMUEe8=
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
@ -700,6 +708,8 @@ github.com/tinylib/msgp v1.6.4/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77ro
github.com/tstranex/u2f v1.0.0 h1:HhJkSzDDlVSVIVt7pDJwCHQj67k7A5EeBgPmeD+pVsQ=
github.com/tstranex/u2f v1.0.0/go.mod h1:eahSLaqAS0zsIEv80+vXT7WanXs7MQQDg3j3wGBSayo=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0=
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ulikunitz/xz v0.5.15 h1:9DNdB5s+SgV3bQ2ApL10xRc35ck0DuIX/isZvIk+ubY=
github.com/ulikunitz/xz v0.5.15/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
@ -712,6 +722,8 @@ github.com/urfave/cli/v3 v3.4.1/go.mod h1:FJSKtM/9AiiTOJL4fJ6TbMUkxBXn7GO9guZqoZ
github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/wneessen/go-mail v0.7.2 h1:xxPnhZ6IZLSgxShebmZ6DPKh1b6OJcoHfzy7UjOkzS8=
github.com/wneessen/go-mail v0.7.2/go.mod h1:+TkW6QP3EVkgTEqHtVmnAE/1MRhmzb8Y9/W3pweuS+k=
github.com/woodsbury/decimal128 v1.3.0 h1:8pffMNWIlC0O5vbyHWFZAt5yWvWcrHA+3ovIIjVWss0=
github.com/woodsbury/decimal128 v1.3.0/go.mod h1:C5UTmyTjW3JftjUFzOVhC20BEQa2a4ZKOB5I6Zjb+ds=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=

View File

@ -71,11 +71,11 @@ func init() {
db.RegisterModel(new(ActionRunIndex))
}
func (run *ActionRun) HTMLURL() string {
func (run *ActionRun) HTMLURL(ctxOpt ...context.Context) string {
if run.Repo == nil {
return ""
}
return fmt.Sprintf("%s/actions/runs/%d", run.Repo.HTMLURL(), run.ID)
return fmt.Sprintf("%s/actions/runs/%d", run.Repo.HTMLURL(ctxOpt...), run.ID)
}
func (run *ActionRun) Link() string {
@ -120,11 +120,7 @@ func (run *ActionRun) RefTooltip() string {
}
// LoadAttributes load Repo TriggerUser if not loaded
func (run *ActionRun) LoadAttributes(ctx context.Context) (err error) {
if run == nil {
return nil
}
func (run *ActionRun) LoadAttributes(ctx context.Context) error {
if err := run.LoadRepo(ctx); err != nil {
return err
}
@ -133,18 +129,19 @@ func (run *ActionRun) LoadAttributes(ctx context.Context) (err error) {
return err
}
if run.TriggerUser == nil {
run.TriggerUserID, run.TriggerUser, err = user_model.GetPossibleUserByID(ctx, run.TriggerUserID)
if err != nil {
return err
}
}
return run.LoadTriggerUser(ctx)
}
return nil
func (run *ActionRun) LoadTriggerUser(ctx context.Context) (err error) {
if run.TriggerUser != nil {
return nil
}
run.TriggerUserID, run.TriggerUser, err = user_model.GetPossibleUserByID(ctx, run.TriggerUserID)
return err
}
func (run *ActionRun) LoadRepo(ctx context.Context) error {
if run == nil || run.Repo != nil {
if run.Repo != nil {
return nil
}

View File

@ -51,10 +51,6 @@ func (attempt *ActionRunAttempt) Duration() time.Duration {
}
func (attempt *ActionRunAttempt) LoadAttributes(ctx context.Context) (err error) {
if attempt == nil {
return nil
}
if attempt.Run == nil {
run, err := GetRunByRepoAndID(ctx, attempt.RepoID, attempt.RunID)
if err != nil {

View File

@ -120,10 +120,6 @@ func (job *ActionRunJob) LoadRepo(ctx context.Context) error {
// LoadAttributes load Run if not loaded
func (job *ActionRunJob) LoadAttributes(ctx context.Context) error {
if job == nil {
return nil
}
if err := job.LoadRun(ctx); err != nil {
return err
}

View File

@ -56,8 +56,10 @@ func (jobs ActionJobList) LoadRuns(ctx context.Context, withRepo bool) error {
return err
}
for _, j := range jobs {
if j.RunID > 0 && j.Run == nil {
if j.Run == nil {
j.Run = runs[j.RunID]
}
if j.Run != nil {
j.Run.Repo = j.Repo
}
}

View File

@ -7,6 +7,7 @@ import (
"context"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/translation"
@ -17,27 +18,39 @@ import (
type RunList []*ActionRun
// GetUserIDs returns a slice of user's id
func (runs RunList) GetUserIDs() []int64 {
return container.FilterSlice(runs, func(run *ActionRun) (int64, bool) {
return run.TriggerUserID, true
})
}
func (runs RunList) LoadTriggerUser(ctx context.Context) error {
userIDs := runs.GetUserIDs()
userIDs := container.FilterSlice(runs, func(run *ActionRun) (int64, bool) {
return run.TriggerUserID, run.TriggerUser == nil
})
users := make(map[int64]*user_model.User, len(userIDs))
if err := db.GetEngine(ctx).In("id", userIDs).Find(&users); err != nil {
return err
}
for _, run := range runs {
if run.TriggerUserID == user_model.ActionsUserID {
run.TriggerUser = user_model.NewActionsUser()
} else {
run.TriggerUser = users[run.TriggerUserID]
if run.TriggerUser == nil {
run.TriggerUser = user_model.NewGhostUser()
}
if run.TriggerUser != nil {
continue
}
run.TriggerUser = users[run.TriggerUserID]
if run.TriggerUserID < 0 {
run.TriggerUserID, run.TriggerUser, _ = user_model.GetPossibleUserByID(ctx, run.TriggerUserID)
} else if run.TriggerUser == nil {
run.TriggerUserID, run.TriggerUser, _ = user_model.GetPossibleUserByID(ctx, user_model.GhostUserID)
}
}
return nil
}
func (runs RunList) LoadRepos(ctx context.Context) error {
repoIDs := container.FilterSlice(runs, func(run *ActionRun) (int64, bool) {
return run.RepoID, run.Repo == nil
})
repos, err := repo_model.GetRepositoriesMapByIDs(ctx, repoIDs)
if err != nil {
return err
}
for _, run := range runs {
if run.Repo == nil {
run.Repo = repos[run.RepoID]
}
}
return nil

View File

@ -125,9 +125,6 @@ func (task *ActionTask) LoadJob(ctx context.Context) error {
// LoadAttributes load Job Steps if not loaded
func (task *ActionTask) LoadAttributes(ctx context.Context) error {
if task == nil {
return nil
}
if err := task.LoadJob(ctx); err != nil {
return err
}

173
models/db/conn.go Normal file
View File

@ -0,0 +1,173 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package db
import (
"errors"
"fmt"
"net"
"net/url"
"os"
"path/filepath"
"strings"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
// ConnOptions holds the parameters needed to build a SQL driver name and
// connection string for any of the supported database types (see ConnStr).
type ConnOptions struct {
	Type     setting.DatabaseType // which database backend the options target
	Host     string               // host (with optional port) or a socket path starting with "/"; unused for sqlite3
	Database string               // database name; may carry extra "?..." parameters for mysql/postgres
	User     string               // login user
	Passwd   string               // login password
	Schema   string               // postgres-only: non-empty selects the schema-aware driver
	SSLMode  string               // TLS/SSL mode; for mysql, "disable" is translated to "tls=false"

	SQLitePath        string // sqlite3-only: database file path
	SQLiteBusyTimeout int    // sqlite3-only: busy timeout passed to the driver
	SQLiteJournalMode string // sqlite3-only: journal mode; empty keeps the driver default
}
// SQLiteConnStrOptions is the sqlite3-specific subset of ConnOptions handed to
// the build-dependent connection-string constructor (makeSQLiteConnStr).
type SQLiteConnStrOptions struct {
	FilePath    string // path of the database file
	BusyTimeout int    // busy timeout passed to the driver
	JournalMode string // journal mode; empty keeps the driver default
}
// GlobalConnOptions returns a ConnOptions populated from the process-wide
// setting.Database configuration, ready to be passed to ConnStr.
func GlobalConnOptions() ConnOptions {
	return ConnOptions{
		Type:     setting.Database.Type,
		Host:     setting.Database.Host,
		Database: setting.Database.Name,
		User:     setting.Database.User,
		Passwd:   setting.Database.Passwd,
		Schema:   setting.Database.Schema,
		SSLMode:  setting.Database.SSLMode,

		SQLitePath:        setting.Database.Path,
		SQLiteBusyTimeout: setting.Database.SQLiteBusyTimeout,
		SQLiteJournalMode: setting.Database.SQLiteJournalMode,
	}
}
// sqlDriverPostgresSchema is the name under which the schema-aware postgres
// driver is registered (see registerPostgresSchemaDriver).
const sqlDriverPostgresSchema = "postgresschema"

// makeSQLiteConnStr builds the sqlite3 driver name and connection string.
// This default stub always fails; builds with the "sqlite" tag replace it in
// an init function with a real implementation.
var makeSQLiteConnStr = func(opts SQLiteConnStrOptions) (string, string, error) {
	return "", "", errors.New(`this Gitea binary was not built with SQLite3 support, get an official release or rebuild with: -tags sqlite,sqlite_unlock_notify`)
}
// ConnStrDefaultDatabase returns the driver name and connection string for
// connecting to the server's default database: the same as ConnStr, but with
// the database name and schema cleared.
func ConnStrDefaultDatabase(opts ConnOptions) (string, string, error) {
	opts.Database = ""
	opts.Schema = ""
	return ConnStr(opts)
}
// ConnStr returns the SQL driver name and the connection string for the given
// options. For sqlite3 it also creates the parent directory of the database
// file. It returns an error for an empty sqlite3 path, for an unknown database
// type, or when sqlite support is not compiled in.
func ConnStr(opts ConnOptions) (string, string, error) {
	switch {
	case opts.Type.IsMySQL():
		// use unix socket or tcp socket
		connType := util.Iif(strings.HasPrefix(opts.Host, "/"), "unix", "tcp")
		// allow (Postgres-inspired) default value to work in MySQL
		tls := util.Iif(opts.SSLMode == "disable", "false", opts.SSLMode)
		// in case the database name is a partial connection string which contains "?" parameters
		paramSep := util.Iif(strings.Contains(opts.Database, "?"), "&", "?")
		connStr := fmt.Sprintf("%s:%s@%s(%s)/%s%sparseTime=true&tls=%s", opts.User, opts.Passwd, connType, opts.Host, opts.Database, paramSep, tls)
		return "mysql", connStr, nil
	case opts.Type.IsPostgreSQL():
		connStr := makePgSQLConnStr(opts.Host, opts.User, opts.Passwd, opts.Database, opts.SSLMode)
		// a non-empty schema requires the schema-aware wrapper driver
		driver := util.Iif(opts.Schema == "", "postgres", sqlDriverPostgresSchema)
		registerPostgresSchemaDriver()
		return driver, connStr, nil
	case opts.Type.IsMSSQL():
		host, port := parseMSSQLHostPort(opts.Host)
		connStr := fmt.Sprintf("server=%s; port=%s; user id=%s; password=%s;", host, port, opts.User, opts.Passwd)
		if opts.Database != "" {
			connStr += "; database=" + opts.Database
		}
		return "mssql", connStr, nil
	case opts.Type.IsSQLite3():
		if opts.SQLitePath == "" {
			return "", "", errors.New("sqlite3 database path cannot be empty")
		}
		// make sure the directory holding the database file exists before the driver opens it
		if err := os.MkdirAll(filepath.Dir(opts.SQLitePath), os.ModePerm); err != nil {
			return "", "", fmt.Errorf("failed to create directories: %w", err)
		}
		// delegated to the build-tag-dependent implementation (errors without the "sqlite" tag)
		return makeSQLiteConnStr(SQLiteConnStrOptions{
			FilePath:    opts.SQLitePath,
			JournalMode: opts.SQLiteJournalMode,
			BusyTimeout: opts.SQLiteBusyTimeout,
		})
	}
	return "", "", fmt.Errorf("unknown database type: %s", opts.Type)
}
// parsePgSQLHostPort parses the host specification in the various forms defined in
// https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING
// and returns the host and port number, defaulting to 127.0.0.1 and 5432.
func parsePgSQLHostPort(info string) (host, port string) {
	var err error
	if host, port, err = net.SplitHostPort(info); err != nil {
		// no port in the input: the whole value is the host; unwrap a bracketed IPv6 address
		host, port = info, ""
		if strings.HasPrefix(host, "[") && strings.HasSuffix(host, "]") {
			host = host[1 : len(host)-1]
		}
	}
	// apply fallbacks for missing pieces
	if host == "" {
		host = "127.0.0.1"
	}
	if port == "" {
		port = "5432"
	}
	return host, port
}

// makePgSQLConnStr assembles a "postgres://" connection URL. Any "?..." suffix
// on the database name is carried over as query parameters, and a host that
// starts with "/" is treated as a unix socket directory (moved into the "host"
// query parameter as libpq expects).
func makePgSQLConnStr(dbHost, dbUser, dbPasswd, dbName, dbsslMode string) (connStr string) {
	name, extraParams, _ := strings.Cut(dbName, "?")
	host, port := parsePgSQLHostPort(dbHost)
	u := url.URL{
		Scheme:   "postgres",
		User:     url.UserPassword(dbUser, dbPasswd),
		Host:     net.JoinHostPort(host, port),
		Path:     name,
		OmitHost: false,
		RawQuery: extraParams,
	}
	q := u.Query()
	if strings.HasPrefix(host, "/") { // looks like a unix socket
		q.Add("host", host)
		u.Host = ":" + port
	}
	q.Set("sslmode", dbsslMode)
	u.RawQuery = q.Encode()
	return u.String()
}
// parseMSSQLHostPort splits an MSSQL host specification into host and port,
// accepting either "host:port" or the MSSQL-style "host,port" form and
// defaulting to 127.0.0.1 and port 0 for missing parts.
func parseMSSQLHostPort(info string) (string, string) {
	// the default port "0" might be related to MSSQL's dynamic port, maybe it should be double-confirmed in the future
	host, port := "127.0.0.1", "0"
	switch {
	case strings.Contains(info, ":"):
		h, rest, _ := strings.Cut(info, ":")
		p, _, _ := strings.Cut(rest, ":")
		host, port = h, p
	case strings.Contains(info, ","):
		h, rest, _ := strings.Cut(info, ",")
		p, _, _ := strings.Cut(rest, ",")
		host, port = h, strings.TrimSpace(p)
	case info != "":
		host = info
	}
	if host == "" {
		host = "127.0.0.1"
	}
	if port == "" {
		port = "0"
	}
	return host, port
}

View File

@ -1,7 +1,7 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package setting
package db
import (
"testing"
@ -9,7 +9,7 @@ import (
"github.com/stretchr/testify/assert"
)
func Test_parsePostgreSQLHostPort(t *testing.T) {
func TestParsePgSQLHostPort(t *testing.T) {
tests := map[string]struct {
HostPort string
Host string
@ -49,14 +49,14 @@ func Test_parsePostgreSQLHostPort(t *testing.T) {
for k, test := range tests {
t.Run(k, func(t *testing.T) {
t.Log(test.HostPort)
host, port := parsePostgreSQLHostPort(test.HostPort)
host, port := parsePgSQLHostPort(test.HostPort)
assert.Equal(t, test.Host, host)
assert.Equal(t, test.Port, port)
})
}
}
func Test_getPostgreSQLConnectionString(t *testing.T) {
func TestMakePgSQLConnStr(t *testing.T) {
tests := []struct {
Host string
User string
@ -103,7 +103,7 @@ func Test_getPostgreSQLConnectionString(t *testing.T) {
}
for _, test := range tests {
connStr := getPostgreSQLConnectionString(test.Host, test.User, test.Passwd, test.Name, test.SSLMode)
connStr := makePgSQLConnStr(test.Host, test.User, test.Passwd, test.Name, test.SSLMode)
assert.Equal(t, test.Output, connStr)
}
}

View File

@ -18,8 +18,8 @@ var registerOnce sync.Once
func registerPostgresSchemaDriver() {
registerOnce.Do(func() {
sql.Register("postgresschema", &postgresSchemaDriver{})
dialects.RegisterDriver("postgresschema", dialects.QueryDriver("postgres"))
sql.Register(sqlDriverPostgresSchema, &postgresSchemaDriver{})
dialects.RegisterDriver(sqlDriverPostgresSchema, dialects.QueryDriver("postgres"))
})
}

View File

@ -0,0 +1,34 @@
//go:build sqlite
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package db
import (
"fmt"
"strconv"
"strings"
"code.gitea.io/gitea/modules/setting"
_ "github.com/mattn/go-sqlite3"
)
// init advertises sqlite3 as a supported database type and installs the
// CGO-based (mattn/go-sqlite3) connection-string builder. This file is only
// compiled when the "sqlite" build tag is set.
func init() {
	setting.SupportedDatabaseTypes = append(setting.SupportedDatabaseTypes, "sqlite3")
	makeSQLiteConnStr = makeSQLiteConnStrMattnCGO
}
// makeSQLiteConnStrMattnCGO builds the DSN understood by the mattn/go-sqlite3
// (CGO) driver from the given options, returning the driver name "sqlite3"
// together with the "file:..." connection string.
func makeSQLiteConnStrMattnCGO(opts SQLiteConnStrOptions) (string, string, error) {
	params := []string{
		"cache=shared",
		"mode=rwc",
		"_busy_timeout=" + strconv.Itoa(opts.BusyTimeout),
		"_txlock=immediate",
	}
	if opts.JournalMode != "" {
		params = append(params, "_journal_mode="+opts.JournalMode)
	}
	connStr := fmt.Sprintf("file:%s?%s", opts.FilePath, strings.Join(params, "&"))
	return "sqlite3", connStr, nil
}

View File

@ -3,10 +3,14 @@
package db
import "xorm.io/xorm/schemas"
import (
"code.gitea.io/gitea/modules/setting"
"xorm.io/xorm/schemas"
)
// DumpDatabase dumps all data from database according the special database SQL syntax to file system.
func DumpDatabase(filePath, dbType string) error {
func DumpDatabase(filePath string, dbType setting.DatabaseType) error {
var tbs []*schemas.Table
for _, t := range registeredModels {
t, err := xormEngine.TableInfo(t)

View File

@ -6,7 +6,6 @@ package db
import (
"context"
"fmt"
"strings"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
@ -24,31 +23,23 @@ func init() {
// newXORMEngine returns a new XORM engine from the configuration
func newXORMEngine() (*xorm.Engine, error) {
connStr, err := setting.DBConnStr()
connOpts := GlobalConnOptions()
driver, connStr, err := ConnStr(connOpts)
if err != nil {
return nil, err
}
var engine *xorm.Engine
if setting.Database.Type.IsPostgreSQL() && len(setting.Database.Schema) > 0 {
// OK whilst we sort out our schema issues - create a schema aware postgres
registerPostgresSchemaDriver()
engine, err = xorm.NewEngine("postgresschema", connStr)
} else {
engine, err = xorm.NewEngine(setting.Database.Type.String(), connStr)
}
engine, err := xorm.NewEngine(driver, connStr)
if err != nil {
return nil, err
}
switch setting.Database.Type {
case "mysql":
switch {
case connOpts.Type.IsMySQL():
engine.Dialect().SetParams(map[string]string{"rowFormat": "DYNAMIC"})
case "mssql":
case connOpts.Type.IsMSSQL():
engine.Dialect().SetParams(map[string]string{"DEFAULT_VARCHAR": "nvarchar"})
}
engine.SetSchema(setting.Database.Schema)
engine.SetSchema(connOpts.Schema)
return engine, nil
}
@ -56,10 +47,7 @@ func newXORMEngine() (*xorm.Engine, error) {
func InitEngine(ctx context.Context) error {
xe, err := newXORMEngine()
if err != nil {
if strings.Contains(err.Error(), "SQLite3 support") {
return fmt.Errorf("sqlite3 requires: -tags sqlite,sqlite_unlock_notify\n%w", err)
}
return fmt.Errorf("failed to connect to database: %w", err)
return fmt.Errorf("failed to init database engine: %w", err)
}
xe.SetMapper(names.GonicMapper{})

View File

@ -30,7 +30,7 @@ func TestDumpDatabase(t *testing.T) {
assert.NoError(t, db.GetEngine(t.Context()).Sync(new(Version)))
for _, dbType := range setting.SupportedDatabaseTypes {
assert.NoError(t, db.DumpDatabase(filepath.Join(dir, dbType+".sql"), dbType))
assert.NoError(t, db.DumpDatabase(filepath.Join(dir, dbType+".sql"), setting.DatabaseType(dbType)))
}
}

View File

@ -59,17 +59,18 @@ type Issue struct {
PosterID int64 `xorm:"INDEX"`
Poster *user_model.User `xorm:"-"`
OriginalAuthor string
OriginalAuthorID int64 `xorm:"index"`
Title string `xorm:"name"`
Content string `xorm:"LONGTEXT"`
RenderedContent template.HTML `xorm:"-"`
ContentVersion int `xorm:"NOT NULL DEFAULT 0"`
Labels []*Label `xorm:"-"`
isLabelsLoaded bool `xorm:"-"`
MilestoneID int64 `xorm:"INDEX"`
Milestone *Milestone `xorm:"-"`
isMilestoneLoaded bool `xorm:"-"`
Project *project_model.Project `xorm:"-"`
OriginalAuthorID int64 `xorm:"index"`
Title string `xorm:"name"`
Content string `xorm:"LONGTEXT"`
RenderedContent template.HTML `xorm:"-"`
ContentVersion int `xorm:"NOT NULL DEFAULT 0"`
Labels []*Label `xorm:"-"`
isLabelsLoaded bool `xorm:"-"`
MilestoneID int64 `xorm:"INDEX"`
Milestone *Milestone `xorm:"-"`
isMilestoneLoaded bool `xorm:"-"`
Projects []*project_model.Project `xorm:"-"`
isProjectsLoaded bool `xorm:"-"`
Priority int
AssigneeID int64 `xorm:"-"`
Assignee *user_model.User `xorm:"-"`
@ -305,7 +306,7 @@ func (issue *Issue) LoadAttributes(ctx context.Context) (err error) {
return err
}
if err = issue.LoadProject(ctx); err != nil {
if err = issue.LoadProjects(ctx); err != nil {
return err
}
@ -355,6 +356,7 @@ func (issue *Issue) ResetAttributesLoaded() {
issue.isMilestoneLoaded = false
issue.isAttachmentsLoaded = false
issue.isAssigneeLoaded = false
issue.isProjectsLoaded = false
}
// GetIsRead load the `IsRead` field of the issue

View File

@ -185,7 +185,7 @@ func (issues IssueList) LoadMilestones(ctx context.Context) error {
func (issues IssueList) LoadProjects(ctx context.Context) error {
issueIDs := issues.getIssueIDs()
projectMaps := make(map[int64]*project_model.Project, len(issues))
issueProjectMaps := make(map[int64][]*project_model.Project, len(issues))
left := len(issueIDs)
type projectWithIssueID struct {
@ -202,19 +202,21 @@ func (issues IssueList) LoadProjects(ctx context.Context) error {
Select("project.*, project_issue.issue_id").
Join("INNER", "project_issue", "project.id = project_issue.project_id").
In("project_issue.issue_id", issueIDs[:limit]).
OrderBy("project_issue.issue_id ASC, project.id ASC").
Find(&projects)
if err != nil {
return err
}
for _, project := range projects {
projectMaps[project.IssueID] = project.Project
issueProjectMaps[project.IssueID] = append(issueProjectMaps[project.IssueID], project.Project)
}
left -= limit
issueIDs = issueIDs[limit:]
}
for _, issue := range issues {
issue.Project = projectMaps[issue.ID]
issue.Projects = issueProjectMaps[issue.ID]
issue.isProjectsLoaded = true
}
return nil
}

View File

@ -65,10 +65,10 @@ func TestIssueList_LoadAttributes(t *testing.T) {
}
if issue.ID == int64(1) {
assert.Equal(t, int64(400), issue.TotalTrackedTime)
assert.NotNil(t, issue.Project)
assert.Equal(t, int64(1), issue.Project.ID)
assert.NotEmpty(t, issue.Projects)
assert.Equal(t, int64(1), issue.Projects[0].ID)
} else {
assert.Nil(t, issue.Project)
assert.Empty(t, issue.Projects)
}
}
}

View File

@ -12,41 +12,38 @@ import (
"code.gitea.io/gitea/modules/util"
)
// LoadProject load the project the issue was assigned to
func (issue *Issue) LoadProject(ctx context.Context) (err error) {
if issue.Project == nil {
var p project_model.Project
has, err := db.GetEngine(ctx).Table("project").
// LoadProjects loads all projects the issue is assigned to
func (issue *Issue) LoadProjects(ctx context.Context) (err error) {
if !issue.isProjectsLoaded {
err = db.GetEngine(ctx).Table("project").
Join("INNER", "project_issue", "project.id=project_issue.project_id").
Where("project_issue.issue_id = ?", issue.ID).Get(&p)
if err != nil {
return err
} else if has {
issue.Project = &p
Where("project_issue.issue_id = ?", issue.ID).
OrderBy("project.id ASC").
Find(&issue.Projects)
if err == nil {
issue.isProjectsLoaded = true
}
}
return err
}
func (issue *Issue) projectID(ctx context.Context) int64 {
var ip project_model.ProjectIssue
has, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Get(&ip)
if err != nil || !has {
return 0
}
return ip.ProjectID
func (issue *Issue) projectIDs(ctx context.Context) (projectIDs []int64, _ error) {
err := db.GetEngine(ctx).Table("project_issue").Where("issue_id = ?", issue.ID).Cols("project_id").Find(&projectIDs)
return projectIDs, err
}
// ProjectColumnID return project column id if issue was assigned to one
func (issue *Issue) ProjectColumnID(ctx context.Context) (int64, error) {
var ip project_model.ProjectIssue
has, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Get(&ip)
if err != nil {
return 0, err
} else if !has {
return 0, nil
// ProjectColumnMap returns a map of project ID to column ID for this issue.
func (issue *Issue) ProjectColumnMap(ctx context.Context) (map[int64]int64, error) {
var projIssues []project_model.ProjectIssue
if err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Find(&projIssues); err != nil {
return nil, err
}
return ip.ProjectColumnID, nil
result := make(map[int64]int64, len(projIssues))
for _, projIssue := range projIssues {
result[projIssue.ProjectID] = projIssue.ProjectColumnID
}
return result, nil
}
func LoadProjectIssueColumnMap(ctx context.Context, projectID, defaultColumnID int64) (map[int64]int64, error) {
@ -64,66 +61,91 @@ func LoadProjectIssueColumnMap(ctx context.Context, projectID, defaultColumnID i
return result, nil
}
// IssueAssignOrRemoveProject changes the project associated with an issue
// If newProjectID is 0, the issue is removed from the project
func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectID, newColumnID int64) error {
// IssueAssignOrRemoveProject updates the projects associated with an issue.
// It adds projects that are in newProjectIDs but not currently assigned,
// and removes projects that are currently assigned but not in newProjectIDs.
// If newProjectIDs is empty, all projects are removed from the issue.
// When adding an issue to a project, it is placed in the project's default column.
func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectIDs []int64) error {
return db.WithTx(ctx, func(ctx context.Context) error {
oldProjectID := issue.projectID(ctx)
if err := issue.LoadRepo(ctx); err != nil {
return err
}
// Only check if we add a new project and not remove it.
if newProjectID > 0 {
newProject, err := project_model.GetProjectByID(ctx, newProjectID)
if err != nil {
return err
}
if !newProject.CanBeAccessedByOwnerRepo(issue.Repo.OwnerID, issue.Repo) {
return util.NewPermissionDeniedErrorf("issue %d can't be accessed by project %d", issue.ID, newProject.ID)
}
if newColumnID == 0 {
newDefaultColumn, err := newProject.MustDefaultColumn(ctx)
if err != nil {
return err
}
newColumnID = newDefaultColumn.ID
}
}
if _, err := db.GetEngine(ctx).Where("project_issue.issue_id=?", issue.ID).Delete(&project_model.ProjectIssue{}); err != nil {
return err
}
if oldProjectID > 0 || newProjectID > 0 {
if _, err := CreateComment(ctx, &CreateCommentOptions{
Type: CommentTypeProject,
Doer: doer,
Repo: issue.Repo,
Issue: issue,
OldProjectID: oldProjectID,
ProjectID: newProjectID,
}); err != nil {
return err
}
}
if newProjectID == 0 {
return nil
}
if newColumnID == 0 {
panic("newColumnID must not be zero") // shouldn't happen
}
newSorting, err := project_model.GetColumnIssueNextSorting(ctx, newProjectID, newColumnID)
oldProjectIDs, err := issue.projectIDs(ctx)
if err != nil {
return err
}
return db.Insert(ctx, &project_model.ProjectIssue{
IssueID: issue.ID,
ProjectID: newProjectID,
ProjectColumnID: newColumnID,
Sorting: newSorting,
})
projectsToAdd, projectsToRemove := util.DiffSlice(oldProjectIDs, newProjectIDs)
issue.isProjectsLoaded = false
issue.Projects = nil
if len(projectsToRemove) > 0 {
if _, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).In("project_id", projectsToRemove).Delete(&project_model.ProjectIssue{}); err != nil {
return err
}
for _, projectID := range projectsToRemove {
if _, err := CreateComment(ctx, &CreateCommentOptions{
Type: CommentTypeProject,
Doer: doer,
Repo: issue.Repo,
Issue: issue,
OldProjectID: projectID,
ProjectID: 0,
}); err != nil {
return err
}
}
}
if len(projectsToAdd) > 0 {
projectMap, err := project_model.GetProjectsMapByIDs(ctx, projectsToAdd)
if err != nil {
return err
}
for _, projectID := range projectsToAdd {
newProject, ok := projectMap[projectID]
if !ok {
return util.NewNotExistErrorf("project %d not found", projectID)
}
if !newProject.CanBeAccessedByOwnerRepo(issue.Repo.OwnerID, issue.Repo) {
return util.NewPermissionDeniedErrorf("issue %d can't be accessed by project %d", issue.ID, newProject.ID)
}
defaultColumn, err := newProject.MustDefaultColumn(ctx)
if err != nil {
return err
}
newSorting, err := project_model.GetColumnIssueNextSorting(ctx, projectID, defaultColumn.ID)
if err != nil {
return err
}
err = db.Insert(ctx, &project_model.ProjectIssue{
IssueID: issue.ID,
ProjectID: projectID,
ProjectColumnID: defaultColumn.ID,
Sorting: newSorting,
})
if err != nil {
return err
}
if _, err := CreateComment(ctx, &CreateCommentOptions{
Type: CommentTypeProject,
Doer: doer,
Repo: issue.Repo,
Issue: issue,
OldProjectID: 0,
ProjectID: projectID,
}); err != nil {
return err
}
}
}
return nil
})
}

View File

@ -0,0 +1,149 @@
// Copyright 2026 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package issues_test
import (
"fmt"
"testing"
issues_model "code.gitea.io/gitea/models/issues"
project_model "code.gitea.io/gitea/models/project"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestIssueMultipleProjects exercises assigning one issue to multiple
// projects: adding and removing assignments, reloading the Projects
// attribute, the per-project column map, and querying issues by project IDs.
func TestIssueMultipleProjects(t *testing.T) {
	require.NoError(t, unittest.PrepareTestDatabase())

	t.Run("GeneralTest", func(t *testing.T) {
		// Get test data
		issue1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
		user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
		project1 := unittest.AssertExistsAndLoadBean(t, &project_model.Project{ID: 1})

		// Create a second project for the same repository
		project2 := &project_model.Project{
			Title:        "Test Project 2",
			RepoID:       issue1.RepoID,
			Type:         project_model.TypeRepository,
			TemplateType: project_model.TemplateTypeBasicKanban,
		}
		require.NoError(t, project_model.NewProject(t.Context(), project2))
		defer func() {
			_ = project_model.DeleteProjectByID(t.Context(), project2.ID) // best-effort cleanup
		}()

		// start from a clean state: remove all project assignments from the issue
		err := issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{})
		require.NoError(t, err)
		err = issue1.LoadProjects(t.Context())
		require.NoError(t, err)
		require.Empty(t, issue1.Projects)

		// assign issue to both projects (each project uses its own default column)
		err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{project1.ID})
		require.NoError(t, err)
		assert.Nilf(t, issue1.Projects, "Issue's Projects should be nil after IssueAssignOrRemoveProject to ensure it reloads fresh data")
		err = issue1.LoadProjects(t.Context())
		require.NoError(t, err)
		require.Len(t, issue1.Projects, 1)
		err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{project1.ID, project2.ID})
		require.NoError(t, err)
		assert.Nilf(t, issue1.Projects, "Issue's Projects should be nil after IssueAssignOrRemoveProject to ensure it reloads fresh data")
		err = issue1.LoadProjects(t.Context())
		require.NoError(t, err)
		require.Len(t, issue1.Projects, 2)
		assert.ElementsMatch(t, []int64{project1.ID, project2.ID}, []int64{issue1.Projects[0].ID, issue1.Projects[1].ID}, "Issue should be in both projects")

		// test issue's project column map (each project maps to its default column)
		projectColumnMap, err := issue1.ProjectColumnMap(t.Context())
		p1Col, _ := project1.MustDefaultColumn(t.Context())
		p2Col, _ := project2.MustDefaultColumn(t.Context())
		require.NoError(t, err)
		assert.Equal(t, p1Col.ID, projectColumnMap[project1.ID])
		assert.Equal(t, p2Col.ID, projectColumnMap[project2.ID])

		// only keep project2
		err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{project2.ID})
		require.NoError(t, err)
		err = issue1.LoadProjects(t.Context())
		require.NoError(t, err)
		require.Len(t, issue1.Projects, 1)
		assert.Equal(t, project2.ID, issue1.Projects[0].ID)

		// also test ResetAttributesLoaded: clearing the loaded flag must make LoadProjects reload
		issue1.Projects = nil
		issue1.ResetAttributesLoaded()
		err = issue1.LoadProjects(t.Context())
		require.NoError(t, err)
		require.Len(t, issue1.Projects, 1)
		assert.Equal(t, project2.ID, issue1.Projects[0].ID)

		// remove issue's projects
		err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{})
		require.NoError(t, err)
		err = issue1.LoadProjects(t.Context())
		require.NoError(t, err)
		require.Empty(t, issue1.Projects)
	})

	t.Run("QueryByMultipleProjectIDs", func(t *testing.T) {
		// Get test data
		issue1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
		issue2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
		user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})

		// Create three projects
		var projects []*project_model.Project
		for i := 1; i <= 3; i++ {
			project := &project_model.Project{
				Title:        fmt.Sprintf("Query Test Project %d", i),
				RepoID:       issue1.RepoID,
				Type:         project_model.TypeRepository,
				TemplateType: project_model.TemplateTypeBasicKanban,
			}
			require.NoError(t, project_model.NewProject(t.Context(), project))
			projects = append(projects, project)
			// pass the ID by value so each deferred cleanup deletes its own project
			defer func(id int64) {
				_ = project_model.DeleteProjectByID(t.Context(), id)
			}(project.ID)
		}

		// Assign issue1 to projects 1 and 2
		err := issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{projects[0].ID, projects[1].ID})
		require.NoError(t, err)

		// Assign issue2 to project 3
		err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue2, user2, []int64{projects[2].ID})
		require.NoError(t, err)

		// Query for issues in project 3 only (should find issue2)
		issues, err := issues_model.Issues(t.Context(), &issues_model.IssuesOptions{
			RepoIDs:    []int64{issue1.RepoID},
			ProjectIDs: []int64{projects[2].ID},
		})
		require.NoError(t, err)
		assert.NotEmpty(t, issues, "Should find issues in project 3")

		// Verify issue2 is in the results
		foundIssue2 := false
		for _, issue := range issues {
			if issue.ID == issue2.ID {
				foundIssue2 = true
				break
			}
		}
		assert.True(t, foundIssue2, "Issue 2 should be found when querying project 3")

		// FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: no multiple-project filter support yet. The search logic is wrong:
		// it should use "AND" semantics (issue must be in every selected project) rather than "OR".

		// Clean up
		err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{})
		require.NoError(t, err)
		err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue2, user2, []int64{})
		require.NoError(t, err)
	})
}

View File

@ -16,6 +16,7 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/util"
"xorm.io/builder"
"xorm.io/xorm"
@ -36,8 +37,7 @@ type IssuesOptions struct { //nolint:revive // export stutter
ReviewedID int64
SubscriberID int64
MilestoneIDs []int64
ProjectID int64
ProjectColumnID int64
ProjectIDs []int64
IsClosed optional.Option[bool]
IsPull optional.Option[bool]
LabelIDs []int64
@ -198,26 +198,19 @@ func applyMilestoneCondition(sess *xorm.Session, opts *IssuesOptions) {
}
func applyProjectCondition(sess *xorm.Session, opts *IssuesOptions) {
if opts.ProjectID > 0 { // specific project
sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id").
And("project_issue.project_id=?", opts.ProjectID)
} else if opts.ProjectID == db.NoConditionID { // show those that are in no project
sess.And(builder.NotIn("issue.id", builder.Select("issue_id").From("project_issue").And(builder.Neq{"project_id": 0})))
projectIDs := util.SliceRemoveAll(opts.ProjectIDs, 0)
if len(projectIDs) == 1 && projectIDs[0] == db.NoConditionID { // show those that are in no project
sess.And(builder.NotIn("issue.id", builder.Select("issue_id").From("project_issue")))
} else if len(projectIDs) == 1 && projectIDs[0] > 0 { // single specific project
sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id AND project_issue.project_id = ?", projectIDs[0])
} else if len(projectIDs) > 1 { // multiple projects
// FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: this logic is not right, it should use "AND" but not "OR"
sess.And(builder.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.In("project_id", projectIDs))))
}
// opts.ProjectID == 0 means all projects,
// empty projectIDs means all projects,
// do not need to apply any condition
}
func applyProjectColumnCondition(sess *xorm.Session, opts *IssuesOptions) {
// opts.ProjectColumnID == 0 means all project columns,
// do not need to apply any condition
if opts.ProjectColumnID > 0 {
sess.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.Eq{"project_board_id": opts.ProjectColumnID}))
} else if opts.ProjectColumnID == db.NoConditionID {
sess.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.Eq{"project_board_id": 0}))
}
}
func applyRepoConditions(sess *xorm.Session, opts *IssuesOptions) {
if len(opts.RepoIDs) == 1 {
opts.RepoCond = builder.Eq{"issue.repo_id": opts.RepoIDs[0]}
@ -276,8 +269,6 @@ func applyConditions(sess *xorm.Session, opts *IssuesOptions) {
applyProjectCondition(sess, opts)
applyProjectColumnCondition(sess, opts)
if opts.IsPull.Has() {
sess.And("issue.is_pull=?", opts.IsPull.Value())
}

View File

@ -424,10 +424,10 @@ func TestIssueLoadAttributes(t *testing.T) {
}
if issue.ID == int64(1) {
assert.Equal(t, int64(400), issue.TotalTrackedTime)
assert.NotNil(t, issue.Project)
assert.Equal(t, int64(1), issue.Project.ID)
assert.NotEmpty(t, issue.Projects)
assert.Equal(t, int64(1), issue.Projects[0].ID)
} else {
assert.Nil(t, issue.Project)
assert.Empty(t, issue.Projects)
}
}
}

View File

@ -71,38 +71,69 @@ func GetUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch
}
// CanMaintainerWriteToBranch check whether user is a maintainer and could write to the branch
func CanMaintainerWriteToBranch(ctx context.Context, p access_model.Permission, branch string, user *user_model.User) bool {
if p.CanWrite(unit.TypeCode) {
return true
func CanMaintainerWriteToBranch(ctx context.Context, headPerm access_model.Permission, headBranch string, doer *user_model.User) bool {
can, err := canMaintainerWriteToBranch(ctx, headPerm, headBranch, doer)
if err != nil {
log.Error("CanMaintainerWriteToBranch: %v", err)
return false
}
return can
}
func canMaintainerWriteToBranch(ctx context.Context, headPerm access_model.Permission, headBranch string, doer *user_model.User) (bool, error) {
if headPerm.CanWrite(unit.TypeCode) {
return true, nil
}
// the code below depends on units to get the repository ID, not ideal but just keep it for now
firstUnitRepoID := p.GetFirstUnitRepoID()
firstUnitRepoID := headPerm.GetFirstUnitRepoID()
if firstUnitRepoID == 0 {
return false
return false, nil
}
prs, err := GetUnmergedPullRequestsByHeadInfo(ctx, firstUnitRepoID, branch)
prs, err := GetUnmergedPullRequestsByHeadInfo(ctx, firstUnitRepoID, headBranch)
if err != nil {
return false
return false, err
}
if _, err := prs.LoadIssues(ctx); err != nil {
return false, err
}
for _, pr := range prs {
if pr.AllowMaintainerEdit {
err = pr.LoadBaseRepo(ctx)
if err != nil {
continue
}
prPerm, err := access_model.GetIndividualUserRepoPermission(ctx, pr.BaseRepo, user)
if err != nil {
continue
}
if prPerm.CanWrite(unit.TypeCode) {
return true
}
if !pr.AllowMaintainerEdit {
continue
}
// check the PR's poster's permissions
// If a "reader" poster created the PR in base repo from head repo, even if it is allowed to be edited by maintainers,
// the maintainers should not be allowed to write, because they don't really have "write" permission in the head repo
if err := pr.Issue.LoadPoster(ctx); err != nil {
return false, err
}
if err := pr.LoadHeadRepo(ctx); err != nil {
return false, err
}
posterHeadPerm, err := access_model.GetIndividualUserRepoPermission(ctx, pr.HeadRepo, pr.Issue.Poster)
if err != nil {
return false, err
}
if !posterHeadPerm.CanWrite(unit.TypeCode) {
continue
}
// check the doer's permission
// Only allow the doer to edit the PR if they have write access to the base repository
if err := pr.LoadBaseRepo(ctx); err != nil {
return false, err
}
doerBasePerm, err := access_model.GetIndividualUserRepoPermission(ctx, pr.BaseRepo, doer)
if err != nil {
return false, err
}
if doerBasePerm.CanWrite(unit.TypeCode) {
return true, nil
}
}
return false
return false, nil
}
// HasUnmergedPullRequestsByHeadInfo checks if there are open and not merged pull request

View File

@ -6,15 +6,28 @@ package issues_test
import (
"testing"
"code.gitea.io/gitea/models/db"
issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/perm"
"code.gitea.io/gitea/models/perm/access"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/models/unittest"
user_model "code.gitea.io/gitea/models/user"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"xorm.io/builder"
)
func TestPullRequestList_LoadAttributes(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
func TestPullRequestList(t *testing.T) {
require.NoError(t, unittest.PrepareTestDatabase())
t.Run("LoadAttributes", testPullRequestListLoadAttributes)
t.Run("LoadReviewCommentsCounts", testPullRequestListLoadReviewCommentsCounts)
t.Run("LoadReviews", testPullRequestListLoadReviews)
t.Run("CanMaintainerWriteToBranch", testCanMaintainerWriteToBranch)
}
func testPullRequestListLoadAttributes(t *testing.T) {
prs := issues_model.PullRequestList{
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
@ -28,9 +41,7 @@ func TestPullRequestList_LoadAttributes(t *testing.T) {
assert.NoError(t, issues_model.PullRequestList([]*issues_model.PullRequest{}).LoadAttributes(t.Context()))
}
func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
func testPullRequestListLoadReviewCommentsCounts(t *testing.T) {
prs := issues_model.PullRequestList{
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
@ -43,9 +54,7 @@ func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) {
}
}
func TestPullRequestList_LoadReviews(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
func testPullRequestListLoadReviews(t *testing.T) {
prs := issues_model.PullRequestList{
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
@ -61,3 +70,73 @@ func TestPullRequestList_LoadReviews(t *testing.T) {
assert.EqualValues(t, 10, reviewList[4].ID)
assert.EqualValues(t, 22, reviewList[5].ID)
}
// testCanMaintainerWriteToBranch verifies CanMaintainerWriteToBranch: a base
// repository maintainer may write to a PR's head branch only when the PR
// allows maintainer edits AND the PR's poster has write permission on the
// head repository.
func testCanMaintainerWriteToBranch(t *testing.T) {
	ctx := t.Context()
	baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10})
	headRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 11})
	// owners are only used as doers below; load errors are deliberately ignored
	_ = baseRepo.LoadOwner(ctx)
	_ = headRepo.LoadOwner(ctx)
	user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})

	// a PR from the head repo's owner
	headOwnerPR := &issues_model.PullRequest{
		Issue: &issues_model.Issue{
			RepoID:   baseRepo.ID,
			PosterID: headRepo.OwnerID,
		},
		HeadRepoID: headRepo.ID,
		BaseRepoID: baseRepo.ID,
		HeadBranch: "pr-from-head-owner",
		BaseBranch: "master",
	}
	require.NoError(t, issues_model.NewPullRequest(ctx, baseRepo, headOwnerPR.Issue, nil, nil, headOwnerPR))

	// a PR from a user, they might have or not have "write" permission in the target repo
	anyUserPR := &issues_model.PullRequest{
		Issue: &issues_model.Issue{
			RepoID:   baseRepo.ID,
			PosterID: user.ID,
		},
		HeadRepoID: headRepo.ID,
		BaseRepoID: baseRepo.ID,
		HeadBranch: "pr-from-head-user",
		BaseBranch: "master",
	}
	require.NoError(t, issues_model.NewPullRequest(ctx, baseRepo, anyUserPR.Issue, nil, nil, anyUserPR))

	// doerCanWrite reports whether doer may write to the given PR's head branch
	doerCanWrite := func(doer *user_model.User, pr *issues_model.PullRequest) bool {
		headPerm, _ := access.GetIndividualUserRepoPermission(ctx, headRepo, doer)
		return issues_model.CanMaintainerWriteToBranch(ctx, headPerm, pr.HeadBranch, doer)
	}

	t.Run("NoAllowMaintainerEdit", func(t *testing.T) {
		assert.True(t, doerCanWrite(headRepo.Owner, headOwnerPR))
		assert.False(t, doerCanWrite(baseRepo.Owner, headOwnerPR))
		assert.False(t, doerCanWrite(baseRepo.Owner, anyUserPR))
		assert.False(t, doerCanWrite(user, anyUserPR))
	})

	t.Run("WithAllowMaintainerEdit-HeadPosterReader", func(t *testing.T) {
		// enable maintainer edits on both PRs
		_, err := db.GetEngine(ctx).Where(builder.In("id", []int64{headOwnerPR.ID, anyUserPR.ID})).
			Cols("allow_maintainer_edit").
			Update(&issues_model.PullRequest{AllowMaintainerEdit: true})
		require.NoError(t, err)
		assert.True(t, doerCanWrite(baseRepo.Owner, headOwnerPR))
		assert.False(t, doerCanWrite(baseRepo.Owner, anyUserPR)) // poster doesn't have write permission, so maintainer can't write either
	})

	t.Run("WithAllowMaintainerEdit-HeadPosterWriter", func(t *testing.T) {
		_, err := db.GetEngine(ctx).Where(builder.In("id", []int64{headOwnerPR.ID, anyUserPR.ID})).
			Cols("allow_maintainer_edit").
			Update(&issues_model.PullRequest{AllowMaintainerEdit: true})
		require.NoError(t, err)
		// grant the poster write access to the head repo (collaboration + access rows)
		err = db.Insert(ctx, &repo_model.Collaboration{RepoID: headRepo.ID, UserID: user.ID, Mode: perm.AccessModeWrite})
		require.NoError(t, err)
		err = db.Insert(ctx, &access.Access{RepoID: headRepo.ID, UserID: user.ID, Mode: perm.AccessModeWrite})
		require.NoError(t, err)
		assert.True(t, doerCanWrite(baseRepo.Owner, headOwnerPR))
		assert.True(t, doerCanWrite(baseRepo.Owner, anyUserPR)) // now the poster has the write permission
	})
}

View File

@ -6,22 +6,23 @@ package base
import (
"testing"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/xorm/names"
)
func TestMain(m *testing.M) {
MainTest(m)
migrationtest.MainTest(m)
}
func Test_DropTableColumns(t *testing.T) {
x, deferable := PrepareTestEnv(t, 0)
if x == nil || t.Failed() {
defer deferable()
return
}
x, deferable := migrationtest.PrepareTestEnv(t, 0)
defer deferable()
// FIXME: this logic seems wrong. Need to add an assertion here in the future, but it seems causing failure.
if x == nil || t.Failed() {
t.Skip("PrepareTestEnv did not yield a usable engine")
}
type DropTest struct {
ID int64 `xorm:"pk autoincr"`

View File

@ -1,226 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package base
import (
"database/sql"
"fmt"
"os"
"path"
"path/filepath"
"testing"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/tempdir"
"code.gitea.io/gitea/modules/testlogger"
"code.gitea.io/gitea/modules/util"
"github.com/stretchr/testify/require"
"xorm.io/xorm"
"xorm.io/xorm/schemas"
)
// FIXME: this file shouldn't be in a normal package, it should only be compiled for tests

// newXORMEngine initializes the global database engine for the test's context
// and returns the XORM engine managed by the unittest helpers.
func newXORMEngine(t *testing.T) (*xorm.Engine, error) {
	err := db.InitEngine(t.Context())
	if err != nil {
		return nil, err
	}
	return unittest.GetXORMEngine(), nil
}
// deleteDB drops and recreates the configured test database so that each run
// starts from an empty state. The branch taken depends on the configured
// database type; for SQLite the database file is removed instead.
// NOTE(review): SQL statements are built by string concatenation; the values
// come from the test configuration, so this is presumed safe for test-only
// use — confirm this helper is never reachable from production code paths.
func deleteDB() error {
	switch {
	case setting.Database.Type.IsSQLite3():
		// remove the database file and recreate its parent directory
		if err := util.Remove(setting.Database.Path); err != nil {
			return err
		}
		return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm)
	case setting.Database.Type.IsMySQL():
		// connect at server level (no database selected) to drop/recreate
		db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/",
			setting.Database.User, setting.Database.Passwd, setting.Database.Host))
		if err != nil {
			return err
		}
		defer db.Close()
		if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil {
			return err
		}
		if _, err = db.Exec("CREATE DATABASE IF NOT EXISTS " + setting.Database.Name); err != nil {
			return err
		}
		return nil
	case setting.Database.Type.IsPostgreSQL():
		db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s",
			setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode))
		if err != nil {
			return err
		}
		defer db.Close()
		if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil {
			return err
		}
		if _, err = db.Exec("CREATE DATABASE " + setting.Database.Name); err != nil {
			return err
		}
		db.Close() // close the server-level connection before reconnecting to the new database

		// Check if we need to set up a specific schema
		if len(setting.Database.Schema) != 0 {
			// reconnect, this time to the freshly created database
			db, err = sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s",
				setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.Name, setting.Database.SSLMode))
			if err != nil {
				return err
			}
			defer db.Close()

			schrows, err := db.Query(fmt.Sprintf("SELECT 1 FROM information_schema.schemata WHERE schema_name = '%s'", setting.Database.Schema))
			if err != nil {
				return err
			}
			defer schrows.Close()

			if !schrows.Next() {
				// Create and set up a DB schema
				_, err = db.Exec("CREATE SCHEMA " + setting.Database.Schema)
				if err != nil {
					return err
				}
			}

			// Make the user's default search path the created schema; this will affect new connections
			_, err = db.Exec(fmt.Sprintf(`ALTER USER "%s" SET search_path = %s`, setting.Database.User, setting.Database.Schema))
			if err != nil {
				return err
			}
			return nil
		}
	case setting.Database.Type.IsMSSQL():
		// MSSQL: connect to the "master" database to manage the test database
		host, port := setting.ParseMSSQLHostPort(setting.Database.Host)
		db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;",
			host, port, "master", setting.Database.User, setting.Database.Passwd))
		if err != nil {
			return err
		}
		defer db.Close()
		if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS [%s]", setting.Database.Name)); err != nil {
			return err
		}
		if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE [%s]", setting.Database.Name)); err != nil {
			return err
		}
	}

	return nil
}
// PrepareTestEnv prepares the test environment and resets the database. The skip parameter should usually be 0.
// Provide models to be sync'd with the database - in particular any models you expect fixtures to be loaded from.
//
// fixtures in `models/migrations/fixtures/<TestName>` will be loaded automatically
//
// It returns the XORM engine (which may be nil on failure) and a cleanup
// function that must be called (usually deferred) when the test finishes.
func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, func()) {
	t.Helper()
	// skip this helper's frames (plus any caller-requested extra frames) when logging the current test
	ourSkip := 2
	ourSkip += skip
	deferFn := testlogger.PrintCurrentTest(t, ourSkip)
	giteaRoot := setting.GetGiteaTestSourceRoot()
	require.NoError(t, unittest.SyncDirs(filepath.Join(giteaRoot, "tests/gitea-repositories-meta"), setting.RepoRootPath))

	if err := deleteDB(); err != nil {
		t.Fatalf("unable to reset database: %v", err)
		return nil, deferFn
	}

	x, err := newXORMEngine(t)
	require.NoError(t, err)
	if x != nil {
		// chain the cleanup so it also closes the engine and drops the database
		oldDefer := deferFn
		deferFn = func() {
			oldDefer()
			if err := x.Close(); err != nil {
				t.Errorf("error during close: %v", err)
			}
			if err := deleteDB(); err != nil {
				t.Errorf("unable to reset database: %v", err)
			}
		}
	}
	if err != nil {
		return x, deferFn
	}

	if len(syncModels) > 0 {
		if err := x.Sync(syncModels...); err != nil {
			t.Errorf("error during sync: %v", err)
			return x, deferFn
		}
	}

	// load per-test fixtures when a directory named after the test exists
	fixturesDir := filepath.Join(giteaRoot, "models", "migrations", "fixtures", t.Name())
	if _, err := os.Stat(fixturesDir); err == nil {
		t.Logf("initializing fixtures from: %s", fixturesDir)
		if err := unittest.InitFixtures(
			unittest.FixturesOptions{
				Dir: fixturesDir,
			}, x); err != nil {
			t.Errorf("error whilst initializing fixtures from %s: %v", fixturesDir, err)
			return x, deferFn
		}
		if err := unittest.LoadFixtures(); err != nil {
			t.Errorf("error whilst loading fixtures from %s: %v", fixturesDir, err)
			return x, deferFn
		}
	} else if !os.IsNotExist(err) {
		t.Errorf("unexpected error whilst checking for existence of fixtures: %v", err)
	} else {
		t.Logf("no fixtures found in: %s", fixturesDir)
	}

	return x, deferFn
}
// LoadTableSchemasMap reads the database metadata from the given engine and
// returns a lookup from table name to its schema definition. The test is
// failed immediately if the metadata cannot be retrieved.
func LoadTableSchemasMap(t *testing.T, x *xorm.Engine) map[string]*schemas.Table {
	metas, err := x.DBMetas()
	require.NoError(t, err)
	byName := map[string]*schemas.Table{}
	for _, meta := range metas {
		byName[meta.Name] = meta
	}
	return byName
}
// mainTest performs the one-time process setup for the migration test binary
// (logging, temp work dir, git, DB settings) and then runs the test suite,
// returning the process exit status.
func mainTest(m *testing.M) int {
	testlogger.Init()
	// All test data lives in a throwaway temp dir that is removed on return.
	tempWorkPath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("migration-test-data-")
	if err != nil {
		return testlogger.MainErrorf("Unable to create temporary dir for migration test: %v", err)
	}
	defer cleanup()
	setting.MockBuiltinPaths(tempWorkPath, "", "")
	setting.SetupGiteaTestEnv()
	if err = git.InitFull(); err != nil {
		return testlogger.MainErrorf("Unable to InitFull: %v", err)
	}
	// Database settings and test loggers must be prepared before any test
	// opens an engine; order of these calls follows the other test mains.
	setting.LoadDBSetting()
	setting.InitLoggersForTest()
	return m.Run()
}
// MainTest sets up the migration test environment, runs the test suite, and
// exits the process with the suite's status code.
func MainTest(m *testing.M) {
	status := mainTest(m)
	os.Exit(status)
}

View File

@ -0,0 +1,120 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package migrationtest
import (
"os"
"path/filepath"
"testing"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/testlogger"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"xorm.io/xorm"
"xorm.io/xorm/schemas"
)
// PrepareTestEnv prepares the test environment and reset the database. The skip parameter should usually be 0.
// Provide models to be sync'd with the database - in particular any models you expect fixtures to be loaded from.
//
// fixtures in `models/migrations/fixtures/<TestName>` will be loaded automatically
func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, func()) {
	t.Helper()
	// The caller-supplied skip is added to our own two frames so the test
	// logger reports the caller's location rather than this helper's.
	ourSkip := 2
	ourSkip += skip
	deferFn := testlogger.PrintCurrentTest(t, ourSkip)
	giteaRoot := setting.GetGiteaTestSourceRoot()
	// Reset the on-disk test repositories to a pristine copy before the test runs.
	require.NoError(t, unittest.SyncDirs(filepath.Join(giteaRoot, "tests/gitea-repositories-meta"), setting.RepoRootPath))
	cleanup, err := unittest.ResetTestDatabase()
	if err != nil {
		t.Fatalf("unable to reset database: %v", err)
		return nil, deferFn
	}
	{
		// Chain the database cleanup before the logger defer; each wrapper runs
		// its own step first, so teardown happens in reverse order of setup.
		oldDefer := deferFn
		deferFn = func() {
			cleanup()
			oldDefer()
		}
	}
	err = db.InitEngine(t.Context())
	if !assert.NoError(t, err) {
		return nil, deferFn
	}
	x := unittest.GetXORMEngine()
	{
		// Close the engine first, then run the previously chained cleanups.
		oldDefer := deferFn
		deferFn = func() {
			_ = x.Close()
			oldDefer()
		}
	}
	if len(syncModels) > 0 {
		// Create the table schema for the models the fixtures will populate.
		if err := x.Sync(syncModels...); err != nil {
			t.Errorf("error during sync: %v", err)
			return x, deferFn
		}
	}
	// Fixtures are looked up by the test's name; a missing fixtures directory
	// is not an error, only an unexpected Stat failure is reported.
	fixturesDir := filepath.Join(giteaRoot, "models", "migrations", "fixtures", t.Name())
	if _, err := os.Stat(fixturesDir); err == nil {
		t.Logf("initializing fixtures from: %s", fixturesDir)
		if err := unittest.InitFixtures(
			unittest.FixturesOptions{
				Dir: fixturesDir,
			}, x); err != nil {
			t.Errorf("error whilst initializing fixtures from %s: %v", fixturesDir, err)
			return x, deferFn
		}
		if err := unittest.LoadFixtures(); err != nil {
			t.Errorf("error whilst loading fixtures from %s: %v", fixturesDir, err)
			return x, deferFn
		}
	} else if !os.IsNotExist(err) {
		t.Errorf("unexpected error whilst checking for existence of fixtures: %v", err)
	} else {
		t.Logf("no fixtures found in: %s", fixturesDir)
	}
	return x, deferFn
}
// LoadTableSchemasMap reads the database metadata from the given engine and
// returns a lookup from table name to its schema definition. The test is
// failed immediately if the metadata cannot be retrieved.
func LoadTableSchemasMap(t *testing.T, x *xorm.Engine) map[string]*schemas.Table {
	metas, err := x.DBMetas()
	require.NoError(t, err)
	byName := map[string]*schemas.Table{}
	for _, meta := range metas {
		byName[meta.Name] = meta
	}
	return byName
}
// mainTest performs the one-time process setup for the migration test binary
// (logging, integration-test config, git, DB settings) and then runs the test
// suite, returning the process exit status.
func mainTest(m *testing.M) int {
	testlogger.Init()
	err := setting.PrepareIntegrationTestConfig()
	if err != nil {
		return testlogger.MainErrorf("Unable to prepare integration test config: %v", err)
	}
	setting.SetupGiteaTestEnv()
	if err = git.InitFull(); err != nil {
		return testlogger.MainErrorf("Unable to InitFull: %v", err)
	}
	// Database settings and test loggers must be prepared before any test
	// opens an engine.
	setting.LoadDBSetting()
	setting.InitLoggersForTest()
	return m.Run()
}
// MainTest sets up the migration test environment, runs the test suite, and
// exits the process with the suite's status code.
func MainTest(m *testing.M) {
	status := mainTest(m)
	os.Exit(status)
}

View File

@ -6,9 +6,9 @@ package v1_14
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -6,7 +6,7 @@ package v1_14
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -47,7 +47,7 @@ func Test_RemoveInvalidLabels(t *testing.T) {
}
// load and prepare the test database
x, deferable := base.PrepareTestEnv(t, 0, new(Comment), new(Issue), new(Repository), new(IssueLabel), new(Label))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Comment), new(Issue), new(Repository), new(IssueLabel), new(Label))
if x == nil || t.Failed() {
defer deferable()
return

View File

@ -6,7 +6,7 @@ package v1_14
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/assert"
@ -34,7 +34,7 @@ func Test_DeleteOrphanedIssueLabels(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(IssueLabel), new(Label))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(IssueLabel), new(Label))
if x == nil || t.Failed() {
defer deferable()
return

View File

@ -6,9 +6,9 @@ package v1_15
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -7,7 +7,7 @@ import (
"strings"
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -20,7 +20,7 @@ func Test_AddPrimaryEmail2EmailAddress(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(User))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(User))
if x == nil || t.Failed() {
defer deferable()
return

View File

@ -6,7 +6,7 @@ package v1_15
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -20,7 +20,7 @@ func Test_AddIssueResourceIndexTable(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(Issue))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Issue))
if x == nil || t.Failed() {
defer deferable()
return

View File

@ -6,9 +6,9 @@ package v1_16
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -6,7 +6,7 @@ package v1_16
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/json"
"github.com/stretchr/testify/assert"
@ -27,7 +27,7 @@ func (ls *LoginSourceOriginalV189) TableName() string {
func Test_UnwrapLDAPSourceCfg(t *testing.T) {
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(LoginSourceOriginalV189))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(LoginSourceOriginalV189))
if x == nil || t.Failed() {
defer deferable()
return

View File

@ -6,7 +6,7 @@ package v1_16
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -31,7 +31,7 @@ func Test_AddRepoIDForAttachment(t *testing.T) {
}
// Prepare and load the testing database
x, deferrable := base.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release))
x, deferrable := migrationtest.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release))
defer deferrable()
if x == nil || t.Failed() {
return

View File

@ -6,7 +6,7 @@ package v1_16
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -21,7 +21,7 @@ func Test_AddTableCommitStatusIndex(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(CommitStatus))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(CommitStatus))
if x == nil || t.Failed() {
defer deferable()
return

View File

@ -6,7 +6,7 @@ package v1_16
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/assert"
@ -44,7 +44,7 @@ func Test_RemigrateU2FCredentials(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(WebauthnCredential), new(U2fRegistration), new(ExpectedWebauthnCredential))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(WebauthnCredential), new(U2fRegistration), new(ExpectedWebauthnCredential))
if x == nil || t.Failed() {
defer deferable()
return

View File

@ -6,9 +6,9 @@ package v1_17
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -7,7 +7,7 @@ import (
"encoding/base32"
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -38,7 +38,7 @@ func Test_StoreWebauthnCredentialIDAsBytes(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(WebauthnCredential), new(ExpectedWebauthnCredential))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(WebauthnCredential), new(ExpectedWebauthnCredential))
defer deferable()
if x == nil || t.Failed() {
return

View File

@ -6,9 +6,9 @@ package v1_18
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -7,7 +7,7 @@ import (
"testing"
"code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -16,7 +16,7 @@ func Test_UpdateOpenMilestoneCounts(t *testing.T) {
type ExpectedMilestone issues.Milestone
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(issues.Milestone), new(ExpectedMilestone), new(issues.Issue))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(issues.Milestone), new(ExpectedMilestone), new(issues.Issue))
defer deferable()
if x == nil || t.Failed() {
return

View File

@ -6,7 +6,7 @@ package v1_18
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -18,7 +18,7 @@ func Test_AddConfidentialClientColumnToOAuth2ApplicationTable(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(oauth2Application))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(oauth2Application))
defer deferable()
if x == nil || t.Failed() {
return

View File

@ -6,9 +6,9 @@ package v1_19
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -6,7 +6,7 @@ package v1_19
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/json"
"code.gitea.io/gitea/modules/secret"
"code.gitea.io/gitea/modules/setting"
@ -39,7 +39,7 @@ func Test_AddHeaderAuthorizationEncryptedColWebhook(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(Webhook), new(ExpectedWebhook), new(HookTask))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Webhook), new(ExpectedWebhook), new(HookTask))
defer deferable()
if x == nil || t.Failed() {
return

View File

@ -6,9 +6,9 @@ package v1_20
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -8,7 +8,7 @@ import (
"strings"
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -66,7 +66,7 @@ func Test_ConvertScopedAccessTokens(t *testing.T) {
})
}
x, deferable := base.PrepareTestEnv(t, 0, new(AccessToken))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(AccessToken))
defer deferable()
if x == nil || t.Failed() {
t.Skip()

View File

@ -6,9 +6,9 @@ package v1_21
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -6,9 +6,9 @@ package v1_22
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -6,7 +6,7 @@ package v1_22
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -21,7 +21,7 @@ func Test_AddCombinedIndexToIssueUser(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(IssueUser))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(IssueUser))
defer deferable()
assert.NoError(t, AddCombinedIndexToIssueUser(x))

View File

@ -6,7 +6,7 @@ package v1_22
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
"xorm.io/xorm"
@ -64,7 +64,7 @@ func PrepareOldRepository(t *testing.T) (*xorm.Engine, func()) {
}
// Prepare and load the testing database
return base.PrepareTestEnv(t, 0,
return migrationtest.PrepareTestEnv(t, 0,
new(Repository),
new(CommitStatus),
new(RepoArchiver),

View File

@ -7,7 +7,7 @@ import (
"strconv"
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -20,7 +20,7 @@ func Test_UpdateBadgeColName(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(Badge))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Badge))
defer deferable()
if x == nil || t.Failed() {
return

View File

@ -6,7 +6,7 @@ package v1_22
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/models/project"
"github.com/stretchr/testify/assert"
@ -14,7 +14,7 @@ import (
func Test_CheckProjectColumnsConsistency(t *testing.T) {
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(project.Project), new(project.Column))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(project.Project), new(project.Column))
defer deferable()
if x == nil || t.Failed() {
return

View File

@ -6,7 +6,7 @@ package v1_22
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
"xorm.io/xorm/schemas"
@ -20,7 +20,7 @@ func Test_AddUniqueIndexForProjectIssue(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(ProjectIssue))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(ProjectIssue))
defer deferable()
if x == nil || t.Failed() {
return

View File

@ -6,9 +6,9 @@ package v1_23
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -6,7 +6,7 @@ package v1_23
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/assert"
@ -44,7 +44,7 @@ func Test_AddIndexToActionTaskStoppedLogExpired(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(ActionTask))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(ActionTask))
defer deferable()
assert.NoError(t, AddIndexToActionTaskStoppedLogExpired(x))

View File

@ -6,7 +6,7 @@ package v1_23
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/assert"
@ -33,7 +33,7 @@ func Test_AddIndexForReleaseSha1(t *testing.T) {
}
// Prepare and load the testing database
x, deferable := base.PrepareTestEnv(t, 0, new(Release))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Release))
defer deferable()
assert.NoError(t, AddIndexForReleaseSha1(x))

View File

@ -6,9 +6,9 @@ package v1_25
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -6,7 +6,7 @@ package v1_25
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
@ -44,12 +44,12 @@ func Test_UseLongTextInSomeColumnsAndFixBugs(t *testing.T) {
}
// Prepare and load the testing database
x, deferrable := base.PrepareTestEnv(t, 0, new(ReviewState), new(PackageProperty), new(Notice))
x, deferrable := migrationtest.PrepareTestEnv(t, 0, new(ReviewState), new(PackageProperty), new(Notice))
defer deferrable()
require.NoError(t, UseLongTextInSomeColumnsAndFixBugs(x))
tables := base.LoadTableSchemasMap(t, x)
tables := migrationtest.LoadTableSchemasMap(t, x)
table := tables["review_state"]
column := table.GetColumn("updated_files")
assert.Equal(t, "LONGTEXT", column.SQLType.Name)

View File

@ -6,7 +6,7 @@ package v1_25
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/setting"
"github.com/stretchr/testify/assert"
@ -23,11 +23,11 @@ func Test_ExtendCommentTreePathLength(t *testing.T) {
TreePath string `xorm:"VARCHAR(255)"`
}
x, deferrable := base.PrepareTestEnv(t, 0, new(Comment))
x, deferrable := migrationtest.PrepareTestEnv(t, 0, new(Comment))
defer deferrable()
require.NoError(t, ExtendCommentTreePathLength(x))
table := base.LoadTableSchemasMap(t, x)["comment"]
table := migrationtest.LoadTableSchemasMap(t, x)["comment"]
column := table.GetColumn("tree_path")
assert.Contains(t, []string{"NVARCHAR", "VARCHAR"}, column.SQLType.Name)
assert.EqualValues(t, 4000, column.Length)

View File

@ -6,9 +6,9 @@ package v1_26
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -6,7 +6,7 @@ package v1_26
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/timeutil"
"github.com/stretchr/testify/require"
@ -38,7 +38,7 @@ func Test_FixMissedRepoIDWhenMigrateAttachments(t *testing.T) {
}
// Prepare and load the testing database
x, deferrable := base.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release))
x, deferrable := migrationtest.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release))
defer deferrable()
require.NoError(t, FixMissedRepoIDWhenMigrateAttachments(x))

View File

@ -6,7 +6,7 @@ package v1_26
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/test"
@ -57,7 +57,7 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) {
TargetURL string
}
x, deferable := base.PrepareTestEnv(t, 0,
x, deferable := migrationtest.PrepareTestEnv(t, 0,
new(Repository),
new(ActionRun),
new(ActionRunJob),

View File

@ -6,7 +6,7 @@ package v1_26
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/require"
)
@ -17,7 +17,7 @@ func Test_AddDisabledToActionRunner(t *testing.T) {
Name string
}
x, deferable := base.PrepareTestEnv(t, 0, new(ActionRunner))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(ActionRunner))
defer deferable()
_, err := x.Insert(&ActionRunner{Name: "runner"})

View File

@ -6,7 +6,7 @@ package v1_26
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
)
@ -22,7 +22,7 @@ func (UserBadgeBefore) TableName() string {
}
func Test_AddUniqueIndexForUserBadge(t *testing.T) {
x, deferable := base.PrepareTestEnv(t, 0, new(UserBadgeBefore))
x, deferable := migrationtest.PrepareTestEnv(t, 0, new(UserBadgeBefore))
defer deferable()
if x == nil || t.Failed() {
return

View File

@ -6,9 +6,9 @@ package v1_27
import (
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
)
func TestMain(m *testing.M) {
base.MainTest(m)
migrationtest.MainTest(m)
}

View File

@ -8,7 +8,7 @@ import (
"slices"
"testing"
"code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/models/migrations/migrationtest"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -49,7 +49,7 @@ func (actionArtifactBeforeV331) TableName() string {
}
func Test_AddActionRunAttemptModel(t *testing.T) {
x, deferable := base.PrepareTestEnv(t, 0,
x, deferable := migrationtest.PrepareTestEnv(t, 0,
new(actionRunBeforeV331),
new(actionRunJobBeforeV331),
new(actionArtifactBeforeV331),
@ -69,7 +69,7 @@ func Test_AddActionRunAttemptModel(t *testing.T) {
require.NoError(t, AddActionRunAttemptModel(x))
tableMap := base.LoadTableSchemasMap(t, x)
tableMap := migrationtest.LoadTableSchemasMap(t, x)
attemptTable := tableMap["action_run_attempt"]
require.NotNil(t, attemptTable)

View File

@ -68,7 +68,7 @@ func TryInsertFile(ctx context.Context, pf *PackageFile) (*PackageFile, error) {
// GetFilesByVersionID gets all files of a version
func GetFilesByVersionID(ctx context.Context, versionID int64) ([]*PackageFile, error) {
pfs := make([]*PackageFile, 0, 10)
return pfs, db.GetEngine(ctx).Where("version_id = ?", versionID).Find(&pfs)
return pfs, db.GetEngine(ctx).Where("version_id = ?", versionID).OrderBy("lower_name, created_unix, id").Find(&pfs)
}
// GetFileForVersionByID gets a file of a version by id

View File

@ -302,6 +302,15 @@ func GetProjectByID(ctx context.Context, id int64) (*Project, error) {
return p, nil
}
// GetProjectsMapByIDs returns projects by a list of IDs.
// IDs that do not match an existing project are simply absent from the map;
// an empty or nil id list yields an empty map without touching the database.
func GetProjectsMapByIDs(ctx context.Context, ids []int64) (map[int64]*Project, error) {
	projects := make(map[int64]*Project, len(ids))
	if len(ids) == 0 {
		return projects, nil
	}
	// Find fills the map destination directly — presumably keyed by the
	// primary key column; verify against xorm's map-Find semantics.
	return projects, db.GetEngine(ctx).In("id", ids).Find(&projects)
}
func GetProjectByIDAndOwner(ctx context.Context, id, ownerID int64) (*Project, error) {
p := new(Project)
has, err := db.GetEngine(ctx).ID(id).And("owner_id = ?", ownerID).Get(p)

View File

@ -34,7 +34,7 @@ func (r *RepoComment) ResolveLink(link, preferLinkType string) string {
case markup.LinkTypeRoot:
return r.ctx.ResolveLinkRoot(link)
default:
return r.ctx.ResolveLinkRelative(r.repoLink, r.opts.CurrentRefPath, link)
return r.ctx.ResolveLinkRelative(r.repoLink, r.opts.CurrentRefSubURL, link)
}
}
@ -43,7 +43,7 @@ var _ markup.RenderHelper = (*RepoComment)(nil)
type RepoCommentOptions struct {
DeprecatedRepoName string // it is only a patch for the non-standard "markup" api
DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api
CurrentRefPath string // eg: "branch/main" or "commit/11223344"
CurrentRefSubURL string // eg: "branch/main" or "commit/11223344"
FootnoteContextID string // the extra context ID for footnotes, used to avoid conflicts with other footnotes in the same page
}

View File

@ -54,8 +54,8 @@ func TestRepoComment(t *testing.T) {
`, rendered)
})
t.Run("WithCurrentRefPath", func(t *testing.T) {
rctx := NewRenderContextRepoComment(t.Context(), repo1, RepoCommentOptions{CurrentRefPath: "/commit/1234"}).
t.Run("WithCurrentRefSubURL", func(t *testing.T) {
rctx := NewRenderContextRepoComment(t.Context(), repo1, RepoCommentOptions{CurrentRefSubURL: "/commit/1234"}).
WithMarkupType(markdown.MarkupName)
// the ref path is only used to render commit message: a commit message is rendered at the commit page with its commit ID path

View File

@ -35,11 +35,11 @@ func (r *RepoFile) ResolveLink(link, preferLinkType string) (finalLink string) {
case markup.LinkTypeRoot:
finalLink = r.ctx.ResolveLinkRoot(link)
case markup.LinkTypeRaw:
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "raw", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link)
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "raw", r.opts.CurrentRefSubURL), r.opts.CurrentTreePath, link)
case markup.LinkTypeMedia:
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "media", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link)
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "media", r.opts.CurrentRefSubURL), r.opts.CurrentTreePath, link)
default:
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "src", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link)
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "src", r.opts.CurrentRefSubURL), r.opts.CurrentTreePath, link)
}
return finalLink
}
@ -50,8 +50,8 @@ type RepoFileOptions struct {
DeprecatedRepoName string // it is only a patch for the non-standard "markup" api
DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api
CurrentRefPath string // eg: "branch/main", it is a sub URL path escaped by callers, TODO: rename to CurrentRefSubURL
CurrentTreePath string // eg: "path/to/file" in the repo, it is the tree path without URL path escaping
CurrentRefSubURL string // eg: "branch/main" or "branch/my%20branch", it is a sub URL path escaped by callers
CurrentTreePath string // eg: "path/to/file" in the repo, it is the tree path without URL path escaping
}
func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository, opts ...RepoFileOptions) *markup.RenderContext {
@ -71,9 +71,8 @@ func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository,
})
}
// External render's iframe needs this to generate correct links
// TODO: maybe need to make it access "CurrentRefPath" directly (but impossible at the moment due to cycle-import)
// CurrentRefPath is already path-escaped by callers
rctx.RenderOptions.Metas["RefTypeNameSubURL"] = helper.opts.CurrentRefPath
// TODO: maybe need to make it access "CurrentRefSubURL" directly (but impossible at the moment due to cycle-import)
rctx.RenderOptions.Metas["RefTypeNameSubURL"] = helper.opts.CurrentRefSubURL
rctx = rctx.WithHelper(helper).WithEnableHeadingIDGeneration(true)
return rctx
}

View File

@ -36,7 +36,7 @@ func TestRepoFile(t *testing.T) {
})
t.Run("AbsoluteAndRelative", func(t *testing.T) {
rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefPath: "branch/main"}).
rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefSubURL: "branch/main"}).
WithMarkupType(markdown.MarkupName)
rendered, err := markup.RenderString(rctx, `
[/test](/test)
@ -53,8 +53,8 @@ func TestRepoFile(t *testing.T) {
`, rendered)
})
t.Run("WithCurrentRefPath", func(t *testing.T) {
rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefPath: "/commit/1234"}).
t.Run("WithCurrentRefSubURL", func(t *testing.T) {
rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefSubURL: "/commit/1234"}).
WithMarkupType(markdown.MarkupName)
rendered, err := markup.RenderString(rctx, `
[/test](/test)
@ -66,10 +66,10 @@ func TestRepoFile(t *testing.T) {
`, rendered)
})
t.Run("WithCurrentRefPathByTag", func(t *testing.T) {
t.Run("WithCurrentRefSubURLByTag", func(t *testing.T) {
rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{
CurrentRefPath: "/commit/1234",
CurrentTreePath: "my-dir",
CurrentRefSubURL: "/commit/1234",
CurrentTreePath: "my-dir",
}).
WithMarkupType(markdown.MarkupName)
rendered, err := markup.RenderString(rctx, `
@ -89,8 +89,8 @@ func TestRepoFileOrgMode(t *testing.T) {
t.Run("Links", func(t *testing.T) {
rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{
CurrentRefPath: "/commit/1234",
CurrentTreePath: "my-dir",
CurrentRefSubURL: "/commit/1234",
CurrentTreePath: "my-dir",
}).WithRelativePath("my-dir/a.org")
rendered, err := markup.RenderString(rctx, `

View File

@ -36,9 +36,9 @@ func (r *RepoWiki) ResolveLink(link, preferLinkType string) (finalLink string) {
case markup.LinkTypeRoot:
finalLink = r.ctx.ResolveLinkRoot(link)
case markup.LinkTypeMedia, markup.LinkTypeRaw:
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki/raw", r.opts.currentRefPath), r.opts.currentTreePath, link)
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki/raw", r.opts.currentRefSubURL), r.opts.currentTreePath, link)
default:
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki", r.opts.currentRefPath), r.opts.currentTreePath, link)
finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki", r.opts.currentRefSubURL), r.opts.currentTreePath, link)
}
return finalLink
}
@ -50,8 +50,8 @@ type RepoWikiOptions struct {
DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api
// these options are not used at the moment because Wiki doesn't support sub-path, nor branch
currentRefPath string // eg: "branch/main"
currentTreePath string // eg: "path/to/file" in the repo
currentRefSubURL string // eg: "branch/main"
currentTreePath string // eg: "path/to/file" in the repo
}
func NewRenderContextRepoWiki(ctx context.Context, repo *repo_model.Repository, opts ...RepoWikiOptions) *markup.RenderContext {

View File

@ -376,8 +376,9 @@ func (repo *Repository) CommitLink(commitID string) (result string) {
}
// APIURL returns the repository API URL
func (repo *Repository) APIURL() string {
return setting.AppURL + "api/v1/repos/" + url.PathEscape(repo.OwnerName) + "/" + url.PathEscape(repo.Name)
func (repo *Repository) APIURL(ctxOpt ...context.Context) string {
ctx := util.OptionalArg(ctxOpt, context.TODO())
return httplib.MakeAbsoluteURL(ctx, setting.AppSubURL+"/api/v1/repos/"+url.PathEscape(repo.OwnerName)+"/"+url.PathEscape(repo.Name))
}
// GetCommitsCountCacheKey returns cache key used for commits count caching.

View File

@ -5,6 +5,8 @@ package unittest
import (
"context"
"database/sql"
"errors"
"fmt"
"os"
"path/filepath"
@ -102,6 +104,101 @@ func mainTest(m *testing.M, testOptsArg ...*TestOptions) int {
return exitStatus
}
func ResetTestDatabase() (cleanup func(), err error) {
defer func() {
if cleanup == nil {
cleanup = func() {}
}
}()
connOpts := db.GlobalConnOptions()
driverDefault, connStrDefault, err := db.ConnStrDefaultDatabase(connOpts)
if err != nil {
return nil, err
}
driverDatabase, connStrDatabase, err := db.ConnStr(connOpts)
if err != nil {
return nil, err
}
if connOpts.Type.IsSQLite3() {
if !strings.HasSuffix(connOpts.SQLitePath, "-test.db") {
return nil, errors.New(`testing database file for sqlite3 must end in "-test.db"`)
}
_ = os.Remove(connOpts.SQLitePath)
err = os.MkdirAll(filepath.Dir(connOpts.SQLitePath), os.ModePerm)
if err != nil {
return nil, err
}
cleanup = func() {
_ = os.Remove(connOpts.SQLitePath)
_ = os.Remove(filepath.Dir(connOpts.SQLitePath))
}
return cleanup, nil
}
if !strings.Contains(connOpts.Database, "test") {
return nil, fmt.Errorf(`testing database name for %s must contain "test"`, connOpts.Database)
}
quotedDbName := connOpts.Database
if connOpts.Type.IsMSSQL() {
quotedDbName = `[` + connOpts.Database + `]`
}
sqlExec := func(sqlDB *sql.DB, sql string) error {
_, err := sqlDB.Exec(sql)
if err != nil {
return fmt.Errorf("failed to execute SQL %q: %w", sql, err)
}
return nil
}
createDatabase := func() error {
sqlDB, err := sql.Open(driverDefault, connStrDefault)
if err != nil {
return err
}
defer sqlDB.Close()
if err = sqlExec(sqlDB, "DROP DATABASE IF EXISTS "+quotedDbName); err != nil {
return err
}
return sqlExec(sqlDB, "CREATE DATABASE "+quotedDbName)
}
if err = createDatabase(); err != nil {
return nil, err
}
cleanup = func() {
sqlDB, err := sql.Open(driverDefault, connStrDefault)
if err != nil {
return
}
defer sqlDB.Close()
_, _ = sqlDB.Exec("DROP DATABASE IF EXISTS " + quotedDbName)
}
createDatabaseSchema := func() error {
if !connOpts.Type.IsPostgreSQL() {
return nil
}
if connOpts.Schema == "" {
return nil
}
sqlDB, err := sql.Open(driverDatabase, connStrDatabase)
if err != nil {
return err
}
defer sqlDB.Close()
if err = sqlExec(sqlDB, "DROP SCHEMA IF EXISTS "+connOpts.Schema); err != nil {
return err
}
return sqlExec(sqlDB, "CREATE SCHEMA "+connOpts.Schema)
}
return cleanup, createDatabaseSchema()
}
// FixturesOptions fixtures needs to be loaded options
type FixturesOptions struct {
Dir string
@ -110,11 +207,12 @@ type FixturesOptions struct {
// CreateTestEngine creates a memory database and loads the fixture data from fixturesDir
func CreateTestEngine(opts FixturesOptions) error {
x, err := xorm.NewEngine("sqlite3", "file::memory:?cache=shared&_txlock=immediate")
driver, connStr, err := db.ConnStr(db.ConnOptions{Type: "sqlite3", SQLitePath: ":memory:"})
if err != nil {
return err
}
x, err := xorm.NewEngine(driver, connStr)
if err != nil {
if strings.Contains(err.Error(), "unknown driver") {
return fmt.Errorf("sqlite3 requires: -tags sqlite,sqlite_unlock_notify\n%w", err)
}
return err
}
x.SetMapper(names.GonicMapper{})

View File

@ -615,6 +615,7 @@ var (
"sitemap.xml", // search engine sitemap
"ssh_info", // agit info
"swagger.v1.json",
"openapi3.v1.json",
"ghost", // reserved name for deleted users (id: -1)
"gitea-actions", // gitea builtin user (id: -2)

View File

@ -7,8 +7,10 @@ import (
"context"
"os"
"path/filepath"
"strings"
"code.gitea.io/gitea/modules/git/gitcmd"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
)
@ -39,6 +41,9 @@ func (b *catFileBatchCommand) getBatch() *catFileBatchCommunicator {
}
func (b *catFileBatchCommand) QueryContent(obj string) (*CatFileObject, BufferedReader, error) {
if strings.Contains(obj, "\n") {
setting.PanicInDevOrTesting("invalid object name with newline: %q", obj)
}
_, err := b.getBatch().reqWriter.Write([]byte("contents " + obj + "\n"))
if err != nil {
return nil, nil, err
@ -51,6 +56,9 @@ func (b *catFileBatchCommand) QueryContent(obj string) (*CatFileObject, Buffered
}
func (b *catFileBatchCommand) QueryInfo(obj string) (*CatFileObject, error) {
if strings.Contains(obj, "\n") {
setting.PanicInDevOrTesting("invalid object name with newline: %q", obj)
}
_, err := b.getBatch().reqWriter.Write([]byte("info " + obj + "\n"))
if err != nil {
return nil, err

Some files were not shown because too many files have changed in this diff Show More