diff --git a/.dockerignore b/.dockerignore index c88fb144fe..f61d8aea3f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -40,9 +40,7 @@ cpu.out *.log /gitea -/gitea-vet /debug -/integrations.test /bin /dist @@ -54,12 +52,6 @@ cpu.out /indexers /log /tests/integration/gitea-integration-* -/tests/integration/indexers-* -/tests/e2e/gitea-e2e-* -/tests/e2e/indexers-* -/tests/e2e/reports -/tests/e2e/test-artifacts -/tests/e2e/test-snapshots /tests/*.ini /node_modules /yarn.lock diff --git a/.editorconfig b/.editorconfig index bf1cf757cc..703a834818 100644 --- a/.editorconfig +++ b/.editorconfig @@ -18,7 +18,7 @@ indent_style = tab [templates/custom/*.tmpl] insert_final_newline = false -[templates/swagger/v1_json.tmpl] +[templates/swagger/*_json.tmpl] indent_style = space insert_final_newline = false diff --git a/.github/actions/docker-dryrun/action.yml b/.github/actions/docker-dryrun/action.yml new file mode 100644 index 0000000000..d280ea26ce --- /dev/null +++ b/.github/actions/docker-dryrun/action.yml @@ -0,0 +1,29 @@ +name: docker-dryrun +description: Composite action that performs the container build steps for a single platform. + +inputs: + platform: + description: "The target platform: linux/amd64, linux/arm64, linux/riscv64." + required: true + +runs: + using: composite + steps: + - uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0 + - uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0 + - name: Build regular image + uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0 + with: + context: . + platforms: ${{ inputs.platform }} + push: false + file: Dockerfile + cache-from: type=registry,ref=ghcr.io/go-gitea/gitea:buildcache-rootful + - name: Build rootless image + uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0 + with: + context: . 
+ platforms: ${{ inputs.platform }} + push: false + file: Dockerfile.rootless + cache-from: type=registry,ref=ghcr.io/go-gitea/gitea:buildcache-rootless diff --git a/.github/workflows/cache-seeder.yml b/.github/workflows/cache-seeder.yml index d0801a1078..cd086fae17 100644 --- a/.github/workflows/cache-seeder.yml +++ b/.github/workflows/cache-seeder.yml @@ -27,11 +27,12 @@ concurrency: group: cache-seeder cancel-in-progress: true +permissions: + contents: read + jobs: gobuild: runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 @@ -49,15 +50,13 @@ jobs: lint: runs-on: ubuntu-latest - permissions: - contents: read strategy: fail-fast: false matrix: include: - { job: lint-backend, tags: "bindata sqlite sqlite_unlock_notify", target: "lint-backend" } - { job: lint-go-windows, tags: "bindata sqlite sqlite_unlock_notify", target: "lint-go-windows" } - - { job: lint-go-gogit, tags: "bindata sqlite sqlite_unlock_notify gogit", target: "lint-go" } + - { job: lint-go-gogit, tags: "bindata gogit sqlite sqlite_unlock_notify", target: "lint-go" } steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 diff --git a/.github/workflows/cron-renovate.yml b/.github/workflows/cron-renovate.yml index edeefc26ad..39bcf26ac9 100644 --- a/.github/workflows/cron-renovate.yml +++ b/.github/workflows/cron-renovate.yml @@ -11,13 +11,14 @@ concurrency: env: RENOVATE_VERSION: 43.141.5 # renovate: datasource=docker depName=ghcr.io/renovatebot/renovate +permissions: + contents: read + jobs: cron-renovate: runs-on: ubuntu-latest if: github.repository == 'go-gitea/gitea' # prevent running on forks timeout-minutes: 30 - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: 
renovatebot/github-action@83ec54fee49ab67d9cd201084c1ff325b4b462e4 # v46.1.10 diff --git a/.github/workflows/files-changed.yml b/.github/workflows/files-changed.yml index 5fd43e6cef..78915d81ea 100644 --- a/.github/workflows/files-changed.yml +++ b/.github/workflows/files-changed.yml @@ -15,6 +15,8 @@ on: value: ${{ jobs.detect.outputs.templates }} docker: value: ${{ jobs.detect.outputs.docker }} + dockerfile: + value: ${{ jobs.detect.outputs.dockerfile }} swagger: value: ${{ jobs.detect.outputs.swagger }} yaml: @@ -24,12 +26,13 @@ on: e2e: value: ${{ jobs.detect.outputs.e2e }} +permissions: + contents: read + jobs: detect: runs-on: ubuntu-latest timeout-minutes: 3 - permissions: - contents: read outputs: backend: ${{ steps.changes.outputs.backend }} frontend: ${{ steps.changes.outputs.frontend }} @@ -37,6 +40,7 @@ jobs: actions: ${{ steps.changes.outputs.actions }} templates: ${{ steps.changes.outputs.templates }} docker: ${{ steps.changes.outputs.docker }} + dockerfile: ${{ steps.changes.outputs.dockerfile }} swagger: ${{ steps.changes.outputs.swagger }} yaml: ${{ steps.changes.outputs.yaml }} json: ${{ steps.changes.outputs.json }} @@ -94,6 +98,10 @@ jobs: - "docker/**" - "Makefile" + dockerfile: + - "Dockerfile" + - "Dockerfile.rootless" + swagger: - "templates/swagger/v1_json.tmpl" - "templates/swagger/v1_input.json" diff --git a/.github/workflows/pull-compliance.yml b/.github/workflows/pull-compliance.yml index b057962a21..dc46c9fa42 100644 --- a/.github/workflows/pull-compliance.yml +++ b/.github/workflows/pull-compliance.yml @@ -7,18 +7,17 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + contents: read + jobs: files-changed: uses: ./.github/workflows/files-changed.yml - permissions: - contents: read lint-backend: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: 
read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 @@ -35,93 +34,40 @@ jobs: env: TAGS: bindata sqlite sqlite_unlock_notify - lint-templates: - if: needs.files-changed.outputs.templates == 'true' + lint-on-demand: needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 - - run: uv python install 3.14 - - uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0 - - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0 - with: - node-version: 24 - cache: pnpm - cache-dependency-path: pnpm-lock.yaml - - run: make deps-py - - run: make deps-frontend - - run: make lint-templates - - lint-yaml: - if: needs.files-changed.outputs.yaml == 'true' - needs: files-changed - runs-on: ubuntu-latest - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 - - run: uv python install 3.14 - - run: make deps-py - - run: make lint-yaml - - lint-json: - if: needs.files-changed.outputs.json == 'true' - needs: files-changed - runs-on: ubuntu-latest - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0 - - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0 - with: - node-version: 24 - cache: pnpm - cache-dependency-path: pnpm-lock.yaml - - run: make deps-frontend - - run: make lint-json - - lint-swagger: - if: needs.files-changed.outputs.swagger == 'true' - needs: files-changed - runs-on: ubuntu-latest - permissions: - contents: read - steps: - - uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0 - - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0 - with: - node-version: 24 - cache: pnpm - cache-dependency-path: pnpm-lock.yaml - - run: make deps-frontend - - run: make lint-swagger - - lint-spell: - if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.actions == 'true' || needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.templates == 'true' - needs: files-changed - runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 with: go-version-file: go.mod check-latest: true + cache: false + - uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0 + - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0 + with: + node-version: 24 + cache: pnpm + cache-dependency-path: pnpm-lock.yaml + - run: make lint-spell + - if: needs.files-changed.outputs.templates == 'true' || needs.files-changed.outputs.yaml == 'true' + uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v8.1.0 + - if: needs.files-changed.outputs.templates == 'true' || needs.files-changed.outputs.yaml == 'true' + run: uv python install 3.14 && make deps-py lint-templates lint-yaml + + - if: needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.swagger == 'true' || needs.files-changed.outputs.json == 'true' + run: make deps-frontend lint-md lint-swagger lint-json + + - if: needs.files-changed.outputs.actions == 'true' + run: make lint-actions + lint-go-windows: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - 
permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 @@ -144,8 +90,6 @@ jobs: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 @@ -166,8 +110,6 @@ jobs: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 @@ -186,8 +128,6 @@ jobs: if: needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0 @@ -206,8 +146,6 @@ jobs: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 @@ -217,13 +155,12 @@ jobs: cache: false - uses: ./.github/actions/go-cache with: - cache-name: backend - # no frontend build here as backend should be able to build - # even without any frontend files - - run: make deps-backend - - run: go build -o gitea_no_gcc # test if build succeeds without the sqlite tag + cache-name: compliance-backend + - run: make deps-backend 
generate-go + # no frontend build here as backend should be able to build, even without any frontend files + # CGO is not used when cross-compile, so these steps also test if the code is compatible with CGO disabled - name: build-backend-arm64 - run: make backend # test cross compile + run: go build -o gitea_linux_arm64 env: GOOS: linux GOARCH: arm64 @@ -235,38 +172,7 @@ jobs: GOARCH: amd64 TAGS: bindata gogit - name: build-backend-386 - run: go build -o gitea_linux_386 # test if compatible with 32 bit + run: go build -o gitea_linux_386 env: GOOS: linux GOARCH: 386 - - docs: - if: needs.files-changed.outputs.docs == 'true' || needs.files-changed.outputs.actions == 'true' - needs: files-changed - runs-on: ubuntu-latest - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0 - - uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0 - with: - node-version: 24 - cache: pnpm - cache-dependency-path: pnpm-lock.yaml - - run: make deps-frontend - - run: make lint-md - - actions: - if: needs.files-changed.outputs.actions == 'true' || needs.files-changed.outputs.actions == 'true' - needs: files-changed - runs-on: ubuntu-latest - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 - with: - go-version-file: go.mod - check-latest: true - - run: make lint-actions diff --git a/.github/workflows/pull-db-tests.yml b/.github/workflows/pull-db-tests.yml index d49fc33dad..c2293ac853 100644 --- a/.github/workflows/pull-db-tests.yml +++ b/.github/workflows/pull-db-tests.yml @@ -7,18 +7,17 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + contents: read + jobs: files-changed: uses: 
./.github/workflows/files-changed.yml - permissions: - contents: read test-pgsql: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read services: pgsql: image: postgres:14 @@ -58,21 +57,18 @@ jobs: env: TAGS: bindata - name: run migration tests - run: make test-pgsql-migration + run: GITEA_TEST_DATABASE=pgsql make test-migration - name: run tests - run: make test-pgsql + run: GITEA_TEST_DATABASE=pgsql make test-integration timeout-minutes: 50 env: TAGS: bindata gogit - TEST_TAGS: gogit TEST_LDAP: 1 test-sqlite: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 @@ -84,30 +80,32 @@ jobs: with: cache-name: sqlite - run: make deps-backend - - run: GOEXPERIMENT='' make backend + - run: make backend env: TAGS: bindata gogit sqlite sqlite_unlock_notify + GOEXPERIMENT: - name: run migration tests - run: make test-sqlite-migration + run: GITEA_TEST_DATABASE=sqlite make test-migration + env: + TAGS: bindata gogit - name: run tests - run: GOEXPERIMENT='' make test-sqlite + run: GITEA_TEST_DATABASE=sqlite make test-integration timeout-minutes: 50 env: - TAGS: bindata gogit sqlite sqlite_unlock_notify - RACE_ENABLED: true - TEST_TAGS: gogit sqlite sqlite_unlock_notify + TAGS: bindata gogit + GOEXPERIMENT: + GOTEST_FLAGS: -race -timeout=40m test-unit: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read services: elasticsearch: - image: elasticsearch:7.5.0 + image: docker.elastic.co/elasticsearch/elasticsearch:8.19.14 env: discovery.type: single-node + 
xpack.security.enabled: false ports: - "9200:9200" meilisearch: @@ -156,24 +154,21 @@ jobs: - name: unit-tests run: make test-backend test-check env: - TAGS: bindata - RACE_ENABLED: true - GOTESTFLAGS: -timeout=20m + TAGS: bindata sqlite sqlite_unlock_notify + GOTEST_FLAGS: -race -timeout=20m GITHUB_READ_TOKEN: ${{ secrets.GITHUB_READ_TOKEN }} - name: unit-tests-gogit - run: GOEXPERIMENT='' make test-backend test-check + run: make test-backend test-check env: - TAGS: bindata gogit - RACE_ENABLED: true - GOTESTFLAGS: -timeout=20m + TAGS: bindata gogit sqlite sqlite_unlock_notify + GOEXPERIMENT: + GOTEST_FLAGS: -race -timeout=20m GITHUB_READ_TOKEN: ${{ secrets.GITHUB_READ_TOKEN }} test-mysql: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read services: mysql: # the bitnami mysql image has more options than the official one, it's easier to customize @@ -186,9 +181,10 @@ jobs: options: >- --mount type=tmpfs,destination=/bitnami/mysql/data elasticsearch: - image: elasticsearch:7.5.0 + image: docker.elastic.co/elasticsearch/elasticsearch:8.19.14 env: discovery.type: single-node + xpack.security.enabled: false ports: - "9200:9200" smtpimap: @@ -215,10 +211,9 @@ jobs: env: TAGS: bindata - name: run migration tests - run: make test-mysql-migration + run: GITEA_TEST_DATABASE=mysql make test-migration - name: run tests - # run: make integration-test-coverage (at the moment, no coverage is really handled) - run: make test-mysql + run: GITEA_TEST_DATABASE=mysql make test-integration env: TAGS: bindata TEST_INDEXER_CODE_ES_URL: "http://elastic:changeme@elasticsearch:9200" @@ -227,8 +222,6 @@ jobs: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.actions == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read services: mssql: image: mcr.microsoft.com/mssql/server:2019-latest @@ -258,9 +251,9 @@ 
jobs: - run: make backend env: TAGS: bindata - - run: make test-mssql-migration + - run: GITEA_TEST_DATABASE=mssql make test-migration - name: run tests - run: make test-mssql + run: GITEA_TEST_DATABASE=mssql make test-integration timeout-minutes: 50 env: TAGS: bindata diff --git a/.github/workflows/pull-docker-dryrun.yml b/.github/workflows/pull-docker-dryrun.yml index e0c0fff815..43a4f48669 100644 --- a/.github/workflows/pull-docker-dryrun.yml +++ b/.github/workflows/pull-docker-dryrun.yml @@ -7,34 +7,41 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + contents: read + jobs: files-changed: uses: ./.github/workflows/files-changed.yml - permissions: - contents: read - container: + # QEMU-based build is slow (40-50 minutes), so run arm64 and riscv64 when dockerfile changes. + # Run amd64 when any docker-related files change, which is fast (4 minutes). + container-amd64: if: needs.files-changed.outputs.docker == 'true' - needs: files-changed + needs: [files-changed] runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0 - - uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0 - - name: Build regular container image - uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0 + - uses: ./.github/actions/docker-dryrun with: - context: . 
- platforms: linux/amd64,linux/arm64,linux/riscv64 - push: false - cache-from: type=registry,ref=ghcr.io/go-gitea/gitea:buildcache-rootful - - name: Build rootless container image - uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0 + platform: linux/amd64 + + container-arm64: + if: needs.files-changed.outputs.dockerfile == 'true' + needs: [files-changed] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/docker-dryrun with: - context: . - push: false - platforms: linux/amd64,linux/arm64,linux/riscv64 - file: Dockerfile.rootless - cache-from: type=registry,ref=ghcr.io/go-gitea/gitea:buildcache-rootless + platform: linux/arm64 + + container-riscv64: + if: needs.files-changed.outputs.dockerfile == 'true' + needs: [files-changed] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/docker-dryrun + with: + platform: linux/riscv64 diff --git a/.github/workflows/pull-e2e-tests.yml b/.github/workflows/pull-e2e-tests.yml index afa9587022..974d82ea5e 100644 --- a/.github/workflows/pull-e2e-tests.yml +++ b/.github/workflows/pull-e2e-tests.yml @@ -7,18 +7,17 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + contents: read + jobs: files-changed: uses: ./.github/workflows/files-changed.yml - permissions: - contents: read test-e2e: if: needs.files-changed.outputs.backend == 'true' || needs.files-changed.outputs.frontend == 'true' || needs.files-changed.outputs.e2e == 'true' needs: files-changed runs-on: ubuntu-latest - permissions: - contents: read steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0 @@ -39,10 +38,13 @@ jobs: - run: make deps-frontend - run: make frontend - run: make deps-backend - - 
run: make gitea-e2e + - run: make backend + env: + TAGS: bindata sqlite sqlite_unlock_notify - run: make playwright - run: make test-e2e timeout-minutes: 10 env: + TAGS: bindata sqlite sqlite_unlock_notify FORCE_COLOR: 1 GITEA_TEST_E2E_DEBUG: 1 diff --git a/.gitignore b/.gitignore index 019ee94c7a..76a7578646 100644 --- a/.gitignore +++ b/.gitignore @@ -55,10 +55,7 @@ cpu.out *.log.*.gz /gitea -/gitea-e2e -/gitea-vet /debug -/integrations.test /bin /dist diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3f0c548dcb..f1871f1470 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -139,11 +139,11 @@ Here's how to run the test suite: - run tests (we suggest running them on Linux) -| Command | Action | | -| :------------------------------------------ | :------------------------------------------------------- | ------------------------------------------- | -|``make test[\#SpecificTestName]`` | run unit test(s) | | -|``make test-sqlite[\#SpecificTestName]`` | run [integration](tests/integration) test(s) for SQLite | [More details](tests/integration/README.md) | -|``make test-e2e`` | run [end-to-end](tests/e2e) test(s) using Playwright | | +| Command | Action | | +|:----------------------------------------------|:-----------------------------------------------------| ------------------------------------------- | +| ``make test-backend[\#SpecificTestName]`` | run unit test(s) | | +| ``make test-integration[\#SpecificTestName]`` | run [integration](tests/integration) test(s) | [More details](tests/integration/README.md) | +| ``make test-e2e`` | run [end-to-end](tests/e2e) test(s) using Playwright | | - E2E test environment variables diff --git a/Makefile b/Makefile index ae053a8368..ad7739c07b 100644 --- a/Makefile +++ b/Makefile @@ -7,10 +7,9 @@ export GOEXPERIMENT ?= jsonv2 GO ?= go SHASUM ?= shasum -a 256 -HAS_GO := $(shell hash $(GO) > /dev/null 2>&1 && echo yes) COMMA := , -XGO_VERSION := go-1.25.x +XGO_VERSION := go-1.26.x AIR_PACKAGE ?= github.com/air-verse/air@v1 
# renovate: datasource=go EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3 # renovate: datasource=go @@ -22,15 +21,28 @@ XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1 # renovate: datasource=go ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1.7.11 # renovate: datasource=go -DOCKER_IMAGE ?= gitea/gitea -DOCKER_TAG ?= latest -DOCKER_REF := $(DOCKER_IMAGE):$(DOCKER_TAG) - +HAS_GO := $(shell hash $(GO) > /dev/null 2>&1 && echo yes) ifeq ($(HAS_GO), yes) CGO_EXTRA_CFLAGS := -DSQLITE_MAX_VARIABLE_NUMBER=32766 CGO_CFLAGS ?= $(shell $(GO) env CGO_CFLAGS) $(CGO_EXTRA_CFLAGS) endif +MAKE_EVIDENCE_DIR := .make_evidence + +# Use sqlite as default database if running tests, only do so for local tests, not in CI. +# CI should explicitly set the database to avoid unexpected results. +ifneq ($(findstring test-,$(MAKECMDGOALS)),) + ifeq ($(CI),) + GITEA_TEST_DATABASE ?= sqlite + endif +endif + +TAGS ?= +ifeq ($(GITEA_TEST_DATABASE),sqlite) + TAGS += sqlite sqlite_unlock_notify +endif +TAGS_EVIDENCE := $(MAKE_EVIDENCE_DIR)/tags + CGO_ENABLED ?= 0 ifneq (,$(findstring sqlite,$(TAGS))$(findstring pam,$(TAGS))) CGO_ENABLED = 1 @@ -49,15 +61,16 @@ else ifeq ($(patsubst Windows%,Windows,$(OS)),Windows) IS_WINDOWS := yes endif endif + +# GOFLAGS and EXTRA_GOFLAGS are for the 'go build' command only ifeq ($(IS_WINDOWS),yes) GOFLAGS := -v -buildmode=exe EXECUTABLE ?= gitea.exe - EXECUTABLE_E2E ?= gitea-e2e.exe else GOFLAGS := -v EXECUTABLE ?= gitea - EXECUTABLE_E2E ?= gitea-e2e endif +EXTRA_GOFLAGS ?= ifeq ($(shell sed --version 2>/dev/null | grep -q GNU && echo gnu),gnu) SED_INPLACE := sed -i @@ -65,15 +78,8 @@ else SED_INPLACE := sed -i '' endif -EXTRA_GOFLAGS ?= - -MAKE_EVIDENCE_DIR := .make_evidence - -GOTESTFLAGS ?= -ifeq ($(RACE_ENABLED),true) - GOFLAGS += -race - GOTESTFLAGS += -race -endif +# GOTEST_FLAGS is for unit test and integration 
test +GOTEST_FLAGS ?= -timeout 40m STORED_VERSION_FILE := VERSION @@ -126,12 +132,6 @@ AIR_TMP_DIR := .air GO_LICENSE_FILE := assets/go-licenses.json -TAGS ?= -TAGS_SPLIT := $(subst $(COMMA), ,$(TAGS)) -TAGS_EVIDENCE := $(MAKE_EVIDENCE_DIR)/tags - -TEST_TAGS ?= $(TAGS_SPLIT) sqlite sqlite_unlock_notify - TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(DIST) $(MAKE_EVIDENCE_DIR) $(AIR_TMP_DIR) GO_DIRS := build cmd models modules routers services tests tools @@ -151,6 +151,7 @@ ESLINT_CONCURRENCY ?= 2 SWAGGER_SPEC := templates/swagger/v1_json.tmpl SWAGGER_SPEC_INPUT := templates/swagger/v1_input.json SWAGGER_EXCLUDE := code.gitea.io/sdk +OPENAPI3_SPEC := templates/swagger/v1_openapi3_json.tmpl TEST_MYSQL_HOST ?= mysql:3306 TEST_MYSQL_DBNAME ?= testgitea @@ -163,13 +164,19 @@ TEST_PGSQL_PASSWORD ?= postgres TEST_PGSQL_SCHEMA ?= gtestschema TEST_MINIO_ENDPOINT ?= minio:9000 TEST_MSSQL_HOST ?= mssql:1433 -TEST_MSSQL_DBNAME ?= gitea +TEST_MSSQL_DBNAME ?= testgitea TEST_MSSQL_USERNAME ?= sa TEST_MSSQL_PASSWORD ?= MwantsaSecurePassword1 # Include local Makefile # Makefile.local is listed in .gitignore -sinclude Makefile.local +ifneq ("$(wildcard Makefile.local)","") + include Makefile.local +endif + +$(foreach v, $(filter TEST_%, $(.VARIABLES)), $(eval MAKEFILE_VARS+=$v=$($v))) +$(foreach v, $(filter GITEA_TEST_%, $(.VARIABLES)), $(eval MAKEFILE_VARS+=$v=$($v))) +export MAKEFILE_VARS .PHONY: all all: build @@ -178,15 +185,8 @@ all: build help: Makefile ## print Makefile help information. 
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m[TARGETS] default target: build\033[0m\n\n\033[35mTargets:\033[0m\n"} /^[0-9A-Za-z._-]+:.*?##/ { printf " \033[36m%-45s\033[0m %s\n", $$1, $$2 }' Makefile #$(MAKEFILE_LIST) @printf " \033[36m%-46s\033[0m %s\n" "test-e2e" "test end to end using playwright" - @printf " \033[36m%-46s\033[0m %s\n" "test[#TestSpecificName]" "run unit test" - @printf " \033[36m%-46s\033[0m %s\n" "test-sqlite[#TestSpecificName]" "run integration test for sqlite" - -.PHONY: git-check -git-check: - @if git lfs >/dev/null 2>&1 ; then : ; else \ - echo "Gitea requires git with lfs support to run tests." ; \ - exit 1; \ - fi + @printf " \033[36m%-46s\033[0m %s\n" "test-backend[#TestSpecificName]" "run unit test (sqlite only)" + @printf " \033[36m%-46s\033[0m %s\n" "test-integration[#TestSpecificName]" "run integration test for GITEA_TEST_DATABASE (sqlite, mysql, pgsql, mssql)" .PHONY: clean-all clean-all: clean ## delete backend, frontend and integration files @@ -194,14 +194,8 @@ clean-all: clean ## delete backend, frontend and integration files .PHONY: clean clean: ## delete backend and integration files - rm -rf $(EXECUTABLE) $(EXECUTABLE_E2E) $(DIST) $(BINDATA_DEST_WILDCARD) \ - integrations*.test \ - tests/integration/gitea-integration-* \ - tests/integration/indexers-* \ - tests/sqlite.ini tests/mysql.ini tests/pgsql.ini tests/mssql.ini man/ \ - tests/e2e/gitea-e2e-*/ \ - tests/e2e/indexers-*/ \ - tests/e2e/reports/ tests/e2e/test-artifacts/ tests/e2e/test-snapshots/ + rm -f $(EXECUTABLE) test-*.test tests/*.ini + rm -rf $(DIST) $(BINDATA_DEST_WILDCARD) man tests/integration/gitea-integration-* .PHONY: fmt fmt: ## format the Go and template code @@ -233,7 +227,7 @@ TAGS_PREREQ := $(TAGS_EVIDENCE) endif .PHONY: generate-swagger -generate-swagger: $(SWAGGER_SPEC) ## generate the swagger spec from code comments +generate-swagger: $(SWAGGER_SPEC) $(OPENAPI3_SPEC) ## generate the swagger spec from code comments $(SWAGGER_SPEC): 
$(GO_SOURCES) $(SWAGGER_SPEC_INPUT) $(GO) run $(SWAGGER_PACKAGE) generate spec --exclude "$(SWAGGER_EXCLUDE)" --input "$(SWAGGER_SPEC_INPUT)" --output './$(SWAGGER_SPEC)' @@ -255,6 +249,21 @@ swagger-validate: ## check if the swagger spec is valid $(GO) run $(SWAGGER_PACKAGE) validate './$(SWAGGER_SPEC)' @$(SED_INPLACE) -E -e 's|"basePath":( *)"/(.*)"|"basePath":\1"\2"|g' './$(SWAGGER_SPEC)' # remove the prefix slash from basePath +.PHONY: generate-openapi3 +generate-openapi3: $(OPENAPI3_SPEC) ## generate the OpenAPI 3.0 spec from the Swagger 2.0 spec + +$(OPENAPI3_SPEC): $(SWAGGER_SPEC) build/generate-openapi.go $(wildcard build/openapi3gen/*.go) + $(GO) run build/generate-openapi.go + +.PHONY: openapi3-check +openapi3-check: generate-openapi3 + @diff=$$(git diff --color=always '$(OPENAPI3_SPEC)'); \ + if [ -n "$$diff" ]; then \ + echo "Please run 'make generate-openapi3' and commit the result:"; \ + printf "%s" "$${diff}"; \ + exit 1; \ + fi + .PHONY: checks checks: checks-frontend checks-backend ## run various consistency checks @@ -262,10 +271,10 @@ checks: checks-frontend checks-backend ## run various consistency checks checks-frontend: lockfile-check svg-check ## check frontend files .PHONY: checks-backend -checks-backend: tidy-check swagger-check fmt-check swagger-validate security-check ## check backend files +checks-backend: tidy-check swagger-check openapi3-check fmt-check swagger-validate security-check ## check backend files .PHONY: lint -lint: lint-frontend lint-backend lint-spell ## lint everything +lint: lint-frontend lint-backend lint-templates lint-swagger lint-spell lint-md lint-actions lint-json lint-yaml ## lint everything .PHONY: lint-fix lint-fix: lint-frontend-fix lint-backend-fix lint-spell-fix ## lint everything and fix issues @@ -373,13 +382,10 @@ watch-frontend: node_modules ## start vite dev server for frontend watch-backend: ## watch backend files and continuously rebuild GITEA_RUN_MODE=dev $(GO) run $(AIR_PACKAGE) -c .air.toml -.PHONY: 
test -test: test-frontend test-backend ## test everything - .PHONY: test-backend test-backend: ## test backend files - @echo "Running go test with $(GOTESTFLAGS) -tags '$(TEST_TAGS)'..." - @$(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' $(GO_TEST_PACKAGES) + @echo "Running go test with $(GOTEST_FLAGS) -tags '$(TAGS)'..." + @$(GO) test $(GOTEST_FLAGS) -tags='$(TAGS)' $(GO_TEST_PACKAGES) .PHONY: test-frontend test-frontend: node_modules ## test frontend files @@ -397,10 +403,10 @@ test-check: exit 1; \ fi -.PHONY: test\#% -test\#%: - @echo "Running go test with -tags '$(TEST_TAGS)'..." - @$(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -run $(subst .,/,$*) $(GO_TEST_PACKAGES) +.PHONY: test-backend\#% +test-backend\#%: + @echo "Running go test with -tags '$(TAGS)'..." + @$(GO) test $(GOTEST_FLAGS) -tags='$(TAGS)' -run $(subst .,/,$*) $(GO_TEST_PACKAGES) .PHONY: coverage coverage: @@ -410,8 +416,8 @@ coverage: .PHONY: unit-test-coverage unit-test-coverage: - @echo "Running unit-test-coverage $(GOTESTFLAGS) -tags '$(TEST_TAGS)'..." - @$(GO) test $(GOTESTFLAGS) -timeout=20m -tags='$(TEST_TAGS)' -cover -coverprofile coverage.out $(GO_TEST_PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1 + @echo "Running unit-test-coverage $(GOTEST_FLAGS) -tags '$(TAGS)'..." 
+ @$(GO) test $(GOTEST_FLAGS) -tags='$(TAGS)' -cover -coverprofile coverage.out $(GO_TEST_PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1 .PHONY: tidy tidy: ## run go mod tidy @@ -438,83 +444,33 @@ go-licenses: $(GO_LICENSE_FILE) ## regenerate go licenses $(GO_LICENSE_FILE): go.mod go.sum GO=$(GO) $(GO) run build/generate-go-licenses.go $(GO_LICENSE_FILE) -generate-ini-sqlite: - sed -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-sqlite|g' \ - -e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \ - tests/sqlite.ini.tmpl > tests/sqlite.ini +.PHONY: test-integration +test-integration: + @# Use a compiled binary: testlogger forwards gitea logs to t.Log, so `go test -v` + @# would flood output per passing test. testcache can't help these tests anyway — + @# they mutate the work directory, so cache inputs change between runs. + $(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' -c code.gitea.io/gitea/tests/integration -o ./test-integration-$(GITEA_TEST_DATABASE).test + ./test-integration-$(GITEA_TEST_DATABASE).test -.PHONY: test-sqlite -test-sqlite: integrations.sqlite.test generate-ini-sqlite - GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test +.PHONY: test-integration\#% +test-integration\#%: + $(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' -run $(subst .,/,$*) code.gitea.io/gitea/tests/integration -.PHONY: test-sqlite\#% -test-sqlite\#%: integrations.sqlite.test generate-ini-sqlite - GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.run $(subst .,/,$*) +.PHONY: test-migration +test-migration: migrations.integration.test migrations.individual.test -.PHONY: test-sqlite-migration -test-sqlite-migration: migrations.sqlite.test migrations.individual.sqlite.test +.PHONY: migrations.integration.test +migrations.integration.test: + $(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' code.gitea.io/gitea/tests/integration/migration-test -generate-ini-mysql: - sed -e 
's|{{TEST_MYSQL_HOST}}|${TEST_MYSQL_HOST}|g' \ - -e 's|{{TEST_MYSQL_DBNAME}}|${TEST_MYSQL_DBNAME}|g' \ - -e 's|{{TEST_MYSQL_USERNAME}}|${TEST_MYSQL_USERNAME}|g' \ - -e 's|{{TEST_MYSQL_PASSWORD}}|${TEST_MYSQL_PASSWORD}|g' \ - -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-mysql|g' \ - -e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \ - tests/mysql.ini.tmpl > tests/mysql.ini +.PHONY: migrations.individual.test +migrations.individual.test: + @# tests of multiple packages use the same database, don't run in parallel + $(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) -.PHONY: test-mysql -test-mysql: integrations.mysql.test generate-ini-mysql - GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test - -.PHONY: test-mysql\#% -test-mysql\#%: integrations.mysql.test generate-ini-mysql - GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test -test.run $(subst .,/,$*) - -.PHONY: test-mysql-migration -test-mysql-migration: migrations.mysql.test migrations.individual.mysql.test - -generate-ini-pgsql: - sed -e 's|{{TEST_PGSQL_HOST}}|${TEST_PGSQL_HOST}|g' \ - -e 's|{{TEST_PGSQL_DBNAME}}|${TEST_PGSQL_DBNAME}|g' \ - -e 's|{{TEST_PGSQL_USERNAME}}|${TEST_PGSQL_USERNAME}|g' \ - -e 's|{{TEST_PGSQL_PASSWORD}}|${TEST_PGSQL_PASSWORD}|g' \ - -e 's|{{TEST_PGSQL_SCHEMA}}|${TEST_PGSQL_SCHEMA}|g' \ - -e 's|{{TEST_MINIO_ENDPOINT}}|${TEST_MINIO_ENDPOINT}|g' \ - -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-pgsql|g' \ - -e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \ - tests/pgsql.ini.tmpl > tests/pgsql.ini - -.PHONY: test-pgsql -test-pgsql: integrations.pgsql.test generate-ini-pgsql - GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test - -.PHONY: test-pgsql\#% -test-pgsql\#%: integrations.pgsql.test generate-ini-pgsql - GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test -test.run $(subst .,/,$*) - -.PHONY: 
test-pgsql-migration -test-pgsql-migration: migrations.pgsql.test migrations.individual.pgsql.test - -generate-ini-mssql: - sed -e 's|{{TEST_MSSQL_HOST}}|${TEST_MSSQL_HOST}|g' \ - -e 's|{{TEST_MSSQL_DBNAME}}|${TEST_MSSQL_DBNAME}|g' \ - -e 's|{{TEST_MSSQL_USERNAME}}|${TEST_MSSQL_USERNAME}|g' \ - -e 's|{{TEST_MSSQL_PASSWORD}}|${TEST_MSSQL_PASSWORD}|g' \ - -e 's|{{WORK_PATH}}|$(CURDIR)/tests/$(or $(TEST_TYPE),integration)/gitea-$(or $(TEST_TYPE),integration)-mssql|g' \ - -e 's|{{TEST_LOGGER}}|$(or $(TEST_LOGGER),test$(COMMA)file)|g' \ - tests/mssql.ini.tmpl > tests/mssql.ini - -.PHONY: test-mssql -test-mssql: integrations.mssql.test generate-ini-mssql - GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test - -.PHONY: test-mssql\#% -test-mssql\#%: integrations.mssql.test generate-ini-mssql - GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test -test.run $(subst .,/,$*) - -.PHONY: test-mssql-migration -test-mssql-migration: migrations.mssql.test migrations.individual.mssql.test +.PHONY: migrations.individual.test\#% +migrations.individual.test\#%: + $(GO) test $(GOTEST_FLAGS) -tags '$(TAGS)' code.gitea.io/gitea/models/migrations/$* .PHONY: playwright playwright: deps-frontend @@ -522,109 +478,8 @@ playwright: deps-frontend @pnpm exec playwright install $(if $(GITHUB_ACTIONS),,--with-deps) chromium firefox $(PLAYWRIGHT_FLAGS) .PHONY: test-e2e -test-e2e: playwright $(EXECUTABLE_E2E) - @EXECUTABLE=$(EXECUTABLE_E2E) ./tools/test-e2e.sh $(GITEA_TEST_E2E_FLAGS) - -.PHONY: bench-sqlite -bench-sqlite: integrations.sqlite.test generate-ini-sqlite - GITEA_TEST_CONF=tests/sqlite.ini ./integrations.sqlite.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench . - -.PHONY: bench-mysql -bench-mysql: integrations.mysql.test generate-ini-mysql - GITEA_TEST_CONF=tests/mysql.ini ./integrations.mysql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench . 
- -.PHONY: bench-mssql -bench-mssql: integrations.mssql.test generate-ini-mssql - GITEA_TEST_CONF=tests/mssql.ini ./integrations.mssql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench . - -.PHONY: bench-pgsql -bench-pgsql: integrations.pgsql.test generate-ini-pgsql - GITEA_TEST_CONF=tests/pgsql.ini ./integrations.pgsql.test -test.cpuprofile=cpu.out -test.run DontRunTests -test.bench . - -.PHONY: integration-test-coverage -integration-test-coverage: integrations.cover.test generate-ini-mysql - GITEA_TEST_CONF=tests/mysql.ini ./integrations.cover.test -test.coverprofile=integration.coverage.out - -.PHONY: integration-test-coverage-sqlite -integration-test-coverage-sqlite: integrations.cover.sqlite.test generate-ini-sqlite - GITEA_TEST_CONF=tests/sqlite.ini ./integrations.cover.sqlite.test -test.coverprofile=integration.coverage.out - -integrations.mysql.test: git-check $(GO_SOURCES) - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.mysql.test - -integrations.pgsql.test: git-check $(GO_SOURCES) - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.pgsql.test - -integrations.mssql.test: git-check $(GO_SOURCES) - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.mssql.test - -integrations.sqlite.test: git-check $(GO_SOURCES) - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -o integrations.sqlite.test -tags '$(TEST_TAGS)' - -integrations.cover.test: git-check $(GO_SOURCES) - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -coverpkg $(shell echo $(GO_TEST_PACKAGES) | tr ' ' ',') -o integrations.cover.test - -integrations.cover.sqlite.test: git-check $(GO_SOURCES) - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration -coverpkg $(shell echo $(GO_TEST_PACKAGES) | tr ' ' ',') -o integrations.cover.sqlite.test -tags '$(TEST_TAGS)' - -.PHONY: migrations.mysql.test -migrations.mysql.test: $(GO_SOURCES) 
generate-ini-mysql - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.mysql.test - GITEA_TEST_CONF=tests/mysql.ini ./migrations.mysql.test - -.PHONY: migrations.pgsql.test -migrations.pgsql.test: $(GO_SOURCES) generate-ini-pgsql - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.pgsql.test - GITEA_TEST_CONF=tests/pgsql.ini ./migrations.pgsql.test - -.PHONY: migrations.mssql.test -migrations.mssql.test: $(GO_SOURCES) generate-ini-mssql - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.mssql.test - GITEA_TEST_CONF=tests/mssql.ini ./migrations.mssql.test - -.PHONY: migrations.sqlite.test -migrations.sqlite.test: $(GO_SOURCES) generate-ini-sqlite - $(GO) test $(GOTESTFLAGS) -c code.gitea.io/gitea/tests/integration/migration-test -o migrations.sqlite.test -tags '$(TEST_TAGS)' - GITEA_TEST_CONF=tests/sqlite.ini ./migrations.sqlite.test - -.PHONY: migrations.individual.mysql.test -migrations.individual.mysql.test: $(GO_SOURCES) generate-ini-mysql - GITEA_TEST_CONF=tests/mysql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) - -.PHONY: migrations.individual.sqlite.test\#% -migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite - GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$* - -.PHONY: migrations.individual.pgsql.test -migrations.individual.pgsql.test: $(GO_SOURCES) generate-ini-pgsql - GITEA_TEST_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) - -.PHONY: migrations.individual.pgsql.test\#% -migrations.individual.pgsql.test\#%: $(GO_SOURCES) generate-ini-pgsql - GITEA_TEST_CONF=tests/pgsql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$* - -.PHONY: migrations.individual.mssql.test -migrations.individual.mssql.test: 
$(GO_SOURCES) generate-ini-mssql - GITEA_TEST_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) - -.PHONY: migrations.individual.mssql.test\#% -migrations.individual.mssql.test\#%: $(GO_SOURCES) generate-ini-mssql - GITEA_TEST_CONF=tests/mssql.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$* - -.PHONY: migrations.individual.sqlite.test -migrations.individual.sqlite.test: $(GO_SOURCES) generate-ini-sqlite - GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' -p 1 $(MIGRATE_TEST_PACKAGES) - -.PHONY: migrations.individual.sqlite.test\#% -migrations.individual.sqlite.test\#%: $(GO_SOURCES) generate-ini-sqlite - GITEA_TEST_CONF=tests/sqlite.ini $(GO) test $(GOTESTFLAGS) -tags '$(TEST_TAGS)' code.gitea.io/gitea/models/migrations/$* - -.PHONY: check -check: test - -.PHONY: install $(TAGS_PREREQ) -install: $(wildcard *.go) - CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)' +test-e2e: playwright frontend backend + @EXECUTABLE=$(EXECUTABLE) ./tools/test-e2e.sh $(GITEA_TEST_E2E_FLAGS) .PHONY: build build: frontend backend ## build everything @@ -657,9 +512,6 @@ ifneq ($(and $(STATIC),$(findstring pam,$(TAGS))),) endif CGO_ENABLED="$(CGO_ENABLED)" CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@ -$(EXECUTABLE_E2E): $(GO_SOURCES) $(FRONTEND_DEST) - CGO_ENABLED=1 $(GO) build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TEST_TAGS)' -ldflags '-s -w $(EXTLDFLAGS) $(LDFLAGS)' -o $@ - .PHONY: release release: frontend generate release-windows release-linux release-darwin release-freebsd release-copy release-compress vendor release-sources release-check @@ -817,11 +669,6 @@ generate-manpage: ## generate manpage @gzip -9 man/man1/gitea.1 && echo man/man1/gitea.1.gz created @#TODO A small script that formats config-cheat-sheet.en-us.md nicely for use as a 
config man page -.PHONY: docker -docker: - docker build --disable-content-trust=false -t $(DOCKER_REF) . -# support also build args docker build --build-arg GITEA_VERSION=v1.2.3 --build-arg TAGS="bindata sqlite sqlite_unlock_notify" . - # Disable parallel execution because it would break some targets that don't # specify exact dependencies like 'backend' which does currently not depend # on 'frontend' to enable Node.js-less builds from source tarballs. diff --git a/assets/go-licenses.json b/assets/go-licenses.json index 61bf76702c..e642cb2c59 100644 --- a/assets/go-licenses.json +++ b/assets/go-licenses.json @@ -489,6 +489,11 @@ "path": "github.com/fxamacker/cbor/v2/LICENSE", "licenseText": "MIT License\n\nCopyright (c) 2019-present Faye Amacker\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE." 
}, + { + "name": "github.com/getkin/kin-openapi", + "path": "github.com/getkin/kin-openapi/LICENSE", + "licenseText": "MIT License\n\nCopyright (c) 2017-2018 the project authors.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n" + }, { "name": "github.com/git-lfs/pktline", "path": "github.com/git-lfs/pktline/LICENSE.md", @@ -554,6 +559,16 @@ "path": "github.com/go-ldap/ldap/v3/LICENSE", "licenseText": "The MIT License (MIT)\n\nCopyright (c) 2011-2015 Michael Mitton (mmitton@gmail.com)\nPortions copyright (c) 2015-2024 go-ldap Authors\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above 
copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n" }, + { + "name": "github.com/go-openapi/jsonpointer", + "path": "github.com/go-openapi/jsonpointer/LICENSE", + "licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. 
For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. 
For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. 
If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. 
You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. 
Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. 
We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n" + }, + { + "name": "github.com/go-openapi/swag", + "path": "github.com/go-openapi/swag/LICENSE", + "licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. 
For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. 
For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. 
If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. 
You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. 
Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. 
We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n" + }, { "name": "github.com/go-redsync/redsync/v4", "path": "github.com/go-redsync/redsync/v4/LICENSE", @@ -929,6 +944,11 @@ "path": "github.com/modern-go/reflect2/LICENSE", "licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. 
For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. 
For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. 
If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. 
You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. 
Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. 
We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n" }, + { + "name": "github.com/mohae/deepcopy", + "path": "github.com/mohae/deepcopy/LICENSE", + "licenseText": "The MIT License (MIT)\n\nCopyright (c) 2014 Joel\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n" + }, { "name": "github.com/mschoch/smat", "path": "github.com/mschoch/smat/LICENSE", @@ -954,6 +974,16 @@ "path": "github.com/nwaples/rardecode/v2/LICENSE", "licenseText": "Copyright (c) 2015, Nicholas Waples\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n* Redistributions of source code must retain the above copyright notice, this\n list of conditions and the following disclaimer.\n\n* Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n" }, + { + "name": "github.com/oasdiff/yaml", + "path": "github.com/oasdiff/yaml/LICENSE", + "licenseText": "The MIT License (MIT)\n\nCopyright (c) 2014 Sam Ghods\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n\nCopyright (c) 2012 The Go Authors. 
All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n" + }, + { + "name": "github.com/oasdiff/yaml3", + "path": "github.com/oasdiff/yaml3/LICENSE", + "licenseText": "\nThis project is covered by two different licenses: MIT and Apache.\n\n#### MIT License ####\n\nThe following files were ported to Go from C files of libyaml, and thus\nare still covered by their original MIT license, with the additional\ncopyright staring in 2011 when the project was ported over:\n\n apic.go emitterc.go parserc.go readerc.go scannerc.go\n writerc.go yamlh.go yamlprivateh.go\n\nCopyright (c) 
2006-2010 Kirill Simonov\nCopyright (c) 2006-2011 Kirill Simonov\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\nof the Software, and to permit persons to whom the Software is furnished to do\nso, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n### Apache License ###\n\nAll the remaining project files are covered by the Apache license:\n\nCopyright (c) 2011-2019 Canonical Ltd\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n" + }, { "name": "github.com/olekukonko/cat", "path": "github.com/olekukonko/cat/LICENSE", @@ -974,16 +1004,6 @@ "path": "github.com/olekukonko/tablewriter/LICENSE.md", "licenseText": "Copyright (C) 2014 by Oleku 
Konko\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE.\n" }, - { - "name": "github.com/olivere/elastic/v7", - "path": "github.com/olivere/elastic/v7/LICENSE", - "licenseText": "The MIT License (MIT)\nCopyright © 2012-2015 Oliver Eilhard\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the “Software”), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included\nin all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND 
NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\nIN THE SOFTWARE.\n" - }, - { - "name": "github.com/olivere/elastic/v7/uritemplates", - "path": "github.com/olivere/elastic/v7/uritemplates/LICENSE", - "licenseText": "Copyright (c) 2013 Joshua Tacoma\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\nthe Software, and to permit persons to whom the Software is furnished to do so,\nsubject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n" - }, { "name": "github.com/opencontainers/go-digest", "path": "github.com/opencontainers/go-digest/LICENSE", @@ -994,6 +1014,11 @@ "path": "github.com/opencontainers/image-spec/LICENSE", "licenseText": "\n Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. 
Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n Copyright 2016 The Linux Foundation.\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n" }, + { + "name": "github.com/perimeterx/marshmallow", + "path": "github.com/perimeterx/marshmallow/LICENSE", + "licenseText": "MIT License\n\nCopyright (c) 2022 PerimeterX\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n" + }, { "name": "github.com/philhofer/fwd", "path": "github.com/philhofer/fwd/LICENSE.md", @@ -1169,6 +1194,11 @@ "path": "github.com/wneessen/go-mail/smtp/LICENSE", "licenseText": "Copyright (c) 2009 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
}, + { + "name": "github.com/woodsbury/decimal128", + "path": "github.com/woodsbury/decimal128/LICENCE", + "licenseText": "BSD Zero Clause License\n\nCopyright (c) 2022 Wade Smith\n\nPermission to use, copy, modify, and/or distribute this software for any\npurpose with or without fee is hereby granted.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\nPERFORMANCE OF THIS SOFTWARE.\n" + }, { "name": "github.com/x448/float16", "path": "github.com/x448/float16/LICENSE", diff --git a/build/generate-openapi.go b/build/generate-openapi.go new file mode 100644 index 0000000000..7b37a5bbba --- /dev/null +++ b/build/generate-openapi.go @@ -0,0 +1,97 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +// generate-openapi converts Gitea's Swagger 2.0 spec into an OpenAPI 3.0 spec. +// +// Gitea generates a Swagger 2.0 spec from code annotations (make generate-swagger). +// This tool converts it to OAS3 so that SDK generators and tools that require +// OAS3 (e.g. progenitor for Rust) can consume it directly. The conversion also +// deduplicates inline enum definitions into named schema components, producing +// cleaner SDK output with proper enum types instead of anonymous strings. 
+// +// Run: go run build/generate-openapi.go +// Output: templates/swagger/v1_openapi3_json.tmpl + +//go:build ignore + +package main + +import ( + "encoding/json" + "fmt" + "log" + "os" + "regexp" + "sort" + "strings" + + "code.gitea.io/gitea/build/openapi3gen" + + "github.com/getkin/kin-openapi/openapi3" +) + +const ( + swaggerSpecPath = "templates/swagger/v1_json.tmpl" + openapi3OutPath = "templates/swagger/v1_openapi3_json.tmpl" + + appSubUrlVar = "{{.SwaggerAppSubUrl}}" + appVerVar = "{{.SwaggerAppVer}}" + + appSubUrlPlaceholder = "GITEA_APP_SUB_URL_PLACEHOLDER" + appVerPlaceholder = "0.0.0-gitea-placeholder" +) + +var ( + appSubUrlRe = regexp.MustCompile(regexp.QuoteMeta(appSubUrlVar)) + appVerRe = regexp.MustCompile(regexp.QuoteMeta(appVerVar)) + + enumScanDirs = []string{ + "modules/structs", + "modules/commitstatus", + } +) + +func main() { + astEnumMap, err := openapi3gen.ScanSwaggerEnumTypes(enumScanDirs) + if err != nil { + log.Fatalf("scanning swagger:enum annotations: %v", err) + } + names := make([]string, 0, len(astEnumMap)) + for _, n := range astEnumMap { + names = append(names, n) + } + sort.Strings(names) + fmt.Fprintf(os.Stderr, "discovered %d swagger:enum types: %s\n", len(names), strings.Join(names, ", ")) + + data, err := os.ReadFile(swaggerSpecPath) + if err != nil { + log.Fatalf("reading swagger spec: %v", err) + } + + cleaned := appSubUrlRe.ReplaceAll(data, []byte(appSubUrlPlaceholder)) + cleaned = appVerRe.ReplaceAll(cleaned, []byte(appVerPlaceholder)) + + oas3, err := openapi3gen.Convert(cleaned, astEnumMap) + if err != nil { + log.Fatalf("converting to openapi 3.0: %v", err) + } + + oas3.Servers = openapi3.Servers{ + {URL: appSubUrlPlaceholder + "/api/v1"}, + } + + out, err := json.MarshalIndent(oas3, "", " ") + if err != nil { + log.Fatalf("marshaling openapi 3.0: %v", err) + } + + result := strings.ReplaceAll(string(out), appSubUrlPlaceholder, appSubUrlVar) + result = strings.ReplaceAll(result, appVerPlaceholder, appVerVar) + result 
= strings.TrimSpace(result) + + if err := os.WriteFile(openapi3OutPath, []byte(result), 0o644); err != nil { + log.Fatalf("writing openapi 3.0 spec: %v", err) + } + + fmt.Printf("Generated %s\n", openapi3OutPath) +} diff --git a/build/openapi3gen/convert.go b/build/openapi3gen/convert.go new file mode 100644 index 0000000000..04587b3303 --- /dev/null +++ b/build/openapi3gen/convert.go @@ -0,0 +1,281 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package openapi3gen + +import ( + "fmt" + "regexp" + "strings" + + "code.gitea.io/gitea/modules/json" + + "github.com/getkin/kin-openapi/openapi2" + "github.com/getkin/kin-openapi/openapi2conv" + "github.com/getkin/kin-openapi/openapi3" +) + +// rxDeprecated matches "deprecated" as a word at the start of a description +// or preceded by whitespace/punctuation that indicates a leading marker (e.g. +// "Deprecated: true", "deprecated (use X instead)"). Rejects negated phrases +// like "not deprecated" or "previously deprecated, now supported". +var rxDeprecated = regexp.MustCompile(`(?i)(?:^|[\n.;])\s*deprecated\b`) + +// Convert parses a Swagger 2.0 spec and returns an OAS3 spec, applying +// Gitea-specific post-processing: file-schema fixups, URI formats, +// deprecated flags, and shared-enum extraction. +// +// astEnumMap is a value-set-key → Go-type-name map (built by +// ScanSwaggerEnumTypes). If a shared enum in the spec has no entry in the +// map, Convert returns an error — no fallback naming. 
+func Convert(swaggerJSON []byte, astEnumMap map[string]string) (*openapi3.T, error) { + var swagger2 openapi2.T + if err := json.Unmarshal(swaggerJSON, &swagger2); err != nil { + return nil, fmt.Errorf("parsing swagger 2.0: %w", err) + } + + oas3, err := openapi2conv.ToV3(&swagger2) + if err != nil { + return nil, fmt.Errorf("converting to openapi 3.0: %w", err) + } + + fixFileSchemas(oas3) + addURIFormats(oas3) + addDeprecatedFlags(oas3) + if err := extractSharedEnums(oas3, astEnumMap); err != nil { + return nil, err + } + return oas3, nil +} + +func fixFileSchemas(doc *openapi3.T) { + for _, pathItem := range doc.Paths.Map() { + for _, op := range []*openapi3.Operation{ + pathItem.Get, pathItem.Post, pathItem.Put, pathItem.Patch, + pathItem.Delete, pathItem.Head, pathItem.Options, pathItem.Trace, + } { + if op == nil { + continue + } + for _, resp := range op.Responses.Map() { + if resp.Value == nil { + continue + } + for _, mediaType := range resp.Value.Content { + fixSchema(mediaType.Schema) + } + } + if op.RequestBody != nil && op.RequestBody.Value != nil { + for _, mediaType := range op.RequestBody.Value.Content { + fixSchema(mediaType.Schema) + } + } + } + } +} + +// fixSchema rewrites any "type: file" schemas to the OAS3 equivalent +// (type: string, format: binary), recursing into Properties, Items, and +// AllOf/OneOf/AnyOf/Not branches. $ref nodes are skipped so shared schemas +// are rewritten exactly once when visited through their declaration. 
+func fixSchema(ref *openapi3.SchemaRef) { + if ref == nil || ref.Value == nil || ref.Ref != "" { + return + } + s := ref.Value + if s.Type.Is("file") { + s.Type = &openapi3.Types{"string"} + s.Format = "binary" + } + for _, p := range s.Properties { + fixSchema(p) + } + fixSchema(s.Items) + for _, sub := range s.AllOf { + fixSchema(sub) + } + for _, sub := range s.OneOf { + fixSchema(sub) + } + for _, sub := range s.AnyOf { + fixSchema(sub) + } + fixSchema(s.Not) +} + +// addURIFormats sets format: uri on string properties whose names indicate +// they hold URLs. This information is lost in Swagger 2.0 but is valuable +// for code generators. +func addURIFormats(doc *openapi3.T) { + if doc.Components == nil { + return + } + for _, schemaRef := range doc.Components.Schemas { + if schemaRef.Value == nil { + continue + } + for propName, propRef := range schemaRef.Value.Properties { + if propRef == nil || propRef.Value == nil || propRef.Ref != "" { + continue + } + prop := propRef.Value + if !prop.Type.Is("string") || prop.Format != "" { + continue + } + if isURLProperty(propName) { + prop.Format = "uri" + } + } + } +} + +func isURLProperty(name string) bool { + if strings.HasSuffix(name, "_url") { + return true + } + switch name { + case "url", "html_url", "clone_url": + return true + } + return false +} + +// addDeprecatedFlags sets deprecated: true on schema properties whose +// description starts with a "deprecated" marker (e.g. "Deprecated: true" +// or "deprecated (use X instead)"). Does not match negated phrases. 
+func addDeprecatedFlags(doc *openapi3.T) { + if doc.Components == nil { + return + } + for _, schemaRef := range doc.Components.Schemas { + if schemaRef.Value == nil { + continue + } + for _, propRef := range schemaRef.Value.Properties { + if propRef == nil || propRef.Value == nil || propRef.Ref != "" { + continue + } + if rxDeprecated.MatchString(propRef.Value.Description) { + propRef.Value.Deprecated = true + } + } + } +} + +type enumUsage struct { + schemaName string + propName string + propRef *openapi3.SchemaRef + inItems bool +} + +// extractSharedEnums finds identical enum arrays used by multiple schema +// properties, creates a standalone named schema for each, and replaces +// the inline enums with $ref pointers. +// +// If the derived enum name collides with an existing component schema, or +// no // swagger:enum annotation matches the value set, generation aborts +// with an actionable error — there are no silent fallbacks. +func extractSharedEnums(doc *openapi3.T, astEnumMap map[string]string) error { + if doc.Components == nil { + return nil + } + + enumGroups := map[string][]enumUsage{} + + for schemaName, schemaRef := range doc.Components.Schemas { + if schemaRef.Value == nil { + continue + } + for propName, propRef := range schemaRef.Value.Properties { + if propRef == nil || propRef.Value == nil || propRef.Ref != "" { + continue + } + if len(propRef.Value.Enum) > 1 && propRef.Value.Type.Is("string") { + key := EnumKey(propRef.Value.Enum) + enumGroups[key] = append(enumGroups[key], enumUsage{schemaName, propName, propRef, false}) + } + if propRef.Value.Type.Is("array") && propRef.Value.Items != nil && + propRef.Value.Items.Value != nil && propRef.Value.Items.Ref == "" && + len(propRef.Value.Items.Value.Enum) > 1 && propRef.Value.Items.Value.Type.Is("string") { + key := EnumKey(propRef.Value.Items.Value.Enum) + enumGroups[key] = append(enumGroups[key], enumUsage{schemaName, propName, propRef, true}) + } + } + } + + for key, usages := range enumGroups 
{ + if len(usages) < 2 { + continue + } + + enumName, err := deriveEnumName(key, usages, astEnumMap) + if err != nil { + return err + } + if _, exists := doc.Components.Schemas[enumName]; exists { + return fmt.Errorf("enum name collision: %s already exists as a component schema", enumName) + } + + var enumValues []any + if usages[0].inItems { + enumValues = usages[0].propRef.Value.Items.Value.Enum + } else { + enumValues = usages[0].propRef.Value.Enum + } + + doc.Components.Schemas[enumName] = &openapi3.SchemaRef{ + Value: &openapi3.Schema{ + Type: &openapi3.Types{"string"}, + Enum: enumValues, + }, + } + + ref := "#/components/schemas/" + enumName + + for _, usage := range usages { + if usage.inItems { + usage.propRef.Value.Items = &openapi3.SchemaRef{Ref: ref} + } else { + old := usage.propRef.Value + if old.Description == "" && !old.Deprecated && old.Format == "" { + usage.propRef.Ref = ref + usage.propRef.Value = nil + } else { + usage.propRef.Value = &openapi3.Schema{ + AllOf: openapi3.SchemaRefs{ + {Ref: ref}, + }, + Description: old.Description, + Deprecated: old.Deprecated, + Format: old.Format, + } + } + } + } + } + return nil +} + +// deriveEnumName looks up a shared enum's Go type name from astEnumMap by +// value-set key. If no annotation matches, returns an error identifying the +// offending properties and the fix. 
+func deriveEnumName(key string, usages []enumUsage, astEnumMap map[string]string) (string, error) { + if name, ok := astEnumMap[key]; ok { + return name, nil + } + + props := map[string]bool{} + for _, u := range usages { + props[fmt.Sprintf("%s.%s", u.schemaName, u.propName)] = true + } + propList := make([]string, 0, len(props)) + for p := range props { + propList = append(propList, p) + } + return "", fmt.Errorf( + "no swagger:enum annotation matches value-set %q used by %d properties: %v; "+ + "fix by adding a named string type with // swagger:enum to modules/structs or modules/commitstatus", + key, len(usages), propList, + ) +} diff --git a/build/openapi3gen/convert_test.go b/build/openapi3gen/convert_test.go new file mode 100644 index 0000000000..a9a715e6c2 --- /dev/null +++ b/build/openapi3gen/convert_test.go @@ -0,0 +1,170 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package openapi3gen + +import ( + "strings" + "testing" + + "github.com/getkin/kin-openapi/openapi3" +) + +func TestDeriveEnumName_hit(t *testing.T) { + key := EnumKey([]any{"red", "green", "blue"}) + astMap := map[string]string{key: "Color"} + usages := []enumUsage{{schemaName: "Paint", propName: "color"}} + got, err := deriveEnumName(key, usages, astMap) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if got != "Color" { + t.Fatalf("got %q, want %q", got, "Color") + } +} + +func TestDeriveEnumName_miss(t *testing.T) { + key := EnumKey([]any{"x", "y"}) + usages := []enumUsage{{schemaName: "Thing", propName: "kind"}} + _, err := deriveEnumName(key, usages, map[string]string{}) + if err == nil { + t.Fatal("expected miss error, got nil") + } + msg := err.Error() + if !strings.Contains(msg, "Thing.kind") { + t.Fatalf("error %q should list the missing usage", msg) + } + if !strings.Contains(msg, "swagger:enum") { + t.Fatalf("error %q should hint at the fix", msg) + } +} + +func TestExtractSharedEnums_usesASTMap(t *testing.T) { + 
doc := &openapi3.T{ + Components: &openapi3.Components{ + Schemas: openapi3.Schemas{ + "A": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"object"}, + Properties: openapi3.Schemas{ + "color": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"string"}, + Enum: []any{"red", "green", "blue"}, + }}, + }, + }}, + "B": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"object"}, + Properties: openapi3.Schemas{ + "color": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"string"}, + Enum: []any{"red", "green", "blue"}, + }}, + }, + }}, + }, + }, + } + astMap := map[string]string{EnumKey([]any{"red", "green", "blue"}): "Color"} + if err := extractSharedEnums(doc, astMap); err != nil { + t.Fatalf("extractSharedEnums: %v", err) + } + if _, ok := doc.Components.Schemas["Color"]; !ok { + t.Fatalf("expected Color schema to be extracted") + } +} + +func TestFixFileSchemas_recursesIntoNested(t *testing.T) { + fileType := func() *openapi3.SchemaRef { + return &openapi3.SchemaRef{Value: &openapi3.Schema{Type: &openapi3.Types{"file"}}} + } + doc := &openapi3.T{ + Paths: openapi3.NewPaths(), + } + doc.Paths.Set("/upload", &openapi3.PathItem{ + Post: &openapi3.Operation{ + RequestBody: &openapi3.RequestBodyRef{ + Value: &openapi3.RequestBody{ + Content: openapi3.Content{ + "multipart/form-data": { + Schema: &openapi3.SchemaRef{Value: &openapi3.Schema{ + Type: &openapi3.Types{"object"}, + Properties: openapi3.Schemas{ + "attachment": fileType(), + "items": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"array"}, + Items: fileType(), + }}, + "alt": {Value: &openapi3.Schema{ + AllOf: openapi3.SchemaRefs{fileType()}, + }}, + "one": {Value: &openapi3.Schema{ + OneOf: openapi3.SchemaRefs{fileType()}, + }}, + "any": {Value: &openapi3.Schema{ + AnyOf: openapi3.SchemaRefs{fileType()}, + }}, + "not": {Value: &openapi3.Schema{ + Not: fileType(), + }}, + }, + }}, + }, + }, + }, + }, + Responses: openapi3.NewResponses(), + }, + }) + + fixFileSchemas(doc) + + props := 
doc.Paths.Value("/upload").Post.RequestBody.Value.Content["multipart/form-data"].Schema.Value.Properties + if !props["attachment"].Value.Type.Is("string") || props["attachment"].Value.Format != "binary" { + t.Errorf("nested property not fixed: %+v", props["attachment"].Value) + } + if !props["items"].Value.Items.Value.Type.Is("string") || props["items"].Value.Items.Value.Format != "binary" { + t.Errorf("array items not fixed: %+v", props["items"].Value.Items.Value) + } + if !props["alt"].Value.AllOf[0].Value.Type.Is("string") || props["alt"].Value.AllOf[0].Value.Format != "binary" { + t.Errorf("allOf branch not fixed: %+v", props["alt"].Value.AllOf[0].Value) + } + if !props["one"].Value.OneOf[0].Value.Type.Is("string") || props["one"].Value.OneOf[0].Value.Format != "binary" { + t.Errorf("oneOf branch not fixed: %+v", props["one"].Value.OneOf[0].Value) + } + if !props["any"].Value.AnyOf[0].Value.Type.Is("string") || props["any"].Value.AnyOf[0].Value.Format != "binary" { + t.Errorf("anyOf branch not fixed: %+v", props["any"].Value.AnyOf[0].Value) + } + if !props["not"].Value.Not.Value.Type.Is("string") || props["not"].Value.Not.Value.Format != "binary" { + t.Errorf("not branch not fixed: %+v", props["not"].Value.Not.Value) + } +} + +func TestExtractSharedEnums_missReturnsError(t *testing.T) { + doc := &openapi3.T{ + Components: &openapi3.Components{ + Schemas: openapi3.Schemas{ + "A": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"object"}, + Properties: openapi3.Schemas{ + "color": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"string"}, + Enum: []any{"red", "green"}, + }}, + }, + }}, + "B": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"object"}, + Properties: openapi3.Schemas{ + "color": {Value: &openapi3.Schema{ + Type: &openapi3.Types{"string"}, + Enum: []any{"red", "green"}, + }}, + }, + }}, + }, + }, + } + if err := extractSharedEnums(doc, map[string]string{}); err == nil { + t.Fatal("expected miss error") + } +} diff --git 
a/build/openapi3gen/enumscan.go b/build/openapi3gen/enumscan.go new file mode 100644 index 0000000000..dd11620549 --- /dev/null +++ b/build/openapi3gen/enumscan.go @@ -0,0 +1,188 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +// Package openapi3gen converts Gitea's Swagger 2.0 spec to an OpenAPI 3.0 +// spec. It discovers Go enum type names by scanning swagger:enum annotations +// in the source tree, then names extracted shared-enum schemas accordingly. +package openapi3gen + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "os" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" +) + +// EnumKey returns a canonical key for a set of enum values: values are +// stringified, sorted, and joined with "|". Used to match enum value sets +// across spec properties and scanned Go type declarations. +func EnumKey(values []any) string { + strs := make([]string, len(values)) + for i, v := range values { + strs[i] = fmt.Sprintf("%v", v) + } + sort.Strings(strs) + return strings.Join(strs, "|") +} + +var rxSwaggerEnum = regexp.MustCompile(`swagger:enum\s+(\w+)`) + +// ScanSwaggerEnumTypes walks .go files under each dir and returns a map from +// a canonical value-set key (see EnumKey) to the Go type name declared with +// // swagger:enum TypeName. +// +// Returns an error on parse failure, on an annotation for a type whose +// constants can't be extracted, or on value-set collisions between two +// different enum types. 
+func ScanSwaggerEnumTypes(dirs []string) (map[string]string, error) { + fset := token.NewFileSet() + parsed := []*ast.File{} + + for _, dir := range dirs { + entries, err := os.ReadDir(dir) + if err != nil { + return nil, fmt.Errorf("reading %s: %w", dir, err) + } + for _, entry := range entries { + if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") { + continue + } + if strings.HasSuffix(entry.Name(), "_test.go") { + continue + } + path := filepath.Join(dir, entry.Name()) + file, err := parser.ParseFile(fset, path, nil, parser.ParseComments) + if err != nil { + return nil, fmt.Errorf("%s: %w", path, err) + } + parsed = append(parsed, file) + } + } + + enumTypes := map[string]string{} // typeName → "" (presence marker) + enumValues := map[string][]any{} // typeName → values + + // Pass 1: collect every // swagger:enum TypeName declaration. + for _, file := range parsed { + for _, decl := range file.Decls { + gd, ok := decl.(*ast.GenDecl) + if !ok || gd.Tok != token.TYPE { + continue + } + if err := collectEnumType(gd, enumTypes); err != nil { + return nil, fmt.Errorf("%s: %w", fset.Position(gd.Pos()).Filename, err) + } + } + } + + // Pass 2: collect const values; now every annotated type is visible. 
+ for _, file := range parsed { + for _, decl := range file.Decls { + gd, ok := decl.(*ast.GenDecl) + if !ok || gd.Tok != token.CONST { + continue + } + collectEnumValues(gd, enumTypes, enumValues) + } + } + + result := map[string]string{} + for typeName := range enumTypes { + values, ok := enumValues[typeName] + if !ok || len(values) == 0 { + return nil, fmt.Errorf("swagger:enum %s has no const block with typed string values", typeName) + } + key := EnumKey(values) + if existing, ok := result[key]; ok && existing != typeName { + return nil, fmt.Errorf("swagger:enum value-set collision: %s and %s both use %q", existing, typeName, key) + } + result[key] = typeName + } + return result, nil +} + +// collectEnumType scans a `type` GenDecl for // swagger:enum annotations, +// handling both the lone form (`// swagger:enum Foo\n type Foo string`) +// where the comment group is attached to the GenDecl, and the grouped form: +// +// type ( +// // swagger:enum Foo +// Foo string +// ) +// +// where the comment group is attached to each TypeSpec. Caveat: Go's parser +// only attaches a CommentGroup when it is immediately adjacent to the decl. +// A blank line (not a `//` continuation line) between the comment and the +// declaration drops the Doc, so annotations MUST sit directly above their +// type. All current annotated files obey this — the rule is noted here so +// a future edit that inserts a blank line fails fast rather than silently. 
+func collectEnumType(gd *ast.GenDecl, enumTypes map[string]string) error { + if err := registerEnumAnnotation(gd.Doc, gd.Specs, enumTypes); err != nil { + return err + } + for _, spec := range gd.Specs { + ts, ok := spec.(*ast.TypeSpec) + if !ok || ts.Doc == nil { + continue + } + if err := registerEnumAnnotation(ts.Doc, []ast.Spec{ts}, enumTypes); err != nil { + return err + } + } + return nil +} + +func registerEnumAnnotation(doc *ast.CommentGroup, specs []ast.Spec, enumTypes map[string]string) error { + if doc == nil { + return nil + } + matches := rxSwaggerEnum.FindStringSubmatch(doc.Text()) + if len(matches) < 2 { + return nil + } + annotated := matches[1] + for _, spec := range specs { + ts, ok := spec.(*ast.TypeSpec) + if !ok { + continue + } + if ts.Name.Name == annotated { + enumTypes[annotated] = "" + return nil + } + } + return fmt.Errorf("swagger:enum %s: no type declaration with that name in the same decl group; check for a typo", annotated) +} + +func collectEnumValues(gd *ast.GenDecl, enumTypes map[string]string, enumValues map[string][]any) { + for _, spec := range gd.Specs { + vs, ok := spec.(*ast.ValueSpec) + if !ok || vs.Type == nil { + continue + } + ident, ok := vs.Type.(*ast.Ident) + if !ok { + continue + } + if _, isEnum := enumTypes[ident.Name]; !isEnum { + continue + } + for _, val := range vs.Values { + lit, ok := val.(*ast.BasicLit) + if !ok || lit.Kind != token.STRING { + continue + } + unquoted, err := strconv.Unquote(lit.Value) + if err != nil { + continue + } + enumValues[ident.Name] = append(enumValues[ident.Name], unquoted) + } + } +} diff --git a/build/openapi3gen/enumscan_test.go b/build/openapi3gen/enumscan_test.go new file mode 100644 index 0000000000..2e5fe99db0 --- /dev/null +++ b/build/openapi3gen/enumscan_test.go @@ -0,0 +1,239 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package openapi3gen + +import ( + "os" + "path/filepath" + "strings" + "testing" +) + +func TestEnumKey_sortsAndJoins(t *testing.T) { + key := EnumKey([]any{"b", "a", "c"}) + if key != "a|b|c" { + t.Fatalf("EnumKey = %q, want %q", key, "a|b|c") + } +} + +func TestEnumKey_handlesNonStringValues(t *testing.T) { + key := EnumKey([]any{2, 1, 3}) + if key != "1|2|3" { + t.Fatalf("EnumKey = %q, want %q", key, "1|2|3") + } +} + +func TestScanSwaggerEnumTypes_basic(t *testing.T) { + dir := t.TempDir() + src := `package fixture + +// Color is a primary color. +// swagger:enum Color +type Color string + +const ( + ColorRed Color = "red" + ColorGreen Color = "green" + ColorBlue Color = "blue" +) +` + if err := os.WriteFile(filepath.Join(dir, "color.go"), []byte(src), 0o644); err != nil { + t.Fatal(err) + } + + got, err := ScanSwaggerEnumTypes([]string{dir}) + if err != nil { + t.Fatalf("ScanSwaggerEnumTypes: %v", err) + } + wantKey := EnumKey([]any{"red", "green", "blue"}) + if got[wantKey] != "Color" { + t.Fatalf("map[%q] = %q, want %q", wantKey, got[wantKey], "Color") + } +} + +func TestScanSwaggerEnumTypes_orphanAnnotation(t *testing.T) { + dir := t.TempDir() + src := `package fixture + +// swagger:enum Sttype +type StateType string + +const ( + StateOpen StateType = "open" +) +` + if err := os.WriteFile(filepath.Join(dir, "typo.go"), []byte(src), 0o644); err != nil { + t.Fatal(err) + } + + _, err := ScanSwaggerEnumTypes([]string{dir}) + if err == nil { + t.Fatal("expected error for annotation referencing a non-matching type name") + } + if !strings.Contains(err.Error(), "Sttype") { + t.Fatalf("error %q should mention the typo'd name Sttype", err.Error()) + } +} + +func TestScanSwaggerEnumTypes_collision(t *testing.T) { + dir := t.TempDir() + src := `package fixture + +// swagger:enum Alpha +type Alpha string +const ( + AlphaX Alpha = "x" + AlphaY Alpha = "y" +) + +// swagger:enum Beta +type Beta string +const ( + BetaX Beta = "x" + 
BetaY Beta = "y" +) +` + if err := os.WriteFile(filepath.Join(dir, "dup.go"), []byte(src), 0o644); err != nil { + t.Fatal(err) + } + + _, err := ScanSwaggerEnumTypes([]string{dir}) + if err == nil { + t.Fatal("expected collision error, got nil") + } + msg := err.Error() + if !strings.Contains(msg, "Alpha") || !strings.Contains(msg, "Beta") { + t.Fatalf("error %q should mention both Alpha and Beta", msg) + } +} + +func TestScanSwaggerEnumTypes_parseFailure(t *testing.T) { + dir := t.TempDir() + if err := os.WriteFile(filepath.Join(dir, "bad.go"), []byte("package fixture\nfunc Foo() {"), 0o644); err != nil { + t.Fatal(err) + } + + _, err := ScanSwaggerEnumTypes([]string{dir}) + if err == nil { + t.Fatal("expected parse error, got nil") + } +} + +func TestScanSwaggerEnumTypes_annotationWithoutConsts(t *testing.T) { + dir := t.TempDir() + src := `package fixture + +// swagger:enum Lonely +type Lonely string +` + if err := os.WriteFile(filepath.Join(dir, "lonely.go"), []byte(src), 0o644); err != nil { + t.Fatal(err) + } + + _, err := ScanSwaggerEnumTypes([]string{dir}) + if err == nil { + t.Fatal("expected error for annotation without consts") + } + if !strings.Contains(err.Error(), "Lonely") { + t.Fatalf("error %q should mention Lonely", err.Error()) + } +} + +func TestScanSwaggerEnumTypes_constsAndTypeInDifferentFiles(t *testing.T) { + dir := t.TempDir() + // Name ordering: `a_consts.go` < `b_type.go`, so readdir returns consts first. + // Old single-pass scanner would miss the values; two-pass must not. 
+ constsSrc := `package fixture + +const ( + HueA Hue = "a" + HueB Hue = "b" +) +` + typeSrc := `package fixture + +// swagger:enum Hue +type Hue string +` + if err := os.WriteFile(filepath.Join(dir, "a_consts.go"), []byte(constsSrc), 0o644); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(filepath.Join(dir, "b_type.go"), []byte(typeSrc), 0o644); err != nil { + t.Fatal(err) + } + + got, err := ScanSwaggerEnumTypes([]string{dir}) + if err != nil { + t.Fatalf("ScanSwaggerEnumTypes: %v", err) + } + wantKey := EnumKey([]any{"a", "b"}) + if got[wantKey] != "Hue" { + t.Fatalf("map[%q] = %q, want %q", wantKey, got[wantKey], "Hue") + } +} + +func TestScanSwaggerEnumTypes_constsBeforeType(t *testing.T) { + dir := t.TempDir() + src := `package fixture + +const ( + ShadeDark Shade = "dark" + ShadeLight Shade = "light" +) + +// swagger:enum Shade +type Shade string +` + if err := os.WriteFile(filepath.Join(dir, "shade.go"), []byte(src), 0o644); err != nil { + t.Fatal(err) + } + + got, err := ScanSwaggerEnumTypes([]string{dir}) + if err != nil { + t.Fatalf("ScanSwaggerEnumTypes: %v", err) + } + wantKey := EnumKey([]any{"dark", "light"}) + if got[wantKey] != "Shade" { + t.Fatalf("map[%q] = %q, want %q", wantKey, got[wantKey], "Shade") + } +} + +func TestScanSwaggerEnumTypes_groupedTypeDecl(t *testing.T) { + dir := t.TempDir() + src := `package fixture + +type ( + // swagger:enum Color + Color string + // swagger:enum Shade + Shade string +) + +const ( + ColorRed Color = "red" + ColorBlue Color = "blue" +) + +const ( + ShadeDark Shade = "dark" + ShadeLight Shade = "light" +) +` + if err := os.WriteFile(filepath.Join(dir, "grouped.go"), []byte(src), 0o644); err != nil { + t.Fatal(err) + } + + got, err := ScanSwaggerEnumTypes([]string{dir}) + if err != nil { + t.Fatalf("ScanSwaggerEnumTypes: %v", err) + } + colorKey := EnumKey([]any{"red", "blue"}) + shadeKey := EnumKey([]any{"dark", "light"}) + if got[colorKey] != "Color" { + t.Fatalf("Color: map[%q] = %q, want %q", 
colorKey, got[colorKey], "Color") + } + if got[shadeKey] != "Shade" { + t.Fatalf("Shade: map[%q] = %q, want %q", shadeKey, got[shadeKey], "Shade") + } +} diff --git a/cmd/dump.go b/cmd/dump.go index 49f4d9e894..40b73f69aa 100644 --- a/cmd/dump.go +++ b/cmd/dump.go @@ -203,8 +203,8 @@ func runDump(ctx context.Context, cmd *cli.Command) error { } }() - targetDBType := cmd.String("database") - if len(targetDBType) > 0 && targetDBType != setting.Database.Type.String() { + targetDBType := setting.DatabaseType(cmd.String("database")) + if targetDBType != "" && targetDBType != setting.Database.Type { log.Info("Dumping database %s => %s...", setting.Database.Type, targetDBType) } else { log.Info("Dumping database...") diff --git a/cmd/helper.go b/cmd/helper.go index 9d70b05701..ca4cddb49d 100644 --- a/cmd/helper.go +++ b/cmd/helper.go @@ -134,7 +134,7 @@ func PrepareConsoleLoggerLevel(defaultLevel log.Level) func(context.Context, *cl if globalBool(c, "debug") || globalBool(c, "verbose") { level = log.TRACE } - log.SetConsoleLogger(log.DEFAULT, "console-default", level) + log.SetupStderrLogger(log.DEFAULT, "console-stderr", level) return ctx, nil } } diff --git a/custom/conf/app.example.ini b/custom/conf/app.example.ini index 97af5fa5fb..dd62cf8e83 100644 --- a/custom/conf/app.example.ini +++ b/custom/conf/app.example.ini @@ -1169,6 +1169,11 @@ LEVEL = Info ;; Retarget child pull requests to the parent pull request branch target on merge of parent pull request. It only works on merged PRs where the head and base branch target the same repo. ;RETARGET_CHILDREN_ON_MERGE = true ;; +;; Default source for the pull request title when opening a new PR. +;; "first-commit" uses the oldest commit's summary. +;; "auto" uses commit's summary if the PR only has one commit, normalizes the branch name if multiple commits. 
+;DEFAULT_TITLE_SOURCE = first-commit +;; ;; Delay mergeable check until page view or API access, for pull requests that have not been updated in the specified days when their base branches get updated. ;; Use "-1" to always check all pull requests (old behavior). Use "0" to always delay the checks. ;DELAY_CHECK_FOR_INACTIVE_DAYS = 7 @@ -1519,7 +1524,7 @@ LEVEL = Info ;; Issue Indexer settings ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; -;; Issue indexer type, currently support: bleve, db, elasticsearch or meilisearch default is bleve +;; Issue indexer type, currently support: bleve, db, elasticsearch (also compatible with OpenSearch) or meilisearch default is bleve ;ISSUE_INDEXER_TYPE = bleve ;; ;; Issue indexer storage path, available when ISSUE_INDEXER_TYPE is bleve @@ -1546,7 +1551,7 @@ LEVEL = Info ;; If empty then it defaults to `sources` only, as if you'd like to disable fully please see REPO_INDEXER_ENABLED. ;REPO_INDEXER_REPO_TYPES = sources,forks,mirrors,templates ;; -;; Code search engine type, could be `bleve` or `elasticsearch`. +;; Code search engine type, could be `bleve` or `elasticsearch` (also compatible with OpenSearch). ;REPO_INDEXER_TYPE = bleve ;; ;; Index file used for code search. 
available when `REPO_INDEXER_TYPE` is bleve diff --git a/go.mod b/go.mod index d7577bfbf0..156abf4afb 100644 --- a/go.mod +++ b/go.mod @@ -46,6 +46,7 @@ require ( github.com/ethantkoenig/rupture v1.0.1 github.com/felixge/fgprof v0.9.5 github.com/fsnotify/fsnotify v1.9.0 + github.com/getkin/kin-openapi v0.134.0 github.com/gliderlabs/ssh v0.3.8 github.com/go-chi/chi/v5 v5.2.5 github.com/go-chi/cors v1.2.2 @@ -86,7 +87,6 @@ require ( github.com/msteinert/pam/v2 v2.1.0 github.com/nektos/act v0.2.63 github.com/niklasfasching/go-org v1.9.1 - github.com/olivere/elastic/v7 v7.0.32 github.com/opencontainers/go-digest v1.0.0 github.com/opencontainers/image-spec v1.1.1 github.com/pquerna/otp v1.5.0 @@ -192,6 +192,8 @@ require ( github.com/go-fed/httpsig v1.1.1-0.20201223112313-55836744818e // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-ini/ini v1.67.0 // indirect + github.com/go-openapi/jsonpointer v0.21.0 // indirect + github.com/go-openapi/swag v0.23.0 // indirect github.com/go-viper/mapstructure/v2 v2.5.0 // indirect github.com/go-webauthn/x v0.2.3 // indirect github.com/goccy/go-json v0.10.6 // indirect @@ -219,7 +221,7 @@ require ( github.com/klauspost/crc32 v1.3.0 // indirect github.com/klauspost/pgzip v1.2.6 // indirect github.com/libdns/libdns v1.1.1 // indirect - github.com/mailru/easyjson v0.9.2 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/markbates/going v1.0.3 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-runewidth v0.0.21 // indirect @@ -232,15 +234,19 @@ require ( github.com/minio/minlz v1.1.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 // indirect github.com/mschoch/smat v0.2.0 // indirect github.com/munnerz/goautoneg 
v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/nwaples/rardecode/v2 v2.2.2 // indirect + github.com/oasdiff/yaml v0.0.0-20260313112342-a3ea61cb4d4c // indirect + github.com/oasdiff/yaml3 v0.0.0-20260224194419-61cd415a242b // indirect github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 // indirect github.com/olekukonko/errors v1.2.0 // indirect github.com/olekukonko/ll v0.1.8 // indirect github.com/olekukonko/tablewriter v1.1.4 // indirect github.com/onsi/ginkgo v1.16.5 // indirect + github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/philhofer/fwd v1.2.0 // indirect github.com/pierrec/lz4/v4 v4.1.26 // indirect github.com/pjbgf/sha1cd v0.5.0 // indirect @@ -260,6 +266,7 @@ require ( github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect github.com/tinylib/msgp v1.6.4 // indirect github.com/unknwon/com v1.0.1 // indirect + github.com/woodsbury/decimal128 v1.3.0 // indirect github.com/x448/float16 v0.8.4 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect diff --git a/go.sum b/go.sum index 0b65e6305f..b8ff05bb0a 100644 --- a/go.sum +++ b/go.sum @@ -267,14 +267,14 @@ github.com/fatih/color v1.19.0 h1:Zp3PiM21/9Ld6FzSKyL5c/BULoe/ONr9KlbYVOfG8+w= github.com/fatih/color v1.19.0/go.mod h1:zNk67I0ZUT1bEGsSGyCZYZNrHuTkJJB+r6Q9VuMi0LE= github.com/felixge/fgprof v0.9.5 h1:8+vR6yu2vvSKn08urWyEuxx75NWPEvybbkBirEpsbVY= github.com/felixge/fgprof v0.9.5/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM= -github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= -github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= 
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/fxamacker/cbor/v2 v2.9.1 h1:2rWm8B193Ll4VdjsJY28jxs70IdDsHRWgQYAI80+rMQ= github.com/fxamacker/cbor/v2 v2.9.1/go.mod h1:vM4b+DJCtHn+zz7h3FFp/hDAI9WNWCsZj23V5ytsSxQ= +github.com/getkin/kin-openapi v0.134.0 h1:/L5+1+kfe6dXh8Ot/wqiTgUkjOIEJiC0bbYVziHB8rU= +github.com/getkin/kin-openapi v0.134.0/go.mod h1:wK6ZLG/VgoETO9pcLJ/VmAtIcl/DNlMayNTb716EUxE= github.com/git-lfs/pktline v0.0.0-20230103162542-ca444d533ef1 h1:mtDjlmloH7ytdblogrMz1/8Hqua1y8B4ID+bh3rvod0= github.com/git-lfs/pktline v0.0.0-20230103162542-ca444d533ef1/go.mod h1:fenKRzpXDjNpsIBhuhUzvjCKlDjKam0boRAenTE0Q6A= github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= @@ -310,6 +310,10 @@ github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZR github.com/go-jose/go-jose/v4 v4.1.3/go.mod h1:x4oUasVrzR7071A4TnHLGSPpNOm2a21K9Kf04k1rs08= github.com/go-ldap/ldap/v3 v3.4.13 h1:+x1nG9h+MZN7h/lUi5Q3UZ0fJ1GyDQYbPvbuH38baDQ= github.com/go-ldap/ldap/v3 v3.4.13/go.mod h1:LxsGZV6vbaK0sIvYfsv47rfh4ca0JXokCoKjZxsszv0= +github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= +github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= +github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= +github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg= github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-redis/redis/v7 v7.4.1 h1:PASvf36gyUpr2zdOUS/9Zqc80GbM+9BDyiJSJDDOrTI= @@ -501,9 +505,8 @@ github.com/lib/pq v1.12.3/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA= github.com/libdns/libdns v1.1.1 h1:wPrHrXILoSHKWJKGd0EiAVmiJbFShguILTg9leS/P/U= github.com/libdns/libdns v1.1.1/go.mod 
h1:4Bj9+5CQiNMVGf87wjX4CY3HQJypUHRuLvlsfsZqLWQ= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/mailru/easyjson v0.9.2 h1:dX8U45hQsZpxd80nLvDGihsQ/OxlvTkVUXH2r/8cb2M= -github.com/mailru/easyjson v0.9.2/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= github.com/markbates/going v1.0.3 h1:mY45T5TvW+Xz5A6jY7lf4+NLg9D8+iuStIHyR7M8qsE= github.com/markbates/going v1.0.3/go.mod h1:fQiT6v6yQar9UD6bd/D4Z5Afbk9J6BBVBtLiyY4gp2o= github.com/markbates/goth v1.82.0 h1:8j/c34AjBSTNzO7zTsOyP5IYCQCMBTRBHAbBt/PI0bQ= @@ -547,6 +550,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 h1:j2kD3MT1z4PXCiUllUJF9mWUESr9TWKS7iEKsQ/IipM= github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450/go.mod h1:skjdDftzkFALcuGzYSklqYd8gvat6F1gZJ4YPVbkZpM= github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg= @@ -565,6 +570,10 @@ github.com/nwaples/rardecode/v2 v2.2.2/go.mod h1:7uz379lSxPe6j9nvzxUZ+n7mnJNgjsR github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail 
v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oasdiff/yaml v0.0.0-20260313112342-a3ea61cb4d4c h1:7ACFcSaQsrWtrH4WHHfUqE1C+f8r2uv8KGaW0jTNjus= +github.com/oasdiff/yaml v0.0.0-20260313112342-a3ea61cb4d4c/go.mod h1:JKox4Gszkxt57kj27u7rvi7IFoIULvCZHUsBTUmQM/s= +github.com/oasdiff/yaml3 v0.0.0-20260224194419-61cd415a242b h1:vivRhVUAa9t1q0Db4ZmezBP8pWQWnXHFokZj0AOea2g= +github.com/oasdiff/yaml3 v0.0.0-20260224194419-61cd415a242b/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o= github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 h1:zrbMGy9YXpIeTnGj4EljqMiZsIcE09mmF8XsD5AYOJc= github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6/go.mod h1:rEKTHC9roVVicUIfZK7DYrdIoM0EOr8mK1Hj5s3JjH0= github.com/olekukonko/errors v1.2.0 h1:10Zcn4GeV59t/EGqJc8fUjtFT/FuUh5bTMzZ1XwmCRo= @@ -573,8 +582,6 @@ github.com/olekukonko/ll v0.1.8 h1:ysHCJRGHYKzmBSdz9w5AySztx7lG8SQY+naTGYUbsz8= github.com/olekukonko/ll v0.1.8/go.mod h1:RPRC6UcscfFZgjo1nulkfMH5IM0QAYim0LfnMvUuozw= github.com/olekukonko/tablewriter v1.1.4 h1:ORUMI3dXbMnRlRggJX3+q7OzQFDdvgbN9nVWj1drm6I= github.com/olekukonko/tablewriter v1.1.4/go.mod h1:+kedxuyTtgoZLwif3P1Em4hARJs+mVnzKxmsCL/C5RY= -github.com/olivere/elastic/v7 v7.0.32 h1:R7CXvbu8Eq+WlsLgxmKVKPox0oOwAE/2T9Si5BnvK6E= -github.com/olivere/elastic/v7 v7.0.32/go.mod h1:c7PVmLe3Fxq77PIfY/bZmxY/TAamBhCzZ8xDOE09a9k= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= @@ -591,6 +598,8 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0= github.com/pelletier/go-toml v1.2.0/go.mod 
h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= +github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM= github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM= @@ -653,9 +662,8 @@ github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g= github.com/skeema/knownhosts v1.3.2 h1:EDL9mgf4NzwMXCTfaxSD/o/a5fxDw/xL9nkU28JjdBg= github.com/skeema/knownhosts v1.3.2/go.mod h1:bEg3iQAuw+jyiw+484wwFJoKSLwcfd7fqRy+N0QTiow= +github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304 h1:Jpy1PXuP99tXNrhbq2BaPz9B+jNAvH1JPQQpG/9GCXY= github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/assertions v1.1.1 h1:T/YLemO5Yp7KPzS+lVtu+WsHn8yoSwTfItdAd1r3cck= -github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbdyOsSH/XohnWpXOlq9NBD5sGAB2FciQMUEe8= github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= @@ -700,6 +708,8 @@ github.com/tinylib/msgp v1.6.4/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77ro github.com/tstranex/u2f v1.0.0 h1:HhJkSzDDlVSVIVt7pDJwCHQj67k7A5EeBgPmeD+pVsQ= github.com/tstranex/u2f v1.0.0/go.mod h1:eahSLaqAS0zsIEv80+vXT7WanXs7MQQDg3j3wGBSayo= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod 
h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= +github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.15 h1:9DNdB5s+SgV3bQ2ApL10xRc35ck0DuIX/isZvIk+ubY= github.com/ulikunitz/xz v0.5.15/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= @@ -712,6 +722,8 @@ github.com/urfave/cli/v3 v3.4.1/go.mod h1:FJSKtM/9AiiTOJL4fJ6TbMUkxBXn7GO9guZqoZ github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= github.com/wneessen/go-mail v0.7.2 h1:xxPnhZ6IZLSgxShebmZ6DPKh1b6OJcoHfzy7UjOkzS8= github.com/wneessen/go-mail v0.7.2/go.mod h1:+TkW6QP3EVkgTEqHtVmnAE/1MRhmzb8Y9/W3pweuS+k= +github.com/woodsbury/decimal128 v1.3.0 h1:8pffMNWIlC0O5vbyHWFZAt5yWvWcrHA+3ovIIjVWss0= +github.com/woodsbury/decimal128 v1.3.0/go.mod h1:C5UTmyTjW3JftjUFzOVhC20BEQa2a4ZKOB5I6Zjb+ds= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= diff --git a/models/actions/run.go b/models/actions/run.go index b4f4d00171..a44b0ff343 100644 --- a/models/actions/run.go +++ b/models/actions/run.go @@ -71,11 +71,11 @@ func init() { db.RegisterModel(new(ActionRunIndex)) } -func (run *ActionRun) HTMLURL() string { +func (run *ActionRun) HTMLURL(ctxOpt ...context.Context) string { if run.Repo == nil { return "" } - return fmt.Sprintf("%s/actions/runs/%d", run.Repo.HTMLURL(), run.ID) + return fmt.Sprintf("%s/actions/runs/%d", run.Repo.HTMLURL(ctxOpt...), run.ID) } func (run *ActionRun) Link() string { @@ -120,11 +120,7 @@ func (run *ActionRun) RefTooltip() string { } // LoadAttributes load Repo TriggerUser if not loaded -func (run *ActionRun) LoadAttributes(ctx 
context.Context) (err error) { - if run == nil { - return nil - } - +func (run *ActionRun) LoadAttributes(ctx context.Context) error { if err := run.LoadRepo(ctx); err != nil { return err } @@ -133,18 +129,19 @@ func (run *ActionRun) LoadAttributes(ctx context.Context) (err error) { return err } - if run.TriggerUser == nil { - run.TriggerUserID, run.TriggerUser, err = user_model.GetPossibleUserByID(ctx, run.TriggerUserID) - if err != nil { - return err - } - } + return run.LoadTriggerUser(ctx) +} - return nil +func (run *ActionRun) LoadTriggerUser(ctx context.Context) (err error) { + if run.TriggerUser != nil { + return nil + } + run.TriggerUserID, run.TriggerUser, err = user_model.GetPossibleUserByID(ctx, run.TriggerUserID) + return err } func (run *ActionRun) LoadRepo(ctx context.Context) error { - if run == nil || run.Repo != nil { + if run.Repo != nil { return nil } diff --git a/models/actions/run_attempt.go b/models/actions/run_attempt.go index 8ef2ddf00a..857247b068 100644 --- a/models/actions/run_attempt.go +++ b/models/actions/run_attempt.go @@ -51,10 +51,6 @@ func (attempt *ActionRunAttempt) Duration() time.Duration { } func (attempt *ActionRunAttempt) LoadAttributes(ctx context.Context) (err error) { - if attempt == nil { - return nil - } - if attempt.Run == nil { run, err := GetRunByRepoAndID(ctx, attempt.RepoID, attempt.RunID) if err != nil { diff --git a/models/actions/run_job.go b/models/actions/run_job.go index 0921329997..f0d41ef4b4 100644 --- a/models/actions/run_job.go +++ b/models/actions/run_job.go @@ -120,10 +120,6 @@ func (job *ActionRunJob) LoadRepo(ctx context.Context) error { // LoadAttributes load Run if not loaded func (job *ActionRunJob) LoadAttributes(ctx context.Context) error { - if job == nil { - return nil - } - if err := job.LoadRun(ctx); err != nil { return err } diff --git a/models/actions/run_job_list.go b/models/actions/run_job_list.go index e06b6beb9e..db7554593d 100644 --- a/models/actions/run_job_list.go +++ 
b/models/actions/run_job_list.go @@ -56,8 +56,10 @@ func (jobs ActionJobList) LoadRuns(ctx context.Context, withRepo bool) error { return err } for _, j := range jobs { - if j.RunID > 0 && j.Run == nil { + if j.Run == nil { j.Run = runs[j.RunID] + } + if j.Run != nil { j.Run.Repo = j.Repo } } diff --git a/models/actions/run_list.go b/models/actions/run_list.go index 82dc97f3e5..0a0840648d 100644 --- a/models/actions/run_list.go +++ b/models/actions/run_list.go @@ -7,6 +7,7 @@ import ( "context" "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/translation" @@ -17,27 +18,39 @@ import ( type RunList []*ActionRun -// GetUserIDs returns a slice of user's id -func (runs RunList) GetUserIDs() []int64 { - return container.FilterSlice(runs, func(run *ActionRun) (int64, bool) { - return run.TriggerUserID, true - }) -} - func (runs RunList) LoadTriggerUser(ctx context.Context) error { - userIDs := runs.GetUserIDs() + userIDs := container.FilterSlice(runs, func(run *ActionRun) (int64, bool) { + return run.TriggerUserID, run.TriggerUser == nil + }) users := make(map[int64]*user_model.User, len(userIDs)) if err := db.GetEngine(ctx).In("id", userIDs).Find(&users); err != nil { return err } for _, run := range runs { - if run.TriggerUserID == user_model.ActionsUserID { - run.TriggerUser = user_model.NewActionsUser() - } else { - run.TriggerUser = users[run.TriggerUserID] - if run.TriggerUser == nil { - run.TriggerUser = user_model.NewGhostUser() - } + if run.TriggerUser != nil { + continue + } + run.TriggerUser = users[run.TriggerUserID] + if run.TriggerUserID < 0 { + run.TriggerUserID, run.TriggerUser, _ = user_model.GetPossibleUserByID(ctx, run.TriggerUserID) + } else if run.TriggerUser == nil { + run.TriggerUserID, run.TriggerUser, _ = user_model.GetPossibleUserByID(ctx, user_model.GhostUserID) + } + } + return nil +} + +func (runs 
RunList) LoadRepos(ctx context.Context) error { + repoIDs := container.FilterSlice(runs, func(run *ActionRun) (int64, bool) { + return run.RepoID, run.Repo == nil + }) + repos, err := repo_model.GetRepositoriesMapByIDs(ctx, repoIDs) + if err != nil { + return err + } + for _, run := range runs { + if run.Repo == nil { + run.Repo = repos[run.RepoID] } } return nil diff --git a/models/actions/task.go b/models/actions/task.go index 016f91a7bb..7a97eadc79 100644 --- a/models/actions/task.go +++ b/models/actions/task.go @@ -125,9 +125,6 @@ func (task *ActionTask) LoadJob(ctx context.Context) error { // LoadAttributes load Job Steps if not loaded func (task *ActionTask) LoadAttributes(ctx context.Context) error { - if task == nil { - return nil - } if err := task.LoadJob(ctx); err != nil { return err } diff --git a/models/db/conn.go b/models/db/conn.go new file mode 100644 index 0000000000..de6f3cd5ec --- /dev/null +++ b/models/db/conn.go @@ -0,0 +1,173 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package db + +import ( + "errors" + "fmt" + "net" + "net/url" + "os" + "path/filepath" + "strings" + + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" +) + +type ConnOptions struct { + Type setting.DatabaseType + Host string + Database string + User string + Passwd string + Schema string + SSLMode string + + SQLitePath string + SQLiteBusyTimeout int + SQLiteJournalMode string +} + +type SQLiteConnStrOptions struct { + FilePath string + BusyTimeout int + JournalMode string +} + +func GlobalConnOptions() ConnOptions { + return ConnOptions{ + Type: setting.Database.Type, + Host: setting.Database.Host, + Database: setting.Database.Name, + User: setting.Database.User, + Passwd: setting.Database.Passwd, + Schema: setting.Database.Schema, + SSLMode: setting.Database.SSLMode, + + SQLitePath: setting.Database.Path, + SQLiteBusyTimeout: setting.Database.SQLiteBusyTimeout, + SQLiteJournalMode: setting.Database.SQLiteJournalMode, + } +} + +const sqlDriverPostgresSchema = "postgresschema" + +var makeSQLiteConnStr = func(opts SQLiteConnStrOptions) (string, string, error) { + return "", "", errors.New(`this Gitea binary was not built with SQLite3 support, get an official release or rebuild with: -tags sqlite,sqlite_unlock_notify`) +} + +func ConnStrDefaultDatabase(opts ConnOptions) (string, string, error) { + opts.Database, opts.Schema = "", "" + return ConnStr(opts) +} + +func ConnStr(opts ConnOptions) (string, string, error) { + switch { + case opts.Type.IsMySQL(): + // use unix socket or tcp socket + connType := util.Iif(strings.HasPrefix(opts.Host, "/"), "unix", "tcp") + // allow (Postgres-inspired) default value to work in MySQL + tls := util.Iif(opts.SSLMode == "disable", "false", opts.SSLMode) + // in case the database name is a partial connection string which contains "?" 
parameters + paramSep := util.Iif(strings.Contains(opts.Database, "?"), "&", "?") + connStr := fmt.Sprintf("%s:%s@%s(%s)/%s%sparseTime=true&tls=%s", opts.User, opts.Passwd, connType, opts.Host, opts.Database, paramSep, tls) + return "mysql", connStr, nil + + case opts.Type.IsPostgreSQL(): + connStr := makePgSQLConnStr(opts.Host, opts.User, opts.Passwd, opts.Database, opts.SSLMode) + driver := util.Iif(opts.Schema == "", "postgres", sqlDriverPostgresSchema) + registerPostgresSchemaDriver() + return driver, connStr, nil + + case opts.Type.IsMSSQL(): + host, port := parseMSSQLHostPort(opts.Host) + connStr := fmt.Sprintf("server=%s; port=%s; user id=%s; password=%s;", host, port, opts.User, opts.Passwd) + if opts.Database != "" { + connStr += "; database=" + opts.Database + } + return "mssql", connStr, nil + + case opts.Type.IsSQLite3(): + if opts.SQLitePath == "" { + return "", "", errors.New("sqlite3 database path cannot be empty") + } + if err := os.MkdirAll(filepath.Dir(opts.SQLitePath), os.ModePerm); err != nil { + return "", "", fmt.Errorf("failed to create directories: %w", err) + } + return makeSQLiteConnStr(SQLiteConnStrOptions{ + FilePath: opts.SQLitePath, + JournalMode: opts.SQLiteJournalMode, + BusyTimeout: opts.SQLiteBusyTimeout, + }) + } + return "", "", fmt.Errorf("unknown database type: %s", opts.Type) +} + +// parsePgSQLHostPort parses given input in various forms defined in +// https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING +// and returns proper host and port number. 
+func parsePgSQLHostPort(info string) (host, port string) { + if h, p, err := net.SplitHostPort(info); err == nil { + host, port = h, p + } else { + // treat the "info" as "host", if it's an IPv6 address, remove the wrapper + host = info + if strings.HasPrefix(host, "[") && strings.HasSuffix(host, "]") { + host = host[1 : len(host)-1] + } + } + + // set fallback values + if host == "" { + host = "127.0.0.1" + } + if port == "" { + port = "5432" + } + return host, port +} + +func makePgSQLConnStr(dbHost, dbUser, dbPasswd, dbName, dbsslMode string) (connStr string) { + dbName, dbParam, _ := strings.Cut(dbName, "?") + host, port := parsePgSQLHostPort(dbHost) + connURL := url.URL{ + Scheme: "postgres", + User: url.UserPassword(dbUser, dbPasswd), + Host: net.JoinHostPort(host, port), + Path: dbName, + OmitHost: false, + RawQuery: dbParam, + } + query := connURL.Query() + if strings.HasPrefix(host, "/") { // looks like a unix socket + query.Add("host", host) + connURL.Host = ":" + port + } + query.Set("sslmode", dbsslMode) + connURL.RawQuery = query.Encode() + return connURL.String() +} + +// parseMSSQLHostPort splits the host into host and port +func parseMSSQLHostPort(info string) (string, string) { + // the default port "0" might be related to MSSQL's dynamic port, maybe it should be double-confirmed in the future + host, port := "127.0.0.1", "0" + if strings.Contains(info, ":") { + host = strings.Split(info, ":")[0] + port = strings.Split(info, ":")[1] + } else if strings.Contains(info, ",") { + host = strings.Split(info, ",")[0] + port = strings.TrimSpace(strings.Split(info, ",")[1]) + } else if len(info) > 0 { + host = info + } + if host == "" { + host = "127.0.0.1" + } + if port == "" { + port = "0" + } + return host, port +} diff --git a/modules/setting/database_test.go b/models/db/conn_test.go similarity index 88% rename from modules/setting/database_test.go rename to models/db/conn_test.go index a742d54f8c..ba33d252f2 100644 --- 
a/modules/setting/database_test.go +++ b/models/db/conn_test.go @@ -1,7 +1,7 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT -package setting +package db import ( "testing" @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" ) -func Test_parsePostgreSQLHostPort(t *testing.T) { +func TestParsePgSQLHostPort(t *testing.T) { tests := map[string]struct { HostPort string Host string @@ -49,14 +49,14 @@ func Test_parsePostgreSQLHostPort(t *testing.T) { for k, test := range tests { t.Run(k, func(t *testing.T) { t.Log(test.HostPort) - host, port := parsePostgreSQLHostPort(test.HostPort) + host, port := parsePgSQLHostPort(test.HostPort) assert.Equal(t, test.Host, host) assert.Equal(t, test.Port, port) }) } } -func Test_getPostgreSQLConnectionString(t *testing.T) { +func TestMakePgSQLConnStr(t *testing.T) { tests := []struct { Host string User string @@ -103,7 +103,7 @@ func Test_getPostgreSQLConnectionString(t *testing.T) { } for _, test := range tests { - connStr := getPostgreSQLConnectionString(test.Host, test.User, test.Passwd, test.Name, test.SSLMode) + connStr := makePgSQLConnStr(test.Host, test.User, test.Passwd, test.Name, test.SSLMode) assert.Equal(t, test.Output, connStr) } } diff --git a/models/db/sql_postgres_with_schema.go b/models/db/driver_postgresschema.go similarity index 92% rename from models/db/sql_postgres_with_schema.go rename to models/db/driver_postgresschema.go index 812fe4a6a6..b673500763 100644 --- a/models/db/sql_postgres_with_schema.go +++ b/models/db/driver_postgresschema.go @@ -18,8 +18,8 @@ var registerOnce sync.Once func registerPostgresSchemaDriver() { registerOnce.Do(func() { - sql.Register("postgresschema", &postgresSchemaDriver{}) - dialects.RegisterDriver("postgresschema", dialects.QueryDriver("postgres")) + sql.Register(sqlDriverPostgresSchema, &postgresSchemaDriver{}) + dialects.RegisterDriver(sqlDriverPostgresSchema, dialects.QueryDriver("postgres")) }) } diff --git 
a/models/db/driver_sqlite_mattn.go b/models/db/driver_sqlite_mattn.go new file mode 100644 index 0000000000..4988a43d3f --- /dev/null +++ b/models/db/driver_sqlite_mattn.go @@ -0,0 +1,34 @@ +//go:build sqlite + +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package db + +import ( + "fmt" + "strconv" + "strings" + + "code.gitea.io/gitea/modules/setting" + + _ "github.com/mattn/go-sqlite3" +) + +func init() { + setting.SupportedDatabaseTypes = append(setting.SupportedDatabaseTypes, "sqlite3") + makeSQLiteConnStr = makeSQLiteConnStrMattnCGO +} + +func makeSQLiteConnStrMattnCGO(opts SQLiteConnStrOptions) (string, string, error) { + var params []string + params = append(params, "cache=shared") + params = append(params, "mode=rwc") + params = append(params, "_busy_timeout="+strconv.Itoa(opts.BusyTimeout)) + params = append(params, "_txlock=immediate") + if opts.JournalMode != "" { + params = append(params, "_journal_mode="+opts.JournalMode) + } + connStr := fmt.Sprintf("file:%s?%s", opts.FilePath, strings.Join(params, "&")) + return "sqlite3", connStr, nil +} diff --git a/models/db/engine_dump.go b/models/db/engine_dump.go index 63f2d4e093..1d8d555b44 100644 --- a/models/db/engine_dump.go +++ b/models/db/engine_dump.go @@ -3,10 +3,14 @@ package db -import "xorm.io/xorm/schemas" +import ( + "code.gitea.io/gitea/modules/setting" + + "xorm.io/xorm/schemas" +) // DumpDatabase dumps all data from database according the special database SQL syntax to file system. 
-func DumpDatabase(filePath, dbType string) error { +func DumpDatabase(filePath string, dbType setting.DatabaseType) error { var tbs []*schemas.Table for _, t := range registeredModels { t, err := xormEngine.TableInfo(t) diff --git a/models/db/engine_init.go b/models/db/engine_init.go index ef5db3ff5e..65192d3327 100644 --- a/models/db/engine_init.go +++ b/models/db/engine_init.go @@ -6,7 +6,6 @@ package db import ( "context" "fmt" - "strings" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" @@ -24,31 +23,23 @@ func init() { // newXORMEngine returns a new XORM engine from the configuration func newXORMEngine() (*xorm.Engine, error) { - connStr, err := setting.DBConnStr() + connOpts := GlobalConnOptions() + driver, connStr, err := ConnStr(connOpts) if err != nil { return nil, err } - var engine *xorm.Engine - - if setting.Database.Type.IsPostgreSQL() && len(setting.Database.Schema) > 0 { - // OK whilst we sort out our schema issues - create a schema aware postgres - registerPostgresSchemaDriver() - engine, err = xorm.NewEngine("postgresschema", connStr) - } else { - engine, err = xorm.NewEngine(setting.Database.Type.String(), connStr) - } - + engine, err := xorm.NewEngine(driver, connStr) if err != nil { return nil, err } - switch setting.Database.Type { - case "mysql": + switch { + case connOpts.Type.IsMySQL(): engine.Dialect().SetParams(map[string]string{"rowFormat": "DYNAMIC"}) - case "mssql": + case connOpts.Type.IsMSSQL(): engine.Dialect().SetParams(map[string]string{"DEFAULT_VARCHAR": "nvarchar"}) } - engine.SetSchema(setting.Database.Schema) + engine.SetSchema(connOpts.Schema) return engine, nil } @@ -56,10 +47,7 @@ func newXORMEngine() (*xorm.Engine, error) { func InitEngine(ctx context.Context) error { xe, err := newXORMEngine() if err != nil { - if strings.Contains(err.Error(), "SQLite3 support") { - return fmt.Errorf("sqlite3 requires: -tags sqlite,sqlite_unlock_notify\n%w", err) - } - return fmt.Errorf("failed to connect to 
database: %w", err) + return fmt.Errorf("failed to init database engine: %w", err) } xe.SetMapper(names.GonicMapper{}) diff --git a/models/db/engine_test.go b/models/db/engine_test.go index 1c218df77f..6a6264b535 100644 --- a/models/db/engine_test.go +++ b/models/db/engine_test.go @@ -30,7 +30,7 @@ func TestDumpDatabase(t *testing.T) { assert.NoError(t, db.GetEngine(t.Context()).Sync(new(Version))) for _, dbType := range setting.SupportedDatabaseTypes { - assert.NoError(t, db.DumpDatabase(filepath.Join(dir, dbType+".sql"), dbType)) + assert.NoError(t, db.DumpDatabase(filepath.Join(dir, dbType+".sql"), setting.DatabaseType(dbType))) } } diff --git a/models/issues/issue.go b/models/issues/issue.go index fe5433fbb2..345b36a82f 100644 --- a/models/issues/issue.go +++ b/models/issues/issue.go @@ -59,17 +59,18 @@ type Issue struct { PosterID int64 `xorm:"INDEX"` Poster *user_model.User `xorm:"-"` OriginalAuthor string - OriginalAuthorID int64 `xorm:"index"` - Title string `xorm:"name"` - Content string `xorm:"LONGTEXT"` - RenderedContent template.HTML `xorm:"-"` - ContentVersion int `xorm:"NOT NULL DEFAULT 0"` - Labels []*Label `xorm:"-"` - isLabelsLoaded bool `xorm:"-"` - MilestoneID int64 `xorm:"INDEX"` - Milestone *Milestone `xorm:"-"` - isMilestoneLoaded bool `xorm:"-"` - Project *project_model.Project `xorm:"-"` + OriginalAuthorID int64 `xorm:"index"` + Title string `xorm:"name"` + Content string `xorm:"LONGTEXT"` + RenderedContent template.HTML `xorm:"-"` + ContentVersion int `xorm:"NOT NULL DEFAULT 0"` + Labels []*Label `xorm:"-"` + isLabelsLoaded bool `xorm:"-"` + MilestoneID int64 `xorm:"INDEX"` + Milestone *Milestone `xorm:"-"` + isMilestoneLoaded bool `xorm:"-"` + Projects []*project_model.Project `xorm:"-"` + isProjectsLoaded bool `xorm:"-"` Priority int AssigneeID int64 `xorm:"-"` Assignee *user_model.User `xorm:"-"` @@ -305,7 +306,7 @@ func (issue *Issue) LoadAttributes(ctx context.Context) (err error) { return err } - if err = issue.LoadProject(ctx); err 
!= nil { + if err = issue.LoadProjects(ctx); err != nil { return err } @@ -355,6 +356,7 @@ func (issue *Issue) ResetAttributesLoaded() { issue.isMilestoneLoaded = false issue.isAttachmentsLoaded = false issue.isAssigneeLoaded = false + issue.isProjectsLoaded = false } // GetIsRead load the `IsRead` field of the issue diff --git a/models/issues/issue_list.go b/models/issues/issue_list.go index 26b93189b8..da407094a7 100644 --- a/models/issues/issue_list.go +++ b/models/issues/issue_list.go @@ -185,7 +185,7 @@ func (issues IssueList) LoadMilestones(ctx context.Context) error { func (issues IssueList) LoadProjects(ctx context.Context) error { issueIDs := issues.getIssueIDs() - projectMaps := make(map[int64]*project_model.Project, len(issues)) + issueProjectMaps := make(map[int64][]*project_model.Project, len(issues)) left := len(issueIDs) type projectWithIssueID struct { @@ -202,19 +202,21 @@ func (issues IssueList) LoadProjects(ctx context.Context) error { Select("project.*, project_issue.issue_id"). Join("INNER", "project_issue", "project.id = project_issue.project_id"). In("project_issue.issue_id", issueIDs[:limit]). + OrderBy("project_issue.issue_id ASC, project.id ASC"). 
Find(&projects) if err != nil { return err } for _, project := range projects { - projectMaps[project.IssueID] = project.Project + issueProjectMaps[project.IssueID] = append(issueProjectMaps[project.IssueID], project.Project) } left -= limit issueIDs = issueIDs[limit:] } for _, issue := range issues { - issue.Project = projectMaps[issue.ID] + issue.Projects = issueProjectMaps[issue.ID] + issue.isProjectsLoaded = true } return nil } diff --git a/models/issues/issue_list_test.go b/models/issues/issue_list_test.go index e9dc412331..842249bad2 100644 --- a/models/issues/issue_list_test.go +++ b/models/issues/issue_list_test.go @@ -65,10 +65,10 @@ func TestIssueList_LoadAttributes(t *testing.T) { } if issue.ID == int64(1) { assert.Equal(t, int64(400), issue.TotalTrackedTime) - assert.NotNil(t, issue.Project) - assert.Equal(t, int64(1), issue.Project.ID) + assert.NotEmpty(t, issue.Projects) + assert.Equal(t, int64(1), issue.Projects[0].ID) } else { - assert.Nil(t, issue.Project) + assert.Empty(t, issue.Projects) } } } diff --git a/models/issues/issue_project.go b/models/issues/issue_project.go index f78daf77f8..18f0f91c38 100644 --- a/models/issues/issue_project.go +++ b/models/issues/issue_project.go @@ -12,41 +12,38 @@ import ( "code.gitea.io/gitea/modules/util" ) -// LoadProject load the project the issue was assigned to -func (issue *Issue) LoadProject(ctx context.Context) (err error) { - if issue.Project == nil { - var p project_model.Project - has, err := db.GetEngine(ctx).Table("project"). +// LoadProjects loads all projects the issue is assigned to +func (issue *Issue) LoadProjects(ctx context.Context) (err error) { + if !issue.isProjectsLoaded { + err = db.GetEngine(ctx).Table("project"). Join("INNER", "project_issue", "project.id=project_issue.project_id"). - Where("project_issue.issue_id = ?", issue.ID).Get(&p) - if err != nil { - return err - } else if has { - issue.Project = &p + Where("project_issue.issue_id = ?", issue.ID). + OrderBy("project.id ASC"). 
+ Find(&issue.Projects) + if err == nil { + issue.isProjectsLoaded = true } } return err } -func (issue *Issue) projectID(ctx context.Context) int64 { - var ip project_model.ProjectIssue - has, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Get(&ip) - if err != nil || !has { - return 0 - } - return ip.ProjectID +func (issue *Issue) projectIDs(ctx context.Context) (projectIDs []int64, _ error) { + err := db.GetEngine(ctx).Table("project_issue").Where("issue_id = ?", issue.ID).Cols("project_id").Find(&projectIDs) + return projectIDs, err } -// ProjectColumnID return project column id if issue was assigned to one -func (issue *Issue) ProjectColumnID(ctx context.Context) (int64, error) { - var ip project_model.ProjectIssue - has, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Get(&ip) - if err != nil { - return 0, err - } else if !has { - return 0, nil +// ProjectColumnMap returns a map of project ID to column ID for this issue. +func (issue *Issue) ProjectColumnMap(ctx context.Context) (map[int64]int64, error) { + var projIssues []project_model.ProjectIssue + if err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Find(&projIssues); err != nil { + return nil, err } - return ip.ProjectColumnID, nil + + result := make(map[int64]int64, len(projIssues)) + for _, projIssue := range projIssues { + result[projIssue.ProjectID] = projIssue.ProjectColumnID + } + return result, nil } func LoadProjectIssueColumnMap(ctx context.Context, projectID, defaultColumnID int64) (map[int64]int64, error) { @@ -64,66 +61,91 @@ func LoadProjectIssueColumnMap(ctx context.Context, projectID, defaultColumnID i return result, nil } -// IssueAssignOrRemoveProject changes the project associated with an issue -// If newProjectID is 0, the issue is removed from the project -func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectID, newColumnID int64) error { +// IssueAssignOrRemoveProject updates the projects associated with an issue. 
+// It adds projects that are in newProjectIDs but not currently assigned, +// and removes projects that are currently assigned but not in newProjectIDs. +// If newProjectIDs is empty, all projects are removed from the issue. +// When adding an issue to a project, it is placed in the project's default column. +func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectIDs []int64) error { return db.WithTx(ctx, func(ctx context.Context) error { - oldProjectID := issue.projectID(ctx) - if err := issue.LoadRepo(ctx); err != nil { return err } - // Only check if we add a new project and not remove it. - if newProjectID > 0 { - newProject, err := project_model.GetProjectByID(ctx, newProjectID) - if err != nil { - return err - } - if !newProject.CanBeAccessedByOwnerRepo(issue.Repo.OwnerID, issue.Repo) { - return util.NewPermissionDeniedErrorf("issue %d can't be accessed by project %d", issue.ID, newProject.ID) - } - if newColumnID == 0 { - newDefaultColumn, err := newProject.MustDefaultColumn(ctx) - if err != nil { - return err - } - newColumnID = newDefaultColumn.ID - } - } - - if _, err := db.GetEngine(ctx).Where("project_issue.issue_id=?", issue.ID).Delete(&project_model.ProjectIssue{}); err != nil { - return err - } - - if oldProjectID > 0 || newProjectID > 0 { - if _, err := CreateComment(ctx, &CreateCommentOptions{ - Type: CommentTypeProject, - Doer: doer, - Repo: issue.Repo, - Issue: issue, - OldProjectID: oldProjectID, - ProjectID: newProjectID, - }); err != nil { - return err - } - } - if newProjectID == 0 { - return nil - } - if newColumnID == 0 { - panic("newColumnID must not be zero") // shouldn't happen - } - - newSorting, err := project_model.GetColumnIssueNextSorting(ctx, newProjectID, newColumnID) + oldProjectIDs, err := issue.projectIDs(ctx) if err != nil { return err } - return db.Insert(ctx, &project_model.ProjectIssue{ - IssueID: issue.ID, - ProjectID: newProjectID, - ProjectColumnID: newColumnID, - Sorting: 
newSorting, - }) + + projectsToAdd, projectsToRemove := util.DiffSlice(oldProjectIDs, newProjectIDs) + issue.isProjectsLoaded = false + issue.Projects = nil + + if len(projectsToRemove) > 0 { + if _, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).In("project_id", projectsToRemove).Delete(&project_model.ProjectIssue{}); err != nil { + return err + } + for _, projectID := range projectsToRemove { + if _, err := CreateComment(ctx, &CreateCommentOptions{ + Type: CommentTypeProject, + Doer: doer, + Repo: issue.Repo, + Issue: issue, + OldProjectID: projectID, + ProjectID: 0, + }); err != nil { + return err + } + } + } + + if len(projectsToAdd) > 0 { + projectMap, err := project_model.GetProjectsMapByIDs(ctx, projectsToAdd) + if err != nil { + return err + } + + for _, projectID := range projectsToAdd { + newProject, ok := projectMap[projectID] + if !ok { + return util.NewNotExistErrorf("project %d not found", projectID) + } + if !newProject.CanBeAccessedByOwnerRepo(issue.Repo.OwnerID, issue.Repo) { + return util.NewPermissionDeniedErrorf("issue %d can't be accessed by project %d", issue.ID, newProject.ID) + } + + defaultColumn, err := newProject.MustDefaultColumn(ctx) + if err != nil { + return err + } + + newSorting, err := project_model.GetColumnIssueNextSorting(ctx, projectID, defaultColumn.ID) + if err != nil { + return err + } + + err = db.Insert(ctx, &project_model.ProjectIssue{ + IssueID: issue.ID, + ProjectID: projectID, + ProjectColumnID: defaultColumn.ID, + Sorting: newSorting, + }) + if err != nil { + return err + } + + if _, err := CreateComment(ctx, &CreateCommentOptions{ + Type: CommentTypeProject, + Doer: doer, + Repo: issue.Repo, + Issue: issue, + OldProjectID: 0, + ProjectID: projectID, + }); err != nil { + return err + } + } + } + return nil }) } diff --git a/models/issues/issue_project_multi_test.go b/models/issues/issue_project_multi_test.go new file mode 100644 index 0000000000..b004f994b1 --- /dev/null +++ 
b/models/issues/issue_project_multi_test.go @@ -0,0 +1,149 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package issues_test + +import ( + "fmt" + "testing" + + issues_model "code.gitea.io/gitea/models/issues" + project_model "code.gitea.io/gitea/models/project" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestIssueMultipleProjects(t *testing.T) { + require.NoError(t, unittest.PrepareTestDatabase()) + + t.Run("GeneralTest", func(t *testing.T) { + // Get test data + issue1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1}) + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + project1 := unittest.AssertExistsAndLoadBean(t, &project_model.Project{ID: 1}) + + // Create a second project for the same repository + project2 := &project_model.Project{ + Title: "Test Project 2", + RepoID: issue1.RepoID, + Type: project_model.TypeRepository, + TemplateType: project_model.TemplateTypeBasicKanban, + } + require.NoError(t, project_model.NewProject(t.Context(), project2)) + defer func() { + _ = project_model.DeleteProjectByID(t.Context(), project2.ID) + }() + + err := issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{}) + require.NoError(t, err) + err = issue1.LoadProjects(t.Context()) + require.NoError(t, err) + require.Empty(t, issue1.Projects) + + // assign issue to both projects (each project uses its own default column) + err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{project1.ID}) + require.NoError(t, err) + assert.Nilf(t, issue1.Projects, "Issue's Projects should be nil after IssueAssignOrRemoveProject to ensure it reloads fresh data") + err = issue1.LoadProjects(t.Context()) + require.NoError(t, err) + require.Len(t, issue1.Projects, 1) + + err = 
issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{project1.ID, project2.ID}) + require.NoError(t, err) + assert.Nilf(t, issue1.Projects, "Issue's Projects should be nil after IssueAssignOrRemoveProject to ensure it reloads fresh data") + err = issue1.LoadProjects(t.Context()) + require.NoError(t, err) + require.Len(t, issue1.Projects, 2) + assert.ElementsMatch(t, []int64{project1.ID, project2.ID}, []int64{issue1.Projects[0].ID, issue1.Projects[1].ID}, "Issue should be in both projects") + + // test issue's project column map + projectColumnMap, err := issue1.ProjectColumnMap(t.Context()) + p1Col, _ := project1.MustDefaultColumn(t.Context()) + p2Col, _ := project2.MustDefaultColumn(t.Context()) + require.NoError(t, err) + assert.Equal(t, p1Col.ID, projectColumnMap[project1.ID]) + assert.Equal(t, p2Col.ID, projectColumnMap[project2.ID]) + + // only keep project2 + err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{project2.ID}) + require.NoError(t, err) + err = issue1.LoadProjects(t.Context()) + require.NoError(t, err) + require.Len(t, issue1.Projects, 1) + assert.Equal(t, project2.ID, issue1.Projects[0].ID) + + // also test ResetAttributesLoaded + issue1.Projects = nil + issue1.ResetAttributesLoaded() + err = issue1.LoadProjects(t.Context()) + require.NoError(t, err) + require.Len(t, issue1.Projects, 1) + assert.Equal(t, project2.ID, issue1.Projects[0].ID) + + // remove issue's projects + err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{}) + require.NoError(t, err) + err = issue1.LoadProjects(t.Context()) + require.NoError(t, err) + require.Empty(t, issue1.Projects) + }) + + t.Run("QueryByMultipleProjectIDs", func(t *testing.T) { + // Get test data + issue1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1}) + issue2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2}) + user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + + // 
Create three projects + var projects []*project_model.Project + for i := 1; i <= 3; i++ { + project := &project_model.Project{ + Title: fmt.Sprintf("Query Test Project %d", i), + RepoID: issue1.RepoID, + Type: project_model.TypeRepository, + TemplateType: project_model.TemplateTypeBasicKanban, + } + require.NoError(t, project_model.NewProject(t.Context(), project)) + projects = append(projects, project) + defer func(id int64) { + _ = project_model.DeleteProjectByID(t.Context(), id) + }(project.ID) + } + + // Assign issue1 to projects 1 and 2 + err := issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{projects[0].ID, projects[1].ID}) + require.NoError(t, err) + + // Assign issue2 to project 3 + err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue2, user2, []int64{projects[2].ID}) + require.NoError(t, err) + + // Query for issues in project 3 only (should find issue2) + issues, err := issues_model.Issues(t.Context(), &issues_model.IssuesOptions{ + RepoIDs: []int64{issue1.RepoID}, + ProjectIDs: []int64{projects[2].ID}, + }) + require.NoError(t, err) + assert.NotEmpty(t, issues, "Should find issues in project 3") + + // Verify issue2 is in the results + foundIssue2 := false + for _, issue := range issues { + if issue.ID == issue2.ID { + foundIssue2 = true + break + } + } + assert.True(t, foundIssue2, "Issue 2 should be found when querying project 3") + + // FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: no multiple project filter support yet. Search logic is wrong. It should use "AND" but not "OR". 
+ // Clean up + err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue1, user2, []int64{}) + require.NoError(t, err) + err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue2, user2, []int64{}) + require.NoError(t, err) + }) +} diff --git a/models/issues/issue_search.go b/models/issues/issue_search.go index 049dcc7de8..f905e629e3 100644 --- a/models/issues/issue_search.go +++ b/models/issues/issue_search.go @@ -16,6 +16,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/modules/util" "xorm.io/builder" "xorm.io/xorm" @@ -36,8 +37,7 @@ type IssuesOptions struct { //nolint:revive // export stutter ReviewedID int64 SubscriberID int64 MilestoneIDs []int64 - ProjectID int64 - ProjectColumnID int64 + ProjectIDs []int64 IsClosed optional.Option[bool] IsPull optional.Option[bool] LabelIDs []int64 @@ -198,26 +198,19 @@ func applyMilestoneCondition(sess *xorm.Session, opts *IssuesOptions) { } func applyProjectCondition(sess *xorm.Session, opts *IssuesOptions) { - if opts.ProjectID > 0 { // specific project - sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id"). 
- And("project_issue.project_id=?", opts.ProjectID) - } else if opts.ProjectID == db.NoConditionID { // show those that are in no project - sess.And(builder.NotIn("issue.id", builder.Select("issue_id").From("project_issue").And(builder.Neq{"project_id": 0}))) + projectIDs := util.SliceRemoveAll(opts.ProjectIDs, 0) + if len(projectIDs) == 1 && projectIDs[0] == db.NoConditionID { // show those that are in no project + sess.And(builder.NotIn("issue.id", builder.Select("issue_id").From("project_issue"))) + } else if len(projectIDs) == 1 && projectIDs[0] > 0 { // single specific project + sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id AND project_issue.project_id = ?", projectIDs[0]) + } else if len(projectIDs) > 1 { // multiple projects + // FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: this logic is not right, it should use "AND" but not "OR" + sess.And(builder.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.In("project_id", projectIDs)))) } - // opts.ProjectID == 0 means all projects, + // empty projectIDs means all projects, // do not need to apply any condition } -func applyProjectColumnCondition(sess *xorm.Session, opts *IssuesOptions) { - // opts.ProjectColumnID == 0 means all project columns, - // do not need to apply any condition - if opts.ProjectColumnID > 0 { - sess.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.Eq{"project_board_id": opts.ProjectColumnID})) - } else if opts.ProjectColumnID == db.NoConditionID { - sess.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.Eq{"project_board_id": 0})) - } -} - func applyRepoConditions(sess *xorm.Session, opts *IssuesOptions) { if len(opts.RepoIDs) == 1 { opts.RepoCond = builder.Eq{"issue.repo_id": opts.RepoIDs[0]} @@ -276,8 +269,6 @@ func applyConditions(sess *xorm.Session, opts *IssuesOptions) { applyProjectCondition(sess, opts) - applyProjectColumnCondition(sess, opts) - if opts.IsPull.Has() { 
sess.And("issue.is_pull=?", opts.IsPull.Value()) } diff --git a/models/issues/issue_test.go b/models/issues/issue_test.go index 55a90f50a1..b935c0fffd 100644 --- a/models/issues/issue_test.go +++ b/models/issues/issue_test.go @@ -424,10 +424,10 @@ func TestIssueLoadAttributes(t *testing.T) { } if issue.ID == int64(1) { assert.Equal(t, int64(400), issue.TotalTrackedTime) - assert.NotNil(t, issue.Project) - assert.Equal(t, int64(1), issue.Project.ID) + assert.NotEmpty(t, issue.Projects) + assert.Equal(t, int64(1), issue.Projects[0].ID) } else { - assert.Nil(t, issue.Project) + assert.Empty(t, issue.Projects) } } } diff --git a/models/issues/pull_list.go b/models/issues/pull_list.go index bf8f9b7d91..6a6abca970 100644 --- a/models/issues/pull_list.go +++ b/models/issues/pull_list.go @@ -71,38 +71,69 @@ func GetUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch } // CanMaintainerWriteToBranch check whether user is a maintainer and could write to the branch -func CanMaintainerWriteToBranch(ctx context.Context, p access_model.Permission, branch string, user *user_model.User) bool { - if p.CanWrite(unit.TypeCode) { - return true +func CanMaintainerWriteToBranch(ctx context.Context, headPerm access_model.Permission, headBranch string, doer *user_model.User) bool { + can, err := canMaintainerWriteToBranch(ctx, headPerm, headBranch, doer) + if err != nil { + log.Error("CanMaintainerWriteToBranch: %v", err) + return false + } + return can +} + +func canMaintainerWriteToBranch(ctx context.Context, headPerm access_model.Permission, headBranch string, doer *user_model.User) (bool, error) { + if headPerm.CanWrite(unit.TypeCode) { + return true, nil } // the code below depends on units to get the repository ID, not ideal but just keep it for now - firstUnitRepoID := p.GetFirstUnitRepoID() + firstUnitRepoID := headPerm.GetFirstUnitRepoID() if firstUnitRepoID == 0 { - return false + return false, nil } - prs, err := GetUnmergedPullRequestsByHeadInfo(ctx, 
firstUnitRepoID, branch) + prs, err := GetUnmergedPullRequestsByHeadInfo(ctx, firstUnitRepoID, headBranch) if err != nil { - return false + return false, err + } + if _, err := prs.LoadIssues(ctx); err != nil { + return false, err } - for _, pr := range prs { - if pr.AllowMaintainerEdit { - err = pr.LoadBaseRepo(ctx) - if err != nil { - continue - } - prPerm, err := access_model.GetIndividualUserRepoPermission(ctx, pr.BaseRepo, user) - if err != nil { - continue - } - if prPerm.CanWrite(unit.TypeCode) { - return true - } + if !pr.AllowMaintainerEdit { + continue + } + + // check the PR's poster's permissions + // If a "reader" poster created the PR in base repo from head repo, even if it is allowed to be edited by maintainers, + // the maintainers should not be allowed to write, because they don't really have "write" permission in the head repo + if err := pr.Issue.LoadPoster(ctx); err != nil { + return false, err + } + if err := pr.LoadHeadRepo(ctx); err != nil { + return false, err + } + posterHeadPerm, err := access_model.GetIndividualUserRepoPermission(ctx, pr.HeadRepo, pr.Issue.Poster) + if err != nil { + return false, err + } + if !posterHeadPerm.CanWrite(unit.TypeCode) { + continue + } + + // check the doer's permission + // Only allow the doer to edit the PR if they have write access to the base repository + if err := pr.LoadBaseRepo(ctx); err != nil { + return false, err + } + doerBasePerm, err := access_model.GetIndividualUserRepoPermission(ctx, pr.BaseRepo, doer) + if err != nil { + return false, err + } + if doerBasePerm.CanWrite(unit.TypeCode) { + return true, nil } } - return false + return false, nil } // HasUnmergedPullRequestsByHeadInfo checks if there are open and not merged pull request diff --git a/models/issues/pull_list_test.go b/models/issues/pull_list_test.go index 437830701c..302b2ca0ba 100644 --- a/models/issues/pull_list_test.go +++ b/models/issues/pull_list_test.go @@ -6,15 +6,28 @@ package issues_test import ( "testing" + 
"code.gitea.io/gitea/models/db" issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/perm" + "code.gitea.io/gitea/models/perm/access" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "xorm.io/builder" ) -func TestPullRequestList_LoadAttributes(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func TestPullRequestList(t *testing.T) { + require.NoError(t, unittest.PrepareTestDatabase()) + t.Run("LoadAttributes", testPullRequestListLoadAttributes) + t.Run("LoadReviewCommentsCounts", testPullRequestListLoadReviewCommentsCounts) + t.Run("LoadReviews", testPullRequestListLoadReviews) + t.Run("CanMaintainerWriteToBranch", testCanMaintainerWriteToBranch) +} +func testPullRequestListLoadAttributes(t *testing.T) { prs := issues_model.PullRequestList{ unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}), unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}), @@ -28,9 +41,7 @@ func TestPullRequestList_LoadAttributes(t *testing.T) { assert.NoError(t, issues_model.PullRequestList([]*issues_model.PullRequest{}).LoadAttributes(t.Context())) } -func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - +func testPullRequestListLoadReviewCommentsCounts(t *testing.T) { prs := issues_model.PullRequestList{ unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}), unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}), @@ -43,9 +54,7 @@ func TestPullRequestList_LoadReviewCommentsCounts(t *testing.T) { } } -func TestPullRequestList_LoadReviews(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - +func testPullRequestListLoadReviews(t *testing.T) { prs := issues_model.PullRequestList{ unittest.AssertExistsAndLoadBean(t, 
&issues_model.PullRequest{ID: 1}), unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}), @@ -61,3 +70,73 @@ func TestPullRequestList_LoadReviews(t *testing.T) { assert.EqualValues(t, 10, reviewList[4].ID) assert.EqualValues(t, 22, reviewList[5].ID) } + +func testCanMaintainerWriteToBranch(t *testing.T) { + ctx := t.Context() + baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10}) + headRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 11}) + + _ = baseRepo.LoadOwner(ctx) + _ = headRepo.LoadOwner(ctx) + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + + // a PR from header's owner + headOwnerPR := &issues_model.PullRequest{ + Issue: &issues_model.Issue{ + RepoID: baseRepo.ID, + PosterID: headRepo.OwnerID, + }, + HeadRepoID: headRepo.ID, + BaseRepoID: baseRepo.ID, + HeadBranch: "pr-from-head-owner", + BaseBranch: "master", + } + require.NoError(t, issues_model.NewPullRequest(ctx, baseRepo, headOwnerPR.Issue, nil, nil, headOwnerPR)) + + // a PR from a user, they might have or not have "write" permission in the target repo + anyUserPR := &issues_model.PullRequest{ + Issue: &issues_model.Issue{ + RepoID: baseRepo.ID, + PosterID: user.ID, + }, + HeadRepoID: headRepo.ID, + BaseRepoID: baseRepo.ID, + HeadBranch: "pr-from-head-user", + BaseBranch: "master", + } + require.NoError(t, issues_model.NewPullRequest(ctx, baseRepo, anyUserPR.Issue, nil, nil, anyUserPR)) + + doerCanWrite := func(doer *user_model.User, pr *issues_model.PullRequest) bool { + headPerm, _ := access.GetIndividualUserRepoPermission(ctx, headRepo, doer) + return issues_model.CanMaintainerWriteToBranch(ctx, headPerm, pr.HeadBranch, doer) + } + + t.Run("NoAllowMaintainerEdit", func(t *testing.T) { + assert.True(t, doerCanWrite(headRepo.Owner, headOwnerPR)) + assert.False(t, doerCanWrite(baseRepo.Owner, headOwnerPR)) + assert.False(t, doerCanWrite(baseRepo.Owner, anyUserPR)) + assert.False(t, doerCanWrite(user, anyUserPR)) + 
}) + + t.Run("WithAllowMaintainerEdit-HeadPosterReader", func(t *testing.T) { + _, err := db.GetEngine(ctx).Where(builder.In("id", []int64{headOwnerPR.ID, anyUserPR.ID})). + Cols("allow_maintainer_edit"). + Update(&issues_model.PullRequest{AllowMaintainerEdit: true}) + require.NoError(t, err) + assert.True(t, doerCanWrite(baseRepo.Owner, headOwnerPR)) + assert.False(t, doerCanWrite(baseRepo.Owner, anyUserPR)) // poster doesn't have write permission, so maintainer can't write either + }) + + t.Run("WithAllowMaintainerEdit-HeadPosterWriter", func(t *testing.T) { + _, err := db.GetEngine(ctx).Where(builder.In("id", []int64{headOwnerPR.ID, anyUserPR.ID})). + Cols("allow_maintainer_edit"). + Update(&issues_model.PullRequest{AllowMaintainerEdit: true}) + require.NoError(t, err) + err = db.Insert(ctx, &repo_model.Collaboration{RepoID: headRepo.ID, UserID: user.ID, Mode: perm.AccessModeWrite}) + require.NoError(t, err) + err = db.Insert(ctx, &access.Access{RepoID: headRepo.ID, UserID: user.ID, Mode: perm.AccessModeWrite}) + require.NoError(t, err) + assert.True(t, doerCanWrite(baseRepo.Owner, headOwnerPR)) + assert.True(t, doerCanWrite(baseRepo.Owner, anyUserPR)) // now the poster has the write permission + }) +} diff --git a/models/migrations/base/db_test.go b/models/migrations/base/db_test.go index 00635ca72e..ce6e1169d8 100644 --- a/models/migrations/base/db_test.go +++ b/models/migrations/base/db_test.go @@ -6,22 +6,23 @@ package base import ( "testing" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/timeutil" "xorm.io/xorm/names" ) func TestMain(m *testing.M) { - MainTest(m) + migrationtest.MainTest(m) } func Test_DropTableColumns(t *testing.T) { - x, deferable := PrepareTestEnv(t, 0) - if x == nil || t.Failed() { - defer deferable() - return - } + x, deferable := migrationtest.PrepareTestEnv(t, 0) defer deferable() + // FIXME: this logic seems wrong. Need to add an assertion here in the future, but it seems causing failure. 
+ if x == nil || t.Failed() { + t.Skip("PrepareTestEnv did not yield a usable engine") + } type DropTest struct { ID int64 `xorm:"pk autoincr"` diff --git a/models/migrations/base/tests.go b/models/migrations/base/tests.go deleted file mode 100644 index 7482829f1f..0000000000 --- a/models/migrations/base/tests.go +++ /dev/null @@ -1,226 +0,0 @@ -// Copyright 2022 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package base - -import ( - "database/sql" - "fmt" - "os" - "path" - "path/filepath" - "testing" - - "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/models/unittest" - "code.gitea.io/gitea/modules/git" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/tempdir" - "code.gitea.io/gitea/modules/testlogger" - "code.gitea.io/gitea/modules/util" - - "github.com/stretchr/testify/require" - "xorm.io/xorm" - "xorm.io/xorm/schemas" -) - -// FIXME: this file shouldn't be in a normal package, it should only be compiled for tests - -func newXORMEngine(t *testing.T) (*xorm.Engine, error) { - if err := db.InitEngine(t.Context()); err != nil { - return nil, err - } - x := unittest.GetXORMEngine() - return x, nil -} - -func deleteDB() error { - switch { - case setting.Database.Type.IsSQLite3(): - if err := util.Remove(setting.Database.Path); err != nil { - return err - } - return os.MkdirAll(path.Dir(setting.Database.Path), os.ModePerm) - - case setting.Database.Type.IsMySQL(): - db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/", - setting.Database.User, setting.Database.Passwd, setting.Database.Host)) - if err != nil { - return err - } - defer db.Close() - - if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil { - return err - } - - if _, err = db.Exec("CREATE DATABASE IF NOT EXISTS " + setting.Database.Name); err != nil { - return err - } - return nil - case setting.Database.Type.IsPostgreSQL(): - db, err := sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/?sslmode=%s", - 
setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.SSLMode)) - if err != nil { - return err - } - defer db.Close() - - if _, err = db.Exec("DROP DATABASE IF EXISTS " + setting.Database.Name); err != nil { - return err - } - - if _, err = db.Exec("CREATE DATABASE " + setting.Database.Name); err != nil { - return err - } - db.Close() - - // Check if we need to set up a specific schema - if len(setting.Database.Schema) != 0 { - db, err = sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s", - setting.Database.User, setting.Database.Passwd, setting.Database.Host, setting.Database.Name, setting.Database.SSLMode)) - if err != nil { - return err - } - defer db.Close() - - schrows, err := db.Query(fmt.Sprintf("SELECT 1 FROM information_schema.schemata WHERE schema_name = '%s'", setting.Database.Schema)) - if err != nil { - return err - } - defer schrows.Close() - - if !schrows.Next() { - // Create and set up a DB schema - _, err = db.Exec("CREATE SCHEMA " + setting.Database.Schema) - if err != nil { - return err - } - } - - // Make the user's default search path the created schema; this will affect new connections - _, err = db.Exec(fmt.Sprintf(`ALTER USER "%s" SET search_path = %s`, setting.Database.User, setting.Database.Schema)) - if err != nil { - return err - } - return nil - } - case setting.Database.Type.IsMSSQL(): - host, port := setting.ParseMSSQLHostPort(setting.Database.Host) - db, err := sql.Open("mssql", fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;", - host, port, "master", setting.Database.User, setting.Database.Passwd)) - if err != nil { - return err - } - defer db.Close() - - if _, err = db.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS [%s]", setting.Database.Name)); err != nil { - return err - } - if _, err = db.Exec(fmt.Sprintf("CREATE DATABASE [%s]", setting.Database.Name)); err != nil { - return err - } - } - - return nil -} - -// PrepareTestEnv prepares the test environment and 
reset the database. The skip parameter should usually be 0. -// Provide models to be sync'd with the database - in particular any models you expect fixtures to be loaded from. -// -// fixtures in `models/migrations/fixtures/` will be loaded automatically -func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, func()) { - t.Helper() - ourSkip := 2 - ourSkip += skip - deferFn := testlogger.PrintCurrentTest(t, ourSkip) - giteaRoot := setting.GetGiteaTestSourceRoot() - require.NoError(t, unittest.SyncDirs(filepath.Join(giteaRoot, "tests/gitea-repositories-meta"), setting.RepoRootPath)) - - if err := deleteDB(); err != nil { - t.Fatalf("unable to reset database: %v", err) - return nil, deferFn - } - - x, err := newXORMEngine(t) - require.NoError(t, err) - if x != nil { - oldDefer := deferFn - deferFn = func() { - oldDefer() - if err := x.Close(); err != nil { - t.Errorf("error during close: %v", err) - } - if err := deleteDB(); err != nil { - t.Errorf("unable to reset database: %v", err) - } - } - } - if err != nil { - return x, deferFn - } - - if len(syncModels) > 0 { - if err := x.Sync(syncModels...); err != nil { - t.Errorf("error during sync: %v", err) - return x, deferFn - } - } - - fixturesDir := filepath.Join(giteaRoot, "models", "migrations", "fixtures", t.Name()) - - if _, err := os.Stat(fixturesDir); err == nil { - t.Logf("initializing fixtures from: %s", fixturesDir) - if err := unittest.InitFixtures( - unittest.FixturesOptions{ - Dir: fixturesDir, - }, x); err != nil { - t.Errorf("error whilst initializing fixtures from %s: %v", fixturesDir, err) - return x, deferFn - } - if err := unittest.LoadFixtures(); err != nil { - t.Errorf("error whilst loading fixtures from %s: %v", fixturesDir, err) - return x, deferFn - } - } else if !os.IsNotExist(err) { - t.Errorf("unexpected error whilst checking for existence of fixtures: %v", err) - } else { - t.Logf("no fixtures found in: %s", fixturesDir) - } - - return x, deferFn -} - -func 
LoadTableSchemasMap(t *testing.T, x *xorm.Engine) map[string]*schemas.Table { - tables, err := x.DBMetas() - require.NoError(t, err) - tableMap := make(map[string]*schemas.Table) - for _, table := range tables { - tableMap[table.Name] = table - } - return tableMap -} - -func mainTest(m *testing.M) int { - testlogger.Init() - - tempWorkPath, cleanup, err := tempdir.OsTempDir("gitea-test").MkdirTempRandom("migration-test-data-") - if err != nil { - return testlogger.MainErrorf("Unable to create temporary dir for migration test: %v", err) - } - defer cleanup() - - setting.MockBuiltinPaths(tempWorkPath, "", "") - setting.SetupGiteaTestEnv() - - if err = git.InitFull(); err != nil { - return testlogger.MainErrorf("Unable to InitFull: %v", err) - } - setting.LoadDBSetting() - setting.InitLoggersForTest() - return m.Run() -} - -func MainTest(m *testing.M) { - os.Exit(mainTest(m)) -} diff --git a/models/migrations/migrationtest/tests.go b/models/migrations/migrationtest/tests.go new file mode 100644 index 0000000000..ed8bb16ef1 --- /dev/null +++ b/models/migrations/migrationtest/tests.go @@ -0,0 +1,120 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package migrationtest + +import ( + "os" + "path/filepath" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/testlogger" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "xorm.io/xorm" + "xorm.io/xorm/schemas" +) + +// PrepareTestEnv prepares the test environment and reset the database. The skip parameter should usually be 0. +// Provide models to be sync'd with the database - in particular any models you expect fixtures to be loaded from. 
+// +// fixtures in `models/migrations/fixtures/` will be loaded automatically +func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, func()) { + t.Helper() + ourSkip := 2 + ourSkip += skip + deferFn := testlogger.PrintCurrentTest(t, ourSkip) + giteaRoot := setting.GetGiteaTestSourceRoot() + require.NoError(t, unittest.SyncDirs(filepath.Join(giteaRoot, "tests/gitea-repositories-meta"), setting.RepoRootPath)) + + cleanup, err := unittest.ResetTestDatabase() + if err != nil { + t.Fatalf("unable to reset database: %v", err) + return nil, deferFn + } + { + oldDefer := deferFn + deferFn = func() { + cleanup() + oldDefer() + } + } + + err = db.InitEngine(t.Context()) + if !assert.NoError(t, err) { + return nil, deferFn + } + x := unittest.GetXORMEngine() + { + oldDefer := deferFn + deferFn = func() { + _ = x.Close() + oldDefer() + } + } + + if len(syncModels) > 0 { + if err := x.Sync(syncModels...); err != nil { + t.Errorf("error during sync: %v", err) + return x, deferFn + } + } + + fixturesDir := filepath.Join(giteaRoot, "models", "migrations", "fixtures", t.Name()) + + if _, err := os.Stat(fixturesDir); err == nil { + t.Logf("initializing fixtures from: %s", fixturesDir) + if err := unittest.InitFixtures( + unittest.FixturesOptions{ + Dir: fixturesDir, + }, x); err != nil { + t.Errorf("error whilst initializing fixtures from %s: %v", fixturesDir, err) + return x, deferFn + } + if err := unittest.LoadFixtures(); err != nil { + t.Errorf("error whilst loading fixtures from %s: %v", fixturesDir, err) + return x, deferFn + } + } else if !os.IsNotExist(err) { + t.Errorf("unexpected error whilst checking for existence of fixtures: %v", err) + } else { + t.Logf("no fixtures found in: %s", fixturesDir) + } + + return x, deferFn +} + +func LoadTableSchemasMap(t *testing.T, x *xorm.Engine) map[string]*schemas.Table { + tables, err := x.DBMetas() + require.NoError(t, err) + tableMap := make(map[string]*schemas.Table) + for _, table := range tables { + 
tableMap[table.Name] = table + } + return tableMap +} + +func mainTest(m *testing.M) int { + testlogger.Init() + err := setting.PrepareIntegrationTestConfig() + if err != nil { + return testlogger.MainErrorf("Unable to prepare integration test config: %v", err) + } + setting.SetupGiteaTestEnv() + + if err = git.InitFull(); err != nil { + return testlogger.MainErrorf("Unable to InitFull: %v", err) + } + setting.LoadDBSetting() + setting.InitLoggersForTest() + return m.Run() +} + +func MainTest(m *testing.M) { + os.Exit(mainTest(m)) +} diff --git a/models/migrations/v1_14/main_test.go b/models/migrations/v1_14/main_test.go index 978f88577c..6ed240c407 100644 --- a/models/migrations/v1_14/main_test.go +++ b/models/migrations/v1_14/main_test.go @@ -6,9 +6,9 @@ package v1_14 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_14/v176_test.go b/models/migrations/v1_14/v176_test.go index 5c1db4db71..aa57b5ad1d 100644 --- a/models/migrations/v1_14/v176_test.go +++ b/models/migrations/v1_14/v176_test.go @@ -6,7 +6,7 @@ package v1_14 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -47,7 +47,7 @@ func Test_RemoveInvalidLabels(t *testing.T) { } // load and prepare the test database - x, deferable := base.PrepareTestEnv(t, 0, new(Comment), new(Issue), new(Repository), new(IssueLabel), new(Label)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Comment), new(Issue), new(Repository), new(IssueLabel), new(Label)) if x == nil || t.Failed() { defer deferable() return diff --git a/models/migrations/v1_14/v177_test.go b/models/migrations/v1_14/v177_test.go index 263f69f338..a86fb98830 100644 --- a/models/migrations/v1_14/v177_test.go +++ b/models/migrations/v1_14/v177_test.go @@ 
-6,7 +6,7 @@ package v1_14 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" @@ -34,7 +34,7 @@ func Test_DeleteOrphanedIssueLabels(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(IssueLabel), new(Label)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(IssueLabel), new(Label)) if x == nil || t.Failed() { defer deferable() return diff --git a/models/migrations/v1_15/main_test.go b/models/migrations/v1_15/main_test.go index d01585e997..768bbd310b 100644 --- a/models/migrations/v1_15/main_test.go +++ b/models/migrations/v1_15/main_test.go @@ -6,9 +6,9 @@ package v1_15 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_15/v181_test.go b/models/migrations/v1_15/v181_test.go index 73b5c1f3d6..e230c684ea 100644 --- a/models/migrations/v1_15/v181_test.go +++ b/models/migrations/v1_15/v181_test.go @@ -7,7 +7,7 @@ import ( "strings" "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -20,7 +20,7 @@ func Test_AddPrimaryEmail2EmailAddress(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(User)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(User)) if x == nil || t.Failed() { defer deferable() return diff --git a/models/migrations/v1_15/v182_test.go b/models/migrations/v1_15/v182_test.go index 5fc6a0c467..c0a1378534 100644 --- a/models/migrations/v1_15/v182_test.go +++ b/models/migrations/v1_15/v182_test.go @@ -6,7 +6,7 @@ package v1_15 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + 
"code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -20,7 +20,7 @@ func Test_AddIssueResourceIndexTable(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(Issue)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Issue)) if x == nil || t.Failed() { defer deferable() return diff --git a/models/migrations/v1_16/main_test.go b/models/migrations/v1_16/main_test.go index 7f93d6e9e5..c54424788d 100644 --- a/models/migrations/v1_16/main_test.go +++ b/models/migrations/v1_16/main_test.go @@ -6,9 +6,9 @@ package v1_16 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_16/v189_test.go b/models/migrations/v1_16/v189_test.go index fb56ac8e11..44424dd369 100644 --- a/models/migrations/v1_16/v189_test.go +++ b/models/migrations/v1_16/v189_test.go @@ -6,7 +6,7 @@ package v1_16 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/json" "github.com/stretchr/testify/assert" @@ -27,7 +27,7 @@ func (ls *LoginSourceOriginalV189) TableName() string { func Test_UnwrapLDAPSourceCfg(t *testing.T) { // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(LoginSourceOriginalV189)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(LoginSourceOriginalV189)) if x == nil || t.Failed() { defer deferable() return diff --git a/models/migrations/v1_16/v193_test.go b/models/migrations/v1_16/v193_test.go index 2e827f0550..f68dd6d92d 100644 --- a/models/migrations/v1_16/v193_test.go +++ b/models/migrations/v1_16/v193_test.go @@ -6,7 +6,7 @@ package v1_16 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" 
"github.com/stretchr/testify/assert" ) @@ -31,7 +31,7 @@ func Test_AddRepoIDForAttachment(t *testing.T) { } // Prepare and load the testing database - x, deferrable := base.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release)) + x, deferrable := migrationtest.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release)) defer deferrable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_16/v195_test.go b/models/migrations/v1_16/v195_test.go index 946e06e399..bbfa5e162a 100644 --- a/models/migrations/v1_16/v195_test.go +++ b/models/migrations/v1_16/v195_test.go @@ -6,7 +6,7 @@ package v1_16 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -21,7 +21,7 @@ func Test_AddTableCommitStatusIndex(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(CommitStatus)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(CommitStatus)) if x == nil || t.Failed() { defer deferable() return diff --git a/models/migrations/v1_16/v210_test.go b/models/migrations/v1_16/v210_test.go index 3b4ac7aa4b..7bff2572e1 100644 --- a/models/migrations/v1_16/v210_test.go +++ b/models/migrations/v1_16/v210_test.go @@ -6,7 +6,7 @@ package v1_16 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" @@ -44,7 +44,7 @@ func Test_RemigrateU2FCredentials(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(WebauthnCredential), new(U2fRegistration), new(ExpectedWebauthnCredential)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(WebauthnCredential), new(U2fRegistration), new(ExpectedWebauthnCredential)) if x == nil || t.Failed() { defer deferable() return diff --git 
a/models/migrations/v1_17/main_test.go b/models/migrations/v1_17/main_test.go index 571a4f55a3..8652201871 100644 --- a/models/migrations/v1_17/main_test.go +++ b/models/migrations/v1_17/main_test.go @@ -6,9 +6,9 @@ package v1_17 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_17/v221_test.go b/models/migrations/v1_17/v221_test.go index a2dc0fae55..6fda9b9980 100644 --- a/models/migrations/v1_17/v221_test.go +++ b/models/migrations/v1_17/v221_test.go @@ -7,7 +7,7 @@ import ( "encoding/base32" "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -38,7 +38,7 @@ func Test_StoreWebauthnCredentialIDAsBytes(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(WebauthnCredential), new(ExpectedWebauthnCredential)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(WebauthnCredential), new(ExpectedWebauthnCredential)) defer deferable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_18/main_test.go b/models/migrations/v1_18/main_test.go index ebcfb45a94..b8641526f3 100644 --- a/models/migrations/v1_18/main_test.go +++ b/models/migrations/v1_18/main_test.go @@ -6,9 +6,9 @@ package v1_18 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_18/v229_test.go b/models/migrations/v1_18/v229_test.go index 5722dd3557..638983ad0b 100644 --- a/models/migrations/v1_18/v229_test.go +++ b/models/migrations/v1_18/v229_test.go @@ -7,7 +7,7 @@ import ( "testing" "code.gitea.io/gitea/models/issues" - "code.gitea.io/gitea/models/migrations/base" + 
"code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -16,7 +16,7 @@ func Test_UpdateOpenMilestoneCounts(t *testing.T) { type ExpectedMilestone issues.Milestone // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(issues.Milestone), new(ExpectedMilestone), new(issues.Issue)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(issues.Milestone), new(ExpectedMilestone), new(issues.Issue)) defer deferable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_18/v230_test.go b/models/migrations/v1_18/v230_test.go index 25b2f6525d..e5e28ea63f 100644 --- a/models/migrations/v1_18/v230_test.go +++ b/models/migrations/v1_18/v230_test.go @@ -6,7 +6,7 @@ package v1_18 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -18,7 +18,7 @@ func Test_AddConfidentialClientColumnToOAuth2ApplicationTable(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(oauth2Application)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(oauth2Application)) defer deferable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_19/main_test.go b/models/migrations/v1_19/main_test.go index 87e807be6e..784ca0e46e 100644 --- a/models/migrations/v1_19/main_test.go +++ b/models/migrations/v1_19/main_test.go @@ -6,9 +6,9 @@ package v1_19 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_19/v233_test.go b/models/migrations/v1_19/v233_test.go index 7436ff7483..3f7900c58f 100644 --- a/models/migrations/v1_19/v233_test.go +++ b/models/migrations/v1_19/v233_test.go @@ -6,7 +6,7 @@ package v1_19 import ( "testing" - 
"code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/secret" "code.gitea.io/gitea/modules/setting" @@ -39,7 +39,7 @@ func Test_AddHeaderAuthorizationEncryptedColWebhook(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(Webhook), new(ExpectedWebhook), new(HookTask)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Webhook), new(ExpectedWebhook), new(HookTask)) defer deferable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_20/main_test.go b/models/migrations/v1_20/main_test.go index 2fd63a7118..3ceb9a3c66 100644 --- a/models/migrations/v1_20/main_test.go +++ b/models/migrations/v1_20/main_test.go @@ -6,9 +6,9 @@ package v1_20 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_20/v259_test.go b/models/migrations/v1_20/v259_test.go index 0bf63719e5..3864eecb78 100644 --- a/models/migrations/v1_20/v259_test.go +++ b/models/migrations/v1_20/v259_test.go @@ -8,7 +8,7 @@ import ( "strings" "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -66,7 +66,7 @@ func Test_ConvertScopedAccessTokens(t *testing.T) { }) } - x, deferable := base.PrepareTestEnv(t, 0, new(AccessToken)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(AccessToken)) defer deferable() if x == nil || t.Failed() { t.Skip() diff --git a/models/migrations/v1_21/main_test.go b/models/migrations/v1_21/main_test.go index 536a7ade08..daf98d40f4 100644 --- a/models/migrations/v1_21/main_test.go +++ b/models/migrations/v1_21/main_test.go @@ -6,9 +6,9 @@ package v1_21 import ( "testing" - 
"code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_22/main_test.go b/models/migrations/v1_22/main_test.go index ac8facd6aa..e02c8a5328 100644 --- a/models/migrations/v1_22/main_test.go +++ b/models/migrations/v1_22/main_test.go @@ -6,9 +6,9 @@ package v1_22 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_22/v283_test.go b/models/migrations/v1_22/v283_test.go index 743f860466..8e4c9410bd 100644 --- a/models/migrations/v1_22/v283_test.go +++ b/models/migrations/v1_22/v283_test.go @@ -6,7 +6,7 @@ package v1_22 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -21,7 +21,7 @@ func Test_AddCombinedIndexToIssueUser(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(IssueUser)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(IssueUser)) defer deferable() assert.NoError(t, AddCombinedIndexToIssueUser(x)) diff --git a/models/migrations/v1_22/v286_test.go b/models/migrations/v1_22/v286_test.go index b4a50f6fcb..1bd7fac2f1 100644 --- a/models/migrations/v1_22/v286_test.go +++ b/models/migrations/v1_22/v286_test.go @@ -6,7 +6,7 @@ package v1_22 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" "xorm.io/xorm" @@ -64,7 +64,7 @@ func PrepareOldRepository(t *testing.T) (*xorm.Engine, func()) { } // Prepare and load the testing database - return base.PrepareTestEnv(t, 0, + return migrationtest.PrepareTestEnv(t, 0, new(Repository), new(CommitStatus), 
new(RepoArchiver), diff --git a/models/migrations/v1_22/v287_test.go b/models/migrations/v1_22/v287_test.go index 2b42a33c38..21946a662a 100644 --- a/models/migrations/v1_22/v287_test.go +++ b/models/migrations/v1_22/v287_test.go @@ -7,7 +7,7 @@ import ( "strconv" "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -20,7 +20,7 @@ func Test_UpdateBadgeColName(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(Badge)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Badge)) defer deferable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_22/v293_test.go b/models/migrations/v1_22/v293_test.go index c7b643c7e0..bc3a33055c 100644 --- a/models/migrations/v1_22/v293_test.go +++ b/models/migrations/v1_22/v293_test.go @@ -6,7 +6,7 @@ package v1_22 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/models/project" "github.com/stretchr/testify/assert" @@ -14,7 +14,7 @@ import ( func Test_CheckProjectColumnsConsistency(t *testing.T) { // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(project.Project), new(project.Column)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(project.Project), new(project.Column)) defer deferable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_22/v294_test.go b/models/migrations/v1_22/v294_test.go index 1cf03d6120..a711b5ec5f 100644 --- a/models/migrations/v1_22/v294_test.go +++ b/models/migrations/v1_22/v294_test.go @@ -6,7 +6,7 @@ package v1_22 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" "xorm.io/xorm/schemas" @@ -20,7 +20,7 @@ func Test_AddUniqueIndexForProjectIssue(t *testing.T) { } // 
Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(ProjectIssue)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(ProjectIssue)) defer deferable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_23/main_test.go b/models/migrations/v1_23/main_test.go index f7b2caed83..ffccac0fd3 100644 --- a/models/migrations/v1_23/main_test.go +++ b/models/migrations/v1_23/main_test.go @@ -6,9 +6,9 @@ package v1_23 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_23/v302_test.go b/models/migrations/v1_23/v302_test.go index b008b6fc03..1832adf39a 100644 --- a/models/migrations/v1_23/v302_test.go +++ b/models/migrations/v1_23/v302_test.go @@ -6,7 +6,7 @@ package v1_23 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" @@ -44,7 +44,7 @@ func Test_AddIndexToActionTaskStoppedLogExpired(t *testing.T) { } // Prepare and load the testing database - x, deferable := base.PrepareTestEnv(t, 0, new(ActionTask)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(ActionTask)) defer deferable() assert.NoError(t, AddIndexToActionTaskStoppedLogExpired(x)) diff --git a/models/migrations/v1_23/v304_test.go b/models/migrations/v1_23/v304_test.go index c3dfa5e7e7..9af84cd257 100644 --- a/models/migrations/v1_23/v304_test.go +++ b/models/migrations/v1_23/v304_test.go @@ -6,7 +6,7 @@ package v1_23 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" @@ -33,7 +33,7 @@ func Test_AddIndexForReleaseSha1(t *testing.T) { } // Prepare and load the testing database - x, 
deferable := base.PrepareTestEnv(t, 0, new(Release)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Release)) defer deferable() assert.NoError(t, AddIndexForReleaseSha1(x)) diff --git a/models/migrations/v1_25/main_test.go b/models/migrations/v1_25/main_test.go index d2c4a4105d..33c981edb9 100644 --- a/models/migrations/v1_25/main_test.go +++ b/models/migrations/v1_25/main_test.go @@ -6,9 +6,9 @@ package v1_25 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_25/v321_test.go b/models/migrations/v1_25/v321_test.go index 3ef2c68aa3..0749a20e20 100644 --- a/models/migrations/v1_25/v321_test.go +++ b/models/migrations/v1_25/v321_test.go @@ -6,7 +6,7 @@ package v1_25 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -44,12 +44,12 @@ func Test_UseLongTextInSomeColumnsAndFixBugs(t *testing.T) { } // Prepare and load the testing database - x, deferrable := base.PrepareTestEnv(t, 0, new(ReviewState), new(PackageProperty), new(Notice)) + x, deferrable := migrationtest.PrepareTestEnv(t, 0, new(ReviewState), new(PackageProperty), new(Notice)) defer deferrable() require.NoError(t, UseLongTextInSomeColumnsAndFixBugs(x)) - tables := base.LoadTableSchemasMap(t, x) + tables := migrationtest.LoadTableSchemasMap(t, x) table := tables["review_state"] column := table.GetColumn("updated_files") assert.Equal(t, "LONGTEXT", column.SQLType.Name) diff --git a/models/migrations/v1_25/v322_test.go b/models/migrations/v1_25/v322_test.go index 78d890704c..1964614035 100644 --- a/models/migrations/v1_25/v322_test.go +++ b/models/migrations/v1_25/v322_test.go @@ -6,7 +6,7 @@ package v1_25 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" 
+ "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/setting" "github.com/stretchr/testify/assert" @@ -23,11 +23,11 @@ func Test_ExtendCommentTreePathLength(t *testing.T) { TreePath string `xorm:"VARCHAR(255)"` } - x, deferrable := base.PrepareTestEnv(t, 0, new(Comment)) + x, deferrable := migrationtest.PrepareTestEnv(t, 0, new(Comment)) defer deferrable() require.NoError(t, ExtendCommentTreePathLength(x)) - table := base.LoadTableSchemasMap(t, x)["comment"] + table := migrationtest.LoadTableSchemasMap(t, x)["comment"] column := table.GetColumn("tree_path") assert.Contains(t, []string{"NVARCHAR", "VARCHAR"}, column.SQLType.Name) assert.EqualValues(t, 4000, column.Length) diff --git a/models/migrations/v1_26/main_test.go b/models/migrations/v1_26/main_test.go index 5aa12d553c..0b271b9bbc 100644 --- a/models/migrations/v1_26/main_test.go +++ b/models/migrations/v1_26/main_test.go @@ -6,9 +6,9 @@ package v1_26 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_26/v325_test.go b/models/migrations/v1_26/v325_test.go index d4a66fee81..3fd658e01b 100644 --- a/models/migrations/v1_26/v325_test.go +++ b/models/migrations/v1_26/v325_test.go @@ -6,7 +6,7 @@ package v1_26 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/require" @@ -38,7 +38,7 @@ func Test_FixMissedRepoIDWhenMigrateAttachments(t *testing.T) { } // Prepare and load the testing database - x, deferrable := base.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release)) + x, deferrable := migrationtest.PrepareTestEnv(t, 0, new(Attachment), new(Issue), new(Release)) defer deferrable() require.NoError(t, FixMissedRepoIDWhenMigrateAttachments(x)) diff --git 
a/models/migrations/v1_26/v326_test.go b/models/migrations/v1_26/v326_test.go index b92eed35f6..a0225eb774 100644 --- a/models/migrations/v1_26/v326_test.go +++ b/models/migrations/v1_26/v326_test.go @@ -6,7 +6,7 @@ package v1_26 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/test" @@ -57,7 +57,7 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) { TargetURL string } - x, deferable := base.PrepareTestEnv(t, 0, + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(Repository), new(ActionRun), new(ActionRunJob), diff --git a/models/migrations/v1_26/v327_test.go b/models/migrations/v1_26/v327_test.go index 971707be4f..98e948cf05 100644 --- a/models/migrations/v1_26/v327_test.go +++ b/models/migrations/v1_26/v327_test.go @@ -6,7 +6,7 @@ package v1_26 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/require" ) @@ -17,7 +17,7 @@ func Test_AddDisabledToActionRunner(t *testing.T) { Name string } - x, deferable := base.PrepareTestEnv(t, 0, new(ActionRunner)) + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(ActionRunner)) defer deferable() _, err := x.Insert(&ActionRunner{Name: "runner"}) diff --git a/models/migrations/v1_26/v329_test.go b/models/migrations/v1_26/v329_test.go index cab8e79906..e4bebfb71d 100644 --- a/models/migrations/v1_26/v329_test.go +++ b/models/migrations/v1_26/v329_test.go @@ -6,7 +6,7 @@ package v1_26 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" ) @@ -22,7 +22,7 @@ func (UserBadgeBefore) TableName() string { } func Test_AddUniqueIndexForUserBadge(t *testing.T) { - x, deferable := base.PrepareTestEnv(t, 0, new(UserBadgeBefore)) + x, deferable := 
migrationtest.PrepareTestEnv(t, 0, new(UserBadgeBefore)) defer deferable() if x == nil || t.Failed() { return diff --git a/models/migrations/v1_27/main_test.go b/models/migrations/v1_27/main_test.go index e269e3df9a..0c6a6a2440 100644 --- a/models/migrations/v1_27/main_test.go +++ b/models/migrations/v1_27/main_test.go @@ -6,9 +6,9 @@ package v1_27 import ( "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" ) func TestMain(m *testing.M) { - base.MainTest(m) + migrationtest.MainTest(m) } diff --git a/models/migrations/v1_27/v331_test.go b/models/migrations/v1_27/v331_test.go index 45f467cf9b..2302fee024 100644 --- a/models/migrations/v1_27/v331_test.go +++ b/models/migrations/v1_27/v331_test.go @@ -8,7 +8,7 @@ import ( "slices" "testing" - "code.gitea.io/gitea/models/migrations/base" + "code.gitea.io/gitea/models/migrations/migrationtest" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -49,7 +49,7 @@ func (actionArtifactBeforeV331) TableName() string { } func Test_AddActionRunAttemptModel(t *testing.T) { - x, deferable := base.PrepareTestEnv(t, 0, + x, deferable := migrationtest.PrepareTestEnv(t, 0, new(actionRunBeforeV331), new(actionRunJobBeforeV331), new(actionArtifactBeforeV331), @@ -69,7 +69,7 @@ func Test_AddActionRunAttemptModel(t *testing.T) { require.NoError(t, AddActionRunAttemptModel(x)) - tableMap := base.LoadTableSchemasMap(t, x) + tableMap := migrationtest.LoadTableSchemasMap(t, x) attemptTable := tableMap["action_run_attempt"] require.NotNil(t, attemptTable) diff --git a/models/packages/package_file.go b/models/packages/package_file.go index 69401eee3e..64bd08f0b2 100644 --- a/models/packages/package_file.go +++ b/models/packages/package_file.go @@ -68,7 +68,7 @@ func TryInsertFile(ctx context.Context, pf *PackageFile) (*PackageFile, error) { // GetFilesByVersionID gets all files of a version func GetFilesByVersionID(ctx context.Context, versionID int64) 
([]*PackageFile, error) { pfs := make([]*PackageFile, 0, 10) - return pfs, db.GetEngine(ctx).Where("version_id = ?", versionID).Find(&pfs) + return pfs, db.GetEngine(ctx).Where("version_id = ?", versionID).OrderBy("lower_name, created_unix, id").Find(&pfs) } // GetFileForVersionByID gets a file of a version by id diff --git a/models/project/project.go b/models/project/project.go index 7646c3dd71..7fcef430f2 100644 --- a/models/project/project.go +++ b/models/project/project.go @@ -302,6 +302,15 @@ func GetProjectByID(ctx context.Context, id int64) (*Project, error) { return p, nil } +// GetProjectsMapByIDs returns projects by a list of IDs. +func GetProjectsMapByIDs(ctx context.Context, ids []int64) (map[int64]*Project, error) { + projects := make(map[int64]*Project, len(ids)) + if len(ids) == 0 { + return projects, nil + } + return projects, db.GetEngine(ctx).In("id", ids).Find(&projects) +} + func GetProjectByIDAndOwner(ctx context.Context, id, ownerID int64) (*Project, error) { p := new(Project) has, err := db.GetEngine(ctx).ID(id).And("owner_id = ?", ownerID).Get(p) diff --git a/models/renderhelper/repo_comment.go b/models/renderhelper/repo_comment.go index ae0fbf0abd..d1c587671b 100644 --- a/models/renderhelper/repo_comment.go +++ b/models/renderhelper/repo_comment.go @@ -34,7 +34,7 @@ func (r *RepoComment) ResolveLink(link, preferLinkType string) string { case markup.LinkTypeRoot: return r.ctx.ResolveLinkRoot(link) default: - return r.ctx.ResolveLinkRelative(r.repoLink, r.opts.CurrentRefPath, link) + return r.ctx.ResolveLinkRelative(r.repoLink, r.opts.CurrentRefSubURL, link) } } @@ -43,7 +43,7 @@ var _ markup.RenderHelper = (*RepoComment)(nil) type RepoCommentOptions struct { DeprecatedRepoName string // it is only a patch for the non-standard "markup" api DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api - CurrentRefPath string // eg: "branch/main" or "commit/11223344" + CurrentRefSubURL string // eg: "branch/main" or 
"commit/11223344" FootnoteContextID string // the extra context ID for footnotes, used to avoid conflicts with other footnotes in the same page } diff --git a/models/renderhelper/repo_comment_test.go b/models/renderhelper/repo_comment_test.go index 3b13bff73c..1443f8b3c0 100644 --- a/models/renderhelper/repo_comment_test.go +++ b/models/renderhelper/repo_comment_test.go @@ -54,8 +54,8 @@ func TestRepoComment(t *testing.T) { `, rendered) }) - t.Run("WithCurrentRefPath", func(t *testing.T) { - rctx := NewRenderContextRepoComment(t.Context(), repo1, RepoCommentOptions{CurrentRefPath: "/commit/1234"}). + t.Run("WithCurrentRefSubURL", func(t *testing.T) { + rctx := NewRenderContextRepoComment(t.Context(), repo1, RepoCommentOptions{CurrentRefSubURL: "/commit/1234"}). WithMarkupType(markdown.MarkupName) // the ref path is only used to render commit message: a commit message is rendered at the commit page with its commit ID path diff --git a/models/renderhelper/repo_file.go b/models/renderhelper/repo_file.go index 5d0bfd6c80..d9aa71b727 100644 --- a/models/renderhelper/repo_file.go +++ b/models/renderhelper/repo_file.go @@ -35,11 +35,11 @@ func (r *RepoFile) ResolveLink(link, preferLinkType string) (finalLink string) { case markup.LinkTypeRoot: finalLink = r.ctx.ResolveLinkRoot(link) case markup.LinkTypeRaw: - finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "raw", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link) + finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "raw", r.opts.CurrentRefSubURL), r.opts.CurrentTreePath, link) case markup.LinkTypeMedia: - finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "media", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link) + finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "media", r.opts.CurrentRefSubURL), r.opts.CurrentTreePath, link) default: - finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "src", r.opts.CurrentRefPath), r.opts.CurrentTreePath, link) + finalLink = 
r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "src", r.opts.CurrentRefSubURL), r.opts.CurrentTreePath, link) } return finalLink } @@ -50,8 +50,8 @@ type RepoFileOptions struct { DeprecatedRepoName string // it is only a patch for the non-standard "markup" api DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api - CurrentRefPath string // eg: "branch/main", it is a sub URL path escaped by callers, TODO: rename to CurrentRefSubURL - CurrentTreePath string // eg: "path/to/file" in the repo, it is the tree path without URL path escaping + CurrentRefSubURL string // eg: "branch/main" or "branch/my%20branch", it is a sub URL path escaped by callers + CurrentTreePath string // eg: "path/to/file" in the repo, it is the tree path without URL path escaping } func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository, opts ...RepoFileOptions) *markup.RenderContext { @@ -71,9 +71,8 @@ func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository, }) } // External render's iframe needs this to generate correct links - // TODO: maybe need to make it access "CurrentRefPath" directly (but impossible at the moment due to cycle-import) - // CurrentRefPath is already path-escaped by callers - rctx.RenderOptions.Metas["RefTypeNameSubURL"] = helper.opts.CurrentRefPath + // TODO: maybe need to make it access "CurrentRefSubURL" directly (but impossible at the moment due to cycle-import) + rctx.RenderOptions.Metas["RefTypeNameSubURL"] = helper.opts.CurrentRefSubURL rctx = rctx.WithHelper(helper).WithEnableHeadingIDGeneration(true) return rctx } diff --git a/models/renderhelper/repo_file_test.go b/models/renderhelper/repo_file_test.go index 3b48efba3a..72d98efc66 100644 --- a/models/renderhelper/repo_file_test.go +++ b/models/renderhelper/repo_file_test.go @@ -36,7 +36,7 @@ func TestRepoFile(t *testing.T) { }) t.Run("AbsoluteAndRelative", func(t *testing.T) { - rctx := NewRenderContextRepoFile(t.Context(), repo1, 
RepoFileOptions{CurrentRefPath: "branch/main"}). + rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefSubURL: "branch/main"}). WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` [/test](/test) @@ -53,8 +53,8 @@ func TestRepoFile(t *testing.T) { `, rendered) }) - t.Run("WithCurrentRefPath", func(t *testing.T) { - rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefPath: "/commit/1234"}). + t.Run("WithCurrentRefSubURL", func(t *testing.T) { + rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{CurrentRefSubURL: "/commit/1234"}). WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` [/test](/test) @@ -66,10 +66,10 @@ func TestRepoFile(t *testing.T) { `, rendered) }) - t.Run("WithCurrentRefPathByTag", func(t *testing.T) { + t.Run("WithCurrentRefSubURLByTag", func(t *testing.T) { rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{ - CurrentRefPath: "/commit/1234", - CurrentTreePath: "my-dir", + CurrentRefSubURL: "/commit/1234", + CurrentTreePath: "my-dir", }). 
WithMarkupType(markdown.MarkupName) rendered, err := markup.RenderString(rctx, ` @@ -89,8 +89,8 @@ func TestRepoFileOrgMode(t *testing.T) { t.Run("Links", func(t *testing.T) { rctx := NewRenderContextRepoFile(t.Context(), repo1, RepoFileOptions{ - CurrentRefPath: "/commit/1234", - CurrentTreePath: "my-dir", + CurrentRefSubURL: "/commit/1234", + CurrentTreePath: "my-dir", }).WithRelativePath("my-dir/a.org") rendered, err := markup.RenderString(rctx, ` diff --git a/models/renderhelper/repo_wiki.go b/models/renderhelper/repo_wiki.go index 218b1e4a67..61e2b570e5 100644 --- a/models/renderhelper/repo_wiki.go +++ b/models/renderhelper/repo_wiki.go @@ -36,9 +36,9 @@ func (r *RepoWiki) ResolveLink(link, preferLinkType string) (finalLink string) { case markup.LinkTypeRoot: finalLink = r.ctx.ResolveLinkRoot(link) case markup.LinkTypeMedia, markup.LinkTypeRaw: - finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki/raw", r.opts.currentRefPath), r.opts.currentTreePath, link) + finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki/raw", r.opts.currentRefSubURL), r.opts.currentTreePath, link) default: - finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki", r.opts.currentRefPath), r.opts.currentTreePath, link) + finalLink = r.ctx.ResolveLinkRelative(path.Join(r.repoLink, "wiki", r.opts.currentRefSubURL), r.opts.currentTreePath, link) } return finalLink } @@ -50,8 +50,8 @@ type RepoWikiOptions struct { DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api // these options are not used at the moment because Wiki doesn't support sub-path, nor branch - currentRefPath string // eg: "branch/main" - currentTreePath string // eg: "path/to/file" in the repo + currentRefSubURL string // eg: "branch/main" + currentTreePath string // eg: "path/to/file" in the repo } func NewRenderContextRepoWiki(ctx context.Context, repo *repo_model.Repository, opts ...RepoWikiOptions) *markup.RenderContext { diff --git a/models/repo/repo.go 
b/models/repo/repo.go index 25207cc28b..7814bb4876 100644 --- a/models/repo/repo.go +++ b/models/repo/repo.go @@ -376,8 +376,9 @@ func (repo *Repository) CommitLink(commitID string) (result string) { } // APIURL returns the repository API URL -func (repo *Repository) APIURL() string { - return setting.AppURL + "api/v1/repos/" + url.PathEscape(repo.OwnerName) + "/" + url.PathEscape(repo.Name) +func (repo *Repository) APIURL(ctxOpt ...context.Context) string { + ctx := util.OptionalArg(ctxOpt, context.TODO()) + return httplib.MakeAbsoluteURL(ctx, setting.AppSubURL+"/api/v1/repos/"+url.PathEscape(repo.OwnerName)+"/"+url.PathEscape(repo.Name)) } // GetCommitsCountCacheKey returns cache key used for commits count caching. diff --git a/models/unittest/testdb.go b/models/unittest/testdb.go index bd832348e7..116fdab496 100644 --- a/models/unittest/testdb.go +++ b/models/unittest/testdb.go @@ -5,6 +5,8 @@ package unittest import ( "context" + "database/sql" + "errors" "fmt" "os" "path/filepath" @@ -102,6 +104,101 @@ func mainTest(m *testing.M, testOptsArg ...*TestOptions) int { return exitStatus } +func ResetTestDatabase() (cleanup func(), err error) { + defer func() { + if cleanup == nil { + cleanup = func() {} + } + }() + + connOpts := db.GlobalConnOptions() + driverDefault, connStrDefault, err := db.ConnStrDefaultDatabase(connOpts) + if err != nil { + return nil, err + } + driverDatabase, connStrDatabase, err := db.ConnStr(connOpts) + if err != nil { + return nil, err + } + + if connOpts.Type.IsSQLite3() { + if !strings.HasSuffix(connOpts.SQLitePath, "-test.db") { + return nil, errors.New(`testing database file for sqlite3 must end in "-test.db"`) + } + _ = os.Remove(connOpts.SQLitePath) + err = os.MkdirAll(filepath.Dir(connOpts.SQLitePath), os.ModePerm) + if err != nil { + return nil, err + } + cleanup = func() { + _ = os.Remove(connOpts.SQLitePath) + _ = os.Remove(filepath.Dir(connOpts.SQLitePath)) + } + return cleanup, nil + } + + if 
!strings.Contains(connOpts.Database, "test") { + return nil, fmt.Errorf(`testing database name for %s must contain "test"`, connOpts.Database) + } + + quotedDbName := connOpts.Database + if connOpts.Type.IsMSSQL() { + quotedDbName = `[` + connOpts.Database + `]` + } + + sqlExec := func(sqlDB *sql.DB, sql string) error { + _, err := sqlDB.Exec(sql) + if err != nil { + return fmt.Errorf("failed to execute SQL %q: %w", sql, err) + } + return nil + } + + createDatabase := func() error { + sqlDB, err := sql.Open(driverDefault, connStrDefault) + if err != nil { + return err + } + defer sqlDB.Close() + if err = sqlExec(sqlDB, "DROP DATABASE IF EXISTS "+quotedDbName); err != nil { + return err + } + return sqlExec(sqlDB, "CREATE DATABASE "+quotedDbName) + } + if err = createDatabase(); err != nil { + return nil, err + } + + cleanup = func() { + sqlDB, err := sql.Open(driverDefault, connStrDefault) + if err != nil { + return + } + defer sqlDB.Close() + _, _ = sqlDB.Exec("DROP DATABASE IF EXISTS " + quotedDbName) + } + + createDatabaseSchema := func() error { + if !connOpts.Type.IsPostgreSQL() { + return nil + } + if connOpts.Schema == "" { + return nil + } + sqlDB, err := sql.Open(driverDatabase, connStrDatabase) + if err != nil { + return err + } + defer sqlDB.Close() + if err = sqlExec(sqlDB, "DROP SCHEMA IF EXISTS "+connOpts.Schema); err != nil { + return err + } + return sqlExec(sqlDB, "CREATE SCHEMA "+connOpts.Schema) + } + + return cleanup, createDatabaseSchema() +} + // FixturesOptions fixtures needs to be loaded options type FixturesOptions struct { Dir string @@ -110,11 +207,12 @@ type FixturesOptions struct { // CreateTestEngine creates a memory database and loads the fixture data from fixturesDir func CreateTestEngine(opts FixturesOptions) error { - x, err := xorm.NewEngine("sqlite3", "file::memory:?cache=shared&_txlock=immediate") + driver, connStr, err := db.ConnStr(db.ConnOptions{Type: "sqlite3", SQLitePath: ":memory:"}) + if err != nil { + return err + } + x, 
err := xorm.NewEngine(driver, connStr) if err != nil { - if strings.Contains(err.Error(), "unknown driver") { - return fmt.Errorf("sqlite3 requires: -tags sqlite,sqlite_unlock_notify\n%w", err) - } return err } x.SetMapper(names.GonicMapper{}) diff --git a/models/user/user.go b/models/user/user.go index 69b97e9b47..0e87d7b7b9 100644 --- a/models/user/user.go +++ b/models/user/user.go @@ -615,6 +615,7 @@ var ( "sitemap.xml", // search engine sitemap "ssh_info", // agit info "swagger.v1.json", + "openapi3.v1.json", "ghost", // reserved name for deleted users (id: -1) "gitea-actions", // gitea builtin user (id: -2) diff --git a/modules/git/catfile_batch_command.go b/modules/git/catfile_batch_command.go index 710561f045..4e18282bf3 100644 --- a/modules/git/catfile_batch_command.go +++ b/modules/git/catfile_batch_command.go @@ -7,8 +7,10 @@ import ( "context" "os" "path/filepath" + "strings" "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" ) @@ -39,6 +41,9 @@ func (b *catFileBatchCommand) getBatch() *catFileBatchCommunicator { } func (b *catFileBatchCommand) QueryContent(obj string) (*CatFileObject, BufferedReader, error) { + if strings.Contains(obj, "\n") { + setting.PanicInDevOrTesting("invalid object name with newline: %q", obj) + } _, err := b.getBatch().reqWriter.Write([]byte("contents " + obj + "\n")) if err != nil { return nil, nil, err @@ -51,6 +56,9 @@ func (b *catFileBatchCommand) QueryContent(obj string) (*CatFileObject, Buffered } func (b *catFileBatchCommand) QueryInfo(obj string) (*CatFileObject, error) { + if strings.Contains(obj, "\n") { + setting.PanicInDevOrTesting("invalid object name with newline: %q", obj) + } _, err := b.getBatch().reqWriter.Write([]byte("info " + obj + "\n")) if err != nil { return nil, err diff --git a/modules/git/catfile_batch_legacy.go b/modules/git/catfile_batch_legacy.go index 795fc4ce3d..595043d1d2 100644 --- a/modules/git/catfile_batch_legacy.go +++ 
b/modules/git/catfile_batch_legacy.go @@ -8,8 +8,10 @@ import ( "io" "os" "path/filepath" + "strings" "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" ) @@ -50,6 +52,9 @@ func (b *catFileBatchLegacy) getBatchCheck() *catFileBatchCommunicator { } func (b *catFileBatchLegacy) QueryContent(obj string) (*CatFileObject, BufferedReader, error) { + if strings.Contains(obj, "\n") { + setting.PanicInDevOrTesting("invalid object name with newline: %q", obj) + } _, err := io.WriteString(b.getBatchContent().reqWriter, obj+"\n") if err != nil { return nil, nil, err @@ -62,6 +67,9 @@ func (b *catFileBatchLegacy) QueryContent(obj string) (*CatFileObject, BufferedR } func (b *catFileBatchLegacy) QueryInfo(obj string) (*CatFileObject, error) { + if strings.Contains(obj, "\n") { + setting.PanicInDevOrTesting("invalid object name with newline: %q", obj) + } _, err := io.WriteString(b.getBatchCheck().reqWriter, obj+"\n") if err != nil { return nil, err diff --git a/modules/git/gitcmd/command.go b/modules/git/gitcmd/command.go index e9b51802fe..ee447dfd03 100644 --- a/modules/git/gitcmd/command.go +++ b/modules/git/gitcmd/command.go @@ -57,14 +57,12 @@ type Command struct { } func logArgSanitize(arg string) string { - if strings.Contains(arg, "://") && strings.Contains(arg, "@") { - return util.SanitizeCredentialURLs(arg) - } else if filepath.IsAbs(arg) { + if filepath.IsAbs(arg) { base := filepath.Base(arg) dir := filepath.Dir(arg) return ".../" + filepath.Join(filepath.Base(dir), base) } - return arg + return util.SanitizeCredentialURLs(arg) } func (c *Command) LogString() string { diff --git a/modules/git/gitcmd/command_test.go b/modules/git/gitcmd/command_test.go index 19ec02b808..1a9bfe7d75 100644 --- a/modules/git/gitcmd/command_test.go +++ b/modules/git/gitcmd/command_test.go @@ -109,7 +109,10 @@ func TestCommandString(t *testing.T) { assert.Equal(t, cmd.prog+` a "-m msg" "it's a test" "say \"hello\""`, 
cmd.LogString()) cmd = NewCommand("url: https://a:b@c/", "/root/dir-a/dir-b") - assert.Equal(t, cmd.prog+` "url: https://sanitized-credential@c/" .../dir-a/dir-b`, cmd.LogString()) + assert.Equal(t, cmd.prog+` "url: https://(masked)@c/" .../dir-a/dir-b`, cmd.LogString()) + + cmd = NewCommand("url: a:b@c/", "/root/dir-a/dir-b") + assert.Equal(t, cmd.prog+` "url: (masked)@c/" .../dir-a/dir-b`, cmd.LogString()) } func TestRunStdError(t *testing.T) { diff --git a/modules/gitrepo/compare_test.go b/modules/gitrepo/compare_test.go index 2d2af0934d..91ee32bed5 100644 --- a/modules/gitrepo/compare_test.go +++ b/modules/gitrepo/compare_test.go @@ -4,9 +4,15 @@ package gitrepo import ( + "path/filepath" + "strings" "testing" + "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/util" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) type mockRepository struct { @@ -17,6 +23,32 @@ func (r *mockRepository) RelativePath() string { return r.path } +func TestMergeBaseNoCommonHistory(t *testing.T) { + repoDir := filepath.Join(t.TempDir(), "repo.git") + require.NoError(t, gitcmd.NewCommand("init").AddDynamicArguments(repoDir).Run(t.Context())) + _, _, runErr := gitcmd.NewCommand("fast-import").WithDir(repoDir).WithStdinBytes([]byte(strings.TrimSpace(` +commit refs/heads/branch1 +committer User 1714310400 +0000 +data 12 +First commit +M 100644 inline file1.txt +data 12 +Hello from 1 + +commit refs/heads/branch2 +committer User 1714310400 +0000 +data 13 +Second commit +M 100644 inline file2.txt +data 12 +Hello from 2 +`))).RunStdString(t.Context()) + require.NoError(t, runErr) + mergeBase, err := MergeBase(t.Context(), &mockRepository{path: repoDir}, "branch1", "branch2") + assert.Empty(t, mergeBase) + assert.ErrorIs(t, err, util.ErrNotExist) +} + func TestRepoGetDivergingCommits(t *testing.T) { repo := &mockRepository{path: "repo1_bare"} do, err := GetDivergingCommits(t.Context(), repo, "master", "branch2") diff --git 
a/modules/gitrepo/merge.go b/modules/gitrepo/merge.go index 8d58e21c8d..5198392949 100644 --- a/modules/gitrepo/merge.go +++ b/modules/gitrepo/merge.go @@ -9,13 +9,17 @@ import ( "strings" "code.gitea.io/gitea/modules/git/gitcmd" + "code.gitea.io/gitea/modules/util" ) // MergeBase checks and returns merge base of two commits. func MergeBase(ctx context.Context, repo Repository, baseCommitID, headCommitID string) (string, error) { - mergeBase, _, err := RunCmdString(ctx, repo, gitcmd.NewCommand("merge-base"). + mergeBase, stderr, err := RunCmdString(ctx, repo, gitcmd.NewCommand("merge-base"). AddDashesAndList(baseCommitID, headCommitID)) if err != nil { + if gitcmd.IsErrorExitCode(err, 1) && strings.TrimSpace(stderr) == "" { + return "", util.NewNotExistErrorf("merge-base for %s and %s doesn't exist", baseCommitID, headCommitID) + } return "", fmt.Errorf("get merge-base of %s and %s failed: %w", baseCommitID, headCommitID, err) } return strings.TrimSpace(mergeBase), nil diff --git a/modules/httplib/serve.go b/modules/httplib/serve.go index 8abf6f1887..6c2fe9b0d6 100644 --- a/modules/httplib/serve.go +++ b/modules/httplib/serve.go @@ -37,6 +37,42 @@ type ServeHeaderOptions struct { LastModified time.Time } +const ( + // Disable JS execution on the same origin, since we serve the file from the same origin as Gitea server. + // This rule can be relaxed in the future as long as it is properly sandboxed. + // "style-src" is for SVG inline styles (from Display SVG files as images instead of text #14101) + serveHeaderCspDefault = "default-src 'none'; style-src 'unsafe-inline'; sandbox" + + // No sandbox attribute for PDF as it breaks rendering in at least Safari. + // This should generally be safe as scripts inside PDF can not escape the PDF document. + // See https://bugs.chromium.org/p/chromium/issues/detail?id=413851 for more discussion. 
+ // HINT: PDF-RENDER-SANDBOX: PDF won't render in sandboxed context + serveHeaderCspPdf = "default-src 'none'; style-src 'unsafe-inline'" + + // For audios and videos, actually it doesn't really need CSP (just like Gitea <= 1.25) + serveHeaderCspAudioVideo = "" +) + +func serveSetHeaderContentRelated(w http.ResponseWriter, contentType string) { + header := w.Header() + contentType = util.IfZero(contentType, typesniffer.MimeTypeApplicationOctetStream) + header.Set("Content-Type", contentType) + header.Set("X-Content-Type-Options", "nosniff") + + csp := serveHeaderCspDefault + if strings.HasPrefix(contentType, "application/pdf") { + csp = serveHeaderCspPdf + } + if strings.HasPrefix(contentType, "video/") || strings.HasPrefix(contentType, "audio/") { + csp = serveHeaderCspAudioVideo + } + if csp != "" { + header.Set("Content-Security-Policy", csp) + } else { + header.Del("Content-Security-Policy") + } +} + // ServeSetHeaders sets necessary content serve headers func ServeSetHeaders(w http.ResponseWriter, opts ServeHeaderOptions) { header := w.Header() @@ -46,24 +82,11 @@ func ServeSetHeaders(w http.ResponseWriter, opts ServeHeaderOptions) { w.Header().Add(gzhttp.HeaderNoCompression, "1") } - contentType := util.IfZero(opts.ContentType, typesniffer.MimeTypeApplicationOctetStream) - header.Set("Content-Type", contentType) - header.Set("X-Content-Type-Options", "nosniff") + serveSetHeaderContentRelated(w, opts.ContentType) if opts.ContentLength != nil { header.Set("Content-Length", strconv.FormatInt(*opts.ContentLength, 10)) } - - // Disable script execution of HTML/SVG files, since we serve the file from the same origin as Gitea server - header.Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; sandbox") - if strings.Contains(contentType, "application/pdf") { - // no sandbox attribute for PDF as it breaks rendering in at least safari. 
this - // should generally be safe as scripts inside PDF can not escape the PDF document - // see https://bugs.chromium.org/p/chromium/issues/detail?id=413851 for more discussion - // HINT: PDF-RENDER-SANDBOX: PDF won't render in sandboxed context - header.Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'") - } - if opts.Filename != "" && opts.ContentDisposition != "" { header.Set("Content-Disposition", encodeContentDisposition(opts.ContentDisposition, path.Base(opts.Filename))) header.Set("Access-Control-Expose-Headers", "Content-Disposition") diff --git a/modules/httplib/serve_test.go b/modules/httplib/serve_test.go index 38cf4c197f..2a245300b0 100644 --- a/modules/httplib/serve_test.go +++ b/modules/httplib/serve_test.go @@ -12,6 +12,8 @@ import ( "strings" "testing" + "code.gitea.io/gitea/modules/typesniffer" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -106,3 +108,28 @@ func TestServeUserContentByFile(t *testing.T) { test(t, http.StatusPartialContent, data[1:]) }) } + +func TestServeSetHeaderContentRelated(t *testing.T) { + cases := []struct { + contentType string + csp string + }{ + {"", serveHeaderCspDefault}, + {"any", serveHeaderCspDefault}, + {"application/pdf", serveHeaderCspPdf}, + {"application/pdf; other", serveHeaderCspPdf}, + {"audio/mp4", serveHeaderCspAudioVideo}, + {"video/ogg; other", serveHeaderCspAudioVideo}, + {typesniffer.MimeTypeImageSvg, serveHeaderCspDefault}, + } + for _, c := range cases { + w := httptest.NewRecorder() + serveSetHeaderContentRelated(w, c.contentType) + csp := w.Header().Get("Content-Security-Policy") + assert.Equal(t, c.csp, csp, "content-type: %s", c.contentType) + assert.Equal(t, "nosniff", w.Header().Get("X-Content-Type-Options")) // it should always be there + } + + // make sure sandboxed + require.Contains(t, serveHeaderCspDefault, "; sandbox") +} diff --git a/modules/indexer/code/elasticsearch/elasticsearch.go 
b/modules/indexer/code/elasticsearch/elasticsearch.go index 9d170528ad..f9a3d4156d 100644 --- a/modules/indexer/code/elasticsearch/elasticsearch.go +++ b/modules/indexer/code/elasticsearch/elasticsearch.go @@ -18,8 +18,7 @@ import ( "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/indexer" "code.gitea.io/gitea/modules/indexer/code/internal" - indexer_internal "code.gitea.io/gitea/modules/indexer/internal" - inner_elasticsearch "code.gitea.io/gitea/modules/indexer/internal/elasticsearch" + es "code.gitea.io/gitea/modules/indexer/internal/elasticsearch" "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" @@ -28,23 +27,15 @@ import ( "code.gitea.io/gitea/modules/util" "github.com/go-enry/go-enry/v2" - "github.com/olivere/elastic/v7" ) -const ( - esRepoIndexerLatestVersion = 3 - // multi-match-types, currently only 2 types are used - // Reference: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-multi-match-query.html#multi-match-types - esMultiMatchTypeBestFields = "best_fields" - esMultiMatchTypePhrasePrefix = "phrase_prefix" -) +const esRepoIndexerLatestVersion = 3 var _ internal.Indexer = &Indexer{} // Indexer implements Indexer interface type Indexer struct { - inner *inner_elasticsearch.Indexer - indexer_internal.Indexer // do not composite inner_elasticsearch.Indexer directly to avoid exposing too much + *es.Indexer } func (b *Indexer) SupportedSearchModes() []indexer.SearchMode { @@ -53,12 +44,7 @@ func (b *Indexer) SupportedSearchModes() []indexer.SearchMode { // NewIndexer creates a new elasticsearch indexer func NewIndexer(url, indexerName string) *Indexer { - inner := inner_elasticsearch.NewIndexer(url, indexerName, esRepoIndexerLatestVersion, defaultMapping) - indexer := &Indexer{ - inner: inner, - Indexer: inner, - } - return indexer + return &Indexer{Indexer: es.NewIndexer(url, indexerName, esRepoIndexerLatestVersion, defaultMapping)} } const ( @@ -138,7 +124,7 
@@ const ( }` ) -func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, sha string, update internal.FileUpdate, repo *repo_model.Repository) ([]elastic.BulkableRequest, error) { +func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, sha string, update internal.FileUpdate, repo *repo_model.Repository) ([]es.BulkOp, error) { // Ignore vendored files in code search if setting.Indexer.ExcludeVendored && analyze.IsVendor(update.Filename) { return nil, nil @@ -157,8 +143,9 @@ func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, } } + id := internal.FilenameIndexerID(repo.ID, update.Filename) if size > setting.Indexer.MaxIndexerFileSize { - return []elastic.BulkableRequest{b.addDelete(update.Filename, repo)}, nil + return []es.BulkOp{es.DeleteOp(id)}, nil } info, batchReader, err := catFileBatch.QueryContent(update.BlobSha) @@ -177,33 +164,24 @@ func (b *Indexer) addUpdate(ctx context.Context, catFileBatch git.CatFileBatch, if _, err = batchReader.Discard(1); err != nil { return nil, err } - id := internal.FilenameIndexerID(repo.ID, update.Filename) - return []elastic.BulkableRequest{ - elastic.NewBulkIndexRequest(). - Index(b.inner.VersionedIndexName()). - Id(id). 
- Doc(map[string]any{ - "repo_id": repo.ID, - "filename": update.Filename, - "content": string(charset.ToUTF8DropErrors(fileContents)), - "commit_id": sha, - "language": analyze.GetCodeLanguage(update.Filename, fileContents), - "updated_at": timeutil.TimeStampNow(), - }), - }, nil + return []es.BulkOp{es.IndexOp(id, map[string]any{ + "repo_id": repo.ID, + "filename": update.Filename, + "content": string(charset.ToUTF8DropErrors(fileContents)), + "commit_id": sha, + "language": analyze.GetCodeLanguage(update.Filename, fileContents), + "updated_at": timeutil.TimeStampNow(), + })}, nil } -func (b *Indexer) addDelete(filename string, repo *repo_model.Repository) elastic.BulkableRequest { - id := internal.FilenameIndexerID(repo.ID, filename) - return elastic.NewBulkDeleteRequest(). - Index(b.inner.VersionedIndexName()). - Id(id) +func (b *Indexer) addDelete(filename string, repo *repo_model.Repository) es.BulkOp { + return es.DeleteOp(internal.FilenameIndexerID(repo.ID, filename)) } // Index will save the index data func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha string, changes *internal.RepoChanges) error { - reqs := make([]elastic.BulkableRequest, 0) + ops := make([]es.BulkOp, 0) if len(changes.Updates) > 0 { batch, err := gitrepo.NewBatch(ctx, repo) if err != nil { @@ -212,29 +190,25 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st defer batch.Close() for _, update := range changes.Updates { - updateReqs, err := b.addUpdate(ctx, batch, sha, update, repo) + updateOps, err := b.addUpdate(ctx, batch, sha, update, repo) if err != nil { return err } - if len(updateReqs) > 0 { - reqs = append(reqs, updateReqs...) + if len(updateOps) > 0 { + ops = append(ops, updateOps...) 
} } } for _, filename := range changes.RemovedFilenames { - reqs = append(reqs, b.addDelete(filename, repo)) + ops = append(ops, b.addDelete(filename, repo)) } - if len(reqs) > 0 { + if len(ops) > 0 { esBatchSize := 50 - for i := 0; i < len(reqs); i += esBatchSize { - _, err := b.inner.Client.Bulk(). - Index(b.inner.VersionedIndexName()). - Add(reqs[i:min(i+esBatchSize, len(reqs))]...). - Do(ctx) - if err != nil { + for i := 0; i < len(ops); i += esBatchSize { + if err := b.Bulk(ctx, ops[i:min(i+esBatchSize, len(ops))]); err != nil { return err } } @@ -246,33 +220,21 @@ func (b *Indexer) Index(ctx context.Context, repo *repo_model.Repository, sha st func (b *Indexer) Delete(ctx context.Context, repoID int64) error { if err := b.doDelete(ctx, repoID); err != nil { // Maybe there is a conflict during the delete operation, so we should retry after a refresh - log.Warn("Deletion of entries of repo %v within index %v was erroneous. Trying to refresh index before trying again", repoID, b.inner.VersionedIndexName(), err) - if err := b.refreshIndex(ctx); err != nil { + log.Warn("Deletion of entries of repo %v within index %v was erroneous: %v. 
Trying to refresh index before trying again", repoID, b.VersionedIndexName(), err) + if err := b.Refresh(ctx); err != nil { return err } if err := b.doDelete(ctx, repoID); err != nil { - log.Error("Could not delete entries of repo %v within index %v", repoID, b.inner.VersionedIndexName()) + log.Error("Could not delete entries of repo %v within index %v", repoID, b.VersionedIndexName()) return err } } return nil } -func (b *Indexer) refreshIndex(ctx context.Context) error { - if _, err := b.inner.Client.Refresh(b.inner.VersionedIndexName()).Do(ctx); err != nil { - log.Error("Error while trying to refresh index %v", b.inner.VersionedIndexName(), err) - return err - } - - return nil -} - // Delete entries by repoId func (b *Indexer) doDelete(ctx context.Context, repoID int64) error { - _, err := b.inner.Client.DeleteByQuery(b.inner.VersionedIndexName()). - Query(elastic.NewTermsQuery("repo_id", repoID)). - Do(ctx) - return err + return b.DeleteByQuery(ctx, es.TermsQuery("repo_id", repoID)) } // contentMatchIndexPos find words positions for start and the following end on content. 
It will @@ -291,10 +253,10 @@ func contentMatchIndexPos(content, start, end string) (int, int) { return startIdx, (startIdx + len(start) + endIdx + len(end)) - 9 // remove the length since we give Content the original data } -func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { +func convertResult(searchResult *es.SearchResponse, kw string, pageSize int) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { hits := make([]*internal.SearchResult, 0, pageSize) - for _, hit := range searchResult.Hits.Hits { - repoID, fileName := internal.ParseIndexerID(hit.Id) + for _, hit := range searchResult.Hits { + repoID, fileName := internal.ParseIndexerID(hit.ID) res := make(map[string]any) if err := json.Unmarshal(hit.Source, &res); err != nil { return 0, nil, nil, err @@ -333,111 +295,111 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int) }) } - return searchResult.TotalHits(), hits, extractAggs(searchResult), nil + return searchResult.Total, hits, extractAggs(searchResult), nil } -func extractAggs(searchResult *elastic.SearchResult) []*internal.SearchResultLanguages { - var searchResultLanguages []*internal.SearchResultLanguages - agg, found := searchResult.Aggregations.Terms("language") - if found { - searchResultLanguages = make([]*internal.SearchResultLanguages, 0, 10) - - for _, bucket := range agg.Buckets { - searchResultLanguages = append(searchResultLanguages, &internal.SearchResultLanguages{ - Language: bucket.Key.(string), - Color: enry.GetColor(bucket.Key.(string)), - Count: int(bucket.DocCount), - }) +func extractAggs(searchResult *es.SearchResponse) []*internal.SearchResultLanguages { + buckets, found := searchResult.Aggregations["language"] + if !found { + return nil + } + searchResultLanguages := make([]*internal.SearchResultLanguages, 0, 10) + for _, bucket := range buckets { + // language is mapped 
as keyword so the key is always a string; if the + // mapping ever changes, skip rather than emit an empty-language bucket. + key, ok := bucket.Key.(string) + if !ok { + continue } + searchResultLanguages = append(searchResultLanguages, &internal.SearchResultLanguages{ + Language: key, + Color: enry.GetColor(key), + Count: int(bucket.DocCount), + }) } return searchResultLanguages } // Search searches for codes and language stats by given conditions. func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int64, []*internal.SearchResult, []*internal.SearchResultLanguages, error) { - var contentQuery elastic.Query searchMode := util.IfZero(opts.SearchMode, b.SupportedSearchModes()[0].ModeValue) + contentQuery := es.Query(es.NewMultiMatchQuery(opts.Keyword, "content").Type(es.MultiMatchTypeBestFields).Operator("and")) if searchMode == indexer.SearchModeExact { - // 1.21 used NewMultiMatchQuery().Type(esMultiMatchTypePhrasePrefix), but later releases changed to NewMatchPhraseQuery - contentQuery = elastic.NewMatchPhraseQuery("content", opts.Keyword) - } else /* words */ { - contentQuery = elastic.NewMultiMatchQuery("content", opts.Keyword).Type(esMultiMatchTypeBestFields).Operator("and") + contentQuery = es.MatchPhraseQuery("content", opts.Keyword) } - kwQuery := elastic.NewBoolQuery().Should( + kwQuery := es.NewBoolQuery().Should( contentQuery, - elastic.NewMultiMatchQuery(opts.Keyword, "filename^10").Type(esMultiMatchTypePhrasePrefix), + es.NewMultiMatchQuery(opts.Keyword, "filename^10").Type(es.MultiMatchTypePhrasePrefix), ) - query := elastic.NewBoolQuery() - query = query.Must(kwQuery) + query := es.NewBoolQuery().Must(kwQuery) if len(opts.RepoIDs) > 0 { - repoStrs := make([]any, 0, len(opts.RepoIDs)) - for _, repoID := range opts.RepoIDs { - repoStrs = append(repoStrs, repoID) - } - repoQuery := elastic.NewTermsQuery("repo_id", repoStrs...) 
- query = query.Must(repoQuery) + query.Must(es.TermsQuery("repo_id", es.ToAnySlice(opts.RepoIDs)...)) } - var ( - start, pageSize = opts.GetSkipTake() - kw = "" + opts.Keyword + "" - aggregation = elastic.NewTermsAggregation().Field("language").Size(10).OrderByCountDesc() - ) + start, pageSize := opts.GetSkipTake() + kw := "" + opts.Keyword + "" + languageAggs := map[string]any{ + "language": map[string]any{ + "terms": map[string]any{ + "field": "language", + "size": 10, + "order": map[string]any{"_count": "desc"}, + }, + }, + } + // number_of_fragments=0 returns the full highlighted content (no fragmentation). + highlight := map[string]any{ + "fields": map[string]any{ + "content": map[string]any{}, + "filename": map[string]any{}, + }, + "number_of_fragments": 0, + "type": "fvh", + } + sort := []es.SortField{ + {Field: "_score", Desc: true}, + {Field: "updated_at", Desc: false}, + } if len(opts.Language) == 0 { - searchResult, err := b.inner.Client.Search(). - Index(b.inner.VersionedIndexName()). - Aggregation("language", aggregation). - Query(query). - Highlight( - elastic.NewHighlight(). - Field("content"). - Field("filename"). - NumOfFragments(0). // return all highlighting content on fragments - HighlighterType("fvh"), - ). - Sort("_score", false). - Sort("updated_at", true). - From(start).Size(pageSize). - Do(ctx) + resp, err := b.Indexer.Search(ctx, es.SearchRequest{ + Query: query, + Sort: sort, + From: start, + Size: pageSize, + TrackTotal: true, + Aggregations: languageAggs, + Highlight: highlight, + }) if err != nil { return 0, nil, nil, err } - - return convertResult(searchResult, kw, pageSize) + return convertResult(resp, kw, pageSize) } - langQuery := elastic.NewMatchQuery("language", opts.Language) - countResult, err := b.inner.Client.Search(). - Index(b.inner.VersionedIndexName()). - Aggregation("language", aggregation). - Query(query). - Size(0). 
// We only need stats information - Do(ctx) + countResp, err := b.Indexer.Search(ctx, es.SearchRequest{ + Query: query, + Size: 0, // stats only + TrackTotal: true, + Aggregations: languageAggs, + }) if err != nil { return 0, nil, nil, err } - query = query.Must(langQuery) - searchResult, err := b.inner.Client.Search(). - Index(b.inner.VersionedIndexName()). - Query(query). - Highlight( - elastic.NewHighlight(). - Field("content"). - Field("filename"). - NumOfFragments(0). // return all highlighting content on fragments - HighlighterType("fvh"), - ). - Sort("_score", false). - Sort("updated_at", true). - From(start).Size(pageSize). - Do(ctx) + query.Must(es.MatchQuery("language", opts.Language)) + resp, err := b.Indexer.Search(ctx, es.SearchRequest{ + Query: query, + Sort: sort, + From: start, + Size: pageSize, + TrackTotal: true, + Highlight: highlight, + }) if err != nil { return 0, nil, nil, err } - total, hits, _, err := convertResult(searchResult, kw, pageSize) - - return total, hits, extractAggs(countResult), err + total, hits, _, err := convertResult(resp, kw, pageSize) + return total, hits, extractAggs(countResp), err } diff --git a/modules/indexer/code/indexer_test.go b/modules/indexer/code/indexer_test.go index a884ab733a..a8baf44edc 100644 --- a/modules/indexer/code/indexer_test.go +++ b/modules/indexer/code/indexer_test.go @@ -8,6 +8,7 @@ import ( "os" "slices" "testing" + "time" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unittest" @@ -39,6 +40,16 @@ func TestMain(m *testing.M) { func testIndexer(name string, t *testing.T, indexer internal.Indexer) { t.Run(name, func(t *testing.T) { assert.NoError(t, setupRepositoryIndexes(t.Context(), indexer)) + // Wait for the index to catch up: ES/OpenSearch make writes visible + // only after a refresh (default interval: 1s). Bleve is synchronous + // and passes on the first iteration. 
+ require.Eventually(t, func() bool { + total, _, _, err := indexer.Search(t.Context(), &internal.SearchOptions{ + Keyword: "Description", + Paginator: &db.ListOptions{Page: 1, PageSize: 1}, + }) + return err == nil && total > 0 + }, 10*time.Second, 100*time.Millisecond, "index did not become searchable") keywords := []struct { RepoIDs []int64 diff --git a/modules/indexer/internal/elasticsearch/indexer.go b/modules/indexer/internal/elasticsearch/indexer.go index 265ce26585..eaeb89efaa 100644 --- a/modules/indexer/internal/elasticsearch/indexer.go +++ b/modules/indexer/internal/elasticsearch/indexer.go @@ -4,52 +4,80 @@ package elasticsearch import ( + "bytes" "context" - "errors" "fmt" + "io" + "net" + "net/http" + "net/url" + "slices" + "strconv" + "strings" + "time" "code.gitea.io/gitea/modules/indexer/internal" - - "github.com/olivere/elastic/v7" + "code.gitea.io/gitea/modules/json" ) var _ internal.Indexer = &Indexer{} -// Indexer represents a basic elasticsearch indexer implementation +// Indexer is a narrow wrapper around an Elasticsearch/OpenSearch cluster. +// It targets the REST subset shared by Elasticsearch 7/8/9 and OpenSearch 3. type Indexer struct { - Client *elastic.Client + client *http.Client + base string // base URL with trailing slash, no userinfo + user string + pass string - url string indexName string version int mapping string } -func NewIndexer(url, indexName string, version int, mapping string) *Indexer { +// NewIndexer builds an Indexer. The connection is opened by Init. +func NewIndexer(rawURL, indexName string, version int, mapping string) *Indexer { return &Indexer{ - url: url, + base: rawURL, indexName: indexName, version: version, mapping: mapping, } } -// Init initializes the indexer +// Init connects and creates the versioned index if missing, returning true if it already existed. 
func (i *Indexer) Init(ctx context.Context) (bool, error) { - if i == nil { - return false, errors.New("cannot init nil indexer") - } - if i.Client != nil { - return false, errors.New("indexer is already initialized") - } - - client, err := i.initClient() + parsed, err := url.Parse(i.base) if err != nil { - return false, err + return false, fmt.Errorf("parse elasticsearch url: %w", err) + } + if parsed.User != nil { + i.user = parsed.User.Username() + i.pass, _ = parsed.User.Password() + parsed.User = nil + } + base := parsed.String() + if !strings.HasSuffix(base, "/") { + base += "/" + } + i.base = base + // No client-level Timeout: bulk/_delete_by_query can legitimately run for + // minutes on large repos. Per-request deadlines come from the caller's ctx; + // transport-level timeouts cover stalled connects/handshakes/headers so a + // half-open server cannot wedge the indexer indefinitely. + i.client = &http.Client{ + Transport: &http.Transport{ + Proxy: http.ProxyFromEnvironment, + DialContext: (&net.Dialer{Timeout: 30 * time.Second, KeepAlive: 30 * time.Second}).DialContext, + TLSHandshakeTimeout: 10 * time.Second, + ResponseHeaderTimeout: 30 * time.Second, + ExpectContinueTimeout: 1 * time.Second, + IdleConnTimeout: 90 * time.Second, + MaxIdleConns: 100, + }, } - i.Client = client - exists, err := i.Client.IndexExists(i.VersionedIndexName()).Do(ctx) + exists, err := i.indexExists(ctx, i.VersionedIndexName()) if err != nil { return false, err } @@ -61,34 +89,321 @@ func (i *Indexer) Init(ctx context.Context) (bool, error) { return false, err } - return exists, nil + return exists, nil } -// Ping checks if the indexer is available +// Ping returns an error when the cluster is unusable (status != green/yellow). 
func (i *Indexer) Ping(ctx context.Context) error { - if i == nil { - return errors.New("cannot ping nil indexer") + var body struct { + Status string `json:"status"` } - if i.Client == nil { - return errors.New("indexer is not initialized") - } - - resp, err := i.Client.ClusterHealth().Do(ctx) - if err != nil { + if err := i.doJSON(ctx, http.MethodGet, "_cluster/health", nil, &body); err != nil { return err } - if resp.Status != "green" && resp.Status != "yellow" { - // It's healthy if the status is green, and it's available if the status is yellow, - // see https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-health.html - return fmt.Errorf("status of elasticsearch cluster is %s", resp.Status) + // Healthy = green; usable = yellow. Red is unusable. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-health.html + if body.Status != "green" && body.Status != "yellow" { + return fmt.Errorf("status of elasticsearch cluster is %s", body.Status) } return nil } -// Close closes the indexer +// Close releases idle HTTP connections held by the client. func (i *Indexer) Close() { - if i == nil { + if i == nil || i.client == nil { return } - i.Client = nil + i.client.CloseIdleConnections() + i.client = nil +} + +// Bulk submits index/delete ops. Returns the first item-level failure, if any. 
+func (i *Indexer) Bulk(ctx context.Context, ops []BulkOp) error { + if len(ops) == 0 { + return nil + } + + index := i.VersionedIndexName() + var buf bytes.Buffer + buf.Grow(len(ops) * 256) + for _, op := range ops { + meta := map[string]any{op.action: map[string]any{"_index": index, "_id": op.id}} + if err := writeJSONLine(&buf, meta); err != nil { + return err + } + if op.action == bulkActionIndex { + if err := writeJSONLine(&buf, op.doc); err != nil { + return err + } + } + } + + res, err := i.do(ctx, http.MethodPost, urlPath(index, "_bulk"), "application/x-ndjson", bytes.NewReader(buf.Bytes())) + if err != nil { + return err + } + defer drainAndClose(res) + + var body struct { + Errors bool `json:"errors"` + Items []map[string]struct { + Status int `json:"status"` + Error json.Value `json:"error"` + } `json:"items"` + } + if err := json.NewDecoder(res.Body).Decode(&body); err != nil { + return err + } + if !body.Errors { + return nil + } + return firstBulkError(body.Items) +} + +// firstBulkError returns the first item-level failure in a bulk response. +// Each items entry is a single-key map ({"index": {...}} or {"delete": {...}}). +// Delete-of-missing (404) is idempotent and not reported. +func firstBulkError(items []map[string]struct { + Status int `json:"status"` + Error json.Value `json:"error"` +}, +) error { + for _, item := range items { + for action, result := range item { + if action == bulkActionDelete && result.Status == http.StatusNotFound { + continue + } + if result.Status >= 300 { + return fmt.Errorf("bulk %s failed (status %d): %s", action, result.Status, string(result.Error)) + } + } + } + return nil +} + +// Index writes a single document. +func (i *Indexer) Index(ctx context.Context, id string, doc any) error { + body, err := json.Marshal(doc) + if err != nil { + return err + } + return i.doJSON(ctx, http.MethodPut, urlPath(i.VersionedIndexName(), "_doc", id), bytes.NewReader(body), nil) +} + +// Delete removes a single document by id. 
Missing ids are not an error. +func (i *Indexer) Delete(ctx context.Context, id string) error { + res, err := i.do(ctx, http.MethodDelete, urlPath(i.VersionedIndexName(), "_doc", id), "", nil, http.StatusNotFound) + if err != nil { + return err + } + drainAndClose(res) + return nil +} + +// DeleteByQuery removes every document matching the query. +func (i *Indexer) DeleteByQuery(ctx context.Context, query Query) error { + body, err := json.Marshal(map[string]any{"query": query.querySource()}) + if err != nil { + return err + } + return i.doJSON(ctx, http.MethodPost, urlPath(i.VersionedIndexName(), "_delete_by_query"), bytes.NewReader(body), nil) +} + +// Refresh forces a refresh so recent writes are searchable. +func (i *Indexer) Refresh(ctx context.Context) error { + return i.doJSON(ctx, http.MethodPost, urlPath(i.VersionedIndexName(), "_refresh"), nil, nil) +} + +// Search runs a search request and decodes the reply. +func (i *Indexer) Search(ctx context.Context, req SearchRequest) (*SearchResponse, error) { + body := map[string]any{} + if req.Query != nil { + body["query"] = req.Query.querySource() + } + if len(req.Sort) > 0 { + sorts := make([]map[string]any, len(req.Sort)) + for idx, s := range req.Sort { + sorts[idx] = s.source() + } + body["sort"] = sorts + } + if req.From > 0 { + body["from"] = req.From + } + body["size"] = req.Size + if len(req.Aggregations) > 0 { + body["aggs"] = req.Aggregations + } + if len(req.Highlight) > 0 { + body["highlight"] = req.Highlight + } + + payload, err := json.Marshal(body) + if err != nil { + return nil, err + } + + // Default track_total_hits is 10000 (capped count); send it explicitly so + // callers can choose between exact totals (true) and skipping counting (false). 
+ path := urlPath(i.VersionedIndexName(), "_search") + "?track_total_hits=" + strconv.FormatBool(req.TrackTotal) + res, err := i.do(ctx, http.MethodPost, path, "application/json", bytes.NewReader(payload)) + if err != nil { + return nil, err + } + defer drainAndClose(res) + return decodeSearchResponse(res.Body) +} + +func (i *Indexer) indexExists(ctx context.Context, name string) (bool, error) { + res, err := i.do(ctx, http.MethodHead, urlPath(name), "", nil, http.StatusNotFound) + if err != nil { + return false, err + } + drainAndClose(res) + return res.StatusCode == http.StatusOK, nil +} + +func (i *Indexer) createIndex(ctx context.Context) error { + var body struct { + Acknowledged bool `json:"acknowledged"` + } + if err := i.doJSON(ctx, http.MethodPut, urlPath(i.VersionedIndexName()), bytes.NewBufferString(i.mapping), &body); err != nil { + return fmt.Errorf("create index %s: %w", i.VersionedIndexName(), err) + } + if !body.Acknowledged { + return fmt.Errorf("create index %s not acknowledged", i.VersionedIndexName()) + } + + i.checkOldIndexes(ctx) + return nil +} + +// do sends a request and returns the response. Status >= 300 is turned into +// an error unless the status appears in okStatus. The caller closes Body. 
+func (i *Indexer) do(ctx context.Context, method, path, contentType string, body io.Reader, okStatus ...int) (*http.Response, error) { + req, err := http.NewRequestWithContext(ctx, method, i.base+path, body) + if err != nil { + return nil, err + } + if contentType != "" { + req.Header.Set("Content-Type", contentType) + } + if i.user != "" || i.pass != "" { + req.SetBasicAuth(i.user, i.pass) + } + res, err := i.client.Do(req) + if err != nil { + return nil, err + } + if res.StatusCode >= 300 && !slices.Contains(okStatus, res.StatusCode) { + msg := readErrBody(res) + res.Body.Close() + return nil, fmt.Errorf("%s %s: %s", method, path, msg) + } + return res, nil +} + +// doJSON sends a request with a JSON body and, when out is non-nil, decodes +// the JSON response into it. +func (i *Indexer) doJSON(ctx context.Context, method, path string, body io.Reader, out any) error { + contentType := "" + if body != nil { + contentType = "application/json" + } + res, err := i.do(ctx, method, path, contentType, body) + if err != nil { + return err + } + defer drainAndClose(res) + if out == nil { + return nil + } + return json.NewDecoder(res.Body).Decode(out) +} + +// drainAndClose discards any unread response body before closing so the +// underlying TCP connection can be reused for keep-alive. +func drainAndClose(res *http.Response) { + _, _ = io.Copy(io.Discard, res.Body) + res.Body.Close() +} + +func writeJSONLine(buf *bytes.Buffer, v any) error { + enc, err := json.Marshal(v) + if err != nil { + return err + } + buf.Write(enc) + buf.WriteByte('\n') + return nil +} + +// readErrBody reads up to 4 KiB of an error response and drains the rest so +// the underlying connection can be reused (keep-alive needs Body fully read). 
+func readErrBody(res *http.Response) string { + const limit = 4 << 10 + b, _ := io.ReadAll(io.LimitReader(res.Body, limit)) + _, _ = io.Copy(io.Discard, res.Body) + return fmt.Sprintf("status %d: %s", res.StatusCode, bytes.TrimSpace(b)) +} + +func decodeSearchResponse(r io.Reader) (*SearchResponse, error) { + var raw struct { + Hits struct { + Total struct { + Value int64 `json:"value"` + } `json:"total"` + Hits []struct { + ID string `json:"_id"` + Score float64 `json:"_score"` + Source json.Value `json:"_source"` + Highlight map[string][]string `json:"highlight"` + } `json:"hits"` + } `json:"hits"` + Aggregations map[string]struct { + Buckets []struct { + Key any `json:"key"` + DocCount int64 `json:"doc_count"` + } `json:"buckets"` + } `json:"aggregations"` + } + if err := json.NewDecoder(r).Decode(&raw); err != nil { + return nil, err + } + + resp := &SearchResponse{ + Total: raw.Hits.Total.Value, + Hits: make([]SearchHit, 0, len(raw.Hits.Hits)), + } + for _, h := range raw.Hits.Hits { + resp.Hits = append(resp.Hits, SearchHit{ + ID: h.ID, + Score: h.Score, + Source: h.Source, + Highlight: h.Highlight, + }) + } + if len(raw.Aggregations) > 0 { + resp.Aggregations = make(map[string][]AggBucket, len(raw.Aggregations)) + for name, agg := range raw.Aggregations { + buckets := make([]AggBucket, len(agg.Buckets)) + for idx, b := range agg.Buckets { + buckets[idx] = AggBucket{Key: b.Key, DocCount: b.DocCount} + } + resp.Aggregations[name] = buckets + } + } + return resp, nil +} + +// urlPath joins path segments with `/` and percent-escapes each. 
+func urlPath(segments ...string) string { + var b bytes.Buffer + for idx, s := range segments { + if idx > 0 { + b.WriteByte('/') + } + b.WriteString(url.PathEscape(s)) + } + return b.String() } diff --git a/modules/indexer/internal/elasticsearch/indexer_test.go b/modules/indexer/internal/elasticsearch/indexer_test.go new file mode 100644 index 0000000000..774d6fbd6c --- /dev/null +++ b/modules/indexer/internal/elasticsearch/indexer_test.go @@ -0,0 +1,44 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package elasticsearch + +import ( + "os" + "strings" + "testing" + + "github.com/stretchr/testify/require" +) + +func newRealIndexer(t *testing.T) *Indexer { + t.Helper() + url := "http://elasticsearch:9200" + if os.Getenv("CI") == "" { + url = os.Getenv("TEST_ELASTICSEARCH_URL") + if url == "" { + t.Skip("TEST_ELASTICSEARCH_URL not set and not running in CI") + } + } + indexName := "gitea_test_" + strings.ReplaceAll(strings.ToLower(t.Name()), "/", "_") + ix := NewIndexer(url, indexName, 1, `{"mappings":{"properties":{"x":{"type":"keyword"}}}}`) + _, err := ix.Init(t.Context()) + require.NoError(t, err) + t.Cleanup(ix.Close) + return ix +} + +func TestPing(t *testing.T) { + ix := newRealIndexer(t) + require.NoError(t, ix.Ping(t.Context())) +} + +func TestDeleteSwallows404(t *testing.T) { + ix := newRealIndexer(t) + require.NoError(t, ix.Delete(t.Context(), "missing-id")) +} + +func TestBulkAcceptsDelete404(t *testing.T) { + ix := newRealIndexer(t) + require.NoError(t, ix.Bulk(t.Context(), []BulkOp{DeleteOp("missing-id")})) +} diff --git a/modules/indexer/internal/elasticsearch/query.go b/modules/indexer/internal/elasticsearch/query.go new file mode 100644 index 0000000000..8f8cf74303 --- /dev/null +++ b/modules/indexer/internal/elasticsearch/query.go @@ -0,0 +1,132 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package elasticsearch + +// MultiMatch types used by the call sites. See +// https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-multi-match-query.html#multi-match-types +const ( + MultiMatchTypeBestFields = "best_fields" + MultiMatchTypePhrasePrefix = "phrase_prefix" +) + +// ToAnySlice converts []T to []any for variadic query args like TermsQuery. +func ToAnySlice[T any](s []T) []any { + out := make([]any, len(s)) + for idx, v := range s { + out[idx] = v + } + return out +} + +// Query is an Elasticsearch query DSL node. It marshals to the JSON +// object expected by the ES query API. +type Query interface { + querySource() map[string]any +} + +type rawQuery map[string]any + +func (q rawQuery) querySource() map[string]any { return q } + +// TermQuery matches documents whose `field` exactly equals `value`. +func TermQuery(field string, value any) Query { + return rawQuery{"term": map[string]any{field: value}} +} + +// TermsQuery matches documents whose `field` equals any of `values`. +func TermsQuery(field string, values ...any) Query { + return rawQuery{"terms": map[string]any{field: values}} +} + +// MatchQuery is a full-text match on a single field. +func MatchQuery(field string, value any) Query { + return rawQuery{"match": map[string]any{field: value}} +} + +// MatchPhraseQuery matches the exact phrase on `field`. +func MatchPhraseQuery(field, value string) Query { + return rawQuery{"match_phrase": map[string]any{field: value}} +} + +// MultiMatchQuery is the fluent builder for a multi_match query. +type MultiMatchQuery struct { + query any + fields []string + typ string + operator string +} + +// NewMultiMatchQuery creates a multi_match query over the given fields. 
+func NewMultiMatchQuery(query any, fields ...string) *MultiMatchQuery { + return &MultiMatchQuery{query: query, fields: fields} +} + +func (m *MultiMatchQuery) Type(t string) *MultiMatchQuery { m.typ = t; return m } +func (m *MultiMatchQuery) Operator(op string) *MultiMatchQuery { m.operator = op; return m } + +func (m *MultiMatchQuery) querySource() map[string]any { + body := map[string]any{"query": m.query} + if len(m.fields) > 0 { + body["fields"] = m.fields + } + if m.typ != "" { + body["type"] = m.typ + } + if m.operator != "" { + body["operator"] = m.operator + } + return map[string]any{"multi_match": body} +} + +// RangeQuery is the fluent builder for a range query. +type RangeQuery struct { + field string + body map[string]any +} + +func NewRangeQuery(field string) *RangeQuery { + return &RangeQuery{field: field, body: map[string]any{}} +} + +func (r *RangeQuery) Gte(v any) *RangeQuery { r.body["gte"] = v; return r } +func (r *RangeQuery) Lte(v any) *RangeQuery { r.body["lte"] = v; return r } + +func (r *RangeQuery) querySource() map[string]any { + return map[string]any{"range": map[string]any{r.field: r.body}} +} + +// BoolQuery is the fluent builder for a bool query. 
+type BoolQuery struct { + must []Query + should []Query + mustNot []Query +} + +func NewBoolQuery() *BoolQuery { return &BoolQuery{} } + +func (b *BoolQuery) Must(q ...Query) *BoolQuery { b.must = append(b.must, q...); return b } +func (b *BoolQuery) Should(q ...Query) *BoolQuery { b.should = append(b.should, q...); return b } +func (b *BoolQuery) MustNot(q ...Query) *BoolQuery { b.mustNot = append(b.mustNot, q...); return b } + +func (b *BoolQuery) querySource() map[string]any { + body := map[string]any{} + if len(b.must) > 0 { + body["must"] = querySlice(b.must) + } + if len(b.should) > 0 { + body["should"] = querySlice(b.should) + } + if len(b.mustNot) > 0 { + body["must_not"] = querySlice(b.mustNot) + } + return map[string]any{"bool": body} +} + +func querySlice(queries []Query) []map[string]any { + out := make([]map[string]any, len(queries)) + for idx, q := range queries { + out[idx] = q.querySource() + } + return out +} diff --git a/modules/indexer/internal/elasticsearch/types.go b/modules/indexer/internal/elasticsearch/types.go new file mode 100644 index 0000000000..106f2faa8e --- /dev/null +++ b/modules/indexer/internal/elasticsearch/types.go @@ -0,0 +1,76 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package elasticsearch + +import "code.gitea.io/gitea/modules/json" + +const ( + bulkActionIndex = "index" + bulkActionDelete = "delete" +) + +// BulkOp is a single write inside a Bulk call. Construct with IndexOp or DeleteOp. +type BulkOp struct { + action string + id string + doc any +} + +// IndexOp builds a bulk index operation. +func IndexOp(id string, doc any) BulkOp { + return BulkOp{action: bulkActionIndex, id: id, doc: doc} +} + +// DeleteOp builds a bulk delete operation. +func DeleteOp(id string) BulkOp { + return BulkOp{action: bulkActionDelete, id: id} +} + +// SortField is one entry of the search sort array. 
+type SortField struct { + Field string + Desc bool +} + +func (s SortField) source() map[string]any { + order := "asc" + if s.Desc { + order = "desc" + } + return map[string]any{s.Field: map[string]any{"order": order}} +} + +// SearchRequest captures everything Gitea sends to the _search endpoint. +// Aggregations and Highlight are raw ES JSON bodies — callers write them as +// map[string]any since each has exactly one call site with a fixed shape. +type SearchRequest struct { + Query Query + Sort []SortField + From int + Size int + TrackTotal bool + Aggregations map[string]any + Highlight map[string]any +} + +// SearchHit is a single result row. +type SearchHit struct { + ID string + Score float64 + Source json.Value + Highlight map[string][]string +} + +// AggBucket is a terms-aggregation bucket. +type AggBucket struct { + Key any + DocCount int64 +} + +// SearchResponse is Gitea's decoded view of the search reply. +type SearchResponse struct { + Total int64 + Hits []SearchHit + Aggregations map[string][]AggBucket +} diff --git a/modules/indexer/internal/elasticsearch/util.go b/modules/indexer/internal/elasticsearch/util.go index 9e034bd553..2e96d4220a 100644 --- a/modules/indexer/internal/elasticsearch/util.go +++ b/modules/indexer/internal/elasticsearch/util.go @@ -6,14 +6,11 @@ package elasticsearch import ( "context" "fmt" - "time" "code.gitea.io/gitea/modules/log" - - "github.com/olivere/elastic/v7" ) -// VersionedIndexName returns the full index name with version +// VersionedIndexName returns the full index name with version suffix. 
func (i *Indexer) VersionedIndexName() string { return versionedIndexName(i.indexName, i.version) } @@ -26,41 +23,10 @@ func versionedIndexName(indexName string, version int) string { return fmt.Sprintf("%s.v%d", indexName, version) } -func (i *Indexer) createIndex(ctx context.Context) error { - createIndex, err := i.Client.CreateIndex(i.VersionedIndexName()).BodyString(i.mapping).Do(ctx) - if err != nil { - return err - } - if !createIndex.Acknowledged { - return fmt.Errorf("create index %s with %s failed", i.VersionedIndexName(), i.mapping) - } - - i.checkOldIndexes(ctx) - - return nil -} - -func (i *Indexer) initClient() (*elastic.Client, error) { - opts := []elastic.ClientOptionFunc{ - elastic.SetURL(i.url), - elastic.SetSniff(false), - elastic.SetHealthcheckInterval(10 * time.Second), - elastic.SetGzip(false), - } - - logger := log.GetLogger(log.DEFAULT) - - opts = append(opts, elastic.SetTraceLog(&log.PrintfLogger{Logf: logger.Trace})) - opts = append(opts, elastic.SetInfoLog(&log.PrintfLogger{Logf: logger.Info})) - opts = append(opts, elastic.SetErrorLog(&log.PrintfLogger{Logf: logger.Error})) - - return elastic.NewClient(opts...) -} - func (i *Indexer) checkOldIndexes(ctx context.Context) { - for v := 0; v < i.version; v++ { + for v := range i.version { indexName := versionedIndexName(i.indexName, v) - exists, err := i.Client.IndexExists(indexName).Do(ctx) + exists, err := i.indexExists(ctx, indexName) if err == nil && exists { log.Warn("Found older elasticsearch index named %q, Gitea will keep the old NOT DELETED. 
You can delete the old version after the upgrade succeed.", indexName) } diff --git a/modules/indexer/issues/bleve/bleve.go b/modules/indexer/issues/bleve/bleve.go index 39d96cab98..219d4163d6 100644 --- a/modules/indexer/issues/bleve/bleve.go +++ b/modules/indexer/issues/bleve/bleve.go @@ -27,7 +27,7 @@ import ( const ( issueIndexerAnalyzer = "issueIndexer" issueIndexerDocType = "issueIndexerDocType" - issueIndexerLatestVersion = 5 + issueIndexerLatestVersion = 6 ) const unicodeNormalizeName = "unicodeNormalize" @@ -83,8 +83,8 @@ func generateIssueIndexMapping() (mapping.IndexMapping, error) { docMapping.AddFieldMappingsAt("label_ids", numberFieldMapping) docMapping.AddFieldMappingsAt("no_label", boolFieldMapping) docMapping.AddFieldMappingsAt("milestone_id", numberFieldMapping) - docMapping.AddFieldMappingsAt("project_id", numberFieldMapping) - docMapping.AddFieldMappingsAt("project_board_id", numberFieldMapping) + docMapping.AddFieldMappingsAt("project_ids", numberFieldMapping) + docMapping.AddFieldMappingsAt("no_project", boolFieldMapping) docMapping.AddFieldMappingsAt("poster_id", numberFieldMapping) docMapping.AddFieldMappingsAt("assignee_id", numberFieldMapping) docMapping.AddFieldMappingsAt("mention_ids", numberFieldMapping) @@ -241,11 +241,15 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( queries = append(queries, bleve.NewDisjunctionQuery(milestoneQueries...)) } - if options.ProjectID.Has() { - queries = append(queries, inner_bleve.NumericEqualityQuery(options.ProjectID.Value(), "project_id")) - } - if options.ProjectColumnID.Has() { - queries = append(queries, inner_bleve.NumericEqualityQuery(options.ProjectColumnID.Value(), "project_board_id")) + if options.NoProjectOnly { + queries = append(queries, inner_bleve.BoolFieldQuery(true, "no_project")) + } else if len(options.ProjectIDs) > 0 { + var projectQueries []query.Query + for _, projectID := range options.ProjectIDs { + projectQueries = append(projectQueries, 
inner_bleve.NumericEqualityQuery(projectID, "project_ids")) + } + // FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: this logic is not right, it should use "AND" but not "OR" + queries = append(queries, bleve.NewDisjunctionQuery(projectQueries...)) } if options.PosterID != "" { diff --git a/modules/indexer/issues/db/options.go b/modules/indexer/issues/db/options.go index 380a25dc23..7a66efe791 100644 --- a/modules/indexer/issues/db/options.go +++ b/modules/indexer/issues/db/options.go @@ -13,6 +13,7 @@ import ( "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/indexer/issues/internal" "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/modules/util" ) func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_model.IssuesOptions, error) { @@ -65,8 +66,7 @@ func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_m ReviewRequestedID: convertID(options.ReviewRequestedID), ReviewedID: convertID(options.ReviewedID), SubscriberID: convertID(options.SubscriberID), - ProjectID: convertID(options.ProjectID), - ProjectColumnID: convertID(options.ProjectColumnID), + ProjectIDs: util.Iif(options.NoProjectOnly, []int64{db.NoConditionID}, options.ProjectIDs), IsClosed: options.IsClosed, IsPull: options.IsPull, IncludedLabelNames: nil, diff --git a/modules/indexer/issues/dboptions.go b/modules/indexer/issues/dboptions.go index f17724664d..f4582d38dd 100644 --- a/modules/indexer/issues/dboptions.go +++ b/modules/indexer/issues/dboptions.go @@ -46,10 +46,10 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp searchOpt.MilestoneIDs = opts.MilestoneIDs } - if opts.ProjectID > 0 { - searchOpt.ProjectID = optional.Some(opts.ProjectID) - } else if opts.ProjectID == db.NoConditionID { // FIXME: this is inconsistent from other places - searchOpt.ProjectID = optional.Some[int64](0) // Those issues with no project(projectid==0) + if len(opts.ProjectIDs) == 1 && opts.ProjectIDs[0] == 
db.NoConditionID { + searchOpt.NoProjectOnly = true + } else { + searchOpt.ProjectIDs = opts.ProjectIDs } searchOpt.AssigneeID = opts.AssigneeID @@ -65,7 +65,6 @@ func ToSearchOptions(keyword string, opts *issues_model.IssuesOptions) *SearchOp return nil } - searchOpt.ProjectColumnID = convertID(opts.ProjectColumnID) searchOpt.PosterID = opts.PosterID searchOpt.MentionID = convertID(opts.MentionedID) searchOpt.ReviewedID = convertID(opts.ReviewedID) diff --git a/modules/indexer/issues/elasticsearch/elasticsearch.go b/modules/indexer/issues/elasticsearch/elasticsearch.go index 9d627466ef..8286e72f49 100644 --- a/modules/indexer/issues/elasticsearch/elasticsearch.go +++ b/modules/indexer/issues/elasticsearch/elasticsearch.go @@ -11,27 +11,18 @@ import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/indexer" indexer_internal "code.gitea.io/gitea/modules/indexer/internal" - inner_elasticsearch "code.gitea.io/gitea/modules/indexer/internal/elasticsearch" + es "code.gitea.io/gitea/modules/indexer/internal/elasticsearch" "code.gitea.io/gitea/modules/indexer/issues/internal" "code.gitea.io/gitea/modules/util" - - "github.com/olivere/elastic/v7" ) -const ( - issueIndexerLatestVersion = 2 - // multi-match-types, currently only 2 types are used - // Reference: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-multi-match-query.html#multi-match-types - esMultiMatchTypeBestFields = "best_fields" - esMultiMatchTypePhrasePrefix = "phrase_prefix" -) +const issueIndexerLatestVersion = 3 var _ internal.Indexer = &Indexer{} // Indexer implements Indexer interface type Indexer struct { - inner *inner_elasticsearch.Indexer - indexer_internal.Indexer // do not composite inner_elasticsearch.Indexer directly to avoid exposing too much + *es.Indexer } func (b *Indexer) SupportedSearchModes() []indexer.SearchMode { @@ -41,12 +32,7 @@ func (b *Indexer) SupportedSearchModes() []indexer.SearchMode { // NewIndexer creates a new elasticsearch indexer 
func NewIndexer(url, indexerName string) *Indexer { - inner := inner_elasticsearch.NewIndexer(url, indexerName, issueIndexerLatestVersion, defaultMapping) - indexer := &Indexer{ - inner: inner, - Indexer: inner, - } - return indexer + return &Indexer{Indexer: es.NewIndexer(url, indexerName, issueIndexerLatestVersion, defaultMapping)} } const ( @@ -68,8 +54,8 @@ const ( "label_ids": { "type": "integer", "index": true }, "no_label": { "type": "boolean", "index": true }, "milestone_id": { "type": "integer", "index": true }, - "project_id": { "type": "integer", "index": true }, - "project_board_id": { "type": "integer", "index": true }, + "project_ids": { "type": "integer", "index": true }, + "no_project": { "type": "boolean", "index": true }, "poster_id": { "type": "integer", "index": true }, "assignee_id": { "type": "integer", "index": true }, "mention_ids": { "type": "integer", "index": true }, @@ -93,29 +79,14 @@ func (b *Indexer) Index(ctx context.Context, issues ...*internal.IndexerData) er return nil } else if len(issues) == 1 { issue := issues[0] - _, err := b.inner.Client.Index(). - Index(b.inner.VersionedIndexName()). - Id(strconv.FormatInt(issue.ID, 10)). - BodyJson(issue). - Do(ctx) - return err + return b.Indexer.Index(ctx, strconv.FormatInt(issue.ID, 10), issue) } - reqs := make([]elastic.BulkableRequest, 0) + ops := make([]es.BulkOp, 0, len(issues)) for _, issue := range issues { - reqs = append(reqs, - elastic.NewBulkIndexRequest(). - Index(b.inner.VersionedIndexName()). - Id(strconv.FormatInt(issue.ID, 10)). - Doc(issue), - ) + ops = append(ops, es.IndexOp(strconv.FormatInt(issue.ID, 10), issue)) } - - _, err := b.inner.Client.Bulk(). - Index(b.inner.VersionedIndexName()). - Add(reqs...). 
- Do(graceful.GetManager().HammerContext()) - return err + return b.Bulk(graceful.GetManager().HammerContext(), ops) } // Delete deletes indexes by ids @@ -123,129 +94,116 @@ func (b *Indexer) Delete(ctx context.Context, ids ...int64) error { if len(ids) == 0 { return nil } else if len(ids) == 1 { - _, err := b.inner.Client.Delete(). - Index(b.inner.VersionedIndexName()). - Id(strconv.FormatInt(ids[0], 10)). - Do(ctx) - return err + return b.Indexer.Delete(ctx, strconv.FormatInt(ids[0], 10)) } - reqs := make([]elastic.BulkableRequest, 0) + ops := make([]es.BulkOp, 0, len(ids)) for _, id := range ids { - reqs = append(reqs, - elastic.NewBulkDeleteRequest(). - Index(b.inner.VersionedIndexName()). - Id(strconv.FormatInt(id, 10)), - ) + ops = append(ops, es.DeleteOp(strconv.FormatInt(id, 10))) } - - _, err := b.inner.Client.Bulk(). - Index(b.inner.VersionedIndexName()). - Add(reqs...). - Do(graceful.GetManager().HammerContext()) - return err + return b.Bulk(graceful.GetManager().HammerContext(), ops) } // Search searches for issues by given conditions. 
// Returns the matching issue IDs func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (*internal.SearchResult, error) { - query := elastic.NewBoolQuery() + query := es.NewBoolQuery() if options.Keyword != "" { searchMode := util.IfZero(options.SearchMode, b.SupportedSearchModes()[0].ModeValue) + mm := es.NewMultiMatchQuery(options.Keyword, "title", "content", "comments") if searchMode == indexer.SearchModeExact { - query.Must(elastic.NewMultiMatchQuery(options.Keyword, "title", "content", "comments").Type(esMultiMatchTypePhrasePrefix)) - } else /* words */ { - query.Must(elastic.NewMultiMatchQuery(options.Keyword, "title", "content", "comments").Type(esMultiMatchTypeBestFields).Operator("and")) + mm = mm.Type(es.MultiMatchTypePhrasePrefix) + } else { + mm = mm.Type(es.MultiMatchTypeBestFields).Operator("and") } + query.Must(mm) } if len(options.RepoIDs) > 0 { - q := elastic.NewBoolQuery() - q.Should(elastic.NewTermsQuery("repo_id", toAnySlice(options.RepoIDs)...)) + q := es.NewBoolQuery() + q.Should(es.TermsQuery("repo_id", es.ToAnySlice(options.RepoIDs)...)) if options.AllPublic { - q.Should(elastic.NewTermQuery("is_public", true)) + q.Should(es.TermQuery("is_public", true)) } query.Must(q) } if options.IsPull.Has() { - query.Must(elastic.NewTermQuery("is_pull", options.IsPull.Value())) + query.Must(es.TermQuery("is_pull", options.IsPull.Value())) } if options.IsClosed.Has() { - query.Must(elastic.NewTermQuery("is_closed", options.IsClosed.Value())) + query.Must(es.TermQuery("is_closed", options.IsClosed.Value())) } if options.IsArchived.Has() { - query.Must(elastic.NewTermQuery("is_archived", options.IsArchived.Value())) + query.Must(es.TermQuery("is_archived", options.IsArchived.Value())) } if options.NoLabelOnly { - query.Must(elastic.NewTermQuery("no_label", true)) + query.Must(es.TermQuery("no_label", true)) } else { if len(options.IncludedLabelIDs) > 0 { - q := elastic.NewBoolQuery() + q := es.NewBoolQuery() for _, labelID := range 
options.IncludedLabelIDs { - q.Must(elastic.NewTermQuery("label_ids", labelID)) + q.Must(es.TermQuery("label_ids", labelID)) } query.Must(q) } else if len(options.IncludedAnyLabelIDs) > 0 { - query.Must(elastic.NewTermsQuery("label_ids", toAnySlice(options.IncludedAnyLabelIDs)...)) + query.Must(es.TermsQuery("label_ids", es.ToAnySlice(options.IncludedAnyLabelIDs)...)) } if len(options.ExcludedLabelIDs) > 0 { - q := elastic.NewBoolQuery() + q := es.NewBoolQuery() for _, labelID := range options.ExcludedLabelIDs { - q.MustNot(elastic.NewTermQuery("label_ids", labelID)) + q.MustNot(es.TermQuery("label_ids", labelID)) } query.Must(q) } } if len(options.MilestoneIDs) > 0 { - query.Must(elastic.NewTermsQuery("milestone_id", toAnySlice(options.MilestoneIDs)...)) + query.Must(es.TermsQuery("milestone_id", es.ToAnySlice(options.MilestoneIDs)...)) } - if options.ProjectID.Has() { - query.Must(elastic.NewTermQuery("project_id", options.ProjectID.Value())) - } - if options.ProjectColumnID.Has() { - query.Must(elastic.NewTermQuery("project_board_id", options.ProjectColumnID.Value())) + if options.NoProjectOnly { + query.Must(es.TermQuery("no_project", true)) + } else if len(options.ProjectIDs) > 0 { + // FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: this logic is not right, it should use "AND" but not "OR" + query.Must(es.TermsQuery("project_ids", es.ToAnySlice(options.ProjectIDs)...)) } if options.PosterID != "" { // "(none)" becomes 0, it means no poster posterIDInt64, _ := strconv.ParseInt(options.PosterID, 10, 64) - query.Must(elastic.NewTermQuery("poster_id", posterIDInt64)) + query.Must(es.TermQuery("poster_id", posterIDInt64)) } if options.AssigneeID != "" { if options.AssigneeID == "(any)" { - q := elastic.NewRangeQuery("assignee_id") - q.Gte(1) - query.Must(q) + query.Must(es.NewRangeQuery("assignee_id").Gte(1)) } else { // "(none)" becomes 0, it means no assignee assigneeIDInt64, _ := strconv.ParseInt(options.AssigneeID, 10, 64) - 
query.Must(elastic.NewTermQuery("assignee_id", assigneeIDInt64)) + query.Must(es.TermQuery("assignee_id", assigneeIDInt64)) } } if options.MentionID.Has() { - query.Must(elastic.NewTermQuery("mention_ids", options.MentionID.Value())) + query.Must(es.TermQuery("mention_ids", options.MentionID.Value())) } if options.ReviewedID.Has() { - query.Must(elastic.NewTermQuery("reviewed_ids", options.ReviewedID.Value())) + query.Must(es.TermQuery("reviewed_ids", options.ReviewedID.Value())) } if options.ReviewRequestedID.Has() { - query.Must(elastic.NewTermQuery("review_requested_ids", options.ReviewRequestedID.Value())) + query.Must(es.TermQuery("review_requested_ids", options.ReviewRequestedID.Value())) } if options.SubscriberID.Has() { - query.Must(elastic.NewTermQuery("subscriber_ids", options.SubscriberID.Value())) + query.Must(es.TermQuery("subscriber_ids", options.SubscriberID.Value())) } if options.UpdatedAfterUnix.Has() || options.UpdatedBeforeUnix.Has() { - q := elastic.NewRangeQuery("updated_unix") + q := es.NewRangeQuery("updated_unix") if options.UpdatedAfterUnix.Has() { q.Gte(options.UpdatedAfterUnix.Value()) } @@ -258,9 +216,9 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( if options.SortBy == "" { options.SortBy = internal.SortByCreatedAsc } - sortBy := []elastic.Sorter{ + sortBy := []es.SortField{ parseSortBy(options.SortBy), - elastic.NewFieldSort("id").Desc(), + {Field: "id", Desc: true}, } // See https://stackoverflow.com/questions/35206409/elasticsearch-2-1-result-window-is-too-large-index-max-result-window/35221900 @@ -268,43 +226,30 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( const maxPageSize = 10000 skip, limit := indexer_internal.ParsePaginator(options.Paginator, maxPageSize) - searchResult, err := b.inner.Client.Search(). - Index(b.inner.VersionedIndexName()). - Query(query). - SortBy(sortBy...). - From(skip).Size(limit). 
- Do(ctx) + resp, err := b.Indexer.Search(ctx, es.SearchRequest{ + Query: query, + Sort: sortBy, + From: skip, + Size: limit, + TrackTotal: true, + }) if err != nil { return nil, err } - hits := make([]internal.Match, 0, limit) - for _, hit := range searchResult.Hits.Hits { - id, _ := strconv.ParseInt(hit.Id, 10, 64) - hits = append(hits, internal.Match{ - ID: id, - }) + hits := make([]internal.Match, 0, len(resp.Hits)) + for _, hit := range resp.Hits { + id, _ := strconv.ParseInt(hit.ID, 10, 64) + hits = append(hits, internal.Match{ID: id}) } return &internal.SearchResult{ - Total: searchResult.TotalHits(), + Total: resp.Total, Hits: hits, }, nil } -func toAnySlice[T any](s []T) []any { - ret := make([]any, 0, len(s)) - for _, item := range s { - ret = append(ret, item) - } - return ret -} - -func parseSortBy(sortBy internal.SortBy) elastic.Sorter { - field := strings.TrimPrefix(string(sortBy), "-") - ret := elastic.NewFieldSort(field) - if strings.HasPrefix(string(sortBy), "-") { - ret.Desc() - } - return ret +func parseSortBy(sortBy internal.SortBy) es.SortField { + field, desc := strings.CutPrefix(string(sortBy), "-") + return es.SortField{Field: field, Desc: desc} } diff --git a/modules/indexer/issues/elasticsearch/elasticsearch_test.go b/modules/indexer/issues/elasticsearch/elasticsearch_test.go index cb9ed3889d..9b0eaebef6 100644 --- a/modules/indexer/issues/elasticsearch/elasticsearch_test.go +++ b/modules/indexer/issues/elasticsearch/elasticsearch_test.go @@ -6,6 +6,7 @@ package elasticsearch import ( "fmt" "net/http" + "net/url" "os" "testing" "time" @@ -17,19 +18,36 @@ import ( func TestElasticsearchIndexer(t *testing.T) { // The elasticsearch instance started by pull-db-tests.yml > test-unit > services > elasticsearch - url := "http://elastic:changeme@elasticsearch:9200" + rawURL := "http://elastic:changeme@elasticsearch:9200" if os.Getenv("CI") == "" { // Make it possible to run tests against a local elasticsearch instance - url = 
os.Getenv("TEST_ELASTICSEARCH_URL") - if url == "" { + rawURL = os.Getenv("TEST_ELASTICSEARCH_URL") + if rawURL == "" { t.Skip("TEST_ELASTICSEARCH_URL not set and not running in CI") return } } + // NOTE(review): Go's http.Client does promote URL userinfo to a Basic Auth + // header on its own; we still strip the userinfo and set the header explicitly + // so the probe URL and the credentials handling are unambiguous — confirm. + parsed, err := url.Parse(rawURL) + require.NoError(t, err) + user := parsed.User + parsed.User = nil + probeURL := parsed.String() + require.Eventually(t, func() bool { - resp, err := http.Get(url) + req, err := http.NewRequest(http.MethodGet, probeURL, nil) + if err != nil { + return false + } + if user != nil { + pass, _ := user.Password() + req.SetBasicAuth(user.Username(), pass) + } + resp, err := http.DefaultClient.Do(req) if err != nil { return false } @@ -37,7 +55,7 @@ func TestElasticsearchIndexer(t *testing.T) { return resp.StatusCode == http.StatusOK }, time.Minute, time.Second, "Expected elasticsearch to be up") - indexer := NewIndexer(url, fmt.Sprintf("test_elasticsearch_indexer_%d", time.Now().Unix())) + indexer := NewIndexer(rawURL, fmt.Sprintf("test_elasticsearch_indexer_%d", time.Now().Unix())) defer indexer.Close() tests.TestIndexer(t, indexer) diff --git a/modules/indexer/issues/indexer_test.go b/modules/indexer/issues/indexer_test.go index 3e38ac49b7..a67e96c0a2 100644 --- a/modules/indexer/issues/indexer_test.go +++ b/modules/indexer/issues/indexer_test.go @@ -416,28 +416,42 @@ func searchIssueInProject(t *testing.T) { }{ { SearchOptions{ - ProjectID: optional.Some(int64(1)), + ProjectIDs: []int64{1}, }, []int64{5, 3, 2, 1}, }, - { - SearchOptions{ - ProjectColumnID: optional.Some(int64(1)), - }, - []int64{1}, - }, - { - SearchOptions{ - ProjectColumnID: optional.Some(int64(0)), // issue with in default column - }, - []int64{2}, - }, } for _, test := range tests { issueIDs, _, err := SearchIssues(t.Context(), &test.opts) require.NoError(t, err) 
assert.Equal(t, test.expectedIDs, issueIDs) } + + // Test filtering for issues with no project assigned using dynamic validation + t.Run("no project assigned", func(t *testing.T) { + issueIDs, total, err := SearchIssues(t.Context(), &SearchOptions{ + ProjectIDs: []int64{db.NoConditionID}, + }) + require.NoError(t, err) + assert.NotEmpty(t, issueIDs) + assert.Equal(t, total, int64(len(issueIDs))) + + // Verify each returned issue actually has no project + for _, issueID := range issueIDs { + issue, err := issues.GetIssueByID(t.Context(), issueID) + require.NoError(t, err) + err = issue.LoadProjects(t.Context()) + require.NoError(t, err) + assert.Empty(t, issue.Projects, "Issue %d should have no projects", issueID) + } + + // Count total issues with no project to verify we got them all + allIssues, err := issues.Issues(t.Context(), &issues.IssuesOptions{ + ProjectIDs: []int64{db.NoConditionID}, + }) + require.NoError(t, err) + assert.Len(t, issueIDs, len(allIssues), "Should return all issues with no project") + }) } func searchIssueWithPaginator(t *testing.T) { diff --git a/modules/indexer/issues/internal/model.go b/modules/indexer/issues/internal/model.go index 0d4f0f727d..84979a8e64 100644 --- a/modules/indexer/issues/internal/model.go +++ b/modules/indexer/issues/internal/model.go @@ -30,8 +30,9 @@ type IndexerData struct { LabelIDs []int64 `json:"label_ids"` NoLabel bool `json:"no_label"` // True if LabelIDs is empty MilestoneID int64 `json:"milestone_id"` - ProjectID int64 `json:"project_id"` - ProjectColumnID int64 `json:"project_board_id"` // the key should be kept as project_board_id to keep compatible + ProjectIDs []int64 `json:"project_ids"` + NoProject bool `json:"no_project"` // True if ProjectIDs is empty + ProjectColumnMap map[int64]int64 `json:"project_column_map,omitempty"` // Maps project ID to column ID for each project the issue is in PosterID int64 `json:"poster_id"` AssigneeID int64 `json:"assignee_id"` MentionIDs []int64 `json:"mention_ids"` @@ 
-94,8 +95,8 @@ type SearchOptions struct { MilestoneIDs []int64 // milestones the issues have - ProjectID optional.Option[int64] // project the issues belong to - ProjectColumnID optional.Option[int64] // project column the issues belong to + ProjectIDs []int64 // project the issues belong to. FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: no multiple project filter support yet. Search logic is wrong. + NoProjectOnly bool // if the issues have no project, if true, ProjectIDs will be ignored PosterID string // poster of the issues, "(none)" or "(any)" or a user ID AssigneeID string // assignee of the issues, "(none)" or "(any)" or a user ID diff --git a/modules/indexer/issues/internal/tests/tests.go b/modules/indexer/issues/internal/tests/tests.go index 7aebbbcd58..20e64a5955 100644 --- a/modules/indexer/issues/internal/tests/tests.go +++ b/modules/indexer/issues/internal/tests/tests.go @@ -116,6 +116,16 @@ var cases = []*testIndexerCase{ assert.Equal(t, len(data), int(result.Total)) }, }, + { + // Exercises the single-doc Index/Delete fast path in backends that have one (e.g. Elasticsearch). 
+ Name: "single-doc index", + ExtraData: []*internal.IndexerData{ + {ID: 999, Title: "solo-issue-marker"}, + }, + SearchOptions: &internal.SearchOptions{Keyword: "solo-issue-marker"}, + ExpectedIDs: []int64{999}, + ExpectedTotal: 1, + }, { Name: "Keyword", ExtraData: []*internal.IndexerData{ @@ -301,75 +311,41 @@ var cases = []*testIndexerCase{ }, }, { - Name: "ProjectID", + Name: "ProjectIDs", SearchOptions: &internal.SearchOptions{ Paginator: &db.ListOptions{ PageSize: 5, }, - ProjectID: optional.Some(int64(1)), + ProjectIDs: []int64{1}, }, Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { assert.Len(t, result.Hits, 5) for _, v := range result.Hits { - assert.Equal(t, int64(1), data[v.ID].ProjectID) + assert.Contains(t, data[v.ID].ProjectIDs, int64(1)) } assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { - return v.ProjectID == 1 + return slices.Contains(v.ProjectIDs, int64(1)) }), result.Total) }, }, { - Name: "no ProjectID", + Name: "no ProjectIDs (empty array)", SearchOptions: &internal.SearchOptions{ Paginator: &db.ListOptions{ - PageSize: 5, + PageSize: 50, }, - ProjectID: optional.Some(int64(0)), + NoProjectOnly: true, }, Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { - assert.Len(t, result.Hits, 5) + // Verify only issues with no projects are returned for _, v := range result.Hits { - assert.Equal(t, int64(0), data[v.ID].ProjectID) + assert.Empty(t, data[v.ID].ProjectIDs, "Issue %d should have no projects", v.ID) } - assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { - return v.ProjectID == 0 - }), result.Total) - }, - }, - { - Name: "ProjectColumnID", - SearchOptions: &internal.SearchOptions{ - Paginator: &db.ListOptions{ - PageSize: 5, - }, - ProjectColumnID: optional.Some(int64(1)), - }, - Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { - assert.Len(t, 
result.Hits, 5) - for _, v := range result.Hits { - assert.Equal(t, int64(1), data[v.ID].ProjectColumnID) - } - assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { - return v.ProjectColumnID == 1 - }), result.Total) - }, - }, - { - Name: "no ProjectColumnID", - SearchOptions: &internal.SearchOptions{ - Paginator: &db.ListOptions{ - PageSize: 5, - }, - ProjectColumnID: optional.Some(int64(0)), - }, - Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) { - assert.Len(t, result.Hits, 5) - for _, v := range result.Hits { - assert.Equal(t, int64(0), data[v.ID].ProjectColumnID) - } - assert.Equal(t, countIndexerData(data, func(v *internal.IndexerData) bool { - return v.ProjectColumnID == 0 - }), result.Total) + // Verify we got ALL issues with no projects + expectedCount := countIndexerData(data, func(v *internal.IndexerData) bool { + return len(v.ProjectIDs) == 0 + }) + assert.Equal(t, expectedCount, result.Total, "Should return all %d issues with no project", expectedCount) }, }, { @@ -706,6 +682,10 @@ func generateDefaultIndexerData() []*internal.IndexerData { for i := range subscriberIDs { subscriberIDs[i] = int64(i) + 1 // SubscriberID should not be 0 } + projectIDs := make([]int64, id%5) + for i := range projectIDs { + projectIDs[i] = int64(i) + 1 // projectID should not be 0 + } data = append(data, &internal.IndexerData{ ID: id, @@ -719,8 +699,8 @@ func generateDefaultIndexerData() []*internal.IndexerData { LabelIDs: labelIDs, NoLabel: len(labelIDs) == 0, MilestoneID: issueIndex % 4, - ProjectID: issueIndex % 5, - ProjectColumnID: issueIndex % 6, + ProjectIDs: projectIDs, + NoProject: len(projectIDs) == 0, PosterID: id%10 + 1, // PosterID should not be 0 AssigneeID: issueIndex % 10, MentionIDs: mentionIDs, diff --git a/modules/indexer/issues/meilisearch/meilisearch.go b/modules/indexer/issues/meilisearch/meilisearch.go index 5715cf4794..6ac6d239c8 100644 --- 
a/modules/indexer/issues/meilisearch/meilisearch.go +++ b/modules/indexer/issues/meilisearch/meilisearch.go @@ -20,7 +20,7 @@ import ( ) const ( - issueIndexerLatestVersion = 4 + issueIndexerLatestVersion = 5 // TODO: make this configurable if necessary maxTotalHits = 10000 @@ -71,8 +71,8 @@ func NewIndexer(url, apiKey, indexerName string) *Indexer { "label_ids", "no_label", "milestone_id", - "project_id", - "project_board_id", + "project_ids", + "no_project", "poster_id", "assignee_id", "mention_ids", @@ -182,11 +182,11 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) ( query.And(inner_meilisearch.NewFilterIn("milestone_id", options.MilestoneIDs...)) } - if options.ProjectID.Has() { - query.And(inner_meilisearch.NewFilterEq("project_id", options.ProjectID.Value())) - } - if options.ProjectColumnID.Has() { - query.And(inner_meilisearch.NewFilterEq("project_board_id", options.ProjectColumnID.Value())) + if options.NoProjectOnly { + query.And(inner_meilisearch.NewFilterEq("no_project", true)) + } else if len(options.ProjectIDs) > 0 { + // FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: this logic is not right, it should use "AND" but not "OR" + query.And(inner_meilisearch.NewFilterIn("project_ids", options.ProjectIDs...)) } if options.PosterID != "" { diff --git a/modules/indexer/issues/util.go b/modules/indexer/issues/util.go index 7647be58e8..fafe9b8bbd 100644 --- a/modules/indexer/issues/util.go +++ b/modules/indexer/issues/util.go @@ -87,14 +87,9 @@ func getIssueIndexerData(ctx context.Context, issueID int64) (*internal.IndexerD return nil, false, err } - var projectID int64 - if issue.Project != nil { - projectID = issue.Project.ID - } - - projectColumnID, err := issue.ProjectColumnID(ctx) - if err != nil { - return nil, false, err + projectIDs := make([]int64, 0, len(issue.Projects)) + for _, project := range issue.Projects { + projectIDs = append(projectIDs, project.ID) } if err := issue.Repo.LoadOwner(ctx); err != nil { @@ -114,8 
+109,8 @@ func getIssueIndexerData(ctx context.Context, issueID int64) (*internal.IndexerD LabelIDs: labels, NoLabel: len(labels) == 0, MilestoneID: issue.MilestoneID, - ProjectID: projectID, - ProjectColumnID: projectColumnID, + ProjectIDs: projectIDs, + NoProject: len(projectIDs) == 0, PosterID: issue.PosterID, AssigneeID: issue.AssigneeID, MentionIDs: mentionIDs, diff --git a/modules/log/logger_global.go b/modules/log/logger_global.go index 2bc8c4f449..6dacf583a7 100644 --- a/modules/log/logger_global.go +++ b/modules/log/logger_global.go @@ -75,12 +75,23 @@ func IsLoggerEnabled(name string) bool { return GetManager().GetLogger(name).IsEnabled() } -func SetConsoleLogger(loggerName, writerName string, level Level) { +func SetupStderrLogger(loggerName, writerName string, level Level) { writer := NewEventWriterConsole(writerName, WriterMode{ - Level: level, - Flags: FlagsFromBits(LstdFlags), - Colorize: CanColorStdout, - WriterOption: WriterConsoleOption{}, + Level: level, + Flags: FlagsFromBits(LstdFlags), + Colorize: CanColorStderr, + + // For most CLI commands, it's better to use Stderr as log output: + // this logger is installed early (app.Before), before subcommands like "dump" redirect logging to stderr. + // If Stdout, early log output (e.g.: warning during config loading) goes to stdout + // and corrupts any command that writes data to stdout (e.g. "gitea dump --file -"). + // + // It is inconsistent with the web server's default console logger from config + // (which will be initialized later and use Stdout by default), but there is no other way at the moment: + // many existing users depend on such behavior to collect web logs (e.g. fail2ban). + // + // Maybe need to refactor the logger system again in the future. 
+ WriterOption: WriterConsoleOption{Stderr: true}, }) GetManager().GetLogger(loggerName).ReplaceAllWriters(writer) } diff --git a/modules/setting/database.go b/modules/setting/database.go index 1a4bf64805..2b069a6292 100644 --- a/modules/setting/database.go +++ b/modules/setting/database.go @@ -4,13 +4,7 @@ package setting import ( - "errors" - "fmt" - "net" - "net/url" - "os" "path/filepath" - "strings" "time" ) @@ -20,24 +14,22 @@ var ( // DatabaseTypeNames contains the friendly names for all database types DatabaseTypeNames = map[string]string{"mysql": "MySQL", "postgres": "PostgreSQL", "mssql": "MSSQL", "sqlite3": "SQLite3"} - // EnableSQLite3 use SQLite3, set by build flag - EnableSQLite3 bool - // Database holds the database settings Database = struct { - Type DatabaseType - Host string - Name string - User string - Passwd string - Schema string - SSLMode string - Path string + Type DatabaseType + Host string + Name string + User string + Passwd string + Schema string + SSLMode string + Path string + + SQLiteBusyTimeout int + SQLiteJournalMode string + LogSQL bool - MysqlCharset string CharsetCollation string - Timeout int // seconds - SQLiteJournalMode string DBConnectRetries int DBConnectBackoff time.Duration MaxIdleConns int @@ -47,7 +39,7 @@ var ( AutoMigration bool SlowQueryThreshold time.Duration }{ - Timeout: 500, + SQLiteBusyTimeout: 500, IterateBufferSize: 50, } ) @@ -64,15 +56,14 @@ func loadDBSetting(rootCfg ConfigProvider) { Database.Host = sec.Key("HOST").String() Database.Name = sec.Key("NAME").String() Database.User = sec.Key("USER").String() - if len(Database.Passwd) == 0 { - Database.Passwd = sec.Key("PASSWD").String() - } + Database.Passwd = sec.Key("PASSWD").String() + Database.Schema = sec.Key("SCHEMA").String() Database.SSLMode = sec.Key("SSL_MODE").MustString("disable") Database.CharsetCollation = sec.Key("CHARSET_COLLATION").String() Database.Path = sec.Key("PATH").MustString(filepath.Join(AppDataPath, "gitea.db")) - Database.Timeout = 
sec.Key("SQLITE_TIMEOUT").MustInt(500) + Database.SQLiteBusyTimeout = sec.Key("SQLITE_TIMEOUT").MustInt(500) Database.SQLiteJournalMode = sec.Key("SQLITE_JOURNAL_MODE").MustString("") Database.MaxIdleConns = sec.Key("MAX_IDLE_CONNS").MustInt(2) @@ -91,123 +82,9 @@ func loadDBSetting(rootCfg ConfigProvider) { Database.SlowQueryThreshold = sec.Key("SLOW_QUERY_THRESHOLD").MustDuration(5 * time.Second) } -// DBConnStr returns database connection string -func DBConnStr() (string, error) { - var connStr string - paramSep := "?" - if strings.Contains(Database.Name, paramSep) { - paramSep = "&" - } - switch Database.Type { - case "mysql": - connType := "tcp" - if len(Database.Host) > 0 && Database.Host[0] == '/' { // looks like a unix socket - connType = "unix" - } - tls := Database.SSLMode - if tls == "disable" { // allow (Postgres-inspired) default value to work in MySQL - tls = "false" - } - connStr = fmt.Sprintf("%s:%s@%s(%s)/%s%sparseTime=true&tls=%s", - Database.User, Database.Passwd, connType, Database.Host, Database.Name, paramSep, tls) - case "postgres": - connStr = getPostgreSQLConnectionString(Database.Host, Database.User, Database.Passwd, Database.Name, Database.SSLMode) - case "mssql": - host, port := ParseMSSQLHostPort(Database.Host) - connStr = fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;", host, port, Database.Name, Database.User, Database.Passwd) - case "sqlite3": - if !EnableSQLite3 { - return "", errors.New("this Gitea binary was not built with SQLite3 support") - } - if err := os.MkdirAll(filepath.Dir(Database.Path), os.ModePerm); err != nil { - return "", fmt.Errorf("Failed to create directories: %w", err) - } - journalMode := "" - if Database.SQLiteJournalMode != "" { - journalMode = "&_journal_mode=" + Database.SQLiteJournalMode - } - connStr = fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate%s", - Database.Path, Database.Timeout, journalMode) - default: - return "", fmt.Errorf("unknown database 
type: %s", Database.Type) - } - - return connStr, nil -} - -// parsePostgreSQLHostPort parses given input in various forms defined in -// https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING -// and returns proper host and port number. -func parsePostgreSQLHostPort(info string) (host, port string) { - if h, p, err := net.SplitHostPort(info); err == nil { - host, port = h, p - } else { - // treat the "info" as "host", if it's an IPv6 address, remove the wrapper - host = info - if strings.HasPrefix(host, "[") && strings.HasSuffix(host, "]") { - host = host[1 : len(host)-1] - } - } - - // set fallback values - if host == "" { - host = "127.0.0.1" - } - if port == "" { - port = "5432" - } - return host, port -} - -func getPostgreSQLConnectionString(dbHost, dbUser, dbPasswd, dbName, dbsslMode string) (connStr string) { - dbName, dbParam, _ := strings.Cut(dbName, "?") - host, port := parsePostgreSQLHostPort(dbHost) - connURL := url.URL{ - Scheme: "postgres", - User: url.UserPassword(dbUser, dbPasswd), - Host: net.JoinHostPort(host, port), - Path: dbName, - OmitHost: false, - RawQuery: dbParam, - } - query := connURL.Query() - if strings.HasPrefix(host, "/") { // looks like a unix socket - query.Add("host", host) - connURL.Host = ":" + port - } - query.Set("sslmode", dbsslMode) - connURL.RawQuery = query.Encode() - return connURL.String() -} - -// ParseMSSQLHostPort splits the host into host and port -func ParseMSSQLHostPort(info string) (string, string) { - // the default port "0" might be related to MSSQL's dynamic port, maybe it should be double-confirmed in the future - host, port := "127.0.0.1", "0" - if strings.Contains(info, ":") { - host = strings.Split(info, ":")[0] - port = strings.Split(info, ":")[1] - } else if strings.Contains(info, ",") { - host = strings.Split(info, ",")[0] - port = strings.TrimSpace(strings.Split(info, ",")[1]) - } else if len(info) > 0 { - host = info - } - if host == "" { - host = "127.0.0.1" - } - if port 
== "" { - port = "0" - } - return host, port -} - +// DatabaseType FIXME: it is also used directly with "schemas.DBType", so the names must be consistent type DatabaseType string -func (t DatabaseType) String() string { - return string(t) -} - func (t DatabaseType) IsSQLite3() bool { return t == "sqlite3" } diff --git a/modules/setting/database_sqlite.go b/modules/setting/database_sqlite.go deleted file mode 100644 index c1037cfb27..0000000000 --- a/modules/setting/database_sqlite.go +++ /dev/null @@ -1,15 +0,0 @@ -//go:build sqlite - -// Copyright 2014 The Gogs Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package setting - -import ( - _ "github.com/mattn/go-sqlite3" -) - -func init() { - EnableSQLite3 = true - SupportedDatabaseTypes = append(SupportedDatabaseTypes, "sqlite3") -} diff --git a/modules/setting/log.go b/modules/setting/log.go index 59866c7605..764c2a82f4 100644 --- a/modules/setting/log.go +++ b/modules/setting/log.go @@ -256,7 +256,7 @@ func initLoggerByName(manager *log.LoggerManager, rootCfg ConfigProvider, logger } func InitSQLLoggersForCli(level log.Level) { - log.SetConsoleLogger("xorm", "console", level) + log.SetupStderrLogger("xorm", "console-stderr", level) } func IsAccessLogEnabled() bool { diff --git a/modules/setting/repository.go b/modules/setting/repository.go index 9195b7ee50..a8bc91c089 100644 --- a/modules/setting/repository.go +++ b/modules/setting/repository.go @@ -18,6 +18,12 @@ const ( RepoCreatingPublic = "public" ) +// enumerates the values for [repository.pull-request] DEFAULT_TITLE_SOURCE +const ( + RepoPRTitleSourceFirstCommit = "first-commit" + RepoPRTitleSourceAuto = "auto" +) + // ItemsPerPage maximum items per page in forks, watchers and stars of a repo const ItemsPerPage = 40 @@ -89,6 +95,7 @@ var ( RetargetChildrenOnMerge bool DelayCheckForInactiveDays int DefaultDeleteBranchAfterMerge bool + DefaultTitleSource string } `ini:"repository.pull-request"` // Issue Setting @@ -213,6 +220,7 @@ var ( 
RetargetChildrenOnMerge bool DelayCheckForInactiveDays int DefaultDeleteBranchAfterMerge bool + DefaultTitleSource string }{ WorkInProgressPrefixes: []string{"WIP:", "[WIP]"}, // Same as GitHub. See @@ -229,6 +237,7 @@ var ( AddCoCommitterTrailers: true, RetargetChildrenOnMerge: true, DelayCheckForInactiveDays: 7, + DefaultTitleSource: RepoPRTitleSourceFirstCommit, }, // Issue settings diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 3c1ad14428..5bfe68697a 100644 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -41,9 +41,9 @@ func init() { AppVer = "dev" } - // We can rely on log.CanColorStdout being set properly because modules/log/console_windows.go comes before modules/setting/setting.go lexicographically + // FIXME: the logger shouldn't be initialized here, the app entry should initialize the logger // By default set this logger at Info - we'll change it later, but we need to start with something. - log.SetConsoleLogger(log.DEFAULT, "console", log.INFO) + log.SetupStderrLogger(log.DEFAULT, "console-stderr", log.INFO) } // IsRunUserMatchCurrentUser returns false if configured run user does not match diff --git a/modules/setting/testenv.go b/modules/setting/testenv.go index d8663d07e2..24d489d77c 100644 --- a/modules/setting/testenv.go +++ b/modules/setting/testenv.go @@ -4,6 +4,7 @@ package setting import ( + "errors" "fmt" "os" "path/filepath" @@ -13,6 +14,8 @@ import ( "code.gitea.io/gitea/modules/auth/password/hash" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/util" + + "github.com/kballard/go-shellquote" ) var giteaTestSourceRoot *string // intentionally use a pointer to make sure the uninitialized access panics @@ -21,6 +24,16 @@ func GetGiteaTestSourceRoot() string { return *giteaTestSourceRoot } +func detectGiteaTestRoot() string { + _, filename, _, _ := runtime.Caller(0) + giteaRoot := filepath.Dir(filepath.Dir(filepath.Dir(filename))) + fixturesDir := filepath.Join(giteaRoot, "models", 
"fixtures") + if _, err := os.Stat(fixturesDir); err != nil { + panic("in gitea source code directory, fixtures directory not found: " + fixturesDir) + } + return giteaRoot +} + func SetupGiteaTestEnv() { if giteaTestSourceRoot != nil { return // already initialized @@ -41,12 +54,7 @@ func SetupGiteaTestEnv() { initGiteaRoot := func() string { giteaRoot := os.Getenv("GITEA_TEST_ROOT") if giteaRoot == "" { - _, filename, _, _ := runtime.Caller(0) - giteaRoot = filepath.Dir(filepath.Dir(filepath.Dir(filename))) - fixturesDir := filepath.Join(giteaRoot, "models", "fixtures") - if _, err := os.Stat(fixturesDir); err != nil { - panic("in gitea source code directory, fixtures directory not found: " + fixturesDir) - } + giteaRoot = detectGiteaTestRoot() } giteaTestSourceRoot = &giteaRoot return giteaRoot @@ -117,3 +125,51 @@ func SetupGiteaTestEnv() { _ = os.Setenv("GITEA_ROOT", giteaRoot) _ = os.Setenv("GITEA_CONF", giteaConf) // test fixture git hooks use "$GITEA_ROOT/$GITEA_CONF" in their scripts } + +func PrepareIntegrationTestConfig() error { + giteaTestRoot := detectGiteaTestRoot() + isInCI := os.Getenv("CI") != "" + testDatabase := os.Getenv("GITEA_TEST_DATABASE") + if testDatabase == "" { + if isInCI { + return errors.New("GITEA_TEST_DATABASE environment variable not set") + } + // for local development, default to sqlite. 
CI needs to explicitly set a database to avoid unexpected results + testDatabase = "sqlite" + _, _ = fmt.Fprintf(os.Stderr, "Environment variable GITEA_TEST_DATABASE not set - defaulting to %s\n", testDatabase) + } + + _ = os.Setenv("GITEA_TEST_ROOT", giteaTestRoot) + _ = os.Setenv("GITEA_TEST_CONF", filepath.Join("tests", testDatabase+".ini")) + + workPath := filepath.Join(giteaTestRoot, "tests/integration/gitea-integration-"+testDatabase) + if err := os.MkdirAll(workPath, 0o755); err != nil { + return err + } + + confFile := filepath.Join(giteaTestRoot, "tests", testDatabase+".ini") + tmplBuf, err := os.ReadFile(confFile + ".tmpl") + if err != nil { + return err + } + tmpl := string(tmplBuf) + envVars, err := shellquote.Split(os.Getenv("MAKEFILE_VARS")) + if err != nil { + return err + } + envVarMap := map[string]string{ + "TEST_WORK_PATH": workPath, + "TEST_LOGGER": "test,file", + } + for _, env := range append(os.Environ(), envVars...) { + k, v, _ := strings.Cut(env, "=") + k = strings.TrimSpace(k) + v = strings.TrimSpace(v) + envVarMap[k] = v + } + for k, v := range envVarMap { + tmpl = strings.ReplaceAll(tmpl, fmt.Sprintf("{{%s}}", k), v) + } + err = os.WriteFile(confFile, []byte(tmpl), 0o644) + return err +} diff --git a/modules/structs/admin_user.go b/modules/structs/admin_user.go index d158a5fd31..ee65e016bc 100644 --- a/modules/structs/admin_user.go +++ b/modules/structs/admin_user.go @@ -30,7 +30,7 @@ type CreateUserOption struct { // Whether the user has restricted access privileges Restricted *bool `json:"restricted"` // User visibility level: public, limited, or private - Visibility string `json:"visibility" binding:"In(,public,limited,private)"` + Visibility UserVisibility `json:"visibility" binding:"In(,public,limited,private)"` // For explicitly setting the user creation timestamp. Useful when users are // migrated from other systems. 
When omitted, the user's creation timestamp @@ -79,5 +79,5 @@ type EditUserOption struct { // Whether the user has restricted access privileges Restricted *bool `json:"restricted"` // User visibility level: public, limited, or private - Visibility string `json:"visibility" binding:"In(,public,limited,private)"` + Visibility UserVisibility `json:"visibility" binding:"In(,public,limited,private)"` } diff --git a/modules/structs/issue.go b/modules/structs/issue.go index fd29727a43..f108cf3d0a 100644 --- a/modules/structs/issue.go +++ b/modules/structs/issue.go @@ -60,6 +60,7 @@ type Issue struct { Attachments []*Attachment `json:"assets"` Labels []*Label `json:"labels"` Milestone *Milestone `json:"milestone"` + Projects []*Project `json:"projects"` // deprecated Assignee *User `json:"assignee"` Assignees []*User `json:"assignees"` @@ -100,7 +101,9 @@ type CreateIssueOption struct { Milestone int64 `json:"milestone"` // list of label ids Labels []int64 `json:"labels"` - Closed bool `json:"closed"` + // list of project ids + Projects []int64 `json:"projects"` + Closed bool `json:"closed"` } // EditIssueOption options for editing an issue @@ -112,7 +115,9 @@ type EditIssueOption struct { Assignee *string `json:"assignee"` Assignees []string `json:"assignees"` Milestone *int64 `json:"milestone"` - State *string `json:"state"` + // list of project ids to set (replaces existing projects) + Projects *[]int64 `json:"projects"` + State *string `json:"state"` // swagger:strfmt date-time Deadline *time.Time `json:"due_date"` RemoveDeadline *bool `json:"unset_due_date"` diff --git a/modules/structs/org.go b/modules/structs/org.go index 723689cb53..b93f68b600 100644 --- a/modules/structs/org.go +++ b/modules/structs/org.go @@ -22,7 +22,7 @@ type Organization struct { // The location of the organization Location string `json:"location"` // The visibility level of the organization (public, limited, private) - Visibility string `json:"visibility"` + Visibility UserVisibility 
`json:"visibility"` // Whether repository administrators can change team access RepoAdminChangeTeamAccess bool `json:"repo_admin_change_team_access"` // username of the organization @@ -60,8 +60,7 @@ type CreateOrgOption struct { // The location of the organization Location string `json:"location" binding:"MaxSize(50)"` // possible values are `public` (default), `limited` or `private` - // enum: ["public","limited","private"] - Visibility string `json:"visibility" binding:"In(,public,limited,private)"` + Visibility UserVisibility `json:"visibility" binding:"In(,public,limited,private)"` // Whether repository administrators can change team access RepoAdminChangeTeamAccess bool `json:"repo_admin_change_team_access"` } @@ -79,8 +78,7 @@ type EditOrgOption struct { // The location of the organization Location *string `json:"location" binding:"MaxSize(50)"` // possible values are `public`, `limited` or `private` - // enum: ["public","limited","private"] - Visibility *string `json:"visibility" binding:"In(,public,limited,private)"` + Visibility *UserVisibility `json:"visibility" binding:"In(,public,limited,private)"` // Whether repository administrators can change team access RepoAdminChangeTeamAccess *bool `json:"repo_admin_change_team_access"` } diff --git a/modules/structs/org_team.go b/modules/structs/org_team.go index f730a5681c..20959931d3 100644 --- a/modules/structs/org_team.go +++ b/modules/structs/org_team.go @@ -15,9 +15,8 @@ type Team struct { // The organization that the team belongs to Organization *Organization `json:"organization"` // Whether the team has access to all repositories in the organization - IncludesAllRepositories bool `json:"includes_all_repositories"` - // enum: ["none","read","write","admin","owner"] - Permission string `json:"permission"` + IncludesAllRepositories bool `json:"includes_all_repositories"` + Permission AccessLevelName `json:"permission"` // example: 
["repo.code","repo.issues","repo.ext_issues","repo.wiki","repo.pulls","repo.releases","repo.projects","repo.ext_wiki"] // Deprecated: This variable should be replaced by UnitsMap and will be dropped in later versions. Units []string `json:"units"` @@ -34,9 +33,8 @@ type CreateTeamOption struct { // The description of the team Description string `json:"description" binding:"MaxSize(255)"` // Whether the team has access to all repositories in the organization - IncludesAllRepositories bool `json:"includes_all_repositories"` - // enum: ["read","write","admin"] - Permission string `json:"permission"` + IncludesAllRepositories bool `json:"includes_all_repositories"` + Permission RepoWritePermission `json:"permission"` // example: ["repo.actions","repo.code","repo.issues","repo.ext_issues","repo.wiki","repo.ext_wiki","repo.pulls","repo.releases","repo.projects","repo.ext_wiki"] // Deprecated: This variable should be replaced by UnitsMap and will be dropped in later versions. Units []string `json:"units"` @@ -53,9 +51,8 @@ type EditTeamOption struct { // The description of the team Description *string `json:"description" binding:"MaxSize(255)"` // Whether the team has access to all repositories in the organization - IncludesAllRepositories *bool `json:"includes_all_repositories"` - // enum: ["read","write","admin"] - Permission string `json:"permission"` + IncludesAllRepositories *bool `json:"includes_all_repositories"` + Permission RepoWritePermission `json:"permission"` // example: ["repo.code","repo.issues","repo.ext_issues","repo.wiki","repo.pulls","repo.releases","repo.projects","repo.ext_wiki"] // Deprecated: This variable should be replaced by UnitsMap and will be dropped in later versions. Units []string `json:"units"` diff --git a/modules/structs/project.go b/modules/structs/project.go new file mode 100644 index 0000000000..5feb122767 --- /dev/null +++ b/modules/structs/project.go @@ -0,0 +1,33 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package structs + +import ( + "time" +) + +// Project represents a project +// swagger:model +type Project struct { + // ID is the unique identifier for the project + ID int64 `json:"id"` + // Title is the title of the project + Title string `json:"title"` + // Description provides details about the project + Description string `json:"description"` + // OwnerID is the owner of the project (for org-level projects) + OwnerID int64 `json:"owner_id,omitempty"` + // RepoID is the repository this project belongs to (for repo-level projects) + RepoID int64 `json:"repo_id,omitempty"` + // CreatorID is the user who created the project + CreatorID int64 `json:"creator_id"` + // IsClosed indicates if the project is closed + IsClosed bool `json:"is_closed"` + // swagger:strfmt date-time + Created time.Time `json:"created_at"` + // swagger:strfmt date-time + Updated time.Time `json:"updated_at"` + // swagger:strfmt date-time + Closed *time.Time `json:"closed_at,omitempty"` +} diff --git a/modules/structs/repo.go b/modules/structs/repo.go index 7cd64fd7a4..0c3a0ab44e 100644 --- a/modules/structs/repo.go +++ b/modules/structs/repo.go @@ -8,6 +8,15 @@ import ( "time" ) +// ObjectFormatName is the git hash algorithm used by a repository. +// swagger:enum ObjectFormatName +type ObjectFormatName string + +const ( + ObjectFormatSHA1 ObjectFormatName = "sha1" + ObjectFormatSHA256 ObjectFormatName = "sha256" +) + // Permission represents a set of permissions type Permission struct { Admin bool `json:"admin"` // Admin indicates if the user is an administrator of the repository. 
@@ -114,8 +123,7 @@ type Repository struct { Internal bool `json:"internal"` MirrorInterval string `json:"mirror_interval"` // ObjectFormatName of the underlying git repository - // enum: ["sha1","sha256"] - ObjectFormatName string `json:"object_format_name"` + ObjectFormatName ObjectFormatName `json:"object_format_name"` // swagger:strfmt date-time MirrorUpdated time.Time `json:"mirror_updated"` RepoTransfer *RepoTransfer `json:"repo_transfer,omitempty"` @@ -153,8 +161,7 @@ type CreateRepoOption struct { // enum: ["default","collaborator","committer","collaboratorcommitter"] TrustModel string `json:"trust_model"` // ObjectFormatName of the underlying git repository, empty string for default (sha1) - // enum: ["sha1","sha256"] - ObjectFormatName string `json:"object_format_name" binding:"MaxSize(6)"` + ObjectFormatName ObjectFormatName `json:"object_format_name" binding:"MaxSize(6)"` } // EditRepoOption options when editing a repository's properties @@ -229,6 +236,12 @@ type EditRepoOption struct { MirrorInterval *string `json:"mirror_interval,omitempty"` // enable prune - remove obsolete remote-tracking references when mirroring EnablePrune *bool `json:"enable_prune,omitempty"` + // authentication username for the remote repository (mirrors) + MirrorUsername *string `json:"mirror_username,omitempty"` + // authentication password for the remote repository (mirrors) + MirrorPassword *string `json:"mirror_password,omitempty"` + // authentication token for the remote repository (mirrors) + MirrorToken *string `json:"mirror_token,omitempty"` } // GenerateRepoOption options when creating a repository using a template diff --git a/modules/structs/repo_collaborator.go b/modules/structs/repo_collaborator.go index 6b315df403..fb36ecf485 100644 --- a/modules/structs/repo_collaborator.go +++ b/modules/structs/repo_collaborator.go @@ -3,17 +3,41 @@ package structs +// RepoWritePermission is a permission level callers may grant to a team or +// collaborator on input. 
Output fields use AccessLevelName instead. +// swagger:enum RepoWritePermission +type RepoWritePermission string + +const ( + RepoWritePermissionRead RepoWritePermission = "read" + RepoWritePermissionWrite RepoWritePermission = "write" + RepoWritePermissionAdmin RepoWritePermission = "admin" +) + +// AccessLevelName is the string rendering of a perm.AccessMode produced on +// API responses. Callers must not send these values; use RepoWritePermission +// on input. +// swagger:enum AccessLevelName +type AccessLevelName string + +const ( + AccessLevelNameNone AccessLevelName = "none" + AccessLevelNameRead AccessLevelName = "read" + AccessLevelNameWrite AccessLevelName = "write" + AccessLevelNameAdmin AccessLevelName = "admin" + AccessLevelNameOwner AccessLevelName = "owner" +) + // AddCollaboratorOption options when adding a user as a collaborator of a repository type AddCollaboratorOption struct { - // enum: ["read","write","admin"] // Permission level to grant the collaborator - Permission *string `json:"permission"` + Permission *RepoWritePermission `json:"permission"` } // RepoCollaboratorPermission to get repository permission for a collaborator type RepoCollaboratorPermission struct { // Permission level of the collaborator - Permission string `json:"permission"` + Permission AccessLevelName `json:"permission"` // RoleName is the name of the permission role RoleName string `json:"role_name"` // User information of the collaborator diff --git a/modules/structs/user.go b/modules/structs/user.go index 90dbcff25c..a25c0c5a1e 100644 --- a/modules/structs/user.go +++ b/modules/structs/user.go @@ -51,7 +51,7 @@ type User struct { // the user's description Description string `json:"description"` // User visibility level option: public, limited, private - Visibility string `json:"visibility"` + Visibility UserVisibility `json:"visibility"` // user counts Followers int `json:"followers_count"` diff --git a/modules/structs/visible_type.go b/modules/structs/visible_type.go 
index b5ff353b87..f537884963 100644 --- a/modules/structs/visible_type.go +++ b/modules/structs/visible_type.go @@ -56,3 +56,14 @@ func ExtractKeysFromMapString(in map[string]VisibleType) (keys []string) { } return keys } + +// UserVisibility defines the visibility level of a user or organization as +// rendered in API payloads. The DB representation is VisibleType (int). +// swagger:enum UserVisibility +type UserVisibility string + +const ( + UserVisibilityPublic UserVisibility = "public" + UserVisibilityLimited UserVisibility = "limited" + UserVisibilityPrivate UserVisibility = "private" +) diff --git a/modules/templates/page.go b/modules/templates/page.go index 32e52bb68e..475cb0d678 100644 --- a/modules/templates/page.go +++ b/modules/templates/page.go @@ -5,6 +5,7 @@ package templates import ( "context" + "fmt" "html/template" "io" "net/http" @@ -36,7 +37,11 @@ func (r *pageRenderer) funcMapDummy() template.FuncMap { } func (r *pageRenderer) TemplateLookup(tmpl string, templateCtx context.Context) (TemplateExecutor, error) { //nolint:revive // we don't use ctx, only pass it to the template executor - return r.tmplRenderer.Templates().Executor(tmpl, r.funcMap(templateCtx)) + tmpls := r.tmplRenderer.Templates() + if tmpls == nil { + return nil, fmt.Errorf("no templates defined for %s", tmpl) + } + return tmpls.Executor(tmpl, r.funcMap(templateCtx)) } func (r *pageRenderer) HTML(w io.Writer, status int, tplName TplName, data any, templateCtx context.Context) error { //nolint:revive // we don't use ctx, only pass it to the template executor diff --git a/modules/templates/util_slice.go b/modules/templates/util_slice.go index a3318cc11a..e74b308471 100644 --- a/modules/templates/util_slice.go +++ b/modules/templates/util_slice.go @@ -6,6 +6,11 @@ package templates import ( "fmt" "reflect" + "slices" + "strconv" + "strings" + + "code.gitea.io/gitea/modules/util" ) type SliceUtils struct{} @@ -33,3 +38,29 @@ func (su *SliceUtils) Contains(s, v any) bool { } return 
false } + +// JoinInt64 joins a slice of int64 values into a comma-separated string. +func (su *SliceUtils) JoinInt64(values []int64) string { + if len(values) == 0 { + return "" + } + strs := make([]string, len(values)) + for i, v := range values { + strs[i] = strconv.FormatInt(v, 10) + } + return strings.Join(strs, ",") +} + +func (su *SliceUtils) JoinToggleIDs(values []int64, target int64) (ret struct { + IsIncluded bool + ToggledIDs string +}, +) { + ret.IsIncluded = slices.Contains(values, target) + if ret.IsIncluded { + ret.ToggledIDs = su.JoinInt64(util.SliceRemoveAll(slices.Clone(values), target)) + } else { + ret.ToggledIDs = su.JoinInt64(append(values, target)) + } + return ret +} diff --git a/modules/templates/util_test.go b/modules/templates/util_test.go index a6448a6ff2..88b05485af 100644 --- a/modules/templates/util_test.go +++ b/modules/templates/util_test.go @@ -70,6 +70,16 @@ func TestUtils(t *testing.T) { actual = execTmpl("{{StringUtils.Contains .String .Value}}", map[string]any{"String": "abc", "Value": "x"}) assert.Equal(t, "false", actual) + // Test JoinInt64 + actual = execTmpl("{{SliceUtils.JoinInt64 .Values}}", map[string]any{"Values": []int64{1, 2, 3}}) + assert.Equal(t, "1,2,3", actual) + + actual = execTmpl("{{SliceUtils.JoinInt64 .Values}}", map[string]any{"Values": []int64{}}) + assert.Empty(t, actual) + + actual = execTmpl("{{SliceUtils.JoinInt64 .Values}}", map[string]any{"Values": []int64{42}}) + assert.Equal(t, "42", actual) + tmpl := template.New("test") tmpl.Funcs(template.FuncMap{"SliceUtils": NewSliceUtils, "StringUtils": NewStringUtils}) template.Must(tmpl.Parse("{{SliceUtils.Contains .Slice .Value}}")) diff --git a/modules/util/diff_slice_test.go b/modules/util/diff_slice_test.go new file mode 100644 index 0000000000..cd61fb571b --- /dev/null +++ b/modules/util/diff_slice_test.go @@ -0,0 +1,74 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package util + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDiffSliceBasic(t *testing.T) { + // Typical integer cases + t.Run("additions", func(t *testing.T) { + added, removed := DiffSlice([]int{1, 2}, []int{1, 2, 3}) + assert.Equal(t, []int{3}, added) + assert.Empty(t, removed) + }) + + t.Run("removals", func(t *testing.T) { + added, removed := DiffSlice([]int{1, 2, 3}, []int{1, 2}) + assert.Empty(t, added) + assert.Equal(t, []int{3}, removed) + }) + + t.Run("no changes", func(t *testing.T) { + added, removed := DiffSlice([]int{1, 2}, []int{1, 2}) + assert.Empty(t, added) + assert.Empty(t, removed) + }) + + t.Run("empty slices", func(t *testing.T) { + added, removed := DiffSlice([]int{}, []int{}) + assert.Empty(t, added) + assert.Empty(t, removed) + }) + + t.Run("overlapping elements", func(t *testing.T) { + added, removed := DiffSlice([]int{1, 2, 4}, []int{2, 3, 4}) + assert.Equal(t, []int{3}, added) + assert.Equal(t, []int{1}, removed) + }) +} + +func TestDiffSliceOrderAndDuplicates(t *testing.T) { + oldSlice := []int{1, 2, 2, 3} + newSlice := []int{2, 4, 2, 5} + + added, removed := DiffSlice(oldSlice, newSlice) + assert.Equal(t, []int{4, 5}, added) + assert.Equal(t, []int{1, 3}, removed) +} + +func TestDiffSliceDeduplicatesOutput(t *testing.T) { + // Test case from issue: newSlice contains [4, 4, 5] and oldSlice is [1] + // added should return [4, 5], not [4, 4, 5] + t.Run("deduplicates added", func(t *testing.T) { + added, removed := DiffSlice([]int{1}, []int{4, 4, 5}) + assert.Equal(t, []int{4, 5}, added) + assert.Equal(t, []int{1}, removed) + }) + + t.Run("deduplicates removed", func(t *testing.T) { + added, removed := DiffSlice([]int{1, 1, 2}, []int{3}) + assert.Equal(t, []int{3}, added) + assert.Equal(t, []int{1, 2}, removed) + }) + + t.Run("deduplicates both", func(t *testing.T) { + added, removed := DiffSlice([]int{1, 1, 2, 2}, []int{3, 3, 4, 4}) + assert.Equal(t, []int{3, 4}, added) + 
assert.Equal(t, []int{1, 2}, removed) + }) +} diff --git a/modules/util/sanitize.go b/modules/util/sanitize.go index 0dd8b342a2..88ca34788f 100644 --- a/modules/util/sanitize.go +++ b/modules/util/sanitize.go @@ -5,7 +5,8 @@ package util import ( "bytes" - "unicode" + "net" + "strings" ) type sanitizedError struct { @@ -25,48 +26,103 @@ func SanitizeErrorCredentialURLs(err error) error { return sanitizedError{err: err} } -const userPlaceholder = "sanitized-credential" - var schemeSep = []byte("://") -// SanitizeCredentialURLs remove all credentials in URLs (starting with "scheme://") for the input string: "https://user:pass@domain.com" => "https://sanitized-credential@domain.com" +const userInfoPlaceholder = "(masked)" + +// SanitizeCredentialURLs removes all credentials in URLs for the input string: +// * "https://userinfo@domain.com" => "https://***@domain.com" +// * "user:pass@domain.com" => "***@domain.com" +// "***" is a magic string used internally; it is not guaranteed to be any particular value. 
func SanitizeCredentialURLs(s string) string { + sepColPos := strings.Index(s, ":") + if sepColPos == -1 { + return s // fast path: no colon, unlikely contain any URL credential + } + sepAtPos := strings.Index(s[sepColPos+1:], "@") + if sepAtPos == -1 { + return s // fast path: no "@" after colon, unlikely contain any URL credential + } + sepAtPos += sepColPos + 1 + + res := make([]byte, 0, len(s)+len(userInfoPlaceholder)) // a best guess to avoid too many re-allocations bs := UnsafeStringToBytes(s) - schemeSepPos := bytes.Index(bs, schemeSep) - if schemeSepPos == -1 || bytes.IndexByte(bs[schemeSepPos:], '@') == -1 { - return s // fast return if there is no URL scheme or no userinfo - } - out := make([]byte, 0, len(bs)+len(userPlaceholder)) - for schemeSepPos != -1 { - schemeSepPos += 3 // skip the "://" - sepAtPos := -1 // the possible '@' position: "https://foo@[^here]host" - sepEndPos := schemeSepPos // the possible end position: "The https://host[^here] in log for test" - sepLoop: - for ; sepEndPos < len(bs); sepEndPos++ { - c := bs[sepEndPos] - if ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z') || ('0' <= c && c <= '9') { - continue - } + for { + // left part (before "@") is likely to be the "userinfo" (single username, or "username:password") + leftPos := sepAtPos - 1 + leftLoop: + for leftPos >= 0 { + c := bs[leftPos] switch c { - case '@': - sepAtPos = sepEndPos case '-', '.', '_', '~', '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '%': - continue // due to RFC 3986, userinfo can contain - . _ ~ ! $ & ' ( ) * + , ; = : and any percent-encoded chars + // RFC 3986, userinfo can contain - . _ ~ ! 
$ & ' ( ) * + , ; = : and any percent-encoded chars default: - break sepLoop // if it is an invalid char for URL (eg: space, '/', and others), stop the loop + valid := 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9' + if !valid { + break leftLoop + } } + leftPos-- } - // if there is '@', and the string is like "s://u@h", then hide the "u" part - if sepAtPos != -1 && (schemeSepPos >= 4 && unicode.IsLetter(rune(bs[schemeSepPos-4]))) && sepAtPos-schemeSepPos > 0 && sepEndPos-sepAtPos > 0 { - out = append(out, bs[:schemeSepPos]...) - out = append(out, userPlaceholder...) - out = append(out, bs[sepAtPos:sepEndPos]...) + // left pos should point to the beginning of the left part, this pos is always valid in the buffer + leftPos++ + + // right part is likely to be the host (domain name, ip address) + rightPos := sepAtPos + 1 + rightLoop: + for rightPos < len(bs) { + c := bs[rightPos] + switch c { + case '.', '-': + // valid host char + case '[': + // ipv6 begin + if rightPos != sepAtPos+1 { + break rightLoop + } + case ']': + // ipv6 end + rightPos++ + break rightLoop + default: + valid := 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9' + if bs[sepAtPos+1] == '[' { + // ipv6 host + valid = 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' || '0' <= c && c <= '9' || c == ':' + } + if !valid { + break rightLoop + } + } + rightPos++ + } + + leading, leftPart, rightPart := bs[:leftPos], bs[leftPos:sepAtPos], bs[sepAtPos+1:rightPos] + + // Either: + // * git log message: "user:pass@host" (it contains a colon in userinfo), ignore "git@host" pattern + // * http like URL: "https://userinfo@host.com" (it has "://" before the userinfo) + needSanitize := bytes.IndexByte(leftPart, ':') >= 0 || bytes.HasSuffix(leading, schemeSep) + needSanitize = needSanitize && len(leftPart) > 0 && len(rightPart) > 0 + // TODO: can also do more checks for right part + // for example: ipv6 quick check + if needSanitize && rightPart[0] == '[' { + needSanitize = 
rightPart[len(rightPart)-1] == ']' && net.ParseIP(UnsafeBytesToString(rightPart[1:len(rightPart)-1])) != nil + } + if needSanitize { + res = append(res, leading...) + res = append(res, userInfoPlaceholder...) + res = append(res, '@') + res = append(res, rightPart...) } else { - out = append(out, bs[:sepEndPos]...) + res = append(res, bs[:rightPos]...) + } + bs = bs[rightPos:] + sepAtPos = bytes.IndexByte(bs, '@') + if sepAtPos == -1 { + break } - bs = bs[sepEndPos:] - schemeSepPos = bytes.Index(bs, schemeSep) } - out = append(out, bs...) - return UnsafeBytesToString(out) + res = append(res, bs...) + return UnsafeBytesToString(res) } diff --git a/modules/util/sanitize_test.go b/modules/util/sanitize_test.go index 0bcfd45ca4..c1a80016f8 100644 --- a/modules/util/sanitize_test.go +++ b/modules/util/sanitize_test.go @@ -13,7 +13,7 @@ import ( func TestSanitizeErrorCredentialURLs(t *testing.T) { err := errors.New("error with https://a@b.com") se := SanitizeErrorCredentialURLs(err) - assert.Equal(t, "error with https://"+userPlaceholder+"@b.com", se.Error()) + assert.Equal(t, "error with https://"+userInfoPlaceholder+"@b.com", se.Error()) } func TestSanitizeCredentialURLs(t *testing.T) { @@ -27,15 +27,35 @@ func TestSanitizeCredentialURLs(t *testing.T) { }, { "https://mytoken@github.com/go-gitea/test_repo.git", - "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", + "https://" + userInfoPlaceholder + "@github.com/go-gitea/test_repo.git", }, { "https://user:password@github.com/go-gitea/test_repo.git", - "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", + "https://" + userInfoPlaceholder + "@github.com/go-gitea/test_repo.git", + }, + { + "https://user:password@[::]/go-gitea/test_repo.git", + "https://" + userInfoPlaceholder + "@[::]/go-gitea/test_repo.git", + }, + { + "https://user:password@[2001:db8::1]:8080/go-gitea/test_repo.git", + "https://" + userInfoPlaceholder + "@[2001:db8::1]:8080/go-gitea/test_repo.git", + }, + { + "see 
https://u:p@[::1]/x and https://u2:p2@h2", + "see https://" + userInfoPlaceholder + "@[::1]/x and https://" + userInfoPlaceholder + "@h2", + }, + { + "https://user:secret@[unclosed-ipv6", + "https://user:secret@[unclosed-ipv6", + }, + { + "https://user:secret@[invalid-ipv6]", + "https://user:secret@[invalid-ipv6]", }, { "ftp://x@", - "ftp://" + userPlaceholder + "@", + "ftp://x@", }, { "ftp://x/@", @@ -43,27 +63,40 @@ func TestSanitizeCredentialURLs(t *testing.T) { }, { "ftp://u@x/@", // test multiple @ chars - "ftp://" + userPlaceholder + "@x/@", + "ftp://" + userInfoPlaceholder + "@x/@", }, { "😊ftp://u@x😊", // test unicode - "😊ftp://" + userPlaceholder + "@x😊", + "😊ftp://" + userInfoPlaceholder + "@x😊", }, { "://@", "://@", }, { - "//u:p@h", // do not process URLs without explicit scheme, they are not treated as "valid" URLs because there is no scheme context in string "//u:p@h", + "//" + userInfoPlaceholder + "@h", }, { - "s://u@h", // the minimal pattern to be sanitized - "s://" + userPlaceholder + "@h", + "s://u@h", + "s://" + userInfoPlaceholder + "@h", }, { "URLs in log https://u:b@h and https://u:b@h:80/, with https://h.com and u@h.com", - "URLs in log https://" + userPlaceholder + "@h and https://" + userPlaceholder + "@h:80/, with https://h.com and u@h.com", + "URLs in log https://" + userInfoPlaceholder + "@h and https://" + userInfoPlaceholder + "@h:80/, with https://h.com and u@h.com", + }, + { + "fatal: unable to look up username:token@github.com (port 9418)", + "fatal: unable to look up " + userInfoPlaceholder + "@github.com (port 9418)", + }, + { + "git failed for user:token@github.com/go-gitea/test_repo.git", + "git failed for " + userInfoPlaceholder + "@github.com/go-gitea/test_repo.git", + }, + { + // SSH-form git URL ("git@host:path") must not let a later credential URL through + "failed remote git@github.com:foo, retried via https://user:tok@github.com/foo", + "failed remote git@github.com:foo, retried via https://" + userInfoPlaceholder + 
"@github.com/foo", }, } diff --git a/modules/util/util.go b/modules/util/util.go index 04d0fb584d..17733736be 100644 --- a/modules/util/util.go +++ b/modules/util/util.go @@ -15,6 +15,8 @@ import ( "strings" "sync" + "code.gitea.io/gitea/modules/container" + "golang.org/x/text/cases" "golang.org/x/text/language" ) @@ -291,3 +293,21 @@ func NormalizeStringEOL(input string) string { // Other than this, we should respect the original content, even leading or trailing spaces. return UnsafeBytesToString(NormalizeEOL(UnsafeStringToBytes(input))) } + +func DiffSlice[T comparable](oldSlice, newSlice []T) (added, removed []T) { + oldSet := container.SetOf(oldSlice...) + newSet := container.SetOf(newSlice...) + + addedSet, removedSet := container.Set[T]{}, container.Set[T]{} + for _, v := range newSlice { + if !oldSet.Contains(v) && addedSet.Add(v) { + added = append(added, v) + } + } + for _, v := range oldSlice { + if !newSet.Contains(v) && removedSet.Add(v) { + removed = append(removed, v) + } + } + return added, removed +} diff --git a/options/locale/locale_en-US.json b/options/locale/locale_en-US.json index 6281ff8f54..80692acdaf 100644 --- a/options/locale/locale_en-US.json +++ b/options/locale/locale_en-US.json @@ -1385,6 +1385,7 @@ "repo.projects.column.delete": "Delete Column", "repo.projects.column.deletion_desc": "Deleting a project column moves all related issues to the default column. Continue?", "repo.projects.column.color": "Color", + "repo.projects.column": "Column", "repo.projects.open": "Open", "repo.projects.close": "Close", "repo.projects.column.assigned_to": "Assigned to", @@ -1784,6 +1785,7 @@ "repo.pulls.review_only_possible_for_full_diff": "Review is only possible when viewing the full diff", "repo.pulls.filter_changes_by_commit": "Filter by commit", "repo.pulls.nothing_to_compare": "These branches are equal. There is no need to create a pull request.", + "repo.pulls.no_common_history": "These branches do not share a common merge base. 
Select a different base or compare branch.", "repo.pulls.nothing_to_compare_have_tag": "The selected branches/tags are equal.", "repo.pulls.nothing_to_compare_and_allow_empty_pr": "These branches are equal. This PR will be empty.", "repo.pulls.has_pull_request": "A pull request between these branches already exists: %[2]s#%[3]d", @@ -3313,7 +3315,6 @@ "admin.config.cache_config": "Cache Configuration", "admin.config.cache_adapter": "Cache Adapter", "admin.config.cache_interval": "Cache Interval", - "admin.config.cache_conn": "Cache Connection", "admin.config.cache_item_ttl": "Cache Item TTL", "admin.config.cache_test": "Test Cache", "admin.config.cache_test_failed": "Failed to probe the cache: %v.", @@ -3328,7 +3329,6 @@ "admin.config.instance_web_banner.message_placeholder": "Banner message (supports markdown)", "admin.config.session_config": "Session Configuration", "admin.config.session_provider": "Session Provider", - "admin.config.provider_config": "Provider Config", "admin.config.cookie_name": "Cookie Name", "admin.config.gc_interval_time": "GC Interval Time", "admin.config.session_life_time": "Session Life Time", diff --git a/options/locale/locale_fr-FR.json b/options/locale/locale_fr-FR.json index ff73e9a46d..870e1be56f 100644 --- a/options/locale/locale_fr-FR.json +++ b/options/locale/locale_fr-FR.json @@ -122,6 +122,7 @@ "unpin": "Désépingler", "artifacts": "Artefacts", "expired": "Expiré", + "artifact_expires_at": "Expire le %s", "confirm_delete_artifact": "Êtes-vous sûr de vouloir supprimer l’artefact « %s » ?", "archived": "Archivé", "concept_system_global": "Global", @@ -173,6 +174,8 @@ "search.org_kind": "Chercher des organisations…", "search.team_kind": "Chercher des équipes…", "search.code_kind": "Chercher du code…", + "search.code_empty": "Lancer une recherche de code.", + "search.code_empty_description": "Entrer un mot clé pour rechercher dans le code.", "search.code_search_unavailable": "La recherche dans le code n’est pas disponible 
actuellement. Veuillez contacter l’administrateur de votre instance Gitea.", "search.code_search_by_git_grep": "Les résultats de recherche de code actuels sont fournis par « git grep ». L’administrateur peut activer l’indexeur de dépôt, qui pourrait fournir de meilleurs résultats.", "search.package_kind": "Chercher des paquets…", @@ -221,6 +224,7 @@ "error.occurred": "Une erreur s’est produite", "error.report_message": "Si vous pensez qu’il s’agit d’un bug Gitea, veuillez consulter notre board GitHub ou ouvrir un nouveau ticket si nécessaire.", "error.not_found": "La cible n'a pu être trouvée.", + "error.permission_denied": "Autorisation refusée.", "error.network_error": "Erreur réseau", "startpage.app_desc": "Un service Git auto-hébergé sans prise de tête", "startpage.install": "Facile à installer", @@ -267,7 +271,7 @@ "install.lfs_path": "Répertoire racine Git LFS", "install.lfs_path_helper": "Les fichiers suivis par Git LFS seront stockés dans ce dossier. Laissez vide pour désactiver LFS.", "install.run_user": "Exécuter avec le compte d'un autre utilisateur", - "install.run_user_helper": "Le nom d'utilisateur du système d'exploitation sous lequel Gitea fonctionne. Notez que cet utilisateur doit avoir accès au dossier racine du dépôt.", + "install.run_user_helper": "L’utilisateur système exécutant Gitea, devant avoir la permission d’écriture dans le répertoire data. Ce nom est automatiquement détecté et ne peux être modifié. 
Pour changer d’utilisateur, redémarrez Gitea avec son compte respectif.", "install.domain": "Domaine du serveur", "install.domain_helper": "Domaine ou adresse d'hôte pour le serveur.", "install.ssh_port": "Port du serveur SSH", @@ -314,7 +318,6 @@ "install.invalid_db_table": "La table \"%s\" de la base de données est invalide : %v", "install.invalid_repo_path": "Le chemin racine du dépôt est invalide : %v", "install.invalid_app_data_path": "Le chemin des données de l'application est invalide : %v", - "install.run_user_not_match": "Le nom d'utilisateur sous lequel Gitea est configuré n'est pas le nom d'utilisateur actuel: %s -> %s", "install.internal_token_failed": "Impossible de générer le jeton interne : %v", "install.secret_key_failed": "Impossible de générer la clé secrète : %v", "install.save_config_failed": "L'enregistrement de la configuration %v a échoué", @@ -636,14 +639,8 @@ "user.block.unblock.failure": "Impossible de débloquer l’utilisateur : %s", "user.block.blocked": "Vous avez bloqué cet utilisateur.", "user.block.title": "Bloquer un utilisateur", - "user.block.info": "Bloquer un utilisateur l’empêche d’interagir avec des dépôts, comme ouvrir ou commenter des demandes de fusion ou des tickets. 
Apprenez-en plus sur le blocage d’un utilisateur.", - "user.block.info_1": "Bloquer un utilisateur empêche les actions suivantes sur votre compte et vos dépôts :", - "user.block.info_2": "suivre votre compte", - "user.block.info_3": "vous envoyer des notifications en vous @mentionnant", - "user.block.info_4": "vous inviter en tant que collaborateur de son(ses) dépôt(s)", - "user.block.info_5": "aimer, bifurquer ou suivre vos dépôts", - "user.block.info_6": "ouvrir ou commenter vos tickets et demandes d’ajouts", - "user.block.info_7": "réagir à vos commentaires dans les tickets ou les demandes d’ajout", + "user.block.info": "Bloquer un utilisateur l’empêche d’interagir avec des dépôts, comme ouvrir ou commenter des demandes d’ajouts ou des tickets.", + "user.block.info.docs": "En savoir plus sur le blocage d’un utilisateur.", "user.block.user_to_block": "Utilisateur à bloquer", "user.block.note": "Note", "user.block.note.title": "Note facultative :", @@ -1043,6 +1040,7 @@ "repo.forks": "Bifurcations", "repo.stars": "Favoris", "repo.reactions_more": "et %d de plus", + "repo.reactions": "Réactions", "repo.unit_disabled": "L'administrateur du site a désactivé cette section du dépôt.", "repo.language_other": "Autre", "repo.adopt_search": "Entrez un nom d’utilisateur pour rechercher les dépôts dépossédés… (laissez vide pour tous trouver)", @@ -1063,8 +1061,8 @@ "repo.transfer.accept_desc": "Transférer à « %s »", "repo.transfer.reject": "Refuser le transfert", "repo.transfer.reject_desc": "Annuler le transfert à « %s »", - "repo.transfer.no_permission_to_accept": "Vous n’êtes pas autorisé à accepter ce transfert.", - "repo.transfer.no_permission_to_reject": "Vous n’êtes pas autorisé à rejeter ce transfert.", + "repo.transfer.is_transferring": "Transfert en cours…", + "repo.transfer.is_transferring_prompt": "Le dépôt est en cours de transfert vers %s", "repo.desc.private": "Privé", "repo.desc.public": "Publique", "repo.desc.public_access": "Accès public", @@ -1215,7 
+1213,7 @@ "repo.ambiguous_runes_description": "Ce fichier contient des caractères Unicode qui peuvent être confondus avec d'autres caractères. Si vous pensez que c'est intentionnel, vous pouvez ignorer cet avertissement. Utilisez le bouton Échappe pour les dévoiler.", "repo.invisible_runes_line": "Cette ligne contient des caractères Unicode invisibles.", "repo.ambiguous_runes_line": "Cette ligne contient des caractères Unicode ambigus.", - "repo.ambiguous_character": "%[1]c [U+%04[1]X] peut être confondu avec %[2]c [U+%04[2]X].", + "repo.ambiguous_character": "%[1]s peut être confondu avec %[2]s.", "repo.escape_control_characters": "Échapper", "repo.unescape_control_characters": "Annuler l'échappement", "repo.file_copy_permalink": "Copier le lien permanent", @@ -1356,10 +1354,13 @@ "repo.projects.desc": "Gérer les tickets et les demandes d’ajouts dans les projets.", "repo.projects.description": "Description (facultative)", "repo.projects.description_placeholder": "Description", + "repo.projects.empty": "Aucun projet pour le moment.", + "repo.projects.empty_description": "Créer un projet pour coordonner les tickets et les demandes d’ajout.", "repo.projects.create": "Créer un projet", "repo.projects.title": "Titre", "repo.projects.new": "Nouveau projet", "repo.projects.new_subheader": "Coordonnez, surveillez, et mettez à jour votre travail en un seul endroit, afin que les projets restent transparents et dans les délais.", + "repo.projects.no_results": "Aucun projet ne correspond à votre recherche.", "repo.projects.create_success": "Le projet \"%s\" a été créé.", "repo.projects.deletion": "Supprimer le projet", "repo.projects.deletion_desc": "Supprimer un projet efface également de tous les tickets liés. Voulez vous continuer?", @@ -1384,6 +1385,7 @@ "repo.projects.column.delete": "Supprimer la colonne", "repo.projects.column.deletion_desc": "La suppression d’une colonne déplace tous ses tickets dans la colonne par défaut. 
Continuer ?", "repo.projects.column.color": "Couleur", + "repo.projects.column": "Colonne", "repo.projects.open": "Ouvrir", "repo.projects.close": "Fermer", "repo.projects.column.assigned_to": "Assigné à", @@ -1401,11 +1403,12 @@ "repo.issues.new": "Nouveau ticket", "repo.issues.new.title_empty": "Le titre ne peut pas être vide", "repo.issues.new.labels": "Labels", - "repo.issues.new.no_label": "Sans labels", + "repo.issues.new.no_labels": "Pas d’étiquette", "repo.issues.new.clear_labels": "Effacer les labels", "repo.issues.new.projects": "Projets", "repo.issues.new.clear_projects": "Effacer les projets", "repo.issues.new.no_projects": "Sans projet", + "repo.issues.new.no_column": "Pas de colonne", "repo.issues.new.open_projects": "Projets ouverts", "repo.issues.new.closed_projects": "Projets clôturés", "repo.issues.new.no_items": "Pas d'élément", @@ -1531,6 +1534,7 @@ "repo.issues.context.edit": "Éditer", "repo.issues.context.delete": "Supprimer", "repo.issues.no_content": "Sans contenu.", + "repo.issues.comment_no_content": "Aucun commentaire fourni.", "repo.issues.close": "Fermer le ticket", "repo.issues.comment_pull_merged_at": "a fusionné la révision %[1]s dans %[2]s %[3]s", "repo.issues.comment_manually_pull_merged_at": "a fusionné manuellement la révision %[1]s dans %[2]s %[3]s", @@ -1781,6 +1785,7 @@ "repo.pulls.review_only_possible_for_full_diff": "Une évaluation n'est possible que lorsque vous affichez le différentiel complet.", "repo.pulls.filter_changes_by_commit": "Filtrer par révision", "repo.pulls.nothing_to_compare": "Ces branches sont identiques. Il n’y a pas besoin de créer une demande d'ajout.", + "repo.pulls.no_common_history": "Ces branches ne partagent pas de base de fusion commune. Sélectionnez une autre base ou branche de comparaison.", "repo.pulls.nothing_to_compare_have_tag": "Les branches/étiquettes sélectionnées sont équivalentes.", "repo.pulls.nothing_to_compare_and_allow_empty_pr": "Ces branches sont égales. 
Cette demande d'ajout sera vide.", "repo.pulls.has_pull_request": "'Il existe déjà une demande d'ajout entre ces deux branches : %[2]s#%[3]d'", @@ -1847,6 +1852,7 @@ "repo.pulls.merge_manually": "Fusionner manuellement", "repo.pulls.merge_commit_id": "L'ID de la révision de fusion", "repo.pulls.require_signed_wont_sign": "La branche nécessite des révisions signées mais cette fusion ne sera pas signée", + "repo.pulls.require_signed_head_commits_unverified": "La branche nécessite des révisions signées, mais une ou plusieurs révisions de cette demande de fusion ne sont pas vérifiées", "repo.pulls.invalid_merge_option": "Vous ne pouvez pas utiliser cette option de fusion pour cette demande.", "repo.pulls.merge_conflict": "Échec de la fusion : il y a eu un conflit lors de la fusion. Conseil : Essayez une stratégie différente.", "repo.pulls.merge_conflict_summary": "Message d'erreur", @@ -2720,6 +2726,8 @@ "org.members": "Membres", "org.teams": "Équipes", "org.code": "Code", + "org.repos.empty": "Aucun dépôt pour le moment.", + "org.repos.empty_description": "Créez un dépôt pour partager du code avec cette organisation.", "org.lower_members": "Membres", "org.lower_repositories": "dépôts", "org.create_new_team": "Nouvelle équipe", @@ -2818,7 +2826,7 @@ "org.teams.manage_team_member_prompt": "Les membres sont gérés par des équipes. 
Ajoutez des utilisateurs à une équipe pour les inviter dans cette organisation.", "org.teams.update_settings": "Appliquer les paramètres", "org.teams.delete_team": "Supprimer l'équipe", - "org.teams.add_team_member": "Ajouter un Membre", + "org.teams.add_team_member": "Ajouter un membre", "org.teams.invite_team_member": "Inviter à %s", "org.teams.invite_team_member.list": "Invitations en attente", "org.teams.delete_team_title": "Supprimer l'équipe", @@ -2856,6 +2864,8 @@ "org.worktime.date_range_end": "Date de fin", "org.worktime.query": "Demande", "org.worktime.time": "Durée", + "org.worktime.empty": "Aucun temps de travail pour le moment.", + "org.worktime.empty_description": "Ajuster la période pour consulter le temps imputé.", "org.worktime.by_repositories": "Par dépôts", "org.worktime.by_milestones": "Par jalons", "org.worktime.by_members": "Par membres", @@ -3170,6 +3180,8 @@ "admin.auths.oauth2_required_claim_name_helper": "Définissez ce nom pour restreindre la connexion depuis cette source aux utilisateurs ayant une réclamation avec ce nom", "admin.auths.oauth2_required_claim_value": "Valeur de réclamation requise", "admin.auths.oauth2_required_claim_value_helper": "Restreindre la connexion depuis cette source aux utilisateurs ayant réclamé cette valeur.", + "admin.auths.open_id_connect_external_id_claim": "Nom déclaré de l’ID externe (facultatif)", + "admin.auths.open_id_connect_external_id_claim_helper": "Nom de la demande à utiliser en tant qu’identité externe de l’utilisateur, par défaut « sub ». Pour Azure AD / Entra ID, utilisez « oid » pour migrer depuis un fournisseur Azure AD V2. Remarque : la demande « oid » nécessite « profile » dans le champ Permissions ci-dessus.", "admin.auths.oauth2_group_claim_name": "Réclamer le nom fournissant les noms de groupe pour cette source. (facultatif)", "admin.auths.oauth2_full_name_claim_name": "Nom complet réclamé. (Optionnel. 
Si défini, le nom complet de l’utilisateur sera toujours synchronisé avec cette réclamation)", "admin.auths.oauth2_ssh_public_key_claim_name": "Nom réclamé de la clé publique SSH", @@ -3222,10 +3234,8 @@ "admin.config.server_config": "Configuration du serveur", "admin.config.app_name": "Titre du site", "admin.config.app_ver": "Version de Gitea", - "admin.config.app_url": "URL de base de Gitea", "admin.config.custom_conf": "Chemin du fichier de configuration", "admin.config.custom_file_root_path": "Emplacement personnalisé du fichier racine", - "admin.config.domain": "Domaine du serveur", "admin.config.disable_router_log": "Désactiver la Journalisation du Routeur", "admin.config.run_user": "Exécuter avec l'utilisateur", "admin.config.run_mode": "Mode d'Éxécution", @@ -3305,7 +3315,6 @@ "admin.config.cache_config": "Configuration du cache", "admin.config.cache_adapter": "Adaptateur du Cache", "admin.config.cache_interval": "Intervales du Cache", - "admin.config.cache_conn": "Liaison du Cache", "admin.config.cache_item_ttl": "Durée de vie des éléments dans le cache", "admin.config.cache_test": "Test du cache", "admin.config.cache_test_failed": "Impossible d’interroger le cache : %v.", @@ -3320,7 +3329,6 @@ "admin.config.instance_web_banner.message_placeholder": "Message de bannière (supporte markdown)", "admin.config.session_config": "Configuration de session", "admin.config.session_provider": "Fournisseur de session", - "admin.config.provider_config": "Configuration du fournisseur", "admin.config.cookie_name": "Nom du cookie", "admin.config.gc_interval_time": "Intervals GC", "admin.config.session_life_time": "Durée des sessions", @@ -3506,6 +3514,7 @@ "packages.dependencies": "Dépendances", "packages.keywords": "Mots-clés", "packages.details": "Détails", + "packages.name": "Nom du paquet", "packages.details.author": "Auteur", "packages.details.project_site": "Site du projet", "packages.details.repository_site": "Site du dépôt", @@ -3601,6 +3610,18 @@ 
"packages.swift.registry": "Configurez ce registre à partir d'un terminal :", "packages.swift.install": "Ajoutez le paquet dans votre fichier Package.swift:", "packages.swift.install2": "et exécutez la commande suivante :", + "packages.terraform.install": "Définissez votre état pour utiliser le backend HTTP", + "packages.terraform.install2": "et exécutez la commande suivante :", + "packages.terraform.lock_status": "Verrouiller le statut", + "packages.terraform.locked_by": "Verrouillé par %s.", + "packages.terraform.unlocked": "Déverrouillé", + "packages.terraform.lock": "Verrouiller", + "packages.terraform.unlock": "Déverrouiller", + "packages.terraform.lock.success": "L’état Terraform a été verrouillé avec succès.", + "packages.terraform.unlock.success": "L’état Terraform a été déverrouillé avec succès.", + "packages.terraform.lock.error.already_locked": "L’état Terraform est déjà verrouillé.", + "packages.terraform.delete.locked": "L’état Terraform est verrouillé et ne peut pas être supprimé.", + "packages.terraform.delete.latest": "La dernière version d’un état Terraform ne peut pas être supprimée.", "packages.vagrant.install": "Pour ajouter une machine Vagrant, exécutez la commande suivante :", "packages.settings.link": "Lier ce paquet à un dépôt", "packages.settings.link.description": "Si vous associez un paquet à un dépôt, le paquet sera inclus dans sa liste des paquets. Seul les dépôts d’un même propriétaire peuvent être associés. Laisser ce champ vide supprimera le lien.", @@ -3614,8 +3635,13 @@ "packages.settings.delete": "Supprimer le paquet", "packages.settings.delete.description": "Supprimer un paquet est permanent et irréversible.", "packages.settings.delete.notice": "Vous êtes sur le point de supprimer %s (%s). Cette opération est irréversible, êtes-vous sûr ?", + "packages.settings.delete.notice.package": "Vous êtes sur le point de supprimer %s dans toutes ses versions. 
C’est une opération irréversible, êtes-vous sûr ?", "packages.settings.delete.success": "Le paquet a été supprimé.", + "packages.settings.delete.version.success": "La version du paquet a été supprimée.", "packages.settings.delete.error": "Impossible de supprimer le paquet.", + "packages.settings.delete.version": "Supprimer la version", + "packages.settings.delete.confirm": "Saisissez le nom du paquet pour confirmer", + "packages.settings.delete.invalid_package_name": "Le nom de paquet saisi est incorrect.", "packages.owner.settings.cargo.title": "Index du Registre Cargo", "packages.owner.settings.cargo.initialize": "Initialiser l'index", "packages.owner.settings.cargo.initialize.description": "Un dépôt Git d’index spécial est nécessaire pour utiliser le registre Cargo. Utiliser cette option va (re)créer le dépôt et le configurer automatiquement.", @@ -3746,9 +3772,11 @@ "actions.runs.delete.description": "Êtes-vous sûr de vouloir supprimer définitivement cette exécution ? Cette action ne peut pas être annulée.", "actions.runs.not_done": "Cette exécution du flux de travail n’est pas terminée.", "actions.runs.view_workflow_file": "Voir le fichier du flux de travail", - "actions.runs.workflow_graph": "Graphique du flux", "actions.runs.summary": "Résumé", "actions.runs.all_jobs": "Toutes les tâches", + "actions.runs.attempt": "Tentative", + "actions.runs.latest": "Dernière", + "actions.runs.latest_attempt": "Dernière tentative", "actions.runs.triggered_via": "Déclenché via %s", "actions.runs.total_duration": "Durée totale :", "actions.workflow.disable": "Désactiver le flux de travail", diff --git a/options/locale/locale_ga-IE.json b/options/locale/locale_ga-IE.json index cee97810b6..3584e0b597 100644 --- a/options/locale/locale_ga-IE.json +++ b/options/locale/locale_ga-IE.json @@ -122,6 +122,7 @@ "unpin": "Díphoráil", "artifacts": "Déantáin", "expired": "Imithe in éag", + "artifact_expires_at": "Éagaíonn ag %s", "confirm_delete_artifact": "An bhfuil tú cinnte gur mian 
leat an déantán '%s' a scriosadh?", "archived": "Cartlann", "concept_system_global": "Domhanda", @@ -223,6 +224,7 @@ "error.occurred": "Tharla earráid", "error.report_message": "Má chreideann tú gur fabht Gitea é seo, déan cuardach le haghaidh ceisteanna ar GitHub nó oscail eagrán nua más gá.", "error.not_found": "Ní raibh an sprioc in ann a fháil.", + "error.permission_denied": "Cead diúltaithe.", "error.network_error": "Earráid líonra", "startpage.app_desc": "Seirbhís Git gan phian, féin-óstáil", "startpage.install": "Éasca a shuiteáil", @@ -637,14 +639,8 @@ "user.block.unblock.failure": "Theip ar an úsáideoir a díbhlocáil: %s", "user.block.blocked": "Chuir tú bac ar an úsáideoir seo.", "user.block.title": "Cuir bac ar úsáideoir", - "user.block.info": "Cuireann blocáil úsáideora cosc orthu idirghníomhú le stórais, mar shampla iarratais tarraingthe nó saincheisteanna a oscailt nó trácht a dhéanamh orthu. Níos mó a fhoghlaim faoi bhac úsáideora.", - "user.block.info_1": "Cuireann blocáil úsáideora cosc ar na gníomhartha seo a leanas ar do chuntas agus ar do stór:", - "user.block.info_2": "ag leanúint do chuntas", - "user.block.info_3": "seol fógraí chugat ag @mentioning d'ainm úsáideora", - "user.block.info_4": "ag tabhairt cuireadh duit mar chomhoibritheoir chuig a stórtha", - "user.block.info_5": "ag réaladh, ag forcáil nó ag féachaint ar stórais", - "user.block.info_6": "ceisteanna nó iarrataí tarraingthe a oscailt agus trácht", - "user.block.info_7": "ag freagairt do do thuairimí i saincheisteanna nó i n-iarratais tarraingthe", + "user.block.info": "Má chuireann bac ar úsáideoir, cuirtear cosc ​​​​orthu idirghníomhú le stórtha, amhail iarratais tarraingthe nó saincheisteanna a oscailt nó trácht a dhéanamh orthu.", + "user.block.info.docs": "Foghlaim tuilleadh faoi úsáideoir a bhlocáil.", "user.block.user_to_block": "Úsáideoir chun blocáil", "user.block.note": "Nóta", "user.block.note.title": "Nóta roghnach:", @@ -1065,8 +1061,8 @@ "repo.transfer.accept_desc": 
"Aistriú chuig “%s”", "repo.transfer.reject": "Diúltaigh aistriú", "repo.transfer.reject_desc": "Cealaigh aistriú chuig \"%s\"", - "repo.transfer.no_permission_to_accept": "Níl cead agat glacadh leis an aistriú seo.", - "repo.transfer.no_permission_to_reject": "Níl cead agat an aistriú seo a dhiúltú.", + "repo.transfer.is_transferring": "Ag aistriú…", + "repo.transfer.is_transferring_prompt": "Tá an stórlann á aistriú go %s", "repo.desc.private": "Príobháideach", "repo.desc.public": "Poiblí", "repo.desc.public_access": "Rochtain Phoiblí", @@ -1389,6 +1385,7 @@ "repo.projects.column.delete": "Scrios Colún", "repo.projects.column.deletion_desc": "Ag scriosadh colún tionscadail aistríonn gach saincheist ghaolmhar chuig an gcolún. Lean ar aghaidh?", "repo.projects.column.color": "Dath", + "repo.projects.column": "Colún", "repo.projects.open": "Oscailte", "repo.projects.close": "Dún", "repo.projects.column.assigned_to": "Sannta do", @@ -1406,11 +1403,12 @@ "repo.issues.new": "Eagrán Nua", "repo.issues.new.title_empty": "Ní féidir leis an teideal a bheith folamh", "repo.issues.new.labels": "Lipéid", - "repo.issues.new.no_label": "Gan Lipéad", + "repo.issues.new.no_labels": "Gan lipéid", "repo.issues.new.clear_labels": "Lipéid shoiléir", "repo.issues.new.projects": "Tionscadail", "repo.issues.new.clear_projects": "Tionscadail soiléire", - "repo.issues.new.no_projects": "Gan aon tionscadal", + "repo.issues.new.no_projects": "Gan aon tionscadail", + "repo.issues.new.no_column": "Gan aon cholún", "repo.issues.new.open_projects": "Tionscadail Oscailte", "repo.issues.new.closed_projects": "Tionscadail Dúnta", "repo.issues.new.no_items": "Gan aon earraí", @@ -1787,6 +1785,7 @@ "repo.pulls.review_only_possible_for_full_diff": "Ní féidir athbhreithniú a dhéanamh ach amháin nuair a bhreathnaítear ar an difríocht iomlán", "repo.pulls.filter_changes_by_commit": "Scagaigh de réir tiomantas", "repo.pulls.nothing_to_compare": "Tá na brainsí seo cothrom. 
Ní gá iarratas tarraingthe a chruthú.", + "repo.pulls.no_common_history": "Níl bonn cumaisc coitianta ag na brainsí seo. Roghnaigh bonn difriúil nó cuir brainse i gcomparáid.", "repo.pulls.nothing_to_compare_have_tag": "Tá na brainsí/clibeanna roghnaithe comhionann.", "repo.pulls.nothing_to_compare_and_allow_empty_pr": "Tá na brainsí seo cothrom. Beidh an PR seo folamh.", "repo.pulls.has_pull_request": "Tá iarratas tarraingthe idir na brainsí seo ann cheana: %[2]s#%[3]d", @@ -1853,6 +1852,7 @@ "repo.pulls.merge_manually": "Cumaisc de láimh", "repo.pulls.merge_commit_id": "ID an tiomantis cumaisc", "repo.pulls.require_signed_wont_sign": "Éilíonn an bhrainse tiomáintí shínithe, ach ní shínífear an cumasc seo", + "repo.pulls.require_signed_head_commits_unverified": "Teastaíonn gealltanais sínithe ón mbrainse ach ní dheimhnítear gealltanas amháin nó níos mó ar an iarratas tarraingte seo", "repo.pulls.invalid_merge_option": "Ní féidir leat an rogha cumaisc seo a úsáid don iarratas tarraingthe seo.", "repo.pulls.merge_conflict": "Theip ar an gCumasc: Bhí coimhlint ann agus an cumasc á dhéanamh. 
Leid: Bain triail as straitéis dhifriúil.", "repo.pulls.merge_conflict_summary": "Teachtaireacht Earráide", @@ -3315,7 +3315,6 @@ "admin.config.cache_config": "Cumraíocht taisce", "admin.config.cache_adapter": "Cuibheoir taisce", "admin.config.cache_interval": "Eatramh Taisce", - "admin.config.cache_conn": "Ceangal Taisce", "admin.config.cache_item_ttl": "Mír Taisce TTL", "admin.config.cache_test": "Taisce Tástáil", "admin.config.cache_test_failed": "Theip ar an taisce a thaiscéaladh: %v.", @@ -3330,7 +3329,6 @@ "admin.config.instance_web_banner.message_placeholder": "Teachtaireacht meirge (tacaíonn sé le Markdown)", "admin.config.session_config": "Cumraíocht Seisiúin", "admin.config.session_provider": "Soláthraí Seisiúin", - "admin.config.provider_config": "Cumraíocht Soláthraí", "admin.config.cookie_name": "Ainm Fianán", "admin.config.gc_interval_time": "Am Eatramh GC", "admin.config.session_life_time": "Am Saoil na Seisiúin", @@ -3774,9 +3772,11 @@ "actions.runs.delete.description": "An bhfuil tú cinnte gur mian leat an rith sreabha oibre seo a scriosadh go buan? 
Ní féidir an gníomh seo a chealú.", "actions.runs.not_done": "Níl an rith sreabha oibre seo críochnaithe.", "actions.runs.view_workflow_file": "Féach ar chomhad sreabha oibre", - "actions.runs.workflow_graph": "Graf Sreabhadh Oibre", "actions.runs.summary": "Achoimre", "actions.runs.all_jobs": "Gach post", + "actions.runs.attempt": "Iarracht", + "actions.runs.latest": "Is déanaí", + "actions.runs.latest_attempt": "An iarracht is déanaí", "actions.runs.triggered_via": "Spreagtha trí %s", "actions.runs.total_duration": "Fad iomlán:", "actions.workflow.disable": "Díchumasaigh sreabhadh oibre", diff --git a/options/locale/locale_zh-CN.json b/options/locale/locale_zh-CN.json index 0bab3a0010..5960e554b7 100644 --- a/options/locale/locale_zh-CN.json +++ b/options/locale/locale_zh-CN.json @@ -2474,7 +2474,7 @@ "repo.settings.tags.protection.allowed.noone": "无", "repo.settings.tags.protection.create": "保护 Git 标签", "repo.settings.tags.protection.none": "没有受保护的 Git 标签。", - "repo.settings.tags.protection.pattern.description": "您可以使用单个名称或 glob 表达式匹配或正则表达式来匹配多个 Git 标签。了解详情请访问 保护标签指南。", + "repo.settings.tags.protection.pattern.description": "您可以使用单个名称或 glob 表达式匹配或正则表达式来匹配多个 Git 标签。欲了解详情请访问 保护标签指南。", "repo.settings.bot_token": "Bot 令牌", "repo.settings.chat_id": "聊天 ID", "repo.settings.thread_id": "线程 ID", @@ -3099,11 +3099,11 @@ "admin.packages.size": "大小", "admin.packages.published": "已发布", "admin.defaulthooks": "默认 Web 钩子", - "admin.defaulthooks.desc": "当某些 Gitea 事件触发时,Web 钩子自动向服务器发出 HTTP POST 请求。这里定义的 Web 钩子是默认配置,将被复制到所有新的仓库中。详情请访问 Web 钩子指南。", + "admin.defaulthooks.desc": "当某些 Gitea 事件触发时,Web 钩子自动向服务器发出 HTTP POST 请求。这里定义的 Web 钩子是默认配置,将被复制到所有新的仓库中。欲了解详情请访问 Web 钩子指南。", "admin.defaulthooks.add_webhook": "添加默认 Web 钩子", "admin.defaulthooks.update_webhook": "更新默认 Web 钩子", "admin.systemhooks": "系统 Web 钩子", - "admin.systemhooks.desc": "当某些 Gitea 事件触发时,Web 钩子自动向服务器发出 HTTP POST 请求。这里定义的 Web 钩子将作用于系统上的所有仓库,所以请考虑这可能带来的任何性能影响。了解详情请访问 Web 钩子指南。", + "admin.systemhooks.desc": "当某些 Gitea 
事件触发时,Web 钩子自动向服务器发出 HTTP POST 请求。这里定义的 Web 钩子将作用于系统上的所有仓库,所以请考虑这可能带来的任何性能影响。欲了解详情请访问 Web 钩子指南。", "admin.systemhooks.add_webhook": "添加系统 Web 钩子", "admin.systemhooks.update_webhook": "更新系统 Web 钩子", "admin.auths.auth_manage_panel": "认证源管理", diff --git a/package.json b/package.json index 04f27fec82..eafd95da1f 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,7 @@ "@github/paste-markdown": "1.5.3", "@github/text-expander-element": "2.9.4", "@lezer/highlight": "1.2.3", - "@mcaptcha/vanilla-glue": "0.1.0-alpha-3", + "@mcaptcha/vanilla-glue": "0.1.0-rc2", "@mermaid-js/layout-elk": "0.2.1", "@primer/octicons": "19.24.1", "@replit/codemirror-indentation-markers": "6.5.3", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index cca57d3b19..7ebedae9a7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -89,8 +89,8 @@ importers: specifier: 1.2.3 version: 1.2.3 '@mcaptcha/vanilla-glue': - specifier: 0.1.0-alpha-3 - version: 0.1.0-alpha-3 + specifier: 0.1.0-rc2 + version: 0.1.0-rc2 '@mermaid-js/layout-elk': specifier: 0.2.1 version: 0.2.1(mermaid@11.14.0) @@ -978,11 +978,11 @@ packages: '@marijn/find-cluster-break@1.0.2': resolution: {integrity: sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==} - '@mcaptcha/core-glue@0.1.0-alpha-5': - resolution: {integrity: sha512-16qWm5O5X0Y9LXULULaAks8Vf9FNlUUBcR5KDt49aWhFhG5++JzxNmCwQM9EJSHNU7y0U+FdyAWcGmjfKlkRLA==} + '@mcaptcha/core-glue@0.1.0-rc1': + resolution: {integrity: sha512-P4SgUioJDR38QpnP9sPY72NyaYex8MXD6RbzrfKra+ngamT26XjqVZEHBiZU2RT7u0SsWhuko4N1ntNOghsgpg==} - '@mcaptcha/vanilla-glue@0.1.0-alpha-3': - resolution: {integrity: sha512-GT6TJBgmViGXcXiT5VOr+h/6iOnThSlZuCoOWncubyTZU9R3cgU5vWPkF7G6Ob6ee2CBe3yqBxxk24CFVGTVXw==} + '@mcaptcha/vanilla-glue@0.1.0-rc2': + resolution: {integrity: sha512-LDjn9lrKioJ3zwaQOfql7PXsnxCAHg7b1rPw7G0OxpvVE7xLB/a40SHfIIiocce2VS9TPI4MbcKm5pcuy8fU5g==} '@mermaid-js/layout-elk@0.2.1': resolution: {integrity: 
sha512-MX9jwhMyd5zDcFsYcl3duDUkKhjVRUCGEQrdCeNV5hCIR6+3FuDDbRbFmvVbAu15K1+juzsYGG+K8MDvCY1Amg==} @@ -4856,11 +4856,11 @@ snapshots: '@marijn/find-cluster-break@1.0.2': {} - '@mcaptcha/core-glue@0.1.0-alpha-5': {} + '@mcaptcha/core-glue@0.1.0-rc1': {} - '@mcaptcha/vanilla-glue@0.1.0-alpha-3': + '@mcaptcha/vanilla-glue@0.1.0-rc2': dependencies: - '@mcaptcha/core-glue': 0.1.0-alpha-5 + '@mcaptcha/core-glue': 0.1.0-rc1 '@mermaid-js/layout-elk@0.2.1(mermaid@11.14.0)': dependencies: diff --git a/routers/api/v1/admin/org.go b/routers/api/v1/admin/org.go index 6390bb7e82..28c21de15f 100644 --- a/routers/api/v1/admin/org.go +++ b/routers/api/v1/admin/org.go @@ -48,7 +48,7 @@ func CreateOrg(ctx *context.APIContext) { visibility := api.VisibleTypePublic if form.Visibility != "" { - visibility = api.VisibilityModes[form.Visibility] + visibility = api.VisibilityModes[string(form.Visibility)] } org := &organization.Organization{ diff --git a/routers/api/v1/admin/user.go b/routers/api/v1/admin/user.go index b9dd12f8ff..8a6924b4a3 100644 --- a/routers/api/v1/admin/user.go +++ b/routers/api/v1/admin/user.go @@ -123,7 +123,7 @@ func CreateUser(ctx *context.APIContext) { } if form.Visibility != "" { - visibility := api.VisibilityModes[form.Visibility] + visibility := api.VisibilityModes[string(form.Visibility)] overwriteDefault.Visibility = &visibility } @@ -239,7 +239,7 @@ func EditUser(ctx *context.APIContext) { Description: optional.FromPtr(form.Description), IsActive: optional.FromPtr(form.Active), IsAdmin: user_service.UpdateOptionFieldFromPtr(form.Admin), - Visibility: optional.FromMapLookup(api.VisibilityModes, form.Visibility), + Visibility: optional.FromMapLookup(api.VisibilityModes, string(form.Visibility)), AllowGitHook: optional.FromPtr(form.AllowGitHook), AllowImportLocal: optional.FromPtr(form.AllowImportLocal), MaxRepoCreation: optional.FromPtr(form.MaxRepoCreation), diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index 633aa77430..a8bfa0965e 100644 --- 
a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -1272,7 +1272,7 @@ func Routes() *web.Router { m.Delete("", reqRepoWriter(unit.TypeActions), repo.DeleteArtifact) }) m.Get("/artifacts/{artifact_id}/zip", repo.DownloadArtifact) - }, reqRepoReader(unit.TypeActions), context.ReferencesGitRepo(true)) + }, reqRepoReader(unit.TypeActions)) m.Group("/keys", func() { m.Combo("").Get(repo.ListDeployKeys). Post(bind(api.CreateKeyOption{}), repo.CreateDeployKey) diff --git a/routers/api/v1/org/org.go b/routers/api/v1/org/org.go index 7c6d11bbc4..2df871a0aa 100644 --- a/routers/api/v1/org/org.go +++ b/routers/api/v1/org/org.go @@ -258,7 +258,7 @@ func Create(ctx *context.APIContext) { visibility := api.VisibleTypePublic if form.Visibility != "" { - visibility = api.VisibilityModes[form.Visibility] + visibility = api.VisibilityModes[string(form.Visibility)] } org := &organization.Organization{ @@ -402,7 +402,7 @@ func Edit(ctx *context.APIContext) { Description: optional.FromPtr(form.Description), Website: optional.FromPtr(form.Website), Location: optional.FromPtr(form.Location), - Visibility: optional.FromMapLookup(api.VisibilityModes, optional.FromPtr(form.Visibility).Value()), + Visibility: optional.FromMapLookup(api.VisibilityModes, string(optional.FromPtr(form.Visibility).Value())), RepoAdminChangeTeamAccess: optional.FromPtr(form.RepoAdminChangeTeamAccess), } if err := user_service.UpdateUser(ctx, ctx.Org.Organization.AsUser(), opts); err != nil { diff --git a/routers/api/v1/org/team.go b/routers/api/v1/org/team.go index 659218e837..74ad987540 100644 --- a/routers/api/v1/org/team.go +++ b/routers/api/v1/org/team.go @@ -210,7 +210,7 @@ func CreateTeam(ctx *context.APIContext) { // "422": // "$ref": "#/responses/validationError" form := web.GetForm(ctx).(*api.CreateTeamOption) - teamPermission := perm.ParseAccessMode(form.Permission, perm.AccessModeNone, perm.AccessModeAdmin) + teamPermission := perm.ParseAccessMode(string(form.Permission), perm.AccessModeNone, 
perm.AccessModeAdmin) team := &organization.Team{ OrgID: ctx.Org.Organization.ID, Name: form.Name, @@ -224,7 +224,7 @@ func CreateTeam(ctx *context.APIContext) { if len(form.UnitsMap) > 0 { attachTeamUnitsMap(team, form.UnitsMap) } else if len(form.Units) > 0 { - unitPerm := perm.ParseAccessMode(form.Permission, perm.AccessModeRead, perm.AccessModeWrite) + unitPerm := perm.ParseAccessMode(string(form.Permission), perm.AccessModeRead, perm.AccessModeWrite) attachTeamUnits(team, unitPerm, form.Units) } else { ctx.APIErrorInternal(errors.New("units permission should not be empty")) @@ -298,7 +298,7 @@ func EditTeam(ctx *context.APIContext) { isAuthChanged := false isIncludeAllChanged := false if !team.IsOwnerTeam() && len(form.Permission) != 0 { - teamPermission := perm.ParseAccessMode(form.Permission, perm.AccessModeNone, perm.AccessModeAdmin) + teamPermission := perm.ParseAccessMode(string(form.Permission), perm.AccessModeNone, perm.AccessModeAdmin) if team.AccessMode != teamPermission { isAuthChanged = true team.AccessMode = teamPermission @@ -314,7 +314,7 @@ func EditTeam(ctx *context.APIContext) { if len(form.UnitsMap) > 0 { attachTeamUnitsMap(team, form.UnitsMap) } else if len(form.Units) > 0 { - unitPerm := perm.ParseAccessMode(form.Permission, perm.AccessModeRead, perm.AccessModeWrite) + unitPerm := perm.ParseAccessMode(string(form.Permission), perm.AccessModeRead, perm.AccessModeWrite) attachTeamUnits(team, unitPerm, form.Units) } } else { diff --git a/routers/api/v1/repo/action.go b/routers/api/v1/repo/action.go index 8a0be250da..1d38cc2f53 100644 --- a/routers/api/v1/repo/action.go +++ b/routers/api/v1/repo/action.go @@ -848,6 +848,12 @@ func ListActionTasks(ctx *context.APIContext) { res := new(api.ActionTaskResponse) res.TotalCount = total + taskList := actions_model.TaskList(tasks) + if err := taskList.LoadAttributes(ctx); err != nil { + ctx.APIErrorInternal(err) + return + } + res.Entries = make([]*api.ActionTask, len(tasks)) for i := range tasks { 
convertedTask, err := convert.ToActionTask(ctx, tasks[i]) @@ -859,7 +865,7 @@ func ListActionTasks(ctx *context.APIContext) { } ctx.SetLinkHeader(total, listOptions.PageSize) - ctx.SetTotalCountHeader(total) // Duplicates api response field but it's better to set it for consistency + ctx.SetTotalCountHeader(total) // Duplicates api response field, but it's better to set it for consistency ctx.JSON(http.StatusOK, &res) } @@ -1155,6 +1161,7 @@ func getCurrentRepoActionRunByID(ctx *context.APIContext) *actions_model.ActionR ctx.APIErrorInternal(err) return nil } + run.Repo = ctx.Repo.Repository return run } @@ -1226,7 +1233,7 @@ func GetWorkflowRun(ctx *context.APIContext) { return } - convertedRun, err := convert.ToActionWorkflowRun(ctx, ctx.Repo.Repository, run, nil) + convertedRun, err := convert.ToActionWorkflowRun(ctx, run, nil) if err != nil { ctx.APIErrorInternal(err) return @@ -1275,7 +1282,7 @@ func GetWorkflowRunAttempt(ctx *context.APIContext) { return } - convertedRun, err := convert.ToActionWorkflowRun(ctx, ctx.Repo.Repository, run, attempt) + convertedRun, err := convert.ToActionWorkflowRun(ctx, run, attempt) if err != nil { ctx.APIErrorInternal(err) return @@ -1330,7 +1337,7 @@ func RerunWorkflowRun(ctx *context.APIContext) { return } - convertedRun, err := convert.ToActionWorkflowRun(ctx, ctx.Repo.Repository, run, nil) + convertedRun, err := convert.ToActionWorkflowRun(ctx, run, nil) if err != nil { ctx.APIErrorInternal(err) return diff --git a/routers/api/v1/repo/collaborators.go b/routers/api/v1/repo/collaborators.go index 2c40e26508..7d9b290c2a 100644 --- a/routers/api/v1/repo/collaborators.go +++ b/routers/api/v1/repo/collaborators.go @@ -181,7 +181,7 @@ func AddOrUpdateCollaborator(ctx *context.APIContext) { p := perm.AccessModeWrite if form.Permission != nil { - p = perm.ParseAccessMode(*form.Permission, perm.AccessModeRead, perm.AccessModeWrite, perm.AccessModeAdmin) + p = perm.ParseAccessMode(string(*form.Permission), perm.AccessModeRead, 
perm.AccessModeWrite, perm.AccessModeAdmin) } if err := repo_service.AddOrUpdateCollaborator(ctx, ctx.Repo.Repository, collaborator, p); err != nil { diff --git a/routers/api/v1/repo/issue.go b/routers/api/v1/repo/issue.go index f8c1c67f06..39ca7fb77e 100644 --- a/routers/api/v1/repo/issue.go +++ b/routers/api/v1/repo/issue.go @@ -690,11 +690,11 @@ func CreateIssue(ctx *context.APIContext) { form.Labels = make([]int64, 0) } - if err := issue_service.NewIssue(ctx, ctx.Repo.Repository, issue, form.Labels, nil, assigneeIDs, 0); err != nil { - if repo_model.IsErrUserDoesNotHaveAccessToRepo(err) { - ctx.APIError(http.StatusBadRequest, err) - } else if errors.Is(err, user_model.ErrBlockedUser) { + if err := issue_service.NewIssue(ctx, ctx.Repo.Repository, issue, form.Labels, nil, assigneeIDs, form.Projects); err != nil { + if errors.Is(err, user_model.ErrBlockedUser) { ctx.APIError(http.StatusForbidden, err) + } else if errors.Is(err, util.ErrPermissionDenied) || errors.Is(err, util.ErrNotExist) { + ctx.APIError(http.StatusBadRequest, err) } else { ctx.APIErrorInternal(err) } @@ -913,6 +913,18 @@ func EditIssue(ctx *context.APIContext) { } } + // Update projects if provided + if canWrite && form.Projects != nil { + if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, ctx.Doer, *form.Projects); err != nil { + if errors.Is(err, util.ErrPermissionDenied) || errors.Is(err, util.ErrNotExist) { + ctx.APIError(http.StatusBadRequest, err) + } else { + ctx.APIErrorInternal(err) + } + return + } + } + // Refetch from database to assign some automatic values issue, err = issues_model.GetIssueByID(ctx, issue.ID) if err != nil { diff --git a/routers/api/v1/repo/migrate.go b/routers/api/v1/repo/migrate.go index dc99cf8c16..7431493a3f 100644 --- a/routers/api/v1/repo/migrate.go +++ b/routers/api/v1/repo/migrate.go @@ -257,7 +257,7 @@ func handleRemoteAddrError(ctx *context.APIContext, err error) { addrErr := err.(*git.ErrInvalidCloneAddr) switch { case addrErr.IsURLError: - 
ctx.APIError(http.StatusUnprocessableEntity, err) + ctx.APIError(http.StatusUnprocessableEntity, "The provided URL is invalid.") case addrErr.IsPermissionDenied: if addrErr.LocalPath { ctx.APIError(http.StatusUnprocessableEntity, "You are not allowed to import local repositories.") diff --git a/routers/api/v1/repo/pull.go b/routers/api/v1/repo/pull.go index aeecc13f4e..2426a6b3c2 100644 --- a/routers/api/v1/repo/pull.go +++ b/routers/api/v1/repo/pull.go @@ -521,7 +521,7 @@ func CreatePullRequest(ctx *context.APIContext) { BaseBranch: compareResult.BaseRef.ShortName(), HeadRepo: compareResult.HeadRepo, BaseRepo: repo, - MergeBase: compareResult.MergeBase, + MergeBase: compareResult.CompareBase, Type: issues_model.PullRequestGitea, } @@ -1569,7 +1569,7 @@ func GetPullRequestFiles(ctx *context.APIContext) { return } - startCommitID := compareInfo.MergeBase + startCommitID := compareInfo.CompareBase endCommitID := headCommitID maxLines := setting.Git.MaxGitDiffLines diff --git a/routers/api/v1/repo/repo.go b/routers/api/v1/repo/repo.go index 24f486be9d..8b0dc7c863 100644 --- a/routers/api/v1/repo/repo.go +++ b/routers/api/v1/repo/repo.go @@ -21,6 +21,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" unit_model "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/label" "code.gitea.io/gitea/modules/log" @@ -37,6 +38,8 @@ import ( "code.gitea.io/gitea/services/convert" feed_service "code.gitea.io/gitea/services/feed" "code.gitea.io/gitea/services/issue" + "code.gitea.io/gitea/services/migrations" + mirror_service "code.gitea.io/gitea/services/mirror" repo_service "code.gitea.io/gitea/services/repository" ) @@ -262,7 +265,7 @@ func CreateUserRepo(ctx *context.APIContext, owner *user_model.User, opt api.Cre DefaultBranch: opt.DefaultBranch, TrustModel: repo_model.ToTrustModel(opt.TrustModel), IsTemplate: opt.Template, - ObjectFormatName: 
opt.ObjectFormatName, + ObjectFormatName: string(opt.ObjectFormatName), }) if err != nil { if repo_model.IsErrRepoAlreadyExist(err) { @@ -628,7 +631,11 @@ func Edit(ctx *context.APIContext) { } } - if opts.MirrorInterval != nil || opts.EnablePrune != nil { + if opts.MirrorInterval != nil || + opts.EnablePrune != nil || + opts.MirrorUsername != nil || + opts.MirrorPassword != nil || + opts.MirrorToken != nil { if err := updateMirror(ctx, opts); err != nil { return } @@ -1059,6 +1066,57 @@ func updateMirror(ctx *context.APIContext, opts api.EditRepoOption) error { log.Trace("Repository %s Mirror[%d] Set EnablePrune: %t", repo.FullName(), mirror.ID, mirror.EnablePrune) } + authUpdateRequested := opts.MirrorPassword != nil || opts.MirrorToken != nil || opts.MirrorUsername != nil + if authUpdateRequested { + remoteURL, err := gitrepo.GitRemoteGetURL(ctx, repo, mirror.GetRemoteName()) + if err != nil { + ctx.APIErrorInternal(err) + return err + } + + authUsername := "" + if opts.MirrorUsername != nil { + authUsername = *opts.MirrorUsername + } else if remoteURL.User != nil { + authUsername = remoteURL.User.Username() + } + + authPassword := "" + authToken := "" + if opts.MirrorPassword != nil { + authPassword = *opts.MirrorPassword + } + if opts.MirrorToken != nil { + authToken = *opts.MirrorToken + } + + if opts.MirrorPassword == nil && opts.MirrorToken == nil && remoteURL.User != nil && (authUsername == "" || authUsername == remoteURL.User.Username()) { + authPassword, _ = remoteURL.User.Password() + } + + if authToken != "" { + authPassword = authToken + } + + composedAddress, err := git.ParseRemoteAddr(repo.OriginalURL, authUsername, authPassword) + if err == nil { + err = migrations.IsMigrateURLAllowed(composedAddress, ctx.Doer) + } + if err != nil { + handleRemoteAddrError(ctx, err) + return err + } + + if err := mirror_service.UpdateAddress(ctx, mirror, composedAddress); err != nil { + ctx.APIErrorInternal(err) + return err + } + + if sanitized, err := 
util.SanitizeURL(repo.OriginalURL); err == nil { + mirror.RemoteAddress = sanitized + } + } + // finally update the mirror in the DB if err := repo_model.UpdateMirror(ctx, mirror); err != nil { log.Error("Failed to Set Mirror Interval: %s", err) diff --git a/routers/api/v1/shared/action.go b/routers/api/v1/shared/action.go index 1b12023d7a..6f0c024843 100644 --- a/routers/api/v1/shared/action.go +++ b/routers/api/v1/shared/action.go @@ -11,6 +11,7 @@ import ( "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/setting" @@ -62,6 +63,12 @@ func ListJobs(ctx *context.APIContext, ownerID, repoID, runID int64, runAttemptI res := new(api.ActionWorkflowJobsResponse) res.TotalCount = total + jobList := actions_model.ActionJobList(jobs) + if err := jobList.LoadAttributes(ctx, true); err != nil { + ctx.APIErrorInternal(err) + return + } + res.Entries = make([]*api.ActionWorkflowJob, len(jobs)) isRepoLevel := repoID != 0 && ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == repoID @@ -70,11 +77,11 @@ func ListJobs(ctx *context.APIContext, ownerID, repoID, runID int64, runAttemptI if isRepoLevel { repository = ctx.Repo.Repository } else { - repository, err = repo_model.GetRepositoryByID(ctx, jobs[i].RepoID) - if err != nil { - ctx.APIErrorInternal(err) + if jobs[i].Run == nil || jobs[i].Run.Repo == nil { + ctx.APIErrorInternal(fmt.Errorf("job %d is missing its run or repository", jobs[i].ID)) return } + repository = jobs[i].Run.Repo } convertedWorkflowJob, err := convert.ToActionWorkflowJob(ctx, repository, nil, jobs[i]) @@ -169,21 +176,28 @@ func ListRuns(ctx *context.APIContext, ownerID, repoID int64) { res := new(api.ActionWorkflowRunsResponse) res.TotalCount = total - res.Entries = make([]*api.ActionWorkflowRun, len(runs)) - isRepoLevel := 
repoID != 0 && ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == repoID - for i := range runs { - var repository *repo_model.Repository - if isRepoLevel { - repository = ctx.Repo.Repository - } else { - repository, err = repo_model.GetRepositoryByID(ctx, runs[i].RepoID) - if err != nil { - ctx.APIErrorInternal(err) - return - } - } + runList := actions_model.RunList(runs) + if err := runList.LoadTriggerUser(ctx); err != nil { + ctx.APIErrorInternal(err) + return + } - convertedRun, err := convert.ToActionWorkflowRun(ctx, repository, runs[i], nil) + if err := runList.LoadRepos(ctx); err != nil { + ctx.APIErrorInternal(err) + return + } + repos := repo_model.RepositoryList(container.FilterSlice(runs, func(r *actions_model.ActionRun) (*repo_model.Repository, bool) { + return r.Repo, r.Repo != nil + })) + if err := repos.LoadOwners(ctx); err != nil { + ctx.APIErrorInternal(err) + return + } + + res.Entries = make([]*api.ActionWorkflowRun, len(runs)) + for i := range runs { + // TODO: load run attempts in batch + convertedRun, err := convert.ToActionWorkflowRun(ctx, runs[i], nil) if err != nil { ctx.APIErrorInternal(err) return diff --git a/routers/common/markup.go b/routers/common/markup.go index 35b1b21f6a..05f48c7902 100644 --- a/routers/common/markup.go +++ b/routers/common/markup.go @@ -71,7 +71,7 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur case "gfm": // legacy mode rctx = renderhelper.NewRenderContextRepoFile(ctx, repoModel, renderhelper.RepoFileOptions{ DeprecatedOwnerName: repoOwnerName, DeprecatedRepoName: repoName, - CurrentRefPath: refPath, CurrentTreePath: treePath, + CurrentRefSubURL: refPath, CurrentTreePath: treePath, }) rctx = rctx.WithMarkupType(markdown.MarkupName) case "comment": @@ -87,7 +87,7 @@ func RenderMarkup(ctx *context.Base, ctxRepo *context.Repository, mode, text, ur case "file": rctx = renderhelper.NewRenderContextRepoFile(ctx, repoModel, renderhelper.RepoFileOptions{ 
DeprecatedOwnerName: repoOwnerName, DeprecatedRepoName: repoName, - CurrentRefPath: refPath, CurrentTreePath: treePath, + CurrentRefSubURL: refPath, CurrentTreePath: treePath, }) rctx = rctx.WithMarkupType("").WithRelativePath(filePath) // render the repo file content by its extension default: diff --git a/routers/init.go b/routers/init.go index 92eab5eaf2..e04b711c4d 100644 --- a/routers/init.go +++ b/routers/init.go @@ -134,12 +134,6 @@ func InitWebInstalled(ctx context.Context) { external.RegisterRenderers() markup.Init(markup_service.FormalRenderHelperFuncs()) - if setting.EnableSQLite3 { - log.Info("SQLite3 support is enabled") - } else if setting.Database.Type.IsSQLite3() { - log.Fatal("SQLite3 support is disabled, but it is used for database setting. Please get or build a Gitea release with SQLite3 support.") - } - mustInitCtx(ctx, common.InitDBEngine) log.Info("ORM engine initialization successful!") mustInit(system.Init) diff --git a/routers/install/install.go b/routers/install/install.go index a0f32fb939..718ede6564 100644 --- a/routers/install/install.go +++ b/routers/install/install.go @@ -76,7 +76,7 @@ func Install(ctx *context.Context) { form.DbSchema = setting.Database.Schema form.SSLMode = setting.Database.SSLMode - curDBType := setting.Database.Type.String() + curDBType := string(setting.Database.Type) if !slices.Contains(setting.SupportedDatabaseTypes, curDBType) { curDBType = "mysql" } @@ -328,7 +328,7 @@ func SubmitInstall(ctx *context.Context) { cfg.Section("").Key("WORK_PATH").SetValue(setting.AppWorkPath) cfg.Section("").Key("RUN_MODE").SetValue("prod") - cfg.Section("database").Key("DB_TYPE").SetValue(setting.Database.Type.String()) + cfg.Section("database").Key("DB_TYPE").SetValue(string(setting.Database.Type)) cfg.Section("database").Key("HOST").SetValue(setting.Database.Host) cfg.Section("database").Key("NAME").SetValue(setting.Database.Name) cfg.Section("database").Key("USER").SetValue(setting.Database.User) diff --git 
a/routers/web/admin/admin_test.go b/routers/web/admin/admin_test.go index ecdd462f9e..929faa1968 100644 --- a/routers/web/admin/admin_test.go +++ b/routers/web/admin/admin_test.go @@ -16,64 +16,6 @@ import ( "github.com/stretchr/testify/require" ) -func TestShadowPassword(t *testing.T) { - kases := []struct { - Provider string - CfgItem string - Result string - }{ - { - Provider: "redis", - CfgItem: "network=tcp,addr=:6379,password=gitea,db=0,pool_size=100,idle_timeout=180", - Result: "network=tcp,addr=:6379,password=******,db=0,pool_size=100,idle_timeout=180", - }, - { - Provider: "mysql", - CfgItem: "root:@tcp(localhost:3306)/gitea?charset=utf8", - Result: "root:******@tcp(localhost:3306)/gitea?charset=utf8", - }, - { - Provider: "mysql", - CfgItem: "/gitea?charset=utf8", - Result: "/gitea?charset=utf8", - }, - { - Provider: "mysql", - CfgItem: "user:mypassword@/dbname", - Result: "user:******@/dbname", - }, - { - Provider: "postgres", - CfgItem: "user=pqgotest dbname=pqgotest sslmode=verify-full", - Result: "user=pqgotest dbname=pqgotest sslmode=verify-full", - }, - { - Provider: "postgres", - CfgItem: "user=pqgotest password= dbname=pqgotest sslmode=verify-full", - Result: "user=pqgotest password=****** dbname=pqgotest sslmode=verify-full", - }, - { - Provider: "postgres", - CfgItem: "postgres://user:pass@hostname/dbname", - Result: "postgres://user:******@hostname/dbname", - }, - { - Provider: "couchbase", - CfgItem: "http://dev-couchbase.example.com:8091/", - Result: "http://dev-couchbase.example.com:8091/", - }, - { - Provider: "couchbase", - CfgItem: "http://user:the_password@dev-couchbase.example.com:8091/", - Result: "http://user:******@dev-couchbase.example.com:8091/", - }, - } - - for _, k := range kases { - assert.Equal(t, k.Result, shadowPassword(k.Provider, k.CfgItem)) - } -} - func TestSelfCheckPost(t *testing.T) { defer test.MockVariableValue(&setting.PublicURLDetection)() defer test.MockVariableValue(&setting.AppURL, "http://config/sub/")() diff 
--git a/routers/web/admin/config.go b/routers/web/admin/config.go index bf48e554df..03a15b6713 100644 --- a/routers/web/admin/config.go +++ b/routers/web/admin/config.go @@ -7,8 +7,6 @@ package admin import ( "errors" "net/http" - "net/url" - "strings" system_model "code.gitea.io/gitea/models/system" "code.gitea.io/gitea/modules/cache" @@ -59,63 +57,6 @@ func TestCache(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + "/-/admin/config") } -func shadowPasswordKV(cfgItem, splitter string) string { - fields := strings.Split(cfgItem, splitter) - for i := range fields { - if strings.HasPrefix(fields[i], "password=") { - fields[i] = "password=******" - break - } - } - return strings.Join(fields, splitter) -} - -func shadowURL(provider, cfgItem string) string { - u, err := url.Parse(cfgItem) - if err != nil { - log.Error("Shadowing Password for %v failed: %v", provider, err) - return cfgItem - } - if u.User != nil { - atIdx := strings.Index(cfgItem, "@") - if atIdx > 0 { - colonIdx := strings.LastIndex(cfgItem[:atIdx], ":") - if colonIdx > 0 { - return cfgItem[:colonIdx+1] + "******" + cfgItem[atIdx:] - } - } - } - return cfgItem -} - -func shadowPassword(provider, cfgItem string) string { - switch provider { - case "redis": - return shadowPasswordKV(cfgItem, ",") - case "mysql": - // root:@tcp(localhost:3306)/macaron?charset=utf8 - atIdx := strings.Index(cfgItem, "@") - if atIdx > 0 { - colonIdx := strings.Index(cfgItem[:atIdx], ":") - if colonIdx > 0 { - return cfgItem[:colonIdx+1] + "******" + cfgItem[atIdx:] - } - } - return cfgItem - case "postgres": - // user=jiahuachen dbname=macaron port=5432 sslmode=disable - if !strings.HasPrefix(cfgItem, "postgres://") { - return shadowPasswordKV(cfgItem, " ") - } - fallthrough - case "couchbase": - return shadowURL(provider, cfgItem) - // postgres://pqgotest:password@localhost/pqgotest?sslmode=verify-full - // Notice: use shadowURL - } - return cfgItem -} - // Config show admin config page func Config(ctx 
*context.Context) { ctx.Data["Title"] = ctx.Tr("admin.config_summary") @@ -150,8 +91,6 @@ func Config(ctx *context.Context) { ctx.Data["CacheAdapter"] = setting.CacheService.Adapter ctx.Data["CacheInterval"] = setting.CacheService.Interval - - ctx.Data["CacheConn"] = shadowPassword(setting.CacheService.Adapter, setting.CacheService.Conn) ctx.Data["CacheItemTTL"] = setting.CacheService.TTL sessionCfg := setting.SessionConfig @@ -169,7 +108,7 @@ func Config(ctx *context.Context) { sessionCfg.Secure = realSession.Secure sessionCfg.Domain = realSession.Domain } - sessionCfg.ProviderConfig = shadowPassword(sessionCfg.Provider, sessionCfg.ProviderConfig) + sessionCfg.ProviderConfig = "" ctx.Data["SessionConfig"] = sessionCfg ctx.Data["Git"] = setting.Git diff --git a/routers/web/auth/auth.go b/routers/web/auth/auth.go index 1baa022521..0503bd02f8 100644 --- a/routers/web/auth/auth.go +++ b/routers/web/auth/auth.go @@ -64,7 +64,7 @@ func prepareCommonAuthPageData(ctx *context.Context, opt CommonAuthOptions) { ctx.Data["RecaptchaSitekey"] = setting.Service.RecaptchaSitekey ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey - ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL + ctx.Data["McaptchaURL"] = strings.TrimSuffix(setting.Service.McaptchaURL, "/") ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey if setting.Service.CaptchaType == setting.ImageCaptcha { ctx.Data["Captcha"] = context.GetImageCaptcha() diff --git a/routers/web/healthcheck/check.go b/routers/web/healthcheck/check.go index de9b2c8ec1..116aab886b 100644 --- a/routers/web/healthcheck/check.go +++ b/routers/web/healthcheck/check.go @@ -111,16 +111,10 @@ func checkDatabase(ctx context.Context, checks checks) status { } if setting.Database.Type.IsSQLite3() && st.Status == pass { - if !setting.EnableSQLite3 { + if _, err := os.Stat(setting.Database.Path); err != nil { st.Status = fail st.Time = getCheckTime() - 
log.Error("SQLite3 health check failed with error: %v", "this Gitea binary is built without SQLite3 enabled") - } else { - if _, err := os.Stat(setting.Database.Path); err != nil { - st.Status = fail - st.Time = getCheckTime() - log.Error("SQLite3 file exists check failed with error: %v", err) - } + log.Error("SQLite3 file exists check failed with error: %v", err) } } diff --git a/routers/web/org/home.go b/routers/web/org/home.go index 262b001e6a..56475c47f0 100644 --- a/routers/web/org/home.go +++ b/routers/web/org/home.go @@ -182,7 +182,7 @@ func prepareOrgProfileReadme(ctx *context.Context, prepareResult *shared_user.Pr } rctx := renderhelper.NewRenderContextRepoFile(ctx, profileRepo, renderhelper.RepoFileOptions{ - CurrentRefPath: path.Join("branch", util.PathEscapeSegments(profileRepo.DefaultBranch)), + CurrentRefSubURL: path.Join("branch", util.PathEscapeSegments(profileRepo.DefaultBranch)), }) ctx.Data["ProfileReadmeContent"], err = markdown.RenderString(rctx, readmeBytes) if err != nil { diff --git a/routers/web/org/teams.go b/routers/web/org/teams.go index 10803c9fbf..7312478299 100644 --- a/routers/web/org/teams.go +++ b/routers/web/org/teams.go @@ -609,6 +609,8 @@ func DeleteTeam(ctx *context.Context) { // TeamInvite renders the team invite page func TeamInvite(ctx *context.Context) { invite, org, team, inviter, err := getTeamInviteFromContext(ctx) + // TODO: to quickly debug the UI, can uncomment this (don't worry, it won't pass CI lint) + // invite, org, team, inviter, err = &org_model.TeamInvite{}, &org_model.Organization{}, &org_model.Team{}, ctx.Doer, nil if err != nil { if org_model.IsErrTeamInviteNotFound(err) { ctx.NotFound(err) diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go index 736a2dff00..84b51bba5f 100644 --- a/routers/web/repo/commit.go +++ b/routers/web/repo/commit.go @@ -9,7 +9,6 @@ import ( "fmt" "html/template" "net/http" - "path" "strings" asymkey_model "code.gitea.io/gitea/models/asymkey" @@ -97,8 +96,6 @@ func 
Commits(ctx *context.Context) { } else { ctx.Data["CommitsTagsMap"] = commitsTagsMap } - ctx.Data["Username"] = ctx.Repo.Owner.Name - ctx.Data["Reponame"] = ctx.Repo.Repository.Name ctx.Data["CommitCount"] = commitsCount pager := context.NewPagination(commitsCount, pageSize, page, 5) @@ -164,9 +161,6 @@ func Graph(ctx *context.Context) { ctx.Data["AllRefs"] = gitRefs - ctx.Data["Username"] = ctx.Repo.Owner.Name - ctx.Data["Reponame"] = ctx.Repo.Repository.Name - divOnly := ctx.FormBool("div-only") queryParams := ctx.Req.URL.Query() queryParams.Del("div-only") @@ -210,8 +204,6 @@ func SearchCommits(ctx *context.Context) { if all { ctx.Data["All"] = true } - ctx.Data["Username"] = ctx.Repo.Owner.Name - ctx.Data["Reponame"] = ctx.Repo.Repository.Name ctx.HTML(http.StatusOK, tplCommits) } @@ -249,8 +241,6 @@ func FileHistory(ctx *context.Context) { return } - ctx.Data["Username"] = ctx.Repo.Owner.Name - ctx.Data["Reponame"] = ctx.Repo.Repository.Name ctx.Data["FileTreePath"] = ctx.Repo.TreePath ctx.Data["CommitCount"] = commitsCount @@ -322,7 +312,7 @@ func Diff(ctx *context.Context) { MaxLines: maxLines, MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters, MaxFiles: maxFiles, - WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)), + WhitespaceBehavior: gitdiff.GetWhitespaceFlag(GetWhitespaceBehavior(ctx)), }, files...) 
if err != nil { ctx.NotFound(err) @@ -347,8 +337,6 @@ func Diff(ctx *context.Context) { ctx.Data["CommitID"] = commitID ctx.Data["AfterCommitID"] = commitID - ctx.Data["Username"] = userName - ctx.Data["Reponame"] = repoName var parentCommit *git.Commit var parentCommitID string @@ -409,7 +397,7 @@ func Diff(ctx *context.Context) { if err == nil { ctx.Data["NoteCommit"] = note.Commit ctx.Data["NoteAuthor"] = user_model.ValidateCommitWithEmail(ctx, note.Commit) - rctx := renderhelper.NewRenderContextRepoComment(ctx, ctx.Repo.Repository, renderhelper.RepoCommentOptions{CurrentRefPath: path.Join("commit", util.PathEscapeSegments(commitID))}) + rctx := renderhelper.NewRenderContextRepoComment(ctx, ctx.Repo.Repository, renderhelper.RepoCommentOptions{CurrentRefSubURL: "commit/" + util.PathEscapeSegments(commitID)}) htmlMessage := template.HTML(template.HTMLEscapeString(string(charset.ToUTF8WithFallback(note.Message, charset.ConvertOpts{})))) ctx.Data["NoteRendered"], err = markup.PostProcessCommitMessage(rctx, htmlMessage) if err != nil { diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go index 7598ce561c..83451843ce 100644 --- a/routers/web/repo/compare.go +++ b/routers/web/repo/compare.go @@ -13,6 +13,7 @@ import ( "path/filepath" "sort" "strings" + "unicode" "code.gitea.io/gitea/models/db" git_model "code.gitea.io/gitea/models/git" @@ -189,8 +190,18 @@ func setCsvCompareContext(ctx *context.Context) { } } -// ParseCompareInfo parse compare info between two commit for preparing comparing references -func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { +type comparePageInfoType struct { + compareInfo *git_service.CompareInfo + nothingToCompare bool + allowCreatePull bool +} + +func newComparePageInfo() *comparePageInfoType { + return &comparePageInfoType{} +} + +// parseCompareInfo parse compare info between two commit for preparing comparing references +func (cpi *comparePageInfoType) parseCompareInfo(ctx *context.Context) error { 
baseRepo := ctx.Repo.Repository fileOnly := ctx.FormBool("file-only") @@ -199,47 +210,29 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { // remove the check when we support compare with carets if compareReq.BaseOriRefSuffix != "" { - ctx.HTTPError(http.StatusBadRequest, "Unsupported comparison syntax: ref with suffix") - return nil + return util.NewInvalidArgumentErrorf("unsupported comparison syntax: ref with suffix") } // 2 get repository and owner for head headOwner, headRepo, err := common.GetHeadOwnerAndRepo(ctx, baseRepo, compareReq) - switch { - case errors.Is(err, util.ErrInvalidArgument): - ctx.HTTPError(http.StatusBadRequest, err.Error()) - return nil - case errors.Is(err, util.ErrNotExist): - ctx.NotFound(nil) - return nil - case err != nil: - ctx.ServerError("GetHeadOwnerAndRepo", err) - return nil + if err != nil { + return err } - isSameRepo := baseRepo.ID == headRepo.ID - // 3 permission check // base repository's code unit read permission check has been done on web.go permBase := ctx.Repo.Permission // If we're not merging from the same repo: + isSameRepo := baseRepo.ID == headRepo.ID if !isSameRepo { // Assert ctx.Doer has permission to read headRepo's codes permHead, err := access_model.GetDoerRepoPermission(ctx, headRepo, ctx.Doer) if err != nil { - ctx.ServerError("GetDoerRepoPermission", err) - return nil + return err } if !permHead.CanRead(unit.TypeCode) { - if log.IsTrace() { - log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v", - ctx.Doer, - headRepo, - permHead) - } - ctx.NotFound(nil) - return nil + return util.NewNotExistErrorf("") // permission: no error message for end users } ctx.Data["CanWriteToHeadRepo"] = permHead.CanWrite(unit.TypeCode) } @@ -250,24 +243,16 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { baseRef := ctx.Repo.GitRepo.UnstableGuessRefByShortName(baseRefName) if baseRef == "" { - ctx.NotFound(nil) - return 
nil + return util.NewNotExistErrorf("no base ref: %s", baseRefName) } - var headGitRepo *git.Repository - if isSameRepo { - headGitRepo = ctx.Repo.GitRepo - } else { - headGitRepo, err = gitrepo.OpenRepository(ctx, headRepo) - if err != nil { - ctx.ServerError("OpenRepository", err) - return nil - } - defer headGitRepo.Close() + headGitRepo, err := gitrepo.RepositoryFromRequestContextOrOpen(ctx, headRepo) + if err != nil { + return err } + headRef := headGitRepo.UnstableGuessRefByShortName(headRefName) if headRef == "" { - ctx.NotFound(nil) - return nil + return util.NewNotExistErrorf("no head ref: %s", headRefName) } ctx.Data["BaseName"] = baseRepo.OwnerName @@ -291,12 +276,9 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { var rootRepo *repo_model.Repository if baseRepo.IsFork { err = baseRepo.GetBaseRepo(ctx) - if err != nil { - if !repo_model.IsErrRepoNotExist(err) { - ctx.ServerError("Unable to find root repo", err) - return nil - } - } else { + if err != nil && !repo_model.IsErrRepoNotExist(err) { + return err + } else if err == nil { rootRepo = baseRepo.BaseRepo } } @@ -313,42 +295,10 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { } } - has := headRepo != nil - // 3. If the base is a forked from "RootRepo" and the owner of - // the "RootRepo" is the :headUser - set headRepo to that - if !has && rootRepo != nil && rootRepo.OwnerID == headOwner.ID { - headRepo = rootRepo - has = true - } - - // 4. If the ctx.Doer has their own fork of the baseRepo and the headUser is the ctx.Doer - // set the headRepo to the ownFork - if !has && ownForkRepo != nil && ownForkRepo.OwnerID == headOwner.ID { - headRepo = ownForkRepo - has = true - } - - // 5. If the headOwner has a fork of the baseRepo - use that - if !has { - headRepo = repo_model.GetForkedRepo(ctx, headOwner.ID, baseRepo.ID) - has = headRepo != nil - } - - // 6. 
If the baseRepo is a fork and the headUser has a fork of that use that - if !has && baseRepo.IsFork { - headRepo = repo_model.GetForkedRepo(ctx, headOwner.ID, baseRepo.ForkID) - has = headRepo != nil - } - - // 7. Otherwise if we're not the same repo and haven't found a repo give up - if !isSameRepo && !has { - ctx.Data["PageIsComparePull"] = false - } - ctx.Data["HeadRepo"] = headRepo ctx.Data["BaseCompareRepo"] = ctx.Repo.Repository - // If we have a rootRepo and it's different from: + // If we have a rootRepo, and it's different from: // 1. the computed base // 2. the computed head // then get the branches of it @@ -361,17 +311,15 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { if !fileOnly { branches, tags, err := getBranchesAndTagsForRepo(ctx, rootRepo) if err != nil { - ctx.ServerError("GetBranchesForRepo", err) - return nil + return err } - ctx.Data["RootRepoBranches"] = branches ctx.Data["RootRepoTags"] = tags } } } - // If we have a ownForkRepo and it's different from: + // If we have a ownForkRepo, and it's different from: // 1. The computed base // 2. The computed head // 3. 
The rootRepo (if we have one) @@ -386,8 +334,7 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { if !fileOnly { branches, tags, err := getBranchesAndTagsForRepo(ctx, ownForkRepo) if err != nil { - ctx.ServerError("GetBranchesForRepo", err) - return nil + return err } ctx.Data["OwnForkRepoBranches"] = branches ctx.Data["OwnForkRepoTags"] = tags @@ -395,42 +342,58 @@ func ParseCompareInfo(ctx *context.Context) *git_service.CompareInfo { } } - // Treat as pull request if both references are branches - if ctx.Data["PageIsComparePull"] == nil { - ctx.Data["PageIsComparePull"] = baseRef.IsBranch() && headRef.IsBranch() && permBase.CanReadIssuesOrPulls(true) - } - - if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) { - if log.IsTrace() { - log.Trace("Permission Denied: User: %-v cannot create/read pull requests in Repo: %-v\nUser in baseRepo has Permissions: %-+v", - ctx.Doer, - baseRepo, - permBase) - } - ctx.NotFound(nil) - return nil - } - compareInfo, err := git_service.GetCompareInfo(ctx, baseRepo, headRepo, headGitRepo, baseRef, headRef, compareReq.DirectComparison(), fileOnly) if err != nil { - ctx.ServerError("GetCompareInfo", err) - return nil + return err } - if compareReq.DirectComparison() { - ctx.Data["BeforeCommitID"] = compareInfo.BaseCommitID - } else { - ctx.Data["BeforeCommitID"] = compareInfo.MergeBase - } - return &compareInfo + + // Treat as pull request if both references are branches + cpi.allowCreatePull = baseRef.IsBranch() && headRef.IsBranch() && permBase.CanReadIssuesOrPulls(true) + cpi.allowCreatePull = cpi.allowCreatePull && compareInfo.CompareBase != "" + cpi.compareInfo = &compareInfo + return nil } -func prepareNewPullRequestTitleContent(ci *git_service.CompareInfo, commits []*git_model.SignCommitWithStatuses) (title, content string) { - title = ci.HeadRef.ShortName() +// autoTitleFromBranchName humanizes a branch name into a PR title. 
+func autoTitleFromBranchName(name string) string { + var buf strings.Builder + var prevIsSpace bool + runes := []rune(name) + for i, r := range runes { + isSpace := unicode.IsSpace(r) + if r == '-' || r == '_' || isSpace { + if !prevIsSpace { + buf.WriteRune(' ') + } + prevIsSpace = true + continue + } + if !prevIsSpace && unicode.IsUpper(r) { + needSpace := i > 0 && unicode.IsLower(runes[i-1]) || i < len(runes)-1 && unicode.IsLower(runes[i+1]) + if needSpace { + buf.WriteRune(' ') + } + } + buf.WriteRune(unicode.ToLower(r)) + prevIsSpace = isSpace + } + out := strings.TrimSpace(buf.String()) + if out == "" { + return out + } + outRunes := []rune(out) + outRunes[0] = unicode.ToUpper(outRunes[0]) + return string(outRunes) +} - if len(commits) > 0 { +func prepareNewPullRequestTitleContent(ci *git_service.CompareInfo, commits []*git_model.SignCommitWithStatuses, defaultTitleSource string) (title, content string) { + useFirstCommitAsTitle := len(commits) == 1 || (defaultTitleSource == setting.RepoPRTitleSourceFirstCommit && len(commits) > 0) + if useFirstCommitAsTitle { // the "commits" are from "ShowPrettyFormatLogToList", which is ordered from newest to oldest, here take the oldest one c := commits[len(commits)-1] title = strings.TrimSpace(c.UserCommit.Summary()) + } else { + title = autoTitleFromBranchName(ci.HeadRef.ShortName()) } if len(commits) == 1 { @@ -454,16 +417,18 @@ func prepareNewPullRequestTitleContent(ci *git_service.CompareInfo, commits []*g return title, content } -// PrepareCompareDiff renders compare diff page -func PrepareCompareDiff( - ctx *context.Context, - ci *git_service.CompareInfo, - whitespaceBehavior gitcmd.TrustedCmdArgs, -) (nothingToCompare bool) { +// prepareCompareDiff renders compare diff page. 
TODO: need to refactor it and other "compare diff" related functions together +func (cpi *comparePageInfoType) prepareCompareDiff(ctx *context.Context, whitespaceBehavior gitcmd.TrustedCmdArgs) { + ci := cpi.compareInfo + if ci.CompareBase == "" { + cpi.nothingToCompare = true + return + } repo := ctx.Repo.Repository headCommitID := ci.HeadCommitID ctx.Data["CommitRepoLink"] = ci.HeadRepo.Link() + ctx.Data["BeforeCommitID"] = ci.CompareBase ctx.Data["AfterCommitID"] = headCommitID // follow GitHub's behavior: autofill the form and expand @@ -473,26 +438,18 @@ func PrepareCompareDiff( ctx.Data["TitleQuery"] = newPrFormTitle ctx.Data["BodyQuery"] = newPrFormBody - if (headCommitID == ci.MergeBase && !ci.DirectComparison()) || - headCommitID == ci.BaseCommitID { - ctx.Data["IsNothingToCompare"] = true - if unit, err := repo.GetUnit(ctx, unit.TypePullRequests); err == nil { - config := unit.PullRequestsConfig() + if headCommitID == ci.CompareBase { + config := repo.MustGetUnit(ctx, unit.TypePullRequests).PullRequestsConfig() + // if auto-detect manual merge, an empty PR will be closed immediately because it is already on base branch + supportEmptyPr := !config.AutodetectManualMerge + acrossRepoPr := !ci.IsSameRef() + ctx.Data["AllowEmptyPr"] = supportEmptyPr && acrossRepoPr - if !config.AutodetectManualMerge { - ctx.Data["AllowEmptyPr"] = !ci.IsSameRef() - return ci.IsSameRef() - } - - ctx.Data["AllowEmptyPr"] = false - } - return true + cpi.nothingToCompare = true + return } - beforeCommitID := ci.MergeBase - if ci.DirectComparison() { - beforeCommitID = ci.BaseCommitID - } + beforeCommitID := ci.CompareBase maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles files := ctx.FormStrings("files") @@ -515,12 +472,12 @@ func PrepareCompareDiff( }, ctx.FormStrings("files")...) 
if err != nil { ctx.ServerError("GetDiff", err) - return false + return } diffShortStat, err := gitdiff.GetDiffShortStat(ctx, ci.HeadRepo, ci.HeadGitRepo, beforeCommitID, headCommitID) if err != nil { ctx.ServerError("GetDiffShortStat", err) - return false + return } ctx.Data["DiffShortStat"] = diffShortStat ctx.Data["Diff"] = diff @@ -535,7 +492,7 @@ func PrepareCompareDiff( diffTree, err := gitdiff.GetDiffTree(ctx, ci.HeadGitRepo, false, beforeCommitID, headCommitID) if err != nil { ctx.ServerError("GetDiffTree", err) - return false + return } renderedIconPool := fileicon.NewRenderedIconPool() @@ -548,7 +505,7 @@ func PrepareCompareDiff( headCommit, err := ci.HeadGitRepo.GetCommit(headCommitID) if err != nil { ctx.ServerError("GetCommit", err) - return false + return } baseGitRepo := ctx.Repo.GitRepo @@ -556,24 +513,20 @@ func PrepareCompareDiff( beforeCommit, err := baseGitRepo.GetCommit(beforeCommitID) if err != nil { ctx.ServerError("GetCommit", err) - return false + return } commits, err := processGitCommits(ctx, ci.Commits) if err != nil { ctx.ServerError("processGitCommits", err) - return false + return } ctx.Data["Commits"] = commits ctx.Data["CommitCount"] = len(commits) - ctx.Data["title"], ctx.Data["content"] = prepareNewPullRequestTitleContent(ci, commits) - ctx.Data["Username"] = ci.HeadRepo.OwnerName - ctx.Data["Reponame"] = ci.HeadRepo.Name + ctx.Data["title"], ctx.Data["content"] = prepareNewPullRequestTitleContent(ci, commits, setting.Repository.PullRequest.DefaultTitleSource) setCompareContext(ctx, beforeCommit, headCommit, ci.HeadRepo.OwnerName, repo.Name) - - return false } func getBranchesAndTagsForRepo(ctx gocontext.Context, repo *repo_model.Repository) (branches, tags []string, err error) { @@ -594,16 +547,22 @@ func getBranchesAndTagsForRepo(ctx gocontext.Context, repo *repo_model.Repositor // CompareDiff show different from one commit to another commit func CompareDiff(ctx *context.Context) { - ci := ParseCompareInfo(ctx) - if 
ctx.Written() { + comparePageInfo := newComparePageInfo() + err := comparePageInfo.parseCompareInfo(ctx) + if errors.Is(err, util.ErrNotExist) || errors.Is(err, util.ErrInvalidArgument) { + ctx.NotFound(nil) + return + } else if err != nil { + ctx.ServerError("ParseCompareInfo", err) return } - + ci := comparePageInfo.compareInfo ctx.Data["PageIsViewCode"] = true ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes ctx.Data["CompareInfo"] = ci - nothingToCompare := PrepareCompareDiff(ctx, ci, gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string))) + // TODO: need to refactor "prepare compare" related functions together + comparePageInfo.prepareCompareDiff(ctx, gitdiff.GetWhitespaceFlag(GetWhitespaceBehavior(ctx))) if ctx.Written() { return } @@ -621,16 +580,13 @@ func CompareDiff(ctx *context.Context) { return } - headBranches, err := git_model.FindBranchNames(ctx, git_model.FindBranchOptions{ - RepoID: ci.HeadRepo.ID, - ListOptions: db.ListOptionsAll, - IsDeletedBranch: optional.Some(false), - }) + headBranches, headTags, err := getBranchesAndTagsForRepo(ctx, ci.HeadRepo) if err != nil { - ctx.ServerError("GetBranches", err) + ctx.ServerError("GetBranchesAndTagsForRepo", err) return } ctx.Data["HeadBranches"] = headBranches + ctx.Data["HeadTags"] = headTags // For compare repo branches PrepareBranchList(ctx) @@ -638,14 +594,23 @@ func CompareDiff(ctx *context.Context) { return } - headTags, err := repo_model.GetTagNamesByRepoID(ctx, ci.HeadRepo.ID) - if err != nil { - ctx.ServerError("GetTagNamesByRepoID", err) - return + if ci.CompareBase != "" { + comparePageInfo.prepareCreatePullRequestPage(ctx) + if ctx.Written() { + return + } + } else { + ctx.Flash.Error(ctx.Tr("repo.pulls.no_common_history"), true) + ctx.Data["CommitCount"] = 0 } - ctx.Data["HeadTags"] = headTags + ctx.Data["PageIsComparePull"] = comparePageInfo.allowCreatePull + ctx.Data["IsNothingToCompare"] = comparePageInfo.nothingToCompare + 
ctx.HTML(http.StatusOK, tplCompare) +} - if ctx.Data["PageIsComparePull"] == true { +func (cpi *comparePageInfoType) prepareCreatePullRequestPage(ctx *context.Context) { + ci := cpi.compareInfo + if cpi.allowCreatePull { pr, err := issues_model.GetUnmergedPullRequest(ctx, ci.HeadRepo.ID, ctx.Repo.Repository.ID, ci.HeadRef.ShortName(), ci.BaseRef.ShortName(), issues_model.PullRequestFlowGithub) if err != nil { if !issues_model.IsErrPullRequestNotExist(err) { @@ -663,7 +628,7 @@ func CompareDiff(ctx *context.Context) { return } - if !nothingToCompare { + if !cpi.nothingToCompare { // Setup information for new form. pageMetaData := retrieveRepoIssueMetaData(ctx, ctx.Repo.Repository, nil, true) if ctx.Written() { @@ -675,8 +640,8 @@ func CompareDiff(ctx *context.Context) { } } } - beforeCommitID := ctx.Data["BeforeCommitID"].(string) - afterCommitID := ctx.Data["AfterCommitID"].(string) + beforeCommitID := cpi.compareInfo.CompareBase + afterCommitID := cpi.compareInfo.HeadCommitID ctx.Data["Title"] = "Comparing " + base.ShortSha(beforeCommitID) + ci.CompareSeparator + base.ShortSha(afterCommitID) @@ -685,7 +650,7 @@ func CompareDiff(ctx *context.Context) { if content, ok := ctx.Data["content"].(string); ok && content != "" { // If a template content is set, prepend the "content". In this case that's only // applicable if you have one commit to compare and that commit has a message. - // In that case the commit message will be prepend to the template body. + // In that case the commit message will be prepended to the template body. if templateContent, ok := ctx.Data[pullRequestTemplateKey].(string); ok && templateContent != "" { // Re-use the same key as that's prioritized over the "content" key. 
// Add two new lines between the content to ensure there's always at least @@ -713,14 +678,8 @@ func CompareDiff(ctx *context.Context) { ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.Permission.CanWrite(unit.TypePullRequests) - if unit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypePullRequests); err == nil { - config := unit.PullRequestsConfig() - ctx.Data["AllowMaintainerEdit"] = config.DefaultAllowMaintainerEdit - } else { - ctx.Data["AllowMaintainerEdit"] = false - } - - ctx.HTML(http.StatusOK, tplCompare) + prConfig := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypePullRequests).PullRequestsConfig() + ctx.Data["AllowMaintainerEdit"] = prConfig.DefaultAllowMaintainerEdit } // attachCommentsToLines attaches comments to their corresponding diff lines diff --git a/routers/web/repo/compare_test.go b/routers/web/repo/compare_test.go index 700aba8821..63b0f287e5 100644 --- a/routers/web/repo/compare_test.go +++ b/routers/web/repo/compare_test.go @@ -13,6 +13,7 @@ import ( issues_model "code.gitea.io/gitea/models/issues" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/setting" git_service "code.gitea.io/gitea/services/git" "code.gitea.io/gitea/services/gitdiff" @@ -61,31 +62,66 @@ func TestNewPullRequestTitleContent(t *testing.T) { } } - title, content := prepareNewPullRequestTitleContent(ci, nil) - assert.Equal(t, "head-branch", title) + // no commit + title, content := prepareNewPullRequestTitleContent(ci, nil, setting.RepoPRTitleSourceAuto) + assert.Equal(t, "Head branch", title) assert.Empty(t, content) - title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("title-only")}) - assert.Equal(t, "title-only", title) + title, content = prepareNewPullRequestTitleContent(ci, nil, setting.RepoPRTitleSourceFirstCommit) + assert.Equal(t, "Head branch", title) assert.Empty(t, content) - title, content = prepareNewPullRequestTitleContent(ci, 
[]*git_model.SignCommitWithStatuses{mockCommit("title-" + strings.Repeat("a", 255))}) - assert.Equal(t, "title-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa…", title) - assert.Equal(t, "…aaaaaaaaa\n", content) - - title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("title\nbody")}) - assert.Equal(t, "title", title) + // single commit + title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("single-commit-title\nbody")}, setting.RepoPRTitleSourceAuto) + assert.Equal(t, "single-commit-title", title) assert.Equal(t, "body", content) - title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("a\xf0\xf0\xf0\nb\xf0\xf0\xf0")}) - assert.Equal(t, "a?", title) // FIXME: GIT-COMMIT-MESSAGE-ENCODING: "title" doesn't use the same charset converting logic as "content" - assert.Equal(t, "b"+string(utf8.RuneError)+string(utf8.RuneError), content) + title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("single-commit-title\nbody")}, setting.RepoPRTitleSourceFirstCommit) + assert.Equal(t, "single-commit-title", title) + assert.Equal(t, "body", content) - title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{ + // multiple commits + commits := []*git_model.SignCommitWithStatuses{ // ordered from newest to oldest mockCommit("title2\nbody2"), mockCommit("title1\nbody1"), - }) + } + title, content = prepareNewPullRequestTitleContent(ci, commits, setting.RepoPRTitleSourceAuto) + assert.Equal(t, "Head branch", title) + assert.Empty(t, content) + + title, content = prepareNewPullRequestTitleContent(ci, commits, setting.RepoPRTitleSourceFirstCommit) assert.Equal(t, 
"title1", title) assert.Empty(t, content) + + // title string handling + title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("title-" + strings.Repeat("a", 255))}, setting.RepoPRTitleSourceFirstCommit) + assert.Equal(t, "title-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa…", title) + assert.Equal(t, "…aaaaaaaaa\n", content) + + title, content = prepareNewPullRequestTitleContent(ci, []*git_model.SignCommitWithStatuses{mockCommit("a\xf0\xf0\xf0\nb\xf0\xf0\xf0")}, setting.RepoPRTitleSourceFirstCommit) + assert.Equal(t, "a?", title) // FIXME: GIT-COMMIT-MESSAGE-ENCODING: "title" doesn't use the same charset converting logic as "content" + assert.Equal(t, "b"+string(utf8.RuneError)+string(utf8.RuneError), content) +} + +func TestAutoTitleFromBranchName(t *testing.T) { + cases := []struct { + branch string + want string + }{ + {"fix/the-bug", "Fix/the bug"}, + {"Already-Capitalized", "Already capitalized"}, + {"ALL-CAPS-BRANCH", "All caps branch"}, + {"FixHTMLBug", "Fix html bug"}, + {"MixedCase-Name", "Mixed case name"}, + {"fooBar-baz", "Foo bar baz"}, + {"foo/BAR", "Foo/bar"}, + {"_leading-underscore", "Leading underscore"}, + {"CamelCase", "Camel case"}, + {"foo--double-dash", "Foo double dash"}, + {"123-fix", "123 fix"}, + } + for _, c := range cases { + assert.Equal(t, c.want, autoTitleFromBranchName(c.branch), "branch: %q", c.branch) + } } diff --git a/routers/web/repo/issue_list.go b/routers/web/repo/issue_list.go index 60d7a4f24d..fc891ac8e4 100644 --- a/routers/web/repo/issue_list.go +++ b/routers/web/repo/issue_list.go @@ -38,6 +38,14 @@ func retrieveProjectsForIssueList(ctx *context.Context, repo *repo_model.Reposit ctx.Data["OpenProjects"], ctx.Data["ClosedProjects"] = retrieveProjectsInternal(ctx, repo) } +// 
parseProjectIDsFromQuery parses the comma-separated `project` (preferred) or `projects` +// query parameter into a slice of int64 IDs. +func parseProjectIDsFromQuery(ctx *context.Context) []int64 { + // FIXME: ISSUE-MULTIPLE-PROJECTS-FILTER: no multiple project filter support yet + // Although here parses the project parameter as a slice, the "search" logic is wrong + return ctx.FormStringInt64s("project") +} + // SearchIssues searches for issues across the repositories that the user has access to func SearchIssues(ctx *context.Context) { before, since, err := context.GetQueryBeforeSince(ctx.Base) @@ -156,10 +164,7 @@ func SearchIssues(ctx *context.Context) { } } - projectID := optional.None[int64]() - if v := ctx.FormInt64("project"); v > 0 { - projectID = optional.Some(v) - } + includedProjectIDs := parseProjectIDsFromQuery(ctx) // this api is also used in UI, // so the default limit is set to fit UI needs @@ -182,7 +187,7 @@ func SearchIssues(ctx *context.Context) { IsClosed: isClosed, IncludedAnyLabelIDs: includedAnyLabels, MilestoneIDs: includedMilestones, - ProjectID: projectID, + ProjectIDs: includedProjectIDs, SortBy: issue_indexer.SortByCreatedDesc, } @@ -298,11 +303,6 @@ func SearchRepoIssuesJSON(ctx *context.Context) { } } - projectID := optional.None[int64]() - if v := ctx.FormInt64("project"); v > 0 { - projectID = optional.Some(v) - } - isPull := optional.None[bool]() switch ctx.FormString("type") { case "pulls": @@ -330,13 +330,20 @@ func SearchRepoIssuesJSON(ctx *context.Context) { Page: ctx.FormInt("page"), PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), }, - Keyword: keyword, - RepoIDs: []int64{ctx.Repo.Repository.ID}, - IsPull: isPull, - IsClosed: isClosed, - ProjectID: projectID, - SortBy: issue_indexer.SortByCreatedDesc, + Keyword: keyword, + RepoIDs: []int64{ctx.Repo.Repository.ID}, + IsPull: isPull, + IsClosed: isClosed, + SortBy: issue_indexer.SortByCreatedDesc, } + + projectIDs := parseProjectIDsFromQuery(ctx) + if 
len(projectIDs) == 1 && projectIDs[0] == -1 { + searchOpt.NoProjectOnly = true + } else if len(projectIDs) > 0 { + searchOpt.ProjectIDs = projectIDs + } + if since != 0 { searchOpt.UpdatedAfterUnix = optional.Some(since) } @@ -467,7 +474,7 @@ func renderMilestones(ctx *context.Context) { ctx.Data["ClosedMilestones"] = closedMilestones } -func prepareIssueFilterAndList(ctx *context.Context, milestoneID, projectID int64, isPullOption optional.Option[bool]) { +func prepareIssueFilterAndList(ctx *context.Context, milestoneID int64, projectIDs []int64, isPullOption optional.Option[bool]) { var err error viewType := ctx.FormString("type") sortType := ctx.FormString("sort") @@ -520,7 +527,7 @@ func prepareIssueFilterAndList(ctx *context.Context, milestoneID, projectID int6 RepoIDs: []int64{repo.ID}, LabelIDs: preparedLabelFilter.SelectedLabelIDs, MilestoneIDs: mileIDs, - ProjectID: projectID, + ProjectIDs: projectIDs, AssigneeID: assigneeID, MentionedID: mentionedID, PosterID: posterUserID, @@ -529,6 +536,7 @@ func prepareIssueFilterAndList(ctx *context.Context, milestoneID, projectID int6 IsPull: isPullOption, IssueIDs: nil, } + if keyword != "" { keywordMatchedIssueIDs, _, err = issue_indexer.SearchIssues(ctx, issue_indexer.ToSearchOptions(keyword, statsOpts)) if err != nil { @@ -600,7 +608,7 @@ func prepareIssueFilterAndList(ctx *context.Context, milestoneID, projectID int6 ReviewRequestedID: reviewRequestedID, ReviewedID: reviewedID, MilestoneIDs: mileIDs, - ProjectID: projectID, + ProjectIDs: projectIDs, IsClosed: isShowClosed, IsPull: isPullOption, LabelIDs: preparedLabelFilter.SelectedLabelIDs, @@ -708,7 +716,7 @@ func prepareIssueFilterAndList(ctx *context.Context, milestoneID, projectID int6 ctx.Data["ViewType"] = viewType ctx.Data["SortType"] = sortType ctx.Data["MilestoneID"] = milestoneID - ctx.Data["ProjectID"] = projectID + ctx.Data["ProjectIDs"] = projectIDs ctx.Data["AssigneeID"] = assigneeID ctx.Data["PosterUsername"] = posterUsername ctx.Data["Keyword"] 
= keyword @@ -749,7 +757,9 @@ func Issues(ctx *context.Context) { ctx.Data["NewIssueChooseTemplate"] = issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo) } - prepareIssueFilterAndList(ctx, ctx.FormInt64("milestone"), ctx.FormInt64("project"), optional.Some(isPullList)) + projectIDs := parseProjectIDsFromQuery(ctx) + + prepareIssueFilterAndList(ctx, ctx.FormInt64("milestone"), projectIDs, optional.Some(isPullList)) if ctx.Written() { return } diff --git a/routers/web/repo/issue_new.go b/routers/web/repo/issue_new.go index 861709d2ff..d442f2804f 100644 --- a/routers/web/repo/issue_new.go +++ b/routers/web/repo/issue_new.go @@ -121,7 +121,8 @@ func NewIssue(ctx *context.Context) { } pageMetaData.MilestonesData.SelectedMilestoneID = ctx.FormInt64("milestone") - pageMetaData.ProjectsData.SelectedProjectIDs, _ = base.StringsToInt64s(strings.Split(ctx.FormString("project"), ",")) + + pageMetaData.SetSelectedProjectIDs(parseProjectIDsFromQuery(ctx)) if len(pageMetaData.ProjectsData.SelectedProjectIDs) == 1 { ctx.Data["redirect_after_creation"] = "project" } @@ -237,8 +238,9 @@ func toSet[ItemType any, KeyType comparable](slice []ItemType, keyFunc func(Item // ValidateRepoMetasForNewIssue check and returns repository's meta information func ValidateRepoMetasForNewIssue(ctx *context.Context, form forms.CreateIssueForm, isPull bool) (ret struct { - LabelIDs, AssigneeIDs []int64 - MilestoneID, ProjectID int64 + LabelIDs, AssigneeIDs []int64 + MilestoneID int64 + ProjectIDs []int64 Reviewers []*user_model.User TeamReviewers []*organization.Team @@ -249,7 +251,7 @@ func ValidateRepoMetasForNewIssue(ctx *context.Context, form forms.CreateIssueFo return ret } - inputLabelIDs, _ := base.StringsToInt64s(strings.Split(form.LabelIDs, ",")) + inputLabelIDs := ctx.FormStringInt64s("label_ids") candidateLabels := toSet(pageMetaData.LabelsData.AllLabels, func(label *issues_model.Label) int64 { return label.ID }) if len(inputLabelIDs) > 0 && 
!candidateLabels.Contains(inputLabelIDs...) { ctx.NotFound(nil) @@ -265,13 +267,8 @@ func ValidateRepoMetasForNewIssue(ctx *context.Context, form forms.CreateIssueFo } pageMetaData.MilestonesData.SelectedMilestoneID = form.MilestoneID - allProjects := append(slices.Clone(pageMetaData.ProjectsData.OpenProjects), pageMetaData.ProjectsData.ClosedProjects...) - candidateProjects := toSet(allProjects, func(project *project_model.Project) int64 { return project.ID }) - if form.ProjectID > 0 && !candidateProjects.Contains(form.ProjectID) { - ctx.NotFound(nil) - return ret - } - pageMetaData.ProjectsData.SelectedProjectIDs = util.Iif(form.ProjectID > 0, []int64{form.ProjectID}, nil) + inputProjectIDs := ctx.FormStringInt64s("project_ids") + pageMetaData.SetSelectedProjectIDs(inputProjectIDs) // prepare assignees candidateAssignees := toSet(pageMetaData.AssigneesData.CandidateAssignees, func(user *user_model.User) int64 { return user.ID }) @@ -316,7 +313,8 @@ func ValidateRepoMetasForNewIssue(ctx *context.Context, form forms.CreateIssueFo } } - ret.LabelIDs, ret.AssigneeIDs, ret.MilestoneID, ret.ProjectID = inputLabelIDs, inputAssigneeIDs, form.MilestoneID, form.ProjectID + // Return only the validated IDs. 
+ ret.LabelIDs, ret.AssigneeIDs, ret.MilestoneID, ret.ProjectIDs = inputLabelIDs, inputAssigneeIDs, form.MilestoneID, inputProjectIDs ret.Reviewers, ret.TeamReviewers = reviewers, teamReviewers return ret } @@ -324,26 +322,17 @@ func ValidateRepoMetasForNewIssue(ctx *context.Context, form forms.CreateIssueFo // NewIssuePost response for creating new issue func NewIssuePost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.CreateIssueForm) - ctx.Data["Title"] = ctx.Tr("repo.issues.new") - ctx.Data["PageIsIssueList"] = true - ctx.Data["NewIssueChooseTemplate"] = issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo) - ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes - ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled - upload.AddUploadContext(ctx, "comment") - var ( - repo = ctx.Repo.Repository - attachments []string - ) + repo := ctx.Repo.Repository validateRet := ValidateRepoMetasForNewIssue(ctx, *form, false) if ctx.Written() { return } - labelIDs, assigneeIDs, milestoneID, projectID := validateRet.LabelIDs, validateRet.AssigneeIDs, validateRet.MilestoneID, validateRet.ProjectID + labelIDs, assigneeIDs, milestoneID, projectIDs := validateRet.LabelIDs, validateRet.AssigneeIDs, validateRet.MilestoneID, validateRet.ProjectIDs - if projectID > 0 { + if len(projectIDs) > 0 { if !ctx.Repo.Permission.CanRead(unit.TypeProjects) { // User must also be able to see the project. 
ctx.HTTPError(http.StatusBadRequest, "user hasn't permissions to read projects") @@ -351,6 +340,7 @@ func NewIssuePost(ctx *context.Context) { } } + var attachments []string if setting.Attachment.Enabled { attachments = form.Files } @@ -383,7 +373,7 @@ func NewIssuePost(ctx *context.Context) { Ref: form.Ref, } - if err := issue_service.NewIssue(ctx, repo, issue, labelIDs, attachments, assigneeIDs, projectID); err != nil { + if err := issue_service.NewIssue(ctx, repo, issue, labelIDs, attachments, assigneeIDs, projectIDs); err != nil { if repo_model.IsErrUserDoesNotHaveAccessToRepo(err) { ctx.HTTPError(http.StatusBadRequest, "UserDoesNotHaveAccessToRepo", err.Error()) } else if errors.Is(err, user_model.ErrBlockedUser) { @@ -395,8 +385,9 @@ func NewIssuePost(ctx *context.Context) { } log.Trace("Issue created: %d/%d", repo.ID, issue.ID) - if ctx.FormString("redirect_after_creation") == "project" && projectID > 0 { - project, err := project_model.GetProjectByID(ctx, projectID) + if ctx.FormString("redirect_after_creation") == "project" && len(projectIDs) > 0 { + // When issue is in multiple projects, redirect to first project from form order. 
+ project, err := project_model.GetProjectByID(ctx, projectIDs[0]) if err == nil { if project.Type == project_model.TypeOrganization { ctx.JSONRedirect(project_model.ProjectLinkForOrg(ctx.Repo.Owner, project.ID)) diff --git a/routers/web/repo/issue_page_meta.go b/routers/web/repo/issue_page_meta.go index 9c7ac65a1f..428171dd0e 100644 --- a/routers/web/repo/issue_page_meta.go +++ b/routers/web/repo/issue_page_meta.go @@ -40,7 +40,7 @@ type issueSidebarProjectCardData struct { } type issueSidebarProjectsData struct { - SelectedProjectIDs []int64 // TODO: support multiple projects in the future + SelectedProjectIDs []int64 ProjectCards []*issueSidebarProjectCardData OpenProjects []*project_model.Project @@ -171,33 +171,49 @@ func (d *IssuePageMetaData) retrieveAssigneesData(ctx *context.Context) { ctx.Data["Assignees"] = d.AssigneesData.CandidateAssignees } -func (d *IssuePageMetaData) retrieveProjectData(ctx *context.Context) { - if d.Issue == nil || d.Issue.Project == nil { +func (d *IssuePageMetaData) retrieveProjectCardsForExistingIssue(ctx *context.Context) { + if err := d.Issue.LoadProjects(ctx); err != nil { + ctx.ServerError("LoadProjects", err) return } - columns, err := d.Issue.Project.GetColumns(ctx) + + // Load column mappings for all projects + projectColumnMap, err := d.Issue.ProjectColumnMap(ctx) if err != nil { - ctx.ServerError("GetProjectColumns", err) + ctx.ServerError("ProjectColumnMap", err) return } - columnID, err := d.Issue.ProjectColumnID(ctx) - if err != nil { - ctx.ServerError("ProjectColumnID", err) - return - } - var selectedColumn *project_model.Column - for _, col := range columns { - if col.ID == columnID { - selectedColumn = col - break + + // Build project cards for each project + d.ProjectsData.ProjectCards = make([]*issueSidebarProjectCardData, 0, len(d.Issue.Projects)) + for _, project := range d.Issue.Projects { + columns, err := project.GetColumns(ctx) + if err != nil { + ctx.ServerError("GetProjectColumns", err) + return } - } - 
d.ProjectsData.ProjectCards = []*issueSidebarProjectCardData{ - { - Project: d.Issue.Project, + + var selectedColumn *project_model.Column + columnID := projectColumnMap[project.ID] + for _, col := range columns { + if col.ID == columnID { + selectedColumn = col + break + } + } + + if selectedColumn == nil { + selectedColumn, err = project.MustDefaultColumn(ctx) + if err != nil { + ctx.ServerError("MustDefaultColumn", err) + return + } + } + d.ProjectsData.ProjectCards = append(d.ProjectsData.ProjectCards, &issueSidebarProjectCardData{ + Project: project, Columns: columns, SelectedColumn: selectedColumn, - }, + }) } d.ProjectsData.SelectedProjectIDs = make([]int64, 0, len(d.ProjectsData.ProjectCards)) for _, card := range d.ProjectsData.ProjectCards { @@ -205,6 +221,29 @@ func (d *IssuePageMetaData) retrieveProjectData(ctx *context.Context) { } } +func (d *IssuePageMetaData) retrieveProjectData(ctx *context.Context) { + if d.Issue == nil { + return + } + d.retrieveProjectCardsForExistingIssue(ctx) +} + +func (d *IssuePageMetaData) SetSelectedProjectIDs(ids []int64) { + allProjects := map[int64]*project_model.Project{} + for _, p := range d.ProjectsData.OpenProjects { + allProjects[p.ID] = p + } + for _, p := range d.ProjectsData.ClosedProjects { + allProjects[p.ID] = p + } + for _, id := range ids { + if project, ok := allProjects[id]; ok { + d.ProjectsData.ProjectCards = append(d.ProjectsData.ProjectCards, &issueSidebarProjectCardData{Project: project}) + } + } + d.ProjectsData.SelectedProjectIDs = ids +} + func (d *IssuePageMetaData) retrieveProjectsDataForIssueWriter(ctx *context.Context) { d.ProjectsData.OpenProjects, d.ProjectsData.ClosedProjects = retrieveProjectsInternal(ctx, ctx.Repo.Repository) } diff --git a/routers/web/repo/issue_view.go b/routers/web/repo/issue_view.go index af13a1156e..852b880ab0 100644 --- a/routers/web/repo/issue_view.go +++ b/routers/web/repo/issue_view.go @@ -18,7 +18,6 @@ import ( "code.gitea.io/gitea/models/organization" 
access_model "code.gitea.io/gitea/models/perm/access" project_model "code.gitea.io/gitea/models/project" - pull_model "code.gitea.io/gitea/models/pull" "code.gitea.io/gitea/models/renderhelper" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -826,6 +825,7 @@ func (prInfo *pullRequestViewInfo) prepareMergeBox(ctx *context.Context, issue * panic("impossible, issue must be the same") } + pull := issue.PullRequest data := &pullMergeBoxData{} prInfo.MergeBoxData = data @@ -834,14 +834,12 @@ func (prInfo *pullRequestViewInfo) prepareMergeBox(ctx *context.Context, issue * statusCheckData = &pullCommitStatusCheckData{} // make the following logic easier, no need to keep checking "nil" } - pull := issue.PullRequest canDelete := false allowMerge := false canWriteToHeadRepo := false pull_service.StartPullRequestCheckOnView(ctx, pull) - ctx.Data["GetCommitMessages"] = "" if !prInfo.IsPullRequestBroken { var err error ctx.Data["UpdateAllowed"], ctx.Data["UpdateByRebaseAllowed"], err = pull_service.IsUserAllowedToUpdate(ctx, pull, ctx.Doer) @@ -849,7 +847,6 @@ func (prInfo *pullRequestViewInfo) prepareMergeBox(ctx *context.Context, issue * ctx.ServerError("IsUserAllowedToUpdate", err) return } - ctx.Data["GetCommitMessages"] = pull_service.GetSquashMergeCommitMessages(ctx, pull) } if pull.IsFilesConflicted() { @@ -903,59 +900,11 @@ func (prInfo *pullRequestViewInfo) prepareMergeBox(ctx *context.Context, issue * } } - data.ReloadingInterval = util.Iif(pull != nil && pull.IsChecking(), 2000, 0) - ctx.Data["CanWriteToHeadRepo"] = canWriteToHeadRepo - ctx.Data["ShowMergeInstructions"] = canWriteToHeadRepo + data.ReloadingInterval = util.Iif(pull.IsChecking(), 2000, 0) + data.ShowMergeInstructions = canWriteToHeadRepo + data.ShowPullCommands = pull.HeadRepo != nil && !pull.HasMerged && !issue.IsClosed ctx.Data["AllowMerge"] = allowMerge - prUnit, err := issue.Repo.GetUnit(ctx, unit.TypePullRequests) - if err != nil { - ctx.ServerError("GetUnit", err) 
- return - } - prConfig := prUnit.PullRequestsConfig() - - ctx.Data["AutodetectManualMerge"] = prConfig.AutodetectManualMerge - - var mergeStyle repo_model.MergeStyle - // Check correct values and select default - if ms, ok := ctx.Data["MergeStyle"].(repo_model.MergeStyle); !ok || - !prConfig.IsMergeStyleAllowed(ms) { - if prConfig.IsMergeStyleAllowed(prConfig.DefaultMergeStyle) && !ok { - mergeStyle = prConfig.DefaultMergeStyle - } else if prConfig.AllowMerge { - mergeStyle = repo_model.MergeStyleMerge - } else if prConfig.AllowRebase { - mergeStyle = repo_model.MergeStyleRebase - } else if prConfig.AllowRebaseMerge { - mergeStyle = repo_model.MergeStyleRebaseMerge - } else if prConfig.AllowSquash { - mergeStyle = repo_model.MergeStyleSquash - } else if prConfig.AllowFastForwardOnly { - mergeStyle = repo_model.MergeStyleFastForwardOnly - } else if prConfig.AllowManualMerge { - mergeStyle = repo_model.MergeStyleManuallyMerged - } - } - - ctx.Data["MergeStyle"] = mergeStyle - - defaultMergeMessage, defaultMergeBody, err := pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pull, mergeStyle) - if err != nil { - ctx.ServerError("GetDefaultMergeMessage", err) - return - } - ctx.Data["DefaultMergeMessage"] = defaultMergeMessage - ctx.Data["DefaultMergeBody"] = defaultMergeBody - - defaultSquashMergeMessage, defaultSquashMergeBody, err := pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pull, repo_model.MergeStyleSquash) - if err != nil { - ctx.ServerError("GetDefaultSquashMergeMessage", err) - return - } - ctx.Data["DefaultSquashMergeMessage"] = defaultSquashMergeMessage - ctx.Data["DefaultSquashMergeBody"] = defaultSquashMergeBody - pb := prInfo.ProtectedBranchRule if pb != nil { pb.Repo = pull.BaseRepo @@ -995,6 +944,9 @@ func (prInfo *pullRequestViewInfo) prepareMergeBox(ctx *context.Context, issue * return } + prConfig := issue.Repo.MustGetUnit(ctx, unit.TypePullRequests).PullRequestsConfig() + data.AutodetectManualMerge = 
prConfig.AutodetectManualMerge + stillCanManualMerge := func() bool { if pull.HasMerged || issue.IsClosed || !ctx.IsSigned { return false @@ -1007,13 +959,6 @@ func (prInfo *pullRequestViewInfo) prepareMergeBox(ctx *context.Context, issue * ctx.Data["StillCanManualMerge"] = stillCanManualMerge() - // Check if there is a pending pr merge - ctx.Data["HasPendingPullRequestMerge"], ctx.Data["PendingPullRequestMerge"], err = pull_model.GetScheduledMergeByPullID(ctx, pull.ID) - if err != nil { - ctx.ServerError("GetScheduledMergeByPullID", err) - return - } - enableStatusCheck := pb != nil && pb.EnableStatusCheck ctx.Data["EnableStatusCheck"] = enableStatusCheck @@ -1043,6 +988,7 @@ func (prInfo *pullRequestViewInfo) prepareMergeBox(ctx *context.Context, issue * (!data.requireSigned || data.willSign) // signing requirement is satisfied ctx.Data["PullMergeBoxData"] = prInfo.MergeBoxData + prInfo.prepareMergeBoxFormProps(ctx) } func prepareIssueViewContent(ctx *context.Context, issue *issues_model.Issue) { diff --git a/routers/web/repo/middlewares.go b/routers/web/repo/middlewares.go index c7c9da498b..ee525a0db7 100644 --- a/routers/web/repo/middlewares.go +++ b/routers/web/repo/middlewares.go @@ -9,6 +9,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/gitdiff" @@ -80,6 +81,14 @@ func SetWhitespaceBehavior(ctx *context.Context) { } } +func GetWhitespaceBehavior(ctx *context.Context) string { + behavior, ok := ctx.Data["WhitespaceBehavior"].(string) + if !ok { + setting.PanicInDevOrTesting("WhitespaceBehavior is not set in context data or is not a string") + } + return behavior +} + // SetShowOutdatedComments set the show outdated comments option as context variable func SetShowOutdatedComments(ctx *context.Context) { showOutdatedCommentsValue := 
ctx.FormString("show-outdated") @@ -95,3 +104,11 @@ func SetShowOutdatedComments(ctx *context.Context) { } ctx.Data["ShowOutdatedComments"], _ = strconv.ParseBool(showOutdatedCommentsValue) } + +func GetShowOutdatedComments(ctx *context.Context) bool { + show, ok := ctx.Data["ShowOutdatedComments"].(bool) + if !ok { + setting.PanicInDevOrTesting("ShowOutdatedComments is not set in context data or is not a bool") + } + return show +} diff --git a/routers/web/repo/milestone.go b/routers/web/repo/milestone.go index 5e23c1c413..759b9910d8 100644 --- a/routers/web/repo/milestone.go +++ b/routers/web/repo/milestone.go @@ -238,7 +238,7 @@ func DeleteMilestone(ctx *context.Context) { // MilestoneIssuesAndPulls lists all the issues and pull requests of the milestone func MilestoneIssuesAndPulls(ctx *context.Context) { milestoneID := ctx.PathParamInt64("id") - projectID := ctx.FormInt64("project") + projectIDs := parseProjectIDsFromQuery(ctx) milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID) if err != nil { if issues_model.IsErrMilestoneNotExist(err) { @@ -260,7 +260,7 @@ func MilestoneIssuesAndPulls(ctx *context.Context) { ctx.Data["Title"] = milestone.Name ctx.Data["Milestone"] = milestone - prepareIssueFilterAndList(ctx, milestoneID, projectID, optional.None[bool]()) + prepareIssueFilterAndList(ctx, milestoneID, projectIDs, optional.None[bool]()) ret := issue.ParseTemplatesFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo) ctx.Data["NewIssueChooseTemplate"] = len(ret.IssueTemplates) > 0 diff --git a/routers/web/repo/projects.go b/routers/web/repo/projects.go index a94051f298..8690e75463 100644 --- a/routers/web/repo/projects.go +++ b/routers/web/repo/projects.go @@ -17,6 +17,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/optional" 
"code.gitea.io/gitea/modules/setting" @@ -447,13 +448,12 @@ func UpdateIssueProject(ctx *context.Context) { return } - projectID := ctx.FormInt64("id") + projectIDs := ctx.FormStringInt64s("id") + var failedIssues []int64 for _, issue := range issues { - if issue.Project != nil && issue.Project.ID == projectID { - continue - } - if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, ctx.Doer, projectID, 0); err != nil { + if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, ctx.Doer, projectIDs); err != nil { if errors.Is(err, util.ErrPermissionDenied) { + failedIssues = append(failedIssues, issue.ID) continue } ctx.ServerError("IssueAssignOrRemoveProject", err) @@ -461,6 +461,10 @@ func UpdateIssueProject(ctx *context.Context) { } } + if len(failedIssues) > 0 { + log.Warn("Failed to assign projects to %d issues due to permission denied: %v", len(failedIssues), failedIssues) + } + ctx.JSONOK() } @@ -477,12 +481,12 @@ func UpdateIssueProjectColumn(ctx *context.Context) { return } - if err := issue.LoadProject(ctx); err != nil { - ctx.ServerError("LoadProject", err) + if err := issue.LoadProjects(ctx); err != nil { + ctx.ServerError("LoadProjects", err) return } - issueProjects := []*project_model.Project{issue.Project} // TODO: this is for the multiple project support in the future + issueProjects := issue.Projects // it must make sure the requested column is in this issue's projects var columnProject *project_model.Project diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go index c532cbba22..7406c1d122 100644 --- a/routers/web/repo/pull.go +++ b/routers/web/repo/pull.go @@ -164,12 +164,13 @@ func getPullInfo(ctx *context.Context) (issue *issues_model.Issue, ok bool) { func (prInfo *pullRequestViewInfo) setTemplateDataMergeTarget(ctx *context.Context) { pull := prInfo.issue.PullRequest if ctx.Repo.Owner.Name == pull.MustHeadUserName(ctx) { - ctx.Data["HeadTarget"] = pull.HeadBranch + prInfo.headTarget = pull.HeadBranch } else if 
pull.HeadRepo == nil { - ctx.Data["HeadTarget"] = ctx.Locale.Tr("repo.pull.deleted_branch", pull.HeadBranch) + prInfo.headTarget = ctx.Locale.TrString("repo.pull.deleted_branch", pull.HeadBranch) } else { - ctx.Data["HeadTarget"] = pull.MustHeadUserName(ctx) + "/" + pull.HeadRepo.Name + ":" + pull.HeadBranch + prInfo.headTarget = pull.MustHeadUserName(ctx) + "/" + pull.HeadRepo.Name + ":" + pull.HeadBranch } + ctx.Data["HeadTarget"] = prInfo.headTarget ctx.Data["BaseTarget"] = pull.BaseBranch headBranchLink := "" if pull.Flow == issues_model.PullRequestFlowGithub { @@ -268,6 +269,11 @@ type pullMergeBoxData struct { HasOverridableBlockers bool CanMergeNow bool + MergeFormProps map[string]any + ShowPullCommands bool + ShowMergeInstructions bool + AutodetectManualMerge bool + // don't expose unneeded fields to templates, need more refactoring changes hasStatusCheckBlocker bool isPullBranchDeletable bool @@ -289,6 +295,7 @@ type pullRequestViewInfo struct { IsPullRequestBroken bool HeadBranchCommitID string + headTarget string // for display purpose only CompareInfo git_service.CompareInfo ProtectedBranchRule *git_model.ProtectedBranch @@ -537,7 +544,7 @@ func (prInfo *pullRequestViewInfo) prepareViewOpenPullInfo(ctx *context.Context) ctx.Data["PullHeadCommitID"] = prInfo.CompareInfo.HeadCommitID - if prInfo.CompareInfo.HeadCommitID == prInfo.CompareInfo.MergeBase { + if prInfo.CompareInfo.HeadCommitID == prInfo.CompareInfo.CompareBase { ctx.Data["IsNothingToCompare"] = true } @@ -615,9 +622,6 @@ func ViewPullCommits(ctx *context.Context) { return } - ctx.Data["Username"] = ctx.Repo.Owner.Name - ctx.Data["Reponame"] = ctx.Repo.Repository.Name - commits, err := processGitCommits(ctx, prCompareInfo.Commits) if err != nil { ctx.ServerError("processGitCommits", err) @@ -673,7 +677,7 @@ func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) { headCommitID := prCompareInfo.HeadCommitID isSingleCommit := beforeCommitID == "" && afterCommitID != "" 
ctx.Data["IsShowingOnlySingleCommit"] = isSingleCommit - isShowAllCommits := (beforeCommitID == "" || beforeCommitID == prCompareInfo.MergeBase) && (afterCommitID == "" || afterCommitID == headCommitID) + isShowAllCommits := (beforeCommitID == "" || beforeCommitID == prCompareInfo.CompareBase) && (afterCommitID == "" || afterCommitID == headCommitID) ctx.Data["IsShowingAllCommits"] = isShowAllCommits if afterCommitID == "" || afterCommitID == headCommitID { @@ -688,8 +692,8 @@ func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) { var beforeCommit *git.Commit var err error if !isSingleCommit { - if beforeCommitID == "" || beforeCommitID == prCompareInfo.MergeBase { - beforeCommitID = prCompareInfo.MergeBase + if beforeCommitID == "" || beforeCommitID == prCompareInfo.CompareBase { + beforeCommitID = prCompareInfo.CompareBase // merge base commit is not in the list of the pull request commits beforeCommit, err = gitRepo.GetCommit(beforeCommitID) if err != nil { @@ -712,7 +716,7 @@ func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) { beforeCommitID = beforeCommit.ID.String() } - ctx.Data["MergeBase"] = prCompareInfo.MergeBase + ctx.Data["CompareInfo"] = prCompareInfo ctx.Data["AfterCommitID"] = afterCommitID ctx.Data["BeforeCommitID"] = beforeCommitID @@ -730,7 +734,7 @@ func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) { MaxLines: maxLines, MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters, MaxFiles: maxFiles, - WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)), + WhitespaceBehavior: gitdiff.GetWhitespaceFlag(GetWhitespaceBehavior(ctx)), } diff, err := gitdiff.GetDiffForRender(ctx, ctx.Repo.RepoLink, gitRepo, diffOptions, files...) 
@@ -769,7 +773,7 @@ func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) { "numberOfViewedFiles": numViewedFiles, } - if err = diff.LoadComments(ctx, issue, ctx.Doer, ctx.Data["ShowOutdatedComments"].(bool)); err != nil { + if err = diff.LoadComments(ctx, issue, ctx.Doer, GetShowOutdatedComments(ctx)); err != nil { ctx.ServerError("LoadComments", err) return } @@ -881,13 +885,13 @@ func viewPullFiles(ctx *context.Context, beforeCommitID, afterCommitID string) { if pull.HeadRepo != nil { if !pull.HasMerged && ctx.Doer != nil { - perm, err := access_model.GetDoerRepoPermission(ctx, pull.HeadRepo, ctx.Doer) + headPerm, err := access_model.GetDoerRepoPermission(ctx, pull.HeadRepo, ctx.Doer) if err != nil { ctx.ServerError("GetDoerRepoPermission", err) return } - if perm.CanWrite(unit.TypeCode) || issues_model.CanMaintainerWriteToBranch(ctx, perm, pull.HeadBranch, ctx.Doer) { + if issues_model.CanMaintainerWriteToBranch(ctx, headPerm, pull.HeadBranch, ctx.Doer) { ctx.Data["CanEditFile"] = true ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.edit_this_file") ctx.Data["HeadRepoLink"] = pull.HeadRepo.Link() @@ -1274,31 +1278,32 @@ func PullsNewRedirect(ctx *context.Context) { // CompareAndPullRequestPost response for creating pull request func CompareAndPullRequestPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.CreateIssueForm) - ctx.Data["Title"] = ctx.Tr("repo.pulls.compare_changes") - ctx.Data["PageIsComparePull"] = true - ctx.Data["IsDiffCompare"] = true - ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes - ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled - upload.AddUploadContext(ctx, "comment") - ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.Permission.CanWrite(unit.TypePullRequests) - - var ( - repo = ctx.Repo.Repository - attachments []string - ) - - ci := ParseCompareInfo(ctx) - if ctx.Written() { + repo := ctx.Repo.Repository + comparePageInfo := 
newComparePageInfo() + err := comparePageInfo.parseCompareInfo(ctx) + if errors.Is(err, util.ErrNotExist) { + ctx.JSONErrorNotFound() + return + } else if errors.Is(err, util.ErrInvalidArgument) { + ctx.JSONError(err.Error()) + return + } else if err != nil { + ctx.ServerError("ParseCompareInfo", err) + return + } + ci := comparePageInfo.compareInfo + if ci.CompareBase == "" { + ctx.JSONError(ctx.Tr("repo.pulls.no_common_history")) return } - validateRet := ValidateRepoMetasForNewIssue(ctx, *form, true) if ctx.Written() { return } - labelIDs, assigneeIDs, milestoneID, projectID := validateRet.LabelIDs, validateRet.AssigneeIDs, validateRet.MilestoneID, validateRet.ProjectID + labelIDs, assigneeIDs, milestoneID, projectIDs := validateRet.LabelIDs, validateRet.AssigneeIDs, validateRet.MilestoneID, validateRet.ProjectIDs + var attachments []string if setting.Attachment.Enabled { attachments = form.Files } @@ -1348,7 +1353,7 @@ func CompareAndPullRequestPost(ctx *context.Context) { BaseBranch: ci.BaseRef.ShortName(), HeadRepo: ci.HeadRepo, BaseRepo: repo, - MergeBase: ci.MergeBase, + MergeBase: ci.CompareBase, Type: issues_model.PullRequestGitea, AllowMaintainerEdit: form.AllowMaintainerEdit, } @@ -1363,7 +1368,7 @@ func CompareAndPullRequestPost(ctx *context.Context) { AssigneeIDs: assigneeIDs, Reviewers: validateRet.Reviewers, TeamReviewers: validateRet.TeamReviewers, - ProjectID: projectID, + ProjectIDs: projectIDs, } if err := pull_service.NewPullRequest(ctx, prOpts); err != nil { switch { diff --git a/routers/web/repo/pull_merge_form.go b/routers/web/repo/pull_merge_form.go new file mode 100644 index 0000000000..b390fd6934 --- /dev/null +++ b/routers/web/repo/pull_merge_form.go @@ -0,0 +1,147 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package repo + +import ( + "html/template" + + pull_model "code.gitea.io/gitea/models/pull" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unit" + "code.gitea.io/gitea/modules/templates" + "code.gitea.io/gitea/services/context" + pull_service "code.gitea.io/gitea/services/pull" +) + +func (prInfo *pullRequestViewInfo) prepareMergeBoxFormProps(ctx *context.Context) { + pull := prInfo.issue.PullRequest + prConfig := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypePullRequests).PullRequestsConfig() + + // Check correct values and select default + var mergeStyle repo_model.MergeStyle + if prConfig.IsMergeStyleAllowed(prConfig.DefaultMergeStyle) { + mergeStyle = prConfig.DefaultMergeStyle + } else if prConfig.AllowMerge { + mergeStyle = repo_model.MergeStyleMerge + } else if prConfig.AllowRebase { + mergeStyle = repo_model.MergeStyleRebase + } else if prConfig.AllowRebaseMerge { + mergeStyle = repo_model.MergeStyleRebaseMerge + } else if prConfig.AllowSquash { + mergeStyle = repo_model.MergeStyleSquash + } else if prConfig.AllowFastForwardOnly { + mergeStyle = repo_model.MergeStyleFastForwardOnly + } else if prConfig.AllowManualMerge { + mergeStyle = repo_model.MergeStyleManuallyMerged + } + if mergeStyle == "" { + return + } + + // Check if there is a pending pr merge + hasPendingPullRequestMerge, pendingPullRequestMerge, err := pull_model.GetScheduledMergeByPullID(ctx, pull.ID) + if err != nil { + ctx.ServerError("GetScheduledMergeByPullID", err) + return + } + + var hasPendingPullRequestMergeTip template.HTML + if hasPendingPullRequestMerge { + createdPRMergeStr := templates.TimeSince(pendingPullRequestMerge.CreatedUnix) + hasPendingPullRequestMergeTip = ctx.Locale.Tr("repo.pulls.auto_merge_has_pending_schedule", pendingPullRequestMerge.Doer.Name, createdPRMergeStr) + } + + defaultMergeTitle, defaultMergeBody, err := pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pull, mergeStyle) + if err != 
nil { + ctx.ServerError("GetDefaultMergeMessage", err) + return + } + defaultSquashMergeTitle, defaultSquashMergeBody, err := pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pull, repo_model.MergeStyleSquash) + if err != nil { + ctx.ServerError("GetDefaultSquashMergeMessage", err) + return + } + + var defaultSquashMergeCommitMessages string + if !prInfo.IsPullRequestBroken { + defaultSquashMergeCommitMessages = pull_service.GetSquashMergeCommitMessages(ctx, pull) + } + + allOverridableChecksOk := !prInfo.MergeBoxData.HasOverridableBlockers + prInfo.MergeBoxData.MergeFormProps = map[string]any{ + "baseLink": prInfo.issue.Link(), + "textCancel": ctx.Locale.Tr("cancel"), + "textDeleteBranch": ctx.Locale.Tr("repo.branch.delete", prInfo.headTarget), + "textAutoMergeButtonWhenSucceed": ctx.Locale.Tr("repo.pulls.auto_merge_button_when_succeed"), + "textAutoMergeWhenSucceed": ctx.Locale.Tr("repo.pulls.auto_merge_when_succeed"), + "textAutoMergeCancelSchedule": ctx.Locale.Tr("repo.pulls.auto_merge_cancel_schedule"), + "textClearMergeMessage": ctx.Locale.Tr("repo.pulls.clear_merge_message"), + "textClearMergeMessageHint": ctx.Locale.Tr("repo.pulls.clear_merge_message_hint"), + "textMergeCommitId": ctx.Locale.Tr("repo.pulls.merge_commit_id"), + + "canMergeNow": prInfo.MergeBoxData.CanMergeNow, + "allOverridableChecksOk": allOverridableChecksOk, + "emptyCommit": pull.IsEmpty(), + "pullHeadCommitID": prInfo.CompareInfo.HeadCommitID, + "isPullBranchDeletable": prInfo.MergeBoxData.isPullBranchDeletable, + "defaultMergeStyle": mergeStyle, + "defaultDeleteBranchAfterMerge": prConfig.DefaultDeleteBranchAfterMerge, + "mergeMessageFieldPlaceHolder": ctx.Locale.Tr("repo.editor.commit_message_desc"), + "defaultMergeMessage": defaultMergeBody, + + "hasPendingPullRequestMerge": hasPendingPullRequestMerge, + "hasPendingPullRequestMergeTip": hasPendingPullRequestMergeTip, + } + + // if this pr can be merged now, then hide the auto merge + generalHideAutoMerge := 
prInfo.MergeBoxData.CanMergeNow && allOverridableChecksOk + + prInfo.MergeBoxData.MergeFormProps["mergeStyles"] = []any{ + map[string]any{ + "name": "merge", + "allowed": prConfig.AllowMerge, + "textDoMerge": ctx.Locale.Tr("repo.pulls.merge_pull_request"), + "mergeTitleFieldText": defaultMergeTitle, + "mergeMessageFieldText": defaultMergeBody, + "hideAutoMerge": generalHideAutoMerge, + }, + map[string]any{ + "name": "rebase", + "allowed": prConfig.AllowRebase, + "textDoMerge": ctx.Locale.Tr("repo.pulls.rebase_merge_pull_request"), + "hideMergeMessageTexts": true, + "hideAutoMerge": generalHideAutoMerge, + }, + map[string]any{ + "name": "rebase-merge", + "allowed": prConfig.AllowRebaseMerge, + "textDoMerge": ctx.Locale.Tr("repo.pulls.rebase_merge_commit_pull_request"), + "mergeTitleFieldText": defaultMergeTitle, + "mergeMessageFieldText": defaultMergeBody, + "hideAutoMerge": generalHideAutoMerge, + }, + map[string]any{ + "name": "squash", + "allowed": prConfig.AllowSquash, + "textDoMerge": ctx.Locale.Tr("repo.pulls.squash_merge_pull_request"), + "mergeTitleFieldText": defaultSquashMergeTitle, + "mergeMessageFieldText": defaultSquashMergeCommitMessages + defaultSquashMergeBody, + "hideAutoMerge": generalHideAutoMerge, + }, + map[string]any{ + "name": "fast-forward-only", + "allowed": prConfig.AllowFastForwardOnly && pull.CommitsBehind == 0, + "textDoMerge": ctx.Locale.Tr("repo.pulls.fast_forward_only_merge_pull_request"), + "hideMergeMessageTexts": true, + "hideAutoMerge": generalHideAutoMerge, + }, + map[string]any{ + "name": "manually-merged", + "allowed": prConfig.AllowManualMerge, + "textDoMerge": ctx.Locale.Tr("repo.pulls.merge_manually"), + "hideMergeMessageTexts": true, + "hideAutoMerge": true, + }, + } +} diff --git a/routers/web/repo/pull_review.go b/routers/web/repo/pull_review.go index eb8e8fa677..bbcf6630b6 100644 --- a/routers/web/repo/pull_review.go +++ b/routers/web/repo/pull_review.go @@ -169,7 +169,7 @@ func UpdateResolveConversation(ctx 
*context.Context) { func renderConversation(ctx *context.Context, comment *issues_model.Comment, origin string) { ctx.Data["PageIsPullFiles"] = origin == "diff" - showOutdatedComments := origin == "timeline" || ctx.Data["ShowOutdatedComments"].(bool) + showOutdatedComments := origin == "timeline" || GetShowOutdatedComments(ctx) comments, err := issues_model.FetchCodeCommentsByLine(ctx, comment.Issue, ctx.Doer, comment.TreePath, comment.Line, showOutdatedComments) if err != nil { ctx.ServerError("FetchCodeCommentsByLine", err) diff --git a/routers/web/repo/render.go b/routers/web/repo/render.go index ace871a9f1..4a68c96aaa 100644 --- a/routers/web/repo/render.go +++ b/routers/web/repo/render.go @@ -40,8 +40,8 @@ func RenderFile(ctx *context.Context) { defer blobReader.Close() rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ - CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), - CurrentTreePath: path.Dir(ctx.Repo.TreePath), + CurrentRefSubURL: ctx.Repo.RefTypeNameSubURL(), + CurrentTreePath: path.Dir(ctx.Repo.TreePath), }).WithRelativePath(ctx.Repo.TreePath).WithStandalonePage(markup.StandalonePageOptions{ CurrentWebTheme: ctx.TemplateContext.CurrentWebTheme(), RenderQueryString: ctx.Req.URL.RawQuery, diff --git a/routers/web/repo/setting/protected_branch.go b/routers/web/repo/setting/protected_branch.go index 4374e95340..a5a25e6c4e 100644 --- a/routers/web/repo/setting/protected_branch.go +++ b/routers/web/repo/setting/protected_branch.go @@ -20,6 +20,7 @@ import ( "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/glob" + "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/web/repo" @@ -312,10 +313,14 @@ func DeleteProtectedBranchRulePost(ctx *context.Context) { } func UpdateBranchProtectionPriories(ctx *context.Context) { - form := web.GetForm(ctx).(*forms.ProtectBranchPriorityForm) - 
repo := ctx.Repo.Repository - - if err := git_model.UpdateProtectBranchPriorities(ctx, repo, form.IDs); err != nil { + var form struct { + IDs []int64 `json:"ids"` + } + if err := json.NewDecoder(ctx.Req.Body).Decode(&form); err != nil { + ctx.JSONError("invalid argument") + return + } + if err := git_model.UpdateProtectBranchPriorities(ctx, ctx.Repo.Repository, form.IDs); err != nil { ctx.ServerError("UpdateProtectBranchPriorities", err) return } diff --git a/routers/web/repo/setting/setting.go b/routers/web/repo/setting/setting.go index 703d002250..816fd91cd8 100644 --- a/routers/web/repo/setting/setting.go +++ b/routers/web/repo/setting/setting.go @@ -265,8 +265,13 @@ func handleSettingsPostMirror(ctx *context.Context) { handleSettingRemoteAddrError(ctx, err, form) return } - if u.User != nil && form.MirrorPassword == "" && form.MirrorUsername == u.User.Username() { - form.MirrorPassword, _ = u.User.Password() + if u.User != nil { + if form.MirrorUsername == "" { + form.MirrorUsername = u.User.Username() + } + if form.MirrorPassword == "" && form.MirrorUsername == u.User.Username() { + form.MirrorPassword, _ = u.User.Password() + } } address, err := git.ParseRemoteAddr(form.MirrorAddress, form.MirrorUsername, form.MirrorPassword) diff --git a/routers/web/repo/setting/settings_test.go b/routers/web/repo/setting/settings_test.go index 4c65b696c5..154d01fda4 100644 --- a/routers/web/repo/setting/settings_test.go +++ b/routers/web/repo/setting/settings_test.go @@ -13,15 +13,18 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/gitrepo" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/test" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/contexttest" "code.gitea.io/gitea/services/forms" + mirror_service "code.gitea.io/gitea/services/mirror" repo_service 
"code.gitea.io/gitea/services/repository" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestAddReadOnlyDeployKey(t *testing.T) { @@ -386,3 +389,45 @@ func TestDeleteTeam(t *testing.T) { assert.False(t, repo_service.HasRepository(t.Context(), team, re.ID)) } + +func TestHandleSettingsPostMirrorPreservesExistingUsername(t *testing.T) { + defer test.MockVariableValue(&setting.Mirror.Enabled, true)() + + unittest.PrepareTestEnv(t) + + // Use the existing fixture mirror repo (org3/repo5) which has a git repo on disk. + mirrorRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 5}) + mirror := unittest.AssertExistsAndLoadBean(t, &repo_model.Mirror{RepoID: 5}) + + require.NoError(t, mirror_service.UpdateAddress(t.Context(), mirror, "https://existing-user:existing-password@example.com/user2/repo1.git")) + + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + + ctx, _ := contexttest.MockContext(t, mirrorRepo.Link()+"/settings") + contexttest.LoadUser(t, ctx, user.ID) + contexttest.LoadRepo(t, ctx, mirrorRepo.ID) + + web.SetForm(ctx, &forms.RepoSettingForm{ + Interval: "8h", + MirrorAddress: "https://example.com/user2/repo1.git", + MirrorPassword: "updated-password", + }) + + handleSettingsPostMirror(ctx) + + assert.Equal(t, http.StatusSeeOther, ctx.Resp.WrittenStatus()) + + updatedMirror := unittest.AssertExistsAndLoadBean(t, &repo_model.Mirror{RepoID: mirrorRepo.ID}) + assert.Equal(t, "https://example.com/user2/repo1.git", updatedMirror.RemoteAddress) + + updatedRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: mirrorRepo.ID}) + assert.Equal(t, "https://example.com/user2/repo1.git", updatedRepo.OriginalURL) + + remoteURL, err := gitrepo.GitRemoteGetURL(t.Context(), updatedRepo, updatedMirror.GetRemoteName()) + require.NoError(t, err) + require.NotNil(t, remoteURL.User) + assert.Equal(t, "existing-user", remoteURL.User.Username()) + password, ok := remoteURL.User.Password() + 
require.True(t, ok) + assert.Equal(t, "updated-password", password) +} diff --git a/routers/web/repo/star.go b/routers/web/repo/star.go index 8cfbfefdf1..c93c877d63 100644 --- a/routers/web/repo/star.go +++ b/routers/web/repo/star.go @@ -11,7 +11,7 @@ import ( "code.gitea.io/gitea/services/context" ) -const tplStarUnstar templates.TplName = "repo/star_unstar" +const tplStarUnstar templates.TplName = "repo/header/star" func ActionStar(ctx *context.Context) { err := repo_model.StarRepo(ctx, ctx.Doer, ctx.Repo.Repository, ctx.PathParam("action") == "star") diff --git a/routers/web/repo/view_file.go b/routers/web/repo/view_file.go index 8d7721103a..f6c97e83bb 100644 --- a/routers/web/repo/view_file.go +++ b/routers/web/repo/view_file.go @@ -59,8 +59,8 @@ func prepareFileViewLfsAttrs(ctx *context.Context) (*attribute.Attributes, bool) func handleFileViewRenderMarkup(ctx *context.Context, prefetchBuf []byte, utf8Reader io.Reader) bool { rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ - CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), - CurrentTreePath: path.Dir(ctx.Repo.TreePath), + CurrentRefSubURL: ctx.Repo.RefTypeNameSubURL(), + CurrentTreePath: path.Dir(ctx.Repo.TreePath), }).WithRelativePath(ctx.Repo.TreePath) renderer := rctx.DetectMarkupRenderer(prefetchBuf) diff --git a/routers/web/repo/view_readme.go b/routers/web/repo/view_readme.go index 25e1f87806..79ca9efc36 100644 --- a/routers/web/repo/view_readme.go +++ b/routers/web/repo/view_readme.go @@ -190,8 +190,8 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{}) rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ - CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), - CurrentTreePath: path.Dir(readmeFullPath), + CurrentRefSubURL: ctx.Repo.RefTypeNameSubURL(), + CurrentTreePath: 
path.Dir(readmeFullPath), }).WithRelativePath(readmeFullPath) renderer := rctx.DetectMarkupRenderer(buf) if renderer != nil { diff --git a/routers/web/repo/watch.go b/routers/web/repo/watch.go index a7fbfc168b..616e1ee89c 100644 --- a/routers/web/repo/watch.go +++ b/routers/web/repo/watch.go @@ -11,7 +11,7 @@ import ( "code.gitea.io/gitea/services/context" ) -const tplWatchUnwatch templates.TplName = "repo/watch_unwatch" +const tplWatchUnwatch templates.TplName = "repo/header/watch" func ActionWatch(ctx *context.Context) { err := repo_model.WatchRepo(ctx, ctx.Doer, ctx.Repo.Repository, ctx.PathParam("action") == "watch") diff --git a/routers/web/repo/wiki.go b/routers/web/repo/wiki.go index 39075dbdf6..78fb0ca0af 100644 --- a/routers/web/repo/wiki.go +++ b/routers/web/repo/wiki.go @@ -334,9 +334,6 @@ func renderRevisionPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) ctx.Data["Title"] = displayName ctx.Data["title"] = displayName - ctx.Data["Username"] = ctx.Repo.Owner.Name - ctx.Data["Reponame"] = ctx.Repo.Repository.Name - // lookup filename in wiki - get page content, gitTree entry , real filename _, entry, pageFilename, noEntry := wikiContentsByName(ctx, commit, pageName) if noEntry { diff --git a/routers/web/swagger_json.go b/routers/web/swagger_json.go index 52f6beaf59..da41fd9013 100644 --- a/routers/web/swagger_json.go +++ b/routers/web/swagger_json.go @@ -16,3 +16,10 @@ func SwaggerV1Json(ctx *context.Context) { ctx.Data["SwaggerAppSubUrl"] = setting.AppSubURL // it is JS-safe ctx.JSONTemplate("swagger/v1_json") } + +// OpenAPI3Json render OpenAPI 3.0 json (auto-converted from Swagger 2.0) +func OpenAPI3Json(ctx *context.Context) { + ctx.Data["SwaggerAppVer"] = template.HTML(template.JSEscapeString(setting.AppVer)) + ctx.Data["SwaggerAppSubUrl"] = setting.AppSubURL // it is JS-safe + ctx.JSONTemplate("swagger/v1_openapi3_json") +} diff --git a/routers/web/user/package.go b/routers/web/user/package.go index 1484ba2fdf..d25dc45ba8 100644 --- 
a/routers/web/user/package.go +++ b/routers/web/user/package.go @@ -566,7 +566,11 @@ func DownloadPackageFile(ctx *context.Context) { return } - packages_helper.ServePackageFile(ctx, s, u, pf) + packages_helper.ServePackageFile(ctx, s, u, pf, httplib.ServeHeaderOptions{ + Filename: pf.Name, + LastModified: pf.CreatedUnix.AsLocalTime(), + ContentDisposition: httplib.ContentDispositionAttachment, + }) } // ActionPackageTerraformLock locks a terraform state diff --git a/routers/web/user/profile.go b/routers/web/user/profile.go index faf2f442a2..b1d00520c2 100644 --- a/routers/web/user/profile.go +++ b/routers/web/user/profile.go @@ -251,7 +251,7 @@ func prepareUserProfileTabData(ctx *context.Context, profileDbRepo *repo_model.R log.Error("failed to GetBlobContent: %v", err) } else { rctx := renderhelper.NewRenderContextRepoFile(ctx, profileDbRepo, renderhelper.RepoFileOptions{ - CurrentRefPath: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)), + CurrentRefSubURL: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)), }) if profileContent, err := markdown.RenderString(rctx, bytes); err != nil { log.Error("failed to RenderString: %v", err) diff --git a/routers/web/web.go b/routers/web/web.go index d70eb2d02d..ecd75250d2 100644 --- a/routers/web/web.go +++ b/routers/web/web.go @@ -1175,7 +1175,7 @@ func registerWebRoutes(m *web.Router, webAuth *AuthMiddleware) { m.Combo("/edit").Get(repo_setting.SettingsProtectedBranch). 
Post(web.Bind(forms.ProtectBranchForm{}), context.RepoMustNotBeArchived(), repo_setting.SettingsProtectedBranchPost) m.Post("/{id}/delete", repo_setting.DeleteProtectedBranchRulePost) - m.Post("/priority", web.Bind(forms.ProtectBranchPriorityForm{}), context.RepoMustNotBeArchived(), repo_setting.UpdateBranchProtectionPriories) + m.Post("/priority", context.RepoMustNotBeArchived(), repo_setting.UpdateBranchProtectionPriories) }) m.Group("/tags", func() { @@ -1751,6 +1751,7 @@ func registerWebRoutes(m *web.Router, webAuth *AuthMiddleware) { if setting.API.EnableSwagger { m.Get("/swagger.v1.json", SwaggerV1Json) + m.Get("/openapi3.v1.json", OpenAPI3Json) } if !setting.IsProd || setting.IsInE2eTesting() { diff --git a/services/actions/notifier.go b/services/actions/notifier.go index c3b2003b3c..4b2e87afad 100644 --- a/services/actions/notifier.go +++ b/services/actions/notifier.go @@ -815,7 +815,8 @@ func (n *actionsNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *rep log.Error("GetActionWorkflow: %v", err) return } - convertedRun, err := convert.ToActionWorkflowRun(ctx, repo, run, nil) + run.Repo = repo + convertedRun, err := convert.ToActionWorkflowRun(ctx, run, nil) if err != nil { log.Error("ToActionWorkflowRun: %v", err) return diff --git a/services/actions/schedule_tasks.go b/services/actions/schedule_tasks.go index b2dc3f9840..3a0dff490a 100644 --- a/services/actions/schedule_tasks.go +++ b/services/actions/schedule_tasks.go @@ -132,14 +132,22 @@ func CreateScheduleTask(ctx context.Context, spec *actions_model.ActionScheduleS } func withScheduleInEventPayload(eventPayload, schedule string) string { - if schedule == "" || eventPayload == "" { + if schedule == "" { return eventPayload } - event := map[string]any{} - if err := json.Unmarshal([]byte(eventPayload), &event); err != nil { - log.Error("withScheduleInEventPayload: unmarshal: %v", err) - return eventPayload + // eventPayload originates from json.Marshal(input.Payload) in handleSchedules, + // 
so a nil payload is stored as the literal "null" and pre-existing rows may be + // empty. Both cases start from a fresh map so the schedule field can still be set. + var event map[string]any + if eventPayload != "" { + if err := json.Unmarshal([]byte(eventPayload), &event); err != nil { + log.Error("withScheduleInEventPayload: unmarshal: %v", err) + return eventPayload + } + } + if event == nil { + event = map[string]any{} } event["schedule"] = schedule diff --git a/services/actions/schedule_tasks_test.go b/services/actions/schedule_tasks_test.go index 770b842623..f2c7e656e6 100644 --- a/services/actions/schedule_tasks_test.go +++ b/services/actions/schedule_tasks_test.go @@ -22,9 +22,20 @@ func TestWithScheduleInEventPayload(t *testing.T) { assert.Equal(t, "refs/heads/main", event["ref"]) }) - t.Run("keeps empty payload", func(t *testing.T) { + t.Run("adds schedule to null payload", func(t *testing.T) { + updated := withScheduleInEventPayload("null", "37 12 5 1 2") + + event := map[string]any{} + assert.NoError(t, json.Unmarshal([]byte(updated), &event)) + assert.Equal(t, "37 12 5 1 2", event["schedule"]) + }) + + t.Run("adds schedule to empty payload", func(t *testing.T) { updated := withScheduleInEventPayload("", "37 12 5 1 2") - assert.Empty(t, updated) + + event := map[string]any{} + assert.NoError(t, json.Unmarshal([]byte(updated), &event)) + assert.Equal(t, "37 12 5 1 2", event["schedule"]) }) t.Run("keeps payload when schedule empty", func(t *testing.T) { diff --git a/services/auth/basic.go b/services/auth/basic.go index dda6451c36..e8a4a2e8f7 100644 --- a/services/auth/basic.go +++ b/services/auth/basic.go @@ -69,7 +69,7 @@ func (b *Basic) parseAuthBasic(req *http.Request) (ret struct{ authToken, uname, // VerifyAuthToken only the access token provided as parameter, used by other auth methods that want to reuse access token verification logic func (b *Basic) VerifyAuthToken(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore, 
authToken string) (*user_model.User, error) { // get oauth2 token's user's ID - _, uid := GetOAuthAccessTokenScopeAndUserID(req.Context(), authToken) + accessTokenScope, uid := GetOAuthAccessTokenScopeAndUserID(req.Context(), authToken) if uid != 0 { log.Trace("Basic Authorization: Valid OAuthAccessToken for user[%d]", uid) @@ -81,6 +81,7 @@ func (b *Basic) VerifyAuthToken(req *http.Request, w http.ResponseWriter, store store.GetData()["LoginMethod"] = OAuth2TokenMethodName store.GetData()["IsApiToken"] = true + store.GetData()["ApiTokenScope"] = accessTokenScope return u, nil } diff --git a/services/context/base_form.go b/services/context/base_form.go index 81fd7cd328..b734ab199a 100644 --- a/services/context/base_form.go +++ b/services/context/base_form.go @@ -7,6 +7,7 @@ import ( "strconv" "strings" + "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/util" ) @@ -35,6 +36,11 @@ func (b *Base) FormStrings(key string) []string { return nil } +func (b *Base) FormStringInt64s(key string) []int64 { + vals, _ := base.StringsToInt64s(strings.Split(b.FormString(key), ",")) + return vals +} + // FormTrim returns the first value for the provided key in the form as a space trimmed string func (b *Base) FormTrim(key string) string { return strings.TrimSpace(b.Req.FormValue(key)) diff --git a/services/context/captcha.go b/services/context/captcha.go index b1129a05b2..50beddb208 100644 --- a/services/context/captcha.go +++ b/services/context/captcha.go @@ -48,7 +48,7 @@ func GetImageCaptcha() *captcha.Captcha { const ( gRecaptchaResponseField = "g-recaptcha-response" hCaptchaResponseField = "h-captcha-response" - mCaptchaResponseField = "m-captcha-response" + mCaptchaResponseField = "mcaptcha__token" // this form key is hard-coded in the mcaptcha frontend library cfTurnstileResponseField = "cf-turnstile-response" ) diff --git a/services/context/context_template.go b/services/context/context_template.go index 
b63aaf4c3c..0f083d097e 100644 --- a/services/context/context_template.go +++ b/services/context/context_template.go @@ -5,13 +5,11 @@ package context import ( "context" - "fmt" "html" "html/template" "net/http" "strconv" "strings" - "sync" "time" "code.gitea.io/gitea/modules/httplib" @@ -91,31 +89,14 @@ func (c TemplateContext) AppFullLink(link ...string) template.URL { return template.URL(s + "/" + strings.TrimPrefix(link[0], "/")) } -var globalVars = sync.OnceValue(func() (ret struct { - scriptImportRemainingPart string -}, -) { - // add onerror handler to alert users when the script fails to load: - // * for end users: there were many users reporting that "UI doesn't work", actually they made mistakes in their config - // * for developers: help them to remember to run "make watch-frontend" to build frontend assets - // the message will be directly put in the onerror JS code's string - onScriptErrorPrompt := `Please make sure the asset files can be accessed.` - if !setting.IsProd { - onScriptErrorPrompt += `\n\nFor development, run: make watch-frontend.` - } - onScriptErrorJS := fmt.Sprintf(`alert('Failed to load asset file from ' + this.src + '. 
%s')`, onScriptErrorPrompt) - ret.scriptImportRemainingPart = `onerror="` + html.EscapeString(onScriptErrorJS) + `">` - return ret -}) - func (c TemplateContext) ScriptImport(path string, typ ...string) template.HTML { if len(typ) > 0 { if typ[0] == "module" { - return template.HTML(``) } panic("unsupported script type: " + typ[0]) } - return template.HTML(``) } func (c TemplateContext) CspScriptNonce() (ret string) { diff --git a/services/convert/action_test.go b/services/convert/action_test.go index 9ecb4a2ca6..9efc0e36a8 100644 --- a/services/convert/action_test.go +++ b/services/convert/action_test.go @@ -115,12 +115,12 @@ func TestToActionWorkflowRun_UsesTriggerEvent(t *testing.T) { repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}) run := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRun{ID: 803}) - + run.Repo = repo // Scheduled runs keep Event as the registration event (push) and use TriggerEvent as the real trigger. run.Event = "push" run.TriggerEvent = "schedule" - apiRun, err := ToActionWorkflowRun(t.Context(), repo, run, nil) + apiRun, err := ToActionWorkflowRun(t.Context(), run, nil) require.NoError(t, err) assert.Equal(t, "schedule", apiRun.Event) } diff --git a/services/convert/convert.go b/services/convert/convert.go index 29f46d37ad..a7622644d8 100644 --- a/services/convert/convert.go +++ b/services/convert/convert.go @@ -29,6 +29,7 @@ import ( "code.gitea.io/gitea/modules/actions" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/httplib" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" @@ -222,14 +223,18 @@ func ToTag(repo *repo_model.Repository, t *git.Tag) *api.Tag { } } -// ToActionTask convert a actions_model.ActionTask to an api.ActionTask +// ToActionTask convert an actions_model.ActionTask to an api.ActionTask func ToActionTask(ctx context.Context, t *actions_model.ActionTask) 
(*api.ActionTask, error) { - if err := t.LoadAttributes(ctx); err != nil { + // don't need Steps here, only need to load job and its run + if err := t.LoadJob(ctx); err != nil { + return nil, err + } + if err := t.Job.LoadRun(ctx); err != nil { + return nil, err + } + if err := t.Job.Run.LoadRepo(ctx); err != nil { return nil, err } - - url := strings.TrimSuffix(setting.AppURL, "/") + t.GetRunLink() - return &api.ActionTask{ ID: t.ID, Name: t.Job.Name, @@ -240,23 +245,25 @@ func ToActionTask(ctx context.Context, t *actions_model.ActionTask) (*api.Action DisplayTitle: t.Job.Run.Title, Status: t.Status.String(), WorkflowID: t.Job.Run.WorkflowID, - URL: url, + URL: httplib.MakeAbsoluteURL(ctx, t.Job.Run.Link()), CreatedAt: t.Created.AsLocalTime(), UpdatedAt: t.Updated.AsLocalTime(), RunStartedAt: t.Started.AsLocalTime(), }, nil } -func ToActionWorkflowRun(ctx context.Context, repo *repo_model.Repository, run *actions_model.ActionRun, attempt *actions_model.ActionRunAttempt) (*api.ActionWorkflowRun, error) { - if err := run.LoadAttributes(ctx); err != nil { +func ToActionWorkflowRun(ctx context.Context, run *actions_model.ActionRun, attempt *actions_model.ActionRunAttempt) (_ *api.ActionWorkflowRun, err error) { + if err := run.LoadRepo(ctx); err != nil { + return nil, err + } + if err := run.LoadTriggerUser(ctx); err != nil { return nil, err } if attempt == nil { - if latestAttempt, has, err := run.GetLatestAttempt(ctx); err != nil { + attempt, _, err = run.GetLatestAttempt(ctx) + if err != nil { return nil, err - } else if has { - attempt = latestAttempt } } @@ -272,6 +279,7 @@ func ToActionWorkflowRun(ctx context.Context, repo *repo_model.Repository, run * var previousAttemptURL *string if attempt != nil { + attempt.Run = run if err := attempt.LoadAttributes(ctx); err != nil { return nil, err } @@ -281,16 +289,15 @@ func ToActionWorkflowRun(ctx context.Context, repo *repo_model.Repository, run * completedAt = attempt.Stopped.AsLocalTime() triggerUser = 
attempt.TriggerUser if attempt.Attempt > 1 { - url := fmt.Sprintf("%s/actions/runs/%d/attempts/%d", repo.APIURL(), run.ID, attempt.Attempt-1) - previousAttemptURL = &url + previousAttemptURL = new(fmt.Sprintf("%s/actions/runs/%d/attempts/%d", run.Repo.APIURL(ctx), run.ID, attempt.Attempt-1)) } } return &api.ActionWorkflowRun{ ID: run.ID, - URL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), run.ID), + URL: fmt.Sprintf("%s/actions/runs/%d", run.Repo.APIURL(ctx), run.ID), PreviousAttemptURL: previousAttemptURL, - HTMLURL: run.HTMLURL(), + HTMLURL: run.HTMLURL(ctx), RunNumber: run.Index, RunAttempt: runAttempt, StartedAt: startedAt, @@ -302,7 +309,7 @@ func ToActionWorkflowRun(ctx context.Context, repo *repo_model.Repository, run * Status: status, Conclusion: conclusion, Path: fmt.Sprintf("%s@%s", run.WorkflowID, run.Ref), - Repository: ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeNone}), + Repository: ToRepo(ctx, run.Repo, access_model.Permission{AccessMode: perm.AccessModeNone}), TriggerActor: ToUser(ctx, triggerUser, nil), Actor: ToUser(ctx, actor, nil), }, nil @@ -400,11 +407,11 @@ func ToActionWorkflowJob(ctx context.Context, repo *repo_model.Repository, task return &api.ActionWorkflowJob{ ID: job.ID, // missing api endpoint for this location - URL: fmt.Sprintf("%s/actions/jobs/%d", repo.APIURL(), job.ID), - HTMLURL: fmt.Sprintf("%s/jobs/%d", job.Run.HTMLURL(), job.ID), + URL: fmt.Sprintf("%s/actions/jobs/%d", repo.APIURL(ctx), job.ID), + HTMLURL: fmt.Sprintf("%s/jobs/%d", job.Run.HTMLURL(ctx), job.ID), RunID: job.RunID, // Missing api endpoint for this location, artifacts are available under a nested url - RunURL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(), job.RunID), + RunURL: fmt.Sprintf("%s/actions/runs/%d", repo.APIURL(ctx), job.RunID), Name: job.Name, Labels: job.RunsOn, RunAttempt: job.Attempt, @@ -704,7 +711,7 @@ func ToOrganization(ctx context.Context, org *organization.Organization) *api.Or Description: org.Description, 
Website: org.Website, Location: org.Location, - Visibility: org.Visibility.String(), + Visibility: api.UserVisibility(org.Visibility.String()), RepoAdminChangeTeamAccess: org.RepoAdminChangeTeamAccess, } } @@ -733,7 +740,7 @@ func ToTeams(ctx context.Context, teams []*organization.Team, loadOrgs bool) ([] Description: t.Description, IncludesAllRepositories: t.IncludesAllRepositories, CanCreateOrgRepo: t.CanCreateOrgRepo, - Permission: t.AccessMode.ToString(), + Permission: api.AccessLevelName(t.AccessMode.ToString()), Units: t.GetUnitNames(), UnitsMap: t.GetUnitsMap(), } diff --git a/services/convert/issue.go b/services/convert/issue.go index 61f11d8f19..8e3adaa82d 100644 --- a/services/convert/issue.go +++ b/services/convert/issue.go @@ -95,6 +95,13 @@ func toIssue(ctx context.Context, doer *user_model.User, issue *issues_model.Iss apiIssue.Milestone = ToAPIMilestone(issue.Milestone) } + if err := issue.LoadProjects(ctx); err != nil { + return &api.Issue{} + } + if len(issue.Projects) > 0 { + apiIssue.Projects = ToAPIProjectList(issue.Projects) + } + if err := issue.LoadAssignees(ctx); err != nil { return &api.Issue{} } diff --git a/services/convert/project.go b/services/convert/project.go new file mode 100644 index 0000000000..b66de746ca --- /dev/null +++ b/services/convert/project.go @@ -0,0 +1,37 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package convert + +import ( + project_model "code.gitea.io/gitea/models/project" + api "code.gitea.io/gitea/modules/structs" +) + +// ToAPIProject converts a Project to API format +func ToAPIProject(p *project_model.Project) *api.Project { + apiProject := &api.Project{ + ID: p.ID, + Title: p.Title, + Description: p.Description, + OwnerID: p.OwnerID, + RepoID: p.RepoID, + CreatorID: p.CreatorID, + IsClosed: p.IsClosed, + Created: p.CreatedUnix.AsTime(), + Updated: p.UpdatedUnix.AsTime(), + } + if p.IsClosed && p.ClosedDateUnix > 0 { + apiProject.Closed = p.ClosedDateUnix.AsTimePtr() + } + return apiProject +} + +// ToAPIProjectList converts a list of Projects to API format +func ToAPIProjectList(projects []*project_model.Project) []*api.Project { + result := make([]*api.Project, len(projects)) + for i := range projects { + result[i] = ToAPIProject(projects[i]) + } + return result +} diff --git a/services/convert/repository.go b/services/convert/repository.go index 3c9cc83ccb..503f6bb2a3 100644 --- a/services/convert/repository.go +++ b/services/convert/repository.go @@ -251,7 +251,7 @@ func innerToRepo(ctx context.Context, repo *repo_model.Repository, permissionInR MirrorUpdated: mirrorUpdated, RepoTransfer: transfer, Topics: util.SliceNilAsEmpty(repo.Topics), - ObjectFormatName: repo.ObjectFormatName, + ObjectFormatName: api.ObjectFormatName(repo.ObjectFormatName), Licenses: util.SliceNilAsEmpty(repoLicenses.StringList()), } } diff --git a/services/convert/user.go b/services/convert/user.go index cee4de5091..ce9b256061 100644 --- a/services/convert/user.go +++ b/services/convert/user.go @@ -65,7 +65,7 @@ func toUser(ctx context.Context, user *user_model.User, signed, authed bool) *ap StarredRepos: user.NumStars, } - result.Visibility = user.Visibility.String() + result.Visibility = api.UserVisibility(user.Visibility.String()) // hide primary email if API caller is anonymous or user keep email private if signed && 
(!user.KeepEmailPrivate || authed) { @@ -104,7 +104,7 @@ func User2UserSettings(user *user_model.User) api.UserSettings { func ToUserAndPermission(ctx context.Context, user, doer *user_model.User, accessMode perm.AccessMode) api.RepoCollaboratorPermission { return api.RepoCollaboratorPermission{ User: ToUser(ctx, user, doer), - Permission: accessMode.ToString(), + Permission: api.AccessLevelName(accessMode.ToString()), RoleName: accessMode.ToString(), } } diff --git a/services/convert/user_test.go b/services/convert/user_test.go index d32cffb512..349b0058fe 100644 --- a/services/convert/user_test.go +++ b/services/convert/user_test.go @@ -29,11 +29,11 @@ func TestUser_ToUser(t *testing.T) { apiUser = toUser(t.Context(), user1, false, false) assert.False(t, apiUser.IsAdmin) - assert.Equal(t, api.VisibleTypePublic.String(), apiUser.Visibility) + assert.Equal(t, api.UserVisibilityPublic, apiUser.Visibility) user31 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate}) apiUser = toUser(t.Context(), user31, true, true) assert.False(t, apiUser.IsAdmin) - assert.Equal(t, api.VisibleTypePrivate.String(), apiUser.Visibility) + assert.Equal(t, api.UserVisibilityPrivate, apiUser.Visibility) } diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go index 09b9b2690c..d8e019f860 100644 --- a/services/forms/repo_form.go +++ b/services/forms/repo_form.go @@ -202,10 +202,6 @@ func (f *ProtectBranchForm) Validate(req *http.Request, errs binding.Errors) bin return middleware.Validate(errs, ctx.Data, f, ctx.Locale) } -type ProtectBranchPriorityForm struct { - IDs []int64 -} - // WebhookForm form for changing web hook type WebhookForm struct { Name string `binding:"MaxSize(255)"` @@ -412,12 +408,10 @@ func (f *NewPackagistHookForm) Validate(req *http.Request, errs binding.Errors) // CreateIssueForm form for creating issue type CreateIssueForm struct { Title string `binding:"Required;MaxSize(255)"` - LabelIDs 
string `form:"label_ids"` AssigneeIDs string `form:"assignee_ids"` ReviewerIDs string `form:"reviewer_ids"` Ref string `form:"ref"` MilestoneID int64 - ProjectID int64 Content string Files []string AllowMaintainerEdit bool diff --git a/services/git/compare.go b/services/git/compare.go index a8c2980112..6102d2418d 100644 --- a/services/git/compare.go +++ b/services/git/compare.go @@ -16,17 +16,23 @@ import ( // CompareInfo represents needed information for comparing references. type CompareInfo struct { - BaseRepo *repo_model.Repository - BaseRef git.RefName - BaseCommitID string - HeadRepo *repo_model.Repository - HeadGitRepo *git.Repository - HeadRef git.RefName - HeadCommitID string + BaseRepo *repo_model.Repository + BaseRef git.RefName + BaseCommitID string + HeadRepo *repo_model.Repository + HeadGitRepo *git.Repository + HeadRef git.RefName + HeadCommitID string + CompareSeparator string - MergeBase string - Commits []*git.Commit - NumFiles int + + // CompareBase is the left-side commit ID used for comparing + // for "...": it is merge base (empty for no merge base) + // for direct comparison "..": it is base commit ID + CompareBase string + + Commits []*git.Commit + NumFiles int } func (ci *CompareInfo) IsSameRepository() bool { @@ -45,6 +51,7 @@ func (ci *CompareInfo) DirectComparison() bool { // GetCompareInfo generates and returns compare information between base and head branches of repositories. // It does its best to fill the fields as many as it can. +// MergeBase can be empty if the base and head are unrelated. 
func GetCompareInfo(ctx context.Context, baseRepo, headRepo *repo_model.Repository, headGitRepo *git.Repository, baseRef, headRef git.RefName, directComparison, fileOnly bool) (compareInfo CompareInfo, err error) { baseCommitID, err1 := gitrepo.GetFullCommitID(ctx, baseRepo, baseRef.String()) headCommitID, err2 := gitrepo.GetFullCommitID(ctx, headRepo, headRef.String()) @@ -74,12 +81,16 @@ func GetCompareInfo(ctx context.Context, baseRepo, headRepo *repo_model.Reposito } if !directComparison { - compareInfo.MergeBase, err = gitrepo.MergeBase(ctx, headRepo, compareInfo.BaseCommitID, compareInfo.HeadCommitID) - if err != nil { + compareInfo.CompareBase, err = gitrepo.MergeBase(ctx, headRepo, compareInfo.BaseCommitID, compareInfo.HeadCommitID) + if err != nil && !errors.Is(err, util.ErrNotExist) { return compareInfo, fmt.Errorf("MergeBase: %w", err) } } else { - compareInfo.MergeBase = compareInfo.BaseCommitID + compareInfo.CompareBase = compareInfo.BaseCommitID + } + + if compareInfo.CompareBase == "" { + return compareInfo, nil } // We have a common base - therefore we know that ... should work @@ -88,16 +99,14 @@ func GetCompareInfo(ctx context.Context, baseRepo, headRepo *repo_model.Reposito // which is different from the meaning of "..." in git diff (where it implies diffing from the merge base). // For listing PR commits, we must use merge-base..head to include only the commits introduced by the head branch. // Otherwise, commits newly pushed to the base branch would also be included, which is incorrect. - compareInfo.Commits, err = headGitRepo.ShowPrettyFormatLogToList(ctx, compareInfo.MergeBase+".."+compareInfo.HeadCommitID) + compareInfo.Commits, err = headGitRepo.ShowPrettyFormatLogToList(ctx, compareInfo.CompareBase+".."+compareInfo.HeadCommitID) if err != nil { return compareInfo, fmt.Errorf("ShowPrettyFormatLogToList: %w", err) } - } else { - compareInfo.Commits = []*git.Commit{} } // Count number of changed files. 
- // This probably should be removed as we need to use shortstat elsewhere + // TODO: This probably should be removed as we need to use shortstat elsewhere // Now there is git diff --shortstat but this appears to be slower than simply iterating with --nameonly compareInfo.NumFiles, err = headGitRepo.GetDiffNumChangedFiles(compareInfo.BaseCommitID, compareInfo.HeadCommitID, directComparison) return compareInfo, err diff --git a/services/issue/issue.go b/services/issue/issue.go index 5b57b2453e..2bece1c7bb 100644 --- a/services/issue/issue.go +++ b/services/issue/issue.go @@ -23,7 +23,7 @@ import ( ) // NewIssue creates new issue with labels for repository. -func NewIssue(ctx context.Context, repo *repo_model.Repository, issue *issues_model.Issue, labelIDs []int64, uuids []string, assigneeIDs []int64, projectID int64) error { +func NewIssue(ctx context.Context, repo *repo_model.Repository, issue *issues_model.Issue, labelIDs []int64, uuids []string, assigneeIDs, projectIDs []int64) error { if err := issue.LoadPoster(ctx); err != nil { return err } @@ -41,8 +41,9 @@ func NewIssue(ctx context.Context, repo *repo_model.Repository, issue *issues_mo return err } } - if projectID > 0 { - if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, issue.Poster, projectID, 0); err != nil { + if len(projectIDs) > 0 { + err := issues_model.IssueAssignOrRemoveProject(ctx, issue, issue.Poster, projectIDs) + if err != nil { return err } } diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go index 99f8dba92f..0cdd96496d 100644 --- a/services/migrations/migrate.go +++ b/services/migrations/migrate.go @@ -218,7 +218,7 @@ func migrateRepository(ctx context.Context, doer *user_model.User, downloader ba // We don't actually need to check the OriginalURL as it isn't used anywhere } - log.Trace("migrating git data from %s", repo.CloneURL) + log.Trace("migrating git data from %s", util.SanitizeCredentialURLs(repo.CloneURL)) messenger("repo.migrate.migrating_git") 
if err = uploader.CreateRepo(ctx, repo, opts); err != nil { return err diff --git a/services/projects/issue.go b/services/projects/issue.go index ece9910cd2..5c691a95eb 100644 --- a/services/projects/issue.go +++ b/services/projects/issue.go @@ -59,11 +59,13 @@ func MoveIssuesOnProjectColumn(ctx context.Context, doer *user_model.User, colum continue } - projectColumnID, err := curIssue.ProjectColumnID(ctx) + projectColumnMap, err := curIssue.ProjectColumnMap(ctx) if err != nil { return err } + projectColumnID := projectColumnMap[column.ProjectID] + if projectColumnID != column.ID { // add timeline to issue if _, err := issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{ @@ -80,7 +82,16 @@ func MoveIssuesOnProjectColumn(ctx context.Context, doer *user_model.User, colum } } - _, err = db.Exec(ctx, "UPDATE `project_issue` SET project_board_id=?, sorting=? WHERE issue_id=?", column.ID, sorting, issueID) + // Update the column and sorting for this specific issue in this specific project. + // IMPORTANT: The WHERE clause must include both issue_id AND project_id to ensure + // that moving an issue's column in one project doesn't affect its column in other + // projects when the issue is assigned to multiple projects. + _, err = db.GetEngine(ctx).Table("project_issue"). + Where("issue_id = ? AND project_id = ?", issueID, column.ProjectID). 
+ Update(map[string]any{ + "project_board_id": column.ID, + "sorting": sorting, + }) if err != nil { return err } @@ -117,7 +128,7 @@ func LoadIssuesAssigneesForProject(ctx context.Context, issuesMap map[int64]issu // LoadIssuesFromProject load issues assigned to each project column inside the given project func LoadIssuesFromProject(ctx context.Context, project *project_model.Project, opts *issues_model.IssuesOptions) (results map[int64]issues_model.IssueList, _ error) { issueList, err := issues_model.Issues(ctx, opts.Copy(func(o *issues_model.IssuesOptions) { - o.ProjectID = project.ID + o.ProjectIDs = []int64{project.ID} o.SortType = "project-column-sorting" })) if err != nil { @@ -211,10 +222,10 @@ func LoadIssueNumbersForProject(ctx context.Context, project *project_model.Proj // for user or org projects, we need to check access permissions opts := issues_model.IssuesOptions{ - ProjectID: project.ID, - Doer: doer, - AllPublic: doer == nil, - Owner: project.Owner, + ProjectIDs: []int64{project.ID}, + Doer: doer, + AllPublic: doer == nil, + Owner: project.Owner, } var err error diff --git a/services/projects/issue_test.go b/services/projects/issue_test.go index 17d0fef2e6..ec31b22b90 100644 --- a/services/projects/issue_test.go +++ b/services/projects/issue_test.go @@ -102,28 +102,18 @@ func Test_Projects(t *testing.T) { assert.NoError(t, err) }() - column1 := project_model.Column{ - Title: "column 1", - ProjectID: project1.ID, - } - err = project_model.NewColumn(t.Context(), &column1) - assert.NoError(t, err) - - column2 := project_model.Column{ - Title: "column 2", - ProjectID: project1.ID, - } - err = project_model.NewColumn(t.Context(), &column2) + // Get the default column created by the template (issues will be assigned here) + defaultColumn, err := project1.MustDefaultColumn(t.Context()) assert.NoError(t, err) // issue 6 belongs to private repo 3 under org 3 issue6 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 6}) - err = 
issues_model.IssueAssignOrRemoveProject(t.Context(), issue6, user2, project1.ID, column1.ID) + err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue6, user2, []int64{project1.ID}) assert.NoError(t, err) // issue 16 belongs to public repo 16 under org 3 issue16 := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 16}) - err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue16, user2, project1.ID, column1.ID) + err = issues_model.IssueAssignOrRemoveProject(t.Context(), issue16, user2, []int64{project1.ID}) assert.NoError(t, err) projects, err := db.Find[project_model.Project](t.Context(), project_model.SearchOptions{ @@ -139,8 +129,8 @@ Doer: userAdmin, }) assert.NoError(t, err) - assert.Len(t, columnIssues, 1) // column1 has 2 issues, 6 will not contains here because 0 issues - assert.Len(t, columnIssues[column1.ID], 2) // user2 can visit both issues, one from public repository one from private repository + assert.Len(t, columnIssues, 1) // default column has 2 issues + assert.Len(t, columnIssues[defaultColumn.ID], 2) // admin can visit both issues, one from public repository one from private repository }) t.Run("Anonymous user", func(t *testing.T) { @@ -149,7 +139,7 @@ }) assert.NoError(t, err) assert.Len(t, columnIssues, 1) - assert.Len(t, columnIssues[column1.ID], 1) // anonymous user can only visit public repo issues + assert.Len(t, columnIssues[defaultColumn.ID], 1) // anonymous user can only visit public repo issues }) t.Run("Authenticated user with no permission to the private repo", func(t *testing.T) { @@ -159,7 +149,7 @@ }) assert.NoError(t, err) assert.Len(t, columnIssues, 1) - assert.Len(t, columnIssues[column1.ID], 1) // user4 can only visit public repo issues + assert.Len(t, columnIssues[defaultColumn.ID], 1) // user4 can only visit public repo issues }) }) diff --git a/services/pull/check.go b/services/pull/check.go 
index bc706844b2..241eb633b6 100644 --- a/services/pull/check.go +++ b/services/pull/check.go @@ -339,18 +339,25 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName) - // Get the commit from BaseBranch where the pull request got merged + // Get the commit from BaseBranch where the pull request got merged. + // When several PRs targeting the same base are merged in a single push, + // rev-list returns one line per merge commit on the ancestry path; we + // only want the first one (the oldest, with --reverse, i.e. the merge + // commit that actually introduced this PR). mergeCommit, _, err := gitrepo.RunCmdString(ctx, pr.BaseRepo, gitcmd.NewCommand("rev-list", "--ancestry-path", "--merges", "--reverse"). AddDynamicArguments(prHeadCommitID+".."+pr.BaseBranch)) if err != nil { return nil, fmt.Errorf("git rev-list --ancestry-path --merges --reverse: %w", err) - } else if len(mergeCommit) < objectFormat.FullLength() { + } + + // only use the first (oldest) commit as merge commit if the output contains multiple commits + mergeCommit = strings.TrimSpace(mergeCommit) + mergeCommit, _, _ = strings.Cut(mergeCommit, "\n") + if len(mergeCommit) < objectFormat.FullLength() { // PR was maybe fast-forwarded, so just use last commit of PR mergeCommit = prHeadCommitID } - mergeCommit = strings.TrimSpace(mergeCommit) - commit, err := gitRepo.GetCommit(mergeCommit) if err != nil { return nil, fmt.Errorf("GetMergeCommit[%s]: %w", mergeCommit, err) diff --git a/services/pull/pull.go b/services/pull/pull.go index 891e358b68..b852dc2ed1 100644 --- a/services/pull/pull.go +++ b/services/pull/pull.go @@ -50,7 +50,7 @@ type NewPullRequestOptions struct { AssigneeIDs []int64 Reviewers []*user_model.User TeamReviewers []*organization.Team - ProjectID int64 + ProjectIDs []int64 } // NewPullRequest creates new pull request with labels for repository. 
@@ -110,8 +110,8 @@ func NewPullRequest(ctx context.Context, opts *NewPullRequestOptions) error { assigneeCommentMap[assigneeID] = comment } - if opts.ProjectID > 0 && canAssignProject { - if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, issue.Poster, opts.ProjectID, 0); err != nil { + if len(opts.ProjectIDs) > 0 && canAssignProject { + if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, issue.Poster, opts.ProjectIDs); err != nil { return err } } diff --git a/services/webhook/notifier.go b/services/webhook/notifier.go index d2575e9931..7627935a32 100644 --- a/services/webhook/notifier.go +++ b/services/webhook/notifier.go @@ -1043,7 +1043,8 @@ func (*webhookNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *repo_ return } - convertedRun, err := convert.ToActionWorkflowRun(ctx, repo, run, nil) + run.Repo = repo + convertedRun, err := convert.ToActionWorkflowRun(ctx, run, nil) if err != nil { log.Error("ToActionWorkflowRun: %v", err) return diff --git a/templates/admin/badge/users.tmpl b/templates/admin/badge/users.tmpl index 97d332010f..711110ca68 100644 --- a/templates/admin/badge/users.tmpl +++ b/templates/admin/badge/users.tmpl @@ -13,18 +13,18 @@ {{if .Users}}
-
+
{{range .Users}} -
-
+
+ -
-
+
+
{{template "shared/user/name" .}}
-
+
{{ctx.Locale.Tr "admin.badges.remove_user"}} diff --git a/templates/admin/badge/view.tmpl b/templates/admin/badge/view.tmpl index efd31f4c41..cbe5c7b710 100644 --- a/templates/admin/badge/view.tmpl +++ b/templates/admin/badge/view.tmpl @@ -10,18 +10,18 @@
-
-
+
+
{{if .Badge.ImageURL}} -
+
{{.Badge.Description}}
{{end}} -
-
+
+
{{.Badge.Slug}}
-
+
{{.Badge.Description}}
diff --git a/templates/admin/config.tmpl b/templates/admin/config.tmpl index b68f2c1a7a..c381c5bf1d 100644 --- a/templates/admin/config.tmpl +++ b/templates/admin/config.tmpl @@ -244,8 +244,6 @@
{{.CacheInterval}} {{ctx.Locale.Tr "tool.raw_seconds"}}
{{end}} {{if .CacheConn}} -
{{ctx.Locale.Tr "admin.config.cache_conn"}}
-
{{.CacheConn}}
{{ctx.Locale.Tr "admin.config.cache_item_ttl"}}
{{.CacheItemTTL}}
{{end}} @@ -266,8 +264,6 @@
{{ctx.Locale.Tr "admin.config.session_provider"}}
{{.SessionConfig.Provider}}
-
{{ctx.Locale.Tr "admin.config.provider_config"}}
-
{{if .SessionConfig.ProviderConfig}}{{.SessionConfig.ProviderConfig}}{{else}}-{{end}}
{{ctx.Locale.Tr "admin.config.cookie_name"}}
{{.SessionConfig.CookieName}}
{{ctx.Locale.Tr "admin.config.gc_interval_time"}}
diff --git a/templates/admin/user/view_details.tmpl b/templates/admin/user/view_details.tmpl index db61bc9359..45fb83d125 100644 --- a/templates/admin/user/view_details.tmpl +++ b/templates/admin/user/view_details.tmpl @@ -1,10 +1,10 @@ -
-
-
+
+
+
{{ctx.AvatarUtils.Avatar .User 48}}
-
-
+
+
{{template "shared/user/name" .User}} {{if .User.IsAdmin}} {{ctx.Locale.Tr "admin.users.admin"}} @@ -13,34 +13,34 @@ {{ctx.Locale.Tr "admin.users.bot"}} {{end}}
-
+
{{ctx.Locale.Tr "admin.users.auth_source"}}: {{Iif (eq .LoginSource.ID 0) (ctx.Locale.Tr "admin.users.local") .LoginSource.Name}}
-
+
{{ctx.Locale.Tr "admin.users.activated"}}: {{svg (Iif .User.IsActive "octicon-check" "octicon-x")}}
-
+
{{ctx.Locale.Tr "admin.users.prohibit_login"}}: {{svg (Iif .User.ProhibitLogin "octicon-check" "octicon-x")}}
-
+
{{ctx.Locale.Tr "admin.users.restricted"}}: {{svg (Iif .User.IsRestricted "octicon-check" "octicon-x")}}
-
+
{{ctx.Locale.Tr "settings.visibility"}}: {{if .User.Visibility.IsPublic}}{{ctx.Locale.Tr "settings.visibility.public"}}{{end}} {{if .User.Visibility.IsLimited}}{{ctx.Locale.Tr "settings.visibility.limited"}}{{end}} {{if .User.Visibility.IsPrivate}}{{ctx.Locale.Tr "settings.visibility.private"}}{{end}}
-
+
{{ctx.Locale.Tr "admin.users.2fa"}}: {{svg (Iif .TwoFactorEnabled "octicon-check" "octicon-x")}}
{{if .User.Language}} -
+
{{ctx.Locale.Tr "settings.language"}}: {{range .AllLangs}}{{if eq $.User.Language .Lang}}{{.Name}}{{end}}{{end}} @@ -48,12 +48,12 @@
{{end}} {{if .User.Location}} -
+
{{svg "octicon-location"}}{{.User.Location}}
{{end}} {{if .User.Website}} -
+
{{svg "octicon-link"}} {{.User.Website}} diff --git a/templates/admin/user/view_emails.tmpl b/templates/admin/user/view_emails.tmpl index 7e77206f1c..29f059e05f 100644 --- a/templates/admin/user/view_emails.tmpl +++ b/templates/admin/user/view_emails.tmpl @@ -1,7 +1,7 @@ -
+
{{range .Emails}} -
-
+
+
{{.Email}} {{if .IsPrimary}} diff --git a/templates/base/footer.tmpl b/templates/base/footer.tmpl index 5a218bb62a..b7443345ad 100644 --- a/templates/base/footer.tmpl +++ b/templates/base/footer.tmpl @@ -11,5 +11,8 @@ {{template "base/footer_content" .}} {{ctx.ScriptImport "js/index.js" "module"}} {{template "custom/footer" .}} + diff --git a/templates/devtest/flex-list.tmpl b/templates/devtest/flex-list.tmpl index c0863ba4f0..de59473a70 100644 --- a/templates/devtest/flex-list.tmpl +++ b/templates/devtest/flex-list.tmpl @@ -1,28 +1,28 @@ {{template "devtest/devtest-header"}}
-

Flex List (standalone)

+

Flex List (standalone)

-
-
-
+
+
+
{{svg "octicon-info" 32}}
-
-
+
+
Flex Item with label
-
+
consists of leading/main/trailing part
-
+
main part contains title and (multiple) body lines
-
+
@@ -35,42 +35,42 @@
-
-
+
+
{{svg "octicon-info" 32}}
-
-
+
+
Very loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong title
-
+
consists of leading/main/trailing part
-
+
Very loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong content Truncate very loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong content
-
+
-
-
+
+
{{svg "octicon-repo" 32}}
-
-
-
+
+
+
gitea-org / gitea {{svg "octicon-repo-forked"}}
- -
+
when inside header, the trailing part will wrap below the title
@@ -87,27 +87,48 @@
-

Flex List (with "ui segment")

+

Flex List (with "ui segment")

-
-
item 1
-
item 2
+
+
item 1
+
item 2
-

Flex List (with "ui segment")

-
-
item 1
-
item 2
+

segment header

+
+
item 1
+
item 2
+
+
+

Flex List (with "ui segment fitted")

+
+
+
item 1
+
item 2
-

If parent provides the padding/margin space:

-
-
-
item 1 (no padding top)
-
item 2 (no padding bottom)
+

If parent provides border or padding:

+
+
before divider
+
+
+
item 1
+
item 2
+
+
after divider
+
+
+
before divider
+
+
+
item 1
+
item 2
+
+
+
after divider
diff --git a/templates/devtest/fomantic-dropdown.tmpl b/templates/devtest/fomantic-dropdown.tmpl index a10dc890ce..8fea230768 100644 --- a/templates/devtest/fomantic-dropdown.tmpl +++ b/templates/devtest/fomantic-dropdown.tmpl @@ -4,7 +4,7 @@

Dropdown