diff --git a/.github/workflows/pull-compliance.yml b/.github/workflows/pull-compliance.yml index e44a787587..b4f5002082 100644 --- a/.github/workflows/pull-compliance.yml +++ b/.github/workflows/pull-compliance.yml @@ -38,7 +38,7 @@ jobs: contents: read steps: - uses: actions/checkout@v6 - - uses: astral-sh/setup-uv@v7 + - uses: astral-sh/setup-uv@v8.0.0 - run: uv python install 3.14 - uses: pnpm/action-setup@v5 - uses: actions/setup-node@v6 @@ -58,7 +58,7 @@ jobs: contents: read steps: - uses: actions/checkout@v6 - - uses: astral-sh/setup-uv@v7 + - uses: astral-sh/setup-uv@v8.0.0 - run: uv python install 3.14 - run: make deps-py - run: make lint-yaml @@ -122,7 +122,7 @@ jobs: go-version-file: go.mod check-latest: true - run: make deps-backend deps-tools - - run: make lint-go-windows lint-go-gitea-vet + - run: make lint-go-windows env: TAGS: bindata sqlite sqlite_unlock_notify GOOS: windows diff --git a/.golangci.yml b/.golangci.yml index afd91d65e5..570942bdd3 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -13,6 +13,7 @@ linters: - forbidigo - gocheckcompilerdirectives - gocritic + - goheader - govet - ineffassign - mirror @@ -51,6 +52,14 @@ linters: desc: do not use the go-chi cache package, use gitea's cache system - pkg: github.com/pkg/errors desc: use builtin errors package instead + migrations: + files: + - '**/models/migrations/**/*.go' + deny: + - pkg: code.gitea.io/gitea/models$ + desc: migrations must not depend on the models package + - pkg: code.gitea.io/gitea/modules/structs + desc: migrations must not depend on modules/structs (API structures change over time) nolintlint: allow-unused: false require-explanation: true @@ -109,6 +118,11 @@ linters: enable: - nilness - unusedwrite + goheader: + values: + regexp: + HEADER: '((Copyright [^\n]+|All rights reserved\.)\n)*Copyright \d{4} (The (Gogs|Gitea) Authors|Gitea Authors|Gitea)\.( All rights reserved\.)?(\n(Copyright [^\n]+|All rights reserved\.))*\nSPDX-License-Identifier: [\w.-]+' + template: '{{ 
HEADER }}' exclusions: generated: lax presets: @@ -158,9 +172,16 @@ issues: max-same-issues: 0 formatters: enable: - - gofmt + - gci - gofumpt settings: + gci: + custom-order: true + sections: + - standard + - prefix(code.gitea.io/gitea) + - blank + - default gofumpt: extra-rules: true exclusions: @@ -170,9 +191,6 @@ formatters: - .venv - public - web_src - - third_party$ - - builtin$ - - examples$ run: timeout: 10m diff --git a/CHANGELOG.md b/CHANGELOG.md index b662cb4ad5..0fbdf6d9f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,385 @@ This changelog goes through the changes that have been made in each release without substantial changes to our git log; to see the highlights of what has been added to each release, please refer to the [blog](https://blog.gitea.com). +## [1.26.0-rc0](https://github.com/go-gitea/gitea/releases/tag/v1.26.0-rc0) - 2026-04-07 + +* BREAKING + * Correct swagger annotations for enums, status codes, and notification state (#37030) + * Remove GET API registration-token (#36801) + * Support Actions `concurrency` syntax (#32751) + * Make PUBLIC_URL_DETECTION default to "auto" (#36955) +* SECURITY + * Bound PageSize in `ListUnadoptedRepositories` (#36884) +* FEATURES + * Support Actions `concurrency` syntax (#32751) + * Add terraform state registry (#36710) + * Instance-wide (global) info banner and maintenance mode (#36571) + * Support rendering OpenAPI spec (#36449) + * Add keyboard shortcuts for repository file and code search (#36416) + * Add support for archive-upload rpc (#36391) + * Add ability to download subpath archive (#36371) + * Add workflow dependencies visualization (#26062) (#36248) & Restyle Workflow Graph (#36912) + * Automatic generation of release notes (#35977) + * Add "Go to file", "Delete Directory" to repo file list page (#35911) + * Introduce "config edit-ini" sub command to help maintaining INI config file (#35735) + * Add button to re-run failed jobs in Actions (#36924) + * Support actions and reusable 
workflows from private repos (#32562) + * Add summary to action runs view (#36883) + * Add user badges (#36752) + * Add configurable permissions for Actions automatic tokens (#36173) + * Add per-runner “Disable/Pause” (#36776) +* PERFORMANCE + * WorkflowDispatch API optionally return runid (#36706) + * Add render cache for SVG icons (#36863) + * Load `mentionValues` asynchronously (#36739) + * Lazy-load some Vue components, fix heatmap chunk loading on every page (#36719) + * Load heatmap data asynchronously (#36622) + * Use prev/next pagination for user profile activities page to speed up (#36642) + * Refactor cat-file batch operations and support `--batch-command` approach (#35775) + * Use merge tree to detect conflicts when possible (#36400) +* ENHANCEMENTS + * Adds option to force update new branch in contents routes (#35592) + * Add viewer controller for mermaid (zoom, drag) (#36557) + * Add code editor setting dropdowns (#36534) + * Add `elk` layout support to mermaid (#36486) + * Add resolve/unresolve review comment API endpoints (#36441) + * Allow configuring default PR base branch (fixes #36412) (#36425) + * Add support for RPM Errata (updateinfo.xml) (#37125) + * Require additional user confirmation for making repo private (#36959) + * Feature non-zipped actions artifacts (action v7 / nodejs / npm v6.2.0) (#36786) + * Add `actions.WORKFLOW_DIRS` setting (#36619) + * Avoid opening new tab when downloading actions logs (#36740) + * Implements OIDC RP-Initiated Logout (#36724) + * Show workflow link (#37070) + * Desaturate dark theme background colors (#37056) + * Refactor "org teams" page and help new users to "add member" to an org (#37051) + * Add webhook name field to improve webhook identification (#37025) (#37040) + * Make task list checkboxes clickable in the preview tab (#37010) + * Improve severity labels in Actions logs and tweak colors (#36993) + * Linkify URLs in Actions workflow logs (#36986) + * Allow text selection on checkbox labels (#36970) 
+ * Support dark/light theme images in markdown (#36922) + * Enable native dark mode for swagger-ui (#36899) + * Rework checkbox styling, remove `input` border hover effect (#36870) + * Refactor storage content-type handling of ServeDirectURL (#36804) + * Use "Enable Gravatar" but not "Disable" (#36771) + * Use case-insensitive matching for Git error "Not a valid object name" (#36728) + * Add “Copy Source” to markup comment menu (#36726) + * Change image transparency grid to CSS (#36711) + * Add "Run" prefix for unnamed action steps (#36624) + * Persist actions log time display settings in `localStorage` (#36623) + * Use first commit title for multi-commit PRs and fix auto-focus title field (#36606) + * Improve BuildCaseInsensitiveLike with lowercase (#36598) + * Improve diff highlighting (#36583) + * Exclude cancelled runs from failure-only email notifications (#36569) + * Use full-file highlighting for diff sections (#36561) + * Color command/error logs in Actions log (#36538) + * Add paging headers (#36521) + * Improve timeline entries for WIP prefix changes in pull requests (#36518) + * Add FOLDER_ICON_THEME configuration option (#36496) + * Normalize guessed languages for code highlighting (#36450) + * Add chunked transfer encoding support for LFS uploads (#36380) + * Indicate when only optional checks failed (#36367) + * Add 'allow_maintainer_edit' API option for creating a pull request (#36283) + * Support closing keywords with URL references (#36221) + * Improve diff file headers (#36215) + * Fix and enhance comment editor monospace toggle (#36181) + * Add git.DIFF_RENAME_SIMILARITY_THRESHOLD option (#36164) + * Add matching pair insertion to markdown textarea (#36121) + * Add sorting/filtering to admin user search API endpoint (#36112) + * Allow action user have read permission in public repo like other user (#36095) + * Disable matchBrackets in monaco (#36089) + * Use GitHub-style commit message for squash merge (#35987) + * Make composer registry support 
tar.gz and tar.bz2 and fix bugs (#35958) + * Add GITEA_PR_INDEX env variable to githooks (#35938) + * Add proper error message if session provider can not be created (#35520) + * Add button to copy file name in PR files (#35509) + * Move `X_FRAME_OPTIONS` setting from `cors` to `security` section (#30256) + * Add placeholder content for empty content page (#37114) + * Add `DEFAULT_DELETE_BRANCH_AFTER_MERGE` setting (#36917) + * Redirect to the only OAuth2 provider when no other login methods and fix various problems (#36901) + * Add admin badge to navbar avatar (#36790) + * Add `never` option to `PUBLIC_URL_DETECTION` configuration (#36785) + * Add background and run count to actions list page (#36707) + * Add icon to buttons "Close with Comment", "Close Pull Request", "Close Issue" (#36654) + * Add support for in_progress event in workflow_run webhook (#36979) + * Report commit status for pull_request_review events (#36589) + * Render merged pull request title as such in dashboard feed (#36479) + * Feature to be able to filter project boards by milestones (#36321) + * Use user id in noreply emails (#36550) + * Enable pagination on GiteaDownloader.getIssueReactions() (#36549) + * Remove striped tables in UI (#36509) + * Improve control char rendering and escape button styling (#37094) + * Support legacy run/job index-based URLs and refactor migration 326 (#37008) + * Add date to "No Contributions" tooltip (#36190) + * Show edit page confirmation dialog on tree view file change (#36130) + * Mention proc-receive in text for dashboard.resync_all_hooks func (#35991) + * Reuse selectable style for wiki (#35990) + * Support blue yellow colorblind theme (#35910) + * Support selecting theme on the footer (#35741) + * Improve online runner check (#35722) + * Add quick approve button on PR page (#35678) + * Enable commenting on expanded lines in PR diffs (#35662) + * Print PR-Title into tooltip for actions (#35579) + * Use explicit, stronger defaults for newly generated repo 
signing keys for Debian (#36236) + * Improve the compare page (#36261) + * Unify repo names in system notices (#36491) + * Move package settings to package instead of being tied to version (#37026) + * Add Actions API rerun endpoints for runs and jobs (#36768) + * Add branch_count to repository API (#35351) (#36743) + * Add created_by filter to SearchIssues (#36670) + * Allow admins to rename non-local users (#35970) + * Support updating branch via API (#35951) + * Add an option to automatically verify SSH keys from LDAP (#35927) + * Make "update file" API can create a new file when SHA is not set (#35738) + * Update issue.go with labels documentation (labels content, not ids) (#35522) + * Expose content_version for optimistic locking on issue and PR edits (#37035) + * Pass ServeHeaderOptions by value instead of pointer, fine tune httplib tests (#36982) +* BUGFIXES + * Fix API not persisting pull request unit config when has_pull_requests is not set (#36718) + * Rename CSS variables and improve colorblind themes (#36353) + * Hide `add-matcher` and `remove-matcher` from actions job logs (#36520) + * Prevent navigation keys from triggering actions during IME composition (#36540) + * Fix vertical alignment of `.commit-sign-badge` children (#36570) + * Fix duplicate startup warnings in admin panel (#36641) + * Fix CODEOWNERS review request attribution using comment metadata (#36348) + * Fix HTML tags appearing in wiki table of contents (#36284) + * Fix various bugs (#37096) + * Fix various legacy problems (#37092) + * Fix RPM Registry 404 when package name contains 'package' (#37087) + * Merge some standalone Vite entries into index.js (#37085) + * Fix various problems (#37077) + * Fix issue label deletion with Actions tokens (#37013) + * Hide delete branch or tag buttons in mirror or archived repositories. 
(#37006) + * Fix org contact email not clearable once set (#36975) + * Fix a bug when forking a repository in an organization (#36950) + * Preserve sort order of exclusive labels from template repo (#36931) + * Make container registry support Apple Container (basic auth) (#36920) + * Fix the wrong push commits in the pull request when force push (#36914) + * Add class "list-header-filters" to the div for projects (#36889) + * Fix dbfs error handling (#36844) + * Fix incorrect viewed files counter if reverted change was viewed (#36819) + * Refactor avatar package, support default avatar fallback (#36788) + * Fix README symlink resolution in subdirectories like .github (#36775) + * Fix CSS stacking context issue in actions log (#36749) + * Add gpg signing for merge rebase and update by rebase (#36701) + * Delete non-exist branch should return 404 (#36694) + * Fix `TestActionsCollaborativeOwner` (#36657) + * Fix multi-arch Docker build SIGILL by splitting frontend stage (#36646) + * Fix linguist-detectable attribute being ignored for configuration files (#36640) + * Fix state desync in ComboMarkdownEditor (#36625) + * Unify DEFAULT_SHOW_FULL_NAME output in templates and dropdown (#36597) + * Pull Request Pusher should be the author of the merge (#36581) + * Fix various version parsing problems (#36553) + * Fix highlight diff result (#36539) + * Fix mirror sync parser and fix mirror messages (#36504) + * Fix bug when list pull request commits (#36485) + * Fix various bugs (#36446) + * Fix issue filter menu layout (#36426) + * Restrict branch naming when new change matches with protection rules (#36405) + * Fix link/origin referrer and login redirect (#36279) + * Generate IDs for HTML headings without id attribute (#36233) + * Use a migration test instead of a wrong test which populated the meta test repositories and fix a migration bug (#36160) + * Fix issue close timeline icon (#36138) + * Fix diff blob excerpt expansion (#35922) + * Fix external render (#35727) + * 
Fix review request webhook bug (#35339) (#35723) + * Fix shutdown waitgroup panic (#35676) + * Cleanup ActionRun creation (#35624) + * Fix possible bug when migrating issues/pull requests (#33487) + * Various fixes (#36697) + * Apply notify/register mail flags during install load (#37120) + * Repair duration display for bad stopped timestamps (#37121) + * Fix(upgrade.sh): use HTTPS for GPG key import and restore SELinux context after upgrade (#36930) + * Fix various trivial problems (#36921) + * Fix various trivial problems (#36953) + * Fix NuGet package upload error handling (#37074) + * Fix CodeQL code scanning alerts (#36858) + * Refactor issue sidebar and fix various problems (#37045) + * Fix various problems (#37029) + * Fix relative-time RangeError (#37021) + * Fix chroma lexer mapping (#36629) + * Fix typos and grammar in English locale (#36751) + * Fix milestone/project text overflow in issue sidebar (#36741) + * Fix `no-content` message not rendering after comment edit (#36733) + * Fix theme loading in development (#36605) + * Fix workflow run jobs API returning null steps (#36603) + * Fix timeline event layout overflow with long content (#36595) + * Fix minor UI issues in runner edit page (#36590) + * Fix incorrect vendored detections (#36508) + * Fix editorconfig not respected in PR Conversation view (#36492) + * Don't create self-references in merged PRs (#36490) + * Fix potential incorrect runID in run status update (#36437) + * Fix file-tree ui error when adding files to repo without commits (#36312) + * Improve image captcha contrast for dark mode (#36265) + * Fix panic in blame view when a file has only a single commit (#36230) + * Fix spelling error in migrate-storage cmd utility (#36226) + * Fix code highlighting on blame page (#36157) + * Fix nilnil in onedev downloader (#36154) + * Fix actions lint (#36029) + * Fix oauth2 session gob register (#36017) + * Fix Arch repo pacman.conf snippet (#35825) + * Fix a number of `strictNullChecks`-related 
issues (#35795) + * Fix URLJoin, markup render link reoslving, sign-in/up/linkaccount page common data (#36861) + * Hide delete directory button for mirror or archive repository and disable the menu item if user have no permission (#36384) + * Update message severity colors, fix navbar double border (#37019) + * Inline and lazy-load EasyMDE CSS, fix border colors (#36714) + * Closed milestones with no issues now show as 100% completed (#36220) + * Add test for ExtendCommentTreePathLength migration and fix bugs (#35791) + * Only turn links to current instance into hash links (#36237) + * Fix typos in code comments: doesnt, dont, wont (#36890) +* REFACTOR + * Replace Monaco with CodeMirror (#36764) + * Replace CSRF cookie with `CrossOriginProtection` (#36183) + * Replace index with id in actions routes (#36842) + * Remove unnecessary function parameter (#35765) + * Move jobparser from act repository to Gitea (#36699) + * Refactor compare router param parse (#36105) + * Optimize 'refreshAccesses' to perform update without removing then adding (#35702) + * Clean up checkbox cursor styles (#37016) + * Remove undocumented support of signing key in the repository git configuration file (#36143) + * Switch `cmd/` to use constructor functions. 
(#36962) + * Use `relative-time` to render absolute dates (#36238) + * Some refactors about GetMergeBase (#36186) + * Some small refactors (#36163) + * Use gitRepo as parameter instead of repopath when invoking sign functions (#36162) + * Move blame to gitrepo (#36161) + * Move some functions to gitrepo package to reduce RepoPath reference directly (#36126) + * Use gitrepo's clone and push when possible (#36093) + * Remove mermaid margin workaround (#35732) + * Move some functions to gitrepo package (#35543) + * Move GetDiverging functions to gitrepo (#35524) + * Use global lock instead of status pool for cron lock (#35507) + * Use explicit mux instead of DefaultServeMux (#36276) + * Use gitrepo's push function (#36245) + * Pass request context to generateAdditionalHeadersForIssue (#36274) + * Move assign project when creating pull request to the same database transaction (#36244) + * Move catfile batch to a sub package of git module (#36232) + * Use gitrepo.Repository instead of wikipath (#35398) + * Use experimental go json v2 library (#35392) + * Refactor template render (#36438) + * Refactor GetRepoRawDiffForFile to avoid unnecessary pipe or goroutine (#36434) + * Refactor text utility classes to Tailwind CSS (#36703) + * Refactor git command stdio pipe (#36422) + * Refactor git command context & pipeline (#36406) + * Refactor git command stdio pipe (#36393) + * Remove unused functions (#36672) + * Refactor Actions Token Access (#35688) + * Move commit related functions to gitrepo package (#35600) + * Move archive function to repo_model and gitrepo (#35514) + * Move some functions to gitrepo package (#35503) + * Use git model to detect whether branch exist instead of gitrepo method (#35459) + * Some refactor for repo path (#36251) + * Extract helper functions from SearchIssues (#36158) + * Refactor merge conan and container auth preserve actions taskID (#36560) + * Refactor Nuget Auth to reuse Basic Auth Token Validation (#36558) + * Refactor ActionsTaskID 
(#36503) + * Refactor auth middleware (#36848) + * Refactor code render and render control chars (#37078) + * Clean up AppURL, remove legacy origin-url webcomponent (#37090) + * Remove `util.URLJoin` and replace all callers with direct path concatenation (#36867) + * Replace legacy tw-flex utility classes with flex-text-block/inline (#36778) + * Mark unused&immature activitypub as "not implemented" (#36789) +* TESTING + * Add e2e tests for server push events (#36879) + * Rework e2e tests (#36634) + * Add e2e reaction test, improve accessibility, enable parallel testing (#37081) + * Increase e2e test timeouts on CI to fix flaky tests (#37053) +* BUILD + * Convert locale files from ini to json format (#35489) + * Bump golangci-lint to 2.7.2, enable modernize stringsbuilder (#36180) + * Port away from `flake-utils` (#35675) + * Remove nolint (#36252) + * Update the Unlicense copy to latest version (#36636) + * Update to go 1.26.0 and golangci-lint 2.9.0 (#36588) + * Replace `google/go-licenses` with custom generation (#36575) + * Update go dependencies (#36548) + * Bump appleboy/git-push-action from 1.0.0 to 1.2.0 (#36306) + * Remove fomantic form module (#36222) + * Bump setup-node to v6, re-enable cache (#36207) + * Bump crowdin/github-action from 1 to 2 (#36204) + * Revert "Bump alpine to 3.23 (#36185)" (#36202) + * Update chroma to v2.21.1 (#36201) + * Bump astral-sh/setup-uv from 6 to 7 (#36198) + * Bump docker/build-push-action from 5 to 6 (#36197) + * Bump aws-actions/configure-aws-credentials from 4 to 5 (#36196) + * Bump dev-hanz-ops/install-gh-cli-action from 0.1.0 to 0.2.1 (#36195) + * Add JSON linting (#36192) + * Enable dependabot for actions (#36191) + * Bump alpine to 3.23 (#36185) + * Update chroma to v2.21.0 (#36171) + * Update JS deps and eslint enhancements (#36147) + * Update JS deps (#36091) + * update golangci-lint to v2.7.0 (#36079) + * Update JS deps, fix deprecations (#36040) + * Update JS deps (#35978) + * Add toolchain directive to go.mod 
(#35901) + * Move `gitea-vet` to use `go tool` (#35878) + * Update to go 1.25.4 (#35877) + * Enable TypeScript `strictNullChecks` (#35843) + * Enable `vue/require-typed-ref` eslint rule (#35764) + * Update JS dependencies (#35759) + * Move `codeformat` folder to tools (#35758) + * Update dependencies (#35733) + * Bump happy-dom from 20.0.0 to 20.0.2 (#35677) + * Bump setup-go to v6 (#35660) + * Update JS deps, misc tweaks (#35643) + * Bump happy-dom from 19.0.2 to 20.0.0 (#35625) + * Use bundled version of spectral (#35573) + * Update JS and PY deps (#35565) + * Bump github.com/wneessen/go-mail from 0.6.2 to 0.7.1 (#35557) + * Migrate from webpack to vite (#37002) + * Update JS dependencies and misc tweaks (#37064) + * Update to eslint 10 (#36925) + * Optimize Docker build with dependency layer caching (#36864) + * Update JS deps (#36850) + * Update tool dependencies and fix new lint issues (#36702) + * Remove redundant linter rules (#36658) + * Move Fomantic dropdown CSS to custom module (#36530) + * Remove and forbid `@ts-expect-error` (#36513) + * Refactor git command stderr handling (#36402) + * Enable gocheckcompilerdirectives linter (#36156) + * Replace `lint-go-gopls` with additional `govet` linters (#36028) + * Update golangci-lint to v2.6.0 (#35801) + * Misc tool tweaks (#35734) + * Add cache to container build (#35697) + * Upgrade vite (#37126) + * Update `setup-uv` to v8.0.0 (#37101) + * Upgrade `go-git` to v5.17.2 and related dependencies (#37060) + * Raise minimum Node.js version to 22.18.0 (#37058) + * Upgrade `golang.org/x/image` to v0.38.0 (#37054) + * Update minimum go version to 1.26.1, golangci-lint to 2.11.2, fix test style (#36876) + * Enable eslint concurrency (#36878) + * Vendor relative-time-element as local web component (#36853) + * Update material-icon-theme v5.32.0 (#36832) + * Update Go dependencies (#36781) + * Upgrade minimatch (#36760) + * Remove i18n backport tool at the moment because of translation format changed (#36643) + * 
Update emoji data for Unicode 16 (#36596) + * Update JS dependencies, adjust webpack config, misc fixes (#36431) + * Update material-icon-theme to v5.31.0 (#36427) + * Update JS and PY deps (#36383) + * Bump alpine to 3.23, add platforms to `docker-dryrun` (#36379) + * Update JS deps (#36354) + * Update goldmark to v1.7.16 (#36343) + * Update chroma to v2.22.0 (#36342) +* DOCS + * Update AI Contribution Policy (#37022) + * Update AGENTS.md with additional guidelines (#37018) + * Add missing cron tasks to example ini (#37012) + * Add AI Contribution Policy to CONTRIBUTING.md (#36651) + * Minor punctuation improvement in CONTRIBUTING.md (#36291) + * Add documentation for markdown anchor post-processing (#36443) +* MISC + * Correct spelling (#36783) + * Update Nix flake (#37110) + * Update Nix flake (#37024) + * Add valid github scopes (#36977) + * Update Nix flake (#36943) + * Update Nix flake (#36902) + * Update Nix flake (#36857) + * Update Nix flake (#36787) + ## [1.25.5](https://github.com/go-gitea/gitea/releases/tag/v1.25.5) - 2026-03-10 * SECURITY diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4b32631d6a..aea31640b3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,5 +1,14 @@ # Contribution Guidelines +This document explains how to contribute changes to the Gitea project. Topic-specific guides live in separate files so the essentials are easier to find. + +| Topic | Document | +| :---- | :------- | +| Backend (Go modules, API v1) | [docs/guideline-backend.md](docs/guideline-backend.md) | +| Frontend (npm, UI guidelines) | [docs/guideline-frontend.md](docs/guideline-frontend.md) | +| Maintainers, TOC, labels, merge queue, commit format for mergers | [docs/community-governance.md](docs/community-governance.md) | +| Release cycle, backports, tagging releases | [docs/release-management.md](docs/release-management.md) | +
Table of Contents - [Contribution Guidelines](#contribution-guidelines) @@ -11,10 +20,6 @@ - [Discuss your design before the implementation](#discuss-your-design-before-the-implementation) - [Issue locking](#issue-locking) - [Building Gitea](#building-gitea) - - [Dependencies](#dependencies) - - [Backend](#backend) - - [Frontend](#frontend) - - [Design guideline](#design-guideline) - [Styleguide](#styleguide) - [Copyright](#copyright) - [Testing](#testing) @@ -22,47 +27,19 @@ - [Code review](#code-review) - [Pull request format](#pull-request-format) - [PR title and summary](#pr-title-and-summary) - - [Milestone](#milestone) - - [Labels](#labels) - [Breaking PRs](#breaking-prs) - [What is a breaking PR?](#what-is-a-breaking-pr) - [How to handle breaking PRs?](#how-to-handle-breaking-prs) - [Maintaining open PRs](#maintaining-open-prs) - - [Getting PRs merged](#getting-prs-merged) - - [Final call](#final-call) - - [Commit messages](#commit-messages) - - [PR Co-authors](#pr-co-authors) - - [PRs targeting `main`](#prs-targeting-main) - - [Backport PRs](#backport-prs) + - [Reviewing PRs](#reviewing-prs) + - [For PR authors](#for-pr-authors) - [Documentation](#documentation) - - [API v1](#api-v1) - - [GitHub API compatibility](#github-api-compatibility) - - [Adding/Maintaining API routes](#addingmaintaining-api-routes) - - [When to use what HTTP method](#when-to-use-what-http-method) - - [Requirements for API routes](#requirements-for-api-routes) - - [Backports and Frontports](#backports-and-frontports) - - [What is backported?](#what-is-backported) - - [How to backport?](#how-to-backport) - - [Format of backport PRs](#format-of-backport-prs) - - [Frontports](#frontports) - [Developer Certificate of Origin (DCO)](#developer-certificate-of-origin-dco) - - [Release Cycle](#release-cycle) - - [Maintainers](#maintainers) - - [Technical Oversight Committee (TOC)](#technical-oversight-committee-toc) - - [TOC election process](#toc-election-process) - - [Current TOC 
members](#current-toc-members) - - [Previous TOC/owners members](#previous-tocowners-members) - - [Governance Compensation](#governance-compensation) - - [TOC \& Working groups](#toc--working-groups) - - [Roadmap](#roadmap) - - [Versions](#versions) - - [Releasing Gitea](#releasing-gitea)
## Introduction -This document explains how to contribute changes to the Gitea project. \ It assumes you have followed the [installation instructions](https://docs.gitea.com/category/installation). \ Sensitive security-related issues should be reported to [security@gitea.io](mailto:security@gitea.io). @@ -131,34 +108,6 @@ If further discussion is needed, we encourage you to open a new issue instead an See the [development setup instructions](https://docs.gitea.com/development/hacking-on-gitea). -## Dependencies - -### Backend - -Go dependencies are managed using [Go Modules](https://go.dev/cmd/go/#hdr-Module_maintenance). \ -You can find more details in the [go mod documentation](https://go.dev/ref/mod) and the [Go Modules Wiki](https://github.com/golang/go/wiki/Modules). - -Pull requests should only modify `go.mod` and `go.sum` where it is related to your change, be it a bugfix or a new feature. \ -Apart from that, these files should only be modified by Pull Requests whose only purpose is to update dependencies. - -The `go.mod`, `go.sum` update needs to be justified as part of the PR description, -and must be verified by the reviewers and/or merger to always reference -an existing upstream commit. - -### Frontend - -For the frontend, we use [npm](https://www.npmjs.com/). - -The same restrictions apply for frontend dependencies as for backend dependencies, with the exceptions that the files for it are `package.json` and `package-lock.json`, and that new versions must always reference an existing version. - -## Design guideline - -Depending on your change, please read the - -- [backend development guideline](https://docs.gitea.com/contributing/guidelines-backend) -- [frontend development guideline](https://docs.gitea.com/contributing/guidelines-frontend) -- [refactoring guideline](https://docs.gitea.com/contributing/guidelines-refactoring) - ## Styleguide You should always run `make fmt` before committing to conform to Gitea's styleguide. 
@@ -216,6 +165,8 @@ The tool `go run build/backport-locale.go` can be used to backport locales from ## Code review +How labels, milestones, and the merge queue work is documented in [docs/community-governance.md](docs/community-governance.md). + ### Pull request format Please try to make your pull request easy to review for us. \ @@ -260,29 +211,6 @@ Fixes/Closes/Resolves #. to your summary. \ Each issue that will be closed must stand on a separate line. -### Milestone - -A PR should only be assigned to a milestone if it will likely be merged into the given version. \ -As a rule of thumb, assume that a PR will stay open for an additional month for every 100 added lines. \ -PRs without a milestone may not be merged. - -### Labels - -Almost all labels used inside Gitea can be classified as one of the following: - -- `modifies/…`: Determines which parts of the codebase are affected. These labels will be set through the CI. -- `topic/…`: Determines the conceptual component of Gitea that is affected, i.e. issues, projects, or authentication. At best, PRs should only target one component but there might be overlap. Must be set manually. -- `type/…`: Determines the type of an issue or PR (feature, refactoring, docs, bug, …). If GitHub supported scoped labels, these labels would be exclusive, so you should set **exactly** one, not more or less (every PR should fall into one of the provided categories, and only one). -- `issue/…` / `pr/…`: Labels that are specific to issues or PRs respectively and that are only necessary in a given context, i.e. `issue/not-a-bug` or `pr/need-2-approvals` - -Every PR should be labeled correctly with every label that applies. - -There are also some labels that will be managed automatically.\ -In particular, these are - -- the amount of pending required approvals -- has all `backport`s or needs a manual backport - ### Breaking PRs #### What is a breaking PR? 
@@ -311,165 +239,29 @@ Breaking PRs will not be merged as long as not both of these requirements are me ### Maintaining open PRs -The moment you create a non-draft PR or the moment you convert a draft PR to a non-draft PR is the moment code review starts for it. \ -Once that happens, do not rebase or squash your branch anymore as it makes it difficult to review the new changes. \ -Merge the base branch into your branch only when you really need to, i.e. because of conflicting changes in the mean time. \ -This reduces unnecessary CI runs. \ -Don't worry about merge commits messing up your commit history as every PR will be squash merged. \ -This means that all changes are joined into a single new commit whose message is as described below. +Code review starts when you open a non-draft PR or move a draft out of draft state. After that, do not rebase or squash your branch; it makes new changes harder to review. -### Getting PRs merged +Merge the base branch into yours only when you need to, for example because of conflicting changes elsewhere. That limits unnecessary CI runs. -Changes to Gitea must be reviewed before they are accepted — no matter who -makes the change, even if they are an owner or a maintainer. \ -The only exception are critical bugs that prevent Gitea from being compiled or started. \ -Specifically, we require two approvals from maintainers for every PR. \ -Once this criteria has been met, your PR receives the `lgtm/done` label. \ -From this point on, your only responsibility is to fix merge conflicts or respond to/implement requests by maintainers. \ -It is the responsibility of the maintainers from this point to get your PR merged. +Every PR is squash-merged, so merge commits on your branch do not matter for final history. The squash produces a single commit; mergers follow the [commit message format](docs/community-governance.md#commit-messages) in the governance guide. 
-If a PR has the `lgtm/done` label and there are no open discussions or merge conflicts anymore, any maintainer can add the `reviewed/wait-merge` label. \ -This label means that the PR is part of the merge queue and will be merged as soon as possible. \ -The merge queue will be cleared in the order of the list below: +### Reviewing PRs - +Maintainers are encouraged to review pull requests in areas where they have expertise or particular interest. -Gitea uses it's own tool, the to automate parts of the review process. \ -This tool does the things listed below automatically: +#### For PR authors -- create a backport PR if needed once the initial PR was merged -- remove the PR from the merge queue after the PR merged -- keep the oldest branch in the merge queue up to date with merges +- **Response**: When answering reviewer questions, use real-world cases or examples and avoid speculation. +- **Discussion**: A discussion is always welcome and should be used to clarify the changes and the intent of the PR. +- **Help**: If you need help with the PR or comments are unclear, ask for clarification. -### Final call - -If a PR has been ignored for more than 7 days with no comments or reviews, and the author or any maintainer believes it will not survive a long wait (such as a refactoring PR), they can send "final call" to the TOC by mentioning them in a comment. - -After another 7 days, if there is still zero approval, this is considered a polite refusal, and the PR will be closed to avoid wasting further time. Therefore, the "final call" has a cost, and should be used cautiously. - -However, if there are no objections from maintainers, the PR can be merged with only one approval from the TOC (not the author). - -### Commit messages - -Mergers are able and required to rewrite the PR title and summary (the first comment of a PR) so that it can produce an easily understandable commit message if necessary. 
\ -The final commit message should no longer contain any uncertainty such as `hopefully, won't happen anymore`. Replace uncertainty with certainty. - -#### PR Co-authors - -A person counts as a PR co-author the moment they (co-)authored a commit that is not simply a `Merge base branch into branch` commit. \ -Mergers are required to remove such "false-positive" co-authors when writing the commit message. \ -The true co-authors must remain in the commit message. - -#### PRs targeting `main` - -The commit message of PRs targeting `main` is always - -```bash -$PR_TITLE ($PR_INDEX) - -$REWRITTEN_PR_SUMMARY -``` - -#### Backport PRs - -The commit message of backport PRs is always - -```bash -$PR_TITLE ($INITIAL_PR_INDEX) ($BACKPORT_PR_INDEX) - -$REWRITTEN_PR_SUMMARY -``` +Guidance for reviewers, the merge queue, and the squash commit message format is in [docs/community-governance.md](docs/community-governance.md). ## Documentation If you add a new feature or change an existing aspect of Gitea, the documentation for that feature must be created or updated in another PR at [https://gitea.com/gitea/docs](https://gitea.com/gitea/docs). **The docs directory on main repository will be removed at some time. We will have a yaml file to store configuration file's meta data. After that completed, configuration documentation should be in the main repository.** -## API v1 - -The API is documented by [swagger](https://gitea.com/api/swagger) and is based on [the GitHub API](https://docs.github.com/en/rest). - -### GitHub API compatibility - -Gitea's API should use the same endpoints and fields as the GitHub API as far as possible, unless there are good reasons to deviate. \ -If Gitea provides functionality that GitHub does not, a new endpoint can be created. \ -If information is provided by Gitea that is not provided by the GitHub API, a new field can be used that doesn't collide with any GitHub fields. 
\ -Updating an existing API should not remove existing fields unless there is a really good reason to do so. \ -The same applies to status responses. If you notice a problem, feel free to leave a comment in the code for future refactoring to API v2 (which is currently not planned). - -### Adding/Maintaining API routes - -All expected results (errors, success, fail messages) must be documented ([example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/repo/issue.go#L319-L327)). \ -All JSON input types must be defined as a struct in [modules/structs/](modules/structs/) ([example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/modules/structs/issue.go#L76-L91)) \ -and referenced in [routers/api/v1/swagger/options.go](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/swagger/options.go). \ -They can then be used like [this example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/repo/issue.go#L318). \ -All JSON responses must be defined as a struct in [modules/structs/](modules/structs/) ([example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/modules/structs/issue.go#L36-L68)) \ -and referenced in its category in [routers/api/v1/swagger/](routers/api/v1/swagger/) ([example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/swagger/issue.go#L11-L16)) \ -They can be used like [this example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/repo/issue.go#L277-L279). - -### When to use what HTTP method - -In general, HTTP methods are chosen as follows: - -- **GET** endpoints return the requested object(s) and status **OK (200)** -- **DELETE** endpoints return the status **No Content (204)** and no content either -- **POST** endpoints are used to **create** new objects (e.g. 
a User) and return the status **Created (201)** and the created object -- **PUT** endpoints are used to **add/assign** existing Objects (e.g. a user to a team) and return the status **No Content (204)** and no content either -- **PATCH** endpoints are used to **edit/change** an existing object and return the changed object and the status **OK (200)** - -### Requirements for API routes - -All parameters of endpoints changing/editing an object must be optional (except the ones to identify the object, which are required). - -Endpoints returning lists must - -- support pagination (`page` & `limit` options in query) -- set `X-Total-Count` header via **SetTotalCountHeader** ([example](https://github.com/go-gitea/gitea/blob/7aae98cc5d4113f1e9918b7ee7dd09f67c189e3e/routers/api/v1/repo/issue.go#L444)) - -## Backports and Frontports - -### What is backported? - -We backport PRs given the following circumstances: - -1. Feature freeze is active, but `-rc0` has not been released yet. Here, we backport as much as possible. -2. `rc0` has been released. Here, we only backport bug- and security-fixes, and small enhancements. Large PRs such as refactors are not backported anymore. -3. We never backport new features. -4. We never backport breaking changes except when - 1. The breaking change has no effect on the vast majority of users - 2. The component triggering the breaking change is marked as experimental - -### How to backport? - -In the past, it was necessary to manually backport your PRs. \ -Now, that's not a requirement anymore as our [backport bot](https://github.com/GiteaBot) tries to create backports automatically once the PR is merged when the PR - -- does not have the label `backport/manual` -- has the label `backport/` - -The `backport/manual` label signifies either that you want to backport the change yourself, or that there were conflicts when backporting, thus you **must** do it yourself. 
- -### Format of backport PRs - -The title of backport PRs should be - -``` - (#) -``` - -The first two lines of the summary of the backporting PR should be - -``` -Backport # - -``` - -with the rest of the summary and labels matching the original PR. - -### Frontports - -Frontports behave exactly as described above for backports. - ## Developer Certificate of Origin (DCO) We consider the act of contributing to the code by submitting a Pull Request as the "Sign off" or agreement to the certifications and terms of the [DCO](DCO) and [MIT license](LICENSE). \ @@ -483,148 +275,3 @@ Signed-off-by: Joe Smith If you set the `user.name` and `user.email` Git config options, you can add the line to the end of your commits automatically with `git commit -s`. We assume in good faith that the information you provide is legally binding. - -## Release Cycle - -We adopted a release schedule to streamline the process of working on, finishing, and issuing releases. \ -The overall goal is to make a major release every three or four months, which breaks down into two or three months of general development followed by one month of testing and polishing known as the release freeze. \ -All the feature pull requests should be -merged before feature freeze. All feature pull requests haven't been merged before this feature freeze will be moved to next milestone, please notice our feature freeze announcement on discord. And, during the frozen period, a corresponding -release branch is open for fixes backported from main branch. Release candidates -are made during this period for user testing to -obtain a final version that is maintained in this branch. - -During a development cycle, we may also publish any necessary minor releases -for the previous version. For example, if the latest, published release is -v1.2, then minor changes for the previous release—e.g., v1.1.0 -> v1.1.1—are -still possible. - -## Maintainers - -To make sure every PR is checked, we have [maintainers](MAINTAINERS). 
\ -Every PR **must** be reviewed by at least two maintainers (or owners) before it can get merged. \ -For refactoring PRs after a week and documentation only PRs, the approval of only one maintainer is enough. \ -A maintainer should be a contributor of Gitea and contributed at least -4 accepted PRs. A contributor should apply as a maintainer in the -[Discord](https://discord.gg/Gitea) `#develop` channel. The team maintainers may invite the contributor. A maintainer -should spend some time on code reviews. If a maintainer has no -time to do that, they should apply to leave the maintainers team -and we will give them the honor of being a member of the [advisors -team](https://github.com/orgs/go-gitea/teams/advisors). Of course, if -an advisor has time to code review, we will gladly welcome them back -to the maintainers team. If a maintainer is inactive for more than 3 -months and forgets to leave the maintainers team, the owners may move -him or her from the maintainers team to the advisors team. -For security reasons, Maintainers should use 2FA for their accounts and -if possible provide GPG signed commits. -https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/ -https://help.github.com/articles/signing-commits-with-gpg/ - -Furthermore, any account with write access (like bots and TOC members) **must** use 2FA. -https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/ - -## Technical Oversight Committee (TOC) - -At the start of 2023, the `Owners` team was dissolved. Instead, the governance charter proposed a technical oversight committee (TOC) which expands the ownership team of the Gitea project from three elected positions to six positions. Three positions are elected as it has been over the past years, and the other three consist of appointed members from the Gitea company. 
-https://blog.gitea.com/quarterly-23q1/ - -### TOC election process - -Any maintainer is eligible to be part of the community TOC if they are not associated with the Gitea company. -A maintainer can either nominate themselves, or can be nominated by other maintainers to be a candidate for the TOC election. -If you are nominated by someone else, you must first accept your nomination before the vote starts to be a candidate. - -The TOC is elected for one year, the TOC election happens yearly. -After the announcement of the results of the TOC election, elected members have two weeks time to confirm or refuse the seat. -If an elected member does not answer within this timeframe, they are automatically assumed to refuse the seat. -Refusals result in the person with the next highest vote getting the same choice. -As long as seats are empty in the TOC, members of the previous TOC can fill them until an elected member accepts the seat. - -If an elected member that accepts the seat does not have 2FA configured yet, they will be temporarily counted as `answer pending` until they manage to configure 2FA, thus leaving their seat empty for this duration. 
- -### Current TOC members - -- 2024-01-01 ~ 2024-12-31 - - Company - - [Jason Song](https://gitea.com/wolfogre) - - [Lunny Xiao](https://gitea.com/lunny) - - [Matti Ranta](https://gitea.com/techknowlogick) - - Community - - [6543](https://gitea.com/6543) <6543@obermui.de> - - [delvh](https://gitea.com/delvh) - - [John Olheiser](https://gitea.com/jolheiser) - -### Previous TOC/owners members - -Here's the history of the owners and the time they served: - -- [Lunny Xiao](https://gitea.com/lunny) - 2016, 2017, [2018](https://github.com/go-gitea/gitea/issues/3255), [2019](https://github.com/go-gitea/gitea/issues/5572), [2020](https://github.com/go-gitea/gitea/issues/9230), [2021](https://github.com/go-gitea/gitea/issues/13801), [2022](https://github.com/go-gitea/gitea/issues/17872), 2023 -- [Kim Carlbäcker](https://github.com/bkcsoft) - 2016, 2017 -- [Thomas Boerger](https://gitea.com/tboerger) - 2016, 2017 -- [Lauris Bukšis-Haberkorns](https://gitea.com/lafriks) - [2018](https://github.com/go-gitea/gitea/issues/3255), [2019](https://github.com/go-gitea/gitea/issues/5572), [2020](https://github.com/go-gitea/gitea/issues/9230), [2021](https://github.com/go-gitea/gitea/issues/13801) -- [Matti Ranta](https://gitea.com/techknowlogick) - [2019](https://github.com/go-gitea/gitea/issues/5572), [2020](https://github.com/go-gitea/gitea/issues/9230), [2021](https://github.com/go-gitea/gitea/issues/13801), [2022](https://github.com/go-gitea/gitea/issues/17872), 2023 -- [Andrew Thornton](https://gitea.com/zeripath) - [2020](https://github.com/go-gitea/gitea/issues/9230), [2021](https://github.com/go-gitea/gitea/issues/13801), [2022](https://github.com/go-gitea/gitea/issues/17872), 2023 -- [6543](https://gitea.com/6543) - 2023 -- [John Olheiser](https://gitea.com/jolheiser) - 2023 -- [Jason Song](https://gitea.com/wolfogre) - 2023 - -## Governance Compensation - -Each member of the community elected TOC will be granted $500 each month as compensation for their work. 
- -Furthermore, any community release manager for a specific release or LTS will be compensated $500 for the delivery of said release. - -These funds will come from community sources like the OpenCollective rather than directly from the company. -Only non-company members are eligible for this compensation, and if a member of the community TOC takes the responsibility of release manager, they would only be compensated for their TOC duties. -Gitea Ltd employees are not eligible to receive any funds from the OpenCollective unless it is reimbursement for a purchase made for the Gitea project itself. - -## TOC & Working groups - -With Gitea covering many projects outside of the main repository, several groups will be created to help focus on specific areas instead of requiring maintainers to be a jack-of-all-trades. Maintainers are of course more than welcome to be part of multiple groups should they wish to contribute in multiple places. - -The currently proposed groups are: - -- **Core Group**: maintain the primary Gitea repository -- **Integration Group**: maintain the Gitea ecosystem's related tools, including go-sdk/tea/changelog/bots etc. -- **Documentation Group**: maintain related documents and repositories -- **Translation Group**: coordinate with translators and maintain translations -- **Security Group**: managed by TOC directly, members are decided by TOC, maintains security patches/responsible for security items - -## Roadmap - -Each year a roadmap will be discussed with the entire Gitea maintainers team, and feedback will be solicited from various stakeholders. -TOC members need to review the roadmap every year and work together on the direction of the project. - -When a vote is required for a proposal or other change, the vote of community elected TOC members count slightly more than the vote of company elected TOC members. With this approach, we both avoid ties and ensure that changes align with the mission statement and community opinion. 
- -You can visit our roadmap on the wiki. - -## Versions - -Gitea has the `main` branch as a tip branch and has version branches -such as `release/v1.19`. `release/v1.19` is a release branch and we will -tag `v1.19.0` for binary download. If `v1.19.0` has bugs, we will accept -pull requests on the `release/v1.19` branch and publish a `v1.19.1` tag, -after bringing the bug fix also to the main branch. - -Since the `main` branch is a tip version, if you wish to use Gitea -in production, please download the latest release tag version. All the -branches will be protected via GitHub, all the PRs to every branch must -be reviewed by two maintainers and must pass the automatic tests. - -## Releasing Gitea - -- Let $vmaj, $vmin and $vpat be Major, Minor and Patch version numbers, $vpat should be rc1, rc2, 0, 1, ...... $vmaj.$vmin will be kept the same as milestones on github or gitea in future. -- Before releasing, confirm all the version's milestone issues or PRs has been resolved. Then discuss the release on Discord channel #maintainers and get agreed with almost all the owners and mergers. Or you can declare the version and if nobody is against it in about several hours. -- If this is a big version first you have to create PR for changelog on branch `main` with PRs with label `changelog` and after it has been merged do following steps: - - Create `-dev` tag as `git tag -s -F release.notes v$vmaj.$vmin.0-dev` and push the tag as `git push origin v$vmaj.$vmin.0-dev`. - - When CI has finished building tag then you have to create a new branch named `release/v$vmaj.$vmin` -- If it is bugfix version create PR for changelog on branch `release/v$vmaj.$vmin` and wait till it is reviewed and merged. -- Add a tag as `git tag -s -F release.notes v$vmaj.$vmin.$`, release.notes file could be a temporary file to only include the changelog this version which you added to `CHANGELOG.md`. -- And then push the tag as `git push origin v$vmaj.$vmin.$`. 
Drone CI will automatically create a release and upload all the compiled binary. (But currently it doesn't add the release notes automatically. Maybe we should fix that.) -- If needed send a frontport PR for the changelog to branch `main` and update the version in `docs/config.yaml` to refer to the new version. -- Send PR to [blog repository](https://gitea.com/gitea/blog) announcing the release. -- Verify all release assets were correctly published through CI on dl.gitea.com and GitHub releases. Once ACKed: - - bump the version of https://dl.gitea.com/gitea/version.json - - merge the blog post PR - - announce the release in discord `#announcements` diff --git a/Makefile b/Makefile index 8b0adf1906..e621cc362f 100644 --- a/Makefile +++ b/Makefile @@ -14,7 +14,6 @@ XGO_VERSION := go-1.25.x AIR_PACKAGE ?= github.com/air-verse/air@v1 EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3 -GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.9.2 GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.11.4 GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.15 MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.8.0 @@ -206,7 +205,7 @@ clean: ## delete backend and integration files .PHONY: fmt fmt: ## format the Go and template code - @GOFUMPT_PACKAGE=$(GOFUMPT_PACKAGE) $(GO) run tools/code-batch-process.go gitea-fmt -w '{file-list}' + $(GO) run $(GOLANGCI_LINT_PACKAGE) fmt $(eval TEMPLATES := $(shell find templates -type f -name '*.tmpl')) @# strip whitespace after '{{' or '(' and before '}}' or ')' unless there is only @# whitespace before it @@ -278,10 +277,10 @@ lint-frontend: lint-js lint-css ## lint frontend files lint-frontend-fix: lint-js-fix lint-css-fix ## lint frontend files and fix issues .PHONY: lint-backend -lint-backend: lint-go lint-go-gitea-vet lint-editorconfig ## lint backend files +lint-backend: lint-go lint-editorconfig ## lint backend files .PHONY: lint-backend-fix 
-lint-backend-fix: lint-go-fix lint-go-gitea-vet lint-editorconfig ## lint backend files and fix issues +lint-backend-fix: lint-go-fix lint-editorconfig ## lint backend files and fix issues .PHONY: lint-js lint-js: node_modules ## lint js and ts files @@ -336,11 +335,6 @@ lint-go-windows: @GOOS= GOARCH= $(GO) install $(GOLANGCI_LINT_PACKAGE) golangci-lint run -.PHONY: lint-go-gitea-vet -lint-go-gitea-vet: ## lint go files with gitea-vet - @echo "Running gitea-vet..." - @$(GO) vet -vettool="$(shell GOOS= GOARCH= go tool -n gitea-vet)" ./... - .PHONY: lint-editorconfig lint-editorconfig: @echo "Running editorconfig check..." @@ -525,7 +519,7 @@ test-mssql-migration: migrations.mssql.test migrations.individual.mssql.test .PHONY: playwright playwright: deps-frontend @# on GitHub Actions VMs, playwright's system deps are pre-installed - @pnpm exec playwright install $(if $(GITHUB_ACTIONS),,--with-deps) chromium $(if $(CI),firefox) $(PLAYWRIGHT_FLAGS) + @pnpm exec playwright install $(if $(GITHUB_ACTIONS),,--with-deps) chromium firefox $(PLAYWRIGHT_FLAGS) .PHONY: test-e2e test-e2e: playwright $(EXECUTABLE_E2E) @@ -730,7 +724,6 @@ deps-backend: ## install backend dependencies deps-tools: ## install tool dependencies $(GO) install $(AIR_PACKAGE) & \ $(GO) install $(EDITORCONFIG_CHECKER_PACKAGE) & \ - $(GO) install $(GOFUMPT_PACKAGE) & \ $(GO) install $(GOLANGCI_LINT_PACKAGE) & \ $(GO) install $(GXZ_PACKAGE) & \ $(GO) install $(MISSPELL_PACKAGE) & \ diff --git a/assets/go-licenses.json b/assets/go-licenses.json index ff946e4fca..61bf76702c 100644 --- a/assets/go-licenses.json +++ b/assets/go-licenses.json @@ -1194,11 +1194,6 @@ "path": "github.com/yuin/goldmark-highlighting/v2/LICENSE", "licenseText": "MIT License\n\nCopyright (c) 2019 Yusuke Inuzuka\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation 
the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n" }, - { - "name": "github.com/yuin/goldmark-meta", - "path": "github.com/yuin/goldmark-meta/LICENSE", - "licenseText": "MIT License\n\nCopyright (c) 2019 Yusuke Inuzuka\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n" - }, { "name": "github.com/yuin/goldmark", "path": "github.com/yuin/goldmark/LICENSE", @@ -1329,11 +1324,6 @@ "path": "gopkg.in/warnings.v0/LICENSE", "licenseText": "Copyright (c) 2016 Péter Surányi.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n" }, - { - "name": "gopkg.in/yaml.v2", - "path": "gopkg.in/yaml.v2/LICENSE", - "licenseText": " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. 
Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"{}\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright {yyyy} {name of copyright owner}\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n" - }, { "name": "gopkg.in/yaml.v3", "path": "gopkg.in/yaml.v3/LICENSE", diff --git a/build/generate-go-licenses.go b/build/generate-go-licenses.go index b710fdb841..057e6a6e49 100644 --- a/build/generate-go-licenses.go +++ b/build/generate-go-licenses.go @@ -19,6 +19,7 @@ import ( // regexp is based on go-license, excluding README and NOTICE // https://github.com/google/go-licenses/blob/master/licenses/find.go +// also defined 
in vite.config.ts var licenseRe = regexp.MustCompile(`^(?i)((UN)?LICEN(S|C)E|COPYING).*$`) // primaryLicenseRe matches exact primary license filenames without suffixes. diff --git a/cmd/generate.go b/cmd/generate.go index b94ff79aae..21f8b42bff 100644 --- a/cmd/generate.go +++ b/cmd/generate.go @@ -78,11 +78,7 @@ func runGenerateInternalToken(_ context.Context, c *cli.Command) error { } func runGenerateLfsJwtSecret(_ context.Context, c *cli.Command) error { - _, jwtSecretBase64, err := generate.NewJwtSecretWithBase64() - if err != nil { - return err - } - + _, jwtSecretBase64 := generate.NewJwtSecretWithBase64() fmt.Printf("%s", jwtSecretBase64) if isatty.IsTerminal(os.Stdout.Fd()) { diff --git a/custom/conf/app.example.ini b/custom/conf/app.example.ini index 4df50f5cc6..ef276e4da5 100644 --- a/custom/conf/app.example.ini +++ b/custom/conf/app.example.ini @@ -41,10 +41,10 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; App name that shows in every page title -APP_NAME = ; Gitea: Git with a cup of tea +;APP_NAME = Gitea: Git with a cup of tea ;; ;; RUN_USER will automatically detect the current user - but you can set it here change it if you run locally -RUN_USER = ; git +;RUN_USER = ;; ;; Application run mode, affects performance and debugging: "dev" or "prod", default is "prod" ;; Mode "dev" makes Gitea easier to develop and debug, values other than "dev" are treated as "prod" which is for production use. @@ -175,14 +175,15 @@ RUN_USER = ; git ;; The port number the builtin SSH server should listen on, defaults to SSH_PORT ;SSH_LISTEN_PORT = ;; -;; Root path of SSH directory, default is '~/.ssh', but you have to use '/home/git/.ssh'. +;; Root path of SSH user directory for the system's standalone SSH server if Gitea is not using its builtin SSH server. +;; Default is the '.ssh' directory in the run user's home directory. 
;SSH_ROOT_PATH = ;; -;; Gitea will create a authorized_keys file by default when it is not using the internal ssh server +;; Gitea will create an authorized_keys file by default when it is not using the builtin SSH server ;; If you intend to use the AuthorizedKeysCommand functionality then you should turn this off. ;SSH_CREATE_AUTHORIZED_KEYS_FILE = true ;; -;; Gitea will create a authorized_principals file by default when it is not using the internal ssh server +;; Gitea will create an authorized_principals file by default when it is not using the builtin SSH server ;; If you intend to use the AuthorizedPrincipalsCommand functionality then you should turn this off. ;SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE = true ;; @@ -460,6 +461,11 @@ INTERNAL_TOKEN = ;; Name of cookie used to store authentication information. ;COOKIE_REMEMBER_NAME = gitea_incredible ;; +;; URL or path that Gitea should redirect users to *after* performing its own logout. +;; Use this, if needed, when authentication is handled by a reverse proxy or SSO. +;; For example: "/my-sso/logout?return=/my-sso/home" +;REVERSE_PROXY_LOGOUT_REDIRECT = +;; ;; Reverse proxy authentication header name of user name, email, and full name ;REVERSE_PROXY_AUTHENTICATION_USER = X-WEBAUTH-USER ;REVERSE_PROXY_AUTHENTICATION_EMAIL = X-WEBAUTH-EMAIL @@ -1178,16 +1184,16 @@ LEVEL = Info ;[repository.release] ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. +;; Comma-separated list of allowed release attachment file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. ;ALLOWED_TYPES = ;; ;; Number of releases that are displayed on release page ;DEFAULT_PAGING_NUM = 10 ;; -;; Max size of each file in megabytes. 
Defaults to 2GB +;; Max size of each release attachment file in megabytes. Defaults to 2GB ;FILE_MAX_SIZE = 2048 ;; -;; Max number of files per upload. Defaults to 5 +;; Max number of release attachment files per upload. Defaults to 5 ;MAX_FILES = 5 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1994,16 +2000,18 @@ LEVEL = Info ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; -;; Whether issue and pull request attachments are enabled. Defaults to `true` +;; Whether issue, pull-request and release attachments are enabled. Defaults to `true` +;; ALLOWED_TYPES/MAX_SIZE/MAX_FILES in this section only affect issue and pull-request attachments, not release attachments. +;; Release attachment has its own config options in [repository.release] section. ;ENABLED = true ;; -;; Comma-separated list of allowed file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. +;; Comma-separated list of allowed issue/pull-request attachment file extensions (`.zip`), mime types (`text/plain`) or wildcard type (`image/*`, `audio/*`, `video/*`). Empty value or `*/*` allows all types. ;ALLOWED_TYPES = .avif,.cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip ;; -;; Max size of each file. Defaults to 100MB +;; Max size of each issue/pull-request attachment file. Defaults to 100MB ;MAX_SIZE = 100 ;; -;; Max number of files per upload. Defaults to 5 +;; Max number of issue/pull-request attachment files per upload. 
Defaults to 5 ;MAX_FILES = 5 ;; ;; Storage type for attachments, `local` for local disk or `minio` for s3 compatible @@ -2790,6 +2798,8 @@ LEVEL = Info ;LIMIT_SIZE_SWIFT = -1 ;; Maximum size of a Vagrant upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) ;LIMIT_SIZE_VAGRANT = -1 +;; Maximum size of a Terraform state upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) +;LIMIT_SIZE_TERRAFORM_STATE = -1 ;; Enable RPM re-signing by default. (It will overwrite the old signature ,using v4 format, not compatible with CentOS 6 or older) ;DEFAULT_RPM_SIGN_ENABLED = false ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; diff --git a/docker/root/etc/s6/openssh/finish b/docker/root/etc/s6/openssh/finish index 06bd986563..8e89b35c51 100755 --- a/docker/root/etc/s6/openssh/finish +++ b/docker/root/etc/s6/openssh/finish @@ -1,2 +1,8 @@ #!/bin/bash +# $1 = exit code of the run script, $2 = signal +if [ "$1" -ne 0 ]; then + # avoid immediately restarting the sshd service, which may cause CPU 100% if the error (permission, configuration) is not fixed + echo "openssh failed with exit code $1 - waiting a short delay before attempting a restart" + sleep 3 +fi exit 0 diff --git a/docs/community-governance.md b/docs/community-governance.md new file mode 100644 index 0000000000..dbf2481329 --- /dev/null +++ b/docs/community-governance.md @@ -0,0 +1,198 @@ +# Community governance and review process + +This document describes maintainer expectations, project governance, and the detailed pull request review workflow (labels, merge queue, commit message format for mergers). For what contributors should do when opening and updating a PR, see [CONTRIBUTING.md](../CONTRIBUTING.md). + +## Code review + +### Milestone + +A PR should only be assigned to a milestone if it will likely be merged into the given version. \ +PRs without a milestone may not be merged. 
+ +### Labels + +Almost all labels used inside Gitea can be classified as one of the following: + +- `modifies/…`: Determines which parts of the codebase are affected. These labels will be set through the CI. +- `topic/…`: Determines the conceptual component of Gitea that is affected, i.e. issues, projects, or authentication. At best, PRs should only target one component but there might be overlap. Must be set manually. +- `type/…`: Determines the type of an issue or PR (feature, refactoring, docs, bug, …). If GitHub supported scoped labels, these labels would be exclusive, so you should set **exactly** one, not more or less (every PR should fall into one of the provided categories, and only one). +- `issue/…` / `lgtm/…`: Labels that are specific to issues or PRs respectively and that are only necessary in a given context, i.e. `issue/not-a-bug` or `lgtm/need 2` + +Every PR should be labeled correctly with every label that applies. + +There are also some labels that will be managed automatically.\ +In particular, these are + +- the amount of pending required approvals +- has all `backport`s or needs a manual backport + +### Reviewing PRs + +Maintainers are encouraged to review pull requests in areas where they have expertise or particular interest. + +#### For reviewers + +- **Verification**: Verify that the PR accurately reflects the changes, and verify that the tests and documentation are complete and aligned with the implementation. +- **Actionable feedback**: Say what should change and why, and distinguish required changes from optional suggestions. +- **Feedback**: Focus feedback on the issue itself and avoid comments about the contributor's abilities. +- **Request changes**: If you request changes (i.e., block a PR), give a clear rationale and, whenever possible, a concrete path to resolution. +- **Approval**: Only approve a PR when you are fully satisfied with its current state - "rubber-stamp" approvals need to be highlighted as such. 
+ +### Getting PRs merged + +Changes to Gitea must be reviewed before they are accepted, including changes from owners and maintainers. The exception is critical bugs that prevent Gitea from compiling or starting. + +We require two maintainer approvals for every PR. When that is satisfied, your PR gets the `lgtm/done` label. After that, you mainly fix merge conflicts and respond to or implement maintainer requests; maintainers drive getting the PR merged. + +If a PR has `lgtm/done`, no open discussions, and no merge conflicts, any maintainer may add `reviewed/wait-merge`. That puts the PR in the merge queue. PRs are merged from the queue in the order of this list: + + + +Gitea uses its own tool, the [backport bot](https://github.com/GiteaBot), to automate parts of the review process. The backporter: + +- Creates a backport PR when needed after the initial PR merges. +- Removes the PR from the merge queue after it merges. +- Keeps the oldest branch in the merge queue up to date with merges. + +### Final call + +If a PR has been ignored for more than 7 days with no comments or reviews, and the author or any maintainer believes it will not survive a long wait (such as a refactoring PR), they can send "final call" to the TOC by mentioning them in a comment. + +After another 7 days, if there is still zero approval, this is considered a polite refusal, and the PR will be closed to avoid wasting further time. Therefore, the "final call" has a cost, and should be used cautiously. + +However, if there are no objections from maintainers, the PR can be merged with only one approval from the TOC (not the author). + +### Commit messages + +Mergers are required to rewrite the PR title and the first comment (the summary) when necessary so the squash commit message is clear. + +The final commit message should not hedge: replace phrases like `hopefully, won't happen anymore` with definite wording. 
+ +#### PR Co-authors + +A person counts as a PR co-author once they (co-)authored a commit that is not simply a `Merge base branch into branch` commit. Mergers must remove such false-positive co-authors when writing the squash message. Every true co-author must remain in the commit message. + +#### PRs targeting `main` + +The commit message of PRs targeting `main` is always + +```bash +$PR_TITLE ($PR_INDEX) + +$REWRITTEN_PR_SUMMARY +``` + +#### Backport PRs + +The commit message of backport PRs is always + +```bash +$PR_TITLE ($INITIAL_PR_INDEX) ($BACKPORT_PR_INDEX) + +$REWRITTEN_PR_SUMMARY +``` + +## Maintainers + +We list [maintainers](../MAINTAINERS) so every PR gets proper review. + +#### Review expectations + +Every PR **must** be reviewed by at least two maintainers (or owners) before merge. **Exception:** after one week, refactoring PRs and documentation-only PRs need only one maintainer approval. + +Maintainers are expected to spend time on code reviews. + +#### Becoming a maintainer + +A maintainer should already be a Gitea contributor with at least four merged PRs. To apply, use the [Discord](https://discord.gg/Gitea) `#develop` channel. Maintainer teams may also invite contributors. + +#### Stepping down, advisors, and inactivity + +If you cannot keep reviewing, apply to leave the maintainers team. You can join the [advisors team](https://github.com/orgs/go-gitea/teams/advisors); advisors who want to review again are welcome back as maintainers. + +If a maintainer is inactive for more than three months and has not left the team, owners may move them to the advisors team. 
+ +#### Account security + +For security, maintainers should enable 2FA and sign commits with GPG when possible: + +- [Two-factor authentication](https://docs.github.com/en/authentication/securing-your-account-with-two-factor-authentication-2fa/configuring-two-factor-authentication) +- [Signing commits with GPG](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) + +Any account with write access (including bots and TOC members) **must** use [2FA](https://docs.github.com/en/authentication/securing-your-account-with-two-factor-authentication-2fa/configuring-two-factor-authentication). + +## Technical Oversight Committee (TOC) + +At the start of 2023, the `Owners` team was dissolved. Instead, the governance charter proposed a technical oversight committee (TOC) which expands the ownership team of the Gitea project from three elected positions to six positions. Three positions are elected as it has been over the past years, and the other three consist of appointed members from the Gitea company. +https://blog.gitea.com/quarterly-23q1/ + +### TOC election process + +Any maintainer is eligible to be part of the community TOC if they are not associated with the Gitea company. +A maintainer can either nominate themselves, or can be nominated by other maintainers to be a candidate for the TOC election. +If you are nominated by someone else, you must first accept your nomination before the vote starts to be a candidate. + +The TOC is elected for one year, the TOC election happens yearly. +After the announcement of the results of the TOC election, elected members have two weeks time to confirm or refuse the seat. +If an elected member does not answer within this timeframe, they are automatically assumed to refuse the seat. +Refusals result in the person with the next highest vote getting the same choice. +As long as seats are empty in the TOC, members of the previous TOC can fill them until an elected member accepts the seat. 
+ +If an elected member that accepts the seat does not have 2FA configured yet, they will be temporarily counted as `answer pending` until they manage to configure 2FA, thus leaving their seat empty for this duration. + +### Current TOC members + +- 2024-01-01 ~ 2024-12-31 + - Company + - [Jason Song](https://gitea.com/wolfogre) + - [Lunny Xiao](https://gitea.com/lunny) + - [Matti Ranta](https://gitea.com/techknowlogick) + - Community + - [6543](https://gitea.com/6543) <6543@obermui.de> + - [delvh](https://gitea.com/delvh) + - [John Olheiser](https://gitea.com/jolheiser) + +### Previous TOC/owners members + +Here's the history of the owners and the time they served: + +- [Lunny Xiao](https://gitea.com/lunny) - 2016, 2017, [2018](https://github.com/go-gitea/gitea/issues/3255), [2019](https://github.com/go-gitea/gitea/issues/5572), [2020](https://github.com/go-gitea/gitea/issues/9230), [2021](https://github.com/go-gitea/gitea/issues/13801), [2022](https://github.com/go-gitea/gitea/issues/17872), 2023 +- [Kim Carlbäcker](https://github.com/bkcsoft) - 2016, 2017 +- [Thomas Boerger](https://gitea.com/tboerger) - 2016, 2017 +- [Lauris Bukšis-Haberkorns](https://gitea.com/lafriks) - [2018](https://github.com/go-gitea/gitea/issues/3255), [2019](https://github.com/go-gitea/gitea/issues/5572), [2020](https://github.com/go-gitea/gitea/issues/9230), [2021](https://github.com/go-gitea/gitea/issues/13801) +- [Matti Ranta](https://gitea.com/techknowlogick) - [2019](https://github.com/go-gitea/gitea/issues/5572), [2020](https://github.com/go-gitea/gitea/issues/9230), [2021](https://github.com/go-gitea/gitea/issues/13801), [2022](https://github.com/go-gitea/gitea/issues/17872), 2023 +- [Andrew Thornton](https://gitea.com/zeripath) - [2020](https://github.com/go-gitea/gitea/issues/9230), [2021](https://github.com/go-gitea/gitea/issues/13801), [2022](https://github.com/go-gitea/gitea/issues/17872), 2023 +- [6543](https://gitea.com/6543) - 2023 +- [John 
Olheiser](https://gitea.com/jolheiser) - 2023 +- [Jason Song](https://gitea.com/wolfogre) - 2023 + +## Governance Compensation + +Each member of the community elected TOC will be granted $500 each month as compensation for their work. + +Furthermore, any community release manager for a specific release or LTS will be compensated $500 for the delivery of said release. + +These funds will come from community sources like the OpenCollective rather than directly from the company. +Only non-company members are eligible for this compensation, and if a member of the community TOC takes the responsibility of release manager, they would only be compensated for their TOC duties. +Gitea Ltd employees are not eligible to receive any funds from the OpenCollective unless it is reimbursement for a purchase made for the Gitea project itself. + +## TOC & Working groups + +With Gitea covering many projects outside of the main repository, several groups will be created to help focus on specific areas instead of requiring maintainers to be a jack-of-all-trades. Maintainers are of course more than welcome to be part of multiple groups should they wish to contribute in multiple places. + +The currently proposed groups are: + +- **Core Group**: maintain the primary Gitea repository +- **Integration Group**: maintain the Gitea ecosystem's related tools, including go-sdk/tea/changelog/bots etc. +- **Documentation Group**: maintain related documents and repositories +- **Translation Group**: coordinate with translators and maintain translations +- **Security Group**: managed by TOC directly, members are decided by TOC, maintains security patches/responsible for security items + +## Roadmap + +Each year a roadmap will be discussed with the entire Gitea maintainers team, and feedback will be solicited from various stakeholders. +TOC members need to review the roadmap every year and work together on the direction of the project. 
+ +When a vote is required for a proposal or other change, the vote of community elected TOC members count slightly more than the vote of company elected TOC members. With this approach, we both avoid ties and ensure that changes align with the mission statement and community opinion. + +You can visit our roadmap on the wiki. diff --git a/docs/guideline-backend.md b/docs/guideline-backend.md new file mode 100644 index 0000000000..bc3e71113f --- /dev/null +++ b/docs/guideline-backend.md @@ -0,0 +1,58 @@ +# Backend development + +This document covers backend-specific contribution expectations. For general contribution workflow, see [CONTRIBUTING.md](../CONTRIBUTING.md). + +For coding style and architecture, see also the [backend development guideline](https://docs.gitea.com/contributing/guidelines-backend) on the documentation site. + +## Dependencies + +Go dependencies are managed using [Go Modules](https://go.dev/cmd/go/#hdr-Module_maintenance). \ +You can find more details in the [go mod documentation](https://go.dev/ref/mod) and the [Go Modules Wiki](https://github.com/golang/go/wiki/Modules). + +Pull requests should only modify `go.mod` and `go.sum` where it is related to your change, be it a bugfix or a new feature. \ +Apart from that, these files should only be modified by Pull Requests whose only purpose is to update dependencies. + +The `go.mod`, `go.sum` update needs to be justified as part of the PR description, +and must be verified by the reviewers and/or merger to always reference +an existing upstream commit. + +## API v1 + +The API is documented by [swagger](https://gitea.com/api/swagger) and is based on [the GitHub API](https://docs.github.com/en/rest). + +### GitHub API compatibility + +Gitea's API should use the same endpoints and fields as the GitHub API as far as possible, unless there are good reasons to deviate. \ +If Gitea provides functionality that GitHub does not, a new endpoint can be created. 
\ +If information is provided by Gitea that is not provided by the GitHub API, a new field can be used that doesn't collide with any GitHub fields. \ +Updating an existing API should not remove existing fields unless there is a really good reason to do so. \ +The same applies to status responses. If you notice a problem, feel free to leave a comment in the code for future refactoring to API v2 (which is currently not planned). + +### Adding/Maintaining API routes + +All expected results (errors, success, fail messages) must be documented ([example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/repo/issue.go#L319-L327)). \ +All JSON input types must be defined as a struct in [modules/structs/](modules/structs/) ([example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/modules/structs/issue.go#L76-L91)) \ +and referenced in [routers/api/v1/swagger/options.go](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/swagger/options.go). \ +They can then be used like [this example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/repo/issue.go#L318). \ +All JSON responses must be defined as a struct in [modules/structs/](modules/structs/) ([example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/modules/structs/issue.go#L36-L68)) \ +and referenced in its category in [routers/api/v1/swagger/](routers/api/v1/swagger/) ([example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/swagger/issue.go#L11-L16)) \ +They can be used like [this example](https://github.com/go-gitea/gitea/blob/c620eb5b2d0d874da68ebd734d3864c5224f71f7/routers/api/v1/repo/issue.go#L277-L279). 
+ +### When to use what HTTP method + +In general, HTTP methods are chosen as follows: + +- **GET** endpoints return the requested object(s) and status **OK (200)** +- **DELETE** endpoints return the status **No Content (204)** and no content either +- **POST** endpoints are used to **create** new objects (e.g. a User) and return the status **Created (201)** and the created object +- **PUT** endpoints are used to **add/assign** existing Objects (e.g. a user to a team) and return the status **No Content (204)** and no content either +- **PATCH** endpoints are used to **edit/change** an existing object and return the changed object and the status **OK (200)** + +### Requirements for API routes + +All parameters of endpoints changing/editing an object must be optional (except the ones to identify the object, which are required). + +Endpoints returning lists must + +- support pagination (`page` & `limit` options in query) +- set `X-Total-Count` header via **SetTotalCountHeader** ([example](https://github.com/go-gitea/gitea/blob/7aae98cc5d4113f1e9918b7ee7dd09f67c189e3e/routers/api/v1/repo/issue.go#L444)) diff --git a/docs/guideline-frontend.md b/docs/guideline-frontend.md new file mode 100644 index 0000000000..80ebe82177 --- /dev/null +++ b/docs/guideline-frontend.md @@ -0,0 +1,17 @@ +# Frontend development + +This document covers frontend-specific contribution expectations. For general contribution workflow, see [CONTRIBUTING.md](../CONTRIBUTING.md). + +## Dependencies + +For the frontend, we use [npm](https://www.npmjs.com/). + +The same restrictions apply for frontend dependencies as for [backend dependencies](guideline-backend.md#dependencies), with the exceptions that the files for it are `package.json` and `package-lock.json`, and that new versions must always reference an existing version. 
+ +## Design guideline + +Depending on your change, please read the + +- [backend development guideline](https://docs.gitea.com/contributing/guidelines-backend) +- [frontend development guideline](https://docs.gitea.com/contributing/guidelines-frontend) +- [refactoring guideline](https://docs.gitea.com/contributing/guidelines-refactoring) diff --git a/docs/release-management.md b/docs/release-management.md new file mode 100644 index 0000000000..be8d9e1abf --- /dev/null +++ b/docs/release-management.md @@ -0,0 +1,115 @@ +# Release management + +This document describes the release cycle, backports, versioning, and the release manager checklist. For everyday contribution workflow, see [CONTRIBUTING.md](../CONTRIBUTING.md). + +## Backports and Frontports + +### What is backported? + +We backport PRs given the following circumstances: + +1. Feature freeze is active, but `-rc0` has not been released yet. Here, we backport as much as possible. +2. `rc0` has been released. Here, we only backport bug- and security-fixes, and small enhancements. Large PRs such as refactors are not backported anymore. +3. We never backport new features. +4. We never backport breaking changes except when + 1. The breaking change has no effect on the vast majority of users + 2. The component triggering the breaking change is marked as experimental + +### How to backport? + +In the past, it was necessary to manually backport your PRs. \ +Now, that's not a requirement anymore as our [backport bot](https://github.com/GiteaBot) tries to create backports automatically once the PR is merged when the PR + +- does not have the label `backport/manual` +- has the label `backport/` + +The `backport/manual` label signifies either that you want to backport the change yourself, or that there were conflicts when backporting, thus you **must** do it yourself. 
+ +### Format of backport PRs + +The title of backport PRs should be + +``` +<original PR title> (#<original PR number>) +``` + +The first two lines of the summary of the backporting PR should be + +``` +Backport #<original PR number> + +``` + +with the rest of the summary and labels matching the original PR. + +### Frontports + +Frontports behave exactly as described above for backports. + +## Release Cycle + +We use a release schedule so work, stabilization, and releases stay predictable. + +### Cadence + +- Aim for a major release about every three or four months. +- Roughly two or three months of general development, then about one month of testing and polish called the **release freeze**. +- *Starting with v1.26 the release cycle will be more predictable and follow a more regular schedule.* + +### Release schedule + +We will try to publish a new major version every three months: + +- v1.26.0 in April 2026 +- v1.27.0 in June 2026 +- v1.28.0 in September 2026 +- v1.29.0 in December 2026 + +#### How is the release handled? +- The release manager will tag the release candidate (e.g. `v1.26.0-rc0`) and publish it for testing in the **first week of the release month**. +- If there are no major issues, the release manager will check with the other maintainers and then tag the final release (e.g. `v1.26.0`) in the **one or two weeks following the release candidate**. + +### Feature freeze + +- Merge feature PRs before the freeze when you can. +- Feature PRs still open at the freeze move to the next milestone. Watch Discord for the freeze announcement. +- During the freeze, a **release branch** takes fixes backported from `main`. Release candidates ship for testing; the final release for that line is maintained from that branch. + +### Patch releases + +During a cycle we may ship patch releases for an older line. For example, if the latest release is v1.2, we can still publish v1.1.1 after v1.1.0. + +### End of life (EOL) + +As a standard, we support the two most recent major releases.
For example, if the latest release is v1.26, we support v1.26 and v1.25, but not v1.24 anymore. We will only publish security fixes for the last major release, so if you are using an older release, please upgrade to a supported release as soon as possible. +Also, we always try to support the latest on the main branch, so if you are using the latest on main, you should be fine. + +## Versions + +Gitea has the `main` branch as a tip branch and has version branches +such as `release/v1.19`. `release/v1.19` is a release branch and we will +tag `v1.19.0` for binary download. If `v1.19.0` has bugs, we will accept +pull requests on the `release/v1.19` branch and publish a `v1.19.1` tag, +after bringing the bug fix also to the main branch. + +Since the `main` branch is a tip version, if you wish to use Gitea +in production, please download the latest release tag version. All the +branches will be protected via GitHub, all the PRs to every branch must +be reviewed by two maintainers and must pass the automatic tests. + +## Releasing Gitea + +- Let MAJOR, MINOR and PATCH be Major, Minor and Patch version numbers, PATCH should be rc1, rc2, 0, 1, ...... MAJOR.MINOR will be kept the same as milestones on github or gitea in future. +- Before releasing, confirm all the version's milestone issues or PRs have been resolved. Then discuss the release on Discord channel #maintainers and get agreement from almost all the owners and mergers. Or you can declare the version and proceed if nobody is against it within several hours. +- If this is a big version, first you have to create a PR for the changelog on branch `main` with PRs with label `changelog` and after it has been merged do the following steps: + - Create `-dev` tag as `git tag -s -F release.notes vMAJOR.MINOR.0-dev` and push the tag as `git push origin vMAJOR.MINOR.0-dev`.
+ - When CI has finished building tag then you have to create a new branch named `release/vMAJOR.MINOR` +- If it is a bugfix version, create a PR for the changelog on branch `release/vMAJOR.MINOR` and wait till it is reviewed and merged. +- Add a tag as `git tag -s -F release.notes vMAJOR.MINOR.PATCH`, release.notes file could be a temporary file to only include the changelog this version which you added to `CHANGELOG.md`. +- And then push the tag as `git push origin vMAJOR.MINOR.PATCH`. CI will automatically create a release and upload all the compiled binary. (But currently it doesn't add the release notes automatically. Maybe we should fix that.) +- If needed send a frontport PR for the changelog to branch `main` and update the version in `docs/config.yaml` to refer to the new version. +- Send PR to [blog repository](https://gitea.com/gitea/blog) announcing the release. +- Verify all release assets were correctly published through CI on dl.gitea.com and GitHub releases. Once ACKed: + - bump the version of https://dl.gitea.com/gitea/version.json + - merge the blog post PR + - announce the release in discord `#announcements` diff --git a/eslint.config.ts b/eslint.config.ts index 5b7884bdce..29016ed808 100644 --- a/eslint.config.ts +++ b/eslint.config.ts @@ -574,8 +574,6 @@ export default defineConfig([ 'no-restricted-properties': [2, ...restrictedProperties], 'no-restricted-imports': [2, {paths: [ {name: 'jquery', message: 'Use the global $ instead', allowTypeImports: true}, - {name: 'htmx.org', message: 'Use the global htmx instead', allowTypeImports: true}, - {name: 'idiomorph/htmx', message: 'Loaded in globals.ts', allowTypeImports: true}, ]}], 'no-restricted-syntax': [2, 'WithStatement', 'ForInStatement', 'LabeledStatement', 'SequenceExpression'], 'no-return-assign': [0], @@ -926,6 +924,6 @@ export default defineConfig([ { ...playwright.configs['flat/recommended'], files: ['tests/e2e/**/*.test.ts'], + languageOptions: {globals: {...globals.nodeBuiltin,
...globals.browser}}, rules: { ...playwright.configs['flat/recommended'].rules, 'playwright/expect-expect': [0], @@ -1022,6 +1021,6 @@ export default defineConfig([ }, { files: ['web_src/**/*'], - languageOptions: {globals: {...globals.browser, ...globals.jquery, htmx: false}}, + languageOptions: {globals: {...globals.browser, ...globals.jquery}}, }, ]); diff --git a/flake.lock b/flake.lock index 246cfd4e79..8ec14d2852 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "nixpkgs": { "locked": { - "lastModified": 1774386573, - "narHash": "sha256-4hAV26quOxdC6iyG7kYaZcM3VOskcPUrdCQd/nx8obc=", + "lastModified": 1775710090, + "narHash": "sha256-ar3rofg+awPB8QXDaFJhJ2jJhu+KqN/PRCXeyuXR76E=", "owner": "nixos", "repo": "nixpkgs", - "rev": "46db2e09e1d3f113a13c0d7b81e2f221c63b8ce9", + "rev": "4c1018dae018162ec878d42fec712642d214fdfa", "type": "github" }, "original": { diff --git a/go.mod b/go.mod index f73a4bd08e..d1aac0db90 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module code.gitea.io/gitea -go 1.26.1 +go 1.26.2 // rfc5280 said: "The serial number is an integer assigned by the CA to each certificate." // But some CAs use negative serial number, just relax the check. 
related: @@ -12,7 +12,7 @@ require ( code.gitea.io/sdk/gitea v0.24.1 codeberg.org/gusted/mcaptcha v0.0.0-20220723083913-4f3072e1d570 connectrpc.com/connect v1.19.1 - gitea.com/go-chi/binding v0.0.0-20240430071103-39a851e106ed + gitea.com/go-chi/binding v0.0.0-20260414111559-654cea7ac60a gitea.com/go-chi/cache v0.2.1 gitea.com/go-chi/captcha v0.0.0-20240315150714-fb487f629098 gitea.com/go-chi/session v0.0.0-20251124165456-68e0254e989e @@ -27,7 +27,7 @@ require ( github.com/PuerkitoBio/goquery v1.12.0 github.com/SaveTheRbtz/zstd-seekable-format-go/pkg v0.8.0 github.com/alecthomas/chroma/v2 v2.23.1 - github.com/aws/aws-sdk-go-v2/credentials v1.19.13 + github.com/aws/aws-sdk-go-v2/credentials v1.19.14 github.com/aws/aws-sdk-go-v2/service/codecommit v1.33.12 github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb github.com/blevesearch/bleve/v2 v2.5.7 @@ -49,21 +49,20 @@ require ( github.com/gliderlabs/ssh v0.3.8 github.com/go-chi/chi/v5 v5.2.5 github.com/go-chi/cors v1.2.2 - github.com/go-co-op/gocron/v2 v2.19.1 - github.com/go-enry/go-enry/v2 v2.9.5 + github.com/go-co-op/gocron/v2 v2.20.0 + github.com/go-enry/go-enry/v2 v2.9.6 github.com/go-git/go-billy/v5 v5.8.0 - github.com/go-git/go-git/v5 v5.17.2 + github.com/go-git/go-git/v5 v5.18.0 github.com/go-ldap/ldap/v3 v3.4.13 github.com/go-redsync/redsync/v4 v4.16.0 github.com/go-sql-driver/mysql v1.9.3 - github.com/go-webauthn/webauthn v0.16.1 - github.com/goccy/go-json v0.10.6 + github.com/go-webauthn/webauthn v0.16.4 github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85 github.com/golang-jwt/jwt/v5 v5.3.1 github.com/google/go-github/v84 v84.0.0 github.com/google/licenseclassifier/v2 v2.0.0 - github.com/google/pprof v0.0.0-20260302011040-a15ffb7f9dcc + github.com/google/pprof v0.0.0-20260402051712-545e8a4df936 github.com/google/uuid v1.6.0 github.com/gorilla/feeds v1.2.0 github.com/gorilla/sessions v1.4.0 @@ -75,15 +74,15 @@ require ( 
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 github.com/klauspost/compress v1.18.5 github.com/klauspost/cpuid/v2 v2.3.0 - github.com/lib/pq v1.12.1 + github.com/lib/pq v1.12.3 github.com/markbates/goth v1.82.0 - github.com/mattn/go-isatty v0.0.20 - github.com/mattn/go-sqlite3 v1.14.38 - github.com/meilisearch/meilisearch-go v0.36.1 + github.com/mattn/go-isatty v0.0.21 + github.com/mattn/go-sqlite3 v1.14.42 + github.com/meilisearch/meilisearch-go v0.36.2 github.com/mholt/archives v0.1.5 github.com/microcosm-cc/bluemonday v1.0.27 github.com/microsoft/go-mssqldb v1.9.6 - github.com/minio/minio-go/v7 v7.0.99 + github.com/minio/minio-go/v7 v7.0.100 github.com/msteinert/pam/v2 v2.1.0 github.com/nektos/act v0.2.63 github.com/niklasfasching/go-org v1.9.1 @@ -109,17 +108,16 @@ require ( github.com/yohcop/openid-go v1.0.1 github.com/yuin/goldmark v1.8.2 github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc - github.com/yuin/goldmark-meta v1.1.0 gitlab.com/gitlab-org/api/client-go v1.46.0 go.yaml.in/yaml/v4 v4.0.0-rc.3 - golang.org/x/crypto v0.49.0 - golang.org/x/image v0.38.0 - golang.org/x/net v0.52.0 + golang.org/x/crypto v0.50.0 + golang.org/x/image v0.39.0 + golang.org/x/net v0.53.0 golang.org/x/oauth2 v0.36.0 golang.org/x/sync v0.20.0 - golang.org/x/sys v0.42.0 - golang.org/x/text v0.35.0 - google.golang.org/grpc v1.79.3 + golang.org/x/sys v0.43.0 + golang.org/x/text v0.36.0 + google.golang.org/grpc v1.80.0 google.golang.org/protobuf v1.36.11 gopkg.in/ini.v1 v1.67.1 gopkg.in/yaml.v3 v3.0.1 @@ -131,7 +129,6 @@ require ( require ( cloud.google.com/go/compute/metadata v0.9.0 // indirect - code.gitea.io/gitea-vet v0.2.3 // indirect dario.cat/mergo v1.0.2 // indirect filippo.io/edwards25519 v1.2.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect @@ -196,7 +193,8 @@ require ( github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-ini/ini v1.67.0 // indirect 
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect - github.com/go-webauthn/x v0.2.2 // indirect + github.com/go-webauthn/x v0.2.3 // indirect + github.com/goccy/go-json v0.10.6 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect github.com/golang-sql/sqlexp v0.1.0 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect @@ -283,7 +281,6 @@ require ( golang.org/x/tools v0.43.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20260401020348-3a24fdc17823 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect ) ignore ( @@ -308,5 +305,3 @@ replace github.com/Azure/azure-sdk-for-go/sdk/azcore => github.com/Azure/azure-s replace github.com/Azure/azure-sdk-for-go/sdk/storage/azblob => github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.6.2 // v1.6.4+ uses API version unsupported by Azurite in CI replace github.com/microsoft/go-mssqldb => github.com/microsoft/go-mssqldb v1.9.7 // downgraded with Azure SDK - -tool code.gitea.io/gitea-vet diff --git a/go.sum b/go.sum index 91b16dda30..547c61d826 100644 --- a/go.sum +++ b/go.sum @@ -2,8 +2,6 @@ cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdB cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10= code.gitea.io/actions-proto-go v0.4.1 h1:l0EYhjsgpUe/1VABo2eK7zcoNX2W44WOnb0MSLrKfls= code.gitea.io/actions-proto-go v0.4.1/go.mod h1:mn7Wkqz6JbnTOHQpot3yDeHx+O5C9EGhMEE+htvHBas= -code.gitea.io/gitea-vet v0.2.3 h1:gdFmm6WOTM65rE8FUBTRzeQZYzXePKSSB1+r574hWwI= -code.gitea.io/gitea-vet v0.2.3/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE= code.gitea.io/sdk/gitea v0.24.1 h1:hpaqcdGcBmfMpV7JSbBJVwE99qo+WqGreJYKrDKEyW8= code.gitea.io/sdk/gitea v0.24.1/go.mod h1:5/77BL3sHneCMEiZaMT9lfTvnnibsYxyO48mceCF3qA= code.pfad.fr/check v1.1.0 h1:GWvjdzhSEgHvEHe2uJujDcpmZoySKuHQNrZMfzfO0bE= @@ -18,8 +16,8 @@ filippo.io/edwards25519 v1.2.0 
h1:crnVqOiS4jqYleHd9vaKZ+HKtHfllngJIiOpNpoJsjo= filippo.io/edwards25519 v1.2.0/go.mod h1:xzAOLCNug/yB62zG1bQ8uziwrIqIuxhctzJT18Q77mc= gitea.com/gitea/act v0.261.10 h1:ndwbtuMXXz1dpYF2iwY1/PkgKNETo4jmPXfinTZt8cs= gitea.com/gitea/act v0.261.10/go.mod h1:oIkqQHvU0lfuIWwcpqa4FmU+t3prA89tgkuHUTsrI2c= -gitea.com/go-chi/binding v0.0.0-20240430071103-39a851e106ed h1:EZZBtilMLSZNWtHHcgq2mt6NSGhJSZBuduAlinMEmso= -gitea.com/go-chi/binding v0.0.0-20240430071103-39a851e106ed/go.mod h1:E3i3cgB04dDx0v3CytCgRTTn9Z/9x891aet3r456RVw= +gitea.com/go-chi/binding v0.0.0-20260414111559-654cea7ac60a h1:JHoBrfuTSF9Ke9aNfSYj1XRPBHjKPgCApVprnt2Am0M= +gitea.com/go-chi/binding v0.0.0-20260414111559-654cea7ac60a/go.mod h1:FOsLJIMdpiHzBp3Vby6Wfkdw2ppGscrjgU1IC7E4/zQ= gitea.com/go-chi/cache v0.2.1 h1:bfAPkvXlbcZxPCpcmDVCWoHgiBSBmZN/QosnZvEC0+g= gitea.com/go-chi/cache v0.2.1/go.mod h1:Qic0HZ8hOHW62ETGbonpwz8WYypj9NieU9659wFUJ8Q= gitea.com/go-chi/captcha v0.0.0-20240315150714-fb487f629098 h1:p2ki+WK0cIeNQuqjR98IP2KZQKRzJJiV7aTeMAFwaWo= @@ -96,8 +94,8 @@ github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPd github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/aws/aws-sdk-go-v2 v1.41.5 h1:dj5kopbwUsVUVFgO4Fi5BIT3t4WyqIDjGKCangnV/yY= github.com/aws/aws-sdk-go-v2 v1.41.5/go.mod h1:mwsPRE8ceUUpiTgF7QmQIJ7lgsKUPQOUl3o72QBrE1o= -github.com/aws/aws-sdk-go-v2/credentials v1.19.13 h1:mA59E3fokBvyEGHKFdnpNNrvaR351cqiHgRg+JzOSRI= -github.com/aws/aws-sdk-go-v2/credentials v1.19.13/go.mod h1:yoTXOQKea18nrM69wGF9jBdG4WocSZA1h38A+t/MAsk= +github.com/aws/aws-sdk-go-v2/credentials v1.19.14 h1:n+UcGWAIZHkXzYt87uMFBv/l8THYELoX6gVcUvgl6fI= +github.com/aws/aws-sdk-go-v2/credentials v1.19.14/go.mod h1:cJKuyWB59Mqi0jM3nFYQRmnHVQIcgoxjEMAbLkpr62w= github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.21 h1:Rgg6wvjjtX8bNHcvi9OnXWwcE0a2vGpbwmtICOsvcf4= github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.21/go.mod 
h1:A/kJFst/nm//cyqonihbdpQZwiUhhzpqTsdbhDdRF9c= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.21 h1:PEgGVtPoB6NTpPrBgqSE5hE/o47Ij9qk/SEZFbUOe9A= @@ -290,10 +288,10 @@ github.com/go-chi/chi/v5 v5.2.5 h1:Eg4myHZBjyvJmAFjFvWgrqDTXFyOzjj7YIm3L3mu6Ug= github.com/go-chi/chi/v5 v5.2.5/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0= github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE= github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= -github.com/go-co-op/gocron/v2 v2.19.1 h1:B4iLeA0NB/2iO3EKQ7NfKn5KsQgZfjb2fkvoZJU3yBI= -github.com/go-co-op/gocron/v2 v2.19.1/go.mod h1:5lEiCKk1oVJV39Zg7/YG10OnaVrDAV5GGR6O0663k6U= -github.com/go-enry/go-enry/v2 v2.9.5 h1:HPhAQQHYwJgihL2PxBZiUMFWiROsGwOBdB6/D8zCUhY= -github.com/go-enry/go-enry/v2 v2.9.5/go.mod h1:9yrj4ES1YrbNb1Wb7/PWYr2bpaCXUGRt0uafN0ISyG8= +github.com/go-co-op/gocron/v2 v2.20.0 h1:9IMrnnVSWjfSh3E54gWmWCHbloQJLh6f9+nwyKfLNpc= +github.com/go-co-op/gocron/v2 v2.20.0/go.mod h1:5lEiCKk1oVJV39Zg7/YG10OnaVrDAV5GGR6O0663k6U= +github.com/go-enry/go-enry/v2 v2.9.6 h1:np63eOtMV56zfYDHnFVgpEVOk8fr2kmylcMnAZUDbSs= +github.com/go-enry/go-enry/v2 v2.9.6/go.mod h1:9yrj4ES1YrbNb1Wb7/PWYr2bpaCXUGRt0uafN0ISyG8= github.com/go-enry/go-oniguruma v1.2.1 h1:k8aAMuJfMrqm/56SG2lV9Cfti6tC4x8673aHCcBk+eo= github.com/go-enry/go-oniguruma v1.2.1/go.mod h1:bWDhYP+S6xZQgiRL7wlTScFYBe023B6ilRZbCAD5Hf4= github.com/go-fed/httpsig v1.1.1-0.20201223112313-55836744818e h1:oRq/fiirun5HqlEWMLIcDmLpIELlG4iGbd0s8iqgPi8= @@ -304,8 +302,8 @@ github.com/go-git/go-billy/v5 v5.8.0 h1:I8hjc3LbBlXTtVuFNJuwYuMiHvQJDq1AT6u4DwDz github.com/go-git/go-billy/v5 v5.8.0/go.mod h1:RpvI/rw4Vr5QA+Z60c6d6LXH0rYJo0uD5SqfmrrheCY= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= -github.com/go-git/go-git/v5 v5.17.2 
h1:B+nkdlxdYrvyFK4GPXVU8w1U+YkbsgciIR7f2sZJ104= -github.com/go-git/go-git/v5 v5.17.2/go.mod h1:pW/VmeqkanRFqR6AljLcs7EA7FbZaN5MQqO7oZADXpo= +github.com/go-git/go-git/v5 v5.18.0 h1:O831KI+0PR51hM2kep6T8k+w0/LIAD490gvqMCvL5hM= +github.com/go-git/go-git/v5 v5.18.0/go.mod h1:pW/VmeqkanRFqR6AljLcs7EA7FbZaN5MQqO7oZADXpo= github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-jose/go-jose/v4 v4.1.3 h1:CVLmWDhDVRa6Mi/IgCgaopNosCaHz7zrMeF9MlZRkrs= @@ -327,10 +325,10 @@ github.com/go-test/deep v1.1.1 h1:0r/53hagsehfO4bzD2Pgr/+RgHqhmf+k1Bpse2cTu1U= github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/go-viper/mapstructure/v2 v2.5.0 h1:vM5IJoUAy3d7zRSVtIwQgBj7BiWtMPfmPEgAXnvj1Ro= github.com/go-viper/mapstructure/v2 v2.5.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= -github.com/go-webauthn/webauthn v0.16.1 h1:x5/SSki5/aIfogaRukqvbg/RXa3Sgxy/9vU7UfFPHKU= -github.com/go-webauthn/webauthn v0.16.1/go.mod h1:RBS+rtQJMkE5VfMQ4diDA2VNrEL8OeUhp4Srz37FHbQ= -github.com/go-webauthn/x v0.2.2 h1:zIiipvMbr48CXi5RG0XdBJR94kd8I5LfzHPb/q+YYmk= -github.com/go-webauthn/x v0.2.2/go.mod h1:IpJ5qyWB9NRhLX3C7gIfjTU7RZLXEP6kzFkoVSE7Fz4= +github.com/go-webauthn/webauthn v0.16.4 h1:R9jqR/cYZa7hRquFF7Za/8qoH/K/TIs1/Q/4CyGN+1Q= +github.com/go-webauthn/webauthn v0.16.4/go.mod h1:SU2ljAgToTV/YLPI0C05QS4qn+e04WpB5g1RMfcZfS4= +github.com/go-webauthn/x v0.2.3 h1:8oArS+Rc1SWFLXhE17KZNx258Z4kUSyaDgsSncCO5RA= +github.com/go-webauthn/x v0.2.3/go.mod h1:tM04GF3V6VYq79AZMl7vbj4q6pz9r7L2criWRzbWhPk= github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= @@ -391,8 +389,8 @@ github.com/google/gofuzz v1.2.0/go.mod 
h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/ github.com/google/licenseclassifier/v2 v2.0.0 h1:1Y57HHILNf4m0ABuMVb6xk4vAJYEUO0gDxNpog0pyeA= github.com/google/licenseclassifier/v2 v2.0.0/go.mod h1:cOjbdH0kyC9R22sdQbYsFkto4NGCAc+ZSwbeThazEtM= github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= -github.com/google/pprof v0.0.0-20260302011040-a15ffb7f9dcc h1:VBbFa1lDYWEeV5FZKUiYKYT0VxCp9twUmmaq9eb8sXw= -github.com/google/pprof v0.0.0-20260302011040-a15ffb7f9dcc/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI= +github.com/google/pprof v0.0.0-20260402051712-545e8a4df936 h1:EwtI+Al+DeppwYX2oXJCETMO23COyaKGP6fHVpkpWpg= +github.com/google/pprof v0.0.0-20260402051712-545e8a4df936/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= @@ -498,8 +496,8 @@ github.com/letsencrypt/challtestsrv v1.4.2 h1:0ON3ldMhZyWlfVNYYpFuWRTmZNnyfiL9Hh github.com/letsencrypt/challtestsrv v1.4.2/go.mod h1:GhqMqcSoeGpYd5zX5TgwA6er/1MbWzx/o7yuuVya+Wk= github.com/letsencrypt/pebble/v2 v2.10.0 h1:Wq6gYXlsY6ubqI3hhxsTzdyotvfdjFBxuwYqCLCnj/U= github.com/letsencrypt/pebble/v2 v2.10.0/go.mod h1:Sk8cmUIPcIdv2nINo+9PB4L+ZBhzY+F9A1a/h/xmWiQ= -github.com/lib/pq v1.12.1 h1:x1nbl/338GLqeDJ/FAiILallhAsqubLzEZu/pXtHUow= -github.com/lib/pq v1.12.1/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA= +github.com/lib/pq v1.12.3 h1:tTWxr2YLKwIvK90ZXEw8GP7UFHtcbTtty8zsI+YjrfQ= +github.com/lib/pq v1.12.3/go.mod h1:/p+8NSbOcwzAEI7wiMXFlgydTwcgTr3OSKMsD2BitpA= github.com/libdns/libdns v1.1.1 h1:wPrHrXILoSHKWJKGd0EiAVmiJbFShguILTg9leS/P/U= github.com/libdns/libdns v1.1.1/go.mod h1:4Bj9+5CQiNMVGf87wjX4CY3HQJypUHRuLvlsfsZqLWQ= github.com/magiconair/properties v1.8.0/go.mod 
h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= @@ -512,16 +510,16 @@ github.com/markbates/goth v1.82.0 h1:8j/c34AjBSTNzO7zTsOyP5IYCQCMBTRBHAbBt/PI0bQ github.com/markbates/goth v1.82.0/go.mod h1:/DRlcq0pyqkKToyZjsL2KgiA1zbF1HIjE7u2uC79rUk= github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= -github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= -github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.21 h1:xYae+lCNBP7QuW4PUnNG61ffM4hVIfm+zUzDuSzYLGs= +github.com/mattn/go-isatty v0.0.21/go.mod h1:ZXfXG4SQHsB/w3ZeOYbR0PrPwLy+n6xiMrJlRFqopa4= github.com/mattn/go-runewidth v0.0.21 h1:jJKAZiQH+2mIinzCJIaIG9Be1+0NR+5sz/lYEEjdM8w= github.com/mattn/go-runewidth v0.0.21/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs= github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= -github.com/mattn/go-sqlite3 v1.14.38 h1:tDUzL85kMvOrvpCt8P64SbGgVFtJB11GPi2AdmITgb4= -github.com/mattn/go-sqlite3 v1.14.38/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= -github.com/meilisearch/meilisearch-go v0.36.1 h1:mJTCJE5g7tRvaqKco6DfqOuJEjX+rRltDEnkEC02Y0M= -github.com/meilisearch/meilisearch-go v0.36.1/go.mod h1:hWcR0MuWLSzHfbz9GGzIr3s9rnXLm1jqkmHkJPbUSvM= +github.com/mattn/go-sqlite3 v1.14.42 h1:MigqEP4ZmHw3aIdIT7T+9TLa90Z6smwcthx+Azv4Cgo= +github.com/mattn/go-sqlite3 v1.14.42/go.mod h1:pjEuOr8IwzLJP2MfGeTb0A35jauH+C2kbHKBr7yXKVQ= +github.com/meilisearch/meilisearch-go v0.36.2 h1:MYaMPCpdLh2aYPt+zK+19mLoA4dfBY3S1L7T0FADCjU= +github.com/meilisearch/meilisearch-go v0.36.2/go.mod h1:hWcR0MuWLSzHfbz9GGzIr3s9rnXLm1jqkmHkJPbUSvM= github.com/mholt/acmez/v3 v3.1.6 h1:eGVQNObP0pBN4sxqrXeg7MYqTOWyoiYpQqITVWlrevk= github.com/mholt/acmez/v3 
v3.1.6/go.mod h1:5nTPosTGosLxF3+LU4ygbgMRFDhbAVpqMI4+a4aHLBY= github.com/mholt/archives v0.1.5 h1:Fh2hl1j7VEhc6DZs2DLMgiBNChUux154a1G+2esNvzQ= @@ -538,8 +536,8 @@ github.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI github.com/minio/crc64nvme v1.1.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg= github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM= -github.com/minio/minio-go/v7 v7.0.99 h1:2vH/byrwUkIpFQFOilvTfaUpvAX3fEFhEzO+DR3DlCE= -github.com/minio/minio-go/v7 v7.0.99/go.mod h1:EtGNKtlX20iL2yaYnxEigaIvj0G0GwSDnifnG8ClIdw= +github.com/minio/minio-go/v7 v7.0.100 h1:ShkWi8Tyj9RtU57OQB2HIXKz4bFgtVib0bbT1sbtLI8= +github.com/minio/minio-go/v7 v7.0.100/go.mod h1:EtGNKtlX20iL2yaYnxEigaIvj0G0GwSDnifnG8ClIdw= github.com/minio/minlz v1.1.0 h1:rUOGu3EP4EqJC5k3qCsIwEnZiJULKqtRyDdqbhlvMmQ= github.com/minio/minlz v1.1.0/go.mod h1:qT0aEB35q79LLornSzeDH75LBf3aH1MV+jB5w9Wasec= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= @@ -732,7 +730,6 @@ github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZ github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= github.com/yohcop/openid-go v1.0.1 h1:DPRd3iPO5F6O5zX2e62XpVAbPT6wV51cuucH0z9g3js= github.com/yohcop/openid-go v1.0.1/go.mod h1:b/AvD03P0KHj4yuihb+VtLD6bYYgsy0zqBzPCRjkCNs= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/goldmark v1.4.15/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= @@ -740,8 +737,6 @@ github.com/yuin/goldmark v1.8.2 h1:kEGpgqJXdgbkhcOgBxkC0X0PmoPG1ZyoZ117rDVp4zE= github.com/yuin/goldmark v1.8.2/go.mod 
h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg= github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc h1:+IAOyRda+RLrxa1WC7umKOZRsGq4QrFFMYApOeHzQwQ= github.com/yuin/goldmark-highlighting/v2 v2.0.0-20230729083705-37449abec8cc/go.mod h1:ovIvrum6DQJA4QsJSovrkC4saKHQVs7TvcaeO8AIl5I= -github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= -github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/blake3 v0.2.4 h1:KYQPkhpRtcqh0ssGYcKLG1JYvddkEA8QwCM/yBqhaZI= @@ -787,15 +782,14 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= -golang.org/x/crypto v0.49.0 h1:+Ng2ULVvLHnJ/ZFEq4KdcDd/cfjrrjjNSXNzxg0Y4U4= -golang.org/x/crypto v0.49.0/go.mod h1:ErX4dUh2UM+CFYiXZRTcMpEcN8b/1gxEuv3nODoYtCA= +golang.org/x/crypto v0.50.0 h1:zO47/JPrL6vsNkINmLoo/PH1gcxpls50DNogFvB5ZGI= +golang.org/x/crypto v0.50.0/go.mod h1:3muZ7vA7PBCE6xgPX7nkzzjiUq87kRItoJQM1Yo8S+Q= golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b h1:DXr+pvt3nC887026GRP39Ej11UATqWDmWuS99x26cD0= golang.org/x/exp v0.0.0-20250819193227-8b4c13bb791b/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4= -golang.org/x/image v0.38.0 h1:5l+q+Y9JDC7mBOMjo4/aPhMDcxEptsX+Tt3GgRQRPuE= -golang.org/x/image v0.38.0/go.mod h1:/3f6vaXC+6CEanU4KJxbcUZyEePbyKbaLoDOe4ehFYY= +golang.org/x/image v0.39.0 h1:skVYidAEVKgn8lZ602XO75asgXBgLj9G/FE3RbuPFww= +golang.org/x/image v0.39.0/go.mod h1:sIbmppfU+xFLPIG0FoVUTvyBMmgng1/XAMhQ2ft0hpA= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -807,7 +801,6 @@ golang.org/x/mod v0.34.0/go.mod h1:ykgH52iCZe79kzLLMhyCUzhMci+nQj+0XkbXpNYtVjY= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= @@ -821,13 +814,12 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.52.0 h1:He/TN1l0e4mmR3QqHMT2Xab3Aj3L9qjbhRm78/6jrW0= -golang.org/x/net v0.52.0/go.mod h1:R1MAz7uMZxVMualyPXb+VaqGSa3LIaUqk0eEt3w36Sw= +golang.org/x/net v0.53.0 h1:d+qAbo5L0orcWAr0a9JweQpjXF19LMXJE8Ey7hwOdUA= +golang.org/x/net v0.53.0/go.mod 
h1:JvMuJH7rrdiCfbeHoo3fCQU24Lf5JJwT9W3sJFulfgs= golang.org/x/oauth2 v0.36.0 h1:peZ/1z27fi9hUOFCAZaHyrpWG5lwe0RJEEEeH0ThlIs= golang.org/x/oauth2 v0.36.0/go.mod h1:YDBUJMTkDnJS+A4BP4eZBjCqtokkg1hODuPjwiGPO7Q= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -870,8 +862,8 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo= -golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw= +golang.org/x/sys v0.43.0 h1:Rlag2XtaFTxp19wS8MXlJwTvoh8ArU6ezoyFsMyCTNI= +golang.org/x/sys v0.43.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -882,8 +874,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod 
h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= -golang.org/x/term v0.41.0 h1:QCgPso/Q3RTJx2Th4bDLqML4W6iJiaXFq2/ftQF13YU= -golang.org/x/term v0.41.0/go.mod h1:3pfBgksrReYfZ5lvYM0kSO0LIkAl4Yl2bXOkKP7Ec2A= +golang.org/x/term v0.42.0 h1:UiKe+zDFmJobeJ5ggPwOshJIVt6/Ft0rcfrXZDLWAWY= +golang.org/x/term v0.42.0/go.mod h1:Dq/D+snpsbazcBG5+F9Q1n2rXV8Ma+71xEjTRufARgY= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -894,14 +886,13 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8= -golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA= +golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg= +golang.org/x/text v0.36.0/go.mod h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164= golang.org/x/time v0.15.0 h1:bbrp8t3bGUeFOx08pvsMYRTCVSMk89u4tKbNOZbp88U= golang.org/x/time v0.15.0/go.mod h1:Y4YMaQmXwGQZoFaVFk4YpCt4FLQMYKZe9oeV/f4MSno= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod 
h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200928182047-19e03678916f/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= @@ -916,8 +907,8 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/genproto/googleapis/rpc v0.0.0-20260401020348-3a24fdc17823 h1:YedBIttDguBl/zy2wJauEUm+DZZg4UXseWj0g/3N+yo= google.golang.org/genproto/googleapis/rpc v0.0.0-20260401020348-3a24fdc17823/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8= -google.golang.org/grpc v1.79.3 h1:sybAEdRIEtvcD68Gx7dmnwjZKlyfuc61Dyo9pGXXkKE= -google.golang.org/grpc v1.79.3/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ= +google.golang.org/grpc v1.80.0 h1:Xr6m2WmWZLETvUNvIUmeD5OAagMw3FiKmMlTdViWsHM= +google.golang.org/grpc v1.80.0/go.mod h1:ho/dLnxwi3EDJA4Zghp7k2Ec1+c2jqup0bFkw07bwF4= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -942,7 +933,6 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= 
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/models/actions/run.go b/models/actions/run.go index e293a6056f..bce356c0e2 100644 --- a/models/actions/run.go +++ b/models/actions/run.go @@ -153,7 +153,11 @@ func (run *ActionRun) LoadRepo(ctx context.Context) error { } func (run *ActionRun) Duration() time.Duration { - return calculateDuration(run.Started, run.Stopped, run.Status) + run.PreviousDuration + d := calculateDuration(run.Started, run.Stopped, run.Status, run.Updated) + run.PreviousDuration + if d < 0 { + return 0 + } + return d } func (run *ActionRun) GetPushEventPayload() (*api.PushPayload, error) { @@ -322,16 +326,16 @@ func GetRunByRepoAndID(ctx context.Context, repoID, runID int64) (*ActionRun, er return &run, nil } -func GetRunByIndex(ctx context.Context, repoID, index int64) (*ActionRun, error) { +func GetRunByRepoAndIndex(ctx context.Context, repoID, runIndex int64) (*ActionRun, error) { run := &ActionRun{ RepoID: repoID, - Index: index, + Index: runIndex, } has, err := db.GetEngine(ctx).Get(run) if err != nil { return nil, err } else if !has { - return nil, fmt.Errorf("run with index %d %d: %w", repoID, index, util.ErrNotExist) + return nil, fmt.Errorf("run with repo_id %d and index %d: %w", repoID, runIndex, util.ErrNotExist) } return run, nil diff --git a/models/actions/run_job.go b/models/actions/run_job.go index 616e298dc9..d1e5d1e938 100644 --- a/models/actions/run_job.go +++ b/models/actions/run_job.go @@ -18,6 +18,11 @@ import ( "xorm.io/builder" ) +// MaxJobNumPerRun is the maximum number of jobs in a single run. 
+// https://docs.github.com/en/actions/reference/limits#existing-system-limits +// TODO: check this limit when creating jobs +const MaxJobNumPerRun = 256 + // ActionRunJob represents a job of a run type ActionRunJob struct { ID int64 @@ -67,7 +72,7 @@ func init() { } func (job *ActionRunJob) Duration() time.Duration { - return calculateDuration(job.Started, job.Stopped, job.Status) + return calculateDuration(job.Started, job.Stopped, job.Status, job.Updated) } func (job *ActionRunJob) LoadRun(ctx context.Context) error { diff --git a/models/actions/run_test.go b/models/actions/run_test.go index bd2b92f4f6..e82cbe84b5 100644 --- a/models/actions/run_test.go +++ b/models/actions/run_test.go @@ -5,10 +5,12 @@ package actions import ( "testing" + "time" "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" ) @@ -30,6 +32,16 @@ func TestUpdateRepoRunsNumbers(t *testing.T) { err = UpdateRepoRunsNumbers(t.Context(), repo) assert.NoError(t, err) repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 4}) - assert.Equal(t, 5, repo.NumActionRuns) + assert.Equal(t, 4, repo.NumActionRuns) assert.Equal(t, 3, repo.NumClosedActionRuns) } + +func TestActionRun_Duration_NonNegative(t *testing.T) { + run := &ActionRun{ + Started: timeutil.TimeStamp(100), + Stopped: timeutil.TimeStamp(200), + Status: StatusSuccess, + PreviousDuration: -time.Hour, + } + assert.Equal(t, time.Duration(0), run.Duration()) +} diff --git a/models/actions/runner.go b/models/actions/runner.go index f5d40ca7d6..f0088491bb 100644 --- a/models/actions/runner.go +++ b/models/actions/runner.go @@ -171,9 +171,8 @@ func (r *ActionRunner) LoadAttributes(ctx context.Context) error { return nil } -func (r *ActionRunner) GenerateToken() (err error) { - r.Token, r.TokenSalt, r.TokenHash, _, err = generateSaltedToken() - return err +func (r *ActionRunner) 
GenerateAndFillToken() { + r.Token, r.TokenSalt, r.TokenHash, _ = generateSaltedToken() } // CanMatchLabels checks whether the runner's labels can match a job's "runs-on" diff --git a/models/actions/runner_token.go b/models/actions/runner_token.go index bbd2af73b6..f7b7c9fdf0 100644 --- a/models/actions/runner_token.go +++ b/models/actions/runner_token.go @@ -97,10 +97,7 @@ func NewRunnerTokenWithValue(ctx context.Context, ownerID, repoID int64, token s } func NewRunnerToken(ctx context.Context, ownerID, repoID int64) (*ActionRunnerToken, error) { - token, err := util.CryptoRandomString(40) - if err != nil { - return nil, err - } + token := util.CryptoRandomString(40) return NewRunnerTokenWithValue(ctx, ownerID, repoID, token) } diff --git a/models/actions/task.go b/models/actions/task.go index e092d6fbbd..28928c2bc6 100644 --- a/models/actions/task.go +++ b/models/actions/task.go @@ -77,7 +77,7 @@ func init() { } func (task *ActionTask) Duration() time.Duration { - return calculateDuration(task.Started, task.Stopped, task.Status) + return calculateDuration(task.Started, task.Stopped, task.Status, task.Updated) } func (task *ActionTask) IsStopped() bool { @@ -147,9 +147,8 @@ func (task *ActionTask) LoadAttributes(ctx context.Context) error { return nil } -func (task *ActionTask) GenerateToken() (err error) { - task.Token, task.TokenSalt, task.TokenHash, task.TokenLastEight, err = generateSaltedToken() - return err +func (task *ActionTask) GenerateAndFillToken() { + task.Token, task.TokenSalt, task.TokenHash, task.TokenLastEight = generateSaltedToken() } func GetTaskByID(ctx context.Context, id int64) (*ActionTask, error) { @@ -288,9 +287,7 @@ func CreateTaskForRunner(ctx context.Context, runner *ActionRunner) (*ActionTask CommitSHA: job.CommitSHA, IsForkPullRequest: job.IsForkPullRequest, } - if err := task.GenerateToken(); err != nil { - return nil, false, err - } + task.GenerateAndFillToken() workflowJob, err := job.ParseJob() if err != nil { diff --git 
a/models/actions/task_step.go b/models/actions/task_step.go index 03ffbf1931..3b477d8483 100644 --- a/models/actions/task_step.go +++ b/models/actions/task_step.go @@ -28,7 +28,7 @@ type ActionTaskStep struct { } func (step *ActionTaskStep) Duration() time.Duration { - return calculateDuration(step.Started, step.Stopped, step.Status) + return calculateDuration(step.Started, step.Stopped, step.Status, step.Updated) } func init() { diff --git a/models/actions/utils.go b/models/actions/utils.go index f6ba661ae3..e5704d0377 100644 --- a/models/actions/utils.go +++ b/models/actions/utils.go @@ -13,22 +13,17 @@ import ( "time" auth_model "code.gitea.io/gitea/models/auth" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" ) -func generateSaltedToken() (string, string, string, string, error) { - salt, err := util.CryptoRandomString(10) - if err != nil { - return "", "", "", "", err - } - buf, err := util.CryptoRandomBytes(20) - if err != nil { - return "", "", "", "", err - } +func generateSaltedToken() (string, string, string, string) { + salt := util.CryptoRandomString(10) + buf := util.CryptoRandomBytes(20) token := hex.EncodeToString(buf) hash := auth_model.HashToken(token, salt) - return token, salt, hash, token[len(token)-8:], nil + return token, salt, hash, token[len(token)-8:] } /* @@ -72,13 +67,25 @@ func (indexes *LogIndexes) ToDB() ([]byte, error) { var timeSince = time.Since -func calculateDuration(started, stopped timeutil.TimeStamp, status Status) time.Duration { +// calculateDuration computes wall time for a run, job, task, or step. When status is terminal +// but stopped is missing or inconsistent with started, fallbackEnd (typically the row Updated +// time) is used so duration still reflects approximate elapsed time instead of 0 or a negative. 
+func calculateDuration(started, stopped timeutil.TimeStamp, status Status, fallbackEnd timeutil.TimeStamp) time.Duration { if started == 0 { return 0 } s := started.AsTime() if status.IsDone() { - return stopped.AsTime().Sub(s) + end := stopped + if stopped.IsZero() || stopped < started { + if !fallbackEnd.IsZero() && fallbackEnd >= started { + end = fallbackEnd + } else { + log.Trace("actions: invalid duration timestamps (started=%d, stopped=%d, fallbackEnd=%d, status=%s)", started, stopped, fallbackEnd, status) + return 0 + } + } + return end.AsTime().Sub(s) } return timeSince(s).Truncate(time.Second) } diff --git a/models/actions/utils_test.go b/models/actions/utils_test.go index 98c048d4ef..2f7e7da360 100644 --- a/models/actions/utils_test.go +++ b/models/actions/utils_test.go @@ -45,9 +45,10 @@ func Test_calculateDuration(t *testing.T) { return timeutil.TimeStamp(1000).AsTime().Sub(t) } type args struct { - started timeutil.TimeStamp - stopped timeutil.TimeStamp - status Status + started timeutil.TimeStamp + stopped timeutil.TimeStamp + status Status + fallbackEnd timeutil.TimeStamp } tests := []struct { name string @@ -81,10 +82,48 @@ func Test_calculateDuration(t *testing.T) { }, want: 100 * time.Second, }, + { + name: "done_stopped_zero_no_fallback", + args: args{ + started: 500, + stopped: 0, + status: StatusSuccess, + }, + want: 0, + }, + { + name: "done_stopped_zero_uses_fallback", + args: args{ + started: 500, + stopped: 0, + status: StatusSuccess, + fallbackEnd: 600, + }, + want: 100 * time.Second, + }, + { + name: "done_stopped_before_started_no_fallback", + args: args{ + started: 600, + stopped: 550, + status: StatusSuccess, + }, + want: 0, + }, + { + name: "done_stopped_before_started_uses_fallback", + args: args{ + started: 600, + stopped: 550, + status: StatusSuccess, + fallbackEnd: 650, + }, + want: 50 * time.Second, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - assert.Equalf(t, tt.want, 
calculateDuration(tt.args.started, tt.args.stopped, tt.args.status), "calculateDuration(%v, %v, %v)", tt.args.started, tt.args.stopped, tt.args.status) + assert.Equalf(t, tt.want, calculateDuration(tt.args.started, tt.args.stopped, tt.args.status, tt.args.fallbackEnd), "calculateDuration(%v, %v, %v, %v)", tt.args.started, tt.args.stopped, tt.args.status, tt.args.fallbackEnd) }) } } diff --git a/models/activities/user_heatmap.go b/models/activities/user_heatmap.go index e24d44c519..2d1635917e 100644 --- a/models/activities/user_heatmap.go +++ b/models/activities/user_heatmap.go @@ -62,6 +62,7 @@ func getUserHeatmapData(ctx context.Context, user *user_model.User, team *organi return nil, err } + // HINT: USER-ACTIVITY-PUSH-COMMITS: it only uses the doer's action time, it doesn't use git commit's time return hdata, db.GetEngine(ctx). Select(groupBy+" AS timestamp, count(user_id) as contributions"). Table("action"). diff --git a/models/asymkey/ssh_key.go b/models/asymkey/ssh_key.go index 98784b36bd..1873c30859 100644 --- a/models/asymkey/ssh_key.go +++ b/models/asymkey/ssh_key.go @@ -15,6 +15,7 @@ import ( "code.gitea.io/gitea/models/perm" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" @@ -64,7 +65,12 @@ func (key *PublicKey) AfterLoad() { // OmitEmail returns content of public key without email address. func (key *PublicKey) OmitEmail() string { - return strings.Join(strings.Split(key.Content, " ")[:2], " ") + fields := strings.Split(key.Content, " ") // format: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC... 
comment + if len(fields) < 2 { + setting.PanicInDevOrTesting("invalid public key %d content: %s", key.ID, key.Content) + return "" // not a valid public key, it shouldn't really happen, the value is managed internally + } + return strings.Join(fields[:2], " ") } func addKey(ctx context.Context, key *PublicKey) (err error) { diff --git a/models/auth/access_token.go b/models/auth/access_token.go index 63331b4841..7578528be8 100644 --- a/models/auth/access_token.go +++ b/models/auth/access_token.go @@ -98,19 +98,13 @@ func init() { // NewAccessToken creates new access token. func NewAccessToken(ctx context.Context, t *AccessToken) error { - salt, err := util.CryptoRandomString(10) - if err != nil { - return err - } - token, err := util.CryptoRandomBytes(20) - if err != nil { - return err - } + salt := util.CryptoRandomString(10) + token := util.CryptoRandomBytes(20) t.TokenSalt = salt t.Token = hex.EncodeToString(token) t.TokenHash = HashToken(t.Token, t.TokenSalt) t.TokenLastEight = t.Token[len(t.Token)-8:] - _, err = db.GetEngine(ctx).Insert(t) + _, err := db.GetEngine(ctx).Insert(t) return err } diff --git a/models/auth/oauth2.go b/models/auth/oauth2.go index e2bb72b722..846c386a20 100644 --- a/models/auth/oauth2.go +++ b/models/auth/oauth2.go @@ -185,10 +185,7 @@ var base32Lower = base32.NewEncoding(lowerBase32Chars).WithPadding(base32.NoPadd // GenerateClientSecret will generate the client secret and returns the plaintext and saves the hash at the database func (app *OAuth2Application) GenerateClientSecret(ctx context.Context) (string, error) { - rBytes, err := util.CryptoRandomBytes(32) - if err != nil { - return "", err - } + rBytes := util.CryptoRandomBytes(32) // Add a prefix to the base32, this is in order to make it easier // for code scanners to grab sensitive tokens. 
clientSecret := "gto_" + base32Lower.EncodeToString(rBytes) @@ -484,10 +481,7 @@ func (grant *OAuth2Grant) TableName() string { // GenerateNewAuthorizationCode generates a new authorization code for a grant and saves it to the database func (grant *OAuth2Grant) GenerateNewAuthorizationCode(ctx context.Context, redirectURI, codeChallenge, codeChallengeMethod string) (code *OAuth2AuthorizationCode, err error) { - rBytes, err := util.CryptoRandomBytes(32) - if err != nil { - return &OAuth2AuthorizationCode{}, err - } + rBytes := util.CryptoRandomBytes(32) // Add a prefix to the base32, this is in order to make it easier // for code scanners to grab sensitive tokens. codeSecret := "gta_" + base32Lower.EncodeToString(rBytes) diff --git a/models/auth/twofactor.go b/models/auth/twofactor.go index 4263495650..80c34ba6ad 100644 --- a/models/auth/twofactor.go +++ b/models/auth/twofactor.go @@ -65,14 +65,11 @@ func init() { // GenerateScratchToken recreates the scratch token the user is using. func (t *TwoFactor) GenerateScratchToken() (string, error) { - tokenBytes, err := util.CryptoRandomBytes(6) - if err != nil { - return "", err - } + tokenBytes := util.CryptoRandomBytes(6) // these chars are specially chosen, avoid ambiguous chars like `0`, `O`, `1`, `I`. 
const base32Chars = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789" token := base32.NewEncoding(base32Chars).WithPadding(base32.NoPadding).EncodeToString(tokenBytes) - t.ScratchSalt, _ = util.CryptoRandomString(10) + t.ScratchSalt = util.CryptoRandomString(10) t.ScratchHash = HashToken(token, t.ScratchSalt) return token, nil } diff --git a/models/db/engine.go b/models/db/engine.go index b08799210e..fbcc3fa15e 100755 --- a/models/db/engine.go +++ b/models/db/engine.go @@ -11,11 +11,11 @@ import ( "reflect" "strings" - "xorm.io/xorm" - _ "github.com/go-sql-driver/mysql" // Needed for the MySQL driver _ "github.com/lib/pq" // Needed for the Postgresql driver _ "github.com/microsoft/go-mssqldb" // Needed for the MSSQL driver + + "xorm.io/xorm" ) var ( diff --git a/models/fixtures/access.yml b/models/fixtures/access.yml index 596046e950..c0aa06c86d 100644 --- a/models/fixtures/access.yml +++ b/models/fixtures/access.yml @@ -177,3 +177,5 @@ user_id: 40 repo_id: 1 mode: 2 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/access_token.yml b/models/fixtures/access_token.yml index 0744255f66..d85d785da5 100644 --- a/models/fixtures/access_token.yml +++ b/models/fixtures/access_token.yml @@ -31,3 +31,5 @@ created_unix: 946687980 updated_unix: 946687980 # commented out tokens so you can see what they are in plaintext + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/action.yml b/models/fixtures/action.yml index af9ce93ba5..32f2ae8764 100644 --- a/models/fixtures/action.yml +++ b/models/fixtures/action.yml @@ -73,3 +73,5 @@ is_private: false created_unix: 1680454039 content: '4|' # issueId 5 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/action_artifact.yml b/models/fixtures/action_artifact.yml index 
a25dfc205c..5fcc70aa53 100644 --- a/models/fixtures/action_artifact.yml +++ b/models/fixtures/action_artifact.yml @@ -177,3 +177,5 @@ created_unix: 1730330775 updated_unix: 1730330775 expired_unix: 1738106775 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/action_run.yml b/models/fixtures/action_run.yml index ac5e8303c3..b7d1201189 100644 --- a/models/fixtures/action_run.yml +++ b/models/fixtures/action_run.yml @@ -140,23 +140,4 @@ need_approval: 0 approved_by: 0 -- - id: 805 - title: "update actions" - repo_id: 4 - owner_id: 1 - workflow_id: "artifact.yaml" - index: 191 - trigger_user_id: 1 - ref: "refs/heads/master" - commit_sha: "c2d72f548424103f01ee1dc02889c1e2bff816b0" - event: "push" - trigger_event: "push" - is_fork_pull_request: 0 - status: 5 - started: 1683636528 - stopped: 1683636626 - created: 1683636108 - updated: 1683636626 - need_approval: 0 - approved_by: 0 +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/action_run_job.yml b/models/fixtures/action_run_job.yml index 04799b73ca..2a4e64285f 100644 --- a/models/fixtures/action_run_job.yml +++ b/models/fixtures/action_run_job.yml @@ -130,17 +130,4 @@ started: 1683636528 stopped: 1683636626 -- - id: 206 - run_id: 805 - repo_id: 4 - owner_id: 1 - commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 - is_fork_pull_request: 0 - name: job_2 - attempt: 1 - job_id: job_2 - task_id: 56 - status: 3 - started: 1683636528 - stopped: 1683636626 +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/action_runner.yml b/models/fixtures/action_runner.yml index ecb7214006..110ff627a2 100644 --- a/models/fixtures/action_runner.yml +++ b/models/fixtures/action_runner.yml @@ -49,3 +49,5 @@ repo_id: 0 description: "This runner is going to be deleted" 
agent_labels: '["runner_to_be_deleted","linux"]' + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/action_runner_token.yml b/models/fixtures/action_runner_token.yml index 6520b7f6fb..3af8a28c9c 100644 --- a/models/fixtures/action_runner_token.yml +++ b/models/fixtures/action_runner_token.yml @@ -33,3 +33,5 @@ is_active: 1 created: 1695617751 updated: 1695617751 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/action_task.yml b/models/fixtures/action_task.yml index e1bc588dc5..13efe378c4 100644 --- a/models/fixtures/action_task.yml +++ b/models/fixtures/action_task.yml @@ -178,22 +178,4 @@ log_size: 0 log_expired: 0 -- - id: 56 - attempt: 1 - runner_id: 1 - status: 3 # 3 is the status code for "cancelled" - started: 1683636528 - stopped: 1683636626 - repo_id: 4 - owner_id: 1 - commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0 - is_fork_pull_request: 0 - token_hash: 6d8ef48297195edcc8e22c70b3020eaa06c52976db67d39b4240c64a69a2cc1508825121b7b8394e48e00b1bf3718b2aaaab - token_salt: eeeeeeee - token_last_eight: eeeeeeee - log_filename: artifact-test2/2f/47.log - log_in_storage: 1 - log_length: 707 - log_size: 90179 - log_expired: 0 +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/action_task_output.yml b/models/fixtures/action_task_output.yml index 314e9f7115..741b193b13 100644 --- a/models/fixtures/action_task_output.yml +++ b/models/fixtures/action_task_output.yml @@ -18,3 +18,5 @@ task_id: 50 output_key: output_b output_value: bbb + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/attachment.yml b/models/fixtures/attachment.yml index 570d4a27da..0870895a71 100644 --- 
a/models/fixtures/attachment.yml +++ b/models/fixtures/attachment.yml @@ -166,3 +166,5 @@ download_count: 0 size: 0 created_unix: 946684800 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/badge.yml b/models/fixtures/badge.yml index 438cd0ca5d..72550be79a 100644 --- a/models/fixtures/badge.yml +++ b/models/fixtures/badge.yml @@ -3,3 +3,5 @@ slug: badge1 description: just a test badge image_url: badge1.png + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/branch.yml b/models/fixtures/branch.yml index a17999091e..e09022a614 100644 --- a/models/fixtures/branch.yml +++ b/models/fixtures/branch.yml @@ -249,3 +249,5 @@ is_deleted: false deleted_by_id: 0 deleted_unix: 0 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/collaboration.yml b/models/fixtures/collaboration.yml index 4c3ac367f6..2de3448809 100644 --- a/models/fixtures/collaboration.yml +++ b/models/fixtures/collaboration.yml @@ -63,3 +63,5 @@ repo_id: 32 user_id: 10 mode: 2 # write + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/comment.yml b/models/fixtures/comment.yml index 8fde386e22..6930dbb58e 100644 --- a/models/fixtures/comment.yml +++ b/models/fixtures/comment.yml @@ -102,3 +102,5 @@ review_id: 22 assignee_id: 5 created_unix: 946684817 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/commit_status.yml b/models/fixtures/commit_status.yml index 87c652e53a..df3bc42505 100644 --- a/models/fixtures/commit_status.yml +++ b/models/fixtures/commit_status.yml @@ -57,3 +57,5 @@ context: deploy/awesomeness context_hash: 
ae9547713a6665fc4261d0756904932085a41cf2 creator_id: 2 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/commit_status_index.yml b/models/fixtures/commit_status_index.yml index f63343b042..9157911bac 100644 --- a/models/fixtures/commit_status_index.yml +++ b/models/fixtures/commit_status_index.yml @@ -3,3 +3,5 @@ repo_id: 1 sha: "1234123412341234123412341234123412341234" max_index: 5 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/deploy_key.yml b/models/fixtures/deploy_key.yml index ca780a73aa..0d1b2f0098 100644 --- a/models/fixtures/deploy_key.yml +++ b/models/fixtures/deploy_key.yml @@ -1 +1,3 @@ [] # empty + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/email_address.yml b/models/fixtures/email_address.yml index 0f6bd9ee6d..b3c78120af 100644 --- a/models/fixtures/email_address.yml +++ b/models/fixtures/email_address.yml @@ -317,3 +317,5 @@ lower_email: user40@example.com is_activated: true is_primary: true + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/external_login_user.yml b/models/fixtures/external_login_user.yml index ca780a73aa..0d1b2f0098 100644 --- a/models/fixtures/external_login_user.yml +++ b/models/fixtures/external_login_user.yml @@ -1 +1,3 @@ [] # empty + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/follow.yml b/models/fixtures/follow.yml index b8d35828bf..f8de0e039d 100644 --- a/models/fixtures/follow.yml +++ b/models/fixtures/follow.yml @@ -17,3 +17,5 @@ id: 4 user_id: 31 follow_id: 33 + +# DO NOT add more test data in the fixtures, test case should prepare their own 
test data separately and clearly diff --git a/models/fixtures/gpg_key.yml b/models/fixtures/gpg_key.yml index 2d54313fdf..3d2895dc1c 100644 --- a/models/fixtures/gpg_key.yml +++ b/models/fixtures/gpg_key.yml @@ -21,3 +21,5 @@ can_encrypt_comms: true can_encrypt_storage: true can_certify: true + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/gpg_key_import.yml b/models/fixtures/gpg_key_import.yml index ca780a73aa..0d1b2f0098 100644 --- a/models/fixtures/gpg_key_import.yml +++ b/models/fixtures/gpg_key_import.yml @@ -1 +1,3 @@ [] # empty + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/hook_task.yml b/models/fixtures/hook_task.yml index 6023719b1e..e19eeb0368 100644 --- a/models/fixtures/hook_task.yml +++ b/models/fixtures/hook_task.yml @@ -35,3 +35,5 @@ "X-Head": "42" } } + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/issue.yml b/models/fixtures/issue.yml index ca5b1c6cd1..6da3c9e279 100644 --- a/models/fixtures/issue.yml +++ b/models/fixtures/issue.yml @@ -372,3 +372,5 @@ created_unix: 1707270422 updated_unix: 1707270422 is_locked: false + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/issue_assignees.yml b/models/fixtures/issue_assignees.yml index c40ecad676..a0bf422dc9 100644 --- a/models/fixtures/issue_assignees.yml +++ b/models/fixtures/issue_assignees.yml @@ -18,3 +18,5 @@ id: 5 assignee_id: 10 issue_id: 6 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/issue_index.yml b/models/fixtures/issue_index.yml index 5aabc08e38..5110068447 100644 --- a/models/fixtures/issue_index.yml +++ 
b/models/fixtures/issue_index.yml @@ -33,3 +33,5 @@ - group_id: 51 max_index: 1 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/issue_label.yml b/models/fixtures/issue_label.yml index f4ecb1f923..3754bd7828 100644 --- a/models/fixtures/issue_label.yml +++ b/models/fixtures/issue_label.yml @@ -17,3 +17,5 @@ id: 4 issue_id: 2 label_id: 4 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/issue_pin.yml b/models/fixtures/issue_pin.yml index 14b7a72d84..dc3d1c60d9 100644 --- a/models/fixtures/issue_pin.yml +++ b/models/fixtures/issue_pin.yml @@ -4,3 +4,5 @@ issue_id: 4 is_pull: false pin_order: 1 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/issue_user.yml b/models/fixtures/issue_user.yml index 64824316ea..756cb7be4b 100644 --- a/models/fixtures/issue_user.yml +++ b/models/fixtures/issue_user.yml @@ -18,3 +18,5 @@ issue_id: 1 is_read: false is_mentioned: true + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/issue_watch.yml b/models/fixtures/issue_watch.yml index 4bc3ff1b8b..edc1041abc 100644 --- a/models/fixtures/issue_watch.yml +++ b/models/fixtures/issue_watch.yml @@ -29,3 +29,5 @@ is_watching: false created_unix: 946684800 updated_unix: 946684800 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/label.yml b/models/fixtures/label.yml index acfac74968..064f790a77 100644 --- a/models/fixtures/label.yml +++ b/models/fixtures/label.yml @@ -107,3 +107,5 @@ num_issues: 0 num_closed_issues: 0 archived_unix: 0 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data 
separately and clearly diff --git a/models/fixtures/lfs_meta_object.yml b/models/fixtures/lfs_meta_object.yml index ae5ae56542..0fe430f147 100644 --- a/models/fixtures/lfs_meta_object.yml +++ b/models/fixtures/lfs_meta_object.yml @@ -30,3 +30,5 @@ size: 25 repository_id: 54 created_unix: 1671607299 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/login_source.yml b/models/fixtures/login_source.yml index ca780a73aa..0d1b2f0098 100644 --- a/models/fixtures/login_source.yml +++ b/models/fixtures/login_source.yml @@ -1 +1,3 @@ [] # empty + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/milestone.yml b/models/fixtures/milestone.yml index 87c30cc96c..c4ed2aea78 100644 --- a/models/fixtures/milestone.yml +++ b/models/fixtures/milestone.yml @@ -52,3 +52,5 @@ num_closed_issues: 0 completeness: 0 deadline_unix: 253370764800 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/mirror.yml b/models/fixtures/mirror.yml index 97bc4ae60d..1f690654cb 100644 --- a/models/fixtures/mirror.yml +++ b/models/fixtures/mirror.yml @@ -47,3 +47,5 @@ next_update_unix: 0 lfs_enabled: false lfs_endpoint: "" + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/notice.yml b/models/fixtures/notice.yml index af08f07bfa..17e26d7634 100644 --- a/models/fixtures/notice.yml +++ b/models/fixtures/notice.yml @@ -12,3 +12,5 @@ id: 3 type: 1 # NoticeRepository description: description3 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/notification.yml b/models/fixtures/notification.yml index bd279d4bb2..dcfbeada39 100644 --- 
a/models/fixtures/notification.yml +++ b/models/fixtures/notification.yml @@ -52,3 +52,5 @@ issue_id: 4 created_unix: 946688800 updated_unix: 946688820 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/oauth2_application.yml b/models/fixtures/oauth2_application.yml index 2f38cb58b6..5b3b00b16e 100644 --- a/models/fixtures/oauth2_application.yml +++ b/models/fixtures/oauth2_application.yml @@ -18,3 +18,5 @@ created_unix: 1546869730 updated_unix: 1546869730 confidential_client: false + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/oauth2_authorization_code.yml b/models/fixtures/oauth2_authorization_code.yml index d29502164e..64d8b17507 100644 --- a/models/fixtures/oauth2_authorization_code.yml +++ b/models/fixtures/oauth2_authorization_code.yml @@ -13,3 +13,5 @@ code_challenge_method: "S256" redirect_uri: "http://127.0.0.1/" valid_until: 3546869730 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/oauth2_grant.yml b/models/fixtures/oauth2_grant.yml index e63286878b..54f4e45e62 100644 --- a/models/fixtures/oauth2_grant.yml +++ b/models/fixtures/oauth2_grant.yml @@ -29,3 +29,5 @@ scope: "whatever" created_unix: 1546869730 updated_unix: 1546869730 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/org_user.yml b/models/fixtures/org_user.yml index 73a3e9dba9..dc35701182 100644 --- a/models/fixtures/org_user.yml +++ b/models/fixtures/org_user.yml @@ -135,3 +135,5 @@ uid: 20 org_id: 17 is_public: false + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/project.yml b/models/fixtures/project.yml index 
44d87bce04..e61781fd7f 100644 --- a/models/fixtures/project.yml +++ b/models/fixtures/project.yml @@ -69,3 +69,5 @@ type: 2 created_unix: 1688973000 updated_unix: 1688973000 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/project_board.yml b/models/fixtures/project_board.yml index 3293dea6ed..91d2198171 100644 --- a/models/fixtures/project_board.yml +++ b/models/fixtures/project_board.yml @@ -75,3 +75,5 @@ default: true created_unix: 1588117528 updated_unix: 1588117528 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/project_issue.yml b/models/fixtures/project_issue.yml index b1af05908a..7d9d511882 100644 --- a/models/fixtures/project_issue.yml +++ b/models/fixtures/project_issue.yml @@ -21,3 +21,5 @@ issue_id: 5 project_id: 1 project_board_id: 3 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/protected_branch.yml b/models/fixtures/protected_branch.yml index ca780a73aa..0d1b2f0098 100644 --- a/models/fixtures/protected_branch.yml +++ b/models/fixtures/protected_branch.yml @@ -1 +1,3 @@ [] # empty + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/protected_tag.yml b/models/fixtures/protected_tag.yml index 1944e7bd84..cb83439645 100644 --- a/models/fixtures/protected_tag.yml +++ b/models/fixtures/protected_tag.yml @@ -22,3 +22,5 @@ allowlist_team_i_ds: "[]" created_unix: 1715596037 updated_unix: 1715596037 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/public_key.yml b/models/fixtures/public_key.yml index 856b0e3fb2..756bca86b6 100644 --- a/models/fixtures/public_key.yml +++ 
b/models/fixtures/public_key.yml @@ -10,3 +10,5 @@ updated_unix: 1565224552 login_source_id: 0 verified: false + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/pull_request.yml b/models/fixtures/pull_request.yml index 9a16316e5a..b8da7fe081 100644 --- a/models/fixtures/pull_request.yml +++ b/models/fixtures/pull_request.yml @@ -117,3 +117,5 @@ index: 1 head_repo_id: 61 base_repo_id: 61 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/reaction.yml b/models/fixtures/reaction.yml index ee571a73a4..9effcc98f7 100644 --- a/models/fixtures/reaction.yml +++ b/models/fixtures/reaction.yml @@ -37,3 +37,5 @@ comment_id: 2 user_id: 1 created_unix: 1573248005 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/release.yml b/models/fixtures/release.yml index 372a79509f..be44e331ec 100644 --- a/models/fixtures/release.yml +++ b/models/fixtures/release.yml @@ -150,3 +150,5 @@ is_prerelease: false is_tag: false created_unix: 946684803 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/renamed_branch.yml b/models/fixtures/renamed_branch.yml index efa5130a2b..9055080ff8 100644 --- a/models/fixtures/renamed_branch.yml +++ b/models/fixtures/renamed_branch.yml @@ -3,3 +3,5 @@ repo_id: 1 from: dev to: master + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/repo_archiver.yml b/models/fixtures/repo_archiver.yml index ca780a73aa..0d1b2f0098 100644 --- a/models/fixtures/repo_archiver.yml +++ b/models/fixtures/repo_archiver.yml @@ -1 +1,3 @@ [] # empty + +# DO NOT add more test data in the fixtures, test case should prepare 
their own test data separately and clearly diff --git a/models/fixtures/repo_indexer_status.yml b/models/fixtures/repo_indexer_status.yml index ca780a73aa..0d1b2f0098 100644 --- a/models/fixtures/repo_indexer_status.yml +++ b/models/fixtures/repo_indexer_status.yml @@ -1 +1,3 @@ [] # empty + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/repo_license.yml b/models/fixtures/repo_license.yml index ca780a73aa..0d1b2f0098 100644 --- a/models/fixtures/repo_license.yml +++ b/models/fixtures/repo_license.yml @@ -1 +1,3 @@ [] # empty + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/repo_redirect.yml b/models/fixtures/repo_redirect.yml index 8850c8d780..60459d638d 100644 --- a/models/fixtures/repo_redirect.yml +++ b/models/fixtures/repo_redirect.yml @@ -3,3 +3,5 @@ owner_id: 2 lower_name: oldrepo1 redirect_repo_id: 1 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/repo_topic.yml b/models/fixtures/repo_topic.yml index f166faccc1..3a4e7edaa9 100644 --- a/models/fixtures/repo_topic.yml +++ b/models/fixtures/repo_topic.yml @@ -25,3 +25,5 @@ - repo_id: 2 topic_id: 6 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/repo_transfer.yml b/models/fixtures/repo_transfer.yml index b12e6b207f..0a26eaec8e 100644 --- a/models/fixtures/repo_transfer.yml +++ b/models/fixtures/repo_transfer.yml @@ -29,3 +29,5 @@ repo_id: 5 created_unix: 1553610671 updated_unix: 1553610671 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/repo_unit.yml b/models/fixtures/repo_unit.yml index 4c3e37500f..69f083ccd7 100644 --- 
a/models/fixtures/repo_unit.yml +++ b/models/fixtures/repo_unit.yml @@ -747,3 +747,5 @@ type: 10 config: "{}" created_unix: 946684810 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/repository.yml b/models/fixtures/repository.yml index dfa514db37..d8eb796207 100644 --- a/models/fixtures/repository.yml +++ b/models/fixtures/repository.yml @@ -1788,3 +1788,5 @@ size: 0 is_fsck_enabled: true close_issues_via_commit_in_any_branch: false + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/review.yml b/models/fixtures/review.yml index 5b8bbceca9..abcc9d3bb2 100644 --- a/models/fixtures/review.yml +++ b/models/fixtures/review.yml @@ -214,3 +214,5 @@ original_author_id: 0 updated_unix: 946684817 created_unix: 946684817 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/star.yml b/models/fixtures/star.yml index 39b51b3736..96db493448 100644 --- a/models/fixtures/star.yml +++ b/models/fixtures/star.yml @@ -17,3 +17,5 @@ id: 4 uid: 10 repo_id: 32 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/stopwatch.yml b/models/fixtures/stopwatch.yml index b7919d6fbb..bbb3852069 100644 --- a/models/fixtures/stopwatch.yml +++ b/models/fixtures/stopwatch.yml @@ -9,3 +9,5 @@ user_id: 2 issue_id: 2 created_unix: 1500988002 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/system_setting.yml b/models/fixtures/system_setting.yml index dcad176c89..ae612fa9f9 100644 --- a/models/fixtures/system_setting.yml +++ b/models/fixtures/system_setting.yml @@ -13,3 +13,5 @@ version: 1 created: 1653533198 updated: 1653533198 + +# DO NOT 
add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/team.yml b/models/fixtures/team.yml index b549d0589b..3c2cb7802a 100644 --- a/models/fixtures/team.yml +++ b/models/fixtures/team.yml @@ -261,3 +261,5 @@ num_members: 1 includes_all_repositories: true can_create_org_repo: false + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/team_repo.yml b/models/fixtures/team_repo.yml index a29078107e..c91f74467a 100644 --- a/models/fixtures/team_repo.yml +++ b/models/fixtures/team_repo.yml @@ -75,3 +75,5 @@ org_id: 41 team_id: 22 repo_id: 61 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/team_unit.yml b/models/fixtures/team_unit.yml index 110019eee3..bb95087009 100644 --- a/models/fixtures/team_unit.yml +++ b/models/fixtures/team_unit.yml @@ -340,3 +340,5 @@ team_id: 24 type: 1 # code access_mode: 2 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/team_user.yml b/models/fixtures/team_user.yml index 6b2d153278..4cceffee6a 100644 --- a/models/fixtures/team_user.yml +++ b/models/fixtures/team_user.yml @@ -159,3 +159,5 @@ org_id: 35 team_id: 24 uid: 2 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/topic.yml b/models/fixtures/topic.yml index 055addf510..97ac821fc1 100644 --- a/models/fixtures/topic.yml +++ b/models/fixtures/topic.yml @@ -27,3 +27,5 @@ id: 6 name: topicname2 repo_count: 2 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/tracked_time.yml b/models/fixtures/tracked_time.yml index 768af38d9e..7c2145a6d8 100644 --- 
a/models/fixtures/tracked_time.yml +++ b/models/fixtures/tracked_time.yml @@ -69,3 +69,5 @@ time: 100000 created_unix: 947688815 deleted: true + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/two_factor.yml b/models/fixtures/two_factor.yml index d8cb85274b..13b421b7b4 100644 --- a/models/fixtures/two_factor.yml +++ b/models/fixtures/two_factor.yml @@ -7,3 +7,5 @@ last_used_passcode: created_unix: 1564253724 updated_unix: 1564253724 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/user.yml b/models/fixtures/user.yml index 976a236011..1a33947e04 100644 --- a/models/fixtures/user.yml +++ b/models/fixtures/user.yml @@ -1556,3 +1556,5 @@ repo_admin_change_team_access: false theme: "" keep_activity_private: false + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/user_blocking.yml b/models/fixtures/user_blocking.yml index 2ec9d99df5..c1714e40c8 100644 --- a/models/fixtures/user_blocking.yml +++ b/models/fixtures/user_blocking.yml @@ -17,3 +17,5 @@ id: 4 blocker_id: 50 blockee_id: 34 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/user_open_id.yml b/models/fixtures/user_open_id.yml index d3a367b99d..72fe7e34b8 100644 --- a/models/fixtures/user_open_id.yml +++ b/models/fixtures/user_open_id.yml @@ -15,3 +15,5 @@ uid: 2 uri: https://domain1.tld/user2/ show: true + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/user_redirect.yml b/models/fixtures/user_redirect.yml index c668cb6c3b..1b0f7a9494 100644 --- a/models/fixtures/user_redirect.yml +++ b/models/fixtures/user_redirect.yml @@ -6,3 +6,5 @@ id: 2 
lower_name: olduser2 redirect_user_id: 2 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/watch.yml b/models/fixtures/watch.yml index 18bcd2ed2b..b7ee121f24 100644 --- a/models/fixtures/watch.yml +++ b/models/fixtures/watch.yml @@ -39,3 +39,5 @@ user_id: 10 repo_id: 32 mode: 1 # normal + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/webauthn_credential.yml b/models/fixtures/webauthn_credential.yml index bc43127fcd..d188a4d76a 100644 --- a/models/fixtures/webauthn_credential.yml +++ b/models/fixtures/webauthn_credential.yml @@ -7,3 +7,5 @@ clone_warning: false created_unix: 946684800 updated_unix: 946684800 + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/fixtures/webhook.yml b/models/fixtures/webhook.yml index ec282914b8..f372aaaecb 100644 --- a/models/fixtures/webhook.yml +++ b/models/fixtures/webhook.yml @@ -50,3 +50,5 @@ events: '{"push_only":true,"branch_filter":"{master,feature*}"}' is_active: true is_system_webhook: false + +# DO NOT add more test data in the fixtures, test case should prepare their own test data separately and clearly diff --git a/models/issues/comment.go b/models/issues/comment.go index 25e74c01ea..84a7150b9f 100644 --- a/models/issues/comment.go +++ b/models/issues/comment.go @@ -7,6 +7,7 @@ package issues import ( "context" + "errors" "fmt" "html/template" "slices" @@ -21,7 +22,9 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/htmlutil" + "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/optional" "code.gitea.io/gitea/modules/references" "code.gitea.io/gitea/modules/structs" @@ -326,21 +329,34 @@ type 
Comment struct { RefIssue *Issue `xorm:"-"` RefComment *Comment `xorm:"-"` - Commits []*git_model.SignCommitWithStatuses `xorm:"-"` - OldCommit string `xorm:"-"` - NewCommit string `xorm:"-"` - CommitsNum int64 `xorm:"-"` - IsForcePush bool `xorm:"-"` + Commits []*git_model.SignCommitWithStatuses `xorm:"-"` + OldCommit string `xorm:"-"` + NewCommit string `xorm:"-"` + CommitsNum int64 `xorm:"-"` + + // Templates still use it. It is not persisted in database, it is only set when creating or loading + IsForcePush bool `xorm:"-"` } func init() { db.RegisterModel(new(Comment)) } -// PushActionContent is content of push pull comment +// PushActionContent is content of pull request's push comment type PushActionContent struct { - IsForcePush bool `json:"is_force_push"` - CommitIDs []string `json:"commit_ids"` + IsForcePush bool `json:"is_force_push"` + // if IsForcePush=true, CommitIDs contains the commit pair [old head, new head] + // if IsForcePush=false, CommitIDs contains the new commits newly pushed to the head branch + CommitIDs []string `json:"commit_ids"` +} + +func (c *Comment) GetPushActionContent() (*PushActionContent, error) { + if c.Type != CommentTypePullRequestPush { + return nil, errors.New("not a pull request push comment") + } + var data PushActionContent + _ = json.Unmarshal(util.UnsafeStringToBytes(c.Content), &data) + return &data, nil } // LoadIssue loads the issue reference for the comment @@ -528,6 +544,12 @@ func (c *Comment) EventTag() string { return fmt.Sprintf("event-%d", c.ID) } +func (c *Comment) GetSanitizedContentHTML() template.HTML { + // mainly for type=4 CommentTypeCommitRef + // the content is a link like message title (from CreateRefComment) + return markup.Sanitize(c.Content) +} + // LoadLabel if comment.Type is CommentTypeLabel, then load Label func (c *Comment) LoadLabel(ctx context.Context) error { var label Label diff --git a/models/issues/pull.go b/models/issues/pull.go index c07044f301..1b883f2981 100644 --- 
a/models/issues/pull.go +++ b/models/issues/pull.go @@ -877,7 +877,12 @@ func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule, warnings := make([]string, 0) - expr := fmt.Sprintf("^%s$", strings.TrimPrefix(tokens[0], "!")) + // Strip leading "!" for negative rules, then strip leading "/" since + // git returns relative paths (e.g. "docs/foo.md" not "/docs/foo.md") + // and the regex is already anchored with ^...$, so the "/" is redundant. + pattern := strings.TrimPrefix(tokens[0], "!") + pattern = strings.TrimPrefix(pattern, "/") + expr := fmt.Sprintf("^%s$", pattern) rule.Rule, err = regexp2.Compile(expr, regexp2.None) if err != nil { warnings = append(warnings, fmt.Sprintf("incorrect codeowner regexp: %s", err)) diff --git a/models/issues/pull_test.go b/models/issues/pull_test.go index 25b27cbe9c..79d1f8aa9b 100644 --- a/models/issues/pull_test.go +++ b/models/issues/pull_test.go @@ -17,16 +17,43 @@ import ( "github.com/stretchr/testify/require" ) -func TestPullRequest_LoadAttributes(t *testing.T) { +func TestPullRequest(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) + + t.Run("LoadAttributes", testPullRequestLoadAttributes) + t.Run("LoadIssue", testPullRequestLoadIssue) + t.Run("LoadBaseRepo", testPullRequestLoadBaseRepo) + t.Run("LoadHeadRepo", testPullRequestLoadHeadRepo) + t.Run("PullRequestsNewest", testPullRequestsNewest) + t.Run("PullRequestsOldest", testPullRequestsOldest) + t.Run("GetUnmergedPullRequest", testGetUnmergedPullRequest) + t.Run("HasUnmergedPullRequestsByHeadInfo", testHasUnmergedPullRequestsByHeadInfo) + t.Run("GetUnmergedPullRequestsByHeadInfo", testGetUnmergedPullRequestsByHeadInfo) + t.Run("GetUnmergedPullRequestsByBaseInfo", testGetUnmergedPullRequestsByBaseInfo) + t.Run("GetPullRequestByIndex", testGetPullRequestByIndex) + t.Run("GetPullRequestByID", testGetPullRequestByID) + t.Run("GetPullRequestByIssueID", testGetPullRequestByIssueID) + t.Run("PullRequest_UpdateCols", 
testPullRequestUpdateCols) + t.Run("PullRequest_IsWorkInProgress", testPullRequestIsWorkInProgress) + t.Run("PullRequest_GetWorkInProgressPrefixWorkInProgress", testPullRequestGetWorkInProgressPrefixWorkInProgress) + t.Run("DeleteOrphanedObjects", testDeleteOrphanedObjects) + t.Run("ParseCodeOwnersLine", testParseCodeOwnersLine) + t.Run("CodeOwnerAbsolutePathPatterns", testCodeOwnerAbsolutePathPatterns) + t.Run("GetApprovers", testGetApprovers) + t.Run("GetPullRequestByMergedCommit", testGetPullRequestByMergedCommit) + t.Run("Migrate_InsertPullRequests", testMigrateInsertPullRequests) + t.Run("PullRequestsClosedRecentSortType", testPullRequestsClosedRecentSortType) + t.Run("LoadRequestedReviewers", testLoadRequestedReviewers) +} + +func testPullRequestLoadAttributes(t *testing.T) { pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}) assert.NoError(t, pr.LoadAttributes(t.Context())) assert.NotNil(t, pr.Merger) assert.Equal(t, pr.MergerID, pr.Merger.ID) } -func TestPullRequest_LoadIssue(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testPullRequestLoadIssue(t *testing.T) { pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}) assert.NoError(t, pr.LoadIssue(t.Context())) assert.NotNil(t, pr.Issue) @@ -36,8 +63,7 @@ func TestPullRequest_LoadIssue(t *testing.T) { assert.Equal(t, int64(2), pr.Issue.ID) } -func TestPullRequest_LoadBaseRepo(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testPullRequestLoadBaseRepo(t *testing.T) { pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}) assert.NoError(t, pr.LoadBaseRepo(t.Context())) assert.NotNil(t, pr.BaseRepo) @@ -47,8 +73,7 @@ func TestPullRequest_LoadBaseRepo(t *testing.T) { assert.Equal(t, pr.BaseRepoID, pr.BaseRepo.ID) } -func TestPullRequest_LoadHeadRepo(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testPullRequestLoadHeadRepo(t *testing.T) { pr := 
unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}) assert.NoError(t, pr.LoadHeadRepo(t.Context())) assert.NotNil(t, pr.HeadRepo) @@ -59,8 +84,7 @@ func TestPullRequest_LoadHeadRepo(t *testing.T) { // TODO TestNewPullRequest -func TestPullRequestsNewest(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testPullRequestsNewest(t *testing.T) { prs, count, err := issues_model.PullRequests(t.Context(), 1, &issues_model.PullRequestsOptions{ ListOptions: db.ListOptions{ Page: 1, @@ -77,7 +101,7 @@ func TestPullRequestsNewest(t *testing.T) { } } -func TestPullRequests_Closed_RecentSortType(t *testing.T) { +func testPullRequestsClosedRecentSortType(t *testing.T) { // Issue ID | Closed At. | Updated At // 2 | 1707270001 | 1707270001 // 3 | 1707271000 | 1707279999 @@ -90,7 +114,6 @@ func TestPullRequests_Closed_RecentSortType(t *testing.T) { {"recentclose", []int64{11, 3, 2}}, } - assert.NoError(t, unittest.PrepareTestDatabase()) _, err := db.Exec(t.Context(), "UPDATE issue SET closed_unix = 1707270001, updated_unix = 1707270001, is_closed = true WHERE id = 2") require.NoError(t, err) _, err = db.Exec(t.Context(), "UPDATE issue SET closed_unix = 1707271000, updated_unix = 1707279999, is_closed = true WHERE id = 3") @@ -118,9 +141,7 @@ func TestPullRequests_Closed_RecentSortType(t *testing.T) { } } -func TestLoadRequestedReviewers(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - +func testLoadRequestedReviewers(t *testing.T) { pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) assert.NoError(t, pull.LoadIssue(t.Context())) issue := pull.Issue @@ -146,8 +167,7 @@ func TestLoadRequestedReviewers(t *testing.T) { assert.Empty(t, pull.RequestedReviewers) } -func TestPullRequestsOldest(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testPullRequestsOldest(t *testing.T) { prs, count, err := issues_model.PullRequests(t.Context(), 1, &issues_model.PullRequestsOptions{ 
ListOptions: db.ListOptions{ Page: 1, @@ -164,8 +184,7 @@ func TestPullRequestsOldest(t *testing.T) { } } -func TestGetUnmergedPullRequest(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testGetUnmergedPullRequest(t *testing.T) { pr, err := issues_model.GetUnmergedPullRequest(t.Context(), 1, 1, "branch2", "master", issues_model.PullRequestFlowGithub) assert.NoError(t, err) assert.Equal(t, int64(2), pr.ID) @@ -175,9 +194,7 @@ func TestGetUnmergedPullRequest(t *testing.T) { assert.True(t, issues_model.IsErrPullRequestNotExist(err)) } -func TestHasUnmergedPullRequestsByHeadInfo(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - +func testHasUnmergedPullRequestsByHeadInfo(t *testing.T) { exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(t.Context(), 1, "branch2") assert.NoError(t, err) assert.True(t, exist) @@ -187,8 +204,7 @@ func TestHasUnmergedPullRequestsByHeadInfo(t *testing.T) { assert.False(t, exist) } -func TestGetUnmergedPullRequestsByHeadInfo(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testGetUnmergedPullRequestsByHeadInfo(t *testing.T) { prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(t.Context(), 1, "branch2") assert.NoError(t, err) assert.Len(t, prs, 1) @@ -198,8 +214,7 @@ func TestGetUnmergedPullRequestsByHeadInfo(t *testing.T) { } } -func TestGetUnmergedPullRequestsByBaseInfo(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testGetUnmergedPullRequestsByBaseInfo(t *testing.T) { prs, err := issues_model.GetUnmergedPullRequestsByBaseInfo(t.Context(), 1, "master") assert.NoError(t, err) assert.Len(t, prs, 1) @@ -209,8 +224,7 @@ func TestGetUnmergedPullRequestsByBaseInfo(t *testing.T) { assert.Equal(t, "master", pr.BaseBranch) } -func TestGetPullRequestByIndex(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testGetPullRequestByIndex(t *testing.T) { pr, err := issues_model.GetPullRequestByIndex(t.Context(), 1, 2) 
assert.NoError(t, err) assert.Equal(t, int64(1), pr.BaseRepoID) @@ -225,8 +239,7 @@ func TestGetPullRequestByIndex(t *testing.T) { assert.True(t, issues_model.IsErrPullRequestNotExist(err)) } -func TestGetPullRequestByID(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testGetPullRequestByID(t *testing.T) { pr, err := issues_model.GetPullRequestByID(t.Context(), 1) assert.NoError(t, err) assert.Equal(t, int64(1), pr.ID) @@ -237,8 +250,7 @@ func TestGetPullRequestByID(t *testing.T) { assert.True(t, issues_model.IsErrPullRequestNotExist(err)) } -func TestGetPullRequestByIssueID(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testGetPullRequestByIssueID(t *testing.T) { pr, err := issues_model.GetPullRequestByIssueID(t.Context(), 2) assert.NoError(t, err) assert.Equal(t, int64(2), pr.IssueID) @@ -248,8 +260,7 @@ func TestGetPullRequestByIssueID(t *testing.T) { assert.True(t, issues_model.IsErrPullRequestNotExist(err)) } -func TestPullRequest_UpdateCols(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testPullRequestUpdateCols(t *testing.T) { pr := &issues_model.PullRequest{ ID: 1, BaseBranch: "baseBranch", @@ -265,9 +276,7 @@ func TestPullRequest_UpdateCols(t *testing.T) { // TODO TestAddTestPullRequestTask -func TestPullRequest_IsWorkInProgress(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - +func testPullRequestIsWorkInProgress(t *testing.T) { pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) pr.LoadIssue(t.Context()) @@ -280,9 +289,7 @@ func TestPullRequest_IsWorkInProgress(t *testing.T) { assert.True(t, pr.IsWorkInProgress(t.Context())) } -func TestPullRequest_GetWorkInProgressPrefixWorkInProgress(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - +func testPullRequestGetWorkInProgressPrefixWorkInProgress(t *testing.T) { pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}) pr.LoadIssue(t.Context()) @@ 
-296,9 +303,7 @@ func TestPullRequest_GetWorkInProgressPrefixWorkInProgress(t *testing.T) { assert.Equal(t, "[wip]", pr.GetWorkInProgressPrefix(t.Context())) } -func TestDeleteOrphanedObjects(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - +func testDeleteOrphanedObjects(t *testing.T) { countBefore, err := db.GetEngine(t.Context()).Count(&issues_model.PullRequest{}) assert.NoError(t, err) @@ -317,7 +322,7 @@ func TestDeleteOrphanedObjects(t *testing.T) { assert.Equal(t, countBefore, countAfter) } -func TestParseCodeOwnersLine(t *testing.T) { +func testParseCodeOwnersLine(t *testing.T) { type CodeOwnerTest struct { Line string Tokens []string @@ -331,6 +336,8 @@ func TestParseCodeOwnersLine(t *testing.T) { {Line: `docs/(aws|google|azure)/[^/]*\\.(md|txt) @org3 @org2/team2`, Tokens: []string{`docs/(aws|google|azure)/[^/]*\.(md|txt)`, "@org3", "@org2/team2"}}, {Line: `\#path @org3`, Tokens: []string{`#path`, "@org3"}}, {Line: `path\ with\ spaces/ @org3`, Tokens: []string{`path with spaces/`, "@org3"}}, + {Line: `/docs/.*\\.md @user1`, Tokens: []string{`/docs/.*\.md`, "@user1"}}, + {Line: `!/assets/.*\\.(bin|exe|msi) @user1`, Tokens: []string{`!/assets/.*\.(bin|exe|msi)`, "@user1"}}, } for _, g := range given { @@ -339,8 +346,37 @@ func TestParseCodeOwnersLine(t *testing.T) { } } -func TestGetApprovers(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testCodeOwnerAbsolutePathPatterns(t *testing.T) { + type testCase struct { + content string + file string + expected bool + } + + cases := []testCase{ + // Absolute path pattern should match (leading "/" stripped) + {content: "/README.md @user5\n", file: "README.md", expected: true}, + // Absolute path pattern in subdirectory + {content: "/docs/.* @user5\n", file: "docs/foo.md", expected: true}, + // Absolute path should not match nested paths it shouldn't + {content: "/docs/.* @user5\n", file: "other/docs/foo.md", expected: false}, + // Relative path still works + {content: 
"README.md @user5\n", file: "README.md", expected: true}, + // Negated absolute path pattern + {content: "!/.* @user5\n", file: "README.md", expected: false}, + } + + for _, c := range cases { + rules, _ := issues_model.GetCodeOwnersFromContent(t.Context(), c.content) + require.NotEmpty(t, rules) + rule := rules[0] + regexpMatched, _ := rule.Rule.MatchString(c.file) + ruleMatched := regexpMatched == !rule.Negative + assert.Equal(t, c.expected, ruleMatched, "pattern %q against file %q", c.content, c.file) + } +} + +func testGetApprovers(t *testing.T) { pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 5}) // Official reviews are already deduplicated. Allow unofficial reviews // to assert that there are no duplicated approvers. @@ -350,8 +386,7 @@ func TestGetApprovers(t *testing.T) { assert.Equal(t, expected, approvers) } -func TestGetPullRequestByMergedCommit(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testGetPullRequestByMergedCommit(t *testing.T) { pr, err := issues_model.GetPullRequestByMergedCommit(t.Context(), 1, "1a8823cd1a9549fde083f992f6b9b87a7ab74fb3") assert.NoError(t, err) assert.EqualValues(t, 1, pr.ID) @@ -362,8 +397,7 @@ func TestGetPullRequestByMergedCommit(t *testing.T) { assert.ErrorAs(t, err, &issues_model.ErrPullRequestNotExist{}) } -func TestMigrate_InsertPullRequests(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) +func testMigrateInsertPullRequests(t *testing.T) { reponame := "repo1" repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{Name: reponame}) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID}) diff --git a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/action_run.yml b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/action_run.yml index 342adb2a04..49f71ce3fc 100644 --- a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/action_run.yml +++ 
b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/action_run.yml @@ -1,9 +1,29 @@ # type ActionRun struct { -# ID int64 `xorm:"pk autoincr"` -# RepoID int64 `xorm:"index"` -# Index int64 +# ID int64 `xorm:"pk autoincr"` +# RepoID int64 `xorm:"index"` +# Index int64 +# CommitSHA string `xorm:"commit_sha"` +# Event string +# TriggerEvent string +# EventPayload string `xorm:"LONGTEXT"` # } - - id: 106 - repo_id: 1 + id: 990 + repo_id: 100 index: 7 + commit_sha: merge-sha + event: pull_request + event_payload: '{"pull_request":{"head":{"sha":"sha-shared"}}}' +- + id: 991 + repo_id: 100 + index: 8 + commit_sha: sha-shared + event: push + event_payload: '{"head_commit":{"id":"sha-shared"}}' +- + id: 1991 + repo_id: 100 + index: 9 + commit_sha: sha-other + event: release diff --git a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/action_run_job.yml b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/action_run_job.yml index 4f90a4495c..addf5e0682 100644 --- a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/action_run_job.yml +++ b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/action_run_job.yml @@ -3,8 +3,14 @@ # RunID int64 `xorm:"index"` # } - - id: 530 - run_id: 106 + id: 997 + run_id: 990 - - id: 531 - run_id: 106 + id: 998 + run_id: 990 +- + id: 1997 + run_id: 991 +- + id: 1998 + run_id: 1991 diff --git a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/commit_status.yml b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/commit_status.yml index ceff4c9993..6be1c7ca48 100644 --- a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/commit_status.yml +++ b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/commit_status.yml @@ -1,29 +1,51 @@ # type CommitStatus struct { # ID int64 `xorm:"pk autoincr"` # RepoID int64 `xorm:"index"` +# SHA string # TargetURL 
string # } - - id: 10 - repo_id: 1 + id: 10010 + repo_id: 100 + sha: sha-shared target_url: /testuser/repo1/actions/runs/7/jobs/0 - - id: 11 - repo_id: 1 + id: 10011 + repo_id: 100 + sha: sha-shared target_url: /testuser/repo1/actions/runs/7/jobs/1 - - id: 12 - repo_id: 1 + id: 10012 + repo_id: 100 + sha: sha-shared + target_url: /testuser/repo1/actions/runs/8/jobs/0 +- + id: 10013 + repo_id: 100 + sha: sha-other + target_url: /testuser/repo1/actions/runs/9/jobs/0 +- + id: 10014 + repo_id: 100 + sha: sha-shared target_url: /otheruser/badrepo/actions/runs/7/jobs/0 - - id: 13 - repo_id: 1 + id: 10015 + repo_id: 100 + sha: sha-shared target_url: /testuser/repo1/actions/runs/10/jobs/0 - - id: 14 - repo_id: 1 + id: 10016 + repo_id: 100 + sha: sha-shared target_url: /testuser/repo1/actions/runs/7/jobs/3 - - id: 15 - repo_id: 1 + id: 10017 + repo_id: 100 + sha: sha-shared target_url: https://ci.example.com/build/123 +- + id: 10018 + repo_id: 100 + sha: sha-shared + target_url: /testuser/repo1/actions/runs/990/jobs/997 diff --git a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/commit_status_summary.yml b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/commit_status_summary.yml index 580b2a4f04..3dd846bb36 100644 --- a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/commit_status_summary.yml +++ b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/commit_status_summary.yml @@ -6,14 +6,14 @@ # TargetURL string # } - - id: 20 - repo_id: 1 - sha: "012345" - state: success + id: 10020 + repo_id: 100 + sha: sha-shared + state: pending target_url: /testuser/repo1/actions/runs/7/jobs/0 - - id: 21 - repo_id: 1 - sha: "678901" - state: success - target_url: https://ci.example.com/build/123 + id: 10021 + repo_id: 100 + sha: sha-other + state: pending + target_url: /testuser/repo1/actions/runs/9/jobs/0 diff --git 
a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/repository.yml b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/repository.yml index 86cfb926e4..46162e7803 100644 --- a/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/repository.yml +++ b/models/migrations/fixtures/Test_FixCommitStatusTargetURLToUseRunAndJobID/repository.yml @@ -4,6 +4,6 @@ # Name string # } - - id: 1 + id: 100 owner_name: testuser name: repo1 diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go index cad4156dee..db74ff78d5 100644 --- a/models/migrations/migrations.go +++ b/models/migrations/migrations.go @@ -400,7 +400,7 @@ func prepareMigrationTasks() []*migration { newMigration(323, "Add support for actions concurrency", v1_26.AddActionsConcurrency), newMigration(324, "Fix closed milestone completeness for milestones with no issues", v1_26.FixClosedMilestoneCompleteness), newMigration(325, "Fix missed repo_id when migrate attachments", v1_26.FixMissedRepoIDWhenMigrateAttachments), - newMigration(326, "Migrate commit status target URL to use run ID and job ID", v1_26.FixCommitStatusTargetURLToUseRunAndJobID), + newMigration(326, "Partially migrate commit status target URL to use run ID and job ID", v1_26.FixCommitStatusTargetURLToUseRunAndJobID), newMigration(327, "Add disabled state to action runners", v1_26.AddDisabledToActionRunner), newMigration(328, "Add TokenPermissions column to ActionRunJob", v1_26.AddTokenPermissionsToActionRunJob), newMigration(329, "Add unique constraint for user badge", v1_26.AddUniqueIndexForUserBadge), diff --git a/models/migrations/v1_19/v233.go b/models/migrations/v1_19/v233.go index 9eb6d40509..44ced874b3 100644 --- a/models/migrations/v1_19/v233.go +++ b/models/migrations/v1_19/v233.go @@ -9,7 +9,6 @@ import ( "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/secret" "code.gitea.io/gitea/modules/setting" - api "code.gitea.io/gitea/modules/structs" 
"xorm.io/builder" "xorm.io/xorm" @@ -129,11 +128,11 @@ func AddHeaderAuthorizationEncryptedColWebhook(x *xorm.Engine) error { } type MatrixPayloadSafe struct { - Body string `json:"body"` - MsgType string `json:"msgtype"` - Format string `json:"format"` - FormattedBody string `json:"formatted_body"` - Commits []*api.PayloadCommit `json:"io.gitea.commits,omitempty"` + Body string `json:"body"` + MsgType string `json:"msgtype"` + Format string `json:"format"` + FormattedBody string `json:"formatted_body"` + Commits json.Value `json:"io.gitea.commits,omitempty"` } type MatrixPayloadUnsafe struct { MatrixPayloadSafe diff --git a/models/migrations/v1_26/v326.go b/models/migrations/v1_26/v326.go index 1ec0af76a0..dcf548bec0 100644 --- a/models/migrations/v1_26/v326.go +++ b/models/migrations/v1_26/v326.go @@ -4,18 +4,32 @@ package v1_26 import ( + "errors" "fmt" "net/url" "strconv" "strings" + "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + webhook_module "code.gitea.io/gitea/modules/webhook" "xorm.io/xorm" ) -const actionsRunPath = "/actions/runs/" +const ( + actionsRunPath = "/actions/runs/" + + // Only commit status target URLs whose resolved run ID is smaller than this threshold are rewritten by this partial migration. + // The fixed value 1000 is a conservative cutoff chosen to cover the smaller legacy run indexes that are most likely to be confused with ID-based URLs at runtime. + // Larger legacy {run} or {job} numbers are usually easier to disambiguate. For example: + // * /actions/runs/1200/jobs/1420 is most likely an ID-based URL, because a run should not contain more than 256 jobs. + // * /actions/runs/1500/jobs/3 is most likely an index-based URL, because a job ID cannot be smaller than its run ID. + // But URLs with small numbers, such as /actions/runs/5/jobs/6, are much harder to distinguish reliably. + // This migration therefore prioritizes rewriting target URLs for runs in that lower range. 
+ legacyURLIDThreshold int64 = 1000 +) type migrationRepository struct { ID int64 @@ -24,9 +38,13 @@ type migrationRepository struct { } type migrationActionRun struct { - ID int64 - RepoID int64 - Index int64 + ID int64 + RepoID int64 + Index int64 + CommitSHA string `xorm:"commit_sha"` + Event webhook_module.HookEventType + TriggerEvent string + EventPayload string } type migrationActionRunJob struct { @@ -40,93 +58,156 @@ type migrationCommitStatus struct { TargetURL string } -func FixCommitStatusTargetURLToUseRunAndJobID(x *xorm.Engine) error { - runByIndexCache := make(map[int64]map[int64]*migrationActionRun) - jobsByRunIDCache := make(map[int64][]int64) - repoLinkCache := make(map[int64]string) - - if err := migrateCommitStatusTargetURL(x, "commit_status", runByIndexCache, jobsByRunIDCache, repoLinkCache); err != nil { - return err - } - return migrateCommitStatusTargetURL(x, "commit_status_summary", runByIndexCache, jobsByRunIDCache, repoLinkCache) +// Frozen subsets of modules/structs payload types, decoded from stored +// action_run.event_payload values. Inlined so the migration is insulated +// from future field changes in modules/structs. +type migrationPayloadCommit struct { + ID string `json:"id"` } -func migrateCommitStatusTargetURL( +type migrationPushPayload struct { + HeadCommit *migrationPayloadCommit `json:"head_commit"` +} + +type migrationPRBranchInfo struct { + Sha string `json:"sha"` +} + +type migrationPullRequest struct { + Head *migrationPRBranchInfo `json:"head"` +} + +type migrationPullRequestPayload struct { + PullRequest *migrationPullRequest `json:"pull_request"` +} + +type commitSHAAndRuns struct { + commitSHA string + runs map[int64]*migrationActionRun +} + +// FixCommitStatusTargetURLToUseRunAndJobID partially migrates legacy Actions +// commit status target URLs to the new run/job ID-based form. +// +// Only rows whose resolved run ID is below legacyURLIDThreshold are rewritten. 
+// This is because smaller legacy run indexes are more likely to collide with run ID URLs during runtime resolution, +// so this migration prioritizes that lower range and leaves the remaining legacy target URLs to the web compatibility logic. +func FixCommitStatusTargetURLToUseRunAndJobID(x *xorm.Engine) error { + jobsByRunIDCache := make(map[int64][]int64) + repoLinkCache := make(map[int64]string) + groups, err := loadLegacyMigrationRunGroups(x) + if err != nil { + return err + } + + for repoID, groupsBySHA := range groups { + for _, group := range groupsBySHA { + if err := migrateCommitStatusTargetURLForGroup(x, "commit_status", repoID, group.commitSHA, group.runs, jobsByRunIDCache, repoLinkCache); err != nil { + return err + } + if err := migrateCommitStatusTargetURLForGroup(x, "commit_status_summary", repoID, group.commitSHA, group.runs, jobsByRunIDCache, repoLinkCache); err != nil { + return err + } + } + } + return nil +} + +func loadLegacyMigrationRunGroups(x *xorm.Engine) (map[int64]map[string]*commitSHAAndRuns, error) { + var runs []migrationActionRun + if err := x.Table("action_run"). + Where("id < ?", legacyURLIDThreshold). + Cols("id", "repo_id", "`index`", "commit_sha", "event", "trigger_event", "event_payload"). 
+ Find(&runs); err != nil { + return nil, fmt.Errorf("query action_run: %w", err) + } + + groups := make(map[int64]map[string]*commitSHAAndRuns) + for i := range runs { + run := runs[i] + commitID, err := getCommitStatusCommitID(&run) + if err != nil { + log.Warn("skip action_run id=%d when resolving commit status commit SHA: %v", run.ID, err) + continue + } + if commitID == "" { + // empty commitID means the run didn't create any commit status records, just skip + continue + } + if groups[run.RepoID] == nil { + groups[run.RepoID] = make(map[string]*commitSHAAndRuns) + } + if groups[run.RepoID][commitID] == nil { + groups[run.RepoID][commitID] = &commitSHAAndRuns{ + commitSHA: commitID, + runs: make(map[int64]*migrationActionRun), + } + } + groups[run.RepoID][commitID].runs[run.Index] = &run + } + return groups, nil +} + +func migrateCommitStatusTargetURLForGroup( x *xorm.Engine, table string, - runByIndexCache map[int64]map[int64]*migrationActionRun, + repoID int64, + sha string, + runs map[int64]*migrationActionRun, jobsByRunIDCache map[int64][]int64, repoLinkCache map[int64]string, ) error { - const batchSize = 500 - var lastID int64 + var rows []migrationCommitStatus + if err := x.Table(table). + Where("repo_id = ?", repoID). + And("sha = ?", sha). + Cols("id", "repo_id", "target_url"). + Find(&rows); err != nil { + return fmt.Errorf("query %s for repo_id=%d sha=%s: %w", table, repoID, sha, err) + } - for { - var rows []migrationCommitStatus - sess := x.Table(table). - Where("target_url LIKE ?", "%"+actionsRunPath+"%"). - And("id > ?", lastID). - Asc("id"). 
- Limit(batchSize) - if err := sess.Find(&rows); err != nil { - return fmt.Errorf("query %s: %w", table, err) - } - if len(rows) == 0 { - return nil + for _, row := range rows { + repoLink, err := getRepoLinkCached(x, repoLinkCache, row.RepoID) + if err != nil || repoLink == "" { + if err != nil { + log.Warn("convert %s id=%d getRepoLinkCached: %v", table, row.ID, err) + } else { + log.Warn("convert %s id=%d: repo=%d not found", table, row.ID, row.RepoID) + } + continue } - for _, row := range rows { - lastID = row.ID - if row.TargetURL == "" { - continue - } + runNum, jobNum, ok := parseTargetURL(row.TargetURL, repoLink) + if !ok { + continue + } - repoLink, err := getRepoLinkCached(x, repoLinkCache, row.RepoID) - if err != nil || repoLink == "" { - if err != nil { - log.Warn("convert %s id=%d getRepoLinkCached: %v", table, row.ID, err) - } else { - log.Warn("convert %s id=%d: repo=%d not found", table, row.ID, row.RepoID) - } - continue - } + run, ok := runs[runNum] + if !ok { + continue + } - runNum, jobNum, ok := parseTargetURL(row.TargetURL, repoLink) - if !ok { - continue + jobID, ok, err := getJobIDByIndexCached(x, jobsByRunIDCache, run.ID, jobNum) + if err != nil || !ok { + if err != nil { + log.Warn("convert %s id=%d getJobIDByIndexCached: %v", table, row.ID, err) + } else { + log.Warn("convert %s id=%d: job not found for run_id=%d job_index=%d", table, row.ID, run.ID, jobNum) } + continue + } - run, err := getRunByIndexCached(x, runByIndexCache, row.RepoID, runNum) - if err != nil || run == nil { - if err != nil { - log.Warn("convert %s id=%d getRunByIndexCached: %v", table, row.ID, err) - } else { - log.Warn("convert %s id=%d: run not found for repo_id=%d run_index=%d", table, row.ID, row.RepoID, runNum) - } - continue - } + oldURL := row.TargetURL + newURL := fmt.Sprintf("%s%s%d/jobs/%d", repoLink, actionsRunPath, run.ID, jobID) + if oldURL == newURL { + continue + } - jobID, ok, err := getJobIDByIndexCached(x, jobsByRunIDCache, run.ID, jobNum) - if err 
!= nil || !ok { - if err != nil { - log.Warn("convert %s id=%d getJobIDByIndexCached: %v", table, row.ID, err) - } else { - log.Warn("convert %s id=%d: job not found for run_id=%d job_index=%d", table, row.ID, run.ID, jobNum) - } - continue - } - - oldURL := row.TargetURL - newURL := fmt.Sprintf("%s%s%d/jobs/%d", repoLink, actionsRunPath, run.ID, jobID) // expect: {repo_link}/actions/runs/{run_id}/jobs/{job_id} - if oldURL == newURL { - continue - } - - if _, err := x.Table(table).ID(row.ID).Cols("target_url").Update(&migrationCommitStatus{TargetURL: newURL}); err != nil { - return fmt.Errorf("update %s id=%d target_url from %s to %s: %w", table, row.ID, oldURL, newURL, err) - } + if _, err := x.Table(table).ID(row.ID).Cols("target_url").Update(&migrationCommitStatus{TargetURL: newURL}); err != nil { + return fmt.Errorf("update %s id=%d target_url from %s to %s: %w", table, row.ID, oldURL, newURL, err) } } + return nil } func getRepoLinkCached(x *xorm.Engine, cache map[int64]string, repoID int64) (string, error) { @@ -147,35 +228,6 @@ func getRepoLinkCached(x *xorm.Engine, cache map[int64]string, repoID int64) (st return link, nil } -func getRunByIndexCached(x *xorm.Engine, cache map[int64]map[int64]*migrationActionRun, repoID, runIndex int64) (*migrationActionRun, error) { - if repoCache, ok := cache[repoID]; ok { - if run, ok := repoCache[runIndex]; ok { - if run == nil { - return nil, fmt.Errorf("run repo_id=%d run_index=%d not found", repoID, runIndex) - } - return run, nil - } - } - - var run migrationActionRun - has, err := x.Table("action_run").Where("repo_id=? 
AND `index`=?", repoID, runIndex).Get(&run) - if err != nil { - return nil, err - } - if !has { - if cache[repoID] == nil { - cache[repoID] = make(map[int64]*migrationActionRun) - } - cache[repoID][runIndex] = nil - return nil, fmt.Errorf("run repo_id=%d run_index=%d not found", repoID, runIndex) - } - if cache[repoID] == nil { - cache[repoID] = make(map[int64]*migrationActionRun) - } - cache[repoID][runIndex] = &run - return &run, nil -} - func getJobIDByIndexCached(x *xorm.Engine, cache map[int64][]int64, runID, jobIndex int64) (int64, bool, error) { jobIDs, ok := cache[runID] if !ok { @@ -202,7 +254,7 @@ func parseTargetURL(targetURL, repoLink string) (runNum, jobNum int64, ok bool) } rest := targetURL[len(prefix):] - parts := strings.Split(rest, "/") // expect: {run_num}/jobs/{job_num} + parts := strings.Split(rest, "/") if len(parts) == 3 && parts[1] == "jobs" { runNum, err1 := strconv.ParseInt(parts[0], 10, 64) jobNum, err2 := strconv.ParseInt(parts[2], 10, 64) @@ -214,3 +266,72 @@ func parseTargetURL(targetURL, repoLink string) (runNum, jobNum int64, ok bool) return 0, 0, false } + +func getCommitStatusCommitID(run *migrationActionRun) (string, error) { + switch run.Event { + case webhook_module.HookEventPush: + payload, err := getPushEventPayload(run) + if err != nil { + return "", fmt.Errorf("getPushEventPayload: %w", err) + } + if payload.HeadCommit == nil { + return "", errors.New("head commit is missing in event payload") + } + return payload.HeadCommit.ID, nil + case webhook_module.HookEventPullRequest, + webhook_module.HookEventPullRequestSync, + webhook_module.HookEventPullRequestAssign, + webhook_module.HookEventPullRequestLabel, + webhook_module.HookEventPullRequestReviewRequest, + webhook_module.HookEventPullRequestMilestone: + payload, err := getPullRequestEventPayload(run) + if err != nil { + return "", fmt.Errorf("getPullRequestEventPayload: %w", err) + } + if payload.PullRequest == nil { + return "", errors.New("pull request is missing in 
event payload") + } else if payload.PullRequest.Head == nil { + return "", errors.New("head of pull request is missing in event payload") + } + return payload.PullRequest.Head.Sha, nil + case webhook_module.HookEventPullRequestReviewApproved, + webhook_module.HookEventPullRequestReviewRejected, + webhook_module.HookEventPullRequestReviewComment: + payload, err := getPullRequestEventPayload(run) + if err != nil { + return "", fmt.Errorf("getPullRequestEventPayload: %w", err) + } + if payload.PullRequest == nil { + return "", errors.New("pull request is missing in event payload") + } else if payload.PullRequest.Head == nil { + return "", errors.New("head of pull request is missing in event payload") + } + return payload.PullRequest.Head.Sha, nil + case webhook_module.HookEventRelease: + return run.CommitSHA, nil + default: + return "", nil + } +} + +func getPushEventPayload(run *migrationActionRun) (*migrationPushPayload, error) { + if run.Event != webhook_module.HookEventPush { + return nil, fmt.Errorf("event %s is not a push event", run.Event) + } + var payload migrationPushPayload + if err := json.Unmarshal([]byte(run.EventPayload), &payload); err != nil { + return nil, err + } + return &payload, nil +} + +func getPullRequestEventPayload(run *migrationActionRun) (*migrationPullRequestPayload, error) { + if !run.Event.IsPullRequest() && !run.Event.IsPullRequestReview() { + return nil, fmt.Errorf("event %s is not a pull request event", run.Event) + } + var payload migrationPullRequestPayload + if err := json.Unmarshal([]byte(run.EventPayload), &payload); err != nil { + return nil, err + } + return &payload, nil +} diff --git a/models/migrations/v1_26/v326_test.go b/models/migrations/v1_26/v326_test.go index ddc2640160..b92eed35f6 100644 --- a/models/migrations/v1_26/v326_test.go +++ b/models/migrations/v1_26/v326_test.go @@ -28,9 +28,13 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) { } type ActionRun struct { - ID int64 `xorm:"pk autoincr"` - 
RepoID int64 `xorm:"index"` - Index int64 + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"index"` + Index int64 + CommitSHA string `xorm:"commit_sha"` + Event string + TriggerEvent string + EventPayload string `xorm:"LONGTEXT"` } type ActionRunJob struct { @@ -41,6 +45,7 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) { type CommitStatus struct { ID int64 `xorm:"pk autoincr"` RepoID int64 `xorm:"index"` + SHA string TargetURL string } @@ -61,14 +66,6 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) { ) defer deferable() - newURL1 := "/testuser/repo1/actions/runs/106/jobs/530" - newURL2 := "/testuser/repo1/actions/runs/106/jobs/531" - - invalidWrongRepo := "/otheruser/badrepo/actions/runs/7/jobs/0" - invalidNonexistentRun := "/testuser/repo1/actions/runs/10/jobs/0" - invalidNonexistentJob := "/testuser/repo1/actions/runs/7/jobs/3" - externalTargetURL := "https://ci.example.com/build/123" - require.NoError(t, FixCommitStatusTargetURLToUseRunAndJobID(x)) cases := []struct { @@ -76,14 +73,26 @@ func Test_FixCommitStatusTargetURLToUseRunAndJobID(t *testing.T) { id int64 want string }{ - {table: "commit_status", id: 10, want: newURL1}, - {table: "commit_status", id: 11, want: newURL2}, - {table: "commit_status", id: 12, want: invalidWrongRepo}, - {table: "commit_status", id: 13, want: invalidNonexistentRun}, - {table: "commit_status", id: 14, want: invalidNonexistentJob}, - {table: "commit_status", id: 15, want: externalTargetURL}, - {table: "commit_status_summary", id: 20, want: newURL1}, - {table: "commit_status_summary", id: 21, want: externalTargetURL}, + // Legacy URLs for runs whose resolved run IDs are below the threshold should be rewritten. 
+ {table: "commit_status", id: 10010, want: "/testuser/repo1/actions/runs/990/jobs/997"}, + {table: "commit_status", id: 10011, want: "/testuser/repo1/actions/runs/990/jobs/998"}, + {table: "commit_status", id: 10012, want: "/testuser/repo1/actions/runs/991/jobs/1997"}, + + // Runs whose resolved IDs are above the threshold are intentionally left unchanged. + {table: "commit_status", id: 10013, want: "/testuser/repo1/actions/runs/9/jobs/0"}, + + // URLs that do not resolve cleanly as legacy Actions URLs should remain untouched. + {table: "commit_status", id: 10014, want: "/otheruser/badrepo/actions/runs/7/jobs/0"}, + {table: "commit_status", id: 10015, want: "/testuser/repo1/actions/runs/10/jobs/0"}, + {table: "commit_status", id: 10016, want: "/testuser/repo1/actions/runs/7/jobs/3"}, + {table: "commit_status", id: 10017, want: "https://ci.example.com/build/123"}, + + // Already ID-based URLs are valid inputs and should not be rewritten again. + {table: "commit_status", id: 10018, want: "/testuser/repo1/actions/runs/990/jobs/997"}, + + // The same rewrite rules apply to commit_status_summary rows. 
+ {table: "commit_status_summary", id: 10020, want: "/testuser/repo1/actions/runs/990/jobs/997"}, + {table: "commit_status_summary", id: 10021, want: "/testuser/repo1/actions/runs/9/jobs/0"}, } for _, tc := range cases { diff --git a/models/migrations/v1_6/v71.go b/models/migrations/v1_6/v71.go index 2b11f57c92..b4dcd87eba 100644 --- a/models/migrations/v1_6/v71.go +++ b/models/migrations/v1_6/v71.go @@ -51,10 +51,7 @@ func AddScratchHash(x *xorm.Engine) error { for _, tfa := range tfas { // generate salt - salt, err := util.CryptoRandomString(10) - if err != nil { - return err - } + salt := util.CryptoRandomString(10) tfa.ScratchSalt = salt tfa.ScratchHash = base.HashToken(tfa.ScratchToken, salt) diff --git a/models/migrations/v1_9/v85.go b/models/migrations/v1_9/v85.go index 48e1cd5dc4..0e95a71f92 100644 --- a/models/migrations/v1_9/v85.go +++ b/models/migrations/v1_9/v85.go @@ -65,10 +65,7 @@ func HashAppToken(x *xorm.Engine) error { for _, token := range tokens { // generate salt - salt, err := util.CryptoRandomString(10) - if err != nil { - return err - } + salt := util.CryptoRandomString(10) token.TokenSalt = salt token.TokenHash = base.HashToken(token.Sha1, salt) if len(token.Sha1) < 8 { diff --git a/models/organization/team_invite.go b/models/organization/team_invite.go index 17f6c59610..186ae5f6e8 100644 --- a/models/organization/team_invite.go +++ b/models/organization/team_invite.go @@ -116,10 +116,7 @@ func CreateTeamInvite(ctx context.Context, doer *user_model.User, team *Team, em } } - token, err := util.CryptoRandomString(25) - if err != nil { - return nil, err - } + token := util.CryptoRandomString(25) invite := &TeamInvite{ Token: token, diff --git a/models/organization/team_list.go b/models/organization/team_list.go index 0274f9c5ba..5629cec366 100644 --- a/models/organization/team_list.go +++ b/models/organization/team_list.go @@ -88,7 +88,7 @@ func SearchTeam(ctx context.Context, opts *SearchTeamOptions) (TeamList, int64, sess = 
db.SetSessionPagination(sess, opts) teams := make([]*Team, 0, opts.PageSize) - count, err := sess.Where(cond).OrderBy("lower_name").FindAndCount(&teams) + count, err := sess.Where(cond).OrderBy("CASE WHEN name=? THEN '' ELSE lower_name END", OwnerTeamName).FindAndCount(&teams) if err != nil { return nil, 0, err } diff --git a/models/packages/descriptor.go b/models/packages/descriptor.go index ea0e0d5e73..2ef27051ee 100644 --- a/models/packages/descriptor.go +++ b/models/packages/descriptor.go @@ -9,6 +9,7 @@ import ( "fmt" "net/url" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/cache" @@ -53,8 +54,11 @@ func (l PackagePropertyList) GetByName(name string) string { // PackageDescriptor describes a package type PackageDescriptor struct { - Package *Package - Owner *user_model.User + // basic package info + Package *Package + Owner *user_model.User + + // package version info Repository *repo_model.Repository Version *PackageVersion SemVer *version.Version @@ -77,6 +81,11 @@ func (pd *PackageDescriptor) PackageWebLink() string { return fmt.Sprintf("%s/-/packages/%s/%s", pd.Owner.HomeLink(), string(pd.Package.Type), url.PathEscape(pd.Package.LowerName)) } +// PackageSettingsLink returns the relative package settings link +func (pd *PackageDescriptor) PackageSettingsLink() string { + return fmt.Sprintf("%s/-/packages/settings/%s/%s", pd.Owner.HomeLink(), string(pd.Package.Type), url.PathEscape(pd.Package.LowerName)) +} + // VersionWebLink returns the relative package version web link func (pd *PackageDescriptor) VersionWebLink() string { return fmt.Sprintf("%s/%s", pd.PackageWebLink(), url.PathEscape(pd.Version.LowerVersion)) @@ -203,6 +212,8 @@ func GetPackageDescriptorWithCache(ctx context.Context, pv *PackageVersion, c *c metadata = &rubygems.Metadata{} case TypeSwift: metadata = &swift.Metadata{} + case TypeTerraformState: + // terraform packages have no 
metadata case TypeVagrant: metadata = &vagrant.Metadata{} default: @@ -267,6 +278,15 @@ func GetPackageDescriptors(ctx context.Context, pvs []*PackageVersion) ([]*Packa return getPackageDescriptors(ctx, pvs, cache.NewEphemeralCache()) } +// GetAllPackageDescriptors gets all package descriptors for a package +func GetAllPackageDescriptors(ctx context.Context, p *Package) ([]*PackageDescriptor, error) { + pvs := make([]*PackageVersion, 0, 10) + if err := db.GetEngine(ctx).Where("package_id = ?", p.ID).Find(&pvs); err != nil { + return nil, err + } + return getPackageDescriptors(ctx, pvs, cache.NewEphemeralCache()) +} + func getPackageDescriptors(ctx context.Context, pvs []*PackageVersion, c *cache.EphemeralCache) ([]*PackageDescriptor, error) { pds := make([]*PackageDescriptor, 0, len(pvs)) for _, pv := range pvs { diff --git a/models/packages/package.go b/models/packages/package.go index 38d1cdcf66..17e5d4eee3 100644 --- a/models/packages/package.go +++ b/models/packages/package.go @@ -30,28 +30,29 @@ type Type string // List of supported packages const ( - TypeAlpine Type = "alpine" - TypeArch Type = "arch" - TypeCargo Type = "cargo" - TypeChef Type = "chef" - TypeComposer Type = "composer" - TypeConan Type = "conan" - TypeConda Type = "conda" - TypeContainer Type = "container" - TypeCran Type = "cran" - TypeDebian Type = "debian" - TypeGeneric Type = "generic" - TypeGo Type = "go" - TypeHelm Type = "helm" - TypeMaven Type = "maven" - TypeNpm Type = "npm" - TypeNuGet Type = "nuget" - TypePub Type = "pub" - TypePyPI Type = "pypi" - TypeRpm Type = "rpm" - TypeRubyGems Type = "rubygems" - TypeSwift Type = "swift" - TypeVagrant Type = "vagrant" + TypeAlpine Type = "alpine" + TypeArch Type = "arch" + TypeCargo Type = "cargo" + TypeChef Type = "chef" + TypeComposer Type = "composer" + TypeConan Type = "conan" + TypeConda Type = "conda" + TypeContainer Type = "container" + TypeCran Type = "cran" + TypeDebian Type = "debian" + TypeGeneric Type = "generic" + TypeGo Type = 
"go" + TypeHelm Type = "helm" + TypeMaven Type = "maven" + TypeNpm Type = "npm" + TypeNuGet Type = "nuget" + TypePub Type = "pub" + TypePyPI Type = "pypi" + TypeRpm Type = "rpm" + TypeRubyGems Type = "rubygems" + TypeSwift Type = "swift" + TypeTerraformState Type = "terraform" + TypeVagrant Type = "vagrant" ) var TypeList = []Type{ @@ -76,6 +77,7 @@ var TypeList = []Type{ TypeRpm, TypeRubyGems, TypeSwift, + TypeTerraformState, TypeVagrant, } @@ -124,6 +126,8 @@ func (pt Type) Name() string { return "RubyGems" case TypeSwift: return "Swift" + case TypeTerraformState: + return "Terraform State" case TypeVagrant: return "Vagrant" } @@ -175,6 +179,8 @@ func (pt Type) SVGName() string { return "gitea-rubygems" case TypeSwift: return "gitea-swift" + case TypeTerraformState: + return "gitea-terraform" case TypeVagrant: return "gitea-vagrant" } diff --git a/models/packages/package_blob_upload.go b/models/packages/package_blob_upload.go index 4b0e789221..60a55805a8 100644 --- a/models/packages/package_blob_upload.go +++ b/models/packages/package_blob_upload.go @@ -31,16 +31,13 @@ type PackageBlobUpload struct { // CreateBlobUpload inserts a blob upload func CreateBlobUpload(ctx context.Context) (*PackageBlobUpload, error) { - id, err := util.CryptoRandomString(25) - if err != nil { - return nil, err - } + id := util.CryptoRandomString(25) pbu := &PackageBlobUpload{ ID: strings.ToLower(id), } - _, err = db.GetEngine(ctx).Insert(pbu) + _, err := db.GetEngine(ctx).Insert(pbu) return pbu, err } diff --git a/models/packages/package_file.go b/models/packages/package_file.go index bf877485d6..69401eee3e 100644 --- a/models/packages/package_file.go +++ b/models/packages/package_file.go @@ -115,6 +115,20 @@ func DeleteFileByID(ctx context.Context, fileID int64) error { return err } +// DeleteFilesByPackageID deletes all files of a specific package +// Versions must not be deleted prior to this call +func DeleteFilesByPackageID(ctx context.Context, packageID int64) error { + 
deleteStmt := builder.Delete(builder.In("version_id", builder.Select("package_version.id").From("package_version").Where(builder.Eq{"package_id": packageID}))).From("package_file") + _, err := db.GetEngine(ctx).Exec(deleteStmt) + return err +} + +// DeleteFilesByVersionID deletes all files of a specific version +func DeleteFilesByVersionID(ctx context.Context, versionID int64) error { + _, err := db.GetEngine(ctx).Where("version_id = ?", versionID).Delete(&PackageFile{}) + return err +} + func UpdateFile(ctx context.Context, pf *PackageFile, cols []string) error { _, err := db.GetEngine(ctx).ID(pf.ID).Cols(cols...).Update(pf) return err diff --git a/models/packages/package_property.go b/models/packages/package_property.go index acc05d8d5a..30794ad73c 100644 --- a/models/packages/package_property.go +++ b/models/packages/package_property.go @@ -5,6 +5,7 @@ package packages import ( "context" + "errors" "code.gitea.io/gitea/models/db" @@ -51,13 +52,13 @@ func InsertProperty(ctx context.Context, refType PropertyType, refID int64, name // GetProperties gets all properties func GetProperties(ctx context.Context, refType PropertyType, refID int64) ([]*PackageProperty, error) { pps := make([]*PackageProperty, 0, 10) - return pps, db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).Find(&pps) + return pps, db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).OrderBy("id").Find(&pps) } // GetPropertiesByName gets all properties with a specific name func GetPropertiesByName(ctx context.Context, refType PropertyType, refID int64, name string) ([]*PackageProperty, error) { pps := make([]*PackageProperty, 0, 10) - return pps, db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? AND name = ?", refType, refID, name).Find(&pps) + return pps, db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? 
AND name = ?", refType, refID, name).OrderBy("id").Find(&pps) } // UpdateProperty updates a property @@ -86,6 +87,46 @@ func DeleteAllProperties(ctx context.Context, refType PropertyType, refID int64) return err } +// DeletePropertiesByPackageID deletes properties of a typed linked to the package +// Use to avoid for loops in mass deletion of properties +func DeletePropertiesByPackageID(ctx context.Context, refType PropertyType, packageID int64) error { + var deleteStmt *builder.Builder + + switch refType { + case PropertyTypeFile: + deleteStmt = builder.Delete( + // Delete all properties that are attached to a file and are in ids from a subquery + // which returns ids from the package_file table joined on package_version to link it with package id + builder.Eq{"ref_type": PropertyTypeFile}, builder.In("ref_id", + builder.Select("package_file.id").From("package_file"). + LeftJoin("package_version", "package_file.version_id = package_version.id"). + Where(builder.Eq{"package_version.package_id": packageID}))).From("package_property") + case PropertyTypeVersion: + // Delete all properties that are attached to a version and are in ids from subquery to the package_version filtered by package id + deleteStmt = builder.Delete( + builder.Eq{"ref_type": PropertyTypeVersion}, builder.In("ref_id", + builder.Select("package_version.id").From("package_version"). + Where(builder.Eq{"package_version.package_id": packageID}))).From("package_property") + case PropertyTypePackage: + // Delete all properties that are attached to a package and their reference links to the given package ID + deleteStmt = builder.Delete( + builder.Eq{"ref_type": PropertyTypePackage}, builder.Eq{"ref_id": packageID}). 
+ From("package_property") + default: + return errors.New("invalid ref type") + } + + _, err := db.GetEngine(ctx).Exec(deleteStmt) + return err +} + +// DeleteFilePropertiesByVersionID deletes all file properties linked to specific version +func DeleteFilePropertiesByVersionID(ctx context.Context, versionID int64) error { + deleteStmt := builder.Delete(builder.Eq{"ref_type": PropertyTypeFile}, builder.In("ref_id", builder.Select("id").From("package_file").Where(builder.Eq{"version_id": versionID}))).From("package_property") + _, err := db.GetEngine(ctx).Exec(deleteStmt) + return err +} + // DeletePropertyByID deletes a property func DeletePropertyByID(ctx context.Context, propertyID int64) error { _, err := db.GetEngine(ctx).ID(propertyID).Delete(&PackageProperty{}) diff --git a/models/packages/package_version.go b/models/packages/package_version.go index 0a478c0323..3e0e1899ea 100644 --- a/models/packages/package_version.go +++ b/models/packages/package_version.go @@ -157,6 +157,12 @@ func DeleteVersionByID(ctx context.Context, versionID int64) error { return err } +// DeleteVersionsByPackageID deletes all versions of a specific package +func DeleteVersionsByPackageID(ctx context.Context, packageID int64) error { + _, err := db.GetEngine(ctx).Where(builder.Eq{"package_id": packageID}).Delete(&PackageVersion{}) + return err +} + // HasVersionFileReferences checks if there are associated files func HasVersionFileReferences(ctx context.Context, versionID int64) (bool, error) { return db.GetEngine(ctx).Get(&PackageFile{ diff --git a/models/renderhelper/repo_file.go b/models/renderhelper/repo_file.go index f1df8e89e0..5d0bfd6c80 100644 --- a/models/renderhelper/repo_file.go +++ b/models/renderhelper/repo_file.go @@ -50,8 +50,8 @@ type RepoFileOptions struct { DeprecatedRepoName string // it is only a patch for the non-standard "markup" api DeprecatedOwnerName string // it is only a patch for the non-standard "markup" api - CurrentRefPath string // eg: "branch/main" - 
CurrentTreePath string // eg: "path/to/file" in the repo + CurrentRefPath string // eg: "branch/main", it is a sub URL path escaped by callers, TODO: rename to CurrentRefSubURL + CurrentTreePath string // eg: "path/to/file" in the repo, it is the tree path without URL path escaping } func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository, opts ...RepoFileOptions) *markup.RenderContext { @@ -70,6 +70,10 @@ func NewRenderContextRepoFile(ctx context.Context, repo *repo_model.Repository, "repo": helper.opts.DeprecatedRepoName, }) } + // External render's iframe needs this to generate correct links + // TODO: maybe need to make it access "CurrentRefPath" directly (but impossible at the moment due to cycle-import) + // CurrentRefPath is already path-escaped by callers + rctx.RenderOptions.Metas["RefTypeNameSubURL"] = helper.opts.CurrentRefPath rctx = rctx.WithHelper(helper).WithEnableHeadingIDGeneration(true) return rctx } diff --git a/models/repo.go b/models/repo.go index 522debb9fe..34e7b10803 100644 --- a/models/repo.go +++ b/models/repo.go @@ -7,8 +7,7 @@ package models import ( "context" "strconv" - - _ "image/jpeg" // Needed for jpeg support + "strings" "code.gitea.io/gitea/models/db" issues_model "code.gitea.io/gitea/models/issues" @@ -17,6 +16,8 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" + _ "image/jpeg" // Needed for jpeg support + "xorm.io/builder" ) @@ -86,11 +87,20 @@ func labelStatsCorrectNumClosedIssuesRepo(ctx context.Context, id int64) error { return err } -var milestoneStatsQueryNumIssues = "SELECT `milestone`.id FROM `milestone` WHERE `milestone`.num_closed_issues!=(SELECT COUNT(*) FROM `issue` WHERE `issue`.milestone_id=`milestone`.id AND `issue`.is_closed=?) 
OR `milestone`.num_issues!=(SELECT COUNT(*) FROM `issue` WHERE `issue`.milestone_id=`milestone`.id)" +func milestoneStatsQueryNumIssuesSQL() string { + sql := ` +SELECT "milestone".id FROM "milestone" +WHERE ( + "milestone".num_closed_issues != (SELECT COUNT(*) FROM "issue" WHERE "issue".milestone_id="milestone".id AND "issue".is_closed=?) + OR "milestone".num_issues != (SELECT COUNT(*) FROM "issue" WHERE "issue".milestone_id="milestone".id) +) +` + return strings.TrimSpace(strings.ReplaceAll(sql, "\"", "`")) +} func milestoneStatsCorrectNumIssuesRepo(ctx context.Context, id int64) error { e := db.GetEngine(ctx) - results, err := e.Query(milestoneStatsQueryNumIssues+" AND `milestone`.repo_id = ?", true, id) + results, err := e.Query(milestoneStatsQueryNumIssuesSQL()+" AND `milestone`.repo_id = ?", true, id) if err != nil { return err } @@ -192,7 +202,7 @@ func CheckRepoStats(ctx context.Context) error { }, // Milestone.Num{,Closed}Issues { - statsQuery(milestoneStatsQueryNumIssues, true), + statsQuery(milestoneStatsQueryNumIssuesSQL(), true), issues_model.UpdateMilestoneCounters, "milestone count 'num_closed_issues' and 'num_issues'", }, diff --git a/models/repo/org_repo.go b/models/repo/org_repo.go index 96f21ba2ac..d8c2c91fec 100644 --- a/models/repo/org_repo.go +++ b/models/repo/org_repo.go @@ -18,7 +18,14 @@ import ( // GetOrgRepositories get repos belonging to the given organization func GetOrgRepositories(ctx context.Context, orgID int64) (RepositoryList, error) { var orgRepos []*Repository - return orgRepos, db.GetEngine(ctx).Where("owner_id = ?", orgID).Find(&orgRepos) + err := db.GetEngine(ctx).Where("owner_id = ?", orgID).Find(&orgRepos) + return orgRepos, err +} + +// GetOrgRepositoryIDs get repo IDs belonging to the given organization +func GetOrgRepositoryIDs(ctx context.Context, orgID int64) (repoIDs []int64, _ error) { + err := db.GetEngine(ctx).Table("repository").Where("owner_id = ?", orgID).Cols("id").Find(&repoIDs) + return repoIDs, err } type 
SearchTeamRepoOptions struct { @@ -26,7 +33,7 @@ type SearchTeamRepoOptions struct { TeamID int64 } -// GetRepositories returns paginated repositories in team of organization. +// GetTeamRepositories returns paginated repositories in team of organization. func GetTeamRepositories(ctx context.Context, opts *SearchTeamRepoOptions) (RepositoryList, error) { sess := db.GetEngine(ctx) if opts.TeamID > 0 { diff --git a/models/user/user.go b/models/user/user.go index a74662bb12..c9dc59b543 100644 --- a/models/user/user.go +++ b/models/user/user.go @@ -20,8 +20,6 @@ import ( "time" "unicode" - _ "image/jpeg" // Needed for jpeg support - "code.gitea.io/gitea/models/auth" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/auth/openid" @@ -39,6 +37,8 @@ import ( "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/validation" + _ "image/jpeg" // Needed for jpeg support + "golang.org/x/text/runes" "golang.org/x/text/transform" "golang.org/x/text/unicode/norm" @@ -524,10 +524,7 @@ const SaltByteLength = 16 // GetUserSalt returns a random user salt token. func GetUserSalt() (string, error) { - rBytes, err := util.CryptoRandomBytes(SaltByteLength) - if err != nil { - return "", err - } + rBytes := util.CryptoRandomBytes(SaltByteLength) // Returns a 32-byte long string. return hex.EncodeToString(rBytes), nil } @@ -1323,9 +1320,12 @@ func GetUserByEmail(ctx context.Context, email string) (*User, error) { return nil, ErrUserNotExist{Name: email} } -// GetUser checks if a user already exists -func GetUser(ctx context.Context, user *User) (bool, error) { - return db.GetEngine(ctx).Get(user) +func GetIndividualUser(ctx context.Context, user *User) (bool, error) { + has, err := db.GetEngine(ctx).Get(user) + if has && user.Type != UserTypeIndividual { + has = false + } + return has, err } // GetUserByOpenID returns the user object by given OpenID if exists. 
@@ -1459,16 +1459,6 @@ func IsUserVisibleToViewer(ctx context.Context, u, viewer *User) bool { return false } -// CountWrongUserType count OrgUser who have wrong type -func CountWrongUserType(ctx context.Context) (int64, error) { - return db.GetEngine(ctx).Where(builder.Eq{"type": 0}.And(builder.Neq{"num_teams": 0})).Count(new(User)) -} - -// FixWrongUserType fix OrgUser who have wrong type -func FixWrongUserType(ctx context.Context) (int64, error) { - return db.GetEngine(ctx).Where(builder.Eq{"type": 0}.And(builder.Neq{"num_teams": 0})).Cols("type").NoAutoTime().Update(&User{Type: 1}) -} - func GetOrderByName() string { if setting.UI.DefaultShowFullName { return "full_name, name" diff --git a/modules/actions/github.go b/modules/actions/github.go index 68116ec83a..b7f475aa91 100644 --- a/modules/actions/github.go +++ b/modules/actions/github.go @@ -59,6 +59,10 @@ func IsDefaultBranchWorkflow(triggedEvent webhook_module.HookEventType) bool { // Github "issues" event // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issues return true + case webhook_module.HookEventWorkflowRun: + // GitHub "workflow_run" event + // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run + return true } return false diff --git a/modules/actions/workflows.go b/modules/actions/workflows.go index 4ac06def4d..ba1aee7d72 100644 --- a/modules/actions/workflows.go +++ b/modules/actions/workflows.go @@ -103,10 +103,20 @@ func GetEventsFromContent(content []byte) ([]*jobparser.Event, error) { if err != nil { return nil, err } + if err := ValidateWorkflowContent(content); err != nil { + return nil, err + } return events, nil } +// ValidateWorkflowContent catches structural errors (e.g. blank lines in run: | blocks) +// that model.ReadWorkflow alone does not detect. 
+func ValidateWorkflowContent(content []byte) error { + _, err := jobparser.Parse(content) + return err +} + func DetectWorkflows( gitRepo *git.Repository, commit *git.Commit, diff --git a/modules/actions/workflows_test.go b/modules/actions/workflows_test.go index ea027366f7..cda2de13e2 100644 --- a/modules/actions/workflows_test.go +++ b/modules/actions/workflows_test.go @@ -9,16 +9,26 @@ import ( "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/test" webhook_module "code.gitea.io/gitea/modules/webhook" "github.com/stretchr/testify/assert" ) +func fullWorkflowContent(part string) []byte { + return []byte(` +name: test +` + part + ` +jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo hello +`) +} + func TestIsWorkflow(t *testing.T) { - oldDirs := setting.Actions.WorkflowDirs - defer func() { - setting.Actions.WorkflowDirs = oldDirs - }() + defer test.MockVariableValue(&setting.Actions.WorkflowDirs)() tests := []struct { name string @@ -218,7 +228,7 @@ func TestDetectMatched(t *testing.T) { for _, tc := range testCases { t.Run(tc.desc, func(t *testing.T) { - evts, err := GetEventsFromContent([]byte(tc.yamlOn)) + evts, err := GetEventsFromContent(fullWorkflowContent(tc.yamlOn)) assert.NoError(t, err) assert.Len(t, evts, 1) assert.Equal(t, tc.expected, detectMatched(nil, tc.commit, tc.triggedEvent, tc.payload, evts[0])) @@ -373,7 +383,7 @@ func TestMatchIssuesEvent(t *testing.T) { for _, tc := range testCases { t.Run(tc.desc, func(t *testing.T) { - evts, err := GetEventsFromContent([]byte(tc.yamlOn)) + evts, err := GetEventsFromContent(fullWorkflowContent(tc.yamlOn)) assert.NoError(t, err) assert.Len(t, evts, 1) diff --git a/modules/avatar/avatar.go b/modules/avatar/avatar.go index 3b622b99af..44c61e1ff6 100644 --- a/modules/avatar/avatar.go +++ b/modules/avatar/avatar.go @@ -11,15 +11,14 @@ import ( "image/color" "image/png" - _ "image/gif" // for processing gif 
images - _ "image/jpeg" // for processing jpeg images - "code.gitea.io/gitea/modules/avatar/identicon" "code.gitea.io/gitea/modules/setting" - "golang.org/x/image/draw" - _ "golang.org/x/image/webp" // for processing webp images + _ "image/gif" // for processing gif images + _ "image/jpeg" // for processing jpeg images + + "golang.org/x/image/draw" ) // DefaultAvatarSize is the target CSS pixel size for avatar generation. It is diff --git a/modules/charset/ambiguous.go b/modules/charset/ambiguous.go index 96e0561e15..c87d3cfa5a 100644 --- a/modules/charset/ambiguous.go +++ b/modules/charset/ambiguous.go @@ -1,4 +1,3 @@ -// This file is generated by modules/charset/ambiguous/generate.go DO NOT EDIT // Copyright 2022 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT @@ -14,11 +13,12 @@ import ( // AmbiguousTablesForLocale provides the table of ambiguous characters for this locale. func AmbiguousTablesForLocale(locale translation.Locale) []*AmbiguousTable { + ambiguousTableMap := globalVars().ambiguousTableMap key := locale.Language() var table *AmbiguousTable var ok bool for len(key) > 0 { - if table, ok = AmbiguousCharacters[key]; ok { + if table, ok = ambiguousTableMap[key]; ok { break } idx := strings.LastIndexAny(key, "-_") @@ -29,18 +29,18 @@ func AmbiguousTablesForLocale(locale translation.Locale) []*AmbiguousTable { } } if table == nil && (locale.Language() == "zh-CN" || locale.Language() == "zh_CN") { - table = AmbiguousCharacters["zh-hans"] + table = ambiguousTableMap["zh-hans"] } if table == nil && strings.HasPrefix(locale.Language(), "zh") { - table = AmbiguousCharacters["zh-hant"] + table = ambiguousTableMap["zh-hant"] } if table == nil { - table = AmbiguousCharacters["_default"] + table = ambiguousTableMap["_default"] } return []*AmbiguousTable{ table, - AmbiguousCharacters["_common"], + ambiguousTableMap["_common"], } } @@ -52,7 +52,7 @@ func isAmbiguous(r rune, confusableTo *rune, tables ...*AmbiguousTable) bool { i := 
sort.Search(len(table.Confusable), func(i int) bool { return table.Confusable[i] >= r }) - (*confusableTo) = table.With[i] + *confusableTo = table.With[i] return true } return false diff --git a/modules/charset/ambiguous/generate.go b/modules/charset/ambiguous/generate.go deleted file mode 100644 index e3fda5be98..0000000000 --- a/modules/charset/ambiguous/generate.go +++ /dev/null @@ -1,188 +0,0 @@ -// Copyright 2022 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package main - -import ( - "bytes" - "flag" - "fmt" - "go/format" - "os" - "sort" - "text/template" - "unicode" - - "code.gitea.io/gitea/modules/json" - - "golang.org/x/text/unicode/rangetable" -) - -// ambiguous.json provides a one to one mapping of ambiguous characters to other characters -// See https://github.com/hediet/vscode-unicode-data/blob/main/out/ambiguous.json - -type AmbiguousTable struct { - Confusable []rune - With []rune - Locale string - RangeTable *unicode.RangeTable -} - -type RunePair struct { - Confusable rune - With rune -} - -var verbose bool - -func main() { - flag.Usage = func() { - fmt.Fprintf(os.Stderr, `%s: Generate AmbiguousCharacter - -Usage: %[1]s [-v] [-o output.go] ambiguous.json -`, os.Args[0]) - flag.PrintDefaults() - } - - output := "" - flag.BoolVar(&verbose, "v", false, "verbose output") - flag.StringVar(&output, "o", "ambiguous_gen.go", "file to output to") - flag.Parse() - input := flag.Arg(0) - if input == "" { - input = "ambiguous.json" - } - - bs, err := os.ReadFile(input) - if err != nil { - fatalf("Unable to read: %s Err: %v", input, err) - } - - var unwrapped string - if err := json.Unmarshal(bs, &unwrapped); err != nil { - fatalf("Unable to unwrap content in: %s Err: %v", input, err) - } - - fromJSON := map[string][]uint32{} - if err := json.Unmarshal([]byte(unwrapped), &fromJSON); err != nil { - fatalf("Unable to unmarshal content in: %s Err: %v", input, err) - } - - tables := make([]*AmbiguousTable, 0, len(fromJSON)) - for 
locale, chars := range fromJSON { - table := &AmbiguousTable{Locale: locale} - table.Confusable = make([]rune, 0, len(chars)/2) - table.With = make([]rune, 0, len(chars)/2) - pairs := make([]RunePair, len(chars)/2) - for i := 0; i < len(chars); i += 2 { - pairs[i/2].Confusable, pairs[i/2].With = rune(chars[i]), rune(chars[i+1]) - } - sort.Slice(pairs, func(i, j int) bool { - return pairs[i].Confusable < pairs[j].Confusable - }) - for _, pair := range pairs { - table.Confusable = append(table.Confusable, pair.Confusable) - table.With = append(table.With, pair.With) - } - table.RangeTable = rangetable.New(table.Confusable...) - tables = append(tables, table) - } - sort.Slice(tables, func(i, j int) bool { - return tables[i].Locale < tables[j].Locale - }) - data := map[string]any{ - "Tables": tables, - } - - if err := runTemplate(generatorTemplate, output, &data); err != nil { - fatalf("Unable to run template: %v", err) - } -} - -func runTemplate(t *template.Template, filename string, data any) error { - buf := bytes.NewBuffer(nil) - if err := t.Execute(buf, data); err != nil { - return fmt.Errorf("unable to execute template: %w", err) - } - bs, err := format.Source(buf.Bytes()) - if err != nil { - verbosef("Bad source:\n%s", buf.String()) - return fmt.Errorf("unable to format source: %w", err) - } - - old, err := os.ReadFile(filename) - if err != nil && !os.IsNotExist(err) { - return fmt.Errorf("failed to read old file %s because %w", filename, err) - } else if err == nil { - if bytes.Equal(bs, old) { - // files are the same don't rewrite it. 
- return nil - } - } - - file, err := os.Create(filename) - if err != nil { - return fmt.Errorf("failed to create file %s because %w", filename, err) - } - defer file.Close() - _, err = file.Write(bs) - if err != nil { - return fmt.Errorf("unable to write generated source: %w", err) - } - return nil -} - -var generatorTemplate = template.Must(template.New("ambiguousTemplate").Parse(`// This file is generated by modules/charset/ambiguous/generate.go DO NOT EDIT -// Copyright 2022 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - - -package charset - -import "unicode" - -// This file is generated from https://github.com/hediet/vscode-unicode-data/blob/main/out/ambiguous.json - -// AmbiguousTable matches a confusable rune with its partner for the Locale -type AmbiguousTable struct { - Confusable []rune - With []rune - Locale string - RangeTable *unicode.RangeTable -} - -// AmbiguousCharacters provides a map by locale name to the confusable characters in that locale -var AmbiguousCharacters = map[string]*AmbiguousTable{ - {{range .Tables}}{{printf "%q:" .Locale}} { - Confusable: []rune{ {{range .Confusable}}{{.}},{{end}} }, - With: []rune{ {{range .With}}{{.}},{{end}} }, - Locale: {{printf "%q" .Locale}}, - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {{range .RangeTable.R16 }} {Lo:{{.Lo}}, Hi:{{.Hi}}, Stride: {{.Stride}}}, - {{end}} }, - R32: []unicode.Range32{ - {{range .RangeTable.R32}} {Lo:{{.Lo}}, Hi:{{.Hi}}, Stride: {{.Stride}}}, - {{end}} }, - LatinOffset: {{.RangeTable.LatinOffset}}, - }, - }, - {{end}} -} - -`)) - -func logf(format string, args ...any) { - fmt.Fprintf(os.Stderr, format+"\n", args...) -} - -func verbosef(format string, args ...any) { - if verbose { - logf(format, args...) - } -} - -func fatalf(format string, args ...any) { - logf("fatal: "+format+"\n", args...) 
- os.Exit(1) -} diff --git a/modules/charset/ambiguous_gen.go b/modules/charset/ambiguous_gen.go index c88ffd5aa5..669a46c91a 100644 --- a/modules/charset/ambiguous_gen.go +++ b/modules/charset/ambiguous_gen.go @@ -1,5 +1,5 @@ -// This file is generated by modules/charset/ambiguous/generate.go DO NOT EDIT -// Copyright 2022 The Gitea Authors. All rights reserved. +// This file is generated by modules/charset/generate/generate.go DO NOT EDIT +// Copyright 2026 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT package charset @@ -16,821 +16,837 @@ type AmbiguousTable struct { RangeTable *unicode.RangeTable } -// AmbiguousCharacters provides a map by locale name to the confusable characters in that locale -var AmbiguousCharacters = map[string]*AmbiguousTable{ - "_common": { - Confusable: []rune{184, 383, 388, 397, 422, 423, 439, 444, 445, 448, 451, 540, 546, 547, 577, 593, 609, 611, 617, 618, 623, 651, 655, 660, 697, 699, 700, 701, 702, 706, 707, 708, 710, 712, 714, 715, 720, 727, 731, 732, 756, 760, 884, 890, 894, 895, 900, 913, 914, 917, 918, 919, 922, 924, 925, 927, 929, 932, 933, 935, 945, 947, 953, 957, 959, 961, 963, 965, 978, 988, 1000, 1010, 1011, 1017, 1018, 1029, 1030, 1032, 1109, 1110, 1112, 1121, 1140, 1141, 1198, 1199, 1211, 1213, 1216, 1231, 1248, 1281, 1292, 1307, 1308, 1309, 1357, 1359, 1365, 1370, 1373, 1377, 1379, 1382, 1392, 1400, 1404, 1405, 1409, 1412, 1413, 1417, 1472, 1475, 1493, 1496, 1497, 1503, 1505, 1523, 1549, 1575, 1607, 1632, 1633, 1637, 1639, 1643, 1645, 1726, 1729, 1748, 1749, 1776, 1777, 1781, 1783, 1793, 1794, 1795, 1796, 1984, 1994, 2036, 2037, 2042, 2307, 2406, 2429, 2534, 2538, 2541, 2662, 2663, 2666, 2691, 2790, 2819, 2848, 2918, 2920, 3046, 3074, 3174, 3202, 3302, 3330, 3360, 3430, 3437, 3458, 3664, 3792, 4125, 4160, 4327, 4351, 4608, 4816, 5024, 5025, 5026, 5029, 5033, 5034, 5035, 5036, 5038, 5043, 5047, 5051, 5053, 5056, 5058, 5059, 5070, 5071, 5074, 5076, 5077, 5081, 5082, 5086, 5087, 5090, 5094, 5095, 
5102, 5107, 5108, 5120, 5167, 5171, 5176, 5194, 5196, 5229, 5231, 5234, 5261, 5290, 5311, 5441, 5500, 5501, 5511, 5551, 5556, 5573, 5598, 5610, 5616, 5623, 5741, 5742, 5760, 5810, 5815, 5825, 5836, 5845, 5846, 5868, 5869, 5941, 6147, 6153, 7428, 7439, 7441, 7452, 7456, 7457, 7458, 7462, 7555, 7564, 7837, 7935, 8125, 8126, 8127, 8128, 8175, 8189, 8190, 8192, 8193, 8194, 8195, 8196, 8197, 8198, 8199, 8200, 8201, 8202, 8208, 8209, 8210, 8218, 8219, 8228, 8232, 8233, 8239, 8242, 8249, 8250, 8257, 8259, 8260, 8270, 8275, 8282, 8287, 8450, 8458, 8459, 8460, 8461, 8462, 8464, 8465, 8466, 8467, 8469, 8473, 8474, 8475, 8476, 8477, 8484, 8488, 8490, 8492, 8493, 8494, 8495, 8496, 8497, 8499, 8500, 8505, 8509, 8517, 8518, 8519, 8520, 8521, 8544, 8548, 8553, 8556, 8557, 8558, 8559, 8560, 8564, 8569, 8572, 8573, 8574, 8722, 8725, 8726, 8727, 8739, 8744, 8746, 8758, 8764, 8868, 8897, 8899, 8959, 9075, 9076, 9082, 9213, 9585, 9587, 10088, 10089, 10094, 10095, 10098, 10099, 10100, 10101, 10133, 10134, 10187, 10189, 10201, 10539, 10540, 10741, 10744, 10745, 10799, 11397, 11406, 11410, 11412, 11416, 11418, 11422, 11423, 11426, 11427, 11428, 11429, 11430, 11432, 11436, 11450, 11462, 11466, 11468, 11472, 11474, 11576, 11577, 11599, 11601, 11604, 11605, 11613, 11840, 12034, 12035, 12295, 12308, 12309, 12339, 12448, 12755, 12756, 20022, 20031, 42192, 42193, 42194, 42195, 42196, 42198, 42199, 42201, 42202, 42204, 42205, 42207, 42208, 42209, 42210, 42211, 42214, 42215, 42218, 42219, 42220, 42222, 42224, 42226, 42227, 42228, 42232, 42233, 42237, 42239, 42510, 42564, 42567, 42719, 42731, 42735, 42801, 42842, 42858, 42862, 42872, 42889, 42892, 42904, 42905, 42911, 42923, 42930, 42931, 42932, 43826, 43829, 43837, 43847, 43848, 43854, 43858, 43866, 43893, 43905, 43907, 43923, 43945, 43946, 43951, 64422, 64423, 64424, 64425, 64426, 64427, 64428, 64429, 64830, 64831, 65072, 65101, 65102, 65103, 65112, 65128, 65165, 65166, 65257, 65258, 65259, 65260, 65282, 65284, 65285, 65286, 65287, 65290, 
65291, 65293, 65294, 65295, 65296, 65297, 65298, 65299, 65300, 65301, 65302, 65303, 65304, 65305, 65308, 65309, 65310, 65312, 65313, 65314, 65315, 65316, 65317, 65318, 65319, 65320, 65321, 65322, 65323, 65324, 65325, 65326, 65327, 65328, 65329, 65330, 65331, 65332, 65333, 65334, 65335, 65336, 65337, 65338, 65339, 65340, 65341, 65342, 65343, 65344, 65345, 65346, 65347, 65348, 65349, 65350, 65351, 65352, 65353, 65354, 65355, 65356, 65357, 65358, 65359, 65360, 65361, 65362, 65363, 65364, 65365, 65366, 65367, 65368, 65369, 65370, 65371, 65372, 65373, 65512, 66178, 66182, 66183, 66186, 66192, 66194, 66197, 66198, 66199, 66203, 66208, 66209, 66210, 66213, 66219, 66224, 66225, 66226, 66228, 66255, 66293, 66305, 66306, 66313, 66321, 66325, 66327, 66330, 66335, 66336, 66338, 66564, 66581, 66587, 66592, 66604, 66621, 66632, 66740, 66754, 66766, 66770, 66794, 66806, 66835, 66838, 66840, 66844, 66845, 66853, 66854, 66855, 68176, 70864, 71430, 71434, 71438, 71439, 71840, 71842, 71843, 71844, 71846, 71849, 71852, 71854, 71855, 71858, 71861, 71864, 71867, 71868, 71872, 71873, 71874, 71875, 71876, 71878, 71880, 71882, 71884, 71893, 71894, 71895, 71896, 71900, 71904, 71909, 71910, 71913, 71916, 71919, 71922, 93960, 93962, 93974, 93992, 94005, 94010, 94011, 94015, 94016, 94018, 94019, 94033, 94034, 119060, 119149, 119302, 119309, 119311, 119314, 119315, 119318, 119338, 119350, 119351, 119354, 119355, 119808, 119809, 119810, 119811, 119812, 119813, 119814, 119815, 119816, 119817, 119818, 119819, 119820, 119821, 119822, 119823, 119824, 119825, 119826, 119827, 119828, 119829, 119830, 119831, 119832, 119833, 119834, 119835, 119836, 119837, 119838, 119839, 119840, 119841, 119842, 119843, 119844, 119845, 119847, 119848, 119849, 119850, 119851, 119852, 119853, 119854, 119855, 119856, 119857, 119858, 119859, 119860, 119861, 119862, 119863, 119864, 119865, 119866, 119867, 119868, 119869, 119870, 119871, 119872, 119873, 119874, 119875, 119876, 119877, 119878, 119879, 119880, 119881, 119882, 
119883, 119884, 119885, 119886, 119887, 119888, 119889, 119890, 119891, 119892, 119894, 119895, 119896, 119897, 119899, 119900, 119901, 119902, 119903, 119904, 119905, 119906, 119907, 119908, 119909, 119910, 119911, 119912, 119913, 119914, 119915, 119916, 119917, 119918, 119919, 119920, 119921, 119922, 119923, 119924, 119925, 119926, 119927, 119928, 119929, 119930, 119931, 119932, 119933, 119934, 119935, 119936, 119937, 119938, 119939, 119940, 119941, 119942, 119943, 119944, 119945, 119946, 119947, 119948, 119949, 119951, 119952, 119953, 119954, 119955, 119956, 119957, 119958, 119959, 119960, 119961, 119962, 119963, 119964, 119966, 119967, 119970, 119973, 119974, 119977, 119978, 119979, 119980, 119982, 119983, 119984, 119985, 119986, 119987, 119988, 119989, 119990, 119991, 119992, 119993, 119995, 119997, 119998, 119999, 120000, 120001, 120003, 120005, 120006, 120007, 120008, 120009, 120010, 120011, 120012, 120013, 120014, 120015, 120016, 120017, 120018, 120019, 120020, 120021, 120022, 120023, 120024, 120025, 120026, 120027, 120028, 120029, 120030, 120031, 120032, 120033, 120034, 120035, 120036, 120037, 120038, 120039, 120040, 120041, 120042, 120043, 120044, 120045, 120046, 120047, 120048, 120049, 120050, 120051, 120052, 120053, 120055, 120056, 120057, 120058, 120059, 120060, 120061, 120062, 120063, 120064, 120065, 120066, 120067, 120068, 120069, 120071, 120072, 120073, 120074, 120077, 120078, 120079, 120080, 120081, 120082, 120083, 120084, 120086, 120087, 120088, 120089, 120090, 120091, 120092, 120094, 120095, 120096, 120097, 120098, 120099, 120100, 120101, 120102, 120103, 120104, 120105, 120107, 120108, 120109, 120110, 120111, 120112, 120113, 120114, 120115, 120116, 120117, 120118, 120119, 120120, 120121, 120123, 120124, 120125, 120126, 120128, 120129, 120130, 120131, 120132, 120134, 120138, 120139, 120140, 120141, 120142, 120143, 120144, 120146, 120147, 120148, 120149, 120150, 120151, 120152, 120153, 120154, 120155, 120156, 120157, 120159, 120160, 120161, 120162, 
120163, 120164, 120165, 120166, 120167, 120168, 120169, 120170, 120171, 120172, 120173, 120174, 120175, 120176, 120177, 120178, 120179, 120180, 120181, 120182, 120183, 120184, 120185, 120186, 120187, 120188, 120189, 120190, 120191, 120192, 120193, 120194, 120195, 120196, 120197, 120198, 120199, 120200, 120201, 120202, 120203, 120204, 120205, 120206, 120207, 120208, 120209, 120211, 120212, 120213, 120214, 120215, 120216, 120217, 120218, 120219, 120220, 120221, 120222, 120223, 120224, 120225, 120226, 120227, 120228, 120229, 120230, 120231, 120232, 120233, 120234, 120235, 120236, 120237, 120238, 120239, 120240, 120241, 120242, 120243, 120244, 120245, 120246, 120247, 120248, 120249, 120250, 120251, 120252, 120253, 120254, 120255, 120256, 120257, 120258, 120259, 120260, 120261, 120263, 120264, 120265, 120266, 120267, 120268, 120269, 120270, 120271, 120272, 120273, 120274, 120275, 120276, 120277, 120278, 120279, 120280, 120281, 120282, 120283, 120284, 120285, 120286, 120287, 120288, 120289, 120290, 120291, 120292, 120293, 120294, 120295, 120296, 120297, 120298, 120299, 120300, 120301, 120302, 120303, 120304, 120305, 120306, 120307, 120308, 120309, 120310, 120311, 120312, 120313, 120315, 120316, 120317, 120318, 120319, 120320, 120321, 120322, 120323, 120324, 120325, 120326, 120327, 120328, 120329, 120330, 120331, 120332, 120333, 120334, 120335, 120336, 120337, 120338, 120339, 120340, 120341, 120342, 120343, 120344, 120345, 120346, 120347, 120348, 120349, 120350, 120351, 120352, 120353, 120354, 120355, 120356, 120357, 120358, 120359, 120360, 120361, 120362, 120363, 120364, 120365, 120367, 120368, 120369, 120370, 120371, 120372, 120373, 120374, 120375, 120376, 120377, 120378, 120379, 120380, 120381, 120382, 120383, 120384, 120385, 120386, 120387, 120388, 120389, 120390, 120391, 120392, 120393, 120394, 120395, 120396, 120397, 120398, 120399, 120400, 120401, 120402, 120403, 120404, 120405, 120406, 120407, 120408, 120409, 120410, 120411, 120412, 120413, 120414, 120415, 120416, 
120417, 120419, 120420, 120421, 120422, 120423, 120424, 120425, 120426, 120427, 120428, 120429, 120430, 120431, 120432, 120433, 120434, 120435, 120436, 120437, 120438, 120439, 120440, 120441, 120442, 120443, 120444, 120445, 120446, 120447, 120448, 120449, 120450, 120451, 120452, 120453, 120454, 120455, 120456, 120457, 120458, 120459, 120460, 120461, 120462, 120463, 120464, 120465, 120466, 120467, 120468, 120469, 120471, 120472, 120473, 120474, 120475, 120476, 120477, 120478, 120479, 120480, 120481, 120482, 120483, 120484, 120488, 120489, 120492, 120493, 120494, 120496, 120497, 120499, 120500, 120502, 120504, 120507, 120508, 120510, 120514, 120516, 120522, 120526, 120528, 120530, 120532, 120534, 120544, 120546, 120547, 120550, 120551, 120552, 120554, 120555, 120557, 120558, 120560, 120562, 120565, 120566, 120568, 120572, 120574, 120580, 120584, 120586, 120588, 120590, 120592, 120602, 120604, 120605, 120608, 120609, 120610, 120612, 120613, 120615, 120616, 120618, 120620, 120623, 120624, 120626, 120630, 120632, 120638, 120642, 120644, 120646, 120648, 120650, 120660, 120662, 120663, 120666, 120667, 120668, 120670, 120671, 120673, 120674, 120676, 120678, 120681, 120682, 120684, 120688, 120690, 120696, 120700, 120702, 120704, 120706, 120708, 120718, 120720, 120721, 120724, 120725, 120726, 120728, 120729, 120731, 120732, 120734, 120736, 120739, 120740, 120742, 120746, 120748, 120754, 120758, 120760, 120762, 120764, 120766, 120776, 120778, 120782, 120783, 120784, 120785, 120786, 120787, 120788, 120789, 120790, 120791, 120792, 120793, 120794, 120795, 120796, 120797, 120798, 120799, 120800, 120801, 120802, 120803, 120804, 120805, 120806, 120807, 120808, 120809, 120810, 120811, 120812, 120813, 120814, 120815, 120816, 120817, 120818, 120819, 120820, 120821, 120822, 120823, 120824, 120825, 120826, 120827, 120828, 120829, 120830, 120831, 125127, 125131, 126464, 126500, 126564, 126592, 126596, 128844, 128872, 130032, 130033, 130034, 130035, 130036, 130037, 130038, 130039, 130040, 
130041}, - With: []rune{44, 102, 98, 103, 82, 50, 51, 53, 115, 73, 33, 51, 56, 56, 63, 97, 103, 121, 105, 105, 119, 117, 121, 63, 96, 96, 96, 96, 96, 60, 62, 94, 94, 96, 96, 96, 58, 45, 105, 126, 96, 58, 96, 105, 59, 74, 96, 65, 66, 69, 90, 72, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 89, 70, 50, 99, 106, 67, 77, 83, 73, 74, 115, 105, 106, 119, 86, 118, 89, 121, 104, 101, 73, 105, 51, 100, 71, 113, 87, 119, 85, 83, 79, 96, 96, 119, 113, 113, 104, 110, 110, 117, 103, 102, 111, 58, 108, 58, 108, 118, 96, 108, 111, 96, 44, 108, 111, 46, 108, 111, 86, 44, 42, 111, 111, 45, 111, 46, 73, 111, 86, 46, 46, 58, 58, 79, 108, 96, 96, 95, 58, 111, 63, 79, 56, 57, 111, 57, 56, 58, 111, 56, 79, 79, 57, 111, 111, 111, 111, 111, 111, 111, 111, 57, 111, 111, 111, 111, 111, 121, 111, 85, 79, 68, 82, 84, 105, 89, 65, 74, 69, 63, 87, 77, 72, 89, 71, 104, 90, 52, 98, 82, 87, 83, 86, 83, 76, 67, 80, 75, 100, 54, 71, 66, 61, 86, 62, 60, 96, 85, 80, 100, 98, 74, 76, 50, 120, 72, 120, 82, 98, 70, 65, 68, 68, 77, 66, 88, 120, 32, 60, 88, 73, 96, 75, 77, 58, 43, 47, 58, 58, 99, 111, 111, 117, 118, 119, 122, 114, 103, 121, 102, 121, 96, 105, 96, 126, 96, 96, 96, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 45, 45, 45, 44, 96, 46, 32, 32, 32, 96, 60, 62, 47, 45, 47, 42, 126, 58, 32, 67, 103, 72, 72, 72, 104, 73, 73, 76, 108, 78, 80, 81, 82, 82, 82, 90, 90, 75, 66, 67, 101, 101, 69, 70, 77, 111, 105, 121, 68, 100, 101, 105, 106, 73, 86, 88, 76, 67, 68, 77, 105, 118, 120, 73, 99, 100, 45, 47, 92, 42, 73, 118, 85, 58, 126, 84, 118, 85, 69, 105, 112, 97, 73, 47, 88, 40, 41, 60, 62, 40, 41, 123, 125, 43, 45, 47, 92, 84, 120, 120, 92, 47, 92, 120, 114, 72, 73, 75, 77, 78, 79, 111, 80, 112, 67, 99, 84, 89, 88, 45, 47, 57, 51, 76, 54, 86, 69, 73, 33, 79, 81, 88, 61, 92, 47, 79, 40, 41, 47, 61, 47, 92, 92, 47, 66, 80, 100, 68, 84, 71, 75, 74, 67, 90, 70, 77, 78, 76, 83, 82, 86, 72, 87, 88, 89, 65, 69, 73, 79, 85, 46, 44, 58, 61, 46, 50, 105, 86, 63, 50, 115, 50, 51, 57, 38, 
58, 96, 70, 102, 117, 51, 74, 88, 66, 101, 102, 111, 114, 114, 117, 117, 121, 105, 114, 119, 122, 118, 115, 99, 111, 111, 111, 111, 111, 111, 111, 111, 40, 41, 58, 95, 95, 95, 45, 92, 108, 108, 111, 111, 111, 111, 34, 36, 37, 38, 96, 42, 43, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 60, 61, 62, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 73, 66, 69, 70, 124, 88, 79, 80, 83, 84, 43, 65, 66, 67, 70, 79, 77, 84, 89, 88, 72, 90, 66, 67, 124, 77, 84, 88, 56, 42, 108, 88, 79, 67, 76, 83, 111, 99, 115, 82, 79, 85, 55, 111, 117, 78, 79, 75, 67, 86, 70, 76, 88, 46, 79, 118, 119, 119, 119, 86, 70, 76, 89, 69, 90, 57, 69, 52, 76, 79, 85, 53, 84, 118, 115, 70, 105, 122, 55, 111, 51, 57, 54, 57, 111, 117, 121, 79, 90, 87, 67, 88, 87, 67, 86, 84, 76, 73, 82, 83, 51, 62, 65, 85, 89, 96, 96, 123, 46, 51, 86, 92, 55, 70, 82, 76, 60, 62, 47, 92, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 67, 68, 71, 74, 75, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 102, 104, 105, 106, 107, 108, 110, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 
82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 68, 69, 70, 71, 74, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 68, 69, 70, 71, 73, 74, 75, 76, 77, 79, 83, 84, 85, 86, 87, 88, 89, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 105, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 
111, 117, 112, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 70, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 108, 56, 108, 111, 111, 108, 111, 67, 84, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57}, - Locale: "_common", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 184, Hi: 383, Stride: 199}, - {Lo: 388, Hi: 397, Stride: 9}, - {Lo: 422, Hi: 423, Stride: 1}, - {Lo: 439, Hi: 444, Stride: 5}, - {Lo: 445, Hi: 451, Stride: 3}, - {Lo: 540, Hi: 546, Stride: 6}, - {Lo: 547, Hi: 577, Stride: 30}, - {Lo: 593, Hi: 609, Stride: 16}, - {Lo: 611, Hi: 617, Stride: 6}, - {Lo: 618, Hi: 623, Stride: 5}, - {Lo: 651, Hi: 655, Stride: 4}, - {Lo: 660, Hi: 697, Stride: 37}, - {Lo: 699, Hi: 702, Stride: 1}, - {Lo: 706, Hi: 708, Stride: 1}, - {Lo: 710, Hi: 714, Stride: 2}, - {Lo: 715, Hi: 720, Stride: 5}, - {Lo: 727, Hi: 731, Stride: 4}, - {Lo: 732, Hi: 756, Stride: 24}, - {Lo: 760, Hi: 884, Stride: 124}, - {Lo: 890, Hi: 894, Stride: 4}, - {Lo: 895, Hi: 900, Stride: 5}, - {Lo: 913, Hi: 914, Stride: 1}, - {Lo: 917, Hi: 919, Stride: 1}, - {Lo: 922, Hi: 924, Stride: 2}, - {Lo: 925, Hi: 929, Stride: 2}, - {Lo: 932, Hi: 933, Stride: 1}, - {Lo: 935, Hi: 945, Stride: 10}, - {Lo: 947, Hi: 953, Stride: 6}, - {Lo: 957, Hi: 965, Stride: 2}, - {Lo: 978, Hi: 988, Stride: 10}, - {Lo: 1000, Hi: 1010, Stride: 10}, - {Lo: 1011, Hi: 1017, Stride: 6}, - {Lo: 1018, Hi: 1029, Stride: 11}, - {Lo: 1030, Hi: 1032, Stride: 2}, - {Lo: 1109, Hi: 1110, Stride: 1}, - {Lo: 1112, Hi: 1121, Stride: 9}, - 
{Lo: 1140, Hi: 1141, Stride: 1}, - {Lo: 1198, Hi: 1199, Stride: 1}, - {Lo: 1211, Hi: 1213, Stride: 2}, - {Lo: 1216, Hi: 1231, Stride: 15}, - {Lo: 1248, Hi: 1281, Stride: 33}, - {Lo: 1292, Hi: 1307, Stride: 15}, - {Lo: 1308, Hi: 1309, Stride: 1}, - {Lo: 1357, Hi: 1359, Stride: 2}, - {Lo: 1365, Hi: 1370, Stride: 5}, - {Lo: 1373, Hi: 1377, Stride: 4}, - {Lo: 1379, Hi: 1382, Stride: 3}, - {Lo: 1392, Hi: 1400, Stride: 8}, - {Lo: 1404, Hi: 1405, Stride: 1}, - {Lo: 1409, Hi: 1412, Stride: 3}, - {Lo: 1413, Hi: 1417, Stride: 4}, - {Lo: 1472, Hi: 1475, Stride: 3}, - {Lo: 1493, Hi: 1496, Stride: 3}, - {Lo: 1497, Hi: 1503, Stride: 6}, - {Lo: 1505, Hi: 1523, Stride: 18}, - {Lo: 1549, Hi: 1575, Stride: 26}, - {Lo: 1607, Hi: 1632, Stride: 25}, - {Lo: 1633, Hi: 1637, Stride: 4}, - {Lo: 1639, Hi: 1643, Stride: 4}, - {Lo: 1645, Hi: 1726, Stride: 81}, - {Lo: 1729, Hi: 1748, Stride: 19}, - {Lo: 1749, Hi: 1776, Stride: 27}, - {Lo: 1777, Hi: 1781, Stride: 4}, - {Lo: 1783, Hi: 1793, Stride: 10}, - {Lo: 1794, Hi: 1796, Stride: 1}, - {Lo: 1984, Hi: 1994, Stride: 10}, - {Lo: 2036, Hi: 2037, Stride: 1}, - {Lo: 2042, Hi: 2307, Stride: 265}, - {Lo: 2406, Hi: 2429, Stride: 23}, - {Lo: 2534, Hi: 2538, Stride: 4}, - {Lo: 2541, Hi: 2662, Stride: 121}, - {Lo: 2663, Hi: 2666, Stride: 3}, - {Lo: 2691, Hi: 2790, Stride: 99}, - {Lo: 2819, Hi: 2848, Stride: 29}, - {Lo: 2918, Hi: 2920, Stride: 2}, - {Lo: 3046, Hi: 3074, Stride: 28}, - {Lo: 3174, Hi: 3202, Stride: 28}, - {Lo: 3302, Hi: 3330, Stride: 28}, - {Lo: 3360, Hi: 3430, Stride: 70}, - {Lo: 3437, Hi: 3458, Stride: 21}, - {Lo: 3664, Hi: 3792, Stride: 128}, - {Lo: 4125, Hi: 4160, Stride: 35}, - {Lo: 4327, Hi: 4351, Stride: 24}, - {Lo: 4608, Hi: 5024, Stride: 208}, - {Lo: 5025, Hi: 5026, Stride: 1}, - {Lo: 5029, Hi: 5033, Stride: 4}, - {Lo: 5034, Hi: 5036, Stride: 1}, - {Lo: 5038, Hi: 5043, Stride: 5}, - {Lo: 5047, Hi: 5051, Stride: 4}, - {Lo: 5053, Hi: 5056, Stride: 3}, - {Lo: 5058, Hi: 5059, Stride: 1}, - {Lo: 5070, Hi: 5071, Stride: 1}, - {Lo: 5074, 
Hi: 5076, Stride: 2}, - {Lo: 5077, Hi: 5081, Stride: 4}, - {Lo: 5082, Hi: 5086, Stride: 4}, - {Lo: 5087, Hi: 5090, Stride: 3}, - {Lo: 5094, Hi: 5095, Stride: 1}, - {Lo: 5102, Hi: 5107, Stride: 5}, - {Lo: 5108, Hi: 5120, Stride: 12}, - {Lo: 5167, Hi: 5171, Stride: 4}, - {Lo: 5176, Hi: 5194, Stride: 18}, - {Lo: 5196, Hi: 5229, Stride: 33}, - {Lo: 5231, Hi: 5234, Stride: 3}, - {Lo: 5261, Hi: 5290, Stride: 29}, - {Lo: 5311, Hi: 5441, Stride: 130}, - {Lo: 5500, Hi: 5501, Stride: 1}, - {Lo: 5511, Hi: 5551, Stride: 40}, - {Lo: 5556, Hi: 5573, Stride: 17}, - {Lo: 5598, Hi: 5610, Stride: 12}, - {Lo: 5616, Hi: 5623, Stride: 7}, - {Lo: 5741, Hi: 5742, Stride: 1}, - {Lo: 5760, Hi: 5810, Stride: 50}, - {Lo: 5815, Hi: 5825, Stride: 10}, - {Lo: 5836, Hi: 5845, Stride: 9}, - {Lo: 5846, Hi: 5868, Stride: 22}, - {Lo: 5869, Hi: 5941, Stride: 72}, - {Lo: 6147, Hi: 6153, Stride: 6}, - {Lo: 7428, Hi: 7439, Stride: 11}, - {Lo: 7441, Hi: 7452, Stride: 11}, - {Lo: 7456, Hi: 7458, Stride: 1}, - {Lo: 7462, Hi: 7555, Stride: 93}, - {Lo: 7564, Hi: 7837, Stride: 273}, - {Lo: 7935, Hi: 8125, Stride: 190}, - {Lo: 8126, Hi: 8128, Stride: 1}, - {Lo: 8175, Hi: 8189, Stride: 14}, - {Lo: 8190, Hi: 8192, Stride: 2}, - {Lo: 8193, Hi: 8202, Stride: 1}, - {Lo: 8208, Hi: 8210, Stride: 1}, - {Lo: 8218, Hi: 8219, Stride: 1}, - {Lo: 8228, Hi: 8232, Stride: 4}, - {Lo: 8233, Hi: 8239, Stride: 6}, - {Lo: 8242, Hi: 8249, Stride: 7}, - {Lo: 8250, Hi: 8257, Stride: 7}, - {Lo: 8259, Hi: 8260, Stride: 1}, - {Lo: 8270, Hi: 8275, Stride: 5}, - {Lo: 8282, Hi: 8287, Stride: 5}, - {Lo: 8450, Hi: 8458, Stride: 8}, - {Lo: 8459, Hi: 8462, Stride: 1}, - {Lo: 8464, Hi: 8467, Stride: 1}, - {Lo: 8469, Hi: 8473, Stride: 4}, - {Lo: 8474, Hi: 8477, Stride: 1}, - {Lo: 8484, Hi: 8488, Stride: 4}, - {Lo: 8490, Hi: 8492, Stride: 2}, - {Lo: 8493, Hi: 8497, Stride: 1}, - {Lo: 8499, Hi: 8500, Stride: 1}, - {Lo: 8505, Hi: 8509, Stride: 4}, - {Lo: 8517, Hi: 8521, Stride: 1}, - {Lo: 8544, Hi: 8548, Stride: 4}, - {Lo: 8553, Hi: 8556, Stride: 
3}, - {Lo: 8557, Hi: 8560, Stride: 1}, - {Lo: 8564, Hi: 8569, Stride: 5}, - {Lo: 8572, Hi: 8574, Stride: 1}, - {Lo: 8722, Hi: 8725, Stride: 3}, - {Lo: 8726, Hi: 8727, Stride: 1}, - {Lo: 8739, Hi: 8744, Stride: 5}, - {Lo: 8746, Hi: 8758, Stride: 12}, - {Lo: 8764, Hi: 8868, Stride: 104}, - {Lo: 8897, Hi: 8899, Stride: 2}, - {Lo: 8959, Hi: 9075, Stride: 116}, - {Lo: 9076, Hi: 9082, Stride: 6}, - {Lo: 9213, Hi: 9585, Stride: 372}, - {Lo: 9587, Hi: 10088, Stride: 501}, - {Lo: 10089, Hi: 10094, Stride: 5}, - {Lo: 10095, Hi: 10098, Stride: 3}, - {Lo: 10099, Hi: 10101, Stride: 1}, - {Lo: 10133, Hi: 10134, Stride: 1}, - {Lo: 10187, Hi: 10189, Stride: 2}, - {Lo: 10201, Hi: 10539, Stride: 338}, - {Lo: 10540, Hi: 10741, Stride: 201}, - {Lo: 10744, Hi: 10745, Stride: 1}, - {Lo: 10799, Hi: 11397, Stride: 598}, - {Lo: 11406, Hi: 11410, Stride: 4}, - {Lo: 11412, Hi: 11416, Stride: 4}, - {Lo: 11418, Hi: 11422, Stride: 4}, - {Lo: 11423, Hi: 11426, Stride: 3}, - {Lo: 11427, Hi: 11430, Stride: 1}, - {Lo: 11432, Hi: 11436, Stride: 4}, - {Lo: 11450, Hi: 11462, Stride: 12}, - {Lo: 11466, Hi: 11468, Stride: 2}, - {Lo: 11472, Hi: 11474, Stride: 2}, - {Lo: 11576, Hi: 11577, Stride: 1}, - {Lo: 11599, Hi: 11601, Stride: 2}, - {Lo: 11604, Hi: 11605, Stride: 1}, - {Lo: 11613, Hi: 11840, Stride: 227}, - {Lo: 12034, Hi: 12035, Stride: 1}, - {Lo: 12295, Hi: 12308, Stride: 13}, - {Lo: 12309, Hi: 12339, Stride: 30}, - {Lo: 12448, Hi: 12755, Stride: 307}, - {Lo: 12756, Hi: 20022, Stride: 7266}, - {Lo: 20031, Hi: 42192, Stride: 22161}, - {Lo: 42193, Hi: 42196, Stride: 1}, - {Lo: 42198, Hi: 42199, Stride: 1}, - {Lo: 42201, Hi: 42202, Stride: 1}, - {Lo: 42204, Hi: 42205, Stride: 1}, - {Lo: 42207, Hi: 42211, Stride: 1}, - {Lo: 42214, Hi: 42215, Stride: 1}, - {Lo: 42218, Hi: 42220, Stride: 1}, - {Lo: 42222, Hi: 42226, Stride: 2}, - {Lo: 42227, Hi: 42228, Stride: 1}, - {Lo: 42232, Hi: 42233, Stride: 1}, - {Lo: 42237, Hi: 42239, Stride: 2}, - {Lo: 42510, Hi: 42564, Stride: 54}, - {Lo: 42567, Hi: 42719, 
Stride: 152}, - {Lo: 42731, Hi: 42735, Stride: 4}, - {Lo: 42801, Hi: 42842, Stride: 41}, - {Lo: 42858, Hi: 42862, Stride: 4}, - {Lo: 42872, Hi: 42889, Stride: 17}, - {Lo: 42892, Hi: 42904, Stride: 12}, - {Lo: 42905, Hi: 42911, Stride: 6}, - {Lo: 42923, Hi: 42930, Stride: 7}, - {Lo: 42931, Hi: 42932, Stride: 1}, - {Lo: 43826, Hi: 43829, Stride: 3}, - {Lo: 43837, Hi: 43847, Stride: 10}, - {Lo: 43848, Hi: 43854, Stride: 6}, - {Lo: 43858, Hi: 43866, Stride: 8}, - {Lo: 43893, Hi: 43905, Stride: 12}, - {Lo: 43907, Hi: 43923, Stride: 16}, - {Lo: 43945, Hi: 43946, Stride: 1}, - {Lo: 43951, Hi: 64422, Stride: 20471}, - {Lo: 64423, Hi: 64429, Stride: 1}, - {Lo: 64830, Hi: 64831, Stride: 1}, - {Lo: 65072, Hi: 65101, Stride: 29}, - {Lo: 65102, Hi: 65103, Stride: 1}, - {Lo: 65112, Hi: 65128, Stride: 16}, - {Lo: 65165, Hi: 65166, Stride: 1}, - {Lo: 65257, Hi: 65260, Stride: 1}, - {Lo: 65282, Hi: 65284, Stride: 2}, - {Lo: 65285, Hi: 65287, Stride: 1}, - {Lo: 65290, Hi: 65291, Stride: 1}, - {Lo: 65293, Hi: 65305, Stride: 1}, - {Lo: 65308, Hi: 65310, Stride: 1}, - {Lo: 65312, Hi: 65373, Stride: 1}, - {Lo: 65512, Hi: 65512, Stride: 1}, +func newAmbiguousTableMap() map[string]*AmbiguousTable { + return map[string]*AmbiguousTable{ + "_common": { + Confusable: []rune{184, 383, 388, 397, 422, 423, 439, 444, 445, 448, 451, 540, 546, 547, 577, 593, 609, 611, 617, 618, 623, 651, 655, 660, 697, 699, 700, 701, 702, 706, 707, 708, 710, 712, 714, 715, 720, 727, 731, 732, 756, 760, 884, 890, 894, 895, 900, 913, 914, 917, 918, 919, 922, 924, 925, 927, 929, 932, 933, 935, 945, 947, 953, 957, 959, 961, 963, 965, 978, 988, 1000, 1010, 1011, 1017, 1018, 1029, 1030, 1032, 1109, 1110, 1112, 1121, 1140, 1141, 1198, 1199, 1211, 1213, 1216, 1231, 1248, 1281, 1292, 1307, 1308, 1309, 1357, 1359, 1365, 1370, 1373, 1377, 1379, 1382, 1392, 1400, 1404, 1405, 1409, 1412, 1413, 1417, 1472, 1475, 1493, 1496, 1497, 1503, 1505, 1523, 1549, 1575, 1607, 1632, 1633, 1637, 1639, 1643, 1645, 1726, 1729, 1748, 1749, 
1776, 1777, 1781, 1783, 1793, 1794, 1795, 1796, 1984, 1994, 2036, 2037, 2042, 2307, 2406, 2429, 2534, 2538, 2541, 2662, 2663, 2666, 2691, 2790, 2819, 2848, 2918, 2920, 3046, 3074, 3174, 3202, 3302, 3330, 3360, 3430, 3437, 3458, 3664, 3792, 4125, 4160, 4327, 4351, 4608, 4816, 5024, 5025, 5026, 5029, 5033, 5034, 5035, 5036, 5038, 5043, 5047, 5051, 5053, 5056, 5058, 5059, 5070, 5071, 5074, 5076, 5077, 5081, 5082, 5086, 5087, 5090, 5094, 5095, 5102, 5107, 5108, 5120, 5167, 5171, 5176, 5194, 5196, 5229, 5231, 5234, 5261, 5290, 5311, 5441, 5500, 5501, 5511, 5551, 5556, 5573, 5598, 5610, 5616, 5623, 5741, 5742, 5760, 5810, 5815, 5825, 5836, 5845, 5846, 5868, 5869, 5941, 6147, 6153, 7428, 7439, 7441, 7452, 7456, 7457, 7458, 7462, 7555, 7564, 7837, 7935, 8125, 8126, 8127, 8128, 8175, 8189, 8190, 8192, 8193, 8194, 8195, 8196, 8197, 8198, 8199, 8200, 8201, 8202, 8208, 8209, 8210, 8218, 8219, 8228, 8232, 8233, 8239, 8242, 8249, 8250, 8257, 8259, 8260, 8270, 8275, 8282, 8287, 8450, 8458, 8459, 8460, 8461, 8462, 8464, 8465, 8466, 8467, 8469, 8473, 8474, 8475, 8476, 8477, 8484, 8488, 8490, 8492, 8493, 8494, 8495, 8496, 8497, 8499, 8500, 8505, 8509, 8517, 8518, 8519, 8520, 8521, 8544, 8548, 8553, 8556, 8557, 8558, 8559, 8560, 8564, 8569, 8572, 8573, 8574, 8722, 8725, 8726, 8727, 8739, 8744, 8746, 8758, 8764, 8868, 8897, 8899, 8959, 9075, 9076, 9082, 9213, 9585, 9587, 10088, 10089, 10094, 10095, 10098, 10099, 10100, 10101, 10133, 10134, 10187, 10189, 10201, 10539, 10540, 10741, 10744, 10745, 10799, 11397, 11406, 11410, 11412, 11416, 11418, 11422, 11423, 11426, 11427, 11428, 11429, 11430, 11432, 11436, 11450, 11462, 11466, 11468, 11472, 11474, 11576, 11577, 11599, 11601, 11604, 11605, 11613, 11840, 12034, 12035, 12295, 12308, 12309, 12339, 12448, 12755, 12756, 20022, 20031, 42192, 42193, 42194, 42195, 42196, 42198, 42199, 42201, 42202, 42204, 42205, 42207, 42208, 42209, 42210, 42211, 42214, 42215, 42218, 42219, 42220, 42222, 42224, 42226, 42227, 42228, 42232, 42233, 42237, 42239, 
42510, 42564, 42567, 42719, 42731, 42735, 42801, 42842, 42858, 42862, 42872, 42889, 42892, 42904, 42905, 42911, 42923, 42930, 42931, 42932, 43826, 43829, 43837, 43847, 43848, 43854, 43858, 43866, 43893, 43905, 43907, 43923, 43945, 43946, 43951, 64422, 64423, 64424, 64425, 64426, 64427, 64428, 64429, 64830, 64831, 65072, 65101, 65102, 65103, 65112, 65128, 65165, 65166, 65257, 65258, 65259, 65260, 65282, 65284, 65285, 65286, 65287, 65290, 65291, 65293, 65294, 65295, 65296, 65297, 65298, 65299, 65300, 65301, 65302, 65303, 65304, 65305, 65308, 65309, 65310, 65312, 65313, 65314, 65315, 65316, 65317, 65318, 65319, 65320, 65321, 65322, 65323, 65324, 65325, 65326, 65327, 65328, 65329, 65330, 65331, 65332, 65333, 65334, 65335, 65336, 65337, 65338, 65339, 65340, 65341, 65342, 65343, 65344, 65345, 65346, 65347, 65348, 65349, 65350, 65351, 65352, 65353, 65354, 65355, 65356, 65357, 65358, 65359, 65360, 65361, 65362, 65363, 65364, 65365, 65366, 65367, 65368, 65369, 65370, 65371, 65372, 65373, 65512, 66178, 66182, 66183, 66186, 66192, 66194, 66197, 66198, 66199, 66203, 66208, 66209, 66210, 66213, 66219, 66224, 66225, 66226, 66228, 66255, 66293, 66305, 66306, 66313, 66321, 66325, 66327, 66330, 66335, 66336, 66338, 66564, 66581, 66587, 66592, 66604, 66621, 66632, 66740, 66754, 66766, 66770, 66794, 66806, 66835, 66838, 66840, 66844, 66845, 66853, 66854, 66855, 68176, 70864, 71430, 71434, 71438, 71439, 71840, 71842, 71843, 71844, 71846, 71849, 71852, 71854, 71855, 71858, 71861, 71864, 71867, 71868, 71872, 71873, 71874, 71875, 71876, 71878, 71880, 71882, 71884, 71893, 71894, 71895, 71896, 71900, 71904, 71909, 71910, 71913, 71916, 71919, 71922, 93960, 93962, 93974, 93992, 94005, 94010, 94011, 94015, 94016, 94018, 94019, 94033, 94034, 119060, 119149, 119302, 119309, 119311, 119314, 119315, 119318, 119338, 119350, 119351, 119354, 119355, 119808, 119809, 119810, 119811, 119812, 119813, 119814, 119815, 119816, 119817, 119818, 119819, 119820, 119821, 119822, 119823, 119824, 119825, 119826, 
119827, 119828, 119829, 119830, 119831, 119832, 119833, 119834, 119835, 119836, 119837, 119838, 119839, 119840, 119841, 119842, 119843, 119844, 119845, 119847, 119848, 119849, 119850, 119851, 119852, 119853, 119854, 119855, 119856, 119857, 119858, 119859, 119860, 119861, 119862, 119863, 119864, 119865, 119866, 119867, 119868, 119869, 119870, 119871, 119872, 119873, 119874, 119875, 119876, 119877, 119878, 119879, 119880, 119881, 119882, 119883, 119884, 119885, 119886, 119887, 119888, 119889, 119890, 119891, 119892, 119894, 119895, 119896, 119897, 119899, 119900, 119901, 119902, 119903, 119904, 119905, 119906, 119907, 119908, 119909, 119910, 119911, 119912, 119913, 119914, 119915, 119916, 119917, 119918, 119919, 119920, 119921, 119922, 119923, 119924, 119925, 119926, 119927, 119928, 119929, 119930, 119931, 119932, 119933, 119934, 119935, 119936, 119937, 119938, 119939, 119940, 119941, 119942, 119943, 119944, 119945, 119946, 119947, 119948, 119949, 119951, 119952, 119953, 119954, 119955, 119956, 119957, 119958, 119959, 119960, 119961, 119962, 119963, 119964, 119966, 119967, 119970, 119973, 119974, 119977, 119978, 119979, 119980, 119982, 119983, 119984, 119985, 119986, 119987, 119988, 119989, 119990, 119991, 119992, 119993, 119995, 119997, 119998, 119999, 120000, 120001, 120003, 120005, 120006, 120007, 120008, 120009, 120010, 120011, 120012, 120013, 120014, 120015, 120016, 120017, 120018, 120019, 120020, 120021, 120022, 120023, 120024, 120025, 120026, 120027, 120028, 120029, 120030, 120031, 120032, 120033, 120034, 120035, 120036, 120037, 120038, 120039, 120040, 120041, 120042, 120043, 120044, 120045, 120046, 120047, 120048, 120049, 120050, 120051, 120052, 120053, 120055, 120056, 120057, 120058, 120059, 120060, 120061, 120062, 120063, 120064, 120065, 120066, 120067, 120068, 120069, 120071, 120072, 120073, 120074, 120077, 120078, 120079, 120080, 120081, 120082, 120083, 120084, 120086, 120087, 120088, 120089, 120090, 120091, 120092, 120094, 120095, 120096, 120097, 120098, 
120099, 120100, 120101, 120102, 120103, 120104, 120105, 120107, 120108, 120109, 120110, 120111, 120112, 120113, 120114, 120115, 120116, 120117, 120118, 120119, 120120, 120121, 120123, 120124, 120125, 120126, 120128, 120129, 120130, 120131, 120132, 120134, 120138, 120139, 120140, 120141, 120142, 120143, 120144, 120146, 120147, 120148, 120149, 120150, 120151, 120152, 120153, 120154, 120155, 120156, 120157, 120159, 120160, 120161, 120162, 120163, 120164, 120165, 120166, 120167, 120168, 120169, 120170, 120171, 120172, 120173, 120174, 120175, 120176, 120177, 120178, 120179, 120180, 120181, 120182, 120183, 120184, 120185, 120186, 120187, 120188, 120189, 120190, 120191, 120192, 120193, 120194, 120195, 120196, 120197, 120198, 120199, 120200, 120201, 120202, 120203, 120204, 120205, 120206, 120207, 120208, 120209, 120211, 120212, 120213, 120214, 120215, 120216, 120217, 120218, 120219, 120220, 120221, 120222, 120223, 120224, 120225, 120226, 120227, 120228, 120229, 120230, 120231, 120232, 120233, 120234, 120235, 120236, 120237, 120238, 120239, 120240, 120241, 120242, 120243, 120244, 120245, 120246, 120247, 120248, 120249, 120250, 120251, 120252, 120253, 120254, 120255, 120256, 120257, 120258, 120259, 120260, 120261, 120263, 120264, 120265, 120266, 120267, 120268, 120269, 120270, 120271, 120272, 120273, 120274, 120275, 120276, 120277, 120278, 120279, 120280, 120281, 120282, 120283, 120284, 120285, 120286, 120287, 120288, 120289, 120290, 120291, 120292, 120293, 120294, 120295, 120296, 120297, 120298, 120299, 120300, 120301, 120302, 120303, 120304, 120305, 120306, 120307, 120308, 120309, 120310, 120311, 120312, 120313, 120315, 120316, 120317, 120318, 120319, 120320, 120321, 120322, 120323, 120324, 120325, 120326, 120327, 120328, 120329, 120330, 120331, 120332, 120333, 120334, 120335, 120336, 120337, 120338, 120339, 120340, 120341, 120342, 120343, 120344, 120345, 120346, 120347, 120348, 120349, 120350, 120351, 120352, 120353, 120354, 120355, 120356, 120357, 120358, 120359, 120360, 
120361, 120362, 120363, 120364, 120365, 120367, 120368, 120369, 120370, 120371, 120372, 120373, 120374, 120375, 120376, 120377, 120378, 120379, 120380, 120381, 120382, 120383, 120384, 120385, 120386, 120387, 120388, 120389, 120390, 120391, 120392, 120393, 120394, 120395, 120396, 120397, 120398, 120399, 120400, 120401, 120402, 120403, 120404, 120405, 120406, 120407, 120408, 120409, 120410, 120411, 120412, 120413, 120414, 120415, 120416, 120417, 120419, 120420, 120421, 120422, 120423, 120424, 120425, 120426, 120427, 120428, 120429, 120430, 120431, 120432, 120433, 120434, 120435, 120436, 120437, 120438, 120439, 120440, 120441, 120442, 120443, 120444, 120445, 120446, 120447, 120448, 120449, 120450, 120451, 120452, 120453, 120454, 120455, 120456, 120457, 120458, 120459, 120460, 120461, 120462, 120463, 120464, 120465, 120466, 120467, 120468, 120469, 120471, 120472, 120473, 120474, 120475, 120476, 120477, 120478, 120479, 120480, 120481, 120482, 120483, 120484, 120488, 120489, 120492, 120493, 120494, 120496, 120497, 120499, 120500, 120502, 120504, 120507, 120508, 120510, 120514, 120516, 120522, 120526, 120528, 120530, 120532, 120534, 120544, 120546, 120547, 120550, 120551, 120552, 120554, 120555, 120557, 120558, 120560, 120562, 120565, 120566, 120568, 120572, 120574, 120580, 120584, 120586, 120588, 120590, 120592, 120602, 120604, 120605, 120608, 120609, 120610, 120612, 120613, 120615, 120616, 120618, 120620, 120623, 120624, 120626, 120630, 120632, 120638, 120642, 120644, 120646, 120648, 120650, 120660, 120662, 120663, 120666, 120667, 120668, 120670, 120671, 120673, 120674, 120676, 120678, 120681, 120682, 120684, 120688, 120690, 120696, 120700, 120702, 120704, 120706, 120708, 120718, 120720, 120721, 120724, 120725, 120726, 120728, 120729, 120731, 120732, 120734, 120736, 120739, 120740, 120742, 120746, 120748, 120754, 120758, 120760, 120762, 120764, 120766, 120776, 120778, 120782, 120783, 120784, 120785, 120786, 120787, 120788, 120789, 120790, 120791, 120792, 120793, 120794, 
120795, 120796, 120797, 120798, 120799, 120800, 120801, 120802, 120803, 120804, 120805, 120806, 120807, 120808, 120809, 120810, 120811, 120812, 120813, 120814, 120815, 120816, 120817, 120818, 120819, 120820, 120821, 120822, 120823, 120824, 120825, 120826, 120827, 120828, 120829, 120830, 120831, 125127, 125131, 126464, 126500, 126564, 126592, 126596, 128844, 128872, 130032, 130033, 130034, 130035, 130036, 130037, 130038, 130039, 130040, 130041}, + With: []rune{44, 102, 98, 103, 82, 50, 51, 53, 115, 73, 33, 51, 56, 56, 63, 97, 103, 121, 105, 105, 119, 117, 121, 63, 96, 96, 96, 96, 96, 60, 62, 94, 94, 96, 96, 96, 58, 45, 105, 126, 96, 58, 96, 105, 59, 74, 96, 65, 66, 69, 90, 72, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 89, 70, 50, 99, 106, 67, 77, 83, 73, 74, 115, 105, 106, 119, 86, 118, 89, 121, 104, 101, 73, 105, 51, 100, 71, 113, 87, 119, 85, 83, 79, 96, 96, 119, 113, 113, 104, 110, 110, 117, 103, 102, 111, 58, 108, 58, 108, 118, 96, 108, 111, 96, 44, 108, 111, 46, 108, 111, 86, 44, 42, 111, 111, 45, 111, 46, 73, 111, 86, 46, 46, 58, 58, 79, 108, 96, 96, 95, 58, 111, 63, 79, 56, 57, 111, 57, 56, 58, 111, 56, 79, 79, 57, 111, 111, 111, 111, 111, 111, 111, 111, 57, 111, 111, 111, 111, 111, 121, 111, 85, 79, 68, 82, 84, 105, 89, 65, 74, 69, 63, 87, 77, 72, 89, 71, 104, 90, 52, 98, 82, 87, 83, 86, 83, 76, 67, 80, 75, 100, 54, 71, 66, 61, 86, 62, 60, 96, 85, 80, 100, 98, 74, 76, 50, 120, 72, 120, 82, 98, 70, 65, 68, 68, 77, 66, 88, 120, 32, 60, 88, 73, 96, 75, 77, 58, 43, 47, 58, 58, 99, 111, 111, 117, 118, 119, 122, 114, 103, 121, 102, 121, 96, 105, 96, 126, 96, 96, 96, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 45, 45, 45, 44, 96, 46, 32, 32, 32, 96, 60, 62, 47, 45, 47, 42, 126, 58, 32, 67, 103, 72, 72, 72, 104, 73, 73, 76, 108, 78, 80, 81, 82, 82, 82, 90, 90, 75, 66, 67, 101, 101, 69, 70, 77, 111, 105, 121, 68, 100, 101, 105, 106, 73, 86, 88, 76, 67, 68, 77, 105, 118, 120, 73, 99, 100, 45, 47, 92, 42, 73, 118, 85, 58, 126, 84, 118, 85, 
69, 105, 112, 97, 73, 47, 88, 40, 41, 60, 62, 40, 41, 123, 125, 43, 45, 47, 92, 84, 120, 120, 92, 47, 92, 120, 114, 72, 73, 75, 77, 78, 79, 111, 80, 112, 67, 99, 84, 89, 88, 45, 47, 57, 51, 76, 54, 86, 69, 73, 33, 79, 81, 88, 61, 92, 47, 79, 40, 41, 47, 61, 47, 92, 92, 47, 66, 80, 100, 68, 84, 71, 75, 74, 67, 90, 70, 77, 78, 76, 83, 82, 86, 72, 87, 88, 89, 65, 69, 73, 79, 85, 46, 44, 58, 61, 46, 50, 105, 86, 63, 50, 115, 50, 51, 57, 38, 58, 96, 70, 102, 117, 51, 74, 88, 66, 101, 102, 111, 114, 114, 117, 117, 121, 105, 114, 119, 122, 118, 115, 99, 111, 111, 111, 111, 111, 111, 111, 111, 40, 41, 58, 95, 95, 95, 45, 92, 108, 108, 111, 111, 111, 111, 34, 36, 37, 38, 96, 42, 43, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 60, 61, 62, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 73, 66, 69, 70, 124, 88, 79, 80, 83, 84, 43, 65, 66, 67, 70, 79, 77, 84, 89, 88, 72, 90, 66, 67, 124, 77, 84, 88, 56, 42, 108, 88, 79, 67, 76, 83, 111, 99, 115, 82, 79, 85, 55, 111, 117, 78, 79, 75, 67, 86, 70, 76, 88, 46, 79, 118, 119, 119, 119, 86, 70, 76, 89, 69, 90, 57, 69, 52, 76, 79, 85, 53, 84, 118, 115, 70, 105, 122, 55, 111, 51, 57, 54, 57, 111, 117, 121, 79, 90, 87, 67, 88, 87, 67, 86, 84, 76, 73, 82, 83, 51, 62, 65, 85, 89, 96, 96, 123, 46, 51, 86, 92, 55, 70, 82, 76, 60, 62, 47, 92, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 
73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 67, 68, 71, 74, 75, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 102, 104, 105, 106, 107, 108, 110, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 68, 69, 70, 71, 74, 75, 76, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 68, 69, 70, 71, 73, 74, 75, 76, 77, 79, 83, 84, 85, 86, 87, 88, 89, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 
89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 73, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 105, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 65, 66, 69, 90, 72, 73, 75, 77, 78, 79, 80, 84, 89, 88, 97, 121, 105, 118, 111, 112, 111, 117, 112, 70, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57, 108, 56, 108, 111, 111, 108, 111, 67, 84, 79, 73, 50, 51, 52, 53, 54, 55, 56, 57}, + Locale: "_common", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 184, Hi: 383, Stride: 199}, + {Lo: 388, Hi: 397, Stride: 9}, + {Lo: 422, Hi: 423, Stride: 1}, + {Lo: 439, Hi: 444, Stride: 5}, + {Lo: 445, Hi: 451, Stride: 3}, + {Lo: 540, Hi: 546, Stride: 6}, + {Lo: 547, Hi: 577, Stride: 30}, + {Lo: 593, Hi: 609, Stride: 16}, + {Lo: 611, Hi: 617, Stride: 6}, + {Lo: 618, Hi: 623, Stride: 5}, + {Lo: 651, Hi: 655, Stride: 4}, + {Lo: 660, Hi: 697, Stride: 37}, + {Lo: 699, Hi: 702, Stride: 1}, + {Lo: 706, Hi: 708, Stride: 1}, + {Lo: 710, Hi: 714, Stride: 2}, + {Lo: 715, Hi: 720, Stride: 5}, + {Lo: 727, Hi: 731, Stride: 4}, + {Lo: 732, Hi: 756, Stride: 24}, + {Lo: 760, Hi: 884, Stride: 124}, + {Lo: 890, Hi: 894, Stride: 4}, + {Lo: 895, Hi: 900, Stride: 5}, + {Lo: 913, Hi: 914, Stride: 1}, + {Lo: 917, Hi: 919, Stride: 1}, + 
{Lo: 922, Hi: 924, Stride: 2}, + {Lo: 925, Hi: 929, Stride: 2}, + {Lo: 932, Hi: 933, Stride: 1}, + {Lo: 935, Hi: 945, Stride: 10}, + {Lo: 947, Hi: 953, Stride: 6}, + {Lo: 957, Hi: 965, Stride: 2}, + {Lo: 978, Hi: 988, Stride: 10}, + {Lo: 1000, Hi: 1010, Stride: 10}, + {Lo: 1011, Hi: 1017, Stride: 6}, + {Lo: 1018, Hi: 1029, Stride: 11}, + {Lo: 1030, Hi: 1032, Stride: 2}, + {Lo: 1109, Hi: 1110, Stride: 1}, + {Lo: 1112, Hi: 1121, Stride: 9}, + {Lo: 1140, Hi: 1141, Stride: 1}, + {Lo: 1198, Hi: 1199, Stride: 1}, + {Lo: 1211, Hi: 1213, Stride: 2}, + {Lo: 1216, Hi: 1231, Stride: 15}, + {Lo: 1248, Hi: 1281, Stride: 33}, + {Lo: 1292, Hi: 1307, Stride: 15}, + {Lo: 1308, Hi: 1309, Stride: 1}, + {Lo: 1357, Hi: 1359, Stride: 2}, + {Lo: 1365, Hi: 1370, Stride: 5}, + {Lo: 1373, Hi: 1377, Stride: 4}, + {Lo: 1379, Hi: 1382, Stride: 3}, + {Lo: 1392, Hi: 1400, Stride: 8}, + {Lo: 1404, Hi: 1405, Stride: 1}, + {Lo: 1409, Hi: 1412, Stride: 3}, + {Lo: 1413, Hi: 1417, Stride: 4}, + {Lo: 1472, Hi: 1475, Stride: 3}, + {Lo: 1493, Hi: 1496, Stride: 3}, + {Lo: 1497, Hi: 1503, Stride: 6}, + {Lo: 1505, Hi: 1523, Stride: 18}, + {Lo: 1549, Hi: 1575, Stride: 26}, + {Lo: 1607, Hi: 1632, Stride: 25}, + {Lo: 1633, Hi: 1637, Stride: 4}, + {Lo: 1639, Hi: 1643, Stride: 4}, + {Lo: 1645, Hi: 1726, Stride: 81}, + {Lo: 1729, Hi: 1748, Stride: 19}, + {Lo: 1749, Hi: 1776, Stride: 27}, + {Lo: 1777, Hi: 1781, Stride: 4}, + {Lo: 1783, Hi: 1793, Stride: 10}, + {Lo: 1794, Hi: 1796, Stride: 1}, + {Lo: 1984, Hi: 1994, Stride: 10}, + {Lo: 2036, Hi: 2037, Stride: 1}, + {Lo: 2042, Hi: 2307, Stride: 265}, + {Lo: 2406, Hi: 2429, Stride: 23}, + {Lo: 2534, Hi: 2538, Stride: 4}, + {Lo: 2541, Hi: 2662, Stride: 121}, + {Lo: 2663, Hi: 2666, Stride: 3}, + {Lo: 2691, Hi: 2790, Stride: 99}, + {Lo: 2819, Hi: 2848, Stride: 29}, + {Lo: 2918, Hi: 2920, Stride: 2}, + {Lo: 3046, Hi: 3074, Stride: 28}, + {Lo: 3174, Hi: 3202, Stride: 28}, + {Lo: 3302, Hi: 3330, Stride: 28}, + {Lo: 3360, Hi: 3430, Stride: 70}, + {Lo: 3437, Hi: 3458, 
Stride: 21}, + {Lo: 3664, Hi: 3792, Stride: 128}, + {Lo: 4125, Hi: 4160, Stride: 35}, + {Lo: 4327, Hi: 4351, Stride: 24}, + {Lo: 4608, Hi: 5024, Stride: 208}, + {Lo: 5025, Hi: 5026, Stride: 1}, + {Lo: 5029, Hi: 5033, Stride: 4}, + {Lo: 5034, Hi: 5036, Stride: 1}, + {Lo: 5038, Hi: 5043, Stride: 5}, + {Lo: 5047, Hi: 5051, Stride: 4}, + {Lo: 5053, Hi: 5056, Stride: 3}, + {Lo: 5058, Hi: 5059, Stride: 1}, + {Lo: 5070, Hi: 5071, Stride: 1}, + {Lo: 5074, Hi: 5076, Stride: 2}, + {Lo: 5077, Hi: 5081, Stride: 4}, + {Lo: 5082, Hi: 5086, Stride: 4}, + {Lo: 5087, Hi: 5090, Stride: 3}, + {Lo: 5094, Hi: 5095, Stride: 1}, + {Lo: 5102, Hi: 5107, Stride: 5}, + {Lo: 5108, Hi: 5120, Stride: 12}, + {Lo: 5167, Hi: 5171, Stride: 4}, + {Lo: 5176, Hi: 5194, Stride: 18}, + {Lo: 5196, Hi: 5229, Stride: 33}, + {Lo: 5231, Hi: 5234, Stride: 3}, + {Lo: 5261, Hi: 5290, Stride: 29}, + {Lo: 5311, Hi: 5441, Stride: 130}, + {Lo: 5500, Hi: 5501, Stride: 1}, + {Lo: 5511, Hi: 5551, Stride: 40}, + {Lo: 5556, Hi: 5573, Stride: 17}, + {Lo: 5598, Hi: 5610, Stride: 12}, + {Lo: 5616, Hi: 5623, Stride: 7}, + {Lo: 5741, Hi: 5742, Stride: 1}, + {Lo: 5760, Hi: 5810, Stride: 50}, + {Lo: 5815, Hi: 5825, Stride: 10}, + {Lo: 5836, Hi: 5845, Stride: 9}, + {Lo: 5846, Hi: 5868, Stride: 22}, + {Lo: 5869, Hi: 5941, Stride: 72}, + {Lo: 6147, Hi: 6153, Stride: 6}, + {Lo: 7428, Hi: 7439, Stride: 11}, + {Lo: 7441, Hi: 7452, Stride: 11}, + {Lo: 7456, Hi: 7458, Stride: 1}, + {Lo: 7462, Hi: 7555, Stride: 93}, + {Lo: 7564, Hi: 7837, Stride: 273}, + {Lo: 7935, Hi: 8125, Stride: 190}, + {Lo: 8126, Hi: 8128, Stride: 1}, + {Lo: 8175, Hi: 8189, Stride: 14}, + {Lo: 8190, Hi: 8192, Stride: 2}, + {Lo: 8193, Hi: 8202, Stride: 1}, + {Lo: 8208, Hi: 8210, Stride: 1}, + {Lo: 8218, Hi: 8219, Stride: 1}, + {Lo: 8228, Hi: 8232, Stride: 4}, + {Lo: 8233, Hi: 8239, Stride: 6}, + {Lo: 8242, Hi: 8249, Stride: 7}, + {Lo: 8250, Hi: 8257, Stride: 7}, + {Lo: 8259, Hi: 8260, Stride: 1}, + {Lo: 8270, Hi: 8275, Stride: 5}, + {Lo: 8282, Hi: 8287, Stride: 5}, 
+ {Lo: 8450, Hi: 8458, Stride: 8}, + {Lo: 8459, Hi: 8462, Stride: 1}, + {Lo: 8464, Hi: 8467, Stride: 1}, + {Lo: 8469, Hi: 8473, Stride: 4}, + {Lo: 8474, Hi: 8477, Stride: 1}, + {Lo: 8484, Hi: 8488, Stride: 4}, + {Lo: 8490, Hi: 8492, Stride: 2}, + {Lo: 8493, Hi: 8497, Stride: 1}, + {Lo: 8499, Hi: 8500, Stride: 1}, + {Lo: 8505, Hi: 8509, Stride: 4}, + {Lo: 8517, Hi: 8521, Stride: 1}, + {Lo: 8544, Hi: 8548, Stride: 4}, + {Lo: 8553, Hi: 8556, Stride: 3}, + {Lo: 8557, Hi: 8560, Stride: 1}, + {Lo: 8564, Hi: 8569, Stride: 5}, + {Lo: 8572, Hi: 8574, Stride: 1}, + {Lo: 8722, Hi: 8725, Stride: 3}, + {Lo: 8726, Hi: 8727, Stride: 1}, + {Lo: 8739, Hi: 8744, Stride: 5}, + {Lo: 8746, Hi: 8758, Stride: 12}, + {Lo: 8764, Hi: 8868, Stride: 104}, + {Lo: 8897, Hi: 8899, Stride: 2}, + {Lo: 8959, Hi: 9075, Stride: 116}, + {Lo: 9076, Hi: 9082, Stride: 6}, + {Lo: 9213, Hi: 9585, Stride: 372}, + {Lo: 9587, Hi: 10088, Stride: 501}, + {Lo: 10089, Hi: 10094, Stride: 5}, + {Lo: 10095, Hi: 10098, Stride: 3}, + {Lo: 10099, Hi: 10101, Stride: 1}, + {Lo: 10133, Hi: 10134, Stride: 1}, + {Lo: 10187, Hi: 10189, Stride: 2}, + {Lo: 10201, Hi: 10539, Stride: 338}, + {Lo: 10540, Hi: 10741, Stride: 201}, + {Lo: 10744, Hi: 10745, Stride: 1}, + {Lo: 10799, Hi: 11397, Stride: 598}, + {Lo: 11406, Hi: 11410, Stride: 4}, + {Lo: 11412, Hi: 11416, Stride: 4}, + {Lo: 11418, Hi: 11422, Stride: 4}, + {Lo: 11423, Hi: 11426, Stride: 3}, + {Lo: 11427, Hi: 11430, Stride: 1}, + {Lo: 11432, Hi: 11436, Stride: 4}, + {Lo: 11450, Hi: 11462, Stride: 12}, + {Lo: 11466, Hi: 11468, Stride: 2}, + {Lo: 11472, Hi: 11474, Stride: 2}, + {Lo: 11576, Hi: 11577, Stride: 1}, + {Lo: 11599, Hi: 11601, Stride: 2}, + {Lo: 11604, Hi: 11605, Stride: 1}, + {Lo: 11613, Hi: 11840, Stride: 227}, + {Lo: 12034, Hi: 12035, Stride: 1}, + {Lo: 12295, Hi: 12308, Stride: 13}, + {Lo: 12309, Hi: 12339, Stride: 30}, + {Lo: 12448, Hi: 12755, Stride: 307}, + {Lo: 12756, Hi: 20022, Stride: 7266}, + {Lo: 20031, Hi: 42192, Stride: 22161}, + {Lo: 42193, Hi: 
42196, Stride: 1}, + {Lo: 42198, Hi: 42199, Stride: 1}, + {Lo: 42201, Hi: 42202, Stride: 1}, + {Lo: 42204, Hi: 42205, Stride: 1}, + {Lo: 42207, Hi: 42211, Stride: 1}, + {Lo: 42214, Hi: 42215, Stride: 1}, + {Lo: 42218, Hi: 42220, Stride: 1}, + {Lo: 42222, Hi: 42226, Stride: 2}, + {Lo: 42227, Hi: 42228, Stride: 1}, + {Lo: 42232, Hi: 42233, Stride: 1}, + {Lo: 42237, Hi: 42239, Stride: 2}, + {Lo: 42510, Hi: 42564, Stride: 54}, + {Lo: 42567, Hi: 42719, Stride: 152}, + {Lo: 42731, Hi: 42735, Stride: 4}, + {Lo: 42801, Hi: 42842, Stride: 41}, + {Lo: 42858, Hi: 42862, Stride: 4}, + {Lo: 42872, Hi: 42889, Stride: 17}, + {Lo: 42892, Hi: 42904, Stride: 12}, + {Lo: 42905, Hi: 42911, Stride: 6}, + {Lo: 42923, Hi: 42930, Stride: 7}, + {Lo: 42931, Hi: 42932, Stride: 1}, + {Lo: 43826, Hi: 43829, Stride: 3}, + {Lo: 43837, Hi: 43847, Stride: 10}, + {Lo: 43848, Hi: 43854, Stride: 6}, + {Lo: 43858, Hi: 43866, Stride: 8}, + {Lo: 43893, Hi: 43905, Stride: 12}, + {Lo: 43907, Hi: 43923, Stride: 16}, + {Lo: 43945, Hi: 43946, Stride: 1}, + {Lo: 43951, Hi: 64422, Stride: 20471}, + {Lo: 64423, Hi: 64429, Stride: 1}, + {Lo: 64830, Hi: 64831, Stride: 1}, + {Lo: 65072, Hi: 65101, Stride: 29}, + {Lo: 65102, Hi: 65103, Stride: 1}, + {Lo: 65112, Hi: 65128, Stride: 16}, + {Lo: 65165, Hi: 65166, Stride: 1}, + {Lo: 65257, Hi: 65260, Stride: 1}, + {Lo: 65282, Hi: 65284, Stride: 2}, + {Lo: 65285, Hi: 65287, Stride: 1}, + {Lo: 65290, Hi: 65291, Stride: 1}, + {Lo: 65293, Hi: 65305, Stride: 1}, + {Lo: 65308, Hi: 65310, Stride: 1}, + {Lo: 65312, Hi: 65373, Stride: 1}, + {Lo: 65512, Hi: 65512, Stride: 1}, + }, + R32: []unicode.Range32{ + {Lo: 66178, Hi: 66182, Stride: 4}, + {Lo: 66183, Hi: 66186, Stride: 3}, + {Lo: 66192, Hi: 66194, Stride: 2}, + {Lo: 66197, Hi: 66199, Stride: 1}, + {Lo: 66203, Hi: 66208, Stride: 5}, + {Lo: 66209, Hi: 66210, Stride: 1}, + {Lo: 66213, Hi: 66219, Stride: 6}, + {Lo: 66224, Hi: 66226, Stride: 1}, + {Lo: 66228, Hi: 66255, Stride: 27}, + {Lo: 66293, Hi: 66305, Stride: 12}, + {Lo: 
66306, Hi: 66313, Stride: 7}, + {Lo: 66321, Hi: 66325, Stride: 4}, + {Lo: 66327, Hi: 66330, Stride: 3}, + {Lo: 66335, Hi: 66336, Stride: 1}, + {Lo: 66338, Hi: 66564, Stride: 226}, + {Lo: 66581, Hi: 66587, Stride: 6}, + {Lo: 66592, Hi: 66604, Stride: 12}, + {Lo: 66621, Hi: 66632, Stride: 11}, + {Lo: 66740, Hi: 66754, Stride: 14}, + {Lo: 66766, Hi: 66770, Stride: 4}, + {Lo: 66794, Hi: 66806, Stride: 12}, + {Lo: 66835, Hi: 66838, Stride: 3}, + {Lo: 66840, Hi: 66844, Stride: 4}, + {Lo: 66845, Hi: 66853, Stride: 8}, + {Lo: 66854, Hi: 66855, Stride: 1}, + {Lo: 68176, Hi: 70864, Stride: 2688}, + {Lo: 71430, Hi: 71438, Stride: 4}, + {Lo: 71439, Hi: 71840, Stride: 401}, + {Lo: 71842, Hi: 71844, Stride: 1}, + {Lo: 71846, Hi: 71852, Stride: 3}, + {Lo: 71854, Hi: 71855, Stride: 1}, + {Lo: 71858, Hi: 71867, Stride: 3}, + {Lo: 71868, Hi: 71872, Stride: 4}, + {Lo: 71873, Hi: 71876, Stride: 1}, + {Lo: 71878, Hi: 71884, Stride: 2}, + {Lo: 71893, Hi: 71896, Stride: 1}, + {Lo: 71900, Hi: 71904, Stride: 4}, + {Lo: 71909, Hi: 71910, Stride: 1}, + {Lo: 71913, Hi: 71922, Stride: 3}, + {Lo: 93960, Hi: 93962, Stride: 2}, + {Lo: 93974, Hi: 93992, Stride: 18}, + {Lo: 94005, Hi: 94010, Stride: 5}, + {Lo: 94011, Hi: 94015, Stride: 4}, + {Lo: 94016, Hi: 94018, Stride: 2}, + {Lo: 94019, Hi: 94033, Stride: 14}, + {Lo: 94034, Hi: 119060, Stride: 25026}, + {Lo: 119149, Hi: 119302, Stride: 153}, + {Lo: 119309, Hi: 119311, Stride: 2}, + {Lo: 119314, Hi: 119315, Stride: 1}, + {Lo: 119318, Hi: 119338, Stride: 20}, + {Lo: 119350, Hi: 119351, Stride: 1}, + {Lo: 119354, Hi: 119355, Stride: 1}, + {Lo: 119808, Hi: 119845, Stride: 1}, + {Lo: 119847, Hi: 119892, Stride: 1}, + {Lo: 119894, Hi: 119897, Stride: 1}, + {Lo: 119899, Hi: 119949, Stride: 1}, + {Lo: 119951, Hi: 119964, Stride: 1}, + {Lo: 119966, Hi: 119967, Stride: 1}, + {Lo: 119970, Hi: 119973, Stride: 3}, + {Lo: 119974, Hi: 119977, Stride: 3}, + {Lo: 119978, Hi: 119980, Stride: 1}, + {Lo: 119982, Hi: 119993, Stride: 1}, + {Lo: 119995, Hi: 119997, 
Stride: 2}, + {Lo: 119998, Hi: 120001, Stride: 1}, + {Lo: 120003, Hi: 120005, Stride: 2}, + {Lo: 120006, Hi: 120053, Stride: 1}, + {Lo: 120055, Hi: 120069, Stride: 1}, + {Lo: 120071, Hi: 120074, Stride: 1}, + {Lo: 120077, Hi: 120084, Stride: 1}, + {Lo: 120086, Hi: 120092, Stride: 1}, + {Lo: 120094, Hi: 120105, Stride: 1}, + {Lo: 120107, Hi: 120121, Stride: 1}, + {Lo: 120123, Hi: 120126, Stride: 1}, + {Lo: 120128, Hi: 120132, Stride: 1}, + {Lo: 120134, Hi: 120138, Stride: 4}, + {Lo: 120139, Hi: 120144, Stride: 1}, + {Lo: 120146, Hi: 120157, Stride: 1}, + {Lo: 120159, Hi: 120209, Stride: 1}, + {Lo: 120211, Hi: 120261, Stride: 1}, + {Lo: 120263, Hi: 120313, Stride: 1}, + {Lo: 120315, Hi: 120365, Stride: 1}, + {Lo: 120367, Hi: 120417, Stride: 1}, + {Lo: 120419, Hi: 120469, Stride: 1}, + {Lo: 120471, Hi: 120484, Stride: 1}, + {Lo: 120488, Hi: 120489, Stride: 1}, + {Lo: 120492, Hi: 120494, Stride: 1}, + {Lo: 120496, Hi: 120497, Stride: 1}, + {Lo: 120499, Hi: 120500, Stride: 1}, + {Lo: 120502, Hi: 120504, Stride: 2}, + {Lo: 120507, Hi: 120508, Stride: 1}, + {Lo: 120510, Hi: 120514, Stride: 4}, + {Lo: 120516, Hi: 120522, Stride: 6}, + {Lo: 120526, Hi: 120534, Stride: 2}, + {Lo: 120544, Hi: 120546, Stride: 2}, + {Lo: 120547, Hi: 120550, Stride: 3}, + {Lo: 120551, Hi: 120552, Stride: 1}, + {Lo: 120554, Hi: 120555, Stride: 1}, + {Lo: 120557, Hi: 120558, Stride: 1}, + {Lo: 120560, Hi: 120562, Stride: 2}, + {Lo: 120565, Hi: 120566, Stride: 1}, + {Lo: 120568, Hi: 120572, Stride: 4}, + {Lo: 120574, Hi: 120580, Stride: 6}, + {Lo: 120584, Hi: 120592, Stride: 2}, + {Lo: 120602, Hi: 120604, Stride: 2}, + {Lo: 120605, Hi: 120608, Stride: 3}, + {Lo: 120609, Hi: 120610, Stride: 1}, + {Lo: 120612, Hi: 120613, Stride: 1}, + {Lo: 120615, Hi: 120616, Stride: 1}, + {Lo: 120618, Hi: 120620, Stride: 2}, + {Lo: 120623, Hi: 120624, Stride: 1}, + {Lo: 120626, Hi: 120630, Stride: 4}, + {Lo: 120632, Hi: 120638, Stride: 6}, + {Lo: 120642, Hi: 120650, Stride: 2}, + {Lo: 120660, Hi: 120662, Stride: 
2}, + {Lo: 120663, Hi: 120666, Stride: 3}, + {Lo: 120667, Hi: 120668, Stride: 1}, + {Lo: 120670, Hi: 120671, Stride: 1}, + {Lo: 120673, Hi: 120674, Stride: 1}, + {Lo: 120676, Hi: 120678, Stride: 2}, + {Lo: 120681, Hi: 120682, Stride: 1}, + {Lo: 120684, Hi: 120688, Stride: 4}, + {Lo: 120690, Hi: 120696, Stride: 6}, + {Lo: 120700, Hi: 120708, Stride: 2}, + {Lo: 120718, Hi: 120720, Stride: 2}, + {Lo: 120721, Hi: 120724, Stride: 3}, + {Lo: 120725, Hi: 120726, Stride: 1}, + {Lo: 120728, Hi: 120729, Stride: 1}, + {Lo: 120731, Hi: 120732, Stride: 1}, + {Lo: 120734, Hi: 120736, Stride: 2}, + {Lo: 120739, Hi: 120740, Stride: 1}, + {Lo: 120742, Hi: 120746, Stride: 4}, + {Lo: 120748, Hi: 120754, Stride: 6}, + {Lo: 120758, Hi: 120766, Stride: 2}, + {Lo: 120776, Hi: 120778, Stride: 2}, + {Lo: 120782, Hi: 120831, Stride: 1}, + {Lo: 125127, Hi: 125131, Stride: 4}, + {Lo: 126464, Hi: 126500, Stride: 36}, + {Lo: 126564, Hi: 126592, Stride: 28}, + {Lo: 126596, Hi: 128844, Stride: 2248}, + {Lo: 128872, Hi: 130032, Stride: 1160}, + {Lo: 130033, Hi: 130041, Stride: 1}, + }, + LatinOffset: 0, }, - R32: []unicode.Range32{ - {Lo: 66178, Hi: 66182, Stride: 4}, - {Lo: 66183, Hi: 66186, Stride: 3}, - {Lo: 66192, Hi: 66194, Stride: 2}, - {Lo: 66197, Hi: 66199, Stride: 1}, - {Lo: 66203, Hi: 66208, Stride: 5}, - {Lo: 66209, Hi: 66210, Stride: 1}, - {Lo: 66213, Hi: 66219, Stride: 6}, - {Lo: 66224, Hi: 66226, Stride: 1}, - {Lo: 66228, Hi: 66255, Stride: 27}, - {Lo: 66293, Hi: 66305, Stride: 12}, - {Lo: 66306, Hi: 66313, Stride: 7}, - {Lo: 66321, Hi: 66325, Stride: 4}, - {Lo: 66327, Hi: 66330, Stride: 3}, - {Lo: 66335, Hi: 66336, Stride: 1}, - {Lo: 66338, Hi: 66564, Stride: 226}, - {Lo: 66581, Hi: 66587, Stride: 6}, - {Lo: 66592, Hi: 66604, Stride: 12}, - {Lo: 66621, Hi: 66632, Stride: 11}, - {Lo: 66740, Hi: 66754, Stride: 14}, - {Lo: 66766, Hi: 66770, Stride: 4}, - {Lo: 66794, Hi: 66806, Stride: 12}, - {Lo: 66835, Hi: 66838, Stride: 3}, - {Lo: 66840, Hi: 66844, Stride: 4}, - {Lo: 66845, Hi: 
66853, Stride: 8}, - {Lo: 66854, Hi: 66855, Stride: 1}, - {Lo: 68176, Hi: 70864, Stride: 2688}, - {Lo: 71430, Hi: 71438, Stride: 4}, - {Lo: 71439, Hi: 71840, Stride: 401}, - {Lo: 71842, Hi: 71844, Stride: 1}, - {Lo: 71846, Hi: 71852, Stride: 3}, - {Lo: 71854, Hi: 71855, Stride: 1}, - {Lo: 71858, Hi: 71867, Stride: 3}, - {Lo: 71868, Hi: 71872, Stride: 4}, - {Lo: 71873, Hi: 71876, Stride: 1}, - {Lo: 71878, Hi: 71884, Stride: 2}, - {Lo: 71893, Hi: 71896, Stride: 1}, - {Lo: 71900, Hi: 71904, Stride: 4}, - {Lo: 71909, Hi: 71910, Stride: 1}, - {Lo: 71913, Hi: 71922, Stride: 3}, - {Lo: 93960, Hi: 93962, Stride: 2}, - {Lo: 93974, Hi: 93992, Stride: 18}, - {Lo: 94005, Hi: 94010, Stride: 5}, - {Lo: 94011, Hi: 94015, Stride: 4}, - {Lo: 94016, Hi: 94018, Stride: 2}, - {Lo: 94019, Hi: 94033, Stride: 14}, - {Lo: 94034, Hi: 119060, Stride: 25026}, - {Lo: 119149, Hi: 119302, Stride: 153}, - {Lo: 119309, Hi: 119311, Stride: 2}, - {Lo: 119314, Hi: 119315, Stride: 1}, - {Lo: 119318, Hi: 119338, Stride: 20}, - {Lo: 119350, Hi: 119351, Stride: 1}, - {Lo: 119354, Hi: 119355, Stride: 1}, - {Lo: 119808, Hi: 119845, Stride: 1}, - {Lo: 119847, Hi: 119892, Stride: 1}, - {Lo: 119894, Hi: 119897, Stride: 1}, - {Lo: 119899, Hi: 119949, Stride: 1}, - {Lo: 119951, Hi: 119964, Stride: 1}, - {Lo: 119966, Hi: 119967, Stride: 1}, - {Lo: 119970, Hi: 119973, Stride: 3}, - {Lo: 119974, Hi: 119977, Stride: 3}, - {Lo: 119978, Hi: 119980, Stride: 1}, - {Lo: 119982, Hi: 119993, Stride: 1}, - {Lo: 119995, Hi: 119997, Stride: 2}, - {Lo: 119998, Hi: 120001, Stride: 1}, - {Lo: 120003, Hi: 120005, Stride: 2}, - {Lo: 120006, Hi: 120053, Stride: 1}, - {Lo: 120055, Hi: 120069, Stride: 1}, - {Lo: 120071, Hi: 120074, Stride: 1}, - {Lo: 120077, Hi: 120084, Stride: 1}, - {Lo: 120086, Hi: 120092, Stride: 1}, - {Lo: 120094, Hi: 120105, Stride: 1}, - {Lo: 120107, Hi: 120121, Stride: 1}, - {Lo: 120123, Hi: 120126, Stride: 1}, - {Lo: 120128, Hi: 120132, Stride: 1}, - {Lo: 120134, Hi: 120138, Stride: 4}, - {Lo: 120139, Hi: 
120144, Stride: 1}, - {Lo: 120146, Hi: 120157, Stride: 1}, - {Lo: 120159, Hi: 120209, Stride: 1}, - {Lo: 120211, Hi: 120261, Stride: 1}, - {Lo: 120263, Hi: 120313, Stride: 1}, - {Lo: 120315, Hi: 120365, Stride: 1}, - {Lo: 120367, Hi: 120417, Stride: 1}, - {Lo: 120419, Hi: 120469, Stride: 1}, - {Lo: 120471, Hi: 120484, Stride: 1}, - {Lo: 120488, Hi: 120489, Stride: 1}, - {Lo: 120492, Hi: 120494, Stride: 1}, - {Lo: 120496, Hi: 120497, Stride: 1}, - {Lo: 120499, Hi: 120500, Stride: 1}, - {Lo: 120502, Hi: 120504, Stride: 2}, - {Lo: 120507, Hi: 120508, Stride: 1}, - {Lo: 120510, Hi: 120514, Stride: 4}, - {Lo: 120516, Hi: 120522, Stride: 6}, - {Lo: 120526, Hi: 120534, Stride: 2}, - {Lo: 120544, Hi: 120546, Stride: 2}, - {Lo: 120547, Hi: 120550, Stride: 3}, - {Lo: 120551, Hi: 120552, Stride: 1}, - {Lo: 120554, Hi: 120555, Stride: 1}, - {Lo: 120557, Hi: 120558, Stride: 1}, - {Lo: 120560, Hi: 120562, Stride: 2}, - {Lo: 120565, Hi: 120566, Stride: 1}, - {Lo: 120568, Hi: 120572, Stride: 4}, - {Lo: 120574, Hi: 120580, Stride: 6}, - {Lo: 120584, Hi: 120592, Stride: 2}, - {Lo: 120602, Hi: 120604, Stride: 2}, - {Lo: 120605, Hi: 120608, Stride: 3}, - {Lo: 120609, Hi: 120610, Stride: 1}, - {Lo: 120612, Hi: 120613, Stride: 1}, - {Lo: 120615, Hi: 120616, Stride: 1}, - {Lo: 120618, Hi: 120620, Stride: 2}, - {Lo: 120623, Hi: 120624, Stride: 1}, - {Lo: 120626, Hi: 120630, Stride: 4}, - {Lo: 120632, Hi: 120638, Stride: 6}, - {Lo: 120642, Hi: 120650, Stride: 2}, - {Lo: 120660, Hi: 120662, Stride: 2}, - {Lo: 120663, Hi: 120666, Stride: 3}, - {Lo: 120667, Hi: 120668, Stride: 1}, - {Lo: 120670, Hi: 120671, Stride: 1}, - {Lo: 120673, Hi: 120674, Stride: 1}, - {Lo: 120676, Hi: 120678, Stride: 2}, - {Lo: 120681, Hi: 120682, Stride: 1}, - {Lo: 120684, Hi: 120688, Stride: 4}, - {Lo: 120690, Hi: 120696, Stride: 6}, - {Lo: 120700, Hi: 120708, Stride: 2}, - {Lo: 120718, Hi: 120720, Stride: 2}, - {Lo: 120721, Hi: 120724, Stride: 3}, - {Lo: 120725, Hi: 120726, Stride: 1}, - {Lo: 120728, Hi: 120729, 
Stride: 1}, - {Lo: 120731, Hi: 120732, Stride: 1}, - {Lo: 120734, Hi: 120736, Stride: 2}, - {Lo: 120739, Hi: 120740, Stride: 1}, - {Lo: 120742, Hi: 120746, Stride: 4}, - {Lo: 120748, Hi: 120754, Stride: 6}, - {Lo: 120758, Hi: 120766, Stride: 2}, - {Lo: 120776, Hi: 120778, Stride: 2}, - {Lo: 120782, Hi: 120831, Stride: 1}, - {Lo: 125127, Hi: 125131, Stride: 4}, - {Lo: 126464, Hi: 126500, Stride: 36}, - {Lo: 126564, Hi: 126592, Stride: 28}, - {Lo: 126596, Hi: 128844, Stride: 2248}, - {Lo: 128872, Hi: 130032, Stride: 1160}, - {Lo: 130033, Hi: 130041, Stride: 1}, - }, - LatinOffset: 0, }, - }, - "_default": { - Confusable: []rune{160, 180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{32, 96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "_default", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 160, Hi: 180, Stride: 20}, - {Lo: 215, Hi: 305, Stride: 90}, - {Lo: 921, Hi: 1009, Stride: 88}, - {Lo: 1040, Hi: 1042, Stride: 2}, - {Lo: 1045, Hi: 1047, Stride: 2}, - {Lo: 1050, Hi: 1052, Stride: 2}, - {Lo: 1053, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8211, Hi: 8216, Stride: 5}, - {Lo: 8217, Hi: 8245, Stride: 28}, - {Lo: 12494, Hi: 65281, Stride: 52787}, - {Lo: 65283, Hi: 65288, Stride: 5}, - {Lo: 65289, Hi: 65292, Stride: 3}, - {Lo: 65306, Hi: 65307, Stride: 1}, - {Lo: 65311, Hi: 65374, Stride: 63}, + + "_default": { + Confusable: []rune{160, 180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 
1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{32, 96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "_default", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 160, Hi: 180, Stride: 20}, + {Lo: 215, Hi: 305, Stride: 90}, + {Lo: 921, Hi: 1009, Stride: 88}, + {Lo: 1040, Hi: 1042, Stride: 2}, + {Lo: 1045, Hi: 1047, Stride: 2}, + {Lo: 1050, Hi: 1052, Stride: 2}, + {Lo: 1053, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8211, Hi: 8216, Stride: 5}, + {Lo: 8217, Hi: 8245, Stride: 28}, + {Lo: 12494, Hi: 65281, Stride: 52787}, + {Lo: 65283, Hi: 65288, Stride: 5}, + {Lo: 65289, Hi: 65292, Stride: 3}, + {Lo: 65306, Hi: 65307, Stride: 1}, + {Lo: 65311, Hi: 65374, Stride: 63}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "cs": { - Confusable: []rune{180, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{96, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "cs", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 305, Stride: 125}, - {Lo: 921, Hi: 1009, Stride: 88}, - {Lo: 1040, Hi: 1042, Stride: 2}, - {Lo: 1045, Hi: 1047, Stride: 2}, - {Lo: 1050, Hi: 
1052, Stride: 2}, - {Lo: 1053, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8216, Hi: 8217, Stride: 1}, - {Lo: 8245, Hi: 12494, Stride: 4249}, - {Lo: 65281, Hi: 65283, Stride: 2}, - {Lo: 65288, Hi: 65289, Stride: 1}, - {Lo: 65292, Hi: 65306, Stride: 14}, - {Lo: 65307, Hi: 65311, Stride: 4}, - {Lo: 65374, Hi: 65374, Stride: 1}, + + "cs": { + Confusable: []rune{180, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{96, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "cs", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 305, Stride: 125}, + {Lo: 921, Hi: 1009, Stride: 88}, + {Lo: 1040, Hi: 1042, Stride: 2}, + {Lo: 1045, Hi: 1047, Stride: 2}, + {Lo: 1050, Hi: 1052, Stride: 2}, + {Lo: 1053, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8216, Hi: 8217, Stride: 1}, + {Lo: 8245, Hi: 12494, Stride: 4249}, + {Lo: 65281, Hi: 65283, Stride: 2}, + {Lo: 65288, Hi: 65289, Stride: 1}, + {Lo: 65292, Hi: 65306, Stride: 14}, + {Lo: 65307, Hi: 65311, Stride: 4}, + {Lo: 65374, Hi: 65374, Stride: 1}, + }, + R32: []unicode.Range32{}, + LatinOffset: 0, }, - R32: []unicode.Range32{}, - LatinOffset: 0, }, - }, - "de": { - Confusable: []rune{180, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 
1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{96, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "de", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 305, Stride: 125}, - {Lo: 921, Hi: 1009, Stride: 88}, - {Lo: 1040, Hi: 1042, Stride: 2}, - {Lo: 1045, Hi: 1047, Stride: 2}, - {Lo: 1050, Hi: 1052, Stride: 2}, - {Lo: 1053, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8216, Hi: 8217, Stride: 1}, - {Lo: 8245, Hi: 12494, Stride: 4249}, - {Lo: 65281, Hi: 65283, Stride: 2}, - {Lo: 65288, Hi: 65289, Stride: 1}, - {Lo: 65292, Hi: 65306, Stride: 14}, - {Lo: 65307, Hi: 65311, Stride: 4}, - {Lo: 65374, Hi: 65374, Stride: 1}, + + "de": { + Confusable: []rune{180, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{96, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "de", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 305, Stride: 125}, + {Lo: 921, Hi: 1009, Stride: 88}, + {Lo: 1040, Hi: 1042, Stride: 2}, + {Lo: 1045, Hi: 1047, Stride: 2}, + {Lo: 1050, Hi: 1052, Stride: 2}, + {Lo: 1053, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, 
Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8216, Hi: 8217, Stride: 1}, + {Lo: 8245, Hi: 12494, Stride: 4249}, + {Lo: 65281, Hi: 65283, Stride: 2}, + {Lo: 65288, Hi: 65289, Stride: 1}, + {Lo: 65292, Hi: 65306, Stride: 14}, + {Lo: 65307, Hi: 65311, Stride: 4}, + {Lo: 65374, Hi: 65374, Stride: 1}, + }, + R32: []unicode.Range32{}, + LatinOffset: 0, }, - R32: []unicode.Range32{}, - LatinOffset: 0, }, - }, - "es": { - Confusable: []rune{180, 215, 305, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{96, 120, 105, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "es", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 215, Stride: 35}, - {Lo: 305, Hi: 1009, Stride: 704}, - {Lo: 1040, Hi: 1042, Stride: 2}, - {Lo: 1045, Hi: 1047, Stride: 2}, - {Lo: 1050, Hi: 1052, Stride: 2}, - {Lo: 1053, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8211, Hi: 8245, Stride: 34}, - {Lo: 12494, Hi: 65281, Stride: 52787}, - {Lo: 65283, Hi: 65288, Stride: 5}, - {Lo: 65289, Hi: 65292, Stride: 3}, - {Lo: 65306, Hi: 65307, Stride: 1}, - {Lo: 65311, Hi: 65374, Stride: 63}, + + "es": { + Confusable: []rune{180, 215, 305, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{96, 120, 105, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 
45, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "es", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 215, Stride: 35}, + {Lo: 305, Hi: 1009, Stride: 704}, + {Lo: 1040, Hi: 1042, Stride: 2}, + {Lo: 1045, Hi: 1047, Stride: 2}, + {Lo: 1050, Hi: 1052, Stride: 2}, + {Lo: 1053, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8211, Hi: 8245, Stride: 34}, + {Lo: 12494, Hi: 65281, Stride: 52787}, + {Lo: 65283, Hi: 65288, Stride: 5}, + {Lo: 65289, Hi: 65292, Stride: 3}, + {Lo: 65306, Hi: 65307, Stride: 1}, + {Lo: 65311, Hi: 65374, Stride: 63}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "fr": { - Confusable: []rune{215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "fr", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 215, Hi: 305, Stride: 90}, - {Lo: 921, Hi: 1009, Stride: 88}, - {Lo: 1040, Hi: 1042, Stride: 2}, - {Lo: 1045, Hi: 1047, Stride: 2}, - {Lo: 1050, Hi: 1052, Stride: 2}, - {Lo: 1053, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8216, Hi: 8245, Stride: 29}, - {Lo: 12494, Hi: 65281, Stride: 52787}, - {Lo: 65283, Hi: 65288, Stride: 5}, - {Lo: 65289, Hi: 65292, Stride: 3}, - {Lo: 65306, 
Hi: 65307, Stride: 1}, - {Lo: 65311, Hi: 65374, Stride: 63}, + + "fr": { + Confusable: []rune{215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "fr", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 215, Hi: 305, Stride: 90}, + {Lo: 921, Hi: 1009, Stride: 88}, + {Lo: 1040, Hi: 1042, Stride: 2}, + {Lo: 1045, Hi: 1047, Stride: 2}, + {Lo: 1050, Hi: 1052, Stride: 2}, + {Lo: 1053, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8216, Hi: 8245, Stride: 29}, + {Lo: 12494, Hi: 65281, Stride: 52787}, + {Lo: 65283, Hi: 65288, Stride: 5}, + {Lo: 65289, Hi: 65292, Stride: 3}, + {Lo: 65306, Hi: 65307, Stride: 1}, + {Lo: 65311, Hi: 65374, Stride: 63}, + }, + R32: []unicode.Range32{}, + LatinOffset: 0, }, - R32: []unicode.Range32{}, - LatinOffset: 0, }, - }, - "it": { - Confusable: []rune{160, 180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8216, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{32, 96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "it", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 160, Hi: 180, Stride: 20}, - {Lo: 215, Hi: 305, Stride: 90}, - {Lo: 921, Hi: 1009, 
Stride: 88}, - {Lo: 1040, Hi: 1042, Stride: 2}, - {Lo: 1045, Hi: 1047, Stride: 2}, - {Lo: 1050, Hi: 1052, Stride: 2}, - {Lo: 1053, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8211, Hi: 8216, Stride: 5}, - {Lo: 8245, Hi: 12494, Stride: 4249}, - {Lo: 65281, Hi: 65283, Stride: 2}, - {Lo: 65288, Hi: 65289, Stride: 1}, - {Lo: 65292, Hi: 65306, Stride: 14}, - {Lo: 65307, Hi: 65311, Stride: 4}, - {Lo: 65374, Hi: 65374, Stride: 1}, + + "it": { + Confusable: []rune{160, 180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8216, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{32, 96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "it", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 160, Hi: 180, Stride: 20}, + {Lo: 215, Hi: 305, Stride: 90}, + {Lo: 921, Hi: 1009, Stride: 88}, + {Lo: 1040, Hi: 1042, Stride: 2}, + {Lo: 1045, Hi: 1047, Stride: 2}, + {Lo: 1050, Hi: 1052, Stride: 2}, + {Lo: 1053, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8211, Hi: 8216, Stride: 5}, + {Lo: 8245, Hi: 12494, Stride: 4249}, + {Lo: 65281, Hi: 65283, Stride: 2}, + {Lo: 65288, Hi: 65289, Stride: 1}, + {Lo: 65292, Hi: 65306, Stride: 14}, + {Lo: 65307, Hi: 65311, Stride: 4}, + {Lo: 65374, Hi: 65374, Stride: 1}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, 
}, - }, - "ja": { - Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8216, 8217, 8245, 65281, 65283, 65292, 65306, 65307}, - With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 96, 96, 33, 35, 44, 58, 59}, - Locale: "ja", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 215, Stride: 35}, - {Lo: 305, Hi: 921, Stride: 616}, - {Lo: 1009, Hi: 1040, Stride: 31}, - {Lo: 1042, Hi: 1045, Stride: 3}, - {Lo: 1047, Hi: 1050, Stride: 3}, - {Lo: 1052, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8211, Hi: 8216, Stride: 5}, - {Lo: 8217, Hi: 8245, Stride: 28}, - {Lo: 65281, Hi: 65283, Stride: 2}, - {Lo: 65292, Hi: 65306, Stride: 14}, - {Lo: 65307, Hi: 65307, Stride: 1}, + + "ja": { + Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8216, 8217, 8245, 65281, 65283, 65292, 65306, 65307}, + With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 96, 96, 33, 35, 44, 58, 59}, + Locale: "ja", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 215, Stride: 35}, + {Lo: 305, Hi: 921, Stride: 616}, + {Lo: 1009, Hi: 1040, Stride: 31}, + {Lo: 1042, Hi: 1045, Stride: 3}, + {Lo: 1047, Hi: 1050, Stride: 3}, + {Lo: 1052, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, 
Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8211, Hi: 8216, Stride: 5}, + {Lo: 8217, Hi: 8245, Stride: 28}, + {Lo: 65281, Hi: 65283, Stride: 2}, + {Lo: 65292, Hi: 65306, Stride: 14}, + {Lo: 65307, Hi: 65307, Stride: 1}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "ko": { - Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "ko", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 215, Stride: 35}, - {Lo: 305, Hi: 921, Stride: 616}, - {Lo: 1009, Hi: 1040, Stride: 31}, - {Lo: 1042, Hi: 1045, Stride: 3}, - {Lo: 1047, Hi: 1050, Stride: 3}, - {Lo: 1052, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8211, Hi: 8245, Stride: 34}, - {Lo: 12494, Hi: 65281, Stride: 52787}, - {Lo: 65283, Hi: 65288, Stride: 5}, - {Lo: 65289, Hi: 65292, Stride: 3}, - {Lo: 65306, Hi: 65307, Stride: 1}, - {Lo: 65311, Hi: 65374, Stride: 63}, + + "ko": { + Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + 
Locale: "ko", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 215, Stride: 35}, + {Lo: 305, Hi: 921, Stride: 616}, + {Lo: 1009, Hi: 1040, Stride: 31}, + {Lo: 1042, Hi: 1045, Stride: 3}, + {Lo: 1047, Hi: 1050, Stride: 3}, + {Lo: 1052, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8211, Hi: 8245, Stride: 34}, + {Lo: 12494, Hi: 65281, Stride: 52787}, + {Lo: 65283, Hi: 65288, Stride: 5}, + {Lo: 65289, Hi: 65292, Stride: 3}, + {Lo: 65306, Hi: 65307, Stride: 1}, + {Lo: 65311, Hi: 65374, Stride: 63}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "pl": { - Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "pl", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 215, Stride: 35}, - {Lo: 305, Hi: 921, Stride: 616}, - {Lo: 1009, Hi: 1040, Stride: 31}, - {Lo: 1042, Hi: 1045, Stride: 3}, - {Lo: 1047, Hi: 1050, Stride: 3}, - {Lo: 1052, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8216, Hi: 8217, Stride: 1}, - {Lo: 8245, Hi: 12494, Stride: 4249}, - {Lo: 65281, Hi: 65283, Stride: 2}, - {Lo: 65288, Hi: 65289, Stride: 1}, - {Lo: 65292, Hi: 65306, Stride: 14}, - {Lo: 65307, 
Hi: 65311, Stride: 4}, - {Lo: 65374, Hi: 65374, Stride: 1}, + + "pl": { + Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "pl", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 215, Stride: 35}, + {Lo: 305, Hi: 921, Stride: 616}, + {Lo: 1009, Hi: 1040, Stride: 31}, + {Lo: 1042, Hi: 1045, Stride: 3}, + {Lo: 1047, Hi: 1050, Stride: 3}, + {Lo: 1052, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8216, Hi: 8217, Stride: 1}, + {Lo: 8245, Hi: 12494, Stride: 4249}, + {Lo: 65281, Hi: 65283, Stride: 2}, + {Lo: 65288, Hi: 65289, Stride: 1}, + {Lo: 65292, Hi: 65306, Stride: 14}, + {Lo: 65307, Hi: 65311, Stride: 4}, + {Lo: 65374, Hi: 65374, Stride: 1}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "pt-BR": { - Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "pt-BR", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 215, Stride: 35}, - {Lo: 
305, Hi: 921, Stride: 616}, - {Lo: 1009, Hi: 1040, Stride: 31}, - {Lo: 1042, Hi: 1045, Stride: 3}, - {Lo: 1047, Hi: 1050, Stride: 3}, - {Lo: 1052, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8216, Hi: 8217, Stride: 1}, - {Lo: 8245, Hi: 12494, Stride: 4249}, - {Lo: 65281, Hi: 65283, Stride: 2}, - {Lo: 65288, Hi: 65289, Stride: 1}, - {Lo: 65292, Hi: 65306, Stride: 14}, - {Lo: 65307, Hi: 65311, Stride: 4}, - {Lo: 65374, Hi: 65374, Stride: 1}, + + "pt-BR": { + Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "pt-BR", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 215, Stride: 35}, + {Lo: 305, Hi: 921, Stride: 616}, + {Lo: 1009, Hi: 1040, Stride: 31}, + {Lo: 1042, Hi: 1045, Stride: 3}, + {Lo: 1047, Hi: 1050, Stride: 3}, + {Lo: 1052, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8216, Hi: 8217, Stride: 1}, + {Lo: 8245, Hi: 12494, Stride: 4249}, + {Lo: 65281, Hi: 65283, Stride: 2}, + {Lo: 65288, Hi: 65289, Stride: 1}, + {Lo: 65292, Hi: 65306, Stride: 14}, + {Lo: 65307, Hi: 65311, Stride: 4}, + {Lo: 65374, Hi: 65374, Stride: 1}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "qps-ploc": 
{ - Confusable: []rune{160, 180, 215, 305, 921, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{32, 96, 120, 105, 73, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "qps-ploc", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 160, Hi: 180, Stride: 20}, - {Lo: 215, Hi: 305, Stride: 90}, - {Lo: 921, Hi: 1040, Stride: 119}, - {Lo: 1042, Hi: 1045, Stride: 3}, - {Lo: 1047, Hi: 1050, Stride: 3}, - {Lo: 1052, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8211, Hi: 8216, Stride: 5}, - {Lo: 8217, Hi: 8245, Stride: 28}, - {Lo: 12494, Hi: 65281, Stride: 52787}, - {Lo: 65283, Hi: 65288, Stride: 5}, - {Lo: 65289, Hi: 65292, Stride: 3}, - {Lo: 65306, Hi: 65307, Stride: 1}, - {Lo: 65311, Hi: 65374, Stride: 63}, + + "qps-ploc": { + Confusable: []rune{160, 180, 215, 305, 921, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{32, 96, 120, 105, 73, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "qps-ploc", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 160, Hi: 180, Stride: 20}, + {Lo: 215, Hi: 305, Stride: 90}, + {Lo: 921, Hi: 1040, Stride: 119}, + {Lo: 1042, Hi: 1045, Stride: 3}, + {Lo: 1047, Hi: 1050, Stride: 3}, + {Lo: 1052, 
Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8211, Hi: 8216, Stride: 5}, + {Lo: 8217, Hi: 8245, Stride: 28}, + {Lo: 12494, Hi: 65281, Stride: 52787}, + {Lo: 65283, Hi: 65288, Stride: 5}, + {Lo: 65289, Hi: 65292, Stride: 3}, + {Lo: 65306, Hi: 65307, Stride: 1}, + {Lo: 65311, Hi: 65374, Stride: 63}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "ru": { - Confusable: []rune{180, 215, 305, 921, 1009, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{96, 120, 105, 73, 112, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "ru", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 215, Stride: 35}, - {Lo: 305, Hi: 921, Stride: 616}, - {Lo: 1009, Hi: 8216, Stride: 7207}, - {Lo: 8217, Hi: 8245, Stride: 28}, - {Lo: 12494, Hi: 65281, Stride: 52787}, - {Lo: 65283, Hi: 65288, Stride: 5}, - {Lo: 65289, Hi: 65292, Stride: 3}, - {Lo: 65306, Hi: 65307, Stride: 1}, - {Lo: 65311, Hi: 65374, Stride: 63}, + + "ru": { + Confusable: []rune{180, 215, 305, 921, 1009, 8216, 8217, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{96, 120, 105, 73, 112, 96, 96, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "ru", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 215, Stride: 35}, + {Lo: 305, Hi: 921, Stride: 616}, + {Lo: 1009, Hi: 8216, Stride: 7207}, + {Lo: 8217, Hi: 8245, Stride: 28}, + {Lo: 12494, Hi: 65281, Stride: 52787}, + {Lo: 65283, Hi: 65288, Stride: 5}, + {Lo: 65289, Hi: 65292, Stride: 3}, + {Lo: 65306, Hi: 65307, Stride: 1}, + {Lo: 65311, Hi: 65374, Stride: 63}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, 
}, - }, - "tr": { - Confusable: []rune{160, 180, 215, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, - With: []rune{32, 96, 120, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, - Locale: "tr", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 160, Hi: 180, Stride: 20}, - {Lo: 215, Hi: 921, Stride: 706}, - {Lo: 1009, Hi: 1040, Stride: 31}, - {Lo: 1042, Hi: 1045, Stride: 3}, - {Lo: 1047, Hi: 1050, Stride: 3}, - {Lo: 1052, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8211, Hi: 8245, Stride: 34}, - {Lo: 12494, Hi: 65281, Stride: 52787}, - {Lo: 65283, Hi: 65288, Stride: 5}, - {Lo: 65289, Hi: 65292, Stride: 3}, - {Lo: 65306, Hi: 65307, Stride: 1}, - {Lo: 65311, Hi: 65374, Stride: 63}, + + "tr": { + Confusable: []rune{160, 180, 215, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 8245, 12494, 65281, 65283, 65288, 65289, 65292, 65306, 65307, 65311, 65374}, + With: []rune{32, 96, 120, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 96, 47, 33, 35, 40, 41, 44, 58, 59, 63, 126}, + Locale: "tr", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 160, Hi: 180, Stride: 20}, + {Lo: 215, Hi: 921, Stride: 706}, + {Lo: 1009, Hi: 1040, Stride: 31}, + {Lo: 1042, Hi: 1045, Stride: 3}, + {Lo: 1047, Hi: 1050, Stride: 3}, + {Lo: 1052, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 
1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8211, Hi: 8245, Stride: 34}, + {Lo: 12494, Hi: 65281, Stride: 52787}, + {Lo: 65283, Hi: 65288, Stride: 5}, + {Lo: 65289, Hi: 65292, Stride: 3}, + {Lo: 65306, Hi: 65307, Stride: 1}, + {Lo: 65311, Hi: 65374, Stride: 63}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "zh-hans": { - Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8245, 12494, 65281, 65288, 65289, 65306, 65374}, - With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 47, 33, 40, 41, 58, 126}, - Locale: "zh-hans", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 215, Stride: 35}, - {Lo: 305, Hi: 921, Stride: 616}, - {Lo: 1009, Hi: 1040, Stride: 31}, - {Lo: 1042, Hi: 1045, Stride: 3}, - {Lo: 1047, Hi: 1050, Stride: 3}, - {Lo: 1052, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8245, Hi: 12494, Stride: 4249}, - {Lo: 65281, Hi: 65288, Stride: 7}, - {Lo: 65289, Hi: 65306, Stride: 17}, - {Lo: 65374, Hi: 65374, Stride: 1}, + + "zh-hans": { + Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8245, 12494, 65281, 65288, 65289, 65306, 65374}, + With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 96, 47, 33, 40, 41, 58, 126}, + Locale: "zh-hans", + 
RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 215, Stride: 35}, + {Lo: 305, Hi: 921, Stride: 616}, + {Lo: 1009, Hi: 1040, Stride: 31}, + {Lo: 1042, Hi: 1045, Stride: 3}, + {Lo: 1047, Hi: 1050, Stride: 3}, + {Lo: 1052, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8245, Hi: 12494, Stride: 4249}, + {Lo: 65281, Hi: 65288, Stride: 7}, + {Lo: 65289, Hi: 65306, Stride: 17}, + {Lo: 65374, Hi: 65374, Stride: 1}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, - "zh-hant": { - Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 12494, 65283, 65307, 65374}, - With: []rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 47, 35, 59, 126}, - Locale: "zh-hant", - RangeTable: &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 180, Hi: 215, Stride: 35}, - {Lo: 305, Hi: 921, Stride: 616}, - {Lo: 1009, Hi: 1040, Stride: 31}, - {Lo: 1042, Hi: 1045, Stride: 3}, - {Lo: 1047, Hi: 1050, Stride: 3}, - {Lo: 1052, Hi: 1054, Stride: 1}, - {Lo: 1056, Hi: 1059, Stride: 1}, - {Lo: 1061, Hi: 1068, Stride: 7}, - {Lo: 1072, Hi: 1073, Stride: 1}, - {Lo: 1075, Hi: 1077, Stride: 2}, - {Lo: 1086, Hi: 1088, Stride: 2}, - {Lo: 1089, Hi: 1093, Stride: 2}, - {Lo: 8211, Hi: 12494, Stride: 4283}, - {Lo: 65283, Hi: 65307, Stride: 24}, - {Lo: 65374, Hi: 65374, Stride: 1}, + + "zh-hant": { + Confusable: []rune{180, 215, 305, 921, 1009, 1040, 1042, 1045, 1047, 1050, 1052, 1053, 1054, 1056, 1057, 1058, 1059, 1061, 1068, 1072, 1073, 1075, 1077, 1086, 1088, 1089, 1091, 1093, 8211, 12494, 65283, 65307, 65374}, + With: 
[]rune{96, 120, 105, 73, 112, 65, 66, 69, 51, 75, 77, 72, 79, 80, 67, 84, 89, 88, 98, 97, 54, 114, 101, 111, 112, 99, 121, 120, 45, 47, 35, 59, 126}, + Locale: "zh-hant", + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 180, Hi: 215, Stride: 35}, + {Lo: 305, Hi: 921, Stride: 616}, + {Lo: 1009, Hi: 1040, Stride: 31}, + {Lo: 1042, Hi: 1045, Stride: 3}, + {Lo: 1047, Hi: 1050, Stride: 3}, + {Lo: 1052, Hi: 1054, Stride: 1}, + {Lo: 1056, Hi: 1059, Stride: 1}, + {Lo: 1061, Hi: 1068, Stride: 7}, + {Lo: 1072, Hi: 1073, Stride: 1}, + {Lo: 1075, Hi: 1077, Stride: 2}, + {Lo: 1086, Hi: 1088, Stride: 2}, + {Lo: 1089, Hi: 1093, Stride: 2}, + {Lo: 8211, Hi: 12494, Stride: 4283}, + {Lo: 65283, Hi: 65307, Stride: 24}, + {Lo: 65374, Hi: 65374, Stride: 1}, + }, + R32: []unicode.Range32{}, + LatinOffset: 1, }, - R32: []unicode.Range32{}, - LatinOffset: 1, }, - }, + } } diff --git a/modules/charset/ambiguous_gen_test.go b/modules/charset/ambiguous_gen_test.go index d3be0b1a13..81d2e8065b 100644 --- a/modules/charset/ambiguous_gen_test.go +++ b/modules/charset/ambiguous_gen_test.go @@ -8,11 +8,13 @@ import ( "testing" "unicode" + "code.gitea.io/gitea/modules/translation" + "github.com/stretchr/testify/assert" ) func TestAmbiguousCharacters(t *testing.T) { - for locale, ambiguous := range AmbiguousCharacters { + for locale, ambiguous := range globalVars().ambiguousTableMap { assert.Equal(t, locale, ambiguous.Locale) assert.Len(t, ambiguous.With, len(ambiguous.Confusable)) assert.True(t, sort.SliceIsSorted(ambiguous.Confusable, func(i, j int) bool { @@ -28,4 +30,8 @@ func TestAmbiguousCharacters(t *testing.T) { assert.True(t, found, "%c is not in %d", confusable, i) } } + + var confusableTo rune + ret := isAmbiguous('𝐾', &confusableTo, AmbiguousTablesForLocale(&translation.MockLocale{})...) 
+ assert.True(t, ret) } diff --git a/modules/charset/breakwriter.go b/modules/charset/breakwriter.go deleted file mode 100644 index a87e846466..0000000000 --- a/modules/charset/breakwriter.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2022 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package charset - -import ( - "bytes" - "io" -) - -// BreakWriter wraps an io.Writer to always write '\n' as '
' -type BreakWriter struct { - io.Writer -} - -// Write writes the provided byte slice transparently replacing '\n' with '
' -func (b *BreakWriter) Write(bs []byte) (n int, err error) { - pos := 0 - for pos < len(bs) { - idx := bytes.IndexByte(bs[pos:], '\n') - if idx < 0 { - wn, err := b.Writer.Write(bs[pos:]) - return n + wn, err - } - - if idx > 0 { - wn, err := b.Writer.Write(bs[pos : pos+idx]) - n += wn - if err != nil { - return n, err - } - } - - if _, err = b.Writer.Write([]byte("
")); err != nil { - return n, err - } - pos += idx + 1 - - n++ - } - - return n, err -} diff --git a/modules/charset/breakwriter_test.go b/modules/charset/breakwriter_test.go deleted file mode 100644 index 5eeeedc4e2..0000000000 --- a/modules/charset/breakwriter_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2022 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package charset - -import ( - "strings" - "testing" -) - -func TestBreakWriter_Write(t *testing.T) { - tests := []struct { - name string - kase string - expect string - wantErr bool - }{ - { - name: "noline", - kase: "abcdefghijklmnopqrstuvwxyz", - expect: "abcdefghijklmnopqrstuvwxyz", - }, - { - name: "endline", - kase: "abcdefghijklmnopqrstuvwxyz\n", - expect: "abcdefghijklmnopqrstuvwxyz
", - }, - { - name: "startline", - kase: "\nabcdefghijklmnopqrstuvwxyz", - expect: "
abcdefghijklmnopqrstuvwxyz", - }, - { - name: "onlyline", - kase: "\n\n\n", - expect: "


", - }, - { - name: "empty", - kase: "", - expect: "", - }, - { - name: "midline", - kase: "\nabc\ndefghijkl\nmnopqrstuvwxy\nz", - expect: "
abc
defghijkl
mnopqrstuvwxy
z", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - buf := &strings.Builder{} - b := &BreakWriter{ - Writer: buf, - } - n, err := b.Write([]byte(tt.kase)) - if (err != nil) != tt.wantErr { - t.Errorf("BreakWriter.Write() error = %v, wantErr %v", err, tt.wantErr) - return - } - if n != len(tt.kase) { - t.Errorf("BreakWriter.Write() = %v, want %v", n, len(tt.kase)) - } - if buf.String() != tt.expect { - t.Errorf("BreakWriter.Write() wrote %q, want %v", buf.String(), tt.expect) - } - }) - } -} diff --git a/modules/charset/charset.go b/modules/charset/charset.go index b156654973..96de1c9fcc 100644 --- a/modules/charset/charset.go +++ b/modules/charset/charset.go @@ -6,7 +6,10 @@ package charset import ( "bytes" "io" + "regexp" "strings" + "sync" + "unicode" "unicode/utf8" "code.gitea.io/gitea/modules/setting" @@ -17,8 +20,19 @@ import ( "golang.org/x/text/transform" ) -// UTF8BOM is the utf-8 byte-order marker -var UTF8BOM = []byte{'\xef', '\xbb', '\xbf'} +var globalVars = sync.OnceValue(func() (ret struct { + utf8Bom []byte + + defaultWordRegexp *regexp.Regexp + ambiguousTableMap map[string]*AmbiguousTable + invisibleRangeTable *unicode.RangeTable +}, +) { + ret.utf8Bom = []byte{'\xef', '\xbb', '\xbf'} + ret.ambiguousTableMap = newAmbiguousTableMap() + ret.invisibleRangeTable = newInvisibleRangeTable() + return ret +}) type ConvertOpts struct { KeepBOM bool @@ -105,7 +119,7 @@ func maybeRemoveBOM(content []byte, opts ConvertOpts) []byte { if opts.KeepBOM { return content } - return bytes.TrimPrefix(content, UTF8BOM) + return bytes.TrimPrefix(content, globalVars().utf8Bom) } // DetectEncoding detect the encoding of content diff --git a/modules/charset/escape.go b/modules/charset/escape.go index 167683a298..8f25e7876d 100644 --- a/modules/charset/escape.go +++ b/modules/charset/escape.go @@ -1,10 +1,6 @@ // Copyright 2022 The Gitea Authors. All rights reserved. 
// SPDX-License-Identifier: MIT -//go:generate go run invisible/generate.go -v -o ./invisible_gen.go - -//go:generate go run ambiguous/generate.go -v -o ./ambiguous_gen.go ambiguous/ambiguous.json - package charset import ( @@ -12,36 +8,36 @@ import ( "io" "strings" - "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/translation" ) -// RuneNBSP is the codepoint for NBSP -const RuneNBSP = 0xa0 +type EscapeOptions struct { + Allowed map[rune]bool +} + +func AllowRuneNBSP() map[rune]bool { + return map[rune]bool{0xa0: true} +} + +func EscapeOptionsForView() EscapeOptions { + return EscapeOptions{ + // it's safe to see NBSP in the view, but maybe not in the diff + Allowed: AllowRuneNBSP(), + } +} // EscapeControlHTML escapes the Unicode control sequences in a provided html document -func EscapeControlHTML(html template.HTML, locale translation.Locale, allowed ...rune) (escaped *EscapeStatus, output template.HTML) { +func EscapeControlHTML(html template.HTML, locale translation.Locale, opts ...EscapeOptions) (escaped *EscapeStatus, output template.HTML) { if !setting.UI.AmbiguousUnicodeDetection { return &EscapeStatus{}, html } sb := &strings.Builder{} - escaped, _ = EscapeControlReader(strings.NewReader(string(html)), sb, locale, allowed...) // err has been handled in EscapeControlReader + escaped, _ = EscapeControlReader(strings.NewReader(string(html)), sb, locale, opts...) 
// err has been handled in EscapeControlReader return escaped, template.HTML(sb.String()) } // EscapeControlReader escapes the Unicode control sequences in a provided reader of HTML content and writer in a locale and returns the findings as an EscapeStatus -func EscapeControlReader(reader io.Reader, writer io.Writer, locale translation.Locale, allowed ...rune) (escaped *EscapeStatus, err error) { - if !setting.UI.AmbiguousUnicodeDetection { - _, err = io.Copy(writer, reader) - return &EscapeStatus{}, err - } - outputStream := &HTMLStreamerWriter{Writer: writer} - streamer := NewEscapeStreamer(locale, outputStream, allowed...).(*escapeStreamer) - - if err = StreamHTML(reader, streamer); err != nil { - streamer.escaped.HasError = true - log.Error("Error whilst escaping: %v", err) - } - return streamer.escaped, err +func EscapeControlReader(reader io.Reader, writer io.Writer, locale translation.Locale, opts ...EscapeOptions) (*EscapeStatus, error) { + return escapeStream(locale, reader, writer, opts...) 
} diff --git a/modules/charset/escape_status.go b/modules/charset/escape_status.go index 37b6ad86d4..fb9ebbb228 100644 --- a/modules/charset/escape_status.go +++ b/modules/charset/escape_status.go @@ -3,11 +3,9 @@ package charset -// EscapeStatus represents the findings of the unicode escaper +// EscapeStatus represents the findings of the Unicode escaper type EscapeStatus struct { - Escaped bool - HasError bool - HasBadRunes bool + Escaped bool // it means that some characters were escaped, and they can also be unescaped back HasInvisible bool HasAmbiguous bool } @@ -19,8 +17,6 @@ func (status *EscapeStatus) Or(other *EscapeStatus) *EscapeStatus { st = &EscapeStatus{} } st.Escaped = st.Escaped || other.Escaped - st.HasError = st.HasError || other.HasError - st.HasBadRunes = st.HasBadRunes || other.HasBadRunes st.HasAmbiguous = st.HasAmbiguous || other.HasAmbiguous st.HasInvisible = st.HasInvisible || other.HasInvisible return st diff --git a/modules/charset/escape_stream.go b/modules/charset/escape_stream.go index 22e7f14f39..11d09083fc 100644 --- a/modules/charset/escape_stream.go +++ b/modules/charset/escape_stream.go @@ -4,288 +4,415 @@ package charset import ( + "bytes" "fmt" - "regexp" - "strings" + "html" + "io" "unicode" "unicode/utf8" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/translation" - - "golang.org/x/net/html" ) -// VScode defaultWordRegexp -var defaultWordRegexp = regexp.MustCompile(`(-?\d*\.\d\w*)|([^\` + "`" + `\~\!\@\#\$\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s\x00-\x1f]+)`) - -func NewEscapeStreamer(locale translation.Locale, next HTMLStreamer, allowed ...rune) HTMLStreamer { - allowedM := make(map[rune]bool, len(allowed)) - for _, v := range allowed { - allowedM[v] = true - } - return &escapeStreamer{ - escaped: &EscapeStatus{}, - PassthroughHTMLStreamer: *NewPassthroughStreamer(next), - locale: locale, - ambiguousTables: AmbiguousTablesForLocale(locale), - allowed: allowedM, - } +type htmlChunkReader 
struct { + in io.Reader + readErr error + readBuf []byte + curInTag bool } type escapeStreamer struct { - PassthroughHTMLStreamer + htmlChunkReader + escaped *EscapeStatus locale translation.Locale ambiguousTables []*AmbiguousTable allowed map[rune]bool + + out io.Writer } -func (e *escapeStreamer) EscapeStatus() *EscapeStatus { - return e.escaped +func escapeStream(locale translation.Locale, in io.Reader, out io.Writer, opts ...EscapeOptions) (*EscapeStatus, error) { + es := &escapeStreamer{ + escaped: &EscapeStatus{}, + locale: locale, + ambiguousTables: AmbiguousTablesForLocale(locale), + htmlChunkReader: htmlChunkReader{ + in: in, + readBuf: make([]byte, 0, 32*1024), + }, + out: out, + } + + if len(opts) > 0 { + es.allowed = opts[0].Allowed + } + + readCount := 0 + lastIsTag := false + for { + parts, partInTag, err := es.readRunes() + readCount++ + if err == io.EOF { + return es.escaped, nil + } else if err != nil { + return nil, err + } + for i, part := range parts { + if partInTag[i] { + lastIsTag = true + if _, err := out.Write(part); err != nil { + return nil, err + } + } else { + // if last part is tag, then this part is content begin + // if the content is the first part of the first read, then it's also content begin + isContentBegin := lastIsTag || (readCount == 1 && i == 0) + lastIsTag = false + if isContentBegin { + if part, err = es.trimAndWriteBom(part); err != nil { + return nil, err + } + } + if err = es.detectAndWriteRunes(part); err != nil { + return nil, err + } + } + } + } } -// Text tells the next streamer there is a text -func (e *escapeStreamer) Text(data string) error { - sb := &strings.Builder{} - var until int - var next int +func (e *escapeStreamer) trimAndWriteBom(part []byte) ([]byte, error) { + remaining, ok := bytes.CutPrefix(part, globalVars().utf8Bom) + if ok { + part = remaining + if _, err := e.out.Write(globalVars().utf8Bom); err != nil { + return part, err + } + } + return part, nil +} + +const longSentenceDetectionLimit = 20 
+ +func (e *escapeStreamer) possibleLongSentence(results []detectResult, pos int) bool { + countBasic := 0 + countNonASCII := 0 + for i := max(pos-longSentenceDetectionLimit, 0); i < min(pos+longSentenceDetectionLimit, len(results)); i++ { + if results[i].runeType == runeTypeBasic && results[i].runeChar != ' ' { + countBasic++ + } + if results[i].runeType == runeTypeNonASCII || results[i].runeType == runeTypeAmbiguous { + countNonASCII++ + } + } + countChar := countBasic + countNonASCII + // many non-ASCII runes around, it seems to be a sentence, + // don't handle the invisible/ambiguous chars in it, otherwise it will be too noisy + return countChar != 0 && countNonASCII*100/countChar >= 50 +} + +func (e *escapeStreamer) analyzeDetectResults(results []detectResult) { + for i := range results { + res := &results[i] + if res.runeType == runeTypeInvisible || res.runeType == runeTypeAmbiguous { + leftIsNonASCII := i > 0 && (results[i-1].runeType == runeTypeNonASCII || results[i-1].runeType == runeTypeAmbiguous) + rightIsNonASCII := i < len(results)-1 && (results[i+1].runeType == runeTypeNonASCII || results[i+1].runeType == runeTypeAmbiguous) + surroundingNonASCII := leftIsNonASCII || rightIsNonASCII + if !surroundingNonASCII { + if len(results) < longSentenceDetectionLimit { + res.needEscape = setting.UI.AmbiguousUnicodeDetection + } else if !e.possibleLongSentence(results, i) { + res.needEscape = setting.UI.AmbiguousUnicodeDetection + } + } + } + } +} + +func (e *escapeStreamer) detectAndWriteRunes(part []byte) error { + results := e.detectRunes(part) + e.analyzeDetectResults(results) + return e.writeDetectResults(part, results) +} + +func (e *htmlChunkReader) readRunes() (parts [][]byte, partInTag []bool, _ error) { + // we have read everything, eof + if e.readErr != nil && len(e.readBuf) == 0 { + return nil, nil, e.readErr + } + + // not eof, and the there is space in the buffer, try to read more data + if e.readErr == nil && len(e.readBuf) <= cap(e.readBuf)*3/4 { + 
n, err := e.in.Read(e.readBuf[len(e.readBuf):cap(e.readBuf)]) + e.readErr = err + e.readBuf = e.readBuf[:len(e.readBuf)+n] + } + if len(e.readBuf) == 0 { + return nil, nil, e.readErr + } + + // try to exact tag parts and content parts pos := 0 - if len(data) > len(UTF8BOM) && data[:len(UTF8BOM)] == string(UTF8BOM) { - _, _ = sb.WriteString(data[:len(UTF8BOM)]) - pos = len(UTF8BOM) - } - dataBytes := []byte(data) - for pos < len(data) { - nextIdxs := defaultWordRegexp.FindStringIndex(data[pos:]) - if nextIdxs == nil { - until = len(data) - next = until - } else { - until = min(nextIdxs[0]+pos, len(data)) - next = min(nextIdxs[1]+pos, len(data)) - } - - // from pos until we know that the runes are not \r\t\n or even ' ' - n := next - until - runes := make([]rune, 0, n) - positions := make([]int, 0, n+1) - - for pos < until { - r, sz := utf8.DecodeRune(dataBytes[pos:]) - positions = positions[:0] - positions = append(positions, pos, pos+sz) - types, confusables, _ := e.runeTypes(r) - if err := e.handleRunes(dataBytes, []rune{r}, positions, types, confusables, sb); err != nil { - return err - } - pos += sz - } - - for i := pos; i < next; { - r, sz := utf8.DecodeRune(dataBytes[i:]) - runes = append(runes, r) - positions = append(positions, i) - i += sz - } - positions = append(positions, next) - types, confusables, runeCounts := e.runeTypes(runes...) 
- if runeCounts.needsEscape() { - if err := e.handleRunes(dataBytes, runes, positions, types, confusables, sb); err != nil { - return err + for pos < len(e.readBuf) { + var curPartEnd int + nextInTag := e.curInTag + if e.curInTag { + // if cur part is in tag, try to find the tag close char '>' + idx := bytes.IndexByte(e.readBuf[pos:], '>') + if idx == -1 { + // if no tag close char, then the whole buffer is in tag + curPartEnd = len(e.readBuf) + } else { + // tag part ends, switch to content part + curPartEnd = pos + idx + 1 + nextInTag = !nextInTag } } else { - _, _ = sb.Write(dataBytes[pos:next]) + // if cur part is in content, try to find the tag open char '<' + idx := bytes.IndexByte(e.readBuf[pos:], '<') + if idx == -1 { + // if no tag open char, then the whole buffer is in content + curPartEnd = len(e.readBuf) + } else { + // content part ends, switch to tag part + curPartEnd = pos + idx + nextInTag = !nextInTag + } + } + + curPartLen := curPartEnd - pos + if curPartLen == 0 { + // if cur part is empty, only need to switch the part type + if e.curInTag == nextInTag { + panic("impossible, curPartLen is 0 but the part in tag status is not switched") + } + e.curInTag = nextInTag + continue + } + + // now, curPartLen can't be 0 + curPart := make([]byte, curPartLen) + copy(curPart, e.readBuf[pos:curPartEnd]) + // now we get the curPart bytes, but we can't directly use it, the last rune in it might have been cut + // try to decode the last rune, if it's invalid, then we cut the last byte and try again until we get a valid rune or no byte left + for i := curPartLen - 1; i >= 0; i-- { + last, lastSize := utf8.DecodeRune(curPart[i:]) + if last == utf8.RuneError && lastSize == 1 { + curPartLen-- + } else { + curPartLen += lastSize - 1 + break + } + } + if curPartLen == 0 { + // actually it's impossible that the part doesn't contain any valid rune, + // the only case is that the cap(readBuf) is too small, or the origin contain indeed doesn't contain any valid rune + // 
* try to leave the last 4 bytes (possible longest utf-8 encoding) to next round + // * at least consume 1 byte to avoid infinite loop + curPartLen = max(len(curPart)-utf8.UTFMax, 1) + } + + // if curPartLen is not the same as curPart, it means we have cut some bytes, + // need to wait for more data if not eof + trailingCorrupted := curPartLen != len(curPart) + + // finally, we get the real part we need + curPart = curPart[:curPartLen] + parts = append(parts, curPart) + partInTag = append(partInTag, e.curInTag) + + pos += curPartLen + e.curInTag = nextInTag + + if trailingCorrupted && e.readErr == nil { + // if the last part is corrupted, and we haven't reach eof, then we need to wait for more data to get the complete part + break } - pos = next } - if sb.Len() > 0 { - if err := e.PassthroughHTMLStreamer.Text(sb.String()); err != nil { + + copy(e.readBuf, e.readBuf[pos:]) + e.readBuf = e.readBuf[:len(e.readBuf)-pos] + return parts, partInTag, nil +} + +func (e *escapeStreamer) writeDetectResults(data []byte, results []detectResult) error { + lastWriteRawIdx := -1 + for idx := range results { + res := &results[idx] + if !res.needEscape { + if lastWriteRawIdx == -1 { + lastWriteRawIdx = idx + } + continue + } + + if lastWriteRawIdx != -1 { + if _, err := e.out.Write(data[results[lastWriteRawIdx].position:res.position]); err != nil { + return err + } + lastWriteRawIdx = -1 + } + switch res.runeType { + case runeTypeBroken: + if err := e.writeBrokenRune(data[res.position : res.position+res.runeSize]); err != nil { + return err + } + case runeTypeAmbiguous: + if err := e.writeAmbiguousRune(res.runeChar, res.confusable); err != nil { + return err + } + case runeTypeInvisible: + if err := e.writeInvisibleRune(res.runeChar); err != nil { + return err + } + case runeTypeControlChar: + if err := e.writeControlRune(res.runeChar); err != nil { + return err + } + default: + panic("unreachable") + } + } + if lastWriteRawIdx != -1 { + lastResult := results[len(results)-1] + if _, 
err := e.out.Write(data[results[lastWriteRawIdx].position : lastResult.position+lastResult.runeSize]); err != nil { return err } } return nil } -func (e *escapeStreamer) handleRunes(data []byte, runes []rune, positions []int, types []runeType, confusables []rune, sb *strings.Builder) error { - for i, r := range runes { - switch types[i] { - case brokenRuneType: - if sb.Len() > 0 { - if err := e.PassthroughHTMLStreamer.Text(sb.String()); err != nil { - return err - } - sb.Reset() - } - end := positions[i+1] - start := positions[i] - if err := e.brokenRune(data[start:end]); err != nil { - return err - } - case ambiguousRuneType: - if sb.Len() > 0 { - if err := e.PassthroughHTMLStreamer.Text(sb.String()); err != nil { - return err - } - sb.Reset() - } - if err := e.ambiguousRune(r, confusables[0]); err != nil { - return err - } - confusables = confusables[1:] - case invisibleRuneType: - if sb.Len() > 0 { - if err := e.PassthroughHTMLStreamer.Text(sb.String()); err != nil { - return err - } - sb.Reset() - } - if err := e.invisibleRune(r); err != nil { - return err - } - default: - _, _ = sb.WriteRune(r) - } - } - return nil +func (e *escapeStreamer) writeBrokenRune(_ []byte) (err error) { + // Although we'd like to use the original bytes to display (show the real broken content to users), + // however, when this "escape stream" module is applied to the content, the content has already been processed by other modules. 
+ // So the invalid bytes just can't be kept till this step, in most (all) cases, the only thing we see here is utf8.RuneError + _, err = io.WriteString(e.out, ``) + return err } -func (e *escapeStreamer) brokenRune(bs []byte) error { - e.escaped.Escaped = true - e.escaped.HasBadRunes = true - - if err := e.PassthroughHTMLStreamer.StartTag("span", html.Attribute{ - Key: "class", - Val: "broken-code-point", - }); err != nil { +func (e *escapeStreamer) writeEscapedCharHTML(tag1, attr, tag2, content, tag3 string) (err error) { + _, err = io.WriteString(e.out, tag1) + if err != nil { return err } - if err := e.PassthroughHTMLStreamer.Text(fmt.Sprintf("<%X>", bs)); err != nil { + _, err = io.WriteString(e.out, html.EscapeString(attr)) + if err != nil { return err } - - return e.PassthroughHTMLStreamer.EndTag("span") + _, err = io.WriteString(e.out, tag2) + if err != nil { + return err + } + _, err = io.WriteString(e.out, html.EscapeString(content)) + if err != nil { + return err + } + _, err = io.WriteString(e.out, tag3) + return err } -func (e *escapeStreamer) ambiguousRune(r, c rune) error { +func runeToHex(r rune) string { + return fmt.Sprintf("[U+%04X]", r) +} + +func (e *escapeStreamer) writeAmbiguousRune(r, c rune) (err error) { e.escaped.Escaped = true e.escaped.HasAmbiguous = true - - if err := e.PassthroughHTMLStreamer.StartTag("span", html.Attribute{ - Key: "class", - Val: "ambiguous-code-point", - }, html.Attribute{ - Key: "data-tooltip-content", - Val: e.locale.TrString("repo.ambiguous_character", r, c), - }); err != nil { - return err - } - if err := e.PassthroughHTMLStreamer.StartTag("span", html.Attribute{ - Key: "class", - Val: "char", - }); err != nil { - return err - } - if err := e.PassthroughHTMLStreamer.Text(string(r)); err != nil { - return err - } - if err := e.PassthroughHTMLStreamer.EndTag("span"); err != nil { - return err - } - - return e.PassthroughHTMLStreamer.EndTag("span") + return e.writeEscapedCharHTML( + ``, + string(r), + ``, + ) } 
-func (e *escapeStreamer) invisibleRune(r rune) error { +func (e *escapeStreamer) writeInvisibleRune(r rune) error { e.escaped.Escaped = true e.escaped.HasInvisible = true - - if err := e.PassthroughHTMLStreamer.StartTag("span", html.Attribute{ - Key: "class", - Val: "escaped-code-point", - }, html.Attribute{ - Key: "data-escaped", - Val: fmt.Sprintf("[U+%04X]", r), - }); err != nil { - return err - } - if err := e.PassthroughHTMLStreamer.StartTag("span", html.Attribute{ - Key: "class", - Val: "char", - }); err != nil { - return err - } - if err := e.PassthroughHTMLStreamer.Text(string(r)); err != nil { - return err - } - if err := e.PassthroughHTMLStreamer.EndTag("span"); err != nil { - return err - } - - return e.PassthroughHTMLStreamer.EndTag("span") + return e.writeEscapedCharHTML( + ``, + string(r), + ``, + ) } -type runeCountType struct { - numBasicRunes int - numNonConfusingNonBasicRunes int - numAmbiguousRunes int - numInvisibleRunes int - numBrokenRunes int +func (e *escapeStreamer) writeControlRune(r rune) error { + var display string + if r >= 0 && r <= 0x1f { + display = string(0x2400 + r) + } else if r == 0x7f { + display = string(rune(0x2421)) + } else { + display = runeToHex(r) + } + return e.writeEscapedCharHTML( + ``, + string(r), + ``, + ) } -func (counts runeCountType) needsEscape() bool { - if counts.numBrokenRunes > 0 { - return true - } - if counts.numBasicRunes == 0 && - counts.numNonConfusingNonBasicRunes > 0 { - return false - } - return counts.numAmbiguousRunes > 0 || counts.numInvisibleRunes > 0 +type detectResult struct { + runeChar rune + runeType int + runeSize int + position int + confusable rune + needEscape bool } -type runeType int - const ( - basicASCIIRuneType runeType = iota // <- This is technically deadcode but its self-documenting so it should stay - brokenRuneType - nonBasicASCIIRuneType - ambiguousRuneType - invisibleRuneType + runeTypeBasic int = iota + runeTypeBroken + runeTypeNonASCII + runeTypeAmbiguous + 
runeTypeInvisible + runeTypeControlChar ) -func (e *escapeStreamer) runeTypes(runes ...rune) (types []runeType, confusables []rune, runeCounts runeCountType) { - types = make([]runeType, len(runes)) - for i, r := range runes { - var confusable rune +func (e *escapeStreamer) detectRunes(data []byte) []detectResult { + runeCount := utf8.RuneCount(data) + results := make([]detectResult, runeCount) + invisibleRangeTable := globalVars().invisibleRangeTable + var i int + var confusable rune + for pos := 0; pos < len(data); i++ { + r, runeSize := utf8.DecodeRune(data[pos:]) + results[i].runeChar = r + results[i].runeSize = runeSize + results[i].position = pos + pos += runeSize + switch { case r == utf8.RuneError: - types[i] = brokenRuneType - runeCounts.numBrokenRunes++ - case r == ' ' || r == '\t' || r == '\n': - runeCounts.numBasicRunes++ - case e.allowed[r]: - if r > 0x7e || r < 0x20 { - types[i] = nonBasicASCIIRuneType - runeCounts.numNonConfusingNonBasicRunes++ - } else { - runeCounts.numBasicRunes++ + results[i].runeType = runeTypeBroken + results[i].needEscape = true + case r == ' ' || r == '\t' || r == '\n' || e.allowed[r]: + results[i].runeType = runeTypeBasic + if r >= 0x80 { + results[i].runeType = runeTypeNonASCII } - case unicode.Is(InvisibleRanges, r): - types[i] = invisibleRuneType - runeCounts.numInvisibleRunes++ - case unicode.IsControl(r): - types[i] = invisibleRuneType - runeCounts.numInvisibleRunes++ + case r < 0x20 || r == 0x7f: + results[i].runeType = runeTypeControlChar + results[i].needEscape = true + case unicode.Is(invisibleRangeTable, r): + results[i].runeType = runeTypeInvisible + // not sure about results[i].needEscape, will be detected separately case isAmbiguous(r, &confusable, e.ambiguousTables...): - confusables = append(confusables, confusable) - types[i] = ambiguousRuneType - runeCounts.numAmbiguousRunes++ - case r > 0x7e || r < 0x20: - types[i] = nonBasicASCIIRuneType - runeCounts.numNonConfusingNonBasicRunes++ - default: - 
runeCounts.numBasicRunes++ + results[i].runeType = runeTypeAmbiguous + results[i].confusable = confusable + // not sure about results[i].needEscape, will be detected separately + case r >= 0x80: + results[i].runeType = runeTypeNonASCII + default: // details to basic runes } } - return types, confusables, runeCounts + return results } diff --git a/modules/charset/escape_test.go b/modules/charset/escape_test.go index 9d796a0c18..4e1ff0fcf4 100644 --- a/modules/charset/escape_test.go +++ b/modules/charset/escape_test.go @@ -4,7 +4,6 @@ package charset import ( - "regexp" "strings" "testing" @@ -13,6 +12,7 @@ import ( "code.gitea.io/gitea/modules/translation" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) type escapeControlTest struct { @@ -57,24 +57,24 @@ var escapeControlTests = []escapeControlTest{ status: EscapeStatus{}, }, { - name: "hebrew", + name: "hebrew", // old test was wrong, such text shouldn't be escaped text: "עד תקופת יוון העתיקה היה העיסוק במתמטיקה תכליתי בלבד: היא שימשה כאוסף של נוסחאות לחישוב קרקע, אוכלוסין וכו'. פריצת הדרך של היוונים, פרט לתרומותיהם הגדולות לידע המתמטי, הייתה בלימוד המתמטיקה כשלעצמה, מתוקף ערכה הרוחני. יחסם של חלק מהיוונים הקדמונים למתמטיקה היה דתי - למשל, הכת שאסף סביבו פיתגורס האמינה כי המתמטיקה היא הבסיס לכל הדברים. היוונים נחשבים ליוצרי מושג ההוכחה המתמטית, וכן לראשונים שעסקו במתמטיקה לשם עצמה, כלומר כתחום מחקרי עיוני ומופשט ולא רק כעזר שימושי. עם זאת, לצדה", - result: `עד תקופת יוון העתיקה היה העיסוק במתמטיקה תכליתי בלבד: היא שימשה כאוסף של נוסחאות לחישוב קרקע, אוכלוסין וכו'. פריצת הדרך של היוונים, פרט לתרומותיהם הגדולות לידע המתמטי, הייתה בלימוד המתמטיקה כשלעצמה, מתוקף ערכה הרוחני. יחסם של חלק מהיוונים הקדמונים למתמטיקה היה דתי - למשל, הכת שאסף סביבו פיתגורס האמינה כי המתמטיקה היא הבסיס לכל הדברים. היוונים נחשבים ליוצרי מושג ההוכחה המתמטית, וכן לראשונים שעסקו במתמטיקה לשם עצמה, כלומר כתחום מחקרי עיוני ומופשט ולא רק כעזר שימושי. 
עם זאת, לצדה`, - status: EscapeStatus{Escaped: true, HasAmbiguous: true}, + result: "עד תקופת יוון העתיקה היה העיסוק במתמטיקה תכליתי בלבד: היא שימשה כאוסף של נוסחאות לחישוב קרקע, אוכלוסין וכו'. פריצת הדרך של היוונים, פרט לתרומותיהם הגדולות לידע המתמטי, הייתה בלימוד המתמטיקה כשלעצמה, מתוקף ערכה הרוחני. יחסם של חלק מהיוונים הקדמונים למתמטיקה היה דתי - למשל, הכת שאסף סביבו פיתגורס האמינה כי המתמטיקה היא הבסיס לכל הדברים. היוונים נחשבים ליוצרי מושג ההוכחה המתמטית, וכן לראשונים שעסקו במתמטיקה לשם עצמה, כלומר כתחום מחקרי עיוני ומופשט ולא רק כעזר שימושי. עם זאת, לצדה", + status: EscapeStatus{}, }, { - name: "more hebrew", + name: "more hebrew", // old test was wrong, such text shouldn't be escaped text: `בתקופה מאוחרת יותר, השתמשו היוונים בשיטת סימון מתקדמת יותר, שבה הוצגו המספרים לפי 22 אותיות האלפבית היווני. לסימון המספרים בין 1 ל-9 נקבעו תשע האותיות הראשונות, בתוספת גרש ( ' ) בצד ימין של האות, למעלה; תשע האותיות הבאות ייצגו את העשרות מ-10 עד 90, והבאות את המאות. לסימון הספרות בין 1000 ל-900,000, השתמשו היוונים באותן אותיות, אך הוסיפו לאותיות את הגרש דווקא מצד שמאל של האותיות, למטה. ממיליון ומעלה, כנראה השתמשו היוונים בשני תגים במקום אחד. המתמטיקאי הבולט הראשון ביוון העתיקה, ויש האומרים בתולדות האנושות, הוא תאלס (624 לפנה"ס - 546 לפנה"ס בקירוב).[1] לא יהיה זה משולל יסוד להניח שהוא האדם הראשון שהוכיח משפט מתמטי, ולא רק גילה אותו. תאלס הוכיח שישרים מקבילים חותכים מצד אחד של שוקי זווית קטעים בעלי יחסים שווים (משפט תאלס הראשון), שהזווית המונחת על קוטר במעגל היא זווית ישרה (משפט תאלס השני), שהקוטר מחלק את המעגל לשני חלקים שווים, ושזוויות הבסיס במשולש שווה-שוקיים שוות זו לזו. מיוחסות לו גם שיטות למדידת גובהן של הפירמידות בעזרת מדידת צילן ולקביעת מיקומה של ספינה הנראית מן החוף. בשנים 582 לפנה"ס עד 496 לפנה"ס, בקירוב, חי מתמטיקאי חשוב במיוחד - פיתגורס. המקורות הראשוניים עליו מועטים, וההיסטוריונים מתקשים להפריד את העובדות משכבת המסתורין והאגדות שנקשרו בו. 
ידוע שסביבו התקבצה האסכולה הפיתגוראית מעין כת פסבדו-מתמטית שהאמינה ש"הכל מספר", או ליתר דיוק הכל ניתן לכימות, וייחסה למספרים משמעויות מיסטיות. ככל הנראה הפיתגוראים ידעו לבנות את הגופים האפלטוניים, הכירו את הממוצע האריתמטי, הממוצע הגאומטרי והממוצע ההרמוני והגיעו להישגים חשובים נוספים. ניתן לומר שהפיתגוראים גילו את היותו של השורש הריבועי של 2, שהוא גם האלכסון בריבוע שאורך צלעותיו 1, אי רציונלי, אך תגליתם הייתה למעשה רק שהקטעים "חסרי מידה משותפת", ומושג המספר האי רציונלי מאוחר יותר.[2] אזכור ראשון לקיומם של קטעים חסרי מידה משותפת מופיע בדיאלוג "תאיטיטוס" של אפלטון, אך רעיון זה היה מוכר עוד קודם לכן, במאה החמישית לפנה"ס להיפאסוס, בן האסכולה הפיתגוראית, ואולי לפיתגורס עצמו.[3]`, - result: `בתקופה מאוחרת יותר, השתמשו היוונים בשיטת סימון מתקדמת יותר, שבה הוצגו המספרים לפי 22 אותיות האלפבית היווני. לסימון המספרים בין 1 ל-9 נקבעו תשע האותיות הראשונות, בתוספת גרש ( ' ) בצד ימין של האות, למעלה; תשע האותיות הבאות ייצגו את העשרות מ-10 עד 90, והבאות את המאות. לסימון הספרות בין 1000 ל-900,000, השתמשו היוונים באותן אותיות, אך הוסיפו לאותיות את הגרש דווקא מצד שמאל של האותיות, למטה. ממיליון ומעלה, כנראה השתמשו היוונים בשני תגים במקום אחד. + result: `בתקופה מאוחרת יותר, השתמשו היוונים בשיטת סימון מתקדמת יותר, שבה הוצגו המספרים לפי 22 אותיות האלפבית היווני. לסימון המספרים בין 1 ל-9 נקבעו תשע האותיות הראשונות, בתוספת גרש ( ' ) בצד ימין של האות, למעלה; תשע האותיות הבאות ייצגו את העשרות מ-10 עד 90, והבאות את המאות. לסימון הספרות בין 1000 ל-900,000, השתמשו היוונים באותן אותיות, אך הוסיפו לאותיות את הגרש דווקא מצד שמאל של האותיות, למטה. ממיליון ומעלה, כנראה השתמשו היוונים בשני תגים במקום אחד. - המתמטיקאי הבולט הראשון ביוון העתיקה, ויש האומרים בתולדות האנושות, הוא תאלס (624 לפנה"ס - 546 לפנה"ס בקירוב).[1] לא יהיה זה משולל יסוד להניח שהוא האדם הראשון שהוכיח משפט מתמטי, ולא רק גילה אותו. 
תאלס הוכיח שישרים מקבילים חותכים מצד אחד של שוקי זווית קטעים בעלי יחסים שווים (משפט תאלס הראשון), שהזווית המונחת על קוטר במעגל היא זווית ישרה (משפט תאלס השני), שהקוטר מחלק את המעגל לשני חלקים שווים, ושזוויות הבסיס במשולש שווה-שוקיים שוות זו לזו. מיוחסות לו גם שיטות למדידת גובהן של הפירמידות בעזרת מדידת צילן ולקביעת מיקומה של ספינה הנראית מן החוף. + המתמטיקאי הבולט הראשון ביוון העתיקה, ויש האומרים בתולדות האנושות, הוא תאלס (624 לפנה"ס - 546 לפנה"ס בקירוב).[1] לא יהיה זה משולל יסוד להניח שהוא האדם הראשון שהוכיח משפט מתמטי, ולא רק גילה אותו. תאלס הוכיח שישרים מקבילים חותכים מצד אחד של שוקי זווית קטעים בעלי יחסים שווים (משפט תאלס הראשון), שהזווית המונחת על קוטר במעגל היא זווית ישרה (משפט תאלס השני), שהקוטר מחלק את המעגל לשני חלקים שווים, ושזוויות הבסיס במשולש שווה-שוקיים שוות זו לזו. מיוחסות לו גם שיטות למדידת גובהן של הפירמידות בעזרת מדידת צילן ולקביעת מיקומה של ספינה הנראית מן החוף. - בשנים 582 לפנה"ס עד 496 לפנה"ס, בקירוב, חי מתמטיקאי חשוב במיוחד - פיתגורס. המקורות הראשוניים עליו מועטים, וההיסטוריונים מתקשים להפריד את העובדות משכבת המסתורין והאגדות שנקשרו בו. ידוע שסביבו התקבצה האסכולה הפיתגוראית מעין כת פסבדו-מתמטית שהאמינה ש"הכל מספר", או ליתר דיוק הכל ניתן לכימות, וייחסה למספרים משמעויות מיסטיות. ככל הנראה הפיתגוראים ידעו לבנות את הגופים האפלטוניים, הכירו את הממוצע האריתמטי, הממוצע הגאומטרי והממוצע ההרמוני והגיעו להישגים חשובים נוספים. ניתן לומר שהפיתגוראים גילו את היותו של השורש הריבועי של 2, שהוא גם האלכסון בריבוע שאורך צלעותיו 1, אי רציונלי, אך תגליתם הייתה למעשה רק שהקטעים "חסרי מידה משותפת", ומושג המספר האי רציונלי מאוחר יותר.[2] אזכור ראשון לקיומם של קטעים חסרי מידה משותפת מופיע בדיאלוג "תאיטיטוס" של אפלטון, אך רעיון זה היה מוכר עוד קודם לכן, במאה החמישית לפנה"ס להיפאסוס, בן האסכולה הפיתגוראית, ואולי לפיתגורס עצמו.[3]`, - status: EscapeStatus{Escaped: true, HasAmbiguous: true}, + בשנים 582 לפנה"ס עד 496 לפנה"ס, בקירוב, חי מתמטיקאי חשוב במיוחד - פיתגורס. המקורות הראשוניים עליו מועטים, וההיסטוריונים מתקשים להפריד את העובדות משכבת המסתורין והאגדות שנקשרו בו. 
ידוע שסביבו התקבצה האסכולה הפיתגוראית מעין כת פסבדו-מתמטית שהאמינה ש"הכל מספר", או ליתר דיוק הכל ניתן לכימות, וייחסה למספרים משמעויות מיסטיות. ככל הנראה הפיתגוראים ידעו לבנות את הגופים האפלטוניים, הכירו את הממוצע האריתמטי, הממוצע הגאומטרי והממוצע ההרמוני והגיעו להישגים חשובים נוספים. ניתן לומר שהפיתגוראים גילו את היותו של השורש הריבועי של 2, שהוא גם האלכסון בריבוע שאורך צלעותיו 1, אי רציונלי, אך תגליתם הייתה למעשה רק שהקטעים "חסרי מידה משותפת", ומושג המספר האי רציונלי מאוחר יותר.[2] אזכור ראשון לקיומם של קטעים חסרי מידה משותפת מופיע בדיאלוג "תאיטיטוס" של אפלטון, אך רעיון זה היה מוכר עוד קודם לכן, במאה החמישית לפנה"ס להיפאסוס, בן האסכולה הפיתגוראית, ואולי לפיתגורס עצמו.[3]`, + status: EscapeStatus{}, }, { name: "Mixed RTL+LTR", @@ -111,7 +111,7 @@ then resh (ר), and finally heh (ה) (which should appear leftmost).`, { name: "CVE testcase", text: "if access_level != \"user\u202E \u2066// Check if admin\u2069 \u2066\" {", - result: `if access_level != "user` + "\u202e" + ` ` + "\u2066" + `// Check if admin` + "\u2069" + ` ` + "\u2066" + `" {`, + result: `if access_level != "user` + "\u202e" + ` ` + "\u2066" + `// Check if admin` + "\u2069" + ` ` + "\u2066" + `" {`, status: EscapeStatus{Escaped: true, HasInvisible: true}, }, { @@ -123,7 +123,7 @@ then resh (ר), and finally heh (ה) (which should appear leftmost).`, result: `Many computer programs fail to display bidirectional text correctly. 
For example, the Hebrew name Sarah ` + "\u2067" + `שרה` + "\u2066\n" + `sin (ש) (which appears rightmost), then resh (ר), and finally heh (ה) (which should appear leftmost).` + - "\n" + `if access_level != "user` + "\u202e" + ` ` + "\u2066" + `// Check if admin` + "\u2069" + ` ` + "\u2066" + `" {` + "\n", + "\n" + `if access_level != "user` + "\u202e" + ` ` + "\u2066" + `// Check if admin` + "\u2069" + ` ` + "\u2066" + `" {` + "\n", status: EscapeStatus{Escaped: true, HasInvisible: true}, }, { @@ -134,38 +134,22 @@ then resh (ר), and finally heh (ה) (which should appear leftmost).`, result: "\xef\xbb\xbftest", status: EscapeStatus{}, }, + { + name: "ambiguous", + text: "O𝐾", + result: `O𝐾`, + status: EscapeStatus{Escaped: true, HasAmbiguous: true}, + }, } func TestEscapeControlReader(t *testing.T) { - // add some control characters to the tests - tests := make([]escapeControlTest, 0, len(escapeControlTests)*3) - copy(tests, escapeControlTests) - - // if there is a BOM, we should keep the BOM - addPrefix := func(prefix, s string) string { - if strings.HasPrefix(s, "\xef\xbb\xbf") { - return s[:3] + prefix + s[3:] - } - return prefix + s - } - for _, test := range escapeControlTests { - test.name += " (+Control)" - test.text = addPrefix("\u001E", test.text) - test.result = addPrefix(``+"\u001e"+``, test.result) - test.status.Escaped = true - test.status.HasInvisible = true - tests = append(tests, test) - } - - re := regexp.MustCompile(`repo.ambiguous_character:\d+,\d+`) // simplify the output for the tests, remove the translation variants - for _, tt := range tests { + for _, tt := range escapeControlTests { t.Run(tt.name, func(t *testing.T) { output := &strings.Builder{} status, err := EscapeControlReader(strings.NewReader(tt.text), output, &translation.MockLocale{}) assert.NoError(t, err) assert.Equal(t, tt.status, *status) outStr := output.String() - outStr = re.ReplaceAllString(outStr, "repo.ambiguous_character") assert.Equal(t, tt.result, outStr) }) } @@ -179,3 
+163,50 @@ func TestSettingAmbiguousUnicodeDetection(t *testing.T) { _, out = EscapeControlHTML("a test", &translation.MockLocale{}) assert.EqualValues(t, `a test`, out) } + +func TestHTMLChunkReader(t *testing.T) { + type textPart struct { + text string + isTag bool + } + testReadChunks := func(t *testing.T, chunkSize int, input string, expected []textPart) { + r := &htmlChunkReader{in: strings.NewReader(input), readBuf: make([]byte, 0, chunkSize)} + var results []textPart + for { + parts, partIsTag, err := r.readRunes() + if err != nil { + break + } + for i, part := range parts { + results = append(results, textPart{string(part), partIsTag[i]}) + } + } + assert.Equal(t, expected, results, "chunk size: %d, input: %s", chunkSize, input) + } + + testReadChunks(t, 10, "abcghi", []textPart{ + {text: "abc", isTag: false}, + {text: "", isTag: true}, + {text: "gh", isTag: false}, + // -- chunk + {text: "i", isTag: false}, + }) + + testReadChunks(t, 10, "ghi", []textPart{ + {text: "", isTag: true}, + {text: "", isTag: true}, + // -- chunk + {text: "ghi", isTag: false}, + }) + + rune1, rune2, rune3, rune4 := "A", "é", "啊", "🌞" + require.Len(t, rune1, 1) + require.Len(t, rune2, 2) + require.Len(t, rune3, 3) + require.Len(t, rune4, 4) + input := "<" + rune1 + rune2 + rune3 + rune4 + ">" + rune1 + rune2 + rune3 + rune4 + testReadChunks(t, 4, input, []textPart{{"", true}, {"Aé", false}, {"啊", false}, {"🌞", false}}) + testReadChunks(t, 5, input, []textPart{{"", true}, {"Aé", false}, {"啊", false}, {"🌞", false}}) + testReadChunks(t, 6, input, []textPart{{"", true}, {"A", false}, {"é啊", false}, {"🌞", false}}) + testReadChunks(t, 7, input, []textPart{{"", true}, {"A", false}, {"é啊", false}, {"🌞", false}}) +} diff --git a/modules/charset/ambiguous/ambiguous.json b/modules/charset/generate/ambiguous.json similarity index 100% rename from modules/charset/ambiguous/ambiguous.json rename to modules/charset/generate/ambiguous.json diff --git a/modules/charset/invisible/generate.go 
b/modules/charset/generate/generate.go similarity index 57% rename from modules/charset/invisible/generate.go rename to modules/charset/generate/generate.go index bd57dd6c4d..16ea53fda1 100644 --- a/modules/charset/invisible/generate.go +++ b/modules/charset/generate/generate.go @@ -5,37 +5,86 @@ package main import ( "bytes" - "flag" "fmt" "go/format" + "log" "os" + "sort" "text/template" + "unicode" + + "code.gitea.io/gitea/modules/json" "golang.org/x/text/unicode/rangetable" ) +// ambiguous.json provides a one to one mapping of ambiguous characters to other characters +// See https://github.com/hediet/vscode-unicode-data/blob/main/out/ambiguous.json + +type AmbiguousTable struct { + Confusable []rune + With []rune + Locale string + RangeTable *unicode.RangeTable +} + +type RunePair struct { + Confusable rune + With rune +} + // InvisibleRunes these are runes that vscode has assigned to be invisible // See https://github.com/hediet/vscode-unicode-data var InvisibleRunes = []rune{ 9, 10, 11, 12, 13, 32, 127, 160, 173, 847, 1564, 4447, 4448, 6068, 6069, 6155, 6156, 6157, 6158, 7355, 7356, 8192, 8193, 8194, 8195, 8196, 8197, 8198, 8199, 8200, 8201, 8202, 8203, 8204, 8205, 8206, 8207, 8234, 8235, 8236, 8237, 8238, 8239, 8287, 8288, 8289, 8290, 8291, 8292, 8293, 8294, 8295, 8296, 8297, 8298, 8299, 8300, 8301, 8302, 8303, 10240, 12288, 12644, 65024, 65025, 65026, 65027, 65028, 65029, 65030, 65031, 65032, 65033, 65034, 65035, 65036, 65037, 65038, 65039, 65279, 65440, 65520, 65521, 65522, 65523, 65524, 65525, 65526, 65527, 65528, 65532, 78844, 119155, 119156, 119157, 119158, 119159, 119160, 119161, 119162, 917504, 917505, 917506, 917507, 917508, 917509, 917510, 917511, 917512, 917513, 917514, 917515, 917516, 917517, 917518, 917519, 917520, 917521, 917522, 917523, 917524, 917525, 917526, 917527, 917528, 917529, 917530, 917531, 917532, 917533, 917534, 917535, 917536, 917537, 917538, 917539, 917540, 917541, 917542, 917543, 917544, 917545, 917546, 917547, 917548, 917549, 
917550, 917551, 917552, 917553, 917554, 917555, 917556, 917557, 917558, 917559, 917560, 917561, 917562, 917563, 917564, 917565, 917566, 917567, 917568, 917569, 917570, 917571, 917572, 917573, 917574, 917575, 917576, 917577, 917578, 917579, 917580, 917581, 917582, 917583, 917584, 917585, 917586, 917587, 917588, 917589, 917590, 917591, 917592, 917593, 917594, 917595, 917596, 917597, 917598, 917599, 917600, 917601, 917602, 917603, 917604, 917605, 917606, 917607, 917608, 917609, 917610, 917611, 917612, 917613, 917614, 917615, 917616, 917617, 917618, 917619, 917620, 917621, 917622, 917623, 917624, 917625, 917626, 917627, 917628, 917629, 917630, 917631, 917760, 917761, 917762, 917763, 917764, 917765, 917766, 917767, 917768, 917769, 917770, 917771, 917772, 917773, 917774, 917775, 917776, 917777, 917778, 917779, 917780, 917781, 917782, 917783, 917784, 917785, 917786, 917787, 917788, 917789, 917790, 917791, 917792, 917793, 917794, 917795, 917796, 917797, 917798, 917799, 917800, 917801, 917802, 917803, 917804, 917805, 917806, 917807, 917808, 917809, 917810, 917811, 917812, 917813, 917814, 917815, 917816, 917817, 917818, 917819, 917820, 917821, 917822, 917823, 917824, 917825, 917826, 917827, 917828, 917829, 917830, 917831, 917832, 917833, 917834, 917835, 917836, 917837, 917838, 917839, 917840, 917841, 917842, 917843, 917844, 917845, 917846, 917847, 917848, 917849, 917850, 917851, 917852, 917853, 917854, 917855, 917856, 917857, 917858, 917859, 917860, 917861, 917862, 917863, 917864, 917865, 917866, 917867, 917868, 917869, 917870, 917871, 917872, 917873, 917874, 917875, 917876, 917877, 917878, 917879, 917880, 917881, 917882, 917883, 917884, 917885, 917886, 917887, 917888, 917889, 917890, 917891, 917892, 917893, 917894, 917895, 917896, 917897, 917898, 917899, 917900, 917901, 917902, 917903, 917904, 917905, 917906, 917907, 917908, 917909, 917910, 917911, 917912, 917913, 917914, 917915, 917916, 917917, 917918, 917919, 917920, 917921, 917922, 917923, 917924, 917925, 917926, 917927, 
917928, 917929, 917930, 917931, 917932, 917933, 917934, 917935, 917936, 917937, 917938, 917939, 917940, 917941, 917942, 917943, 917944, 917945, 917946, 917947, 917948, 917949, 917950, 917951, 917952, 917953, 917954, 917955, 917956, 917957, 917958, 917959, 917960, 917961, 917962, 917963, 917964, 917965, 917966, 917967, 917968, 917969, 917970, 917971, 917972, 917973, 917974, 917975, 917976, 917977, 917978, 917979, 917980, 917981, 917982, 917983, 917984, 917985, 917986, 917987, 917988, 917989, 917990, 917991, 917992, 917993, 917994, 917995, 917996, 917997, 917998, 917999, } -var verbose bool - -func main() { - flag.Usage = func() { - fmt.Fprintf(os.Stderr, `%s: Generate InvisibleRunesRange - -Usage: %[1]s [-v] [-o output.go] -`, os.Args[0]) - flag.PrintDefaults() +func generateAmbiguous() { + bs, err := os.ReadFile("ambiguous.json") + if err != nil { + log.Fatalf("Unable to read, err: %v", err) } - output := "" - flag.BoolVar(&verbose, "v", false, "verbose output") - flag.StringVar(&output, "o", "invisible_gen.go", "file to output to") - flag.Parse() + var unwrapped string + if err := json.Unmarshal(bs, &unwrapped); err != nil { + log.Fatalf("Unable to unwrap content in, err: %v", err) + } + fromJSON := map[string][]uint32{} + if err := json.Unmarshal([]byte(unwrapped), &fromJSON); err != nil { + log.Fatalf("Unable to unmarshal content in, err: %v", err) + } + + tables := make([]*AmbiguousTable, 0, len(fromJSON)) + for locale, chars := range fromJSON { + table := &AmbiguousTable{Locale: locale} + table.Confusable = make([]rune, 0, len(chars)/2) + table.With = make([]rune, 0, len(chars)/2) + pairs := make([]RunePair, len(chars)/2) + for i := 0; i < len(chars); i += 2 { + pairs[i/2].Confusable, pairs[i/2].With = rune(chars[i]), rune(chars[i+1]) + } + sort.Slice(pairs, func(i, j int) bool { + return pairs[i].Confusable < pairs[j].Confusable + }) + for _, pair := range pairs { + table.Confusable = append(table.Confusable, pair.Confusable) + table.With = append(table.With, 
pair.With) + } + table.RangeTable = rangetable.New(table.Confusable...) + tables = append(tables, table) + } + sort.Slice(tables, func(i, j int) bool { + return tables[i].Locale < tables[j].Locale + }) + data := map[string]any{"Tables": tables} + + if err := runTemplate(templateAmbiguous, "../ambiguous_gen.go", &data); err != nil { + log.Fatalf("Unable to run template: %v", err) + } +} + +func generateInvisible() { // First we filter the runes to remove // filtered := make([]rune, 0, len(InvisibleRunes)) @@ -47,8 +96,8 @@ Usage: %[1]s [-v] [-o output.go] } table := rangetable.New(filtered...) - if err := runTemplate(generatorTemplate, output, table); err != nil { - fatalf("Unable to run template: %v", err) + if err := runTemplate(generatorInvisible, "../invisible_gen.go", table); err != nil { + log.Fatalf("Unable to run template: %v", err) } } @@ -59,7 +108,7 @@ func runTemplate(t *template.Template, filename string, data any) error { } bs, err := format.Source(buf.Bytes()) if err != nil { - verbosef("Bad source:\n%s", buf.String()) + log.Printf("Bad source:\n%s", buf.String()) return fmt.Errorf("unable to format source: %w", err) } @@ -85,37 +134,68 @@ func runTemplate(t *template.Template, filename string, data any) error { return nil } -var generatorTemplate = template.Must(template.New("invisibleTemplate").Parse(`// This file is generated by modules/charset/invisible/generate.go DO NOT EDIT -// Copyright 2022 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT +func main() { + generateAmbiguous() + generateInvisible() +} +var templateAmbiguous = template.Must(template.New("ambiguousTemplate").Parse(`// This file is generated by modules/charset/generate/generate.go DO NOT EDIT +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT package charset import "unicode" -var InvisibleRanges = &unicode.RangeTable{ - R16: []unicode.Range16{ -{{range .R16 }} {Lo:{{.Lo}}, Hi:{{.Hi}}, Stride: {{.Stride}}}, -{{end}} }, - R32: []unicode.Range32{ -{{range .R32}} {Lo:{{.Lo}}, Hi:{{.Hi}}, Stride: {{.Stride}}}, -{{end}} }, - LatinOffset: {{.LatinOffset}}, +// This file is generated from https://github.com/hediet/vscode-unicode-data/blob/main/out/ambiguous.json + +// AmbiguousTable matches a confusable rune with its partner for the Locale +type AmbiguousTable struct { + Confusable []rune + With []rune + Locale string + RangeTable *unicode.RangeTable +} + +func newAmbiguousTableMap() map[string]*AmbiguousTable { + return map[string]*AmbiguousTable { + {{- range .Tables}} + {{printf "%q" .Locale}}: { + Confusable: []rune{ {{range .Confusable}}{{.}},{{end}} }, + With: []rune{ {{range .With}}{{.}},{{end}} }, + Locale: {{printf "%q" .Locale}}, + RangeTable: &unicode.RangeTable{ + R16: []unicode.Range16{ + {{range .RangeTable.R16 }} {Lo:{{.Lo}}, Hi:{{.Hi}}, Stride: {{.Stride}}}, + {{end}} }, + R32: []unicode.Range32{ + {{range .RangeTable.R32}} {Lo:{{.Lo}}, Hi:{{.Hi}}, Stride: {{.Stride}}}, + {{end}} }, + LatinOffset: {{.RangeTable.LatinOffset}}, + }, + }, + {{end}} + } } `)) -func logf(format string, args ...any) { - fmt.Fprintf(os.Stderr, format+"\n", args...) -} +var generatorInvisible = template.Must(template.New("invisibleTemplate").Parse(`// This file is generated by modules/charset/generate/generate.go DO NOT EDIT +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT -func verbosef(format string, args ...any) { - if verbose { - logf(format, args...) 
+package charset + +import "unicode" + +func newInvisibleRangeTable() *unicode.RangeTable { + return &unicode.RangeTable{ + R16: []unicode.Range16{ +{{range .R16 }} {Lo:{{.Lo}}, Hi:{{.Hi}}, Stride: {{.Stride}}}, + {{end}}}, + R32: []unicode.Range32{ +{{range .R32}} {Lo:{{.Lo}}, Hi:{{.Hi}}, Stride: {{.Stride}}}, + {{end}}}, + LatinOffset: {{.LatinOffset}}, } } - -func fatalf(format string, args ...any) { - logf("fatal: "+format+"\n", args...) - os.Exit(1) -} +`)) diff --git a/modules/charset/htmlstream.go b/modules/charset/htmlstream.go deleted file mode 100644 index 61f29120a6..0000000000 --- a/modules/charset/htmlstream.go +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright 2022 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package charset - -import ( - "fmt" - "io" - - "golang.org/x/net/html" -) - -// HTMLStreamer represents a SAX-like interface for HTML -type HTMLStreamer interface { - Error(err error) error - Doctype(data string) error - Comment(data string) error - StartTag(data string, attrs ...html.Attribute) error - SelfClosingTag(data string, attrs ...html.Attribute) error - EndTag(data string) error - Text(data string) error -} - -// PassthroughHTMLStreamer is a passthrough streamer -type PassthroughHTMLStreamer struct { - next HTMLStreamer -} - -func NewPassthroughStreamer(next HTMLStreamer) *PassthroughHTMLStreamer { - return &PassthroughHTMLStreamer{next: next} -} - -var _ (HTMLStreamer) = &PassthroughHTMLStreamer{} - -// Error tells the next streamer in line that there is an error -func (p *PassthroughHTMLStreamer) Error(err error) error { - return p.next.Error(err) -} - -// Doctype tells the next streamer what the doctype is -func (p *PassthroughHTMLStreamer) Doctype(data string) error { - return p.next.Doctype(data) -} - -// Comment tells the next streamer there is a comment -func (p *PassthroughHTMLStreamer) Comment(data string) error { - return p.next.Comment(data) -} - -// StartTag tells the next streamer there is a 
starting tag -func (p *PassthroughHTMLStreamer) StartTag(data string, attrs ...html.Attribute) error { - return p.next.StartTag(data, attrs...) -} - -// SelfClosingTag tells the next streamer there is a self-closing tag -func (p *PassthroughHTMLStreamer) SelfClosingTag(data string, attrs ...html.Attribute) error { - return p.next.SelfClosingTag(data, attrs...) -} - -// EndTag tells the next streamer there is a end tag -func (p *PassthroughHTMLStreamer) EndTag(data string) error { - return p.next.EndTag(data) -} - -// Text tells the next streamer there is a text -func (p *PassthroughHTMLStreamer) Text(data string) error { - return p.next.Text(data) -} - -// HTMLStreamWriter acts as a writing sink -type HTMLStreamerWriter struct { - io.Writer - err error -} - -// Write implements io.Writer -func (h *HTMLStreamerWriter) Write(data []byte) (int, error) { - if h.err != nil { - return 0, h.err - } - return h.Writer.Write(data) -} - -// Write implements io.StringWriter -func (h *HTMLStreamerWriter) WriteString(data string) (int, error) { - if h.err != nil { - return 0, h.err - } - return h.Writer.Write([]byte(data)) -} - -// Error tells the next streamer in line that there is an error -func (h *HTMLStreamerWriter) Error(err error) error { - if h.err == nil { - h.err = err - } - return h.err -} - -// Doctype tells the next streamer what the doctype is -func (h *HTMLStreamerWriter) Doctype(data string) error { - _, h.err = h.WriteString("") - return h.err -} - -// Comment tells the next streamer there is a comment -func (h *HTMLStreamerWriter) Comment(data string) error { - _, h.err = h.WriteString("") - return h.err -} - -// StartTag tells the next streamer there is a starting tag -func (h *HTMLStreamerWriter) StartTag(data string, attrs ...html.Attribute) error { - return h.startTag(data, attrs, false) -} - -// SelfClosingTag tells the next streamer there is a self-closing tag -func (h *HTMLStreamerWriter) SelfClosingTag(data string, attrs ...html.Attribute) error { - 
return h.startTag(data, attrs, true) -} - -func (h *HTMLStreamerWriter) startTag(data string, attrs []html.Attribute, selfclosing bool) error { - if _, h.err = h.WriteString("<" + data); h.err != nil { - return h.err - } - for _, attr := range attrs { - if _, h.err = h.WriteString(" " + attr.Key + "=\"" + html.EscapeString(attr.Val) + "\""); h.err != nil { - return h.err - } - } - if selfclosing { - if _, h.err = h.WriteString("/>"); h.err != nil { - return h.err - } - } else { - if _, h.err = h.WriteString(">"); h.err != nil { - return h.err - } - } - return h.err -} - -// EndTag tells the next streamer there is a end tag -func (h *HTMLStreamerWriter) EndTag(data string) error { - _, h.err = h.WriteString("") - return h.err -} - -// Text tells the next streamer there is a text -func (h *HTMLStreamerWriter) Text(data string) error { - _, h.err = h.WriteString(html.EscapeString(data)) - return h.err -} - -// StreamHTML streams an html to a provided streamer -func StreamHTML(source io.Reader, streamer HTMLStreamer) error { - tokenizer := html.NewTokenizer(source) - for { - tt := tokenizer.Next() - switch tt { - case html.ErrorToken: - if tokenizer.Err() != io.EOF { - return tokenizer.Err() - } - return nil - case html.DoctypeToken: - token := tokenizer.Token() - if err := streamer.Doctype(token.Data); err != nil { - return err - } - case html.CommentToken: - token := tokenizer.Token() - if err := streamer.Comment(token.Data); err != nil { - return err - } - case html.StartTagToken: - token := tokenizer.Token() - if err := streamer.StartTag(token.Data, token.Attr...); err != nil { - return err - } - case html.SelfClosingTagToken: - token := tokenizer.Token() - if err := streamer.StartTag(token.Data, token.Attr...); err != nil { - return err - } - case html.EndTagToken: - token := tokenizer.Token() - if err := streamer.EndTag(token.Data); err != nil { - return err - } - case html.TextToken: - token := tokenizer.Token() - if err := streamer.Text(token.Data); err != nil 
{ - return err - } - default: - return fmt.Errorf("unknown type of token: %d", tt) - } - } -} diff --git a/modules/charset/invisible_gen.go b/modules/charset/invisible_gen.go index 812f0e34b3..ddda875a9f 100644 --- a/modules/charset/invisible_gen.go +++ b/modules/charset/invisible_gen.go @@ -1,36 +1,38 @@ -// This file is generated by modules/charset/invisible/generate.go DO NOT EDIT -// Copyright 2022 The Gitea Authors. All rights reserved. +// This file is generated by modules/charset/generate/generate.go DO NOT EDIT +// Copyright 2026 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT package charset import "unicode" -var InvisibleRanges = &unicode.RangeTable{ - R16: []unicode.Range16{ - {Lo: 11, Hi: 13, Stride: 1}, - {Lo: 127, Hi: 160, Stride: 33}, - {Lo: 173, Hi: 847, Stride: 674}, - {Lo: 1564, Hi: 4447, Stride: 2883}, - {Lo: 4448, Hi: 6068, Stride: 1620}, - {Lo: 6069, Hi: 6155, Stride: 86}, - {Lo: 6156, Hi: 6158, Stride: 1}, - {Lo: 7355, Hi: 7356, Stride: 1}, - {Lo: 8192, Hi: 8207, Stride: 1}, - {Lo: 8234, Hi: 8239, Stride: 1}, - {Lo: 8287, Hi: 8303, Stride: 1}, - {Lo: 10240, Hi: 12288, Stride: 2048}, - {Lo: 12644, Hi: 65024, Stride: 52380}, - {Lo: 65025, Hi: 65039, Stride: 1}, - {Lo: 65279, Hi: 65440, Stride: 161}, - {Lo: 65520, Hi: 65528, Stride: 1}, - {Lo: 65532, Hi: 65532, Stride: 1}, - }, - R32: []unicode.Range32{ - {Lo: 78844, Hi: 119155, Stride: 40311}, - {Lo: 119156, Hi: 119162, Stride: 1}, - {Lo: 917504, Hi: 917631, Stride: 1}, - {Lo: 917760, Hi: 917999, Stride: 1}, - }, - LatinOffset: 2, +func newInvisibleRangeTable() *unicode.RangeTable { + return &unicode.RangeTable{ + R16: []unicode.Range16{ + {Lo: 11, Hi: 13, Stride: 1}, + {Lo: 127, Hi: 160, Stride: 33}, + {Lo: 173, Hi: 847, Stride: 674}, + {Lo: 1564, Hi: 4447, Stride: 2883}, + {Lo: 4448, Hi: 6068, Stride: 1620}, + {Lo: 6069, Hi: 6155, Stride: 86}, + {Lo: 6156, Hi: 6158, Stride: 1}, + {Lo: 7355, Hi: 7356, Stride: 1}, + {Lo: 8192, Hi: 8207, Stride: 1}, + {Lo: 8234, Hi: 8239, 
Stride: 1}, + {Lo: 8287, Hi: 8303, Stride: 1}, + {Lo: 10240, Hi: 12288, Stride: 2048}, + {Lo: 12644, Hi: 65024, Stride: 52380}, + {Lo: 65025, Hi: 65039, Stride: 1}, + {Lo: 65279, Hi: 65440, Stride: 161}, + {Lo: 65520, Hi: 65528, Stride: 1}, + {Lo: 65532, Hi: 65532, Stride: 1}, + }, + R32: []unicode.Range32{ + {Lo: 78844, Hi: 119155, Stride: 40311}, + {Lo: 119156, Hi: 119162, Stride: 1}, + {Lo: 917504, Hi: 917631, Stride: 1}, + {Lo: 917760, Hi: 917999, Stride: 1}, + }, + LatinOffset: 2, + } } diff --git a/modules/generate/generate.go b/modules/generate/generate.go index 2d9a3dd902..9baa057b17 100644 --- a/modules/generate/generate.go +++ b/modules/generate/generate.go @@ -54,21 +54,16 @@ func DecodeJwtSecretBase64(src string) ([]byte, error) { } // NewJwtSecretWithBase64 generates a jwt secret with its base64 encoded value intended to be used for saving into config file -func NewJwtSecretWithBase64() ([]byte, string, error) { +func NewJwtSecretWithBase64() ([]byte, string) { bytes := make([]byte, defaultJwtSecretLen) - _, err := io.ReadFull(rand.Reader, bytes) + _, err := rand.Read(bytes) if err != nil { - return nil, "", err + panic(err) // rand.Read never fails } - return bytes, base64.RawURLEncoding.EncodeToString(bytes), nil + return bytes, base64.RawURLEncoding.EncodeToString(bytes) } // NewSecretKey generate a new value intended to be used by SECRET_KEY. 
func NewSecretKey() (string, error) { - secretKey, err := util.CryptoRandomString(64) - if err != nil { - return "", err - } - - return secretKey, nil + return util.CryptoRandomString(64), nil } diff --git a/modules/generate/generate_test.go b/modules/generate/generate_test.go index af640a60c1..f9dd20cc7f 100644 --- a/modules/generate/generate_test.go +++ b/modules/generate/generate_test.go @@ -25,10 +25,12 @@ func TestDecodeJwtSecretBase64(t *testing.T) { } func TestNewJwtSecretWithBase64(t *testing.T) { - secret, encoded, err := NewJwtSecretWithBase64() - assert.NoError(t, err) + secret, encoded := NewJwtSecretWithBase64() assert.Len(t, secret, 32) decoded, err := DecodeJwtSecretBase64(encoded) assert.NoError(t, err) assert.Equal(t, secret, decoded) + + secret2, _ := NewJwtSecretWithBase64() + assert.NotEqual(t, secret, secret2) } diff --git a/modules/git/catfile_batch_reader.go b/modules/git/catfile_batch_reader.go index 0c8fc740be..4d77fb03c7 100644 --- a/modules/git/catfile_batch_reader.go +++ b/modules/git/catfile_batch_reader.go @@ -10,66 +10,49 @@ import ( "errors" "io" "math" + "slices" "strconv" "strings" "sync/atomic" - "time" "code.gitea.io/gitea/modules/git/gitcmd" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" ) -var catFileBatchDebugWaitClose atomic.Int64 - type catFileBatchCommunicator struct { - closeFunc func(err error) + closeFunc atomic.Pointer[func(err error)] reqWriter io.Writer respReader *bufio.Reader debugGitCmd *gitcmd.Command } -func (b *catFileBatchCommunicator) Close() { - if b.closeFunc != nil { - b.closeFunc(nil) - b.closeFunc = nil +func (b *catFileBatchCommunicator) Close(err ...error) { + if fn := b.closeFunc.Swap(nil); fn != nil { + (*fn)(util.OptionalArg(err)) } } -// newCatFileBatch opens git cat-file --batch in the provided repo and returns a stdin pipe, a stdout reader and cancel function -func newCatFileBatch(ctx context.Context, repoPath string, cmdCatFile *gitcmd.Command) (ret 
*catFileBatchCommunicator) { +// newCatFileBatch opens git cat-file --batch/--batch-check/--batch-command command and prepares the stdin/stdout pipes for communication. +func newCatFileBatch(ctx context.Context, repoPath string, cmdCatFile *gitcmd.Command) *catFileBatchCommunicator { ctx, ctxCancel := context.WithCancelCause(ctx) - - // We often want to feed the commits in order into cat-file --batch, followed by their trees and subtrees as necessary. stdinWriter, stdoutReader, stdPipeClose := cmdCatFile.MakeStdinStdoutPipe() - pipeClose := func() { - if delay := catFileBatchDebugWaitClose.Load(); delay > 0 { - time.Sleep(time.Duration(delay)) // for testing purpose only - } - stdPipeClose() - } - closeFunc := func(err error) { - ctxCancel(err) - pipeClose() - } - return newCatFileBatchWithCloseFunc(ctx, repoPath, cmdCatFile, stdinWriter, stdoutReader, closeFunc) -} - -func newCatFileBatchWithCloseFunc(ctx context.Context, repoPath string, cmdCatFile *gitcmd.Command, - stdinWriter gitcmd.PipeWriter, stdoutReader gitcmd.PipeReader, closeFunc func(err error), -) *catFileBatchCommunicator { ret := &catFileBatchCommunicator{ debugGitCmd: cmdCatFile, - closeFunc: closeFunc, reqWriter: stdinWriter, respReader: bufio.NewReaderSize(stdoutReader, 32*1024), // use a buffered reader for rich operations } + ret.closeFunc.Store(new(func(err error) { + ctxCancel(err) + stdPipeClose() + })) err := cmdCatFile.WithDir(repoPath).StartWithStderr(ctx) if err != nil { log.Error("Unable to start git command %v: %v", cmdCatFile.LogString(), err) // ideally here it should return the error, but it would require refactoring all callers // so just return a dummy communicator that does nothing, almost the same behavior as before, not bad - closeFunc(err) + ret.Close(err) return ret } @@ -78,12 +61,33 @@ func newCatFileBatchWithCloseFunc(ctx context.Context, repoPath string, cmdCatFi if err != nil && !errors.Is(err, context.Canceled) { log.Error("cat-file --batch command failed in repo %s, 
error: %v", repoPath, err) } - closeFunc(err) + ret.Close(err) }() return ret } +func (b *catFileBatchCommunicator) debugKill() (ret struct { + beforeClose chan struct{} + blockClose chan struct{} + afterClose chan struct{} +}, +) { + ret.beforeClose = make(chan struct{}) + ret.blockClose = make(chan struct{}) + ret.afterClose = make(chan struct{}) + oldCloseFunc := b.closeFunc.Load() + b.closeFunc.Store(new(func(err error) { + b.closeFunc.Store(nil) + close(ret.beforeClose) + <-ret.blockClose + (*oldCloseFunc)(err) + close(ret.afterClose) + })) + b.debugGitCmd.DebugKill() + return ret +} + // catFileBatchParseInfoLine reads the header line from cat-file --batch // We expect: SP SP LF // then leaving the rest of the stream " LF" to be read @@ -169,77 +173,46 @@ headerLoop: return id, DiscardFull(rd, size-n+1) } -// git tree files are a list: -// SP NUL -// -// Unfortunately this 20-byte notation is somewhat in conflict to all other git tools -// Therefore we need some method to convert these binary hashes to hex hashes - // ParseCatFileTreeLine reads an entry from a tree in a cat-file --batch stream -// This carefully avoids allocations - except where fnameBuf is too small. 
-// It is recommended therefore to pass in an fnameBuf large enough to avoid almost all allocations -// -// Each line is composed of: -// SP NUL -// -// We don't attempt to convert the raw HASH to save a lot of time -func ParseCatFileTreeLine(objectFormat ObjectFormat, rd BufferedReader, modeBuf, fnameBuf, shaBuf []byte) (mode, fname, sha []byte, n int, err error) { - var readBytes []byte - - // Read the Mode & fname - readBytes, err = rd.ReadSlice('\x00') - if err != nil { - return mode, fname, sha, n, err - } - idx := bytes.IndexByte(readBytes, ' ') - if idx < 0 { - log.Debug("missing space in readBytes ParseCatFileTreeLine: %s", readBytes) - return mode, fname, sha, n, &ErrNotExist{} - } - - n += idx + 1 - copy(modeBuf, readBytes[:idx]) - if len(modeBuf) >= idx { - modeBuf = modeBuf[:idx] - } else { - modeBuf = append(modeBuf, readBytes[len(modeBuf):idx]...) - } - mode = modeBuf - - readBytes = readBytes[idx+1:] - - // Deal with the fname - copy(fnameBuf, readBytes) - if len(fnameBuf) > len(readBytes) { - fnameBuf = fnameBuf[:len(readBytes)] - } else { - fnameBuf = append(fnameBuf, readBytes[len(fnameBuf):]...) - } - for err == bufio.ErrBufferFull { - readBytes, err = rd.ReadSlice('\x00') - fnameBuf = append(fnameBuf, readBytes...) 
- } - n += len(fnameBuf) - if err != nil { - return mode, fname, sha, n, err - } - fnameBuf = fnameBuf[:len(fnameBuf)-1] - fname = fnameBuf - - // Deal with the binary hash - idx = 0 - length := objectFormat.FullLength() / 2 - for idx < length { - var read int - read, err = rd.Read(shaBuf[idx:length]) - n += read - if err != nil { - return mode, fname, sha, n, err +// Each entry is composed of: +// SP NUL +func ParseCatFileTreeLine(objectFormat ObjectFormat, rd BufferedReader) (mode EntryMode, name string, objID ObjectID, n int, err error) { + // use the in-buffer memory as much as possible to avoid extra allocations + bufBytes, err := rd.ReadSlice('\x00') + const maxEntryInfoBytes = 1024 * 1024 + if errors.Is(err, bufio.ErrBufferFull) { + bufBytes = slices.Clone(bufBytes) + for len(bufBytes) < maxEntryInfoBytes && errors.Is(err, bufio.ErrBufferFull) { + var tmp []byte + tmp, err = rd.ReadSlice('\x00') + bufBytes = append(bufBytes, tmp...) } - idx += read } - sha = shaBuf - return mode, fname, sha, n, err + if err != nil { + return mode, name, objID, len(bufBytes), err + } + + idx := bytes.IndexByte(bufBytes, ' ') + if idx < 0 { + return mode, name, objID, len(bufBytes), errors.New("invalid CatFileTreeLine output") + } + + mode = ParseEntryMode(util.UnsafeBytesToString(bufBytes[:idx])) + name = string(bufBytes[idx+1 : len(bufBytes)-1]) // trim the NUL terminator, it needs a copy because the bufBytes will be reused by the reader + if mode == EntryModeNoEntry { + return mode, name, objID, len(bufBytes), errors.New("invalid entry mode: " + string(bufBytes[:idx])) + } + + switch objectFormat { + case Sha1ObjectFormat: + objID = &Sha1Hash{} + case Sha256ObjectFormat: + objID = &Sha256Hash{} + default: + panic("unsupported object format: " + objectFormat.Name()) + } + readIDLen, err := io.ReadFull(rd, objID.RawValue()) + return mode, name, objID, len(bufBytes) + readIDLen, err } func DiscardFull(rd BufferedReader, discard int64) error { diff --git 
a/modules/git/catfile_batch_test.go b/modules/git/catfile_batch_test.go index 69662ffc1a..782d34d249 100644 --- a/modules/git/catfile_batch_test.go +++ b/modules/git/catfile_batch_test.go @@ -7,9 +7,7 @@ import ( "io" "os" "path/filepath" - "sync" "testing" - "time" "code.gitea.io/gitea/modules/test" @@ -39,13 +37,22 @@ func testCatFileBatch(t *testing.T) { require.Error(t, err) }) - simulateQueryTerminated := func(pipeCloseDelay, pipeReadDelay time.Duration) (errRead error) { - catFileBatchDebugWaitClose.Store(int64(pipeCloseDelay)) - defer catFileBatchDebugWaitClose.Store(0) + simulateQueryTerminated := func(t *testing.T, errBeforePipeClose, errAfterPipeClose error) { + readError := func(t *testing.T, r io.Reader, expectedErr error) { + if expectedErr == nil { + return // expectedErr == nil means this read should be skipped + } + n, err := r.Read(make([]byte, 100)) + assert.Zero(t, n) + assert.ErrorIs(t, err, expectedErr) + } + batch, err := NewBatch(t.Context(), filepath.Join(testReposDir, "repo1_bare")) require.NoError(t, err) defer batch.Close() - _, _ = batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449") + _, err = batch.QueryInfo("e2129701f1a4d54dc44f03c93bca0a2aec7c5449") + require.NoError(t, err) + var c *catFileBatchCommunicator switch b := batch.(type) { case *catFileBatchLegacy: @@ -58,24 +65,18 @@ func testCatFileBatch(t *testing.T) { t.FailNow() } - wg := sync.WaitGroup{} - wg.Go(func() { - time.Sleep(pipeReadDelay) - var n int - n, errRead = c.respReader.Read(make([]byte, 100)) - assert.Zero(t, n) - }) - time.Sleep(10 * time.Millisecond) - c.debugGitCmd.DebugKill() - wg.Wait() - return errRead - } + require.NotEqual(t, errBeforePipeClose == nil, errAfterPipeClose == nil, "must set exactly one of the expected errors") + inceptor := c.debugKill() + <-inceptor.beforeClose // wait for the command's Close to be called, the pipe is not closed yet + readError(t, c.respReader, errBeforePipeClose) // then caller will read on an open pipe which will be 
closed soon + close(inceptor.blockClose) // continue to close the pipe + <-inceptor.afterClose // wait for the pipe to be closed + readError(t, c.respReader, errAfterPipeClose) // then caller will read on a closed pipe + } t.Run("QueryTerminated", func(t *testing.T) { - err := simulateQueryTerminated(0, 20*time.Millisecond) - assert.ErrorIs(t, err, os.ErrClosed) // pipes are closed faster - err = simulateQueryTerminated(40*time.Millisecond, 20*time.Millisecond) - assert.ErrorIs(t, err, io.EOF) // reader is faster + simulateQueryTerminated(t, io.EOF, nil) // reader is faster + simulateQueryTerminated(t, nil, os.ErrClosed) // pipes are closed faster }) batch, err := NewBatch(t.Context(), filepath.Join(testReposDir, "repo1_bare")) diff --git a/modules/git/commit.go b/modules/git/commit.go index dfecfe6057..c3d23d6878 100644 --- a/modules/git/commit.go +++ b/modules/git/commit.go @@ -247,27 +247,6 @@ func (c *Commit) GetFileContent(filename string, limit int) (string, error) { return string(bytes), nil } -// GetBranchName gets the closest branch name (as returned by 'git name-rev --name-only') -func (c *Commit) GetBranchName() (string, error) { - cmd := gitcmd.NewCommand("name-rev") - if DefaultFeatures().CheckVersionAtLeast("2.13.0") { - cmd.AddArguments("--exclude", "refs/tags/*") - } - cmd.AddArguments("--name-only", "--no-undefined").AddDynamicArguments(c.ID.String()) - data, _, err := cmd.WithDir(c.repo.Path).RunStdString(c.repo.Ctx) - if err != nil { - // handle special case where git can not describe commit - if strings.Contains(err.Error(), "cannot describe") { - return "", nil - } - - return "", err - } - - // name-rev commitID output will be "master" or "master~12" - return strings.SplitN(strings.TrimSpace(data), "~", 2)[0], nil -} - // GetFullCommitID returns full length (40) of commit ID by given short SHA in a repository. 
func GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, error) { commitID, _, err := gitcmd.NewCommand("rev-parse"). diff --git a/modules/git/object_id_test.go b/modules/git/object_id_test.go index 03d0c85d87..213a0cd341 100644 --- a/modules/git/object_id_test.go +++ b/modules/git/object_id_test.go @@ -22,4 +22,6 @@ func TestIsValidSHAPattern(t *testing.T) { assert.Equal(t, "2e65efe2a145dda7ee51d1741299f848e5bf752e", ComputeBlobHash(Sha1ObjectFormat, []byte("a")).String()) assert.Equal(t, "473a0f4c3be8a93681a267e3b1e9a7dcda1185436fe141f7749120a303721813", ComputeBlobHash(Sha256ObjectFormat, nil).String()) assert.Equal(t, "eb337bcee2061c5313c9a1392116b6c76039e9e30d71467ae359b36277e17dc7", ComputeBlobHash(Sha256ObjectFormat, []byte("a")).String()) + assert.True(t, IsEmptyCommitID("")) + assert.True(t, IsEmptyCommitID("0000000000000000000000000000000000000000")) } diff --git a/modules/git/parse_treeentry.go b/modules/git/parse_treeentry.go index d46cd3344d..23d59c1923 100644 --- a/modules/git/parse_treeentry.go +++ b/modules/git/parse_treeentry.go @@ -5,10 +5,7 @@ package git import ( "bytes" - "fmt" "io" - - "code.gitea.io/gitea/modules/log" ) // ParseTreeEntries parses the output of a `git ls-tree -l` command. 
@@ -47,14 +44,11 @@ func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) { } func catBatchParseTreeEntries(objectFormat ObjectFormat, ptree *Tree, rd BufferedReader, sz int64) ([]*TreeEntry, error) { - fnameBuf := make([]byte, 4096) - modeBuf := make([]byte, 40) - shaBuf := make([]byte, objectFormat.FullLength()) entries := make([]*TreeEntry, 0, 10) loop: for sz > 0 { - mode, fname, sha, count, err := ParseCatFileTreeLine(objectFormat, rd, modeBuf, fnameBuf, shaBuf) + mode, fname, objID, count, err := ParseCatFileTreeLine(objectFormat, rd) if err != nil { if err == io.EOF { break loop @@ -64,25 +58,9 @@ loop: sz -= int64(count) entry := new(TreeEntry) entry.ptree = ptree - - switch string(mode) { - case "100644": - entry.entryMode = EntryModeBlob - case "100755": - entry.entryMode = EntryModeExec - case "120000": - entry.entryMode = EntryModeSymlink - case "160000": - entry.entryMode = EntryModeCommit - case "40000", "40755": // git uses 40000 for tree object, but some users may get 40755 for unknown reasons - entry.entryMode = EntryModeTree - default: - log.Debug("Unknown mode: %v", string(mode)) - return nil, fmt.Errorf("unknown mode: %v", string(mode)) - } - - entry.ID = objectFormat.MustID(sha) - entry.name = string(fname) + entry.entryMode = mode + entry.ID = objID + entry.name = fname entries = append(entries, entry) } if _, err := rd.Discard(1); err != nil { diff --git a/modules/git/parse_treeentry_test.go b/modules/git/parse_treeentry_test.go index 4223cbb3d7..5b81b49edd 100644 --- a/modules/git/parse_treeentry_test.go +++ b/modules/git/parse_treeentry_test.go @@ -4,6 +4,9 @@ package git import ( + "bufio" + "io" + "strings" "testing" "github.com/stretchr/testify/assert" @@ -100,3 +103,31 @@ func TestParseTreeEntriesInvalid(t *testing.T) { assert.Error(t, err) assert.Empty(t, entries) } + +func TestParseCatFileTreeLine(t *testing.T) { + input := "100644 looooooooooooooooooooooooong-file-name.txt\x0012345678901234567890" + input += "40755 
some-directory\x00abcdefg123abcdefg123" + + var readCount int + + buf := bufio.NewReaderSize(strings.NewReader(input), 20) // NewReaderSize has a limit: min buffer size = 16 + mode, name, objID, n, err := ParseCatFileTreeLine(Sha1ObjectFormat, buf) + readCount += n + assert.NoError(t, err) + assert.Equal(t, EntryModeBlob, mode) + assert.Equal(t, "looooooooooooooooooooooooong-file-name.txt", name) + assert.Equal(t, "12345678901234567890", string(objID.RawValue())) + + mode, name, objID, n, err = ParseCatFileTreeLine(Sha1ObjectFormat, buf) + readCount += n + assert.NoError(t, err) + assert.Equal(t, EntryModeTree, mode) + assert.Equal(t, "some-directory", name) + assert.Equal(t, "abcdefg123abcdefg123", string(objID.RawValue())) + + assert.Equal(t, len(input), readCount) + + _, _, _, n, err = ParseCatFileTreeLine(Sha1ObjectFormat, buf) + assert.ErrorIs(t, err, io.EOF) + assert.Zero(t, n) +} diff --git a/modules/git/pipeline/lfs_nogogit.go b/modules/git/pipeline/lfs_nogogit.go index 91bda0d0e5..9a49dc81a2 100644 --- a/modules/git/pipeline/lfs_nogogit.go +++ b/modules/git/pipeline/lfs_nogogit.go @@ -8,7 +8,6 @@ package pipeline import ( "bufio" "bytes" - "encoding/hex" "io" "sort" "strings" @@ -46,10 +45,6 @@ func findLFSFileFunc(repo *git.Repository, objectID git.ObjectID, revListReader trees := []string{} paths := []string{} - fnameBuf := make([]byte, 4096) - modeBuf := make([]byte, 40) - workingShaBuf := make([]byte, objectID.Type().FullLength()/2) - for scan.Scan() { // Get the next commit ID commitID := scan.Text() @@ -93,23 +88,23 @@ func findLFSFileFunc(repo *git.Repository, objectID git.ObjectID, revListReader case "tree": var n int64 for n < info.Size { - mode, fname, binObjectID, count, err := git.ParseCatFileTreeLine(objectID.Type(), batchReader, modeBuf, fnameBuf, workingShaBuf) + mode, fname, shaID, count, err := git.ParseCatFileTreeLine(objectID.Type(), batchReader) if err != nil { return nil, err } n += int64(count) - if bytes.Equal(binObjectID, 
objectID.RawValue()) { + if bytes.Equal(shaID.RawValue(), objectID.RawValue()) { result := LFSResult{ - Name: curPath + string(fname), + Name: curPath + fname, SHA: curCommit.ID.String(), Summary: strings.Split(strings.TrimSpace(curCommit.CommitMessage), "\n")[0], When: curCommit.Author.When, ParentHashes: curCommit.Parents, } - resultsMap[curCommit.ID.String()+":"+curPath+string(fname)] = &result - } else if string(mode) == git.EntryModeTree.String() { - trees = append(trees, hex.EncodeToString(binObjectID)) - paths = append(paths, curPath+string(fname)+"/") + resultsMap[curCommit.ID.String()+":"+curPath+fname] = &result + } else if mode == git.EntryModeTree { + trees = append(trees, shaID.String()) + paths = append(paths, curPath+fname+"/") } } if _, err := batchReader.Discard(1); err != nil { diff --git a/modules/git/repo_commit.go b/modules/git/repo_commit.go index c10f73690c..8ba3f83386 100644 --- a/modules/git/repo_commit.go +++ b/modules/git/repo_commit.go @@ -362,39 +362,6 @@ func (repo *Repository) CommitsBetweenLimit(last, before *Commit, limit, skip in return repo.parsePrettyFormatLogToList(bytes.TrimSpace(stdout)) } -// CommitsBetweenNotBase returns a list that contains commits between [before, last), excluding commits in baseBranch. -// If before is detached (removed by reset + push) it is not included. -func (repo *Repository) CommitsBetweenNotBase(last, before *Commit, baseBranch string) ([]*Commit, error) { - var stdout []byte - var err error - if before == nil { - stdout, _, err = gitcmd.NewCommand("rev-list"). - AddDynamicArguments(last.ID.String()). - AddOptionValues("--not", baseBranch). - WithDir(repo.Path). - RunStdBytes(repo.Ctx) - } else { - stdout, _, err = gitcmd.NewCommand("rev-list"). - AddDynamicArguments(before.ID.String()+".."+last.ID.String()). - AddOptionValues("--not", baseBranch). - WithDir(repo.Path). 
- RunStdBytes(repo.Ctx) - if err != nil && strings.Contains(err.Error(), "no merge base") { - // future versions of git >= 2.28 are likely to return an error if before and last have become unrelated. - // previously it would return the results of git rev-list before last so let's try that... - stdout, _, err = gitcmd.NewCommand("rev-list"). - AddDynamicArguments(before.ID.String(), last.ID.String()). - AddOptionValues("--not", baseBranch). - WithDir(repo.Path). - RunStdBytes(repo.Ctx) - } - } - if err != nil { - return nil, err - } - return repo.parsePrettyFormatLogToList(bytes.TrimSpace(stdout)) -} - // CommitsBetweenIDs return commits between twoe commits func (repo *Repository) CommitsBetweenIDs(last, before string) ([]*Commit, error) { lastCommit, err := repo.GetCommit(last) diff --git a/modules/git/tree_entry_mode.go b/modules/git/tree_entry_mode.go index 2ceba11374..f80f6bdc75 100644 --- a/modules/git/tree_entry_mode.go +++ b/modules/git/tree_entry_mode.go @@ -66,9 +66,10 @@ func ParseEntryMode(mode string) EntryMode { return EntryModeSymlink case "160000": return EntryModeCommit - case "040000": + case "040000", "40000": // leading-zero is optional return EntryModeTree default: + // if the faster path didn't work, try parsing the mode as an integer and masking off the file type bits // git uses 040000 for tree object, but some users may get 040755 from non-standard git implementations m, _ := strconv.ParseInt(mode, 8, 32) modeInt := EntryMode(m) diff --git a/modules/git/tree_entry_test.go b/modules/git/tree_entry_test.go index 3df6eeab68..bd6a5783b2 100644 --- a/modules/git/tree_entry_test.go +++ b/modules/git/tree_entry_test.go @@ -46,7 +46,9 @@ func TestParseEntryMode(t *testing.T) { {"160755", EntryModeCommit}, {"040000", EntryModeTree}, + {"40000", EntryModeTree}, {"040755", EntryModeTree}, + {"40755", EntryModeTree}, {"777777", EntryModeNoEntry}, // invalid mode } diff --git a/modules/gitrepo/compare.go b/modules/gitrepo/compare.go index 
06cf880d99..7e38d33e6f 100644 --- a/modules/gitrepo/compare.go +++ b/modules/gitrepo/compare.go @@ -42,3 +42,30 @@ func GetDivergingCommits(ctx context.Context, repo Repository, baseBranch, targe } return &DivergeObject{Ahead: ahead, Behind: behind}, nil } + +// GetCommitIDsBetweenReverse returns the last commit IDs between two commits in reverse order (from old to new) with limit. +// If the result exceeds the limit, the old commits IDs will be ignored +func GetCommitIDsBetweenReverse(ctx context.Context, repo Repository, startRef, endRef, notRef string, limit int) ([]string, error) { + genCmd := func(reversions ...string) *gitcmd.Command { + cmd := gitcmd.NewCommand("rev-list", "--reverse"). + AddArguments("-n").AddDynamicArguments(strconv.Itoa(limit)). + AddDynamicArguments(reversions...) + if notRef != "" { // --not should be kept as the last parameter of git command, otherwise the result will be wrong + cmd.AddOptionValues("--not", notRef) + } + return cmd + } + stdout, _, err := RunCmdString(ctx, repo, genCmd(startRef+".."+endRef)) + // example git error message: fatal: origin/main..HEAD: no merge base + if err != nil && strings.Contains(err.Stderr(), "no merge base") { + // future versions of git >= 2.28 are likely to return an error if before and last have become unrelated. + // previously it would return the results of git rev-list before last so let's try that... 
+ stdout, _, err = RunCmdString(ctx, repo, genCmd(startRef, endRef)) + } + if err != nil { + return nil, err + } + + commitIDs := strings.Fields(strings.TrimSpace(stdout)) + return commitIDs, nil +} diff --git a/modules/gitrepo/compare_test.go b/modules/gitrepo/compare_test.go index f8661d9412..2d2af0934d 100644 --- a/modules/gitrepo/compare_test.go +++ b/modules/gitrepo/compare_test.go @@ -40,3 +40,75 @@ func TestRepoGetDivergingCommits(t *testing.T) { Behind: 2, }, do) } + +func TestGetCommitIDsBetweenReverse(t *testing.T) { + repo := &mockRepository{path: "repo1_bare"} + + // tests raw commit IDs + commitIDs, err := GetCommitIDsBetweenReverse(t.Context(), repo, + "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", + "ce064814f4a0d337b333e646ece456cd39fab612", + "", + 100, + ) + assert.NoError(t, err) + assert.Equal(t, []string{ + "8006ff9adbf0cb94da7dad9e537e53817f9fa5c0", + "6fbd69e9823458e6c4a2fc5c0f6bc022b2f2acd1", + "37991dec2c8e592043f47155ce4808d4580f9123", + "feaf4ba6bc635fec442f46ddd4512416ec43c2c2", + "ce064814f4a0d337b333e646ece456cd39fab612", + }, commitIDs) + + commitIDs, err = GetCommitIDsBetweenReverse(t.Context(), repo, + "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", + "ce064814f4a0d337b333e646ece456cd39fab612", + "6fbd69e9823458e6c4a2fc5c0f6bc022b2f2acd1", + 100, + ) + assert.NoError(t, err) + assert.Equal(t, []string{ + "37991dec2c8e592043f47155ce4808d4580f9123", + "feaf4ba6bc635fec442f46ddd4512416ec43c2c2", + "ce064814f4a0d337b333e646ece456cd39fab612", + }, commitIDs) + + commitIDs, err = GetCommitIDsBetweenReverse(t.Context(), repo, + "8d92fc957a4d7cfd98bc375f0b7bb189a0d6c9f2", + "ce064814f4a0d337b333e646ece456cd39fab612", + "", + 3, + ) + assert.NoError(t, err) + assert.Equal(t, []string{ + "37991dec2c8e592043f47155ce4808d4580f9123", + "feaf4ba6bc635fec442f46ddd4512416ec43c2c2", + "ce064814f4a0d337b333e646ece456cd39fab612", + }, commitIDs) + + // test branch names instead of raw commit IDs. 
+ commitIDs, err = GetCommitIDsBetweenReverse(t.Context(), repo, + "test", + "master", + "", + 100, + ) + assert.NoError(t, err) + assert.Equal(t, []string{ + "feaf4ba6bc635fec442f46ddd4512416ec43c2c2", + "ce064814f4a0d337b333e646ece456cd39fab612", + }, commitIDs) + + // add notref to exclude test + commitIDs, err = GetCommitIDsBetweenReverse(t.Context(), repo, + "test", + "master", + "test", + 100, + ) + assert.NoError(t, err) + assert.Equal(t, []string{ + "feaf4ba6bc635fec442f46ddd4512416ec43c2c2", + "ce064814f4a0d337b333e646ece456cd39fab612", + }, commitIDs) +} diff --git a/modules/graceful/manager_windows.go b/modules/graceful/manager_windows.go index 457768d6ca..9592dd6b39 100644 --- a/modules/graceful/manager_windows.go +++ b/modules/graceful/manager_windows.go @@ -1,5 +1,6 @@ // Copyright 2019 The Gitea Authors. All rights reserved. // SPDX-License-Identifier: MIT + // This code is heavily inspired by the archived gofacebook/gracenet/net.go handler //go:build windows diff --git a/modules/highlight/highlight.go b/modules/highlight/highlight.go index c7416c7a10..dca28588e4 100644 --- a/modules/highlight/highlight.go +++ b/modules/highlight/highlight.go @@ -5,13 +5,9 @@ package highlight import ( - "bufio" "bytes" - "fmt" gohtml "html" "html/template" - "io" - "strings" "sync" "code.gitea.io/gitea/modules/log" @@ -19,11 +15,11 @@ import ( "code.gitea.io/gitea/modules/util" "github.com/alecthomas/chroma/v2" - "github.com/alecthomas/chroma/v2/formatters/html" + chromahtml "github.com/alecthomas/chroma/v2/formatters/html" "github.com/alecthomas/chroma/v2/styles" ) -// don't index files larger than this many bytes for performance purposes +// don't highlight files larger than this many bytes for performance purposes const sizeLimit = 1024 * 1024 type globalVarsType struct { @@ -80,6 +76,10 @@ func UnsafeSplitHighlightedLines(code template.HTML) (ret [][]byte) { } } +func htmlEscape(code string) template.HTML { + return template.HTML(gohtml.EscapeString(code)) +} + 
// RenderCodeSlowGuess tries to get a lexer by file name and language first, // if not found, it will try to guess the lexer by code content, which is slow (more than several hundreds of milliseconds). func RenderCodeSlowGuess(fileName, language, code string) (output template.HTML, lexer chroma.Lexer, lexerDisplayName string) { @@ -90,7 +90,7 @@ func RenderCodeSlowGuess(fileName, language, code string) (output template.HTML, } if len(code) > sizeLimit { - return template.HTML(template.HTMLEscapeString(code)), nil, "" + return htmlEscape(code), nil, "" } lexer = detectChromaLexerWithAnalyze(fileName, language, util.UnsafeStringToBytes(code)) // it is also slow @@ -99,91 +99,65 @@ func RenderCodeSlowGuess(fileName, language, code string) (output template.HTML, // RenderCodeByLexer returns a HTML version of code string with chroma syntax highlighting classes func RenderCodeByLexer(lexer chroma.Lexer, code string) template.HTML { - formatter := html.New(html.WithClasses(true), - html.WithLineNumbers(false), - html.PreventSurroundingPre(true), + formatter := chromahtml.New(chromahtml.WithClasses(true), + chromahtml.WithLineNumbers(false), + chromahtml.PreventSurroundingPre(true), ) - htmlbuf := bytes.Buffer{} - htmlw := bufio.NewWriter(&htmlbuf) - iterator, err := lexer.Tokenise(nil, code) if err != nil { log.Error("Can't tokenize code: %v", err) - return template.HTML(template.HTMLEscapeString(code)) - } - // style not used for live site but need to pass something - err = formatter.Format(htmlw, globalVars().githubStyles, iterator) - if err != nil { - log.Error("Can't format code: %v", err) - return template.HTML(template.HTMLEscapeString(code)) + return htmlEscape(code) } - _ = htmlw.Flush() - // Chroma will add newlines for certain lexers in order to highlight them properly - // Once highlighted, strip them here, so they don't cause copy/paste trouble in HTML output - return template.HTML(strings.TrimSuffix(htmlbuf.String(), "\n")) + htmlBuf := &bytes.Buffer{} + // 
style not used for live site but need to pass something + err = formatter.Format(htmlBuf, globalVars().githubStyles, iterator) + if err != nil { + log.Error("Can't format code: %v", err) + return htmlEscape(code) + } + return template.HTML(util.UnsafeBytesToString(htmlBuf.Bytes())) } // RenderFullFile returns a slice of chroma syntax highlighted HTML lines of code and the matched lexer name -func RenderFullFile(fileName, language string, code []byte) ([]template.HTML, string, error) { - if len(code) > sizeLimit { - return RenderPlainText(code), "", nil +func RenderFullFile(fileName, language string, code []byte) ([]template.HTML, string) { + if language == LanguagePlaintext || len(code) > sizeLimit { + return renderPlainText(code), formatLexerName(LanguagePlaintext) } - - formatter := html.New(html.WithClasses(true), - html.WithLineNumbers(false), - html.PreventSurroundingPre(true), - ) - lexer := detectChromaLexerWithAnalyze(fileName, language, code) lexerName := formatLexerName(lexer.Config().Name) - - iterator, err := lexer.Tokenise(nil, string(code)) - if err != nil { - return nil, "", fmt.Errorf("can't tokenize code: %w", err) + rendered := RenderCodeByLexer(lexer, util.UnsafeBytesToString(code)) + unsafeLines := UnsafeSplitHighlightedLines(rendered) + lines := make([]template.HTML, len(unsafeLines)) + for idx, lineBytes := range unsafeLines { + lines[idx] = template.HTML(util.UnsafeBytesToString(lineBytes)) } - - tokensLines := chroma.SplitTokensIntoLines(iterator.Tokens()) - htmlBuf := &bytes.Buffer{} - - lines := make([]template.HTML, 0, len(tokensLines)) - for _, tokens := range tokensLines { - iterator = chroma.Literator(tokens...) 
- err = formatter.Format(htmlBuf, globalVars().githubStyles, iterator) - if err != nil { - return nil, "", fmt.Errorf("can't format code: %w", err) - } - lines = append(lines, template.HTML(htmlBuf.String())) - htmlBuf.Reset() - } - - return lines, lexerName, nil + return lines, lexerName } -// RenderPlainText returns non-highlighted HTML for code -func RenderPlainText(code []byte) []template.HTML { - r := bufio.NewReader(bytes.NewReader(code)) - m := make([]template.HTML, 0, bytes.Count(code, []byte{'\n'})+1) - for { - content, err := r.ReadString('\n') - if err != nil && err != io.EOF { - log.Error("failed to read string from buffer: %v", err) - break +// renderPlainText returns non-highlighted HTML for code +func renderPlainText(code []byte) []template.HTML { + lines := make([]template.HTML, 0, bytes.Count(code, []byte{'\n'})+1) + pos := 0 + for pos < len(code) { + var content []byte + nextPos := bytes.IndexByte(code[pos:], '\n') + if nextPos == -1 { + content = code[pos:] + pos = len(code) + } else { + content = code[pos : pos+nextPos+1] + pos += nextPos + 1 } - if content == "" && err == io.EOF { - break - } - s := template.HTML(gohtml.EscapeString(content)) - m = append(m, s) + lines = append(lines, htmlEscape(util.UnsafeBytesToString(content))) } - return m + return lines } func formatLexerName(name string) string { - if name == "fallback" { + if name == LanguagePlaintext || name == chromaLexerFallback { return "Plaintext" } - return util.ToTitleCaseNoLower(name) } diff --git a/modules/highlight/highlight_test.go b/modules/highlight/highlight_test.go index d026210475..211132b255 100644 --- a/modules/highlight/highlight_test.go +++ b/modules/highlight/highlight_test.go @@ -118,8 +118,7 @@ c=2 for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - out, lexerName, err := RenderFullFile(tt.name, "", []byte(tt.code)) - assert.NoError(t, err) + out, lexerName := RenderFullFile(tt.name, "", []byte(tt.code)) assert.Equal(t, tt.want, out) assert.Equal(t, 
tt.lexerName, lexerName) }) @@ -182,7 +181,7 @@ c=2`), for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - out := RenderPlainText([]byte(tt.code)) + out := renderPlainText([]byte(tt.code)) assert.Equal(t, tt.want, out) }) } diff --git a/modules/highlight/lexerdetect.go b/modules/highlight/lexerdetect.go index 5d98578f35..fe430f463f 100644 --- a/modules/highlight/lexerdetect.go +++ b/modules/highlight/lexerdetect.go @@ -16,7 +16,11 @@ import ( "github.com/go-enry/go-enry/v2" ) -const mapKeyLowerPrefix = "lower/" +const ( + mapKeyLowerPrefix = "lower/" + LanguagePlaintext = "plaintext" + chromaLexerFallback = "fallback" +) // chromaLexers is fully managed by us to do fast lookup for chroma lexers by file name or language name // Don't use lexers.Get because it is very slow in many cases (iterate all rules, filepath glob match, etc.) diff --git a/modules/indexer/code/search.go b/modules/indexer/code/search.go index 009d659d76..b5eb5116b0 100644 --- a/modules/indexer/code/search.go +++ b/modules/indexer/code/search.go @@ -74,7 +74,7 @@ func HighlightSearchResultCode(filename, language string, lineNums []int, code s // we should highlight the whole code block first, otherwise it doesn't work well with multiple line highlighting lexer := highlight.DetectChromaLexerByFileName(filename, language) hl := highlight.RenderCodeByLexer(lexer, code) - highlightedLines := strings.Split(string(hl), "\n") + highlightedLines := highlight.UnsafeSplitHighlightedLines(hl) // The lineNums outputted by render might not match the original lineNums, because "highlight" removes the last `\n` lines := make([]*ResultLine, min(len(highlightedLines), len(lineNums))) diff --git a/modules/json/jsongoccy.go b/modules/json/jsongoccy.go deleted file mode 100644 index 77ea047fa7..0000000000 --- a/modules/json/jsongoccy.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2025 The Gitea Authors. All rights reserved. 
-// SPDX-License-Identifier: MIT - -package json - -import ( - "bytes" - "io" - - "github.com/goccy/go-json" -) - -var _ Interface = jsonGoccy{} - -type jsonGoccy struct{} - -func (jsonGoccy) Marshal(v any) ([]byte, error) { - return json.Marshal(v) -} - -func (jsonGoccy) Unmarshal(data []byte, v any) error { - return json.Unmarshal(data, v) -} - -func (jsonGoccy) NewEncoder(writer io.Writer) Encoder { - return json.NewEncoder(writer) -} - -func (jsonGoccy) NewDecoder(reader io.Reader) Decoder { - return json.NewDecoder(reader) -} - -func (jsonGoccy) Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error { - return json.Indent(dst, src, prefix, indent) -} diff --git a/modules/json/jsonlegacy.go b/modules/json/jsonlegacy.go index 156e456041..81d644d4f4 100644 --- a/modules/json/jsonlegacy.go +++ b/modules/json/jsonlegacy.go @@ -6,11 +6,12 @@ package json import ( + "encoding/json" //nolint:depguard // this package wraps it "io" ) func getDefaultJSONHandler() Interface { - return jsonGoccy{} + return jsonV1{} } func MarshalKeepOptionalEmpty(v any) ([]byte, error) { @@ -20,3 +21,5 @@ func MarshalKeepOptionalEmpty(v any) ([]byte, error) { func NewDecoderCaseInsensitive(reader io.Reader) Decoder { return DefaultJSONHandler.NewDecoder(reader) } + +type Value = json.RawMessage diff --git a/modules/json/jsonv2.go b/modules/json/jsonv2.go index 0bba2783bc..c4afc9513b 100644 --- a/modules/json/jsonv2.go +++ b/modules/json/jsonv2.go @@ -8,6 +8,7 @@ package json import ( "bytes" jsonv1 "encoding/json" //nolint:depguard // this package wraps it + "encoding/json/jsontext" //nolint:depguard // this package wraps it jsonv2 "encoding/json/v2" //nolint:depguard // this package wraps it "io" ) @@ -90,3 +91,5 @@ func (d *jsonV2Decoder) Decode(v any) error { func NewDecoderCaseInsensitive(reader io.Reader) Decoder { return &jsonV2Decoder{reader: reader, opts: jsonV2.unmarshalCaseInsensitiveOptions} } + +type Value = jsontext.Value diff --git 
a/modules/markup/external/external.go b/modules/markup/external/external.go index 4d447e301a..4b3c96fd33 100644 --- a/modules/markup/external/external.go +++ b/modules/markup/external/external.go @@ -21,7 +21,33 @@ import ( // RegisterRenderers registers all supported third part renderers according settings func RegisterRenderers() { - markup.RegisterRenderer(&openAPIRenderer{}) + markup.RegisterRenderer(&frontendRenderer{ + name: "openapi-swagger", + patterns: []string{ + "openapi.yaml", + "openapi.yml", + "openapi.json", + "swagger.yaml", + "swagger.yml", + "swagger.json", + }, + }) + + markup.RegisterRenderer(&frontendRenderer{ + name: "viewer-3d", + patterns: []string{ + // It needs more logic to make it overall right (render a text 3D model automatically): + // we need to distinguish the ambiguous filename extensions. + // For example: "*.amf, *.obj, *.off, *.step" might be or not be a 3D model file. + // So when it is a text file, we can't assume that "we only render it by 3D plugin", + // otherwise the end users would be impossible to view its real content when the file is not a 3D model. + "*.3dm", "*.3ds", "*.3mf", "*.amf", "*.bim", "*.brep", + "*.dae", "*.fbx", "*.fcstd", "*.glb", "*.gltf", + "*.ifc", "*.igs", "*.iges", "*.stp", "*.step", + "*.stl", "*.obj", "*.off", "*.ply", "*.wrl", + }, + }) + for _, renderer := range setting.ExternalMarkupRenderers { markup.RegisterRenderer(&Renderer{renderer}) } diff --git a/modules/markup/external/frontend.go b/modules/markup/external/frontend.go new file mode 100644 index 0000000000..7327503d28 --- /dev/null +++ b/modules/markup/external/frontend.go @@ -0,0 +1,95 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package external + +import ( + "encoding/base64" + "io" + "unicode/utf8" + + "code.gitea.io/gitea/modules/htmlutil" + "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/public" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" +) + +type frontendRenderer struct { + name string + patterns []string +} + +var ( + _ markup.PostProcessRenderer = (*frontendRenderer)(nil) + _ markup.ExternalRenderer = (*frontendRenderer)(nil) +) + +func (p *frontendRenderer) Name() string { + return p.name +} + +func (p *frontendRenderer) NeedPostProcess() bool { + return false +} + +func (p *frontendRenderer) FileNamePatterns() []string { + // TODO: the file extensions are ambiguous, even if the file name matches, it doesn't mean that the file is a 3D model + // There are some approaches to make it more accurate, but they are all complicated: + // A. Make backend know everything (detect a file is a 3D model or not) + // B. Let frontend renders to try render one by one + // + // If there would be more frontend renders in the future, we need to implement the "frontend" approach: + // 1. Make backend or parent window collect the supported extensions of frontend renders (done: backend external render framework) + // 2. If the current file matches any extension, start the general iframe embedded render (done: this renderer) + // 3. The iframe window calls the frontend renders one by one (done: frontend external render) + // 4. 
Report the render result to parent by postMessage (TODO: when needed) + return p.patterns +} + +func (p *frontendRenderer) SanitizerRules() []setting.MarkupSanitizerRule { + return nil +} + +func (p *frontendRenderer) GetExternalRendererOptions() (ret markup.ExternalRendererOptions) { + ret.SanitizerDisabled = true + ret.DisplayInIframe = true + ret.ContentSandbox = "allow-scripts allow-forms allow-modals allow-popups allow-downloads" + return ret +} + +func (p *frontendRenderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { + if ctx.RenderOptions.StandalonePageOptions == nil { + opts := p.GetExternalRendererOptions() + return markup.RenderIFrame(ctx, &opts, output) + } + + content, err := util.ReadWithLimit(input, int(setting.UI.MaxDisplayFileSize)) + if err != nil { + return err + } + + contentEncoding, contentString := "text", util.UnsafeBytesToString(content) + if !utf8.Valid(content) { + contentEncoding = "base64" + contentString = base64.StdEncoding.EncodeToString(content) + } + + _, err = htmlutil.HTMLPrintf(output, + ` + + + + + + +
+ + + +`, + p.name, ctx.RenderOptions.RelativePath, + contentEncoding, contentString, + public.AssetURI("js/external-render-frontend.js")) + return err +} diff --git a/modules/markup/external/openapi.go b/modules/markup/external/openapi.go deleted file mode 100644 index de06e7dac7..0000000000 --- a/modules/markup/external/openapi.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2026 The Gitea Authors. All rights reserved. -// SPDX-License-Identifier: MIT - -package external - -import ( - "fmt" - "html" - "io" - - "code.gitea.io/gitea/modules/markup" - "code.gitea.io/gitea/modules/public" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/util" -) - -type openAPIRenderer struct{} - -var ( - _ markup.PostProcessRenderer = (*openAPIRenderer)(nil) - _ markup.ExternalRenderer = (*openAPIRenderer)(nil) -) - -func (p *openAPIRenderer) Name() string { - return "openapi" -} - -func (p *openAPIRenderer) NeedPostProcess() bool { - return false -} - -func (p *openAPIRenderer) FileNamePatterns() []string { - return []string{ - "openapi.yaml", - "openapi.yml", - "openapi.json", - "swagger.yaml", - "swagger.yml", - "swagger.json", - } -} - -func (p *openAPIRenderer) SanitizerRules() []setting.MarkupSanitizerRule { - return nil -} - -func (p *openAPIRenderer) GetExternalRendererOptions() (ret markup.ExternalRendererOptions) { - ret.SanitizerDisabled = true - ret.DisplayInIframe = true - ret.ContentSandbox = "" - return ret -} - -func (p *openAPIRenderer) Render(ctx *markup.RenderContext, input io.Reader, output io.Writer) error { - content, err := util.ReadWithLimit(input, int(setting.UI.MaxDisplayFileSize)) - if err != nil { - return err - } - // TODO: can extract this to a tmpl file later - _, err = io.WriteString(output, fmt.Sprintf( - ` - - - - - - -
- - -`, - public.AssetURI("css/swagger.css"), - html.EscapeString(ctx.RenderOptions.RelativePath), - html.EscapeString(util.UnsafeBytesToString(content)), - public.AssetURI("js/swagger.js"), - )) - return err -} diff --git a/modules/markup/html.go b/modules/markup/html.go index 1c2ae6918d..0fe37ae305 100644 --- a/modules/markup/html.go +++ b/modules/markup/html.go @@ -6,6 +6,7 @@ package markup import ( "bytes" "fmt" + "html/template" "io" "regexp" "slices" @@ -149,9 +150,9 @@ func PostProcessDefault(ctx *RenderContext, input io.Reader, output io.Writer) e return postProcess(ctx, procs, input, output) } -// PostProcessCommitMessage will use the same logic as PostProcess, but will disable -// the shortLinkProcessor. -func PostProcessCommitMessage(ctx *RenderContext, content string) (string, error) { +// PostProcessCommitMessage will use the same logic as PostProcess, but will disable the shortLinkProcessor. +// FIXME: this function and its family have a very strange design: it takes HTML as input and output, processes the "escaped" content. 
+func PostProcessCommitMessage(ctx *RenderContext, content template.HTML) (template.HTML, error) { procs := []processor{ fullIssuePatternProcessor, comparePatternProcessor, @@ -165,7 +166,8 @@ func PostProcessCommitMessage(ctx *RenderContext, content string) (string, error emojiProcessor, emojiShortCodeProcessor, } - return postProcessString(ctx, procs, content) + s, err := postProcessString(ctx, procs, string(content)) + return template.HTML(s), err } var emojiProcessors = []processor{ diff --git a/modules/markup/markdown/goldmark.go b/modules/markup/markdown/goldmark.go index 555a171685..4a560517f2 100644 --- a/modules/markup/markdown/goldmark.go +++ b/modules/markup/markdown/goldmark.go @@ -70,6 +70,8 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa } case *ast.CodeSpan: g.transformCodeSpan(ctx, v, reader) + case *ast.FencedCodeBlock: + g.transformFencedCodeblock(v, reader) case *ast.Blockquote: return g.transformBlockquote(v, reader) } diff --git a/modules/markup/markdown/markdown.go b/modules/markup/markdown/markdown.go index cca44a8774..f6a6cb26c6 100644 --- a/modules/markup/markdown/markdown.go +++ b/modules/markup/markdown/markdown.go @@ -21,7 +21,6 @@ import ( chromahtml "github.com/alecthomas/chroma/v2/formatters/html" "github.com/yuin/goldmark" highlighting "github.com/yuin/goldmark-highlighting/v2" - meta "github.com/yuin/goldmark-meta" "github.com/yuin/goldmark/ast" "github.com/yuin/goldmark/extension" "github.com/yuin/goldmark/parser" @@ -166,7 +165,6 @@ func SpecializedMarkdown(ctx *markup.RenderContext) *GlodmarkRender { ParseBlockDollar: setting.Markdown.MathCodeBlockOptions.ParseBlockDollar, ParseBlockSquareBrackets: setting.Markdown.MathCodeBlockOptions.ParseBlockSquareBrackets, // this is a bad syntax "\[ ... 
\]", it conflicts with normal markdown escaping }), - meta.Meta, ), goldmark.WithParserOptions( parser.WithAttribute(), diff --git a/modules/markup/markdown/markdown_test.go b/modules/markup/markdown/markdown_test.go index 261c4e780c..2f14a0fae9 100644 --- a/modules/markup/markdown/markdown_test.go +++ b/modules/markup/markdown/markdown_test.go @@ -429,9 +429,12 @@ test --- test `, - `- item1 -- item2 - + `
+
    +
  • item1
  • +
  • item2
  • +
+

test

`, }, @@ -443,8 +446,8 @@ anything --- test `, - `anything - + `
+

anything

test

`, }, @@ -471,14 +474,26 @@ foo: bar
  • task 1
+`, + }, + // we have our own frontmatter parser, don't need to use github.com/yuin/goldmark-meta + { + "InvalidFrontmatter", + `--- +foo +`, + `
+

foo

`, }, } - for _, test := range testcases { - res, err := markdown.RenderString(markup.NewTestRenderContext(), test.input) - assert.NoError(t, err, "Unexpected error in testcase: %q", test.name) - assert.Equal(t, test.expected, string(res), "Unexpected result in testcase %q", test.name) + for _, tt := range testcases { + t.Run(tt.name, func(t *testing.T) { + res, err := markdown.RenderString(markup.NewTestRenderContext(), tt.input) + assert.NoError(t, err, "Unexpected error in testcase: %q", tt.name) + assert.Equal(t, tt.expected, string(res), "Unexpected result in testcase %q", tt.name) + }) } } @@ -568,3 +583,39 @@ func TestMarkdownLink(t *testing.T) { assert.Equal(t, `

https://example.com/__init__.py

`, string(result)) } + +func TestMarkdownUlDir(t *testing.T) { + defer test.MockVariableValue(&markup.RenderBehaviorForTesting.DisableAdditionalAttributes, false)() + result, err := markdown.RenderString(markup.NewTestRenderContext(), ` +* a + * b +`) + assert.NoError(t, err) + assert.Equal(t, `
    +
  • a +
      +
    • b
    • +
    +
  • +
+`, string(result)) +} + +func TestMarkdownFencedCodeBlock(t *testing.T) { + testRender := func(input, expected string) { + buffer, err := markdown.RenderString(markup.NewTestRenderContext(), input) + assert.NoError(t, err) + assert.Equal(t, strings.TrimSpace(expected), strings.TrimSpace(string(buffer))) + } + const nl = "\n" + const prefix = `
`
+	const suffix = `
` + + testRender("```\ncode\n```", prefix+`code`+nl+``+suffix) + + const jsCommon = prefix + `code` + nl + `` + suffix + testRender("```js\ncode\n```", jsCommon) + testRender("```js:app.ts\ncode\n```", jsCommon) + testRender("```js,ignore\ncode\n```", jsCommon) + testRender("```js ignore\ncode\n```", jsCommon) +} diff --git a/modules/markup/markdown/meta.go b/modules/markup/markdown/meta.go index e76b253ecd..6ddd892110 100644 --- a/modules/markup/markdown/meta.go +++ b/modules/markup/markdown/meta.go @@ -60,8 +60,8 @@ func ExtractMetadata(contents string, out any) (string, error) { return string(body), err } -// ExtractMetadata consumes a markdown file, parses YAML frontmatter, -// and returns the frontmatter metadata separated from the markdown content +// ExtractMetadataBytes consumes a Markdown content, parses YAML frontmatter, +// and returns the frontmatter metadata separated from the Markdown content func ExtractMetadataBytes(contents []byte, out any) ([]byte, error) { var front, body []byte diff --git a/modules/markup/markdown/transform_codeblock.go b/modules/markup/markdown/transform_codeblock.go new file mode 100644 index 0000000000..de9264c4c4 --- /dev/null +++ b/modules/markup/markdown/transform_codeblock.go @@ -0,0 +1,32 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package markdown + +import ( + "github.com/yuin/goldmark/ast" + "github.com/yuin/goldmark/text" +) + +func (g *ASTTransformer) transformFencedCodeblock(v *ast.FencedCodeBlock, reader text.Reader) { + // * Some engines support a meta syntax for appending the filename after the language, separated by a colon + // * https://www.glukhov.org/documentation-tools/markdown/markdown-codeblocks/ + // * Some engines support additional "options" after the language, separated by a space or comma: ```rust,ignore``` + // * https://docs.readme.com/rdmd/docs/code-blocks + // * https://next-book.vercel.app/reference/fencedcode + if v.Info == nil { + return + } + info := v.Info.Segment.Value(reader.Source()) + newEnd := -1 + for i, b := range info { + if b == ' ' || b == ',' || b == ':' { + newEnd = i + break + } + } + if newEnd != -1 { + start := v.Info.Segment.Start + v.Info = ast.NewTextSegment(text.NewSegment(start, start+newEnd)) + } +} diff --git a/modules/markup/markdown/transform_list.go b/modules/markup/markdown/transform_list.go index c89ad2f2cf..6cafa8ff78 100644 --- a/modules/markup/markdown/transform_list.go +++ b/modules/markup/markdown/transform_list.go @@ -81,5 +81,16 @@ func (g *ASTTransformer) transformList(_ *markup.RenderContext, v *ast.List, rc v.AppendChild(v, newChild) } } - g.applyElementDir(v) + + nestedList := false + for p := v.Parent(); p != nil; p = p.Parent() { + if _, ok := p.(*ast.List); ok { + nestedList = true + break + } + } + if !nestedList { + // "dir=auto" should be only added to top-level "ul". 
https://github.com/go-gitea/gitea/issues/35058 + g.applyElementDir(v) + } } diff --git a/modules/markup/render.go b/modules/markup/render.go index c0d44c72fc..6e8838d49f 100644 --- a/modules/markup/render.go +++ b/modules/markup/render.go @@ -6,6 +6,7 @@ package markup import ( "bytes" "context" + "errors" "fmt" "html/template" "io" @@ -38,6 +39,15 @@ var RenderBehaviorForTesting struct { DisableAdditionalAttributes bool } +type WebThemeInterface interface { + PublicAssetURI() string +} + +type StandalonePageOptions struct { + CurrentWebTheme WebThemeInterface + RenderQueryString string +} + type RenderOptions struct { UseAbsoluteLink bool @@ -55,7 +65,7 @@ type RenderOptions struct { Metas map[string]string // used by external render. the router "/org/repo/render/..." will output the rendered content in a standalone page - InStandalonePage bool + StandalonePageOptions *StandalonePageOptions // EnableHeadingIDGeneration controls whether to auto-generate IDs for HTML headings without id attribute. // This should be enabled for repository files and wiki pages, but disabled for comments to avoid duplicate IDs. 
@@ -127,8 +137,8 @@ func (ctx *RenderContext) WithMetas(metas map[string]string) *RenderContext { return ctx } -func (ctx *RenderContext) WithInStandalonePage(v bool) *RenderContext { - ctx.RenderOptions.InStandalonePage = v +func (ctx *RenderContext) WithStandalonePage(opts StandalonePageOptions) *RenderContext { + ctx.RenderOptions.StandalonePageOptions = &opts return ctx } @@ -197,20 +207,24 @@ func RenderString(ctx *RenderContext, content string) (string, error) { return buf.String(), nil } -func renderIFrame(ctx *RenderContext, sandbox string, output io.Writer) error { +func RenderIFrame(ctx *RenderContext, opts *ExternalRendererOptions, output io.Writer) error { + ownerName, repoName := ctx.RenderOptions.Metas["user"], ctx.RenderOptions.Metas["repo"] + refSubURL := ctx.RenderOptions.Metas["RefTypeNameSubURL"] + if ownerName == "" || repoName == "" || refSubURL == "" { + setting.PanicInDevOrTesting("RenderIFrame requires user, repo and RefTypeNameSubURL metas") + return errors.New("RenderIFrame requires user, repo and RefTypeNameSubURL metas") + } src := fmt.Sprintf("%s/%s/%s/render/%s/%s", setting.AppSubURL, - url.PathEscape(ctx.RenderOptions.Metas["user"]), - url.PathEscape(ctx.RenderOptions.Metas["repo"]), - util.PathEscapeSegments(ctx.RenderOptions.Metas["RefTypeNameSubURL"]), + url.PathEscape(ownerName), + url.PathEscape(repoName), + ctx.RenderOptions.Metas["RefTypeNameSubURL"], util.PathEscapeSegments(ctx.RenderOptions.RelativePath), ) - - var sandboxAttrValue template.HTML - if sandbox != "" { - sandboxAttrValue = htmlutil.HTMLFormat(`sandbox="%s"`, sandbox) + var extraAttrs template.HTML + if opts.ContentSandbox != "" { + extraAttrs = htmlutil.HTMLFormat(` sandbox="%s"`, opts.ContentSandbox) } - iframe := htmlutil.HTMLFormat(``, src, sandboxAttrValue) - _, err := io.WriteString(output, string(iframe)) + _, err := htmlutil.HTMLPrintf(output, ``, src, extraAttrs) return err } @@ -222,7 +236,7 @@ func pipes() (io.ReadCloser, io.WriteCloser, func()) { } } 
-func getExternalRendererOptions(renderer Renderer) (ret ExternalRendererOptions, _ bool) { +func GetExternalRendererOptions(renderer Renderer) (ret ExternalRendererOptions, _ bool) { if externalRender, ok := renderer.(ExternalRenderer); ok { return externalRender.GetExternalRendererOptions(), true } @@ -231,17 +245,23 @@ func getExternalRendererOptions(renderer Renderer) (ret ExternalRendererOptions, func RenderWithRenderer(ctx *RenderContext, renderer Renderer, input io.Reader, output io.Writer) error { var extraHeadHTML template.HTML - if extOpts, ok := getExternalRendererOptions(renderer); ok && extOpts.DisplayInIframe { - if !ctx.RenderOptions.InStandalonePage { + if extOpts, ok := GetExternalRendererOptions(renderer); ok && extOpts.DisplayInIframe { + if ctx.RenderOptions.StandalonePageOptions == nil { // for an external "DisplayInIFrame" render, it could only output its content in a standalone page // otherwise, a `, ret) + + ret = render(ctx, ExternalRendererOptions{ContentSandbox: "allow"}) + assert.Equal(t, ``, ret) +} diff --git a/modules/markup/sanitizer_default.go b/modules/markup/sanitizer_default.go index 77ba8bf4f4..447cf4807e 100644 --- a/modules/markup/sanitizer_default.go +++ b/modules/markup/sanitizer_default.go @@ -56,6 +56,11 @@ func (st *Sanitizer) createDefaultPolicy() *bluemonday.Policy { policy.AllowAttrs("src", "autoplay", "controls").OnElements("video") + // Native support of "" + // ATTENTION: it only works with "auto" theme, because "media" query doesn't work with the theme chosen by end user manually. + // For example: browser's color scheme is "dark", but end user chooses "light" theme. Maybe it needs JS to help to make it work. 
+ policy.AllowAttrs("media", "srcset").OnElements("source") + policy.AllowAttrs("loading").OnElements("img") // Allow generally safe attributes (reference: https://github.com/jch/html-pipeline) @@ -86,6 +91,7 @@ func (st *Sanitizer) createDefaultPolicy() *bluemonday.Policy { "dl", "dt", "dd", "kbd", "q", "samp", "var", "hr", "ruby", "rt", "rp", "li", "tr", "td", "th", "s", "strike", "summary", "details", "caption", "figure", "figcaption", "abbr", "bdo", "cite", "dfn", "mark", "small", "span", "time", "video", "wbr", + "picture", "source", } // FIXME: Need to handle longdesc in img but there is no easy way to do it policy.AllowAttrs(generalSafeAttrs...).OnElements(generalSafeElements...) diff --git a/modules/markup/sanitizer_default_test.go b/modules/markup/sanitizer_default_test.go index e5ba018e1b..e66f00c02f 100644 --- a/modules/markup/sanitizer_default_test.go +++ b/modules/markup/sanitizer_default_test.go @@ -58,6 +58,9 @@ func TestSanitizer(t *testing.T) { `my custom URL scheme`, `my custom URL scheme`, `my custom URL scheme`, `my custom URL scheme`, + // picture + `c`, `c`, + // Disallow dangerous url schemes `bad`, `bad`, `bad`, `bad`, diff --git a/modules/migration/schemas_bindata.go b/modules/migration/schemas_bindata.go index 2f0e254408..d7c7081596 100644 --- a/modules/migration/schemas_bindata.go +++ b/modules/migration/schemas_bindata.go @@ -12,10 +12,10 @@ import ( "path" "sync" - _ "embed" - "code.gitea.io/gitea/modules/assetfs" + _ "embed" + "github.com/santhosh-tekuri/jsonschema/v6" ) diff --git a/modules/options/options_bindata.go b/modules/options/options_bindata.go index b2321d7eb5..f85f30065e 100644 --- a/modules/options/options_bindata.go +++ b/modules/options/options_bindata.go @@ -10,9 +10,9 @@ package options import ( "sync" - _ "embed" - "code.gitea.io/gitea/modules/assetfs" + + _ "embed" ) //go:embed bindata.dat diff --git a/modules/packages/nuget/metadata.go b/modules/packages/nuget/metadata.go index 5124627395..ae15e4ec83 100644 --- 
a/modules/packages/nuget/metadata.go +++ b/modules/packages/nuget/metadata.go @@ -140,7 +140,7 @@ type nuspecPackage struct { func ParsePackageMetaData(r io.ReaderAt, size int64) (*Package, error) { archive, err := zip.NewReader(r, size) if err != nil { - return nil, err + return nil, util.NewInvalidArgumentErrorf("unable to parse package meta: %v", err) } for _, file := range archive.File { diff --git a/modules/packages/nuget/symbol_extractor.go b/modules/packages/nuget/symbol_extractor.go index 2eadee5463..5e398151e8 100644 --- a/modules/packages/nuget/symbol_extractor.go +++ b/modules/packages/nuget/symbol_extractor.go @@ -42,7 +42,7 @@ func (l PortablePdbList) Close() { func ExtractPortablePdb(r io.ReaderAt, size int64) (PortablePdbList, error) { archive, err := zip.NewReader(r, size) if err != nil { - return nil, err + return nil, util.NewInvalidArgumentErrorf("unable to extract portable pdb: %v", err) } var pdbs PortablePdbList diff --git a/modules/packages/rpm/metadata.go b/modules/packages/rpm/metadata.go index f4f78c2cab..d8ac7ea75f 100644 --- a/modules/packages/rpm/metadata.go +++ b/modules/packages/rpm/metadata.go @@ -46,10 +46,11 @@ type Package struct { } type VersionMetadata struct { - License string `json:"license,omitempty"` - ProjectURL string `json:"project_url,omitempty"` - Summary string `json:"summary,omitempty"` - Description string `json:"description,omitempty"` + License string `json:"license,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + Summary string `json:"summary,omitempty"` + Description string `json:"description,omitempty"` + Updates []*Update `json:"updates,omitempty"` } type FileMetadata struct { @@ -296,3 +297,43 @@ func getChangelogs(h *rpmutils.RpmHeader) []*Changelog { } return changelogs } + +type DateAttr struct { + Date string `xml:"date,attr" json:"date"` +} + +type Update struct { + From string `xml:"from,attr" json:"from"` + Status string `xml:"status,attr" json:"status"` + Type string `xml:"type,attr" 
json:"type"` + Version string `xml:"version,attr" json:"version"` + ID string `xml:"id" json:"id"` + Title string `xml:"title" json:"title"` + Severity string `xml:"severity" json:"severity"` + Description string `xml:"description" json:"description"` + Issued *DateAttr `xml:"issued" json:"issued"` + Updated *DateAttr `xml:"updated" json:"updated"` + References []*Reference `xml:"references>reference" json:"references"` + PkgList []*Collection `xml:"pkglist>collection" json:"pkg_list"` +} + +type Reference struct { + Href string `xml:"href,attr" json:"href"` + ID string `xml:"id,attr" json:"id"` + Title string `xml:"title,attr" json:"title"` + Type string `xml:"type,attr" json:"type"` +} + +type Collection struct { + Short string `xml:"short,attr" json:"short"` + Packages []*UpdatePackage `xml:"package" json:"packages"` +} + +type UpdatePackage struct { + Arch string `xml:"arch,attr" json:"arch"` + Name string `xml:"name,attr" json:"name"` + Release string `xml:"release,attr" json:"release"` + Src string `xml:"src,attr" json:"src"` + Version string `xml:"version,attr" json:"version"` + Filename string `xml:"filename" json:"filename"` +} diff --git a/modules/packages/swift/metadata.go b/modules/packages/swift/metadata.go index 78925c6e6d..d0137f8dfe 100644 --- a/modules/packages/swift/metadata.go +++ b/modules/packages/swift/metadata.go @@ -47,6 +47,7 @@ type Metadata struct { Keywords []string `json:"keywords,omitempty"` RepositoryURL string `json:"repository_url,omitempty"` License string `json:"license,omitempty"` + LicenseURL string `json:"license_url,omitempty"` Author Person `json:"author"` Manifests map[string]*Manifest `json:"manifests,omitempty"` } @@ -67,7 +68,8 @@ type SoftwareSourceCode struct { Keywords []string `json:"keywords,omitempty"` CodeRepository string `json:"codeRepository,omitempty"` License string `json:"license,omitempty"` - Author Person `json:"author"` + LicenseURL string `json:"licenseURL,omitempty"` + Author *Person 
`json:"author,omitempty"` ProgrammingLanguage ProgrammingLanguage `json:"programmingLanguage"` RepositoryURLs []string `json:"repositoryURLs,omitempty"` } @@ -181,26 +183,31 @@ func ParsePackage(sr io.ReaderAt, size int64, mr io.Reader) (*Package, error) { if err := json.NewDecoder(mr).Decode(&ssc); err != nil { return nil, err } - p.Metadata.Description = ssc.Description p.Metadata.Keywords = ssc.Keywords p.Metadata.License = ssc.License - author := Person{ - Name: ssc.Author.Name, - GivenName: ssc.Author.GivenName, - MiddleName: ssc.Author.MiddleName, - FamilyName: ssc.Author.FamilyName, + p.Metadata.LicenseURL = ssc.LicenseURL + if ssc.Author != nil { + author := Person{ + Name: ssc.Author.Name, + GivenName: ssc.Author.GivenName, + MiddleName: ssc.Author.MiddleName, + FamilyName: ssc.Author.FamilyName, + } + // If Name is not provided, generate it from individual name components + if author.Name == "" { + author.Name = author.String() + } + p.Metadata.Author = author } - // If Name is not provided, generate it from individual name components - if author.Name == "" { - author.Name = author.String() - } - p.Metadata.Author = author p.Metadata.RepositoryURL = ssc.CodeRepository if !validation.IsValidURL(p.Metadata.RepositoryURL) { p.Metadata.RepositoryURL = "" } + if !validation.IsValidURL(p.Metadata.LicenseURL) { + p.Metadata.LicenseURL = "" + } p.RepositoryURLs = ssc.RepositoryURLs } diff --git a/modules/packages/swift/metadata_test.go b/modules/packages/swift/metadata_test.go index 461773cbfc..440bcb9fac 100644 --- a/modules/packages/swift/metadata_test.go +++ b/modules/packages/swift/metadata_test.go @@ -4,11 +4,12 @@ package swift import ( - "archive/zip" "bytes" "strings" "testing" + "code.gitea.io/gitea/modules/test" + "github.com/hashicorp/go-version" "github.com/stretchr/testify/assert" ) @@ -18,36 +19,24 @@ const ( packageVersion = "1.0.1" packageDescription = "Package Description" packageRepositoryURL = "https://gitea.io/gitea/gitea" + packageLicenseURL 
= "https://opensource.org/license/mit" packageAuthor = "KN4CK3R" packageLicense = "MIT" ) func TestParsePackage(t *testing.T) { - createArchive := func(files map[string][]byte) *bytes.Reader { - var buf bytes.Buffer - zw := zip.NewWriter(&buf) - for filename, content := range files { - w, _ := zw.Create(filename) - w.Write(content) - } - zw.Close() - return bytes.NewReader(buf.Bytes()) - } - t.Run("MissingManifestFile", func(t *testing.T) { - data := createArchive(map[string][]byte{"dummy.txt": {}}) - - p, err := ParsePackage(data, data.Size(), nil) + data := test.WriteZipArchive(map[string]string{"dummy.txt": ""}) + p, err := ParsePackage(bytes.NewReader(data.Bytes()), int64(data.Len()), nil) assert.Nil(t, p) assert.ErrorIs(t, err, ErrMissingManifestFile) }) t.Run("ManifestFileTooLarge", func(t *testing.T) { - data := createArchive(map[string][]byte{ - "Package.swift": make([]byte, maxManifestFileSize+1), + data := test.WriteZipArchive(map[string]string{ + "Package.swift": strings.Repeat("a", maxManifestFileSize+1), }) - - p, err := ParsePackage(data, data.Size(), nil) + p, err := ParsePackage(bytes.NewReader(data.Bytes()), int64(data.Len()), nil) assert.Nil(t, p) assert.ErrorIs(t, err, ErrManifestFileTooLarge) }) @@ -56,12 +45,12 @@ func TestParsePackage(t *testing.T) { content1 := "// swift-tools-version:5.7\n//\n// Package.swift" content2 := "// swift-tools-version:5.6\n//\n// Package@swift-5.6.swift" - data := createArchive(map[string][]byte{ - "Package.swift": []byte(content1), - "Package@swift-5.5.swift": []byte(content2), + data := test.WriteZipArchive(map[string]string{ + "Package.swift": content1, + "Package@swift-5.5.swift": content2, }) - p, err := ParsePackage(data, data.Size(), nil) + p, err := ParsePackage(bytes.NewReader(data.Bytes()), int64(data.Len()), nil) assert.NotNil(t, p) assert.NoError(t, err) @@ -77,14 +66,13 @@ func TestParsePackage(t *testing.T) { }) t.Run("WithMetadata", func(t *testing.T) { - data := createArchive(map[string][]byte{ - 
"Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"), + data := test.WriteZipArchive(map[string]string{ + "Package.swift": "// swift-tools-version:5.7\n//\n// Package.swift", }) p, err := ParsePackage( - data, - data.Size(), - strings.NewReader(`{"name":"`+packageName+`","version":"`+packageVersion+`","description":"`+packageDescription+`","keywords":["swift","package"],"license":"`+packageLicense+`","codeRepository":"`+packageRepositoryURL+`","author":{"givenName":"`+packageAuthor+`"},"repositoryURLs":["`+packageRepositoryURL+`"]}`), + bytes.NewReader(data.Bytes()), int64(data.Len()), + strings.NewReader(`{"name":"`+packageName+`","version":"`+packageVersion+`","description":"`+packageDescription+`","keywords":["swift","package"],"license":"`+packageLicense+`","licenseURL":"`+packageLicenseURL+`","codeRepository":"`+packageRepositoryURL+`","author":{"givenName":"`+packageAuthor+`"},"repositoryURLs":["`+packageRepositoryURL+`"]}`), ) assert.NotNil(t, p) assert.NoError(t, err) @@ -97,6 +85,7 @@ func TestParsePackage(t *testing.T) { assert.Equal(t, packageDescription, p.Metadata.Description) assert.ElementsMatch(t, []string{"swift", "package"}, p.Metadata.Keywords) assert.Equal(t, packageLicense, p.Metadata.License) + assert.Equal(t, packageLicenseURL, p.Metadata.LicenseURL) assert.Equal(t, packageAuthor, p.Metadata.Author.Name) assert.Equal(t, packageAuthor, p.Metadata.Author.GivenName) assert.Equal(t, packageRepositoryURL, p.Metadata.RepositoryURL) @@ -104,14 +93,13 @@ func TestParsePackage(t *testing.T) { }) t.Run("WithExplicitNameField", func(t *testing.T) { - data := createArchive(map[string][]byte{ - "Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"), + data := test.WriteZipArchive(map[string]string{ + "Package.swift": "// swift-tools-version:5.7\n//\n// Package.swift", }) authorName := "John Doe" p, err := ParsePackage( - data, - data.Size(), + bytes.NewReader(data.Bytes()), int64(data.Len()), 
strings.NewReader(`{"name":"`+packageName+`","version":"`+packageVersion+`","description":"`+packageDescription+`","author":{"name":"`+authorName+`","givenName":"John","familyName":"Doe"}}`), ) assert.NotNil(t, p) @@ -122,15 +110,30 @@ func TestParsePackage(t *testing.T) { assert.Equal(t, "Doe", p.Metadata.Author.FamilyName) }) + t.Run("WithEmptyJSONMetadata", func(t *testing.T) { + data := test.WriteZipArchive(map[string]string{ + "Package.swift": "// swift-tools-version:5.7\n//\n// Package.swift", + }) + + p, err := ParsePackage( + bytes.NewReader(data.Bytes()), int64(data.Len()), + strings.NewReader(`{}`), + ) + assert.NotNil(t, p) + assert.NoError(t, err) + assert.NotNil(t, p.Metadata) + assert.Empty(t, p.Metadata.Author.Name) + assert.Empty(t, p.RepositoryURLs) + }) + t.Run("NameFieldGeneration", func(t *testing.T) { - data := createArchive(map[string][]byte{ - "Package.swift": []byte("// swift-tools-version:5.7\n//\n// Package.swift"), + data := test.WriteZipArchive(map[string]string{ + "Package.swift": "// swift-tools-version:5.7\n//\n// Package.swift", }) // Test with only individual name components - Name should be auto-generated p, err := ParsePackage( - data, - data.Size(), + bytes.NewReader(data.Bytes()), int64(data.Len()), strings.NewReader(`{"author":{"givenName":"John","middleName":"Q","familyName":"Doe"}}`), ) assert.NotNil(t, p) diff --git a/modules/packages/terraform/lock.go b/modules/packages/terraform/lock.go new file mode 100644 index 0000000000..3c326c04e9 --- /dev/null +++ b/modules/packages/terraform/lock.go @@ -0,0 +1,100 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package terraform + +import ( + "context" + "errors" + "io" + "time" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/util" + + "xorm.io/builder" +) + +const LockFile = "terraform.lock" + +// LockInfo is the metadata for a terraform lock. +type LockInfo struct { + ID string `json:"ID"` + Operation string `json:"Operation"` + Info string `json:"Info"` + Who string `json:"Who"` + Version string `json:"Version"` + Created time.Time `json:"Created"` + Path string `json:"Path"` +} + +func (l *LockInfo) IsLocked() bool { + return l.ID != "" +} + +func ParseLockInfo(r io.Reader) (*LockInfo, error) { + var lock LockInfo + err := json.NewDecoder(r).Decode(&lock) + if err != nil { + return nil, err + } + // ID is required. Rest is less important. + if lock.ID == "" { + return nil, util.NewInvalidArgumentErrorf("terraform lock is missing an ID") + } + return &lock, nil +} + +// GetLock returns the terraform lock for the given package. +// Lock is empty if no lock exists. +func GetLock(ctx context.Context, packageID int64) (LockInfo, error) { + var lock LockInfo + locks, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypePackage, packageID, LockFile) + if err != nil { + return lock, err + } + if len(locks) == 0 || locks[0].Value == "" { + return lock, nil + } + + err = json.Unmarshal([]byte(locks[0].Value), &lock) + return lock, err +} + +// SetLock sets the terraform lock for the given package. +func SetLock(ctx context.Context, packageID int64, lock *LockInfo) error { + jsonBytes, err := json.Marshal(lock) + if err != nil { + return err + } + + return updateLock(ctx, packageID, string(jsonBytes), builder.Eq{"value": ""}) +} + +// RemoveLock removes the terraform lock for the given package. 
+func RemoveLock(ctx context.Context, packageID int64) error { + return updateLock(ctx, packageID, "", builder.Neq{"value": ""}) +} + +func updateLock(ctx context.Context, refID int64, value string, cond builder.Cond) error { + pp := packages_model.PackageProperty{RefType: packages_model.PropertyTypePackage, RefID: refID, Name: LockFile} + ok, err := db.GetEngine(ctx).Get(&pp) + if err != nil { + return err + } + if ok { + n, err := db.GetEngine(ctx).Where("ref_type=? AND ref_id=? AND name=?", packages_model.PropertyTypePackage, refID, LockFile).And(cond).Cols("value").Update(&packages_model.PackageProperty{Value: value}) + if err != nil { + return err + } + if n == 0 { + return errors.New("failed to update lock state") + } + + return nil + } + _, err = packages_model.InsertProperty(ctx, packages_model.PropertyTypePackage, refID, LockFile, value) + return err +} diff --git a/modules/packages/terraform/state.go b/modules/packages/terraform/state.go new file mode 100644 index 0000000000..5763128699 --- /dev/null +++ b/modules/packages/terraform/state.go @@ -0,0 +1,38 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package terraform + +import ( + "io" + + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/util" +) + +// Note: this is a subset of the Terraform state file format as the full one has two forms. +// If needed, it can be expanded in the future. 
+ +type State struct { + Serial uint64 `json:"serial"` + Lineage string `json:"lineage"` +} + +// ParseState parses the required parts of Terraform state file +func ParseState(r io.Reader) (*State, error) { + var state State + err := json.NewDecoder(r).Decode(&state) + if err != nil { + return nil, err + } + // Serial starts at 1; 0 means it wasn't set in the state file + if state.Serial == 0 { + return nil, util.NewInvalidArgumentErrorf("state serial is missing") + } + // Lineage should always be set + if state.Lineage == "" { + return nil, util.NewInvalidArgumentErrorf("state lineage is missing") + } + + return &state, nil +} diff --git a/modules/public/manifest.go b/modules/public/manifest.go index 77e8959967..f807244c89 100644 --- a/modules/public/manifest.go +++ b/modules/public/manifest.go @@ -56,6 +56,8 @@ func parseManifest(data []byte) (map[string]string, map[string]string) { paths[key] = entry.File names[entry.File] = entry.Name // Map associated CSS files, e.g. "css/index.css" -> "css/index.B3zrQPqD.css" + // FIXME: INCORRECT-VITE-MANIFEST-PARSER: the logic is wrong, Vite manifest doesn't work this way + // It just happens to be correct for the current modules dependencies for _, css := range entry.CSS { cssKey := path.Dir(css) + "/" + entry.Name + path.Ext(css) paths[cssKey] = css @@ -125,27 +127,33 @@ func getManifestData() *manifestDataStruct { return data } -// getHashedPath resolves an unhashed asset path (origin path) to its content-hashed path from the frontend manifest. -// Example: getHashedPath("js/index.js") returns "js/index.C6Z2MRVQ.js" -// Falls back to returning the input path unchanged if the manifest is unavailable. -func getHashedPath(originPath string) string { - data := getManifestData() - if p, ok := data.paths[originPath]; ok { - return p - } - return originPath -} - // AssetURI returns the URI for a frontend asset. // It may return a relative path or a full URL depending on the StaticURLPrefix setting. 
// In Vite dev mode, known entry points are mapped to their source paths // so the reverse proxy serves them from the Vite dev server. // In production, it resolves the content-hashed path from the manifest. func AssetURI(originPath string) string { - if src := viteDevSourceURL(originPath); src != "" { - return src + if IsViteDevMode() { + if src := viteDevSourceURL(originPath); src != "" { + return src + } + // it should be caused by incorrect vite config + setting.PanicInDevOrTesting("Failed to locate local path for managed asset URI: %s", originPath) } - return setting.StaticURLPrefix + "/assets/" + getHashedPath(originPath) + + // Try to resolve an unhashed asset path (origin path) to its content-hashed path from the frontend manifest. + // Example: "js/index.js" -> "js/index.C6Z2MRVQ.js" + data := getManifestData() + assetPath := data.paths[originPath] + if assetPath == "" { + // it should be caused by either: "incorrect vite config" or "user's custom theme" + assetPath = originPath + if !setting.IsProd { + log.Warn("Failed to find managed asset URI for origin path: %s", originPath) + } + } + + return setting.StaticURLPrefix + "/assets/" + assetPath } // AssetNameFromHashedPath returns the asset entry name for a given hashed asset path. 
diff --git a/modules/public/manifest_test.go b/modules/public/manifest_test.go index 20a2232cf3..acfeaa6dbe 100644 --- a/modules/public/manifest_test.go +++ b/modules/public/manifest_test.go @@ -24,13 +24,6 @@ func TestViteManifest(t *testing.T) { "isEntry": true, "css": ["css/index.B3zrQPqD.css"] }, - "web_src/js/standalone/swagger.ts": { - "file": "js/swagger.SujiEmYM.js", - "name": "swagger", - "src": "web_src/js/standalone/swagger.ts", - "isEntry": true, - "css": ["css/swagger._-APWT_3.css"] - }, "web_src/css/themes/theme-gitea-dark.css": { "file": "css/theme-gitea-dark.CyAaQnn5.css", "name": "theme-gitea-dark", @@ -62,12 +55,10 @@ func TestViteManifest(t *testing.T) { // JS entries assert.Equal(t, "js/index.C6Z2MRVQ.js", paths["js/index.js"]) - assert.Equal(t, "js/swagger.SujiEmYM.js", paths["js/swagger.js"]) assert.Equal(t, "js/eventsource.sharedworker.Dug1twio.js", paths["js/eventsource.sharedworker.js"]) // Associated CSS from JS entries assert.Equal(t, "css/index.B3zrQPqD.css", paths["css/index.css"]) - assert.Equal(t, "css/swagger._-APWT_3.css", paths["css/swagger.css"]) // CSS-only entries assert.Equal(t, "css/theme-gitea-dark.CyAaQnn5.css", paths["css/theme-gitea-dark.css"]) @@ -78,8 +69,6 @@ func TestViteManifest(t *testing.T) { // Names: hashed path -> entry name assert.Equal(t, "index", names["js/index.C6Z2MRVQ.js"]) assert.Equal(t, "index", names["css/index.B3zrQPqD.css"]) - assert.Equal(t, "swagger", names["js/swagger.SujiEmYM.js"]) - assert.Equal(t, "swagger", names["css/swagger._-APWT_3.css"]) assert.Equal(t, "theme-gitea-dark", names["css/theme-gitea-dark.CyAaQnn5.css"]) assert.Equal(t, "eventsource.sharedworker", names["js/eventsource.sharedworker.Dug1twio.js"]) diff --git a/modules/public/public.go b/modules/public/public.go index 004aad5f3b..bb4721a48d 100644 --- a/modules/public/public.go +++ b/modules/public/public.go @@ -18,6 +18,8 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" 
"code.gitea.io/gitea/modules/util" + + "github.com/go-chi/cors" ) func CustomAssets() *assetfs.Layer { @@ -28,6 +30,15 @@ func AssetFS() *assetfs.LayeredFS { return assetfs.Layered(CustomAssets(), BuiltinAssets()) } +func AssetsCors() func(next http.Handler) http.Handler { + // static assets need to be served for external renders (sandboxed) + return cors.Handler(cors.Options{ + AllowedOrigins: []string{"*"}, + AllowedMethods: []string{"HEAD", "GET"}, + MaxAge: 3600 * 24, + }) +} + // FileHandlerFunc implements the static handler for serving files in "public" assets func FileHandlerFunc() http.HandlerFunc { assetFS := AssetFS() diff --git a/modules/public/vitedev.go b/modules/public/vitedev.go index 25bd28a826..7cfe692390 100644 --- a/modules/public/vitedev.go +++ b/modules/public/vitedev.go @@ -16,6 +16,7 @@ import ( "code.gitea.io/gitea/modules/httplib" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/routing" ) @@ -70,6 +71,9 @@ func getViteDevProxy() *httputil.ReverseProxy { return nil }, ErrorHandler: func(w http.ResponseWriter, r *http.Request, err error) { + if r.Context().Err() != nil { + return // request cancelled (e.g. 
client disconnected), silently ignore + } log.Error("Error proxying to Vite dev server: %v", err) http.Error(w, "Error proxying to Vite dev server: "+err.Error(), http.StatusBadGateway) }, @@ -136,34 +140,33 @@ func IsViteDevMode() bool { return isDev } -func viteDevSourceURL(name string) string { - if !IsViteDevMode() { - return "" - } - if strings.HasPrefix(name, "css/theme-") { - // Only redirect built-in themes to Vite source; custom themes are served from custom/public/assets/css/ - themeFile := strings.TrimPrefix(name, "css/") - srcPath := filepath.Join(setting.StaticRootPath, "web_src/css/themes", themeFile) - if _, err := os.Stat(srcPath); err == nil { - return setting.AppSubURL + "/web_src/css/themes/" + themeFile - } - return "" - } - if strings.HasPrefix(name, "css/") { - return setting.AppSubURL + "/web_src/" + name - } - if name == "js/eventsource.sharedworker.js" { - return setting.AppSubURL + "/web_src/js/features/eventsource.sharedworker.ts" - } - if name == "js/iife.js" { - return setting.AppSubURL + "/web_src/js/__vite_iife.js" - } - if name == "js/index.js" { - return setting.AppSubURL + "/web_src/js/index.ts" +func detectWebSrcPath(webSrcPath string) string { + localPath := util.FilePathJoinAbs(setting.StaticRootPath, "web_src", webSrcPath) + if _, err := os.Stat(localPath); err == nil { + return setting.AppSubURL + "/web_src/" + webSrcPath } return "" } +func viteDevSourceURL(name string) string { + if strings.HasPrefix(name, "css/theme-") { + // Only redirect built-in themes to Vite source; custom themes are served from custom/public/assets/css/ + themeFilePath := "css/themes/" + strings.TrimPrefix(name, "css/") + if srcPath := detectWebSrcPath(themeFilePath); srcPath != "" { + return srcPath + } + } + // try to map ".js" files to ".ts" files + pathPrefix, ok := strings.CutSuffix(name, ".js") + if ok { + if srcPath := detectWebSrcPath(pathPrefix + ".ts"); srcPath != "" { + return srcPath + } + } + // for all others that the names match + 
return detectWebSrcPath(name) +} + // isViteDevRequest returns true if the request should be proxied to the Vite dev server. // Ref: Vite source packages/vite/src/node/constants.ts and packages/vite/src/shared/constants.ts func isViteDevRequest(req *http.Request) bool { diff --git a/modules/session/redis.go b/modules/session/redis.go index 083869f4e1..f5cac8e636 100644 --- a/modules/session/redis.go +++ b/modules/session/redis.go @@ -1,18 +1,6 @@ // Copyright 2013 Beego Authors // Copyright 2014 The Macaron Authors // Copyright 2020 The Gitea Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"): you may -// not use this file except in compliance with the License. You may obtain -// a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations -// under the License. 
// SPDX-License-Identifier: Apache-2.0 package session diff --git a/modules/setting/lfs.go b/modules/setting/lfs.go index 7f2d0ae159..3ec21860ae 100644 --- a/modules/setting/lfs.go +++ b/modules/setting/lfs.go @@ -81,10 +81,7 @@ func loadLFSFrom(rootCfg ConfigProvider) error { jwtSecretBase64 := loadSecret(rootCfg.Section("server"), "LFS_JWT_SECRET_URI", "LFS_JWT_SECRET") LFS.JWTSecretBytes, err = generate.DecodeJwtSecretBase64(jwtSecretBase64) if err != nil { - LFS.JWTSecretBytes, jwtSecretBase64, err = generate.NewJwtSecretWithBase64() - if err != nil { - return fmt.Errorf("error generating JWT Secret for custom config: %v", err) - } + LFS.JWTSecretBytes, jwtSecretBase64 = generate.NewJwtSecretWithBase64() // Save secret saveCfg, err := rootCfg.PrepareSaving() diff --git a/modules/setting/mailer_test.go b/modules/setting/mailer_test.go index f281715973..7e1c988568 100644 --- a/modules/setting/mailer_test.go +++ b/modules/setting/mailer_test.go @@ -6,6 +6,8 @@ package setting import ( "testing" + "code.gitea.io/gitea/modules/test" + "github.com/stretchr/testify/assert" ) @@ -39,3 +41,30 @@ func Test_loadMailerFrom(t *testing.T) { }) } } + +func TestLoadSettingsForInstallMailServiceFlags(t *testing.T) { + defer test.MockVariableValue(&Service)() + defer test.MockVariableValue(&MailService)() + + cfg, err := NewConfigProviderFromData(` +[database] +DB_TYPE = postgres + +[mailer] +ENABLED = true +SMTP_ADDR = 127.0.0.1 +SMTP_PORT = 465 +FROM = noreply@example.com + +[service] +REGISTER_EMAIL_CONFIRM = true +ENABLE_NOTIFY_MAIL = true +`) + assert.NoError(t, err) + loadDBSetting(cfg) + loadServiceFrom(cfg) + loadMailsFrom(cfg) + + assert.True(t, Service.RegisterEmailConfirm) + assert.True(t, Service.EnableNotifyMail) +} diff --git a/modules/setting/oauth2.go b/modules/setting/oauth2.go index 2dfe77dda9..8e0210aa51 100644 --- a/modules/setting/oauth2.go +++ b/modules/setting/oauth2.go @@ -139,10 +139,7 @@ func loadOAuth2From(rootCfg ConfigProvider) { if InstallLock { 
jwtSecretBytes, err := generate.DecodeJwtSecretBase64(jwtSecretBase64) if err != nil { - jwtSecretBytes, jwtSecretBase64, err = generate.NewJwtSecretWithBase64() - if err != nil { - log.Fatal("error generating JWT secret: %v", err) - } + jwtSecretBytes, jwtSecretBase64 = generate.NewJwtSecretWithBase64() saveCfg, err := rootCfg.PrepareSaving() if err != nil { log.Fatal("save oauth2.JWT_SECRET failed: %v", err) @@ -162,10 +159,7 @@ var generalSigningSecret atomic.Pointer[[]byte] func GetGeneralTokenSigningSecret() []byte { old := generalSigningSecret.Load() if old == nil || len(*old) == 0 { - jwtSecret, _, err := generate.NewJwtSecretWithBase64() - if err != nil { - log.Fatal("Unable to generate general JWT secret: %v", err) - } + jwtSecret, _ := generate.NewJwtSecretWithBase64() if generalSigningSecret.CompareAndSwap(old, &jwtSecret) { return jwtSecret } diff --git a/modules/setting/packages.go b/modules/setting/packages.go index b598424064..38ee2ad55e 100644 --- a/modules/setting/packages.go +++ b/modules/setting/packages.go @@ -16,30 +16,31 @@ var ( Storage *Storage Enabled bool - LimitTotalOwnerCount int64 - LimitTotalOwnerSize int64 - LimitSizeAlpine int64 - LimitSizeArch int64 - LimitSizeCargo int64 - LimitSizeChef int64 - LimitSizeComposer int64 - LimitSizeConan int64 - LimitSizeConda int64 - LimitSizeContainer int64 - LimitSizeCran int64 - LimitSizeDebian int64 - LimitSizeGeneric int64 - LimitSizeGo int64 - LimitSizeHelm int64 - LimitSizeMaven int64 - LimitSizeNpm int64 - LimitSizeNuGet int64 - LimitSizePub int64 - LimitSizePyPI int64 - LimitSizeRpm int64 - LimitSizeRubyGems int64 - LimitSizeSwift int64 - LimitSizeVagrant int64 + LimitTotalOwnerCount int64 + LimitTotalOwnerSize int64 + LimitSizeAlpine int64 + LimitSizeArch int64 + LimitSizeCargo int64 + LimitSizeChef int64 + LimitSizeComposer int64 + LimitSizeConan int64 + LimitSizeConda int64 + LimitSizeContainer int64 + LimitSizeCran int64 + LimitSizeDebian int64 + LimitSizeGeneric int64 + LimitSizeGo 
int64 + LimitSizeHelm int64 + LimitSizeMaven int64 + LimitSizeNpm int64 + LimitSizeNuGet int64 + LimitSizePub int64 + LimitSizePyPI int64 + LimitSizeRpm int64 + LimitSizeRubyGems int64 + LimitSizeSwift int64 + LimitSizeTerraformState int64 + LimitSizeVagrant int64 DefaultRPMSignEnabled bool }{ @@ -86,6 +87,7 @@ func loadPackagesFrom(rootCfg ConfigProvider) (err error) { Packages.LimitSizeRpm = mustBytes(sec, "LIMIT_SIZE_RPM") Packages.LimitSizeRubyGems = mustBytes(sec, "LIMIT_SIZE_RUBYGEMS") Packages.LimitSizeSwift = mustBytes(sec, "LIMIT_SIZE_SWIFT") + Packages.LimitSizeTerraformState = mustBytes(sec, "LIMIT_SIZE_TERRAFORM_STATE") Packages.LimitSizeVagrant = mustBytes(sec, "LIMIT_SIZE_VAGRANT") Packages.DefaultRPMSignEnabled = sec.Key("DEFAULT_RPM_SIGN_ENABLED").MustBool(false) return nil diff --git a/modules/setting/security.go b/modules/setting/security.go index a1fd0bce2e..152bcffd9f 100644 --- a/modules/setting/security.go +++ b/modules/setting/security.go @@ -31,6 +31,7 @@ var ( ReverseProxyAuthEmail string ReverseProxyAuthFullName string ReverseProxyLimit int + ReverseProxyLogoutRedirect string ReverseProxyTrustedProxies []string MinPasswordLength int ImportLocalPaths bool @@ -124,6 +125,7 @@ func loadSecurityFrom(rootCfg ConfigProvider) { ReverseProxyAuthFullName = sec.Key("REVERSE_PROXY_AUTHENTICATION_FULL_NAME").MustString("X-WEBAUTH-FULLNAME") ReverseProxyLimit = sec.Key("REVERSE_PROXY_LIMIT").MustInt(1) + ReverseProxyLogoutRedirect = sec.Key("REVERSE_PROXY_LOGOUT_REDIRECT").String() ReverseProxyTrustedProxies = sec.Key("REVERSE_PROXY_TRUSTED_PROXIES").Strings(",") if len(ReverseProxyTrustedProxies) == 0 { ReverseProxyTrustedProxies = []string{"127.0.0.0/8", "::1/128"} diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 2918ef11a1..3c1ad14428 100644 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -201,7 +201,7 @@ func mustCurrentRunUserMatch(rootCfg ConfigProvider) { if HasInstallLock(rootCfg) { currentUser, 
match := IsRunUserMatchCurrentUser(RunUser) if !match { - log.Fatal("Expect user '%s' but current user is: %s", RunUser, currentUser) + log.Fatal("Expect user '%s' (RUN_USER in app.ini) but current user is: %s", RunUser, currentUser) } } } @@ -232,7 +232,7 @@ func LoadSettings() { func LoadSettingsForInstall() { loadDBSetting(CfgProvider) loadServiceFrom(CfgProvider) - loadMailerFrom(CfgProvider) + loadMailsFrom(CfgProvider) } var configuredPaths = make(map[string]string) diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 3a5eb5904f..4cd6269eaf 100644 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -6,12 +6,10 @@ package templates import ( "fmt" - "html" "html/template" "net/url" "strconv" "strings" - "sync" "time" "code.gitea.io/gitea/modules/base" @@ -25,22 +23,19 @@ import ( "code.gitea.io/gitea/services/gitdiff" ) -// NewFuncMap returns functions for injecting to templates -func NewFuncMap() template.FuncMap { +func newFuncMapWebPage() template.FuncMap { return map[string]any{ "DumpVar": dumpVar, "NIL": func() any { return nil }, // ----------------------------------------------------------------- // html/template related functions - "dict": dict, // it's lowercase because this name has been widely used. Our other functions should have uppercase names. - "Iif": iif, - "Eval": evalTokens, - "HTMLFormat": htmlFormat, - "QueryEscape": queryEscape, - "QueryBuild": QueryBuild, - "SanitizeHTML": SanitizeHTML, - "DotEscape": dotEscape, + "dict": dict, // it's lowercase because this name has been widely used. Our other functions should have uppercase names. 
+ "Iif": iif, + "Eval": evalTokens, + "HTMLFormat": htmlFormat, + "QueryEscape": queryEscape, + "QueryBuild": QueryBuild, "PathEscape": url.PathEscape, "PathEscapeSegments": util.PathEscapeSegments, @@ -61,6 +56,7 @@ func NewFuncMap() template.FuncMap { // ----------------------------------------------------------------- // time / number / format + "ShortSha": base.ShortSha, "FileSize": base.FileSize, "CountFmt": countFmt, "Sec2Hour": util.SecToHours, @@ -71,8 +67,8 @@ func NewFuncMap() template.FuncMap { return strconv.FormatInt(time.Since(startTime).Nanoseconds()/1e6, 10) + "ms" }, - "AssetURI": public.AssetURI, - "ScriptImport": scriptImport, + "AssetURI": public.AssetURI, + // ----------------------------------------------------------------- // setting "AppName": func() string { @@ -84,17 +80,10 @@ func NewFuncMap() template.FuncMap { "AssetUrlPrefix": func() string { return setting.StaticURLPrefix + "/assets" }, - "AppUrl": func() string { - // The usage of AppUrl should be avoided as much as possible, - // because the AppURL(ROOT_URL) may not match user's visiting site and the ROOT_URL in app.ini may be incorrect. - // And it's difficult for Gitea to guess absolute URL correctly with zero configuration, - // because Gitea doesn't know whether the scheme is HTTP or HTTPS unless the reverse proxy could tell Gitea. 
- return setting.AppURL - }, "AppVer": func() string { return setting.AppVer }, - "AppDomain": func() string { // documented in mail-templates.md + "AppDomain": func() string { // TODO: helm registry still uses it, need to use current request host in the future return setting.Domain }, "ShowFooterTemplateLoadTime": func() bool { @@ -143,7 +132,6 @@ func NewFuncMap() template.FuncMap { // ----------------------------------------------------------------- // misc (TODO: move them to MiscUtils to avoid bloating the main func map) - "ShortSha": base.ShortSha, "ActionContent2Commits": ActionContent2Commits, "IsMultilineCommitMessage": isMultilineCommitMessage, "CommentMustAsDiff": gitdiff.CommentMustAsDiff, @@ -154,9 +142,8 @@ func NewFuncMap() template.FuncMap { } } -// SanitizeHTML sanitizes the input by default sanitization rules. -func SanitizeHTML(s string) template.HTML { - return markup.Sanitize(s) +func sanitizeHTML(msg string) template.HTML { + return markup.Sanitize(msg) } func htmlFormat(s any, args ...any) template.HTML { @@ -177,11 +164,6 @@ func queryEscape(s string) template.URL { return template.URL(url.QueryEscape(s)) } -// dotEscape wraps a dots in names with ZWJ [U+200D] in order to prevent auto-linkers from detecting these as urls -func dotEscape(raw string) string { - return strings.ReplaceAll(raw, ".", "\u200d.\u200d") -} - // iif is an "inline-if", similar util.Iif[T] but templates need the non-generic version, // and it could be simply used as "{{iif expr trueVal}}" (omit the falseVal). 
func iif(condition any, vals ...any) any { @@ -305,30 +287,3 @@ func QueryBuild(a ...any) template.URL { } return template.URL(s) } - -var globalVars = sync.OnceValue(func() (ret struct { - scriptImportRemainingPart string -}, -) { - // add onerror handler to alert users when the script fails to load: - // * for end users: there were many users reporting that "UI doesn't work", actually they made mistakes in their config - // * for developers: help them to remember to run "make watch-frontend" to build frontend assets - // the message will be directly put in the onerror JS code's string - onScriptErrorPrompt := `Please make sure the asset files can be accessed.` - if !setting.IsProd { - onScriptErrorPrompt += `\n\nFor development, run: make watch-frontend.` - } - onScriptErrorJS := fmt.Sprintf(`alert('Failed to load asset file from ' + this.src + '. %s')`, onScriptErrorPrompt) - ret.scriptImportRemainingPart = `onerror="` + html.EscapeString(onScriptErrorJS) + `">` - return ret -}) - -func scriptImport(path string, typ ...string) template.HTML { - if len(typ) > 0 { - if typ[0] == "module" { - return template.HTML(`", + "Summary": "summary with details", + "Details": "details line 1\n details line 2\n details line 3", + }) + msgWithSummary, _ := ctx.RenderToHTML("base/alert_details", map[string]any{ + "Message": "message with summary ", + "Summary": "summary only", + }) + + ctx.Flash.ErrorMsg = string(msgWithDetails) + ctx.Flash.WarningMsg = string(msgWithSummary) + ctx.Flash.InfoMsg = "a long message with line break\nthe second line " + ctx.Flash.SuccessMsg = "single line message " + ctx.Data["Flash"] = ctx.Flash +} + +func prepareMockDataUnicodeEscape(ctx *context.Context) { + content := "// demo code\n" + content += "if accessLevel != \"user\u202E \u2066// Check if admin (invisible char)\u2069 \u2066\" { }\n" + content += "if O𝐾 { } // ambiguous char\n" + content += "if O𝐾 && accessLevel != \"user\u202E \u2066// ambiguous char + invisible char\u2069 \u2066\" { 
}\n" + content += "str := `\xef` // broken char\n" + content += "str := `\x00 \x19 \x7f` // control char\n" + + lineNums := []int{1, 2, 3, 4, 5, 6, 7, 8, 9} + + highlightLines := code.HighlightSearchResultCode("demo.go", "", lineNums, content) + escapeStatus := &charset.EscapeStatus{} + lineEscapeStatus := make([]*charset.EscapeStatus, len(highlightLines)) + for i, hl := range highlightLines { + lineEscapeStatus[i], hl.FormattedContent = charset.EscapeControlHTML(hl.FormattedContent, ctx.Locale) + escapeStatus = escapeStatus.Or(lineEscapeStatus[i]) + } + ctx.Data["HighlightLines"] = highlightLines + ctx.Data["EscapeStatus"] = escapeStatus + ctx.Data["LineEscapeStatus"] = lineEscapeStatus +} + func TmplCommon(ctx *context.Context) { prepareMockData(ctx) - if ctx.Req.Method == http.MethodPost { - _ = ctx.Req.ParseForm() - ctx.Flash.Info("form: "+ctx.Req.Method+" "+ctx.Req.RequestURI+"
"+ - "Form: "+ctx.Req.Form.Encode()+"
"+ + if ctx.Req.Method == http.MethodPost && ctx.FormBool("mock_response_delay") { + ctx.Flash.Info("form submit: "+ctx.Req.Method+" "+ctx.Req.RequestURI+"\n"+ + "Form: "+ctx.Req.Form.Encode()+"\n"+ "PostForm: "+ctx.Req.PostForm.Encode(), true, ) diff --git a/routers/web/feed/convert.go b/routers/web/feed/convert.go index a5c379e01a..5d208bb286 100644 --- a/routers/web/feed/convert.go +++ b/routers/web/feed/convert.go @@ -15,6 +15,7 @@ import ( activities_model "code.gitea.io/gitea/models/activities" "code.gitea.io/gitea/models/renderhelper" repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" @@ -237,7 +238,7 @@ func feedActionsToFeedItems(ctx *context.Context, actions activities_model.Actio } } if len(content) == 0 { - content = templates.SanitizeHTML(desc) + content = markup.Sanitize(desc) } items = append(items, &feeds.Item{ diff --git a/routers/web/misc/swagger.go b/routers/web/misc/swagger.go index 1ca347551c..4abd4f042d 100644 --- a/routers/web/misc/swagger.go +++ b/routers/web/misc/swagger.go @@ -6,15 +6,9 @@ package misc import ( "net/http" - "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/services/context" ) -// tplSwagger swagger page template -const tplSwagger templates.TplName = "swagger/ui" - -// Swagger render swagger-ui page with v1 json func Swagger(ctx *context.Context) { - ctx.Data["APIJSONVersion"] = "v1" - ctx.HTML(http.StatusOK, tplSwagger) + ctx.HTML(http.StatusOK, "swagger/openapi-viewer") } diff --git a/routers/web/org/home.go b/routers/web/org/home.go index e18a8de40f..262b001e6a 100644 --- a/routers/web/org/home.go +++ b/routers/web/org/home.go @@ -98,8 +98,10 @@ func home(ctx *context.Context, viewRepositories bool) { ctx.ServerError("FindOrgMembers", err) return } - ctx.Data["Members"] = members - ctx.Data["Teams"] = ctx.Org.Teams + + const orgOverviewTeamsLimit = 5 + 
ctx.Data["OrgOverviewMembers"] = members + ctx.Data["OrgOverviewTeams"] = ctx.Org.Teams[:min(len(ctx.Org.Teams), orgOverviewTeamsLimit)] ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull ctx.Data["ShowMemberAndTeamTab"] = ctx.Org.IsMember || len(members) > 0 diff --git a/routers/web/org/teams.go b/routers/web/org/teams.go index 1e22a67032..10803c9fbf 100644 --- a/routers/web/org/teams.go +++ b/routers/web/org/teams.go @@ -22,6 +22,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" shared_user "code.gitea.io/gitea/routers/web/shared/user" "code.gitea.io/gitea/services/context" @@ -54,13 +55,54 @@ func Teams(ctx *context.Context) { ctx.Data["Title"] = org.FullName ctx.Data["PageIsOrgTeams"] = true - for _, t := range ctx.Org.Teams { + keyword := ctx.FormTrim("q") + page := max(ctx.FormInt("page"), 1) + pagingNum := setting.UI.MembersPagingNum + + searchTeams := func() (teams []*org_model.Team, count int64, err error) { + if keyword == "" { + // fast path, use existing teams in context if no need to filter from database + count = int64(len(ctx.Org.Teams)) + start := (page - 1) * pagingNum + if start > len(ctx.Org.Teams) { + return nil, count, nil + } + end := min(start+pagingNum, len(ctx.Org.Teams)) + return ctx.Org.Teams[start:end], count, nil + } + + shouldSeeAllOrgTeams, err := context.UserShouldSeeAllOrgTeams(ctx) + if err != nil { + return nil, 0, err + } + opts := &org_model.SearchTeamOptions{ + OrgID: org.ID, + UserID: util.Iif(shouldSeeAllOrgTeams, 0, ctx.Doer.ID), + Keyword: keyword, + IncludeDesc: true, + ListOptions: db.ListOptions{Page: page, PageSize: pagingNum}, + } + return org_model.SearchTeam(ctx, opts) + } + + teams, count, err := searchTeams() + if err != nil { + ctx.ServerError("SearchTeam", err) + return + } + + for _, t := range teams { if err := t.LoadMembers(ctx); err != nil { 
ctx.ServerError("GetMembers", err) return } } - ctx.Data["Teams"] = ctx.Org.Teams + + ctx.Data["OrgListTeams"] = teams + ctx.Data["Keyword"] = keyword + pager := context.NewPagination(count, setting.UI.MembersPagingNum, page, 5) + pager.AddParamFromRequest(ctx.Req) + ctx.Data["Page"] = pager ctx.HTML(http.StatusOK, tplTeams) } @@ -213,7 +255,7 @@ func checkIsOrgMemberAndRedirect(ctx *context.Context, defaultRedirect string) { if isOrgMember, err := org_model.IsOrganizationMember(ctx, ctx.Org.Organization.ID, ctx.Doer.ID); err != nil { ctx.ServerError("IsOrganizationMember", err) return - } else if !isOrgMember { + } else if !isOrgMember && !ctx.Doer.IsAdmin { if ctx.Org.Organization.Visibility.IsPrivate() { defaultRedirect = setting.AppSubURL + "/" } else { diff --git a/routers/web/repo/actions/actions.go b/routers/web/repo/actions/actions.go index 988d2d0a99..644a53f28a 100644 --- a/routers/web/repo/actions/actions.go +++ b/routers/web/repo/actions/actions.go @@ -151,6 +151,11 @@ func prepareWorkflowTemplate(ctx *context.Context, commit *git.Commit) (workflow workflows = append(workflows, workflow) continue } + if err := actions.ValidateWorkflowContent(content); err != nil { + workflow.ErrMsg = ctx.Locale.TrString("actions.runs.invalid_workflow_helper", err.Error()) + workflows = append(workflows, workflow) + continue + } workflow.Workflow = wf // The workflow must contain at least one job without "needs". Otherwise, a deadlock will occur and no jobs will be able to run. 
hasJobWithoutNeeds := false @@ -315,6 +320,10 @@ func prepareWorkflowList(ctx *context.Context, workflows []WorkflowInfo) { if !job.Status.IsWaiting() { continue } + if err := actions.ValidateWorkflowContent(job.WorkflowPayload); err != nil { + runErrors[run.ID] = ctx.Locale.TrString("actions.runs.invalid_workflow_helper", err.Error()) + break + } hasOnlineRunner := false for _, runner := range runners { if !runner.IsDisabled && runner.CanMatchLabels(job.RunsOn) { diff --git a/routers/web/repo/actions/view.go b/routers/web/repo/actions/view.go index 6b3e95f3da..f92df685fd 100644 --- a/routers/web/repo/actions/view.go +++ b/routers/web/repo/actions/view.go @@ -68,9 +68,138 @@ func getCurrentRunByPathParam(ctx *context_module.Context) (run *actions_model.A return run } +// resolveCurrentRunForView resolves GET Actions page URLs and supports both ID-based and legacy index-based forms. +// +// By default, run summary pages (/actions/runs/{run}) use a best-effort ID-first fallback, +// and job pages (/actions/runs/{run}/jobs/{job}) try to confirm an ID-based URL first and prefer the ID-based interpretation when both are valid. +// +// `by_id=1` param explicitly forces the ID-based path, and `by_index=1` explicitly forces the legacy index-based path. +// If both are present, `by_id` takes precedence. +func resolveCurrentRunForView(ctx *context_module.Context) *actions_model.ActionRun { + // `by_id` explicitly requests ID-based resolution, so the request skips the legacy index-based disambiguation logic and resolves the run by ID directly. + // It takes precedence over `by_index` when both query parameters are present. 
+ if ctx.PathParam("run") == "latest" || ctx.FormBool("by_id") { + return getCurrentRunByPathParam(ctx) + } + + runNum := ctx.PathParamInt64("run") + if runNum <= 0 { + ctx.NotFound(nil) + return nil + } + + byIndex := ctx.FormBool("by_index") + + if ctx.PathParam("job") == "" { + // The URL does not contain a {job} path parameter, so it cannot use the + // job-specific rules to disambiguate ID-based URLs from legacy index-based URLs. + // Because of that, this path is handled with a best-effort ID-first fallback by default. + // + // When the same repository contains: + // - a run whose ID matches runNum, and + // - a different run whose repo-scope index also matches runNum + // this path prefers the ID match and may show a different run than the old legacy URL originally intended, + // unless `by_index=1` explicitly forces the legacy index-based interpretation. + + if !byIndex { + runByID, err := actions_model.GetRunByRepoAndID(ctx, ctx.Repo.Repository.ID, runNum) + if err == nil { + return runByID + } + if !errors.Is(err, util.ErrNotExist) { + ctx.ServerError("GetRun:"+ctx.PathParam("run"), err) + return nil + } + } + + runByIndex, err := actions_model.GetRunByRepoAndIndex(ctx, ctx.Repo.Repository.ID, runNum) + if err == nil { + ctx.Redirect(fmt.Sprintf("%s/actions/runs/%d", ctx.Repo.RepoLink, runByIndex.ID), http.StatusFound) + return nil + } + if !errors.Is(err, util.ErrNotExist) { + ctx.ServerError("GetRunByRepoAndIndex", err) + return nil + } + ctx.NotFound(nil) + return nil + } + + jobNum := ctx.PathParamInt64("job") + if jobNum < 0 { + ctx.NotFound(nil) + return nil + } + + // A job index should not be larger than MaxJobNumPerRun, so larger values can skip the legacy index-based path and be treated as job IDs directly. 
+ if !byIndex && jobNum >= actions_model.MaxJobNumPerRun { + return getCurrentRunByPathParam(ctx) + } + + var runByID, runByIndex *actions_model.ActionRun + var targetJobByIndex *actions_model.ActionRunJob + + // Each run must have at least one job, so a valid job ID in the same run cannot be smaller than the run ID. + if !byIndex && jobNum >= runNum { + // Probe the repo-scoped job ID first and only accept it when the job exists and belongs to the same runNum. + job, err := actions_model.GetRunJobByRepoAndID(ctx, ctx.Repo.Repository.ID, jobNum) + if err != nil && !errors.Is(err, util.ErrNotExist) { + ctx.ServerError("GetRunJobByRepoAndID", err) + return nil + } + if job != nil { + if err := job.LoadRun(ctx); err != nil { + ctx.ServerError("LoadRun", err) + return nil + } + if job.Run.ID == runNum { + runByID = job.Run + } + } + } + + // Try to resolve the request as a legacy run-index/job-index URL. + { + run, err := actions_model.GetRunByRepoAndIndex(ctx, ctx.Repo.Repository.ID, runNum) + if err != nil && !errors.Is(err, util.ErrNotExist) { + ctx.ServerError("GetRunByRepoAndIndex", err) + return nil + } + if run != nil { + jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID) + if err != nil { + ctx.ServerError("GetRunJobsByRunID", err) + return nil + } + if jobNum < int64(len(jobs)) { + runByIndex = run + targetJobByIndex = jobs[jobNum] + } + } + } + + if runByID == nil && runByIndex == nil { + ctx.NotFound(nil) + return nil + } + + if runByID != nil && runByIndex == nil { + return runByID + } + + if runByID == nil && runByIndex != nil { + ctx.Redirect(fmt.Sprintf("%s/actions/runs/%d/jobs/%d", ctx.Repo.RepoLink, runByIndex.ID, targetJobByIndex.ID), http.StatusFound) + return nil + } + + // Reaching this point means both ID-based and legacy index-based interpretations are valid. Prefer the ID-based interpretation by default. + // Use `by_index=1` query parameter to access the legacy index-based interpretation when necessary. 
+ return runByID +} + func View(ctx *context_module.Context) { ctx.Data["PageIsActions"] = true - run := getCurrentRunByPathParam(ctx) + run := resolveCurrentRunForView(ctx) if ctx.Written() { return } diff --git a/routers/web/repo/attachment.go b/routers/web/repo/attachment.go index 8b35f52ed6..bb2002521c 100644 --- a/routers/web/repo/attachment.go +++ b/routers/web/repo/attachment.go @@ -23,16 +23,16 @@ import ( // UploadIssueAttachment response for Issue/PR attachments func UploadIssueAttachment(ctx *context.Context) { - uploadAttachment(ctx, ctx.Repo.Repository.ID, setting.Attachment.AllowedTypes) + uploadAttachment(ctx, ctx.Repo.Repository.ID, attachment.UploadAttachmentForIssue) } // UploadReleaseAttachment response for uploading release attachments func UploadReleaseAttachment(ctx *context.Context) { - uploadAttachment(ctx, ctx.Repo.Repository.ID, setting.Repository.Release.AllowedTypes) + uploadAttachment(ctx, ctx.Repo.Repository.ID, attachment.UploadAttachmentForRelease) } // UploadAttachment response for uploading attachments -func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) { +func uploadAttachment(ctx *context.Context, repoID int64, uploadFunc attachment.UploadAttachmentFunc) { if !setting.Attachment.Enabled { ctx.HTTPError(http.StatusNotFound, "attachment is not enabled") return @@ -46,7 +46,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) { defer file.Close() uploaderFile := attachment.NewLimitedUploaderKnownSize(file, header.Size) - attach, err := attachment.UploadAttachmentReleaseSizeLimit(ctx, uploaderFile, allowedTypes, &repo_model.Attachment{ + attach, err := uploadFunc(ctx, uploaderFile, &repo_model.Attachment{ Name: header.Filename, UploaderID: ctx.Doer.ID, RepoID: repoID, @@ -56,7 +56,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) { ctx.HTTPError(http.StatusBadRequest, err.Error()) return } - ctx.ServerError("UploadAttachmentReleaseSizeLimit", 
err) + ctx.ServerError("uploadAttachment(uploadFunc)", err) return } @@ -119,7 +119,7 @@ func DeleteAttachment(ctx *context.Context) { }) } -// GetAttachment serve attachments with the given UUID +// ServeAttachment serve attachments with the given UUID func ServeAttachment(ctx *context.Context, uuid string) { attach, err := repo_model.GetAttachmentByUUID(ctx, uuid) if err != nil { diff --git a/routers/web/repo/branch.go b/routers/web/repo/branch.go index 5e5cfec5c2..c566e465e9 100644 --- a/routers/web/repo/branch.go +++ b/routers/web/repo/branch.go @@ -231,7 +231,7 @@ func CreateBranch(ctx *context.Context) { flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ "Message": ctx.Tr("repo.editor.push_rejected"), "Summary": ctx.Tr("repo.editor.push_rejected_summary"), - "Details": utils.SanitizeFlashErrorString(e.Message), + "Details": utils.EscapeFlashErrorString(e.Message), }) if err != nil { ctx.ServerError("UpdatePullRequest.HTMLString", err) diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go index 168d959494..34e588b141 100644 --- a/routers/web/repo/commit.go +++ b/routers/web/repo/commit.go @@ -410,7 +410,8 @@ func Diff(ctx *context.Context) { ctx.Data["NoteCommit"] = note.Commit ctx.Data["NoteAuthor"] = user_model.ValidateCommitWithEmail(ctx, note.Commit) rctx := renderhelper.NewRenderContextRepoComment(ctx, ctx.Repo.Repository, renderhelper.RepoCommentOptions{CurrentRefPath: path.Join("commit", util.PathEscapeSegments(commitID))}) - ctx.Data["NoteRendered"], err = markup.PostProcessCommitMessage(rctx, template.HTMLEscapeString(string(charset.ToUTF8WithFallback(note.Message, charset.ConvertOpts{})))) + htmlMessage := template.HTML(template.HTMLEscapeString(string(charset.ToUTF8WithFallback(note.Message, charset.ConvertOpts{})))) + ctx.Data["NoteRendered"], err = markup.PostProcessCommitMessage(rctx, htmlMessage) if err != nil { ctx.ServerError("PostProcessCommitMessage", err) return diff --git a/routers/web/repo/editor_error.go 
b/routers/web/repo/editor_error.go index e1473a34b3..f23b2738e5 100644 --- a/routers/web/repo/editor_error.go +++ b/routers/web/repo/editor_error.go @@ -27,13 +27,13 @@ func editorHandleFileOperationErrorRender(ctx *context_service.Context, message, flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ "Message": message, "Summary": summary, - "Details": utils.SanitizeFlashErrorString(details), + "Details": utils.EscapeFlashErrorString(details), }) if err == nil { ctx.JSONError(flashError) } else { - log.Error("RenderToHTML: %v", err) - ctx.JSONError(message + "\n" + summary + "\n" + utils.SanitizeFlashErrorString(details)) + log.Error("RenderToHTML(%q, %q, %q), error: %v", message, summary, details, err) + ctx.JSONError("Unable to render error details, see server logs") // it should never happen } } diff --git a/routers/web/repo/issue_comment.go b/routers/web/repo/issue_comment.go index 7f8cc23a3f..860dcd7442 100644 --- a/routers/web/repo/issue_comment.go +++ b/routers/web/repo/issue_comment.go @@ -21,6 +21,7 @@ import ( repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/convert" @@ -31,31 +32,22 @@ import ( // NewComment create a comment for issue func NewComment(ctx *context.Context) { - form := web.GetForm(ctx).(*forms.CreateCommentForm) issue := GetActionIssue(ctx) - if ctx.Written() { + if issue == nil { return } - if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) { - if log.IsTrace() { - if ctx.IsSigned { - issueType := "issues" - if issue.IsPull { - issueType = "pulls" - } - log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+ - "User in Repo has Permissions: %-+v", - ctx.Doer, - issue.PosterID, - issueType, - ctx.Repo.Repository, 
- ctx.Repo.Permission) - } else { - log.Trace("Permission Denied: Not logged in") - } - } + if ctx.HasError() { + ctx.JSONError(ctx.GetErrMsg()) + return + } + form := web.GetForm(ctx).(*forms.CreateCommentForm) + issueType := util.Iif(issue.IsPull, "pulls", "issues") + + if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) { + log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+ + "User in Repo has Permissions: %-+v", ctx.Doer, issue.PosterID, issueType, ctx.Repo.Repository, ctx.Repo.Permission) ctx.HTTPError(http.StatusForbidden) return } @@ -65,151 +57,134 @@ func NewComment(ctx *context.Context) { return } - var attachments []string - if setting.Attachment.Enabled { - attachments = form.Files - } + redirect := fmt.Sprintf("%s/%s/%d", ctx.Repo.RepoLink, issueType, issue.Index) + attachments := util.Iif(setting.Attachment.Enabled, form.Files, nil) - if ctx.HasError() { - ctx.JSONError(ctx.GetErrMsg()) - return - } - - var comment *issues_model.Comment - defer func() { - // Check if issue admin/poster changes the status of issue. - if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) && - (form.Status == "reopen" || form.Status == "close") && - !(issue.IsPull && issue.PullRequest.HasMerged) { - // Duplication and conflict check should apply to reopen pull request. - var pr *issues_model.PullRequest - - if form.Status == "reopen" && issue.IsPull { - pull := issue.PullRequest - var err error - pr, err = issues_model.GetUnmergedPullRequest(ctx, pull.HeadRepoID, pull.BaseRepoID, pull.HeadBranch, pull.BaseBranch, pull.Flow) - if err != nil { - if !issues_model.IsErrPullRequestNotExist(err) { - ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked")) - return - } - } - - // Regenerate patch and test conflict. 
- if pr == nil { - issue.PullRequest.HeadCommitID = "" - pull_service.StartPullRequestCheckImmediately(ctx, issue.PullRequest) - } - - // check whether the ref of PR in base repo is consistent with the head commit of head branch in the head repo - // get head commit of PR - if pull.Flow == issues_model.PullRequestFlowGithub { - prHeadRef := pull.GetGitHeadRefName() - if err := pull.LoadBaseRepo(ctx); err != nil { - ctx.ServerError("Unable to load base repo", err) - return - } - prHeadCommitID, err := gitrepo.GetFullCommitID(ctx, pull.BaseRepo, prHeadRef) - if err != nil { - ctx.ServerError("Get head commit Id of pr fail", err) - return - } - - // get head commit of branch in the head repo - if err := pull.LoadHeadRepo(ctx); err != nil { - ctx.ServerError("Unable to load head repo", err) - return - } - if exist, _ := git_model.IsBranchExist(ctx, pull.HeadRepo.ID, pull.BaseBranch); !exist { - // todo localize - ctx.JSONError("The origin branch is delete, cannot reopen.") - return - } - headBranchRef := git.RefNameFromBranch(pull.HeadBranch) - headBranchCommitID, err := gitrepo.GetFullCommitID(ctx, pull.HeadRepo, headBranchRef.String()) - if err != nil { - ctx.ServerError("Get head commit Id of head branch fail", err) - return - } - - err = pull.LoadIssue(ctx) - if err != nil { - ctx.ServerError("load the issue of pull request error", err) - return - } - - if prHeadCommitID != headBranchCommitID { - // force push to base repo - err := gitrepo.Push(ctx, pull.HeadRepo, pull.BaseRepo, git.PushOptions{ - Branch: pull.HeadBranch + ":" + prHeadRef, - Force: true, - Env: repo_module.InternalPushingEnvironment(pull.Issue.Poster, pull.BaseRepo), - }) - if err != nil { - ctx.ServerError("force push error", err) - return - } - } - } - } - - if pr != nil { - ctx.Flash.Info(ctx.Tr("repo.pulls.open_unmerged_pull_exists", pr.Index)) + // allow empty content if there are attachments + if form.Content != "" || len(attachments) > 0 { + comment, err := 
issue_service.CreateIssueComment(ctx, ctx.Doer, ctx.Repo.Repository, issue, form.Content, attachments) + if err != nil { + if errors.Is(err, user_model.ErrBlockedUser) { + ctx.JSONError(ctx.Tr("repo.issues.comment.blocked_user")) } else { - if form.Status == "close" && !issue.IsClosed { - if err := issue_service.CloseIssue(ctx, issue, ctx.Doer, ""); err != nil { - log.Error("CloseIssue: %v", err) - if issues_model.IsErrDependenciesLeft(err) { - if issue.IsPull { - ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked")) - } else { - ctx.JSONError(ctx.Tr("repo.issues.dependency.issue_close_blocked")) - } - return - } - } else { - if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil { - ctx.ServerError("stopTimerIfAvailable", err) - return - } - log.Trace("Issue [%d] status changed to closed: %v", issue.ID, issue.IsClosed) - } - } else if form.Status == "reopen" && issue.IsClosed { - if err := issue_service.ReopenIssue(ctx, issue, ctx.Doer, ""); err != nil { - log.Error("ReopenIssue: %v", err) + ctx.ServerError("CreateIssueComment", err) + } + return + } + // redirect to the comment's hashtag + redirect += "#" + comment.HashTag() + } else if form.Status == "" { + // if no status change (close, reopen), it is a plain comment, and content is required + // "approve/reject" are handled differently in SubmitReview + ctx.JSONError(ctx.Tr("repo.issues.comment_no_content")) + return + } + + // ATTENTION: From now on, do not use ctx.JSONError, don't return on user error, because the comment has been created. + // Always use ctx.Flash.Xxx and then redirect, then the message will be displayed + // TODO: need further refactoring to the code below + + // Check if doer can change the status of issue (close, reopen). 
+ if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) && + (form.Status == "reopen" || form.Status == "close") && + !(issue.IsPull && issue.PullRequest.HasMerged) { + // Duplication and conflict check should apply to reopen pull request. + var branchOtherUnmergedPR *issues_model.PullRequest + var err error + if form.Status == "reopen" && issue.IsPull { + pull := issue.PullRequest + branchOtherUnmergedPR, err = issues_model.GetUnmergedPullRequest(ctx, pull.HeadRepoID, pull.BaseRepoID, pull.HeadBranch, pull.BaseBranch, pull.Flow) + if err != nil { + if !issues_model.IsErrPullRequestNotExist(err) { + ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked")) + } + } + + if branchOtherUnmergedPR != nil { + ctx.Flash.Error(ctx.Tr("repo.pulls.open_unmerged_pull_exists", branchOtherUnmergedPR.Index)) + } else { + // Regenerate patch and test conflict. + issue.PullRequest.HeadCommitID = "" + pull_service.StartPullRequestCheckImmediately(ctx, issue.PullRequest) + } + + // check whether the ref of PR in base repo is consistent with the head commit of head branch in the head repo + // get head commit of PR + if branchOtherUnmergedPR != nil && pull.Flow == issues_model.PullRequestFlowGithub { + prHeadRef := pull.GetGitHeadRefName() + if err := pull.LoadBaseRepo(ctx); err != nil { + ctx.ServerError("Unable to load base repo", err) + return + } + prHeadCommitID, err := gitrepo.GetFullCommitID(ctx, pull.BaseRepo, prHeadRef) + if err != nil { + ctx.ServerError("Get head commit Id of pr fail", err) + return + } + + // get head commit of branch in the head repo + if err := pull.LoadHeadRepo(ctx); err != nil { + ctx.ServerError("Unable to load head repo", err) + return + } + if exist, _ := git_model.IsBranchExist(ctx, pull.HeadRepo.ID, pull.BaseBranch); !exist { + ctx.Flash.Error("The origin branch is delete, cannot reopen.") + return + } + headBranchRef := git.RefNameFromBranch(pull.HeadBranch) + headBranchCommitID, err := 
gitrepo.GetFullCommitID(ctx, pull.HeadRepo, headBranchRef.String()) + if err != nil { + ctx.ServerError("Get head commit Id of head branch fail", err) + return + } + + err = pull.LoadIssue(ctx) + if err != nil { + ctx.ServerError("load the issue of pull request error", err) + return + } + + if prHeadCommitID != headBranchCommitID { + // force push to base repo + err := gitrepo.Push(ctx, pull.HeadRepo, pull.BaseRepo, git.PushOptions{ + Branch: pull.HeadBranch + ":" + prHeadRef, + Force: true, + Env: repo_module.InternalPushingEnvironment(pull.Issue.Poster, pull.BaseRepo), + }) + if err != nil { + ctx.ServerError("force push error", err) + return } } } } - // Redirect to comment hashtag if there is any actual content. - typeName := "issues" - if issue.IsPull { - typeName = "pulls" + if form.Status == "close" && !issue.IsClosed { + if err := issue_service.CloseIssue(ctx, issue, ctx.Doer, ""); err != nil { + log.Error("CloseIssue: %v", err) + if issues_model.IsErrDependenciesLeft(err) { + if issue.IsPull { + ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked")) + } else { + ctx.Flash.Error(ctx.Tr("repo.issues.dependency.issue_close_blocked")) + } + } + } else { + if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil { + ctx.ServerError("stopTimerIfAvailable", err) + return + } + log.Trace("Issue [%d] status changed to closed: %v", issue.ID, issue.IsClosed) + } + } else if form.Status == "reopen" && issue.IsClosed && branchOtherUnmergedPR == nil { + if err := issue_service.ReopenIssue(ctx, issue, ctx.Doer, ""); err != nil { + log.Error("ReopenIssue: %v", err) + ctx.Flash.Error("Unable to reopen.") + } } - if comment != nil { - ctx.JSONRedirect(fmt.Sprintf("%s/%s/%d#%s", ctx.Repo.RepoLink, typeName, issue.Index, comment.HashTag())) - } else { - ctx.JSONRedirect(fmt.Sprintf("%s/%s/%d", ctx.Repo.RepoLink, typeName, issue.Index)) - } - }() + } // end if: handle close or reopen - // Fix #321: Allow empty comments, as long as we have attachments. 
- if len(form.Content) == 0 && len(attachments) == 0 { - return - } - - comment, err := issue_service.CreateIssueComment(ctx, ctx.Doer, ctx.Repo.Repository, issue, form.Content, attachments) - if err != nil { - if errors.Is(err, user_model.ErrBlockedUser) { - ctx.JSONError(ctx.Tr("repo.issues.comment.blocked_user")) - } else { - ctx.ServerError("CreateIssueComment", err) - } - return - } - - log.Trace("Comment created: %d/%d/%d", ctx.Repo.Repository.ID, issue.ID, comment.ID) + ctx.JSONRedirect(redirect) } // UpdateCommentContent change comment of issue's content diff --git a/routers/web/repo/issue_new.go b/routers/web/repo/issue_new.go index 98fb842ddf..592d902ba8 100644 --- a/routers/web/repo/issue_new.go +++ b/routers/web/repo/issue_new.go @@ -170,7 +170,7 @@ func renderErrorOfTemplates(ctx *context.Context, errs map[string]error) templat flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ "Message": ctx.Tr("repo.issues.choose.ignore_invalid_templates"), "Summary": ctx.Tr("repo.issues.choose.invalid_templates", len(errs)), - "Details": utils.SanitizeFlashErrorString(strings.Join(lines, "\n")), + "Details": utils.EscapeFlashErrorString(strings.Join(lines, "\n")), }) if err != nil { log.Debug("render flash error: %v", err) diff --git a/routers/web/repo/issue_view.go b/routers/web/repo/issue_view.go index 250a54fc24..f678f83878 100644 --- a/routers/web/repo/issue_view.go +++ b/routers/web/repo/issue_view.go @@ -29,7 +29,6 @@ import ( "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/templates/vars" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/middleware" @@ -781,14 +780,14 @@ func prepareIssueViewCommentsAndSidebarParticipants(ctx *context.Context, issue } else if comment.Type == issues_model.CommentTypeAddTimeManual || comment.Type == issues_model.CommentTypeStopTracking || comment.Type 
== issues_model.CommentTypeDeleteTimeManual { - // drop error since times could be pruned from DB.. + // drop error since times could be pruned from DB _ = comment.LoadTime(ctx) if comment.Content != "" { // Content before v1.21 did store the formatted string instead of seconds, // so "|" is used as delimiter to mark the new format if comment.Content[0] != '|' { // handle old time comments that have formatted text stored - comment.RenderedContent = templates.SanitizeHTML(comment.Content) + comment.RenderedContent = markup.Sanitize(comment.Content) comment.Content = "" } else { // else it's just a duration in seconds to pass on to the frontend diff --git a/routers/web/repo/issue_watch.go b/routers/web/repo/issue_watch.go index dfa3491786..19d723c0ea 100644 --- a/routers/web/repo/issue_watch.go +++ b/routers/web/repo/issue_watch.go @@ -5,7 +5,6 @@ package repo import ( "net/http" - "strconv" issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/modules/log" @@ -46,12 +45,7 @@ func IssueWatch(ctx *context.Context) { return } - watch, err := strconv.ParseBool(ctx.Req.PostFormValue("watch")) - if err != nil { - ctx.ServerError("watch is not bool", err) - return - } - + watch := ctx.FormBool("watch") if err := issues_model.CreateOrUpdateIssueWatch(ctx, ctx.Doer.ID, issue.ID, watch); err != nil { ctx.ServerError("CreateOrUpdateIssueWatch", err) return diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go index e312fc9d2a..efcdaac674 100644 --- a/routers/web/repo/pull.go +++ b/routers/web/repo/pull.go @@ -1042,7 +1042,7 @@ func UpdatePullRequest(ctx *context.Context) { flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ "Message": ctx.Tr("repo.pulls.merge_conflict"), "Summary": ctx.Tr("repo.pulls.merge_conflict_summary"), - "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "
" + utils.SanitizeFlashErrorString(conflictError.StdOut), + "Details": utils.EscapeFlashErrorString(conflictError.StdErr) + "\n" + utils.EscapeFlashErrorString(conflictError.StdOut), }) if err != nil { ctx.ServerError("UpdatePullRequest.HTMLString", err) @@ -1054,9 +1054,9 @@ func UpdatePullRequest(ctx *context.Context) { } else if pull_service.IsErrRebaseConflicts(err) { conflictError := err.(pull_service.ErrRebaseConflicts) flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ - "Message": ctx.Tr("repo.pulls.rebase_conflict", utils.SanitizeFlashErrorString(conflictError.CommitSHA)), + "Message": ctx.Tr("repo.pulls.rebase_conflict", utils.EscapeFlashErrorString(conflictError.CommitSHA)), "Summary": ctx.Tr("repo.pulls.rebase_conflict_summary"), - "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "
" + utils.SanitizeFlashErrorString(conflictError.StdOut), + "Details": utils.EscapeFlashErrorString(conflictError.StdErr) + "\n" + utils.EscapeFlashErrorString(conflictError.StdOut), }) if err != nil { ctx.ServerError("UpdatePullRequest.HTMLString", err) @@ -1191,7 +1191,7 @@ func MergePullRequest(ctx *context.Context) { flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ "Message": ctx.Tr("repo.editor.merge_conflict"), "Summary": ctx.Tr("repo.editor.merge_conflict_summary"), - "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "
" + utils.SanitizeFlashErrorString(conflictError.StdOut), + "Details": utils.EscapeFlashErrorString(conflictError.StdErr) + "\n" + utils.EscapeFlashErrorString(conflictError.StdOut), }) if err != nil { ctx.ServerError("MergePullRequest.HTMLString", err) @@ -1202,9 +1202,9 @@ func MergePullRequest(ctx *context.Context) { } else if pull_service.IsErrRebaseConflicts(err) { conflictError := err.(pull_service.ErrRebaseConflicts) flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ - "Message": ctx.Tr("repo.pulls.rebase_conflict", utils.SanitizeFlashErrorString(conflictError.CommitSHA)), + "Message": ctx.Tr("repo.pulls.rebase_conflict", utils.EscapeFlashErrorString(conflictError.CommitSHA)), "Summary": ctx.Tr("repo.pulls.rebase_conflict_summary"), - "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "
" + utils.SanitizeFlashErrorString(conflictError.StdOut), + "Details": utils.EscapeFlashErrorString(conflictError.StdErr) + "\n" + utils.EscapeFlashErrorString(conflictError.StdOut), }) if err != nil { ctx.ServerError("MergePullRequest.HTMLString", err) @@ -1234,7 +1234,7 @@ func MergePullRequest(ctx *context.Context) { flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ "Message": ctx.Tr("repo.pulls.push_rejected"), "Summary": ctx.Tr("repo.pulls.push_rejected_summary"), - "Details": utils.SanitizeFlashErrorString(pushrejErr.Message), + "Details": utils.EscapeFlashErrorString(pushrejErr.Message), }) if err != nil { ctx.ServerError("MergePullRequest.HTMLString", err) @@ -1454,7 +1454,7 @@ func CompareAndPullRequestPost(ctx *context.Context) { flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{ "Message": ctx.Tr("repo.pulls.push_rejected"), "Summary": ctx.Tr("repo.pulls.push_rejected_summary"), - "Details": utils.SanitizeFlashErrorString(pushrejErr.Message), + "Details": utils.EscapeFlashErrorString(pushrejErr.Message), }) if err != nil { ctx.ServerError("CompareAndPullRequest.HTMLString", err) diff --git a/routers/web/repo/release.go b/routers/web/repo/release.go index 005106a32d..1372022ae4 100644 --- a/routers/web/repo/release.go +++ b/routers/web/repo/release.go @@ -451,6 +451,7 @@ func NewReleasePost(ctx *context.Context) { return } + form.Target = util.IfZero(form.Target, ctx.Repo.Repository.DefaultBranch) if exist, _ := git_model.IsBranchExist(ctx, ctx.Repo.Repository.ID, form.Target); !exist { ctx.RenderWithErrDeprecated(ctx.Tr("form.target_branch_not_exist"), tplReleaseNew, &form) return @@ -564,6 +565,11 @@ func EditRelease(ctx *context.Context) { } return } + if rel.IsTag { + ctx.NotFound(err) // for a pure tag release, don't allow to edit it as a release + return + } + ctx.Data["ID"] = rel.ID ctx.Data["tag_name"] = rel.TagName ctx.Data["tag_target"] = util.IfZero(rel.Target, ctx.Repo.Repository.DefaultBranch) @@ -613,7 
+619,7 @@ func EditReleasePost(ctx *context.Context) { return } if rel.IsTag { - ctx.NotFound(err) + ctx.NotFound(err) // for a pure tag release, don't allow to edit it as a release return } ctx.Data["tag_name"] = rel.TagName diff --git a/routers/web/repo/render.go b/routers/web/repo/render.go index b1299c7047..ace871a9f1 100644 --- a/routers/web/repo/render.go +++ b/routers/web/repo/render.go @@ -42,7 +42,10 @@ func RenderFile(ctx *context.Context) { rctx := renderhelper.NewRenderContextRepoFile(ctx, ctx.Repo.Repository, renderhelper.RepoFileOptions{ CurrentRefPath: ctx.Repo.RefTypeNameSubURL(), CurrentTreePath: path.Dir(ctx.Repo.TreePath), - }).WithRelativePath(ctx.Repo.TreePath).WithInStandalonePage(true) + }).WithRelativePath(ctx.Repo.TreePath).WithStandalonePage(markup.StandalonePageOptions{ + CurrentWebTheme: ctx.TemplateContext.CurrentWebTheme(), + RenderQueryString: ctx.Req.URL.RawQuery, + }) renderer, rendererInput, err := rctx.DetectMarkupRendererByReader(blobReader) if err != nil { http.Error(ctx.Resp, "Unable to find renderer", http.StatusBadRequest) diff --git a/routers/web/repo/setting/setting.go b/routers/web/repo/setting/setting.go index 5a5137a1a7..29f3e62b8f 100644 --- a/routers/web/repo/setting/setting.go +++ b/routers/web/repo/setting/setting.go @@ -459,11 +459,7 @@ func handleSettingsPostPushMirrorAdd(ctx *context.Context) { return } - remoteSuffix, err := util.CryptoRandomString(10) - if err != nil { - ctx.ServerError("RandomString", err) - return - } + remoteSuffix := util.CryptoRandomString(10) remoteAddress, err := util.SanitizeURL(form.PushMirrorAddress) if err != nil { diff --git a/routers/web/repo/setting/webhook.go b/routers/web/repo/setting/webhook.go index b0f3a5cfee..8c57a68b25 100644 --- a/routers/web/repo/setting/webhook.go +++ b/routers/web/repo/setting/webhook.go @@ -450,12 +450,21 @@ func MatrixHooksEditPost(ctx *context.Context) { editWebhook(ctx, matrixHookParams(ctx)) } +func matrixRoomIDEncode(roomID string) string { + // 
See https://spec.matrix.org/latest/appendices/#room-ids + // Some (unrelated) demo links: https://spec.matrix.org/latest/appendices/#matrixto-navigation + // API spec: https://spec.matrix.org/v1.18/client-server-api/#sending-events-to-a-room + // Some of their examples show links like: "PUT /rooms/!roomid:domain/state/m.example.event" + return strings.NewReplacer("%21", "!", "%3A", ":").Replace(url.PathEscape(roomID)) +} + func matrixHookParams(ctx *context.Context) webhookParams { form := web.GetForm(ctx).(*forms.NewMatrixHookForm) + // TODO: need to migrate to the latest (v3) API: https://spec.matrix.org/v1.18/client-server-api/ return webhookParams{ Type: webhook_module.MATRIX, - URL: fmt.Sprintf("%s/_matrix/client/r0/rooms/%s/send/m.room.message", form.HomeserverURL, url.PathEscape(form.RoomID)), + URL: fmt.Sprintf("%s/_matrix/client/r0/rooms/%s/send/m.room.message", form.HomeserverURL, matrixRoomIDEncode(form.RoomID)), ContentType: webhook.ContentTypeJSON, HTTPMethod: http.MethodPut, WebhookForm: form.WebhookForm, diff --git a/routers/web/repo/setting/webhook_test.go b/routers/web/repo/setting/webhook_test.go new file mode 100644 index 0000000000..ca4a21e075 --- /dev/null +++ b/routers/web/repo/setting/webhook_test.go @@ -0,0 +1,15 @@ +// Copyright 2026 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package setting + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestWebhookMatrix(t *testing.T) { + assert.Equal(t, "!roomid:domain", matrixRoomIDEncode("!roomid:domain")) + assert.Equal(t, "!room%23id:domain", matrixRoomIDEncode("!room#id:domain")) // maybe it should never really happen in real world +} diff --git a/routers/web/repo/star.go b/routers/web/repo/star.go index 00c06b7d02..8cfbfefdf1 100644 --- a/routers/web/repo/star.go +++ b/routers/web/repo/star.go @@ -26,6 +26,5 @@ func ActionStar(ctx *context.Context) { ctx.ServerError("GetRepositoryByName", err) return } - ctx.RespHeader().Add("hx-trigger", "refreshUserCards") // see the `hx-trigger="refreshUserCards ..."` comments in tmpl ctx.HTML(http.StatusOK, tplStarUnstar) } diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go index 7136b87058..b455f91845 100644 --- a/routers/web/repo/view.go +++ b/routers/web/repo/view.go @@ -16,10 +16,6 @@ import ( "strings" "time" - _ "image/gif" // for processing gif images - _ "image/jpeg" // for processing jpeg images - _ "image/png" // for processing png images - activities_model "code.gitea.io/gitea/models/activities" admin_model "code.gitea.io/gitea/models/admin" asymkey_model "code.gitea.io/gitea/models/asymkey" @@ -46,6 +42,9 @@ import ( _ "golang.org/x/image/bmp" // for processing bmp images _ "golang.org/x/image/webp" // for processing webp images + _ "image/gif" // for processing gif images + _ "image/jpeg" // for processing jpeg images + _ "image/png" // for processing png images ) const ( @@ -159,7 +158,7 @@ func markupRenderToHTML(ctx *context.Context, renderCtx *markup.RenderContext, r go func() { sb := &strings.Builder{} if markup.RendererNeedPostProcess(renderer) { - escaped, _ = charset.EscapeControlReader(markupRd, sb, ctx.Locale, charset.RuneNBSP) // We allow NBSP here this is rendered + escaped, _ = charset.EscapeControlReader(markupRd, sb, ctx.Locale, 
charset.EscapeOptionsForView()) } else { escaped = &charset.EscapeStatus{} _, _ = io.Copy(sb, markupRd) @@ -310,13 +309,15 @@ func renderDirectoryFiles(ctx *context.Context, timeout time.Duration) git.Entri return nil } - { + { // this block is for testing purpose only if timeout != 0 && !setting.IsProd && !setting.IsInTesting { log.Debug("first call to get directory file commit info") clearFilesCommitInfo := func() { log.Warn("clear directory file commit info to force async loading on frontend") for i := range files { - files[i].Commit = nil + if i%2 == 0 { // for testing purpose, only clear half of the files' commit info + files[i].Commit = nil + } } } _ = clearFilesCommitInfo diff --git a/routers/web/repo/view_file.go b/routers/web/repo/view_file.go index 44bc8543b0..8d7721103a 100644 --- a/routers/web/repo/view_file.go +++ b/routers/web/repo/view_file.go @@ -21,12 +21,11 @@ import ( "code.gitea.io/gitea/modules/git/attribute" "code.gitea.io/gitea/modules/highlight" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/context" issue_service "code.gitea.io/gitea/services/issue" - - "github.com/nektos/act/pkg/model" ) func prepareLatestCommitInfo(ctx *context.Context) bool { @@ -78,14 +77,17 @@ func handleFileViewRenderMarkup(ctx *context.Context, prefetchBuf []byte, utf8Re return false } - ctx.Data["MarkupType"] = rctx.RenderOptions.MarkupType - var err error ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRenderToHTML(ctx, rctx, renderer, utf8Reader) if err != nil { ctx.ServerError("Render", err) return true } + + opts, ok := markup.GetExternalRendererOptions(renderer) + usingIframe := ok && opts.DisplayInIframe + ctx.Data["MarkupType"] = rctx.RenderOptions.MarkupType + ctx.Data["RenderAsMarkup"] = util.Iif(usingIframe, "markup-iframe", "markup-inplace") return true } @@ -119,12 +121,8 @@ func handleFileViewRenderSource(ctx 
*context.Context, attrs *attribute.Attribute } language := attrs.GetLanguage().Value() - fileContent, lexerName, err := highlight.RenderFullFile(filename, language, buf) + fileContent, lexerName := highlight.RenderFullFile(filename, language, buf) ctx.Data["LexerName"] = lexerName - if err != nil { - log.Error("highlight.RenderFullFile failed, fallback to plain text: %v", err) - fileContent = highlight.RenderPlainText(buf) - } status := &charset.EscapeStatus{} statuses := make([]*charset.EscapeStatus, len(fileContent)) for i, line := range fileContent { @@ -188,8 +186,7 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) { if err != nil { log.Error("actions.GetContentFromEntry: %v", err) } - _, workFlowErr := model.ReadWorkflow(bytes.NewReader(content)) - if workFlowErr != nil { + if workFlowErr := actions.ValidateWorkflowContent(content); workFlowErr != nil { ctx.Data["FileError"] = ctx.Locale.Tr("actions.runs.invalid_workflow_helper", workFlowErr.Error()) } } else if issue_service.IsCodeOwnerFile(ctx.Repo.TreePath) { @@ -242,8 +239,6 @@ func prepareFileView(ctx *context.Context, entry *git.TreeEntry) { case fInfo.blobOrLfsSize >= setting.UI.MaxDisplayFileSize: ctx.Data["IsFileTooLarge"] = true case handleFileViewRenderMarkup(ctx, buf, contentReader): - // it also sets ctx.Data["FileContent"] and more - ctx.Data["IsMarkup"] = true case handleFileViewRenderSource(ctx, attrs, fInfo, contentReader): // it also sets ctx.Data["FileContent"] and more ctx.Data["IsDisplayingSource"] = true diff --git a/routers/web/repo/view_readme.go b/routers/web/repo/view_readme.go index eba3ffc36f..25e1f87806 100644 --- a/routers/web/repo/view_readme.go +++ b/routers/web/repo/view_readme.go @@ -195,16 +195,16 @@ func prepareToRenderReadmeFile(ctx *context.Context, subfolder string, readmeFil }).WithRelativePath(readmeFullPath) renderer := rctx.DetectMarkupRenderer(buf) if renderer != nil { - ctx.Data["IsMarkup"] = true + ctx.Data["RenderAsMarkup"] = "markup-inplace" 
ctx.Data["MarkupType"] = rctx.RenderOptions.MarkupType ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRenderToHTML(ctx, rctx, renderer, rd) if err != nil { log.Error("Render failed for %s in %-v: %v Falling back to rendering source", readmeFile.Name(), ctx.Repo.Repository, err) - delete(ctx.Data, "IsMarkup") + delete(ctx.Data, "RenderAsMarkup") } } - if ctx.Data["IsMarkup"] != true { + if ctx.Data["RenderAsMarkup"] == nil { ctx.Data["IsPlainText"] = true content, err := io.ReadAll(rd) if err != nil { diff --git a/routers/web/repo/watch.go b/routers/web/repo/watch.go index 70c548b8ce..a7fbfc168b 100644 --- a/routers/web/repo/watch.go +++ b/routers/web/repo/watch.go @@ -26,6 +26,5 @@ func ActionWatch(ctx *context.Context) { ctx.ServerError("GetRepositoryByName", err) return } - ctx.RespHeader().Add("hx-trigger", "refreshUserCards") // see the `hx-trigger="refreshUserCards ..."` comments in tmpl ctx.HTML(http.StatusOK, tplWatchUnwatch) } diff --git a/routers/web/repo/wiki.go b/routers/web/repo/wiki.go index e5b07633a2..1826ca54e1 100644 --- a/routers/web/repo/wiki.go +++ b/routers/web/repo/wiki.go @@ -258,8 +258,7 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) { defer markupWr.Close() done := make(chan struct{}) go func() { - // We allow NBSP here this is rendered - escaped, _ = charset.EscapeControlReader(markupRd, buf, ctx.Locale, charset.RuneNBSP) + escaped, _ = charset.EscapeControlReader(markupRd, buf, ctx.Locale, charset.EscapeOptionsForView()) output = template.HTML(buf.String()) buf.Reset() close(done) diff --git a/routers/web/user/home.go b/routers/web/user/home.go index 21ca0fc683..9c99a6c8ef 100644 --- a/routers/web/user/home.go +++ b/routers/web/user/home.go @@ -655,6 +655,9 @@ func ShowSSHKeys(ctx *context.Context) { // "authorized_keys" file format: "#" followed by comment line per key buf.WriteString("# Gitea isn't a key server. 
The keys are exported as the user uploaded and might not have been fully verified.\n") for i := range keys { + if keys[i].Type == asymkey_model.KeyTypePrincipal { + continue // SSH principal keys are not for signing or authentication + } buf.WriteString(keys[i].OmitEmail()) buf.WriteString("\n") } diff --git a/routers/web/user/package.go b/routers/web/user/package.go index ffbfaa229b..1484ba2fdf 100644 --- a/routers/web/user/package.go +++ b/routers/web/user/package.go @@ -8,6 +8,7 @@ import ( "errors" "net/http" "net/url" + "time" "code.gitea.io/gitea/models/db" org_model "code.gitea.io/gitea/models/organization" @@ -18,13 +19,13 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/httplib" - "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/optional" alpine_module "code.gitea.io/gitea/modules/packages/alpine" arch_module "code.gitea.io/gitea/modules/packages/arch" container_module "code.gitea.io/gitea/modules/packages/container" debian_module "code.gitea.io/gitea/modules/packages/debian" rpm_module "code.gitea.io/gitea/modules/packages/rpm" + terraform_module "code.gitea.io/gitea/modules/packages/terraform" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/util" @@ -35,6 +36,8 @@ import ( "code.gitea.io/gitea/services/forms" packages_service "code.gitea.io/gitea/services/packages" container_service "code.gitea.io/gitea/services/packages/container" + + "github.com/google/uuid" ) const ( @@ -315,6 +318,11 @@ func ViewPackageVersion(ctx *context.Context) { } ctx.Data["LatestVersions"] = pvs ctx.Data["TotalVersionCount"] = pvsTotal + ctx.Data["PackageVersionViewData"], err = packages_service.GetSpecManager().Get(pd.Package.Type).GetViewPackageVersionData(ctx, pd) + if err != nil { + ctx.ServerError("GetViewPackageVersionData", err) + return + } ctx.Data["CanWritePackages"] = ctx.Package.AccessMode >= perm.AccessModeWrite || 
ctx.IsUserSiteAdmin() @@ -491,20 +499,52 @@ func packageSettingsPostActionLink(ctx *context.Context, form *forms.PackageSett } func packageSettingsPostActionDelete(ctx *context.Context) { - err := packages_service.RemovePackageVersion(ctx, ctx.Doer, ctx.Package.Descriptor.Version) - if err != nil { - log.Error("Error deleting package: %v", err) - ctx.Flash.Error(ctx.Tr("packages.settings.delete.error")) + pd := ctx.Package.Descriptor + + if ctx.FormString("package_name") != pd.Package.Name { + ctx.Flash.Error(ctx.Tr("packages.settings.delete.invalid_package_name")) + ctx.Redirect(pd.PackageSettingsLink()) + return + } + if err := packages_service.RemovePackage(ctx, ctx.Doer, pd.Package); err != nil { + errTr := util.ErrorAsTranslatable(err) + if errTr == nil { + ctx.ServerError("RemovePackage", err) + return + } + ctx.Flash.Error(errTr.Translate(ctx.Locale)) + ctx.Redirect(pd.PackageSettingsLink()) + return + } + + ctx.Flash.Success(ctx.Tr("packages.settings.delete.success")) + ctx.Redirect(ctx.Package.Owner.HomeLink() + "/-/packages") +} + +// PackageVersionDelete deletes a package version +func PackageVersionDelete(ctx *context.Context) { + pd := ctx.Package.Descriptor + if pd.Version == nil { + ctx.NotFound(nil) + return + } + + if err := packages_service.RemovePackageVersion(ctx, ctx.Doer, pd.Version); err != nil { + errTr := util.ErrorAsTranslatable(err) + if errTr == nil { + ctx.ServerError("RemovePackageVersion", err) + return + } + ctx.Flash.Error(errTr.Translate(ctx.Locale)) } else { - ctx.Flash.Success(ctx.Tr("packages.settings.delete.success")) + ctx.Flash.Success(ctx.Tr("packages.settings.delete.version.success")) } - redirectURL := ctx.Package.Owner.HomeLink() + "/-/packages" // redirect to the package if there are still versions available - if has, _ := packages_model.ExistVersion(ctx, &packages_model.PackageSearchOptions{PackageID: ctx.Package.Descriptor.Package.ID, IsInternal: optional.Some(false)}); has { - redirectURL = 
ctx.Package.Descriptor.PackageWebLink() + redirectURL := ctx.Package.Owner.HomeLink() + "/-/packages" + if has, _ := packages_model.ExistVersion(ctx, &packages_model.PackageSearchOptions{PackageID: pd.Package.ID, IsInternal: optional.Some(false)}); has { + redirectURL = pd.PackageWebLink() } - ctx.Redirect(redirectURL) } @@ -512,7 +552,7 @@ func packageSettingsPostActionDelete(ctx *context.Context) { func DownloadPackageFile(ctx *context.Context) { pf, err := packages_model.GetFileForVersionByID(ctx, ctx.Package.Descriptor.Version.ID, ctx.PathParamInt64("fileid")) if err != nil { - if err == packages_model.ErrPackageFileNotExist { + if errors.Is(err, packages_model.ErrPackageFileNotExist) { ctx.NotFound(err) } else { ctx.ServerError("GetFileForVersionByID", err) @@ -528,3 +568,56 @@ func DownloadPackageFile(ctx *context.Context) { packages_helper.ServePackageFile(ctx, s, u, pf) } + +// ActionPackageTerraformLock locks a terraform state +func ActionPackageTerraformLock(ctx *context.Context) { + pd := ctx.Package.Descriptor + if pd.Package.Type != packages_model.TypeTerraformState { + ctx.NotFound(nil) + return + } + + existingLock, err := terraform_module.GetLock(ctx, pd.Package.ID) + if err != nil { + ctx.ServerError("GetLock", err) + return + } + if existingLock.IsLocked() { + ctx.Flash.Error(ctx.Tr("packages.terraform.lock.error.already_locked")) + ctx.Redirect(pd.VersionWebLink()) + return + } + + lockID := uuid.New().String() + lockInfo := &terraform_module.LockInfo{ + ID: lockID, + Operation: "Manual UI Lock", + Who: ctx.Doer.Name, + Created: time.Now(), + } + + if err := terraform_module.SetLock(ctx, pd.Package.ID, lockInfo); err != nil { + ctx.ServerError("SetLock", err) + return + } + + ctx.Flash.Success(ctx.Tr("packages.terraform.lock.success")) + ctx.Redirect(pd.VersionWebLink()) +} + +// ActionPackageTerraformUnlock unlocks a terraform state +func ActionPackageTerraformUnlock(ctx *context.Context) { + pd := ctx.Package.Descriptor + if pd.Package.Type != 
packages_model.TypeTerraformState { + ctx.NotFound(nil) + return + } + + if err := terraform_module.RemoveLock(ctx, pd.Package.ID); err != nil { + ctx.ServerError("RemoveLock", err) + return + } + + ctx.Flash.Success(ctx.Tr("packages.terraform.unlock.success")) + ctx.Redirect(pd.VersionWebLink()) +} diff --git a/routers/web/web.go b/routers/web/web.go index e3dcf27cc4..1dff6cbc04 100644 --- a/routers/web/web.go +++ b/routers/web/web.go @@ -45,7 +45,7 @@ import ( "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/forms" - _ "code.gitea.io/gitea/modules/session" // to registers all internal adapters + _ "code.gitea.io/gitea/modules/session" // to register all internal adapters "gitea.com/go-chi/captcha" chi_middleware "github.com/go-chi/chi/v5/middleware" @@ -260,7 +260,7 @@ func Routes() *web.Router { routes.BeforeRouting(chi_middleware.GetHead) routes.Head("/", misc.DummyOK) // for health check - doesn't need to be passed through gzip handler - routes.Methods("GET, HEAD, OPTIONS", "/assets/*", routing.MarkLogLevelTrace, optionsCorsHandler(), public.FileHandlerFunc()) + routes.Methods("GET, HEAD, OPTIONS", "/assets/*", routing.MarkLogLevelTrace, public.AssetsCors(), public.FileHandlerFunc()) routes.Methods("GET, HEAD", "/avatars/*", avatarStorageHandler(setting.Avatar.Storage, "avatars", storage.Avatars)) routes.Methods("GET, HEAD", "/repo-avatars/*", avatarStorageHandler(setting.RepoAvatar.Storage, "repo-avatars", storage.RepoAvatars)) routes.Methods("GET, HEAD", "/apple-touch-icon.png", misc.StaticRedirect("/assets/img/apple-touch-icon.png")) @@ -1071,14 +1071,19 @@ func registerWebRoutes(m *web.Router, webAuth *AuthMiddleware) { m.Get("/versions", user.ListPackageVersions) m.Group("/{version}", func() { m.Get("", user.ViewPackageVersion) + m.Post("", reqPackageAccess(perm.AccessModeWrite), user.PackageVersionDelete) m.Get("/{version_sub}", user.ViewPackageVersion) - m.Get("/files/{fileid}", user.DownloadPackageFile) - m.Group("/settings", func() 
{ - m.Get("", user.PackageSettings) - m.Post("", web.Bind(forms.PackageSettingForm{}), user.PackageSettingsPost) + m.Group("/terraform", func() { + m.Post("/lock", user.ActionPackageTerraformLock) + m.Post("/unlock", user.ActionPackageTerraformUnlock) }, reqPackageAccess(perm.AccessModeWrite)) + m.Get("/files/{fileid}", user.DownloadPackageFile) }) }) + m.Group("/settings/{type}/{name}", func() { + m.Get("", user.PackageSettings) + m.Post("", web.Bind(forms.PackageSettingForm{}), user.PackageSettingsPost) + }, reqPackageAccess(perm.AccessModeWrite)) }, context.PackageAssignment(), reqPackageAccess(perm.AccessModeRead)) } @@ -1705,7 +1710,7 @@ func registerWebRoutes(m *web.Router, webAuth *AuthMiddleware) { m.Get("/forks", repo.Forks) m.Get("/commit/{sha:([a-f0-9]{7,64})}.{ext:patch|diff}", repo.MustBeNotEmpty, repo.RawDiff) - m.Post("/lastcommit/*", context.RepoRefByType(git.RefTypeCommit), repo.LastCommit) + m.Get("/lastcommit/*", context.RepoRefByType(git.RefTypeCommit), repo.LastCommit) }, optSignIn, context.RepoAssignment, reqUnitCodeReader) // end "/{username}/{reponame}": repo code diff --git a/services/actions/init_test.go b/services/actions/init_test.go index e61b3759e1..4db765839e 100644 --- a/services/actions/init_test.go +++ b/services/actions/init_test.go @@ -35,7 +35,7 @@ func TestInitToken(t *testing.T) { }) t.Run("EnvToken", func(t *testing.T) { - tokenValue, _ := util.CryptoRandomString(32) + tokenValue := util.CryptoRandomString(32) t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN", tokenValue) t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN_FILE", "") err := initGlobalRunnerToken(t.Context()) @@ -52,7 +52,7 @@ func TestInitToken(t *testing.T) { }) t.Run("EnvFileToken", func(t *testing.T) { - tokenValue, _ := util.CryptoRandomString(32) + tokenValue := util.CryptoRandomString(32) f := t.TempDir() + "/token" _ = os.WriteFile(f, []byte(tokenValue), 0o644) t.Setenv("GITEA_RUNNER_REGISTRATION_TOKEN", "") diff --git a/services/actions/job_emitter.go 
b/services/actions/job_emitter.go index 20a4f81eab..c7813360ab 100644 --- a/services/actions/job_emitter.go +++ b/services/actions/job_emitter.go @@ -151,21 +151,28 @@ func findBlockedRunByConcurrency(ctx context.Context, repoID int64, concurrencyG func checkRunConcurrency(ctx context.Context, run *actions_model.ActionRun) (jobs, updatedJobs []*actions_model.ActionRunJob, err error) { checkedConcurrencyGroup := make(container.Set[string]) - // check run (workflow-level) concurrency - if run.ConcurrencyGroup != "" { - concurrentRun, err := findBlockedRunByConcurrency(ctx, run.RepoID, run.ConcurrencyGroup) + collect := func(concurrencyGroup string) error { + concurrentRun, err := findBlockedRunByConcurrency(ctx, run.RepoID, concurrencyGroup) if err != nil { - return nil, nil, fmt.Errorf("find blocked run by concurrency: %w", err) + return fmt.Errorf("find blocked run by concurrency: %w", err) } if concurrentRun != nil && !concurrentRun.NeedApproval { js, ujs, err := checkJobsOfRun(ctx, concurrentRun) if err != nil { - return nil, nil, err + return err } jobs = append(jobs, js...) updatedJobs = append(updatedJobs, ujs...) 
} - checkedConcurrencyGroup.Add(run.ConcurrencyGroup) + checkedConcurrencyGroup.Add(concurrencyGroup) + return nil + } + + // check run (workflow-level) concurrency + if run.ConcurrencyGroup != "" { + if err := collect(run.ConcurrencyGroup); err != nil { + return nil, nil, err + } } // check job concurrency @@ -177,22 +184,12 @@ func checkRunConcurrency(ctx context.Context, run *actions_model.ActionRun) (job if !job.Status.IsDone() { continue } - if job.ConcurrencyGroup == "" && checkedConcurrencyGroup.Contains(job.ConcurrencyGroup) { + if job.ConcurrencyGroup == "" || checkedConcurrencyGroup.Contains(job.ConcurrencyGroup) { continue } - concurrentRun, err := findBlockedRunByConcurrency(ctx, job.RepoID, job.ConcurrencyGroup) - if err != nil { - return nil, nil, fmt.Errorf("find blocked run by concurrency: %w", err) + if err := collect(job.ConcurrencyGroup); err != nil { + return nil, nil, err } - if concurrentRun != nil && !concurrentRun.NeedApproval { - js, ujs, err := checkJobsOfRun(ctx, concurrentRun) - if err != nil { - return nil, nil, err - } - jobs = append(jobs, js...) - updatedJobs = append(updatedJobs, ujs...) - } - checkedConcurrencyGroup.Add(job.ConcurrencyGroup) } return jobs, updatedJobs, nil } diff --git a/services/actions/job_emitter_test.go b/services/actions/job_emitter_test.go index a2152fb270..5ab1c0846d 100644 --- a/services/actions/job_emitter_test.go +++ b/services/actions/job_emitter_test.go @@ -7,6 +7,8 @@ import ( "testing" actions_model "code.gitea.io/gitea/models/actions" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/unittest" "github.com/stretchr/testify/assert" ) @@ -134,3 +136,68 @@ jobs: }) } } + +// Test_checkRunConcurrency_NoDuplicateConcurrencyGroupCheck verifies that when a run's +// ConcurrencyGroup has already been checked at the run level, the same group is not +// re-checked for individual jobs. 
+func Test_checkRunConcurrency_NoDuplicateConcurrencyGroupCheck(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + ctx := t.Context() + + // Run A: the triggering run with a concurrency group. + runA := &actions_model.ActionRun{ + RepoID: 4, + OwnerID: 1, + TriggerUserID: 1, + WorkflowID: "test.yml", + Index: 9901, + Ref: "refs/heads/main", + Status: actions_model.StatusRunning, + ConcurrencyGroup: "test-cg", + } + assert.NoError(t, db.Insert(ctx, runA)) + + // A done job for run A with the same ConcurrencyGroup. + // This triggers the job-level concurrency check in checkRunConcurrency. + jobADone := &actions_model.ActionRunJob{ + RunID: runA.ID, + RepoID: 4, + OwnerID: 1, + JobID: "job1", + Name: "job1", + Status: actions_model.StatusSuccess, + ConcurrencyGroup: "test-cg", + } + assert.NoError(t, db.Insert(ctx, jobADone)) + + // Blocked run B competing for the same concurrency group. + runB := &actions_model.ActionRun{ + RepoID: 4, + OwnerID: 1, + TriggerUserID: 1, + WorkflowID: "test.yml", + Index: 9902, + Ref: "refs/heads/main", + Status: actions_model.StatusBlocked, + ConcurrencyGroup: "test-cg", + } + assert.NoError(t, db.Insert(ctx, runB)) + + // A blocked job belonging to run B (no job-level concurrency group). 
+ jobBBlocked := &actions_model.ActionRunJob{ + RunID: runB.ID, + RepoID: 4, + OwnerID: 1, + JobID: "job1", + Name: "job1", + Status: actions_model.StatusBlocked, + } + assert.NoError(t, db.Insert(ctx, jobBBlocked)) + + jobs, _, err := checkRunConcurrency(ctx, runA) + assert.NoError(t, err) + + if assert.Len(t, jobs, 1) { + assert.Equal(t, jobBBlocked.ID, jobs[0].ID) + } +} diff --git a/services/actions/notifier.go b/services/actions/notifier.go index abcaff09a3..19d6be9420 100644 --- a/services/actions/notifier.go +++ b/services/actions/notifier.go @@ -816,12 +816,14 @@ func (n *actionsNotifier) WorkflowRunStatusUpdate(ctx context.Context, repo *rep return } - newNotifyInput(repo, sender, webhook_module.HookEventWorkflowRun).WithPayload(&api.WorkflowRunPayload{ - Action: status, - Workflow: convertedWorkflow, - WorkflowRun: convertedRun, - Organization: org, - Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}), - Sender: convert.ToUser(ctx, sender, nil), - }).Notify(ctx) + newNotifyInput(repo, sender, webhook_module.HookEventWorkflowRun). + WithRef(git.RefNameFromBranch(repo.DefaultBranch).String()). + WithPayload(&api.WorkflowRunPayload{ + Action: status, + Workflow: convertedWorkflow, + WorkflowRun: convertedRun, + Organization: org, + Repo: convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm_model.AccessModeOwner}), + Sender: convert.ToUser(ctx, sender, nil), + }).Notify(ctx) } diff --git a/services/agit/agit.go b/services/agit/agit.go index fa2ddd9baf..55b98a65ae 100644 --- a/services/agit/agit.go +++ b/services/agit/agit.go @@ -282,12 +282,15 @@ func ProcReceive(ctx context.Context, repo *repo_model.Repository, gitRepo *git. if err != nil { return nil, fmt.Errorf("failed to load pull issue. 
Error: %w", err) } - comment, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i], forcePush.Value()) - if err == nil && comment != nil { + + isForcePush := forcePush.Value() + comment, commentCreated, err := pull_service.CreatePushPullComment(ctx, pusher, pr, oldCommitID, opts.NewCommitIDs[i], isForcePush) + if err != nil { + log.Error("CreatePushPullComment: %v", err) + } else if commentCreated { notify_service.PullRequestPushCommits(ctx, pusher, pr, comment) } notify_service.PullRequestSynchronized(ctx, pusher, pr) - isForcePush := comment != nil && comment.IsForcePush results = append(results, private.HookProcReceiveRefResult{ OldOID: oldCommitID, diff --git a/services/attachment/attachment.go b/services/attachment/attachment.go index d69253dd59..a824cdb246 100644 --- a/services/attachment/attachment.go +++ b/services/attachment/attachment.go @@ -54,12 +54,17 @@ func NewLimitedUploaderMaxBytesReader(r io.ReadCloser, w http.ResponseWriter) *U return &UploaderFile{rd: r, size: -1, respWriter: w} } -func UploadAttachmentGeneralSizeLimit(ctx context.Context, file *UploaderFile, allowedTypes string, attach *repo_model.Attachment) (*repo_model.Attachment, error) { - return uploadAttachment(ctx, file, allowedTypes, setting.Attachment.MaxSize<<20, attach) +type UploadAttachmentFunc func(ctx context.Context, file *UploaderFile, attach *repo_model.Attachment) (*repo_model.Attachment, error) + +func UploadAttachmentForIssue(ctx context.Context, file *UploaderFile, attach *repo_model.Attachment) (*repo_model.Attachment, error) { + return uploadAttachment(ctx, file, setting.Attachment.AllowedTypes, setting.Attachment.MaxSize<<20, attach) } -func UploadAttachmentReleaseSizeLimit(ctx context.Context, file *UploaderFile, allowedTypes string, attach *repo_model.Attachment) (*repo_model.Attachment, error) { - return uploadAttachment(ctx, file, allowedTypes, setting.Repository.Release.FileMaxSize<<20, attach) +func 
UploadAttachmentForRelease(ctx context.Context, file *UploaderFile, attach *repo_model.Attachment) (*repo_model.Attachment, error) { + // FIXME: although the release attachment has different settings from the issue attachment, + // it still uses the same attachment table, the same storage and the same upload logic + // So if the "issue attachment [attachment]" is not enabled, it will also affect the release attachment, which is not expected. + return uploadAttachment(ctx, file, setting.Repository.Release.AllowedTypes, setting.Repository.Release.FileMaxSize<<20, attach) } func uploadAttachment(ctx context.Context, file *UploaderFile, allowedTypes string, maxFileSize int64, attach *repo_model.Attachment) (*repo_model.Attachment, error) { diff --git a/services/auth/auth_token.go b/services/auth/auth_token.go index 8897bbd19c..7fcf3ba0df 100644 --- a/services/auth/auth_token.go +++ b/services/auth/auth_token.go @@ -64,10 +64,7 @@ func CheckAuthToken(ctx context.Context, value string) (*auth_model.AuthToken, e } func RegenerateAuthToken(ctx context.Context, t *auth_model.AuthToken) (*auth_model.AuthToken, string, error) { - token, hash, err := generateTokenAndHash() - if err != nil { - return nil, "", err - } + token, hash := generateTokenAndHash() newToken := &auth_model.AuthToken{ ID: t.ID, @@ -89,16 +86,9 @@ func CreateAuthTokenForUserID(ctx context.Context, userID int64) (*auth_model.Au ExpiresUnix: timeutil.TimeStampNow().AddDuration(time.Duration(setting.LogInRememberDays*24) * time.Hour), } - var err error - t.ID, err = util.CryptoRandomString(10) - if err != nil { - return nil, "", err - } + t.ID = util.CryptoRandomString(10) - token, hash, err := generateTokenAndHash() - if err != nil { - return nil, "", err - } + token, hash := generateTokenAndHash() t.TokenHash = hash @@ -109,15 +99,12 @@ func CreateAuthTokenForUserID(ctx context.Context, userID int64) (*auth_model.Au return t, token, nil } -func generateTokenAndHash() (string, string, error) { - buf, err := 
util.CryptoRandomBytes(32) - if err != nil { - return "", "", err - } +func generateTokenAndHash() (string, string) { + buf := util.CryptoRandomBytes(32) token := hex.EncodeToString(buf) hashedToken := sha256.Sum256([]byte(token)) - return token, hex.EncodeToString(hashedToken[:]), nil + return token, hex.EncodeToString(hashedToken[:]) } diff --git a/services/auth/signin.go b/services/auth/signin.go index e116a088e0..a8fa0f17dd 100644 --- a/services/auth/signin.go +++ b/services/auth/signin.go @@ -51,7 +51,7 @@ func UserSignIn(ctx context.Context, username, password string) (*user_model.Use } if user != nil { - hasUser, err := user_model.GetUser(ctx, user) + hasUser, err := user_model.GetIndividualUser(ctx, user) if err != nil { return nil, nil, err } diff --git a/services/auth/source/oauth2/providers_openid.go b/services/auth/source/oauth2/providers_openid.go index fc0d77a7e6..557fe6cb01 100644 --- a/services/auth/source/oauth2/providers_openid.go +++ b/services/auth/source/oauth2/providers_openid.go @@ -46,8 +46,14 @@ func (o *OpenIDProvider) CreateGothProvider(providerName, callbackURL string, so provider, err := openidConnect.New(source.ClientID, source.ClientSecret, callbackURL, source.OpenIDConnectAutoDiscoveryURL, scopes...) if err != nil { log.Warn("Failed to create OpenID Connect Provider with name '%s' with url '%s': %v", providerName, source.OpenIDConnectAutoDiscoveryURL, err) + return nil, err } - return provider, err + if source.ExternalIDClaim != "" { + // UserIdClaims is a fallback list; goth returns the first non-empty matching claim. + // A single entry is sufficient because the admin explicitly chooses one claim (e.g. "oid" for Azure AD). 
+ provider.UserIdClaims = []string{source.ExternalIDClaim} + } + return provider, nil } // CustomURLSettings returns the custom url settings for this provider diff --git a/services/auth/source/oauth2/source.go b/services/auth/source/oauth2/source.go index 00d89b3481..3f69c08fab 100644 --- a/services/auth/source/oauth2/source.go +++ b/services/auth/source/oauth2/source.go @@ -30,6 +30,7 @@ type Source struct { SSHPublicKeyClaimName string FullNameClaimName string + ExternalIDClaim string } // FromDB fills up an OAuth2Config from serialized format. diff --git a/services/auth/source/oauth2/source_sync.go b/services/auth/source/oauth2/source_sync.go index c2e3dfb1a8..445e281a06 100644 --- a/services/auth/source/oauth2/source_sync.go +++ b/services/auth/source/oauth2/source_sync.go @@ -67,7 +67,7 @@ func (source *Source) refresh(ctx context.Context, provider goth.Provider, u *us LoginSource: u.LoginSourceID, } - hasUser, err := user_model.GetUser(ctx, user) + hasUser, err := user_model.GetIndividualUser(ctx, user) if err != nil { return err } diff --git a/services/context/base.go b/services/context/base.go index 8d44de5bc7..c5ec4b419a 100644 --- a/services/context/base.go +++ b/services/context/base.go @@ -159,12 +159,10 @@ func (b *Base) Redirect(location string, status ...int) { // So in this case, we should remove the session cookie from the response header removeSessionCookieHeader(b.Resp) } - // in case the request is made by htmx, have it redirect the browser instead of trying to follow the redirect inside htmx - if b.Req.Header.Get("HX-Request") == "true" { - b.Resp.Header().Set("HX-Redirect", location) - // we have to return a non-redirect status code so XMLHTTPRequest will not immediately follow the redirect - // so as to give htmx redirect logic a chance to run - b.Status(http.StatusNoContent) + // In case the request is made by "fetch-action" module, make JS redirect to the new location + // Otherwise, the JS fetch will follow the redirection and read a 
"login" page, embed it to the current page, which is not expected. + if b.Req.Header.Get("X-Gitea-Fetch-Action") != "" { + b.JSON(http.StatusOK, map[string]any{"redirect": location}) return } http.Redirect(b.Resp, b.Req, location, code) diff --git a/services/context/base_test.go b/services/context/base_test.go index 2a4f86dddf..f9bbe71729 100644 --- a/services/context/base_test.go +++ b/services/context/base_test.go @@ -38,9 +38,10 @@ func TestRedirect(t *testing.T) { req, _ = http.NewRequest(http.MethodGet, "/", nil) resp := httptest.NewRecorder() - req.Header.Add("HX-Request", "true") + req.Header.Add("X-Gitea-Fetch-Action", "1") b := NewBaseContextForTest(resp, req) b.Redirect("/other") - assert.Equal(t, "/other", resp.Header().Get("HX-Redirect")) - assert.Equal(t, http.StatusNoContent, resp.Code) + assert.Contains(t, resp.Header().Get("Content-Type"), "application/json") + assert.JSONEq(t, `{"redirect":"/other"}`, resp.Body.String()) + assert.Equal(t, http.StatusOK, resp.Code) } diff --git a/services/context/context.go b/services/context/context.go index a6a861ecaa..d6030808d8 100644 --- a/services/context/context.go +++ b/services/context/context.go @@ -63,8 +63,6 @@ type Context struct { Package *Package } -type TemplateContext map[string]any - func init() { web.RegisterResponseStatusProvider[*Base](func(req *http.Request) web_types.ResponseStatusProvider { return req.Context().Value(BaseContextKey).(*Base) diff --git a/services/context/context_template.go b/services/context/context_template.go index 52c7461187..2b34681faa 100644 --- a/services/context/context_template.go +++ b/services/context/context_template.go @@ -5,15 +5,25 @@ package context import ( "context" + "fmt" + "html" + "html/template" "net/http" "strconv" + "strings" + "sync" "time" + "code.gitea.io/gitea/modules/httplib" + "code.gitea.io/gitea/modules/public" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/middleware" 
"code.gitea.io/gitea/services/webtheme" ) +type TemplateContext map[string]any + var _ context.Context = TemplateContext(nil) func NewTemplateContext(ctx context.Context, req *http.Request) TemplateContext { @@ -69,3 +79,83 @@ func (c TemplateContext) CurrentWebBanner() *setting.WebBannerType { } return nil } + +// AppFullLink returns a full URL link with AppSubURL for the given app link (no AppSubURL) +// If no link is given, it returns the current app full URL with sub-path but without trailing slash (that's why it is not named as AppURL) +func (c TemplateContext) AppFullLink(link ...string) template.URL { + s := httplib.GuessCurrentAppURL(c.parentContext()) + s = strings.TrimSuffix(s, "/") + if len(link) == 0 { + return template.URL(s) + } + return template.URL(s + strings.TrimPrefix(link[0], "/")) +} + +var globalVars = sync.OnceValue(func() (ret struct { + scriptImportRemainingPart string +}, +) { + // add onerror handler to alert users when the script fails to load: + // * for end users: there were many users reporting that "UI doesn't work", actually they made mistakes in their config + // * for developers: help them to remember to run "make watch-frontend" to build frontend assets + // the message will be directly put in the onerror JS code's string + onScriptErrorPrompt := `Please make sure the asset files can be accessed.` + if !setting.IsProd { + onScriptErrorPrompt += `\n\nFor development, run: make watch-frontend.` + } + onScriptErrorJS := fmt.Sprintf(`alert('Failed to load asset file from ' + this.src + '. 
%s')`, onScriptErrorPrompt) + ret.scriptImportRemainingPart = `onerror="` + html.EscapeString(onScriptErrorJS) + `">` + return ret +}) + +func (c TemplateContext) ScriptImport(path string, typ ...string) template.HTML { + if len(typ) > 0 { + if typ[0] == "module" { + return template.HTML(` -{{ScriptImport "js/iife.js"}} +{{ctx.ScriptImport "js/iife.js"}} diff --git a/templates/base/head_style.tmpl b/templates/base/head_style.tmpl index 15fa7ad730..4a4fb9d96f 100644 --- a/templates/base/head_style.tmpl +++ b/templates/base/head_style.tmpl @@ -1,2 +1,2 @@ - + diff --git a/templates/base/markup_codepreview.tmpl b/templates/base/markup_codepreview.tmpl index e16848581d..4c04b5092c 100644 --- a/templates/base/markup_codepreview.tmpl +++ b/templates/base/markup_codepreview.tmpl @@ -13,10 +13,7 @@ {{- range $idx, $line := .HighlightLines -}} - {{- if $.EscapeStatus.Escaped -}} - {{- $lineEscapeStatus := index $.LineEscapeStatus $idx -}} - {{if $lineEscapeStatus.Escaped}}{{end}} - {{- end}} + {{- ctx.RenderUtils.RenderUnicodeEscapeToggleTd $.EscapeStatus (index $.LineEscapeStatus $idx)}}
{{$line.FormattedContent}}
{{/* only div works, span generates incorrect HTML structure */}} {{- end -}} diff --git a/templates/devtest/devtest-footer.tmpl b/templates/devtest/devtest-footer.tmpl index 868136e194..091a1035a9 100644 --- a/templates/devtest/devtest-footer.tmpl +++ b/templates/devtest/devtest-footer.tmpl @@ -1,3 +1 @@ -{{/* TODO: the devtest.js is isolated from index.js, so no module is shared and many index.js functions do not work in devtest.ts */}} - {{template "base/footer" ctx.RootData}} diff --git a/templates/devtest/devtest-header.tmpl b/templates/devtest/devtest-header.tmpl index a7aebcb7dc..c9d7b3047f 100644 --- a/templates/devtest/devtest-header.tmpl +++ b/templates/devtest/devtest-header.tmpl @@ -1,8 +1,4 @@ {{template "base/head" ctx.RootData}} - -{{template "base/alert" .}} +
+
{{template "base/alert" ctx.RootData}}
diff --git a/templates/devtest/fetch-action.tmpl b/templates/devtest/fetch-action.tmpl index 4ee824f04b..e8fddf17b0 100644 --- a/templates/devtest/fetch-action.tmpl +++ b/templates/devtest/fetch-action.tmpl @@ -1,10 +1,9 @@ {{template "devtest/devtest-header"}}
- {{template "base/alert" .}}

link-action

- Use "window.fetch" to send a request to backend, the request is defined in an "A" or "BUTTON" element. + The request is defined in an "A" or "BUTTON" element. It might be renamed to "link-fetch-action" to match the "form-fetch-action".
@@ -16,30 +15,20 @@

form-fetch-action

-
Use "window.fetch" to send a form request to backend
-
-
+
+ -
+
-
+
bad action url
- {{template "devtest/devtest-footer"}} diff --git a/templates/devtest/fomantic-modal.tmpl b/templates/devtest/fomantic-modal.tmpl index 8e769790b2..98c3f332ae 100644 --- a/templates/devtest/fomantic-modal.tmpl +++ b/templates/devtest/fomantic-modal.tmpl @@ -1,21 +1,9 @@ {{template "devtest/devtest-header"}}
- {{template "base/alert" .}} - - -
-

<origin-url>

-
-
-
-

<overflow-menu>

diff --git a/templates/devtest/toast.tmpl b/templates/devtest/toast-and-message.tmpl similarity index 96% rename from templates/devtest/toast.tmpl rename to templates/devtest/toast-and-message.tmpl index 597b415469..c4056b6fc6 100644 --- a/templates/devtest/toast.tmpl +++ b/templates/devtest/toast-and-message.tmpl @@ -1,5 +1,5 @@ {{template "devtest/devtest-header"}} -
+

Toast

diff --git a/templates/devtest/unicode-escape.tmpl b/templates/devtest/unicode-escape.tmpl new file mode 100644 index 0000000000..a61813f5c8 --- /dev/null +++ b/templates/devtest/unicode-escape.tmpl @@ -0,0 +1,17 @@ +{{template "devtest/devtest-header"}} +
+
+ + + {{range $idx, $line := .HighlightLines}} + + + {{ctx.RenderUtils.RenderUnicodeEscapeToggleTd $.EscapeStatus (index $.LineEscapeStatus $idx)}} + + + {{end}} + +
{{$line.FormattedContent}}
+
+
+{{template "devtest/devtest-footer"}} diff --git a/templates/install.tmpl b/templates/install.tmpl index 45f14d5c57..bc6fed08e9 100644 --- a/templates/install.tmpl +++ b/templates/install.tmpl @@ -117,7 +117,7 @@ {{ctx.Locale.Tr "install.lfs_path_helper"}}
-
+
{{ctx.Locale.Tr "install.run_user_helper"}} diff --git a/templates/org/follow_unfollow.tmpl b/templates/org/follow_unfollow.tmpl index ba0bd01efe..77977c1279 100644 --- a/templates/org/follow_unfollow.tmpl +++ b/templates/org/follow_unfollow.tmpl @@ -1,4 +1,4 @@ -
{{if .ShowMemberAndTeamTab}} @@ -52,10 +60,9 @@ {{.NumMembers}} {{svg "octicon-chevron-right"}}
- {{$isMember := .IsOrganizationMember}} - {{range .Members}} - {{if or $isMember (call $.IsPublicMember .ID)}} - {{ctx.AvatarUtils.Avatar . 48}} + {{range $memberUser := .OrgOverviewMembers}} + {{if or $.IsOrganizationMember (call $.IsPublicMember $memberUser.ID)}} + {{template "shared/user/avatarlink" dict "user" $memberUser "size" 32 "tooltip" true}} {{end}} {{end}}
@@ -66,7 +73,7 @@ {{.Org.NumTeams}} {{svg "octicon-chevron-right"}}
- {{range .Teams}} + {{range .OrgOverviewTeams}}
{{.Name}}

diff --git a/templates/org/team/new.tmpl b/templates/org/team/new.tmpl index abf728fc54..f8785bb466 100644 --- a/templates/org/team/new.tmpl +++ b/templates/org/team/new.tmpl @@ -20,7 +20,7 @@

- + {{ctx.Locale.Tr "org.team_desc_helper"}}
{{if not (eq .Team.LowerName "owners")}} diff --git a/templates/org/team/sidebar.tmpl b/templates/org/team/sidebar.tmpl index 8678ed7454..645c94d416 100644 --- a/templates/org/team/sidebar.tmpl +++ b/templates/org/team/sidebar.tmpl @@ -1,17 +1,16 @@
-

+

{{.Team.Name}} -
+
{{if .Team.IsMember ctx $.SignedUser.ID}} -
- -
+ {{else if .IsOrganizationOwner}}
- +
{{end}}
@@ -85,12 +84,12 @@
{{end}}

- -
{{end}} +
+
+ + +
+
+
- {{range .Teams}} -
-
- {{.Name}} -
- {{ctx.Locale.Tr "view"}} + {{range $team := $.OrgListTeams}} +
+ -
- {{range .Members}} - {{template "shared/user/avatarlink" dict "user" .}} - {{end}} + {{if $team.Description}} +
+ {{if $team.Description}}{{$team.Description}}{{end}}
-
-

{{.NumMembers}} {{ctx.Locale.Tr "org.lower_members"}} · {{.NumRepos}} {{ctx.Locale.Tr "org.lower_repositories"}}

+ {{end}} +
+
+ {{range .Members}} + {{template "shared/user/avatarlink" dict "user" . "size" 32 "tooltip" true}} + {{else}} + {{ctx.Locale.Tr "org.teams.add_team_member"}} + {{end}} +
{{end}}
+ {{template "base/paginate" .}}
-