diff --git a/.drone.yml b/.drone.yml index 31706ac213..ce922f8c60 100644 --- a/.drone.yml +++ b/.drone.yml @@ -25,7 +25,7 @@ steps: - make deps-frontend - name: deps-backend - image: golang:1.17 + image: golang:1.18 pull: always commands: - make deps-backend @@ -45,32 +45,41 @@ steps: commands: - make lint-backend environment: - GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not + GOPROXY: https://goproxy.io # proxy.golang.org is blocked in China, this proxy is not GOSUMDB: sum.golang.org TAGS: bindata sqlite sqlite_unlock_notify depends_on: [deps-backend] + volumes: + - name: deps + path: /go - name: lint-backend-windows image: gitea/test_env:linux-amd64 # https://gitea.com/gitea/test-env commands: - - make golangci-lint vet + - make golangci-lint-windows vet environment: - GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not + GOPROXY: https://goproxy.io # proxy.golang.org is blocked in China, this proxy is not GOSUMDB: sum.golang.org TAGS: bindata sqlite sqlite_unlock_notify GOOS: windows GOARCH: amd64 depends_on: [deps-backend] + volumes: + - name: deps + path: /go - name: lint-backend-gogit image: gitea/test_env:linux-amd64 # https://gitea.com/gitea/test-env commands: - make lint-backend environment: - GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not + GOPROXY: https://goproxy.io # proxy.golang.org is blocked in China, this proxy is not GOSUMDB: sum.golang.org TAGS: bindata gogit sqlite sqlite_unlock_notify depends_on: [deps-backend] + volumes: + - name: deps + path: /go - name: checks-frontend image: node:16 @@ -79,7 +88,7 @@ steps: depends_on: [deps-frontend] - name: checks-backend - image: golang:1.17 + image: golang:1.18 commands: - make checks-backend depends_on: [deps-backend] @@ -100,11 +109,11 @@ steps: depends_on: [test-frontend] - name: build-backend-no-gcc - image: golang:1.16 # this step is kept as the lowest version of golang that we support + image: golang:1.17 # this step is kept as the lowest version of golang that we support pull: always environment: GO111MODULE: on - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io commands: - go build -o gitea_no_gcc # test if build succeeds without the sqlite tag depends_on: [deps-backend, checks-backend] @@ -113,10 +122,10 @@ steps: path: /go - name: build-backend-arm64 - image: golang:1.17 + image: golang:1.18 environment: GO111MODULE: on - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io GOOS: linux GOARCH: arm64 TAGS: bindata gogit @@ -129,10 +138,10 @@ steps: path: /go - name: build-backend-windows - image: golang:1.17 + image: golang:1.18 environment: GO111MODULE: on - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io GOOS: windows GOARCH: amd64 TAGS: bindata gogit @@ -144,10 +153,10 @@ steps: path: /go - name: build-backend-386 - image: golang:1.17 + image: golang:1.18 environment: GO111MODULE: on - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io GOOS: linux GOARCH: 386 commands: @@ -226,6 +235,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force when: event: @@ -233,7 +243,7 @@ steps: - pull_request - name: deps-backend - image: golang:1.17 + image: golang:1.18 pull: always commands: - make deps-backend @@ -260,7 +270,7 @@ steps: - ./build/test-env-check.sh - make backend environment: - GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not + GOPROXY: https://goproxy.io # 
proxy.golang.org is blocked in China, this proxy is not GOSUMDB: sum.golang.org TAGS: bindata sqlite sqlite_unlock_notify depends_on: [deps-backend, prepare-test-env] @@ -274,7 +284,7 @@ steps: commands: - make unit-test-coverage test-check environment: - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io TAGS: bindata sqlite sqlite_unlock_notify RACE_ENABLED: true GITHUB_READ_TOKEN: @@ -290,7 +300,7 @@ steps: commands: - make unit-test-coverage test-check environment: - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io TAGS: bindata gogit sqlite sqlite_unlock_notify RACE_ENABLED: true GITHUB_READ_TOKEN: @@ -306,7 +316,7 @@ steps: commands: - make test-mysql-migration integration-test-coverage environment: - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io TAGS: bindata RACE_ENABLED: true TEST_LDAP: 1 @@ -323,7 +333,7 @@ steps: commands: - timeout -s ABRT 40m make test-mysql8-migration test-mysql8 environment: - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io TAGS: bindata RACE_ENABLED: true TEST_LDAP: 1 @@ -339,7 +349,7 @@ steps: commands: - make test-mssql-migration test-mssql environment: - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io TAGS: bindata RACE_ENABLED: true TEST_LDAP: 1 @@ -350,11 +360,11 @@ steps: path: /go - name: generate-coverage - image: golang:1.17 + image: golang:1.18 commands: - make coverage environment: - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io TAGS: bindata depends_on: [unit-test, test-mysql] when: @@ -418,6 +428,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force when: event: @@ -425,7 +436,7 @@ steps: - pull_request - name: deps-backend - image: golang:1.17 + image: golang:1.18 pull: always commands: - make deps-backend @@ -446,7 +457,7 @@ steps: - ./build/test-env-check.sh - make backend environment: - GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not + GOPROXY: https://goproxy.io # proxy.golang.org is blocked in China, this proxy is not GOSUMDB: sum.golang.org TAGS: bindata gogit sqlite sqlite_unlock_notify depends_on: [deps-backend, prepare-test-env] @@ -460,7 +471,7 @@ steps: commands: - timeout -s ABRT 40m make test-sqlite-migration test-sqlite environment: - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io TAGS: bindata gogit sqlite sqlite_unlock_notify RACE_ENABLED: true TEST_TAGS: gogit sqlite sqlite_unlock_notify @@ -476,7 +487,7 @@ steps: commands: - timeout -s ABRT 40m make test-pgsql-migration test-pgsql environment: - GOPROXY: https://goproxy.cn + GOPROXY: https://goproxy.io TAGS: bindata gogit RACE_ENABLED: true TEST_TAGS: gogit @@ -567,7 +578,7 @@ trigger: steps: - name: download - image: golang:1.17 + image: golang:1.18 pull: always commands: - timeout -s ABRT 40m make generate-license generate-gitignore @@ -619,6 +630,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force - name: deps-frontend @@ -628,7 +640,7 @@ steps: - make deps-frontend - name: deps-backend - image: golang:1.17 + image: golang:1.18 pull: always commands: - make deps-backend @@ -637,14 +649,14 @@ steps: path: /go - name: static - image: techknowlogick/xgo:go-1.17.x + image: techknowlogick/xgo:go-1.18.x pull: always commands: - curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs - export PATH=$PATH:$GOPATH/bin - make release environment: - GOPROXY: https://goproxy.cn # 
proxy.golang.org is blocked in China, this proxy is not + GOPROXY: https://goproxy.io # proxy.golang.org is blocked in China, this proxy is not TAGS: bindata sqlite sqlite_unlock_notify volumes: - name: deps @@ -737,6 +749,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force - name: deps-frontend @@ -746,7 +759,7 @@ steps: - make deps-frontend - name: deps-backend - image: golang:1.17 + image: golang:1.18 pull: always commands: - make deps-backend @@ -755,14 +768,14 @@ steps: path: /go - name: static - image: techknowlogick/xgo:go-1.17.x + image: techknowlogick/xgo:go-1.18.x pull: always commands: - curl -sL https://deb.nodesource.com/setup_16.x | bash - && apt-get install -y nodejs - export PATH=$PATH:$GOPATH/bin - make release environment: - GOPROXY: https://goproxy.cn # proxy.golang.org is blocked in China, this proxy is not + GOPROXY: https://goproxy.io # proxy.golang.org is blocked in China, this proxy is not TAGS: bindata sqlite sqlite_unlock_notify depends_on: [fetch-tags] volumes: @@ -882,6 +895,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force - name: publish @@ -892,7 +906,7 @@ steps: auto_tag_suffix: linux-amd64 repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -910,7 +924,7 @@ steps: auto_tag_suffix: linux-amd64-rootless repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -945,6 +959,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force - name: publish @@ -955,7 +970,7 @@ steps: tags: dev-linux-amd64 repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -973,7 +988,7 @@ steps: tags: dev-linux-amd64-rootless repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -1007,6 +1022,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force - name: publish @@ -1017,7 +1033,7 @@ steps: tags: ${DRONE_BRANCH##release/v}-dev-linux-amd64 repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -1035,7 +1051,7 @@ steps: tags: ${DRONE_BRANCH##release/v}-dev-linux-amd64-rootless repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -1070,7 +1086,7 @@ steps: repo: gitea/gitea tags: linux-arm64 build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io environment: PLUGIN_MIRROR: from_secret: plugin_mirror @@ -1103,6 +1119,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force - name: publish @@ -1113,7 +1130,7 @@ steps: auto_tag_suffix: linux-arm64 repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -1131,7 +1148,7 @@ steps: auto_tag_suffix: linux-arm64-rootless repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io 
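For context (illustration only, not part of the patch): in the publish steps above, GOPROXY is a Docker build argument rather than a runtime setting; it only has an effect because the Dockerfile changed further down in this diff declares `ARG GOPROXY` and `ENV GOPROXY ${GOPROXY:-direct}`. A minimal sketch of that flow, where the image tag, output name and build context are illustrative assumptions:

```sh
# roughly what the Drone docker plugin's build_args amount to (sketch only)
docker build --build-arg GOPROXY=https://goproxy.io -t gitea/gitea:local .

# inside the build stage the value reaches the Go toolchain as an ordinary
# environment variable, the same way the CI steps above export it directly
GOPROXY=https://goproxy.io GOSUMDB=sum.golang.org go build -o gitea .
```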
password: from_secret: docker_password username: @@ -1166,6 +1183,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force - name: publish @@ -1176,7 +1194,7 @@ steps: tags: dev-linux-arm64 repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -1194,7 +1212,7 @@ steps: tags: dev-linux-arm64-rootless repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -1228,6 +1246,7 @@ steps: image: docker:git pull: always commands: + - git config --global --add safe.directory /drone/src - git fetch --tags --force - name: publish @@ -1238,7 +1257,7 @@ steps: tags: ${DRONE_BRANCH##release/v}-dev-linux-arm64 repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: @@ -1256,7 +1275,7 @@ steps: tags: ${DRONE_BRANCH##release/v}-dev-linux-arm64-rootless repo: gitea/gitea build_args: - - GOPROXY=https://goproxy.cn + - GOPROXY=https://goproxy.io password: from_secret: docker_password username: diff --git a/.eslintrc b/.eslintrc index 6de0c1f9fa..77d9dc1228 100644 --- a/.eslintrc +++ b/.eslintrc @@ -280,7 +280,7 @@ rules: no-unused-expressions: [2] no-unused-labels: [2] no-unused-private-class-members: [2] - no-unused-vars: [2, {args: all, argsIgnorePattern: ^_, varsIgnorePattern: ^_, caughtErrorsIgnorePattern: ^_, ignoreRestSiblings: false}] + no-unused-vars: [2, {args: all, argsIgnorePattern: ^_, varsIgnorePattern: ^_, caughtErrorsIgnorePattern: ^_, destructuredArrayIgnorePattern: ^_, ignoreRestSiblings: false}] no-use-before-define: [2, nofunc] no-useless-backreference: [0] no-useless-call: [2] diff --git a/.github/ISSUE_TEMPLATE/bug-report.yaml b/.github/ISSUE_TEMPLATE/bug-report.yaml index 286bfec3e4..9dacad0d5f 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yaml +++ b/.github/ISSUE_TEMPLATE/bug-report.yaml @@ -1,5 +1,6 @@ name: Bug Report description: Found something you weren't expecting? Report it here! +labels: kind/bug body: - type: markdown attributes: @@ -19,6 +20,13 @@ body: 6. In particular it's really important to provide pertinent logs. You must give us DEBUG level logs. Please read https://docs.gitea.io/en-us/logging-configuration/#debugging-problems In addition, if your problem relates to git commands set `RUN_MODE=dev` at the top of app.ini +- type: textarea + id: description + attributes: + label: Description + description: | + Please provide a description of your issue here, with a URL if you were able to reproduce the issue (see below) + If you are using a proxy or a CDN (e.g. Cloudflare) in front of Gitea, please disable the proxy/CDN fully and access Gitea directly to confirm the issue still persists without those services. - type: input id: gitea-ver attributes: @@ -26,6 +34,34 @@ body: description: Gitea version (or commit reference) of your instance validations: required: true +- type: dropdown + id: can-reproduce + attributes: + label: Can you reproduce the bug on the Gitea demo site? 
+ description: | + If so, please provide a URL in the Description field + URL of Gitea demo: https://try.gitea.io + options: + - "Yes" + - "No" + validations: + required: true +- type: markdown + attributes: + value: | + It's really important to provide pertinent logs + Please read https://docs.gitea.io/en-us/logging-configuration/#debugging-problems + In addition, if your problem relates to git commands set `RUN_MODE=dev` at the top of app.ini +- type: input + id: logs + attributes: + label: Log Gist + description: Please provide a gist URL of your logs, with any sensitive information (e.g. API keys) removed/hidden +- type: textarea + id: screenshots + attributes: + label: Screenshots + description: If this issue involves the Web Interface, please provide one or more screenshots - type: input id: git-ver attributes: @@ -56,38 +92,3 @@ body: - MySQL - MSSQL - SQLite -- type: dropdown - id: can-reproduce - attributes: - label: Can you reproduce the bug on the Gitea demo site? - description: | - If so, please provide a URL in the Description field - URL of Gitea demo: https://try.gitea.io - options: - - "Yes" - - "No" - validations: - required: true -- type: markdown - attributes: - value: | - It's really important to provide pertinent logs - Please read https://docs.gitea.io/en-us/logging-configuration/#debugging-problems - In addition, if your problem relates to git commands set `RUN_MODE=dev` at the top of app.ini -- type: input - id: logs - attributes: - label: Log Gist - description: Please provide a gist URL of your logs, with any sensitive information (e.g. API keys) removed/hidden -- type: textarea - id: description - attributes: - label: Description - description: | - Please provide a description of your issue here, with a URL if you were able to reproduce the issue (see above) - If you are using a proxy or a CDN (e.g. Cloudflare) in front of Gitea, please disable the proxy/CDN fully and access Gitea directly to confirm the issue still persists without those services. -- type: textarea - id: screenshots - attributes: - label: Screenshots - description: If this issue involves the Web Interface, please provide one or more screenshots diff --git a/.github/ISSUE_TEMPLATE/feature-request.yaml b/.github/ISSUE_TEMPLATE/feature-request.yaml index 69b338ddf8..37f57c8f23 100644 --- a/.github/ISSUE_TEMPLATE/feature-request.yaml +++ b/.github/ISSUE_TEMPLATE/feature-request.yaml @@ -1,5 +1,6 @@ name: Feature Request description: Got an idea for a feature that Gitea doesn't have currently? Submit your idea here! +labels: ["kind/feature", "kind/proposal"] body: - type: markdown attributes: diff --git a/.github/ISSUE_TEMPLATE/ui.bug-report.yaml b/.github/ISSUE_TEMPLATE/ui.bug-report.yaml index 2c8edafaf8..80db52d7f1 100644 --- a/.github/ISSUE_TEMPLATE/ui.bug-report.yaml +++ b/.github/ISSUE_TEMPLATE/ui.bug-report.yaml @@ -1,5 +1,6 @@ name: Web Interface Bug Report description: Something doesn't look quite as it should? Report it here! +labels: ["kind/bug", "kind/ui"] body: - type: markdown attributes: @@ -18,6 +19,20 @@ body: 6. In particular it's really important to provide pertinent logs. If you are certain that this is a javascript error, show us the javascript console. If the error appears to relate to Gitea the server you must also give us DEBUG level logs. 
(See https://docs.gitea.io/en-us/logging-configuration/#debugging-problems) +- type: textarea + id: description + attributes: + label: Description + description: | + Please provide a description of your issue here, with a URL if you were able to reproduce the issue (see below) + If using a proxy or a CDN (e.g. CloudFlare) in front of gitea, please disable the proxy/CDN fully and connect to gitea directly to confirm the issue still persists without those services. +- type: textarea + id: screenshots + attributes: + label: Screenshots + description: Please provide at least 1 screenshot showing the issue. + validations: + required: true - type: input id: gitea-ver attributes: @@ -25,18 +40,6 @@ body: description: Gitea version (or commit reference) your instance is running validations: required: true -- type: input - id: os-ver - attributes: - label: Operating System - description: The operating system you are using to access Gitea -- type: input - id: browser-ver - attributes: - label: Browser Version - description: The browser and version that you are using to access Gitea - validations: - required: true - type: dropdown id: can-reproduce attributes: @@ -49,17 +52,15 @@ body: - "No" validations: required: true -- type: textarea - id: description +- type: input + id: os-ver attributes: - label: Description - description: | - Please provide a description of your issue here, with a URL if you were able to reproduce the issue (see above) - If using a proxy or a CDN (e.g. CloudFlare) in front of gitea, please disable the proxy/CDN fully and connect to gitea directly to confirm the issue still persists without those services. -- type: textarea - id: screenshots + label: Operating System + description: The operating system you are using to access Gitea +- type: input + id: browser-ver attributes: - label: Screenshots - description: Please provide at least 1 screenshot showing the issue. + label: Browser Version + description: The browser and version that you are using to access Gitea validations: required: true diff --git a/.golangci.yml b/.golangci.yml index 91faa75d73..8e31d0cbc4 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -18,12 +18,14 @@ linters: - ineffassign - revive - gofumpt + - depguard enable-all: false disable-all: true fast: false run: - timeout: 3m + go: 1.18 + timeout: 10m skip-dirs: - node_modules - public @@ -64,7 +66,15 @@ linters-settings: - name: modifies-value-receiver gofumpt: extra-rules: true - lang-version: 1.16 + lang-version: "1.18" + depguard: + # TODO: use depguard to replace import checks in gitea-vet + list-type: denylist + # Check the list against standard lib. + include-go-root: true + packages-with-error-message: + - encoding/json: "use gitea's modules/json instead of encoding/json" + - github.com/unknwon/com: "use gitea's util and replacements" issues: exclude-rules: @@ -152,3 +162,6 @@ issues: - path: models/user/openid.go linters: - golint + - linters: + - staticcheck + text: "strings.Title is deprecated: The rule Title uses for word boundaries does not handle Unicode punctuation properly. Use golang.org/x/text/cases instead." diff --git a/CHANGELOG.md b/CHANGELOG.md index d93ea55776..16499cb916 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,194 @@ This changelog goes through all the changes that have been made in each release without substantial changes to our git log; to see the highlights of what has been added to each release, please refer to the [blog](https://blog.gitea.io). 
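A side note on the `.golangci.yml` hunk above (illustration, not part of the patch): the `--timeout 10m` flag that the Makefile used to pass on the command line moves into the config as `timeout: 10m`, and the config now also pins `go: 1.18`, so the linter needs no extra flags when invoked at the version pinned in the Makefile hunk later in this diff:

```sh
# run golangci-lint at the pinned version; the 10m timeout and the go 1.18
# language setting now come from .golangci.yml instead of CLI flags
go run github.com/golangci/golangci-lint/cmd/golangci-lint@v1.44.2 run
```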
+## [1.16.7](https://github.com/go-gitea/gitea/releases/tag/v1.16.7) - 2022-05-02 + +* SECURITY + * Escape git fetch remote (#19487) (#19490) +* BUGFIXES + * Don't overwrite err with nil (#19572) (#19574) + * On Migrations, only write commit-graph if wiki clone was successful (#19563) (#19568) + * Respect DefaultUserIsRestricted system default when creating new user (#19310) (#19560) + * Don't error when branch's commit doesn't exist (#19547) (#19548) + * Support `hostname:port` to pass host matcher's check (#19543) (#19544) + * Prevent intermittent race in attribute reader close (#19537) (#19539) + * Fix 64-bit atomic operations on 32-bit machines (#19531) (#19532) + * Prevent dangling archiver goroutine (#19516) (#19526) + * Fix migrate release from github (#19510) (#19523) + * When view _Siderbar or _Footer, just display once (#19501) (#19522) + * Fix blame page select range error and some typos (#19503) + * Fix name of doctor fix "authorized-keys" in hints (#19464) (#19484) + * User specific repoID or xorm builder conditions for issue search (#19475) (#19476) + * Prevent dangling cat-file calls (goroutine alternative) (#19454) (#19466) + * RepoAssignment ensure to close before overwrite (#19449) (#19460) + * Set correct PR status on 3way on conflict checking (#19457) (#19458) + * Mark TemplateLoading error as "UnprocessableEntity" (#19445) (#19446) + +## [1.16.6](https://github.com/go-gitea/gitea/releases/tag/v1.16.6) - 2022-04-20 + +* ENHANCEMENTS + * Only request write when necessary (#18657) (#19422) + * Disable service worker by default (#18914) (#19342) +* BUGFIXES + * When dumping trim the standard suffices instead of a random suffix (#19440) (#19447) + * Fix DELETE request for non-existent public key (#19443) (#19444) + * Don't panic on ErrEmailInvalid (#19441) (#19442) + * Add uploadpack.allowAnySHA1InWant to allow --filter=blob:none with older git clients (#19430) (#19438) + * Warn on SSH connection for incorrect configuration (#19317) (#19437) + * Search Issues via API, dont show 500 if filter result in empty list (#19244) (#19436) + * When updating mirror repo intervals by API reschedule next update too (#19429) (#19433) + * Fix nil error when some pages are rendered outside request context (#19427) (#19428) + * Fix double blob-hunk on diff page (#19404) (#19405) + * Don't allow merging PR's which are being conflict checked (#19357) (#19358) + * Fix middleware function's placements (#19377) (#19378) + * Fix invalid CSRF token bug, make sure CSRF tokens can be up-to-date (#19338) + * Restore user autoregistration with email addresses (#19261) (#19312) + * Move checks for pulls before merge into own function (#19271) (#19277) + * Granular webhook events in editHook (#19251) (#19257) + * Only send webhook events to active system webhooks and only deliver to active hooks (#19234) (#19248) + * Use full output of git show-ref --tags to get tags for PushUpdateAddTag (#19235) (#19236) + * Touch mirrors on even on fail to update (#19217) (#19233) + * Hide sensitive content on admin panel progress monitor (#19218 & #19226) (#19231) + * Fix clone url JS error for the empty repo page (#19209) + * Bump goldmark to v1.4.11 (#19201) (#19203) +* TESTING + * Prevent intermittent failures in RepoIndexerTest (#19225 #19229) (#19228) +* BUILD + * Revert the minimal golang version requirement from 1.17 to 1.16 and add a warning in Makefile (#19319) +* MISC + * Performance improvement for add team user when org has more than 1000 repositories (#19227) (#19289) + * Check go and nodejs version by 
go.mod and package.json (#19197) (#19254) + +## [1.16.5](https://github.com/go-gitea/gitea/releases/tag/v1.16.5) - 2022-03-23 + +* BREAKING + * Bump to build with go1.18 (#19120 et al) (#19127) +* SECURITY + * Prevent redirect to Host (2) (#19175) (#19186) + * Try to prevent autolinking of displaynames by email readers (#19169) (#19183) + * Clean paths when looking in Storage (#19124) (#19179) + * Do not send notification emails to inactive users (#19131) (#19139) + * Do not send activation email if manual confirm is set (#19119) (#19122) +* ENHANCEMENTS + * Use the new/choose link for New Issue on project page (#19172) (#19176) +* BUGFIXES + * Fix showing issues in your repositories (#18916) (#19191) + * Fix compare link in active feeds for new branch (#19149) (#19185) + * Redirect .wiki/* ui link to /wiki (#18831) (#19184) + * Ensure deploy keys with write access can push (#19010) (#19182) + * Ensure that setting.LocalURL always has a trailing slash (#19171) (#19177) + * Cleanup protected branches when deleting users & teams (#19158) (#19174) + * Use IterateBufferSize whilst querying repositories during adoption check (#19140) (#19160) + * Fix NPE /repos/issues/search when not signed in (#19154) (#19155) + * Use custom favicon when viewing static files if it exists (#19130) (#19152) + * Fix the editor height in review box (#19003) (#19147) + * Ensure isSSH is set whenever DISABLE_HTTP_GIT is set (#19028) (#19146) + * Fix wrong scopes caused by empty scope input (#19029) (#19145) + * Make migrations SKIP_TLS_VERIFY apply to git too (#19132) (#19141) + * Handle email address not exist (#19089) (#19121) +* MISC + * Update json-iterator to allow compilation with go1.18 (#18644) (#19100) + * Update golang.org/x/crypto (#19097) (#19098) + +## [1.16.4](https://github.com/go-gitea/gitea/releases/tag/v1.16.4) - 2022-03-14 + +* SECURITY + * Restrict email address validation (#17688) (#19085) + * Fix lfs bug (#19072) (#19080) +* ENHANCEMENTS + * Improve SyncMirrors logging (#19045) (#19050) +* BUGFIXES + * Refactor mirror code & fix `StartToMirror` (#18904) (#19075) + * Update the webauthn_credential_id_sequence in Postgres (#19048) (#19060) + * Prevent 500 when there is an error during new auth source post (#19041) (#19059) + * If rendering has failed due to a net.OpError stop rendering (attempt 2) (#19049) (#19056) + * Fix flag validation (#19046) (#19051) + * Add pam account authorization check (#19040) (#19047) + * Ignore missing comment for user notifications (#18954) (#19043) + * Set `rel="nofollow noindex"` on new issue links (#19023) (#19042) + * Upgrading binding package (#19034) (#19035) + * Don't show context cancelled errors in attribute reader (#19006) (#19027) + * Fix update hint bug (#18996) (#19002) +* MISC + * Fix potential assignee query for repo (#18994) (#18999) + +## [1.16.3](https://github.com/go-gitea/gitea/releases/tag/v1.16.3) - 2022-03-02 + +* SECURITY + * Git backend ignore replace objects (#18979) (#18980) +* ENHANCEMENTS + * Adjust error for already locked db and prevent level db lock on malformed connstr (#18923) (#18938) +* BUGFIXES + * Set max text height to prevent overflow (#18862) (#18977) + * Fix newAttachmentPaths deletion for DeleteRepository() (#18973) (#18974) + * Accounts with WebAuthn only (no TOTP) now exist ... 
fix code to handle that case (#18897) (#18964) + * Send 404 on `/{org}.gpg` (#18959) (#18962) + * Fix admin user list pagination (#18957) (#18960) + * Fix lfs management setting (#18947) (#18946) + * Fix login with email panic when email is not exist (#18942) + * Update go-org to v1.6.1 (#18932) (#18933) + * Fix `` html in translation (#18929) (#18931) + * Fix page and missing return on unadopted repos API (#18848) (#18927) + * Allow adminstrator teams members to see other teams (#18918) (#18919) + * Don't treat BOM escape sequence as hidden character. (#18909) (#18910) + * Correctly link URLs to users/repos with dashes, dots or underscores (… (#18908) + * Fix redirect when using lowercase repo name (#18775) (#18902) + * Fix migration v210 (#18893) (#18892) + * Fix team management UI (#18887) (18886) + * BeforeSourcePath should point to base commit (#18880) (#18799) +* TRANSLATION + * Backport locales from master (#18944) +* MISC + * Don't update email for organisation (#18905) (#18906) + +## [1.16.2](https://github.com/go-gitea/gitea/releases/tag/v1.16.2) - 2022-02-24 + +* ENHANCEMENTS + * Show fullname on issue edits and gpg/ssh signing info (#18828) + * Immediately Hammer if second kill is sent (#18823) (#18826) + * Allow mermaid render error to wrap (#18791) +* BUGFIXES + * Fix ldap user sync missed email in email_address table (#18786) (#18876) + * Update assignees check to include any writing team and change org sidebar (#18680) (#18873) + * Don't report signal: killed errors in serviceRPC (#18850) (#18865) + * Fix bug where certain LDAP settings were reverted (#18859) + * Update go-org to 1.6.0 (#18824) (#18839) + * Fix login with email for ldap users (#18800) (#18836) + * Fix bug for get user by email (#18834) + * Fix panic in EscapeReader (#18820) (#18821) + * Fix ldap loginname (#18789) (#18804) + * Remove redundant call to UpdateRepoStats during migration (#18591) (#18794) + * In disk_channel queues synchronously push to disk on shutdown (#18415) (#18788) + * Fix template bug of LFS lock (#18784) (#18787) + * Attempt to fix the webauthn migration again - part 3 (#18770) (#18771) + * Send mail to issue/pr assignee/reviewer also when OnMention is set (#18707) (#18765) + * Fix a broken link in commits_list_small.tmpl (#18763) (#18764) + * Increase the size of the webauthn_credential credential_id field (#18739) (#18756) + * Prevent dangling GetAttribute calls (#18754) (#18755) + * Fix isempty detection of git repository (#18746) (#18750) + * Fix source code line highlighting on external tracker (#18729) (#18740) + * Prevent double encoding of branch names in delete branch (#18714) (#18738) + * Always set PullRequestWorkInProgressPrefixes in PrepareViewPullInfo (#18713) (#18737) + * Fix forked repositories missed tags (#18719) (#18735) + * Fix release typo (#18728) (#18731) + * Separate the details links of commit-statuses in headers (#18661) (#18730) + * Update object repo with the migrated repository (#18684) (#18726) + * Fix bug for version update hint (#18701) (#18705) + * Fix issue with docker-rootless shimming script (#18690) (#18699) + * Let `MinUnitAccessMode` return correct perm (#18675) (#18689) + * Prevent security failure due to bad APP_ID (#18678) (#18682) + * Restart zero worker if there is still work to do (#18658) (#18672) + * If rendering has failed due to a net.OpError stop rendering (#18642) (#18645) +* TESTING + * Ensure git tag tests and others create test repos in tmpdir (#18447) (#18767) +* BUILD + * Reduce CI go module downloads, add make targets (#18708, 
#18475, #18443) (#18741) +* MISC + * Put buttons back in org dashboard (#18817) (#18825) + * Various Mermaid improvements (#18776) (#18780) + * C preprocessor colors improvement (#18671) (#18696) + * Fix the missing i18n key for update checker (#18646) (#18665) + ## [1.16.1](https://github.com/go-gitea/gitea/releases/tag/v1.16.1) - 2022-02-06 * SECURITY diff --git a/Dockerfile b/Dockerfile index 8cb88ae83e..973d93b784 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ #Build stage -FROM golang:1.17-alpine3.15 AS build-env +FROM golang:1.18-alpine3.15 AS build-env ARG GOPROXY ENV GOPROXY ${GOPROXY:-direct} diff --git a/Dockerfile.rootless b/Dockerfile.rootless index 78a6c3cb03..27e898c58e 100644 --- a/Dockerfile.rootless +++ b/Dockerfile.rootless @@ -1,5 +1,5 @@ #Build stage -FROM golang:1.17-alpine3.15 AS build-env +FROM golang:1.18-alpine3.15 AS build-env ARG GOPROXY ENV GOPROXY ${GOPROXY:-direct} diff --git a/Makefile b/Makefile index 74067597cb..ab112584c6 100644 --- a/Makefile +++ b/Makefile @@ -24,10 +24,17 @@ SHASUM ?= shasum -a 256 HAS_GO = $(shell hash $(GO) > /dev/null 2>&1 && echo "GO" || echo "NOGO" ) COMMA := , -XGO_VERSION := go-1.17.x -MIN_GO_VERSION := 001016000 -MIN_NODE_VERSION := 012017000 -MIN_GOLANGCI_LINT_VERSION := 001044000 +XGO_VERSION := go-1.18.x + +AIR_PACKAGE ?= github.com/cosmtrek/air@v1.29.0 +EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.4.0 +ERRCHECK_PACKAGE ?= github.com/kisielk/errcheck@v1.6.0 +GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.3.0 +GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.44.2 +GXZ_PAGAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.10 +MISSPELL_PACKAGE ?= github.com/client9/misspell/cmd/misspell@v0.3.4 +SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.29.0 +XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest DOCKER_IMAGE ?= gitea/gitea DOCKER_TAG ?= latest @@ -125,8 +132,6 @@ ifeq ($(filter $(TAGS_SPLIT),bindata),bindata) GO_SOURCES += $(BINDATA_DEST) endif -#To update swagger use: GO111MODULE=on go get -u github.com/go-swagger/go-swagger/cmd/swagger -SWAGGER := $(GO) run github.com/go-swagger/go-swagger/cmd/swagger SWAGGER_SPEC := templates/swagger/v1_json.tmpl SWAGGER_SPEC_S_TMPL := s|"basePath": *"/api/v1"|"basePath": "{{AppSubUrl \| JSEscape \| Safe}}/api/v1"|g SWAGGER_SPEC_S_JSON := s|"basePath": *"{{AppSubUrl \| JSEscape \| Safe}}/api/v1"|"basePath": "/api/v1"|g @@ -196,9 +201,11 @@ help: .PHONY: go-check go-check: + $(eval MIN_GO_VERSION_STR := $(shell grep -Eo '^go\s+[0-9]+\.[0-9.]+' go.mod | cut -d' ' -f2)) + $(eval MIN_GO_VERSION := $(shell printf "%03d%03d%03d" $(shell echo '$(MIN_GO_VERSION_STR)' | tr '.' ' '))) $(eval GO_VERSION := $(shell printf "%03d%03d%03d" $(shell $(GO) version | grep -Eo '[0-9]+\.[0-9.]+' | tr '.' ' ');)) @if [ "$(GO_VERSION)" -lt "$(MIN_GO_VERSION)" ]; then \ - echo "Gitea requires Go 1.16 or greater to build. You can get it at https://golang.org/dl/"; \ + echo "Gitea requires Go $(MIN_GO_VERSION_STR) or greater to build. You can get it at https://go.dev/dl/"; \ exit 1; \ fi @@ -211,11 +218,12 @@ git-check: .PHONY: node-check node-check: + $(eval MIN_NODE_VERSION_STR := $(shell grep -Eo '"node":.*[0-9.]+"' package.json | sed -n 's/.*[^0-9.]\([0-9.]*\)"/\1/p')) + $(eval MIN_NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell echo '$(MIN_NODE_VERSION_STR)' | tr '.' ' '))) $(eval NODE_VERSION := $(shell printf "%03d%03d%03d" $(shell node -v | cut -c2- | tr '.' 
' ');)) - $(eval MIN_NODE_VER_FMT := $(shell printf "%g.%g.%g" $(shell echo $(MIN_NODE_VERSION) | grep -o ...))) $(eval NPM_MISSING := $(shell hash npm > /dev/null 2>&1 || echo 1)) @if [ "$(NODE_VERSION)" -lt "$(MIN_NODE_VERSION)" -o "$(NPM_MISSING)" = "1" ]; then \ - echo "Gitea requires Node.js $(MIN_NODE_VER_FMT) or greater and npm to build. You can get it at https://nodejs.org/en/download/"; \ + echo "Gitea requires Node.js $(MIN_NODE_VERSION_STR) or greater and npm to build. You can get it at https://nodejs.org/en/download/"; \ exit 1; \ fi @@ -234,11 +242,8 @@ clean: .PHONY: fmt fmt: - @hash gofumpt > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install mvdan.cc/gofumpt@v0.3.0; \ - fi @echo "Running gitea-fmt (with gofumpt)..." - @$(GO) run build/code-batch-process.go gitea-fmt -w '{file-list}' + @MISSPELL_PACKAGE=$(MISSPELL_PACKAGE) GOFUMPT_PACKAGE=$(GOFUMPT_PACKAGE) $(GO) run build/code-batch-process.go gitea-fmt -w '{file-list}' .PHONY: vet vet: @@ -257,7 +262,7 @@ endif .PHONY: generate-swagger generate-swagger: - $(SWAGGER) generate spec -x "$(SWAGGER_EXCLUDE)" -o './$(SWAGGER_SPEC)' + $(GO) run $(SWAGGER_PACKAGE) generate spec -x "$(SWAGGER_EXCLUDE)" -o './$(SWAGGER_SPEC)' $(SED_INPLACE) '$(SWAGGER_SPEC_S_TMPL)' './$(SWAGGER_SPEC)' $(SED_INPLACE) $(SWAGGER_NEWLINE_COMMAND) './$(SWAGGER_SPEC)' @@ -273,24 +278,18 @@ swagger-check: generate-swagger .PHONY: swagger-validate swagger-validate: $(SED_INPLACE) '$(SWAGGER_SPEC_S_JSON)' './$(SWAGGER_SPEC)' - $(SWAGGER) validate './$(SWAGGER_SPEC)' + $(GO) run $(SWAGGER_PACKAGE) validate './$(SWAGGER_SPEC)' $(SED_INPLACE) '$(SWAGGER_SPEC_S_TMPL)' './$(SWAGGER_SPEC)' .PHONY: errcheck errcheck: - @hash errcheck > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install github.com/kisielk/errcheck@8ddee489636a8311a376fc92e27a6a13c6658344; \ - fi @echo "Running errcheck..." - @errcheck $(GO_PACKAGES) + $(GO) run $(ERRCHECK_PACKAGE) $(GO_PACKAGES) .PHONY: fmt-check fmt-check: - @hash gofumpt > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install mvdan.cc/gofumpt@0.3.0; \ - fi # get all go files and run gitea-fmt (with gofmt) on them - @diff=$$($(GO) run build/code-batch-process.go gitea-fmt -l '{file-list}'); \ + @diff=$$(MISSPELL_PACKAGE=$(MISSPELL_PACKAGE) GOFUMPT_PACKAGE=$(GOFUMPT_PACKAGE) $(GO) run build/code-batch-process.go gitea-fmt -l '{file-list}'); \ if [ -n "$$diff" ]; then \ echo "Please run 'make fmt' and commit the result:"; \ echo "$${diff}"; \ @@ -328,10 +327,7 @@ watch-frontend: node-check node_modules .PHONY: watch-backend watch-backend: go-check - @hash air > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install github.com/cosmtrek/air@bedc18201271882c2be66d216d0e1a275b526ec4; \ - fi - air -c .air.toml + $(GO) run $(AIR_PACKAGE) -c .air.toml .PHONY: test test: test-frontend test-backend @@ -611,12 +607,9 @@ $(DIST_DIRS): .PHONY: release-windows release-windows: | $(DIST_DIRS) - @hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install src.techknowlogick.com/xgo@latest; \ - fi - CGO_CFLAGS="$(CGO_CFLAGS)" xgo -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) . + CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION) . 
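A quick illustration (not from the patch) of the zero-padded encoding that the reworked `go-check` and `node-check` targets derive from `go.mod` and `package.json`: every version component is padded to three digits so a plain numeric test orders versions correctly. The concrete numbers below are examples, assuming `go.mod` declares `go 1.17` in line with the README change in this patch:

```sh
printf "%03d%03d%03d\n" 1 17        # "go 1.17" -> 001017000 (missing parts pad to 000)
printf "%03d%03d%03d\n" 1 16 5      # a hypothetical installed go1.16.5 -> 001016005
# 001016005 -lt 001017000 holds, so go-check would print its
# "Gitea requires Go 1.17 or greater" hint and exit 1
[ "001016005" -lt "001017000" ] && echo "too old"
```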
ifeq (,$(findstring gogit,$(TAGS))) - CGO_CFLAGS="$(CGO_CFLAGS)" xgo -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'netgo osusergo gogit $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION)-gogit . + CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -buildmode exe -dest $(DIST)/binaries -tags 'netgo osusergo gogit $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out gitea-$(VERSION)-gogit . endif ifeq ($(CI),drone) cp /build/* $(DIST)/binaries @@ -624,20 +617,14 @@ endif .PHONY: release-linux release-linux: | $(DIST_DIRS) - @hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install src.techknowlogick.com/xgo@latest; \ - fi - CGO_CFLAGS="$(CGO_CFLAGS)" xgo -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets '$(LINUX_ARCHS)' -out gitea-$(VERSION) . + CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets '$(LINUX_ARCHS)' -out gitea-$(VERSION) . ifeq ($(CI),drone) cp /build/* $(DIST)/binaries endif .PHONY: release-darwin release-darwin: | $(DIST_DIRS) - @hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install src.techknowlogick.com/xgo@latest; \ - fi - CGO_CFLAGS="$(CGO_CFLAGS)" xgo -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin-10.12/amd64,darwin-10.12/arm64' -out gitea-$(VERSION) . + CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) run $(XGO_PACKAGE) -go $(XGO_VERSION) -dest $(DIST)/binaries -tags 'netgo osusergo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin-10.12/amd64,darwin-10.12/arm64' -out gitea-$(VERSION) . ifeq ($(CI),drone) cp /build/* $(DIST)/binaries endif @@ -652,17 +639,16 @@ release-check: | $(DIST_DIRS) .PHONY: release-compress release-compress: | $(DIST_DIRS) - @hash gxz > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install github.com/ulikunitz/xz/cmd/gxz@v0.5.10; \ - fi - cd $(DIST)/release/; for file in `find . -type f -name "*"`; do echo "compressing $${file}" && gxz -k -9 $${file}; done; + cd $(DIST)/release/; for file in `find . -type f -name "*"`; do echo "compressing $${file}" && $(GO) run $(GXZ_PAGAGE) -k -9 $${file}; done; .PHONY: release-sources release-sources: | $(DIST_DIRS) echo $(VERSION) > $(STORED_VERSION_FILE) # bsdtar needs a ^ to prevent matching subdirectories $(eval EXCL := --exclude=$(shell tar --help | grep -q bsdtar && echo "^")./) - tar $(addprefix $(EXCL),$(TAR_EXCLUDES)) -czf $(DIST)/release/gitea-src-$(VERSION).tar.gz . +# use transform to a add a release-folder prefix; in bsdtar the transform parameter equivalent is -s + $(eval TRANSFORM := $(shell tar --help | grep -q bsdtar && echo "-s '/^./gitea-src-$(VERSION)/'" || echo "--transform 's|^./|gitea-src-$(VERSION)/|'")) + tar $(addprefix $(EXCL),$(TAR_EXCLUDES)) $(TRANSFORM) -czf $(DIST)/release/gitea-src-$(VERSION).tar.gz . 
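An illustration (not part of the patch) of what the new `TRANSFORM` variable does to the source tarball on GNU tar: the `--transform` expression prefixes every entry with a release directory, so extraction yields a single top-level folder instead of spilling files into the working directory; bsdtar gets the equivalent `-s` form. The version number and output path below are made-up examples:

```sh
# entries that would be stored as "./Makefile", "./cmd/..." become
# "gitea-src-1.17.0/Makefile", "gitea-src-1.17.0/cmd/..."
tar --transform 's|^./|gitea-src-1.17.0/|' -czf /tmp/gitea-src-1.17.0.tar.gz .
```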
rm -f $(STORED_VERSION_FILE) .PHONY: release-docs @@ -685,6 +671,15 @@ deps-frontend: node_modules .PHONY: deps-backend deps-backend: $(GO) mod download + $(GO) install $(AIR_PACKAGE) + $(GO) install $(EDITORCONFIG_CHECKER_PACKAGE) + $(GO) install $(ERRCHECK_PACKAGE) + $(GO) install $(GOFUMPT_PACKAGE) + $(GO) install $(GOLANGCI_LINT_PACKAGE) + $(GO) install $(GXZ_PAGAGE) + $(GO) install $(MISSPELL_PACKAGE) + $(GO) install $(SWAGGER_PACKAGE) + $(GO) install $(XGO_PACKAGE) node_modules: package-lock.json npm install --no-save @@ -778,29 +773,19 @@ pr\#%: clean-all $(GO) run contrib/pr/checkout.go $* .PHONY: golangci-lint -golangci-lint: golangci-lint-check - golangci-lint run --timeout 10m +golangci-lint: + $(GO) run $(GOLANGCI_LINT_PACKAGE) run -.PHONY: golangci-lint-check -golangci-lint-check: - $(eval GOLANGCI_LINT_VERSION := $(shell printf "%03d%03d%03d" $(shell golangci-lint --version | grep -Eo '[0-9]+\.[0-9.]+' | tr '.' ' ');)) - $(eval MIN_GOLANGCI_LINT_VER_FMT := $(shell printf "%g.%g.%g" $(shell echo $(MIN_GOLANGCI_LINT_VERSION) | grep -o ...))) - @hash golangci-lint > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - echo "Downloading golangci-lint v${MIN_GOLANGCI_LINT_VER_FMT}"; \ - export BINARY="golangci-lint"; \ - curl -sfL "https://raw.githubusercontent.com/golangci/golangci-lint/v${MIN_GOLANGCI_LINT_VER_FMT}/install.sh" | sh -s -- -b $(GOPATH)/bin v$(MIN_GOLANGCI_LINT_VER_FMT); \ - elif [ "$(GOLANGCI_LINT_VERSION)" -lt "$(MIN_GOLANGCI_LINT_VERSION)" ]; then \ - echo "Downloading newer version of golangci-lint v${MIN_GOLANGCI_LINT_VER_FMT}"; \ - export BINARY="golangci-lint"; \ - curl -sfL "https://raw.githubusercontent.com/golangci/golangci-lint/v${MIN_GOLANGCI_LINT_VER_FMT}/install.sh" | sh -s -- -b $(GOPATH)/bin v$(MIN_GOLANGCI_LINT_VER_FMT); \ - fi +# workaround step for the lint-backend-windows CI task because 'go run' can not +# have distinct GOOS/GOARCH for its build and run steps +.PHONY: golangci-lint-windows +golangci-lint-windows: + @GOOS= GOARCH= $(GO) install $(GOLANGCI_LINT_PACKAGE) + golangci-lint run .PHONY: editorconfig-checker editorconfig-checker: - @hash editorconfig-checker > /dev/null 2>&1; if [ $$? -ne 0 ]; then \ - $(GO) install github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@50adf46752da119dfef66e57be3ce2693ea4aa9c; \ - fi - editorconfig-checker templates + $(GO) run $(EDITORCONFIG_CHECKER_PACKAGE) templates .PHONY: docker docker: diff --git a/README.md b/README.md index bbe27fab4d..84c0524ed4 100644 --- a/README.md +++ b/README.md @@ -73,7 +73,7 @@ or if SQLite support is required: The `build` target is split into two sub-targets: -- `make backend` which requires [Go 1.16](https://golang.org/dl/) or greater. +- `make backend` which requires [Go 1.17](https://go.dev/dl/) or greater. - `make frontend` which requires [Node.js LTS](https://nodejs.org/en/download/) or greater and Internet connectivity to download npm dependencies. When building from the official source tarballs which include pre-built frontend files, the `frontend` target will not be triggered, making it possible to build without Node.js and Internet connectivity. diff --git a/build.go b/build.go index aa56141340..d379745c6d 100644 --- a/build.go +++ b/build.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build vendor -// +build vendor package main diff --git a/build/code-batch-process.go b/build/code-batch-process.go index 8139fe7623..0f8dbd40fe 100644 --- a/build/code-batch-process.go +++ b/build/code-batch-process.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore package main @@ -40,7 +39,7 @@ func passThroughCmd(cmd string, args []string) error { } c := exec.Cmd{ Path: foundCmd, - Args: args, + Args: append([]string{cmd}, args...), Stdin: os.Stdin, Stdout: os.Stdout, Stderr: os.Stderr, @@ -271,9 +270,9 @@ func main() { log.Print("the -d option is not supported by gitea-fmt") } cmdErrors = append(cmdErrors, giteaFormatGoImports(files, containsString(subArgs, "-l"), containsString(subArgs, "-w"))) - cmdErrors = append(cmdErrors, passThroughCmd("gofumpt", append([]string{"-extra", "-lang", "1.16"}, substArgs...))) + cmdErrors = append(cmdErrors, passThroughCmd("go", append([]string{"run", os.Getenv("GOFUMPT_PACKAGE"), "-extra", "-lang", "1.17"}, substArgs...))) case "misspell": - cmdErrors = append(cmdErrors, passThroughCmd("misspell", substArgs)) + cmdErrors = append(cmdErrors, passThroughCmd("go", append([]string{"run", os.Getenv("MISSPELL_PACKAGE")}, substArgs...))) default: log.Fatalf("unknown cmd: %s %v", subCmd, subArgs) } diff --git a/build/generate-bindata.go b/build/generate-bindata.go index 7fdf9d7616..ab81dd8938 100644 --- a/build/generate-bindata.go +++ b/build/generate-bindata.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore package main diff --git a/build/generate-emoji.go b/build/generate-emoji.go index 2f3536342d..a22f2a4571 100644 --- a/build/generate-emoji.go +++ b/build/generate-emoji.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore package main diff --git a/build/generate-gitignores.go b/build/generate-gitignores.go index 0f7d719d40..1e09c83a6a 100644 --- a/build/generate-gitignores.go +++ b/build/generate-gitignores.go @@ -1,5 +1,4 @@ //go:build ignore -// +build ignore package main diff --git a/build/generate-licenses.go b/build/generate-licenses.go index 0f9b9f369f..02b41a229a 100644 --- a/build/generate-licenses.go +++ b/build/generate-licenses.go @@ -1,5 +1,4 @@ //go:build ignore -// +build ignore package main diff --git a/build/gitea-format-imports.go b/build/gitea-format-imports.go index 67c8397b2d..c685ae68ee 100644 --- a/build/gitea-format-imports.go +++ b/build/gitea-format-imports.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build ignore -// +build ignore package main diff --git a/build/gocovmerge.go b/build/gocovmerge.go index 1d2652129f..dfe70efdad 100644 --- a/build/gocovmerge.go +++ b/build/gocovmerge.go @@ -7,7 +7,6 @@ // merges them into one profile //go:build ignore -// +build ignore package main diff --git a/cmd/admin.go b/cmd/admin.go index 70d44c39e0..fcf331751c 100644 --- a/cmd/admin.go +++ b/cmd/admin.go @@ -25,6 +25,7 @@ import ( repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/util" auth_service "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/auth/source/oauth2" "code.gitea.io/gitea/services/auth/source/smtp" @@ -114,6 +115,10 @@ var ( Name: "access-token", Usage: "Generate access token for the user", }, + cli.BoolFlag{ + Name: "restricted", + Usage: "Make a restricted user account", + }, }, } @@ -493,7 +498,7 @@ func runChangePassword(c *cli.Context) error { return err } - if err = user_model.UpdateUserCols(db.DefaultContext, user, "passwd", "passwd_hash_algo", "salt"); err != nil { + if err = user_model.UpdateUserCols(ctx, user, "passwd", "passwd_hash_algo", "salt"); err != nil { return err } @@ -551,7 +556,7 @@ func runCreateUser(c *cli.Context) error { // If this is the first user being created. // Take it as the admin and don't force a password update. - if n := user_model.CountUsers(); n == 0 { + if n := user_model.CountUsers(nil); n == 0 { changePassword = false } @@ -559,17 +564,26 @@ func runCreateUser(c *cli.Context) error { changePassword = c.Bool("must-change-password") } + restricted := util.OptionalBoolNone + + if c.IsSet("restricted") { + restricted = util.OptionalBoolOf(c.Bool("restricted")) + } + u := &user_model.User{ Name: username, Email: c.String("email"), Passwd: password, - IsActive: true, IsAdmin: c.Bool("admin"), MustChangePassword: changePassword, - Theme: setting.UI.DefaultTheme, } - if err := user_model.CreateUser(u); err != nil { + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsActive: util.OptionalBoolTrue, + IsRestricted: restricted, + } + + if err := user_model.CreateUser(u, overwriteDefault); err != nil { return fmt.Errorf("CreateUser: %v", err) } @@ -724,7 +738,7 @@ func runRepoSyncReleases(_ *cli.Context) error { log.Trace("Processing next %d repos of %d", len(repos), count) for _, repo := range repos { log.Trace("Synchronizing repo %s with path %s", repo.FullName(), repo.RepoPath()) - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { log.Warn("OpenRepository: %v", err) continue diff --git a/cmd/dump.go b/cmd/dump.go index 4180425598..ea41c0c029 100644 --- a/cmd/dump.go +++ b/cmd/dump.go @@ -7,6 +7,7 @@ package cmd import ( "fmt" + "io" "os" "path" "path/filepath" @@ -25,10 +26,21 @@ import ( "github.com/urfave/cli" ) -func addFile(w archiver.Writer, filePath, absPath string, verbose bool) error { +func addReader(w archiver.Writer, r io.ReadCloser, info os.FileInfo, customName string, verbose bool) error { if verbose { - log.Info("Adding file %s\n", filePath) + log.Info("Adding file %s", customName) } + + return w.Write(archiver.File{ + FileInfo: archiver.FileInfo{ + FileInfo: info, + CustomName: customName, + }, + ReadCloser: r, + }) +} + +func addFile(w archiver.Writer, filePath, absPath string, verbose bool) error { file, err := os.Open(absPath) if err != nil { return err @@ -39,13 +51,7 @@ func addFile(w archiver.Writer, 
filePath, absPath string, verbose bool) error { return err } - return w.Write(archiver.File{ - FileInfo: archiver.FileInfo{ - FileInfo: fileInfo, - CustomName: filePath, - }, - ReadCloser: file, - }) + return addReader(w, file, fileInfo, filePath, verbose) } func isSubdir(upper, lower string) (bool, error) { @@ -86,7 +92,7 @@ func (o outputType) String() string { } var outputTypeEnum = &outputType{ - Enum: []string{"zip", "rar", "tar", "sz", "tar.gz", "tar.xz", "tar.bz2", "tar.br", "tar.lz4"}, + Enum: []string{"zip", "tar", "tar.sz", "tar.gz", "tar.xz", "tar.bz2", "tar.br", "tar.lz4"}, Default: "zip", } @@ -136,6 +142,10 @@ It can be used for backup and capture Gitea server image to send to maintainer`, Name: "skip-attachment-data", Usage: "Skip attachment data", }, + cli.BoolFlag{ + Name: "skip-package-data", + Usage: "Skip package data", + }, cli.GenericFlag{ Name: "type", Value: outputTypeEnum, @@ -160,7 +170,12 @@ func runDump(ctx *cli.Context) error { fatal("Deleting default logger failed. Can not write to stdout: %v", err) } } else { - fileName = strings.TrimSuffix(fileName, path.Ext(fileName)) + for _, suffix := range outputTypeEnum.Enum { + if strings.HasSuffix(fileName, "."+suffix) { + fileName = strings.TrimSuffix(fileName, "."+suffix) + break + } + } fileName += "." + outType } setting.LoadFromExisting() @@ -236,13 +251,7 @@ func runDump(ctx *cli.Context) error { return err } - return w.Write(archiver.File{ - FileInfo: archiver.FileInfo{ - FileInfo: info, - CustomName: path.Join("data", "lfs", objPath), - }, - ReadCloser: object, - }) + return addReader(w, object, info, path.Join("data", "lfs", objPath), verbose) }); err != nil { fatal("Failed to dump LFS objects: %v", err) } @@ -321,6 +330,7 @@ func runDump(ctx *cli.Context) error { excludes = append(excludes, setting.RepoRootPath) excludes = append(excludes, setting.LFS.Path) excludes = append(excludes, setting.Attachment.Path) + excludes = append(excludes, setting.Packages.Path) excludes = append(excludes, setting.LogRootPath) excludes = append(excludes, absFileName) if err := addRecursiveExclude(w, "data", setting.AppDataPath, excludes, verbose); err != nil { @@ -336,17 +346,24 @@ func runDump(ctx *cli.Context) error { return err } - return w.Write(archiver.File{ - FileInfo: archiver.FileInfo{ - FileInfo: info, - CustomName: path.Join("data", "attachments", objPath), - }, - ReadCloser: object, - }) + return addReader(w, object, info, path.Join("data", "attachments", objPath), verbose) }); err != nil { fatal("Failed to dump attachments: %v", err) } + if ctx.IsSet("skip-package-data") && ctx.Bool("skip-package-data") { + log.Info("Skip dumping package data") + } else if err := storage.Packages.IterateObjects(func(objPath string, object storage.Object) error { + info, err := object.Stat() + if err != nil { + return err + } + + return addReader(w, object, info, path.Join("data", "packages", objPath), verbose) + }); err != nil { + fatal("Failed to dump packages: %v", err) + } + // Doesn't check if LogRootPath exists before processing --skip-log intentionally, // ensuring that it's clear the dump is skipped whether the directory's initialized // yet or not. diff --git a/cmd/embedded.go b/cmd/embedded.go index 2930e5d307..30fc7103d8 100644 --- a/cmd/embedded.go +++ b/cmd/embedded.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build bindata -// +build bindata package cmd diff --git a/cmd/embedded_stub.go b/cmd/embedded_stub.go index 0e9e3e6ec3..26228256f2 100644 --- a/cmd/embedded_stub.go +++ b/cmd/embedded_stub.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !bindata -// +build !bindata package cmd diff --git a/cmd/hook.go b/cmd/hook.go index 1dd59e8192..8078763b18 100644 --- a/cmd/hook.go +++ b/cmd/hook.go @@ -15,9 +15,9 @@ import ( "strings" "time" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/private" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" @@ -162,7 +162,7 @@ func (n *nilWriter) WriteString(s string) (int, error) { } func runHookPreReceive(c *cli.Context) error { - if os.Getenv(models.EnvIsInternal) == "true" { + if isInternal, _ := strconv.ParseBool(os.Getenv(repo_module.EnvIsInternal)); isInternal { return nil } ctx, cancel := installSignals() @@ -180,12 +180,12 @@ Gitea or set your environment appropriately.`, "") } // the environment is set by serv command - isWiki := os.Getenv(models.EnvRepoIsWiki) == "true" - username := os.Getenv(models.EnvRepoUsername) - reponame := os.Getenv(models.EnvRepoName) - userID, _ := strconv.ParseInt(os.Getenv(models.EnvPusherID), 10, 64) - prID, _ := strconv.ParseInt(os.Getenv(models.EnvPRID), 10, 64) - isDeployKey, _ := strconv.ParseBool(os.Getenv(models.EnvIsDeployKey)) + isWiki, _ := strconv.ParseBool(os.Getenv(repo_module.EnvRepoIsWiki)) + username := os.Getenv(repo_module.EnvRepoUsername) + reponame := os.Getenv(repo_module.EnvRepoName) + userID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPusherID), 10, 64) + prID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPRID), 10, 64) + deployKeyID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvDeployKeyID), 10, 64) hookOptions := private.HookOptions{ UserID: userID, @@ -194,7 +194,7 @@ Gitea or set your environment appropriately.`, "") GitQuarantinePath: os.Getenv(private.GitQuarantinePath), GitPushOptions: pushOptions(), PullRequestID: prID, - IsDeployKey: isDeployKey, + DeployKeyID: deployKeyID, } scanner := bufio.NewScanner(os.Stdin) @@ -309,12 +309,12 @@ func runHookPostReceive(c *cli.Context) error { defer cancel() // First of all run update-server-info no matter what - if _, err := git.NewCommand(ctx, "update-server-info").Run(); err != nil { + if _, _, err := git.NewCommand(ctx, "update-server-info").RunStdString(nil); err != nil { return fmt.Errorf("Failed to call 'git update-server-info': %v", err) } // Now if we're an internal don't do anything else - if os.Getenv(models.EnvIsInternal) == "true" { + if isInternal, _ := strconv.ParseBool(os.Getenv(repo_module.EnvIsInternal)); isInternal { return nil } @@ -343,11 +343,11 @@ Gitea or set your environment appropriately.`, "") } // the environment is set by serv command - repoUser := os.Getenv(models.EnvRepoUsername) - isWiki := os.Getenv(models.EnvRepoIsWiki) == "true" - repoName := os.Getenv(models.EnvRepoName) - pusherID, _ := strconv.ParseInt(os.Getenv(models.EnvPusherID), 10, 64) - pusherName := os.Getenv(models.EnvPusherName) + repoUser := os.Getenv(repo_module.EnvRepoUsername) + isWiki, _ := strconv.ParseBool(os.Getenv(repo_module.EnvRepoIsWiki)) + repoName := os.Getenv(repo_module.EnvRepoName) + pusherID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPusherID), 10, 64) + pusherName := os.Getenv(repo_module.EnvPusherName) hookOptions := private.HookOptions{ UserName: pusherName, @@ 
-503,10 +503,10 @@ Gitea or set your environment appropriately.`, "") } reader := bufio.NewReader(os.Stdin) - repoUser := os.Getenv(models.EnvRepoUsername) - repoName := os.Getenv(models.EnvRepoName) - pusherID, _ := strconv.ParseInt(os.Getenv(models.EnvPusherID), 10, 64) - pusherName := os.Getenv(models.EnvPusherName) + repoUser := os.Getenv(repo_module.EnvRepoUsername) + repoName := os.Getenv(repo_module.EnvRepoName) + pusherID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPusherID), 10, 64) + pusherName := os.Getenv(repo_module.EnvPusherName) // 1. Version and features negotiation. // S: PKT-LINE(version=1\0push-options atomic...) / PKT-LINE(version=1\n) diff --git a/cmd/manager.go b/cmd/manager.go index 50b66cc7f2..03fe23aa9e 100644 --- a/cmd/manager.go +++ b/cmd/manager.go @@ -10,7 +10,6 @@ import ( "os" "time" - "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/private" "github.com/urfave/cli" @@ -27,6 +26,7 @@ var ( subcmdRestart, subcmdFlushQueues, subcmdLogging, + subCmdProcesses, }, } subcmdShutdown = cli.Command{ @@ -68,326 +68,38 @@ var ( }, }, } - defaultLoggingFlags = []cli.Flag{ - cli.StringFlag{ - Name: "group, g", - Usage: "Group to add logger to - will default to \"default\"", - }, cli.StringFlag{ - Name: "name, n", - Usage: "Name of the new logger - will default to mode", - }, cli.StringFlag{ - Name: "level, l", - Usage: "Logging level for the new logger", - }, cli.StringFlag{ - Name: "stacktrace-level, L", - Usage: "Stacktrace logging level", - }, cli.StringFlag{ - Name: "flags, F", - Usage: "Flags for the logger", - }, cli.StringFlag{ - Name: "expression, e", - Usage: "Matching expression for the logger", - }, cli.StringFlag{ - Name: "prefix, p", - Usage: "Prefix for the logger", - }, cli.BoolFlag{ - Name: "color", - Usage: "Use color in the logs", - }, cli.BoolFlag{ - Name: "debug", - }, - } - subcmdLogging = cli.Command{ - Name: "logging", - Usage: "Adjust logging commands", - Subcommands: []cli.Command{ - { - Name: "pause", - Usage: "Pause logging (Gitea will buffer logs up to a certain point and will drop them after that point)", - Flags: []cli.Flag{ - cli.BoolFlag{ - Name: "debug", - }, - }, - Action: runPauseLogging, - }, { - Name: "resume", - Usage: "Resume logging", - Flags: []cli.Flag{ - cli.BoolFlag{ - Name: "debug", - }, - }, - Action: runResumeLogging, - }, { - Name: "release-and-reopen", - Usage: "Cause Gitea to release and re-open files used for logging", - Flags: []cli.Flag{ - cli.BoolFlag{ - Name: "debug", - }, - }, - Action: runReleaseReopenLogging, - }, { - Name: "remove", - Usage: "Remove a logger", - ArgsUsage: "[name] Name of logger to remove", - Flags: []cli.Flag{ - cli.BoolFlag{ - Name: "debug", - }, cli.StringFlag{ - Name: "group, g", - Usage: "Group to add logger to - will default to \"default\"", - }, - }, - Action: runRemoveLogger, - }, { - Name: "add", - Usage: "Add a logger", - Subcommands: []cli.Command{ - { - Name: "console", - Usage: "Add a console logger", - Flags: append(defaultLoggingFlags, - cli.BoolFlag{ - Name: "stderr", - Usage: "Output console logs to stderr - only relevant for console", - }), - Action: runAddConsoleLogger, - }, { - Name: "file", - Usage: "Add a file logger", - Flags: append(defaultLoggingFlags, []cli.Flag{ - cli.StringFlag{ - Name: "filename, f", - Usage: "Filename for the logger - this must be set.", - }, cli.BoolTFlag{ - Name: "rotate, r", - Usage: "Rotate logs", - }, cli.Int64Flag{ - Name: "max-size, s", - Usage: "Maximum size in bytes before rotation", - }, cli.BoolTFlag{ - Name: "daily, d", - 
Usage: "Rotate logs daily", - }, cli.IntFlag{ - Name: "max-days, D", - Usage: "Maximum number of daily logs to keep", - }, cli.BoolTFlag{ - Name: "compress, z", - Usage: "Compress rotated logs", - }, cli.IntFlag{ - Name: "compression-level, Z", - Usage: "Compression level to use", - }, - }...), - Action: runAddFileLogger, - }, { - Name: "conn", - Usage: "Add a net conn logger", - Flags: append(defaultLoggingFlags, []cli.Flag{ - cli.BoolFlag{ - Name: "reconnect-on-message, R", - Usage: "Reconnect to host for every message", - }, cli.BoolFlag{ - Name: "reconnect, r", - Usage: "Reconnect to host when connection is dropped", - }, cli.StringFlag{ - Name: "protocol, P", - Usage: "Set protocol to use: tcp, unix, or udp (defaults to tcp)", - }, cli.StringFlag{ - Name: "address, a", - Usage: "Host address and port to connect to (defaults to :7020)", - }, - }...), - Action: runAddConnLogger, - }, { - Name: "smtp", - Usage: "Add an SMTP logger", - Flags: append(defaultLoggingFlags, []cli.Flag{ - cli.StringFlag{ - Name: "username, u", - Usage: "Mail server username", - }, cli.StringFlag{ - Name: "password, P", - Usage: "Mail server password", - }, cli.StringFlag{ - Name: "host, H", - Usage: "Mail server host (defaults to: 127.0.0.1:25)", - }, cli.StringSliceFlag{ - Name: "send-to, s", - Usage: "Email address(es) to send to", - }, cli.StringFlag{ - Name: "subject, S", - Usage: "Subject header of sent emails", - }, - }...), - Action: runAddSMTPLogger, - }, - }, + subCmdProcesses = cli.Command{ + Name: "processes", + Usage: "Display running processes within the current process", + Action: runProcesses, + Flags: []cli.Flag{ + cli.BoolFlag{ + Name: "debug", + }, + cli.BoolFlag{ + Name: "flat", + Usage: "Show processes as flat table rather than as tree", + }, + cli.BoolFlag{ + Name: "no-system", + Usage: "Do not show system proceses", + }, + cli.BoolFlag{ + Name: "stacktraces", + Usage: "Show stacktraces", + }, + cli.BoolFlag{ + Name: "json", + Usage: "Output as json", + }, + cli.StringFlag{ + Name: "cancel", + Usage: "Process PID to cancel. 
(Only available for non-system processes.)", }, }, } ) -func runRemoveLogger(c *cli.Context) error { - setup("manager", c.Bool("debug")) - group := c.String("group") - if len(group) == 0 { - group = log.DEFAULT - } - name := c.Args().First() - ctx, cancel := installSignals() - defer cancel() - - statusCode, msg := private.RemoveLogger(ctx, group, name) - switch statusCode { - case http.StatusInternalServerError: - return fail("InternalServerError", msg) - } - - fmt.Fprintln(os.Stdout, msg) - return nil -} - -func runAddSMTPLogger(c *cli.Context) error { - setup("manager", c.Bool("debug")) - vals := map[string]interface{}{} - mode := "smtp" - if c.IsSet("host") { - vals["host"] = c.String("host") - } else { - vals["host"] = "127.0.0.1:25" - } - - if c.IsSet("username") { - vals["username"] = c.String("username") - } - if c.IsSet("password") { - vals["password"] = c.String("password") - } - - if !c.IsSet("send-to") { - return fmt.Errorf("Some recipients must be provided") - } - vals["sendTos"] = c.StringSlice("send-to") - - if c.IsSet("subject") { - vals["subject"] = c.String("subject") - } else { - vals["subject"] = "Diagnostic message from Gitea" - } - - return commonAddLogger(c, mode, vals) -} - -func runAddConnLogger(c *cli.Context) error { - setup("manager", c.Bool("debug")) - vals := map[string]interface{}{} - mode := "conn" - vals["net"] = "tcp" - if c.IsSet("protocol") { - switch c.String("protocol") { - case "udp": - vals["net"] = "udp" - case "unix": - vals["net"] = "unix" - } - } - if c.IsSet("address") { - vals["address"] = c.String("address") - } else { - vals["address"] = ":7020" - } - if c.IsSet("reconnect") { - vals["reconnect"] = c.Bool("reconnect") - } - if c.IsSet("reconnect-on-message") { - vals["reconnectOnMsg"] = c.Bool("reconnect-on-message") - } - return commonAddLogger(c, mode, vals) -} - -func runAddFileLogger(c *cli.Context) error { - setup("manager", c.Bool("debug")) - vals := map[string]interface{}{} - mode := "file" - if c.IsSet("filename") { - vals["filename"] = c.String("filename") - } else { - return fmt.Errorf("filename must be set when creating a file logger") - } - if c.IsSet("rotate") { - vals["rotate"] = c.Bool("rotate") - } - if c.IsSet("max-size") { - vals["maxsize"] = c.Int64("max-size") - } - if c.IsSet("daily") { - vals["daily"] = c.Bool("daily") - } - if c.IsSet("max-days") { - vals["maxdays"] = c.Int("max-days") - } - if c.IsSet("compress") { - vals["compress"] = c.Bool("compress") - } - if c.IsSet("compression-level") { - vals["compressionLevel"] = c.Int("compression-level") - } - return commonAddLogger(c, mode, vals) -} - -func runAddConsoleLogger(c *cli.Context) error { - setup("manager", c.Bool("debug")) - vals := map[string]interface{}{} - mode := "console" - if c.IsSet("stderr") && c.Bool("stderr") { - vals["stderr"] = c.Bool("stderr") - } - return commonAddLogger(c, mode, vals) -} - -func commonAddLogger(c *cli.Context, mode string, vals map[string]interface{}) error { - if len(c.String("level")) > 0 { - vals["level"] = log.FromString(c.String("level")).String() - } - if len(c.String("stacktrace-level")) > 0 { - vals["stacktraceLevel"] = log.FromString(c.String("stacktrace-level")).String() - } - if len(c.String("expression")) > 0 { - vals["expression"] = c.String("expression") - } - if len(c.String("prefix")) > 0 { - vals["prefix"] = c.String("prefix") - } - if len(c.String("flags")) > 0 { - vals["flags"] = log.FlagsFromString(c.String("flags")) - } - if c.IsSet("color") { - vals["colorize"] = c.Bool("color") - } - group := "default" - if 
c.IsSet("group") { - group = c.String("group") - } - name := mode - if c.IsSet("name") { - name = c.String("name") - } - ctx, cancel := installSignals() - defer cancel() - - statusCode, msg := private.AddLogger(ctx, group, name, mode, vals) - switch statusCode { - case http.StatusInternalServerError: - return fail("InternalServerError", msg) - } - - fmt.Fprintln(os.Stdout, msg) - return nil -} - func runShutdown(c *cli.Context) error { ctx, cancel := installSignals() defer cancel() @@ -433,47 +145,16 @@ func runFlushQueues(c *cli.Context) error { return nil } -func runPauseLogging(c *cli.Context) error { +func runProcesses(c *cli.Context) error { ctx, cancel := installSignals() defer cancel() setup("manager", c.Bool("debug")) - statusCode, msg := private.PauseLogging(ctx) + statusCode, msg := private.Processes(ctx, os.Stdout, c.Bool("flat"), c.Bool("no-system"), c.Bool("stacktraces"), c.Bool("json"), c.String("cancel")) switch statusCode { case http.StatusInternalServerError: return fail("InternalServerError", msg) } - fmt.Fprintln(os.Stdout, msg) - return nil -} - -func runResumeLogging(c *cli.Context) error { - ctx, cancel := installSignals() - defer cancel() - - setup("manager", c.Bool("debug")) - statusCode, msg := private.ResumeLogging(ctx) - switch statusCode { - case http.StatusInternalServerError: - return fail("InternalServerError", msg) - } - - fmt.Fprintln(os.Stdout, msg) - return nil -} - -func runReleaseReopenLogging(c *cli.Context) error { - ctx, cancel := installSignals() - defer cancel() - - setup("manager", c.Bool("debug")) - statusCode, msg := private.ReleaseReopenLogging(ctx) - switch statusCode { - case http.StatusInternalServerError: - return fail("InternalServerError", msg) - } - - fmt.Fprintln(os.Stdout, msg) return nil } diff --git a/cmd/manager_logging.go b/cmd/manager_logging.go new file mode 100644 index 0000000000..0043ea1e52 --- /dev/null +++ b/cmd/manager_logging.go @@ -0,0 +1,383 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package cmd + +import ( + "fmt" + "net/http" + "os" + + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/private" + + "github.com/urfave/cli" +) + +var ( + defaultLoggingFlags = []cli.Flag{ + cli.StringFlag{ + Name: "group, g", + Usage: "Group to add logger to - will default to \"default\"", + }, cli.StringFlag{ + Name: "name, n", + Usage: "Name of the new logger - will default to mode", + }, cli.StringFlag{ + Name: "level, l", + Usage: "Logging level for the new logger", + }, cli.StringFlag{ + Name: "stacktrace-level, L", + Usage: "Stacktrace logging level", + }, cli.StringFlag{ + Name: "flags, F", + Usage: "Flags for the logger", + }, cli.StringFlag{ + Name: "expression, e", + Usage: "Matching expression for the logger", + }, cli.StringFlag{ + Name: "prefix, p", + Usage: "Prefix for the logger", + }, cli.BoolFlag{ + Name: "color", + Usage: "Use color in the logs", + }, cli.BoolFlag{ + Name: "debug", + }, + } + + subcmdLogging = cli.Command{ + Name: "logging", + Usage: "Adjust logging commands", + Subcommands: []cli.Command{ + { + Name: "pause", + Usage: "Pause logging (Gitea will buffer logs up to a certain point and will drop them after that point)", + Flags: []cli.Flag{ + cli.BoolFlag{ + Name: "debug", + }, + }, + Action: runPauseLogging, + }, { + Name: "resume", + Usage: "Resume logging", + Flags: []cli.Flag{ + cli.BoolFlag{ + Name: "debug", + }, + }, + Action: runResumeLogging, + }, { + Name: "release-and-reopen", + Usage: "Cause Gitea to release and re-open files used for logging", + Flags: []cli.Flag{ + cli.BoolFlag{ + Name: "debug", + }, + }, + Action: runReleaseReopenLogging, + }, { + Name: "remove", + Usage: "Remove a logger", + ArgsUsage: "[name] Name of logger to remove", + Flags: []cli.Flag{ + cli.BoolFlag{ + Name: "debug", + }, cli.StringFlag{ + Name: "group, g", + Usage: "Group to add logger to - will default to \"default\"", + }, + }, + Action: runRemoveLogger, + }, { + Name: "add", + Usage: "Add a logger", + Subcommands: []cli.Command{ + { + Name: "console", + Usage: "Add a console logger", + Flags: append(defaultLoggingFlags, + cli.BoolFlag{ + Name: "stderr", + Usage: "Output console logs to stderr - only relevant for console", + }), + Action: runAddConsoleLogger, + }, { + Name: "file", + Usage: "Add a file logger", + Flags: append(defaultLoggingFlags, []cli.Flag{ + cli.StringFlag{ + Name: "filename, f", + Usage: "Filename for the logger - this must be set.", + }, cli.BoolTFlag{ + Name: "rotate, r", + Usage: "Rotate logs", + }, cli.Int64Flag{ + Name: "max-size, s", + Usage: "Maximum size in bytes before rotation", + }, cli.BoolTFlag{ + Name: "daily, d", + Usage: "Rotate logs daily", + }, cli.IntFlag{ + Name: "max-days, D", + Usage: "Maximum number of daily logs to keep", + }, cli.BoolTFlag{ + Name: "compress, z", + Usage: "Compress rotated logs", + }, cli.IntFlag{ + Name: "compression-level, Z", + Usage: "Compression level to use", + }, + }...), + Action: runAddFileLogger, + }, { + Name: "conn", + Usage: "Add a net conn logger", + Flags: append(defaultLoggingFlags, []cli.Flag{ + cli.BoolFlag{ + Name: "reconnect-on-message, R", + Usage: "Reconnect to host for every message", + }, cli.BoolFlag{ + Name: "reconnect, r", + Usage: "Reconnect to host when connection is dropped", + }, cli.StringFlag{ + Name: "protocol, P", + Usage: "Set protocol to use: tcp, unix, or udp (defaults to tcp)", + }, cli.StringFlag{ + Name: "address, a", + Usage: "Host address and port to connect to (defaults to :7020)", + }, + }...), + Action: runAddConnLogger, + }, { + Name: 
"smtp", + Usage: "Add an SMTP logger", + Flags: append(defaultLoggingFlags, []cli.Flag{ + cli.StringFlag{ + Name: "username, u", + Usage: "Mail server username", + }, cli.StringFlag{ + Name: "password, P", + Usage: "Mail server password", + }, cli.StringFlag{ + Name: "host, H", + Usage: "Mail server host (defaults to: 127.0.0.1:25)", + }, cli.StringSliceFlag{ + Name: "send-to, s", + Usage: "Email address(es) to send to", + }, cli.StringFlag{ + Name: "subject, S", + Usage: "Subject header of sent emails", + }, + }...), + Action: runAddSMTPLogger, + }, + }, + }, + }, + } +) + +func runRemoveLogger(c *cli.Context) error { + setup("manager", c.Bool("debug")) + group := c.String("group") + if len(group) == 0 { + group = log.DEFAULT + } + name := c.Args().First() + ctx, cancel := installSignals() + defer cancel() + + statusCode, msg := private.RemoveLogger(ctx, group, name) + switch statusCode { + case http.StatusInternalServerError: + return fail("InternalServerError", msg) + } + + fmt.Fprintln(os.Stdout, msg) + return nil +} + +func runAddSMTPLogger(c *cli.Context) error { + setup("manager", c.Bool("debug")) + vals := map[string]interface{}{} + mode := "smtp" + if c.IsSet("host") { + vals["host"] = c.String("host") + } else { + vals["host"] = "127.0.0.1:25" + } + + if c.IsSet("username") { + vals["username"] = c.String("username") + } + if c.IsSet("password") { + vals["password"] = c.String("password") + } + + if !c.IsSet("send-to") { + return fmt.Errorf("Some recipients must be provided") + } + vals["sendTos"] = c.StringSlice("send-to") + + if c.IsSet("subject") { + vals["subject"] = c.String("subject") + } else { + vals["subject"] = "Diagnostic message from Gitea" + } + + return commonAddLogger(c, mode, vals) +} + +func runAddConnLogger(c *cli.Context) error { + setup("manager", c.Bool("debug")) + vals := map[string]interface{}{} + mode := "conn" + vals["net"] = "tcp" + if c.IsSet("protocol") { + switch c.String("protocol") { + case "udp": + vals["net"] = "udp" + case "unix": + vals["net"] = "unix" + } + } + if c.IsSet("address") { + vals["address"] = c.String("address") + } else { + vals["address"] = ":7020" + } + if c.IsSet("reconnect") { + vals["reconnect"] = c.Bool("reconnect") + } + if c.IsSet("reconnect-on-message") { + vals["reconnectOnMsg"] = c.Bool("reconnect-on-message") + } + return commonAddLogger(c, mode, vals) +} + +func runAddFileLogger(c *cli.Context) error { + setup("manager", c.Bool("debug")) + vals := map[string]interface{}{} + mode := "file" + if c.IsSet("filename") { + vals["filename"] = c.String("filename") + } else { + return fmt.Errorf("filename must be set when creating a file logger") + } + if c.IsSet("rotate") { + vals["rotate"] = c.Bool("rotate") + } + if c.IsSet("max-size") { + vals["maxsize"] = c.Int64("max-size") + } + if c.IsSet("daily") { + vals["daily"] = c.Bool("daily") + } + if c.IsSet("max-days") { + vals["maxdays"] = c.Int("max-days") + } + if c.IsSet("compress") { + vals["compress"] = c.Bool("compress") + } + if c.IsSet("compression-level") { + vals["compressionLevel"] = c.Int("compression-level") + } + return commonAddLogger(c, mode, vals) +} + +func runAddConsoleLogger(c *cli.Context) error { + setup("manager", c.Bool("debug")) + vals := map[string]interface{}{} + mode := "console" + if c.IsSet("stderr") && c.Bool("stderr") { + vals["stderr"] = c.Bool("stderr") + } + return commonAddLogger(c, mode, vals) +} + +func commonAddLogger(c *cli.Context, mode string, vals map[string]interface{}) error { + if len(c.String("level")) > 0 { + vals["level"] = 
log.FromString(c.String("level")).String() + } + if len(c.String("stacktrace-level")) > 0 { + vals["stacktraceLevel"] = log.FromString(c.String("stacktrace-level")).String() + } + if len(c.String("expression")) > 0 { + vals["expression"] = c.String("expression") + } + if len(c.String("prefix")) > 0 { + vals["prefix"] = c.String("prefix") + } + if len(c.String("flags")) > 0 { + vals["flags"] = log.FlagsFromString(c.String("flags")) + } + if c.IsSet("color") { + vals["colorize"] = c.Bool("color") + } + group := "default" + if c.IsSet("group") { + group = c.String("group") + } + name := mode + if c.IsSet("name") { + name = c.String("name") + } + ctx, cancel := installSignals() + defer cancel() + + statusCode, msg := private.AddLogger(ctx, group, name, mode, vals) + switch statusCode { + case http.StatusInternalServerError: + return fail("InternalServerError", msg) + } + + fmt.Fprintln(os.Stdout, msg) + return nil +} + +func runPauseLogging(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + + setup("manager", c.Bool("debug")) + statusCode, msg := private.PauseLogging(ctx) + switch statusCode { + case http.StatusInternalServerError: + return fail("InternalServerError", msg) + } + + fmt.Fprintln(os.Stdout, msg) + return nil +} + +func runResumeLogging(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + + setup("manager", c.Bool("debug")) + statusCode, msg := private.ResumeLogging(ctx) + switch statusCode { + case http.StatusInternalServerError: + return fail("InternalServerError", msg) + } + + fmt.Fprintln(os.Stdout, msg) + return nil +} + +func runReleaseReopenLogging(c *cli.Context) error { + ctx, cancel := installSignals() + defer cancel() + + setup("manager", c.Bool("debug")) + statusCode, msg := private.ReleaseReopenLogging(ctx) + switch statusCode { + case http.StatusInternalServerError: + return fail("InternalServerError", msg) + } + + fmt.Fprintln(os.Stdout, msg) + return nil +} diff --git a/cmd/serv.go b/cmd/serv.go index b4ef37f1dc..340f591dce 100644 --- a/cmd/serv.go +++ b/cmd/serv.go @@ -24,6 +24,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/pprof" "code.gitea.io/gitea/modules/private" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/services/lfs" @@ -235,17 +236,17 @@ func runServ(c *cli.Context) error { } return fail("Internal Server Error", "%s", err.Error()) } - os.Setenv(models.EnvRepoIsWiki, strconv.FormatBool(results.IsWiki)) - os.Setenv(models.EnvRepoName, results.RepoName) - os.Setenv(models.EnvRepoUsername, results.OwnerName) - os.Setenv(models.EnvPusherName, results.UserName) - os.Setenv(models.EnvPusherEmail, results.UserEmail) - os.Setenv(models.EnvPusherID, strconv.FormatInt(results.UserID, 10)) - os.Setenv(models.EnvRepoID, strconv.FormatInt(results.RepoID, 10)) - os.Setenv(models.EnvPRID, fmt.Sprintf("%d", 0)) - os.Setenv(models.EnvIsDeployKey, fmt.Sprintf("%t", results.IsDeployKey)) - os.Setenv(models.EnvKeyID, fmt.Sprintf("%d", results.KeyID)) - os.Setenv(models.EnvAppURL, setting.AppURL) + os.Setenv(repo_module.EnvRepoIsWiki, strconv.FormatBool(results.IsWiki)) + os.Setenv(repo_module.EnvRepoName, results.RepoName) + os.Setenv(repo_module.EnvRepoUsername, results.OwnerName) + os.Setenv(repo_module.EnvPusherName, results.UserName) + os.Setenv(repo_module.EnvPusherEmail, results.UserEmail) + os.Setenv(repo_module.EnvPusherID, strconv.FormatInt(results.UserID, 10)) + os.Setenv(repo_module.EnvRepoID, 
strconv.FormatInt(results.RepoID, 10)) + os.Setenv(repo_module.EnvPRID, fmt.Sprintf("%d", 0)) + os.Setenv(repo_module.EnvDeployKeyID, fmt.Sprintf("%d", results.DeployKeyID)) + os.Setenv(repo_module.EnvKeyID, fmt.Sprintf("%d", results.KeyID)) + os.Setenv(repo_module.EnvAppURL, setting.AppURL) // LFS token authentication if verb == lfsAuthenticateVerb { @@ -296,6 +297,15 @@ func runServ(c *cli.Context) error { gitcmd = exec.CommandContext(ctx, verb, repoPath) } + // Check if setting.RepoRootPath exists. It could be the case that it doesn't exist, this can happen when + // `[repository]` `ROOT` is a relative path and $GITEA_WORK_DIR isn't passed to the SSH connection. + if _, err := os.Stat(setting.RepoRootPath); err != nil { + if os.IsNotExist(err) { + return fail("Incorrect configuration.", + "Directory `[repository]` `ROOT` was not found, please check if $GITEA_WORK_DIR is passed to the SSH connection or make `[repository]` `ROOT` an absolute value.") + } + } + gitcmd.Dir = setting.RepoRootPath gitcmd.Stdout = os.Stdout gitcmd.Stdin = os.Stdin diff --git a/cmd/web.go b/cmd/web.go index 710c12775f..8c7c026172 100644 --- a/cmd/web.go +++ b/cmd/web.go @@ -16,6 +16,7 @@ import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/routers" "code.gitea.io/gitea/routers/install" @@ -59,6 +60,9 @@ and it takes care of all the other things for you`, } func runHTTPRedirector() { + _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "Web: HTTP Redirector", process.SystemProcessType, true) + defer finished() + source := fmt.Sprintf("%s:%s", setting.HTTPAddr, setting.PortToRedirect) dest := strings.TrimSuffix(setting.AppURL, "/") log.Info("Redirecting: %s to %s", source, dest) @@ -141,8 +145,10 @@ func runWeb(ctx *cli.Context) error { if setting.EnablePprof { go func() { + _, _, finished := process.GetManager().AddTypedContext(context.Background(), "Web: PProf Server", process.SystemProcessType, true) log.Info("Starting pprof server on localhost:6060") log.Info("%v", http.ListenAndServe("localhost:6060", nil)) + finished() }() } @@ -204,6 +210,8 @@ func listen(m http.Handler, handleRedirector bool) error { if setting.Protocol != setting.HTTPUnix && setting.Protocol != setting.FCGIUnix { listenAddr = net.JoinHostPort(listenAddr, setting.HTTPPort) } + _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "Web: Gitea Server", process.SystemProcessType, true) + defer finished() log.Info("Listen: %v://%s%s", setting.Protocol, listenAddr, setting.AppSubURL) // This can be useful for users, many users do wrong to their config and get strange behaviors behind a reverse-proxy. // A user may fix the configuration mistake when he sees this log. 
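Editor's note on the web.go hunks above: each long-running piece of the web command (HTTP redirector, pprof listener, the main Gitea server) is now attached to a typed process context, so it shows up as a system process for the new `gitea manager processes` subcommand introduced earlier in this patch. Restated as a generic, hedged sketch for a background goroutine (the label here is hypothetical, and the meaning of the trailing boolean is inferred from context rather than confirmed by this excerpt):

```go
go func() {
	// Register this goroutine with the process manager so it is visible and typed;
	// system-typed entries are presumably what `processes --no-system` filters out.
	_, _, finished := process.GetManager().AddTypedContext(
		graceful.GetManager().HammerContext(), // parent context, as in the hunks above
		"Web: Example Background Listener",    // hypothetical label; real ones are e.g. "Web: HTTP Redirector"
		process.SystemProcessType,             // typed as a system process
		true,                                  // trailing flag as passed in the diff (meaning assumed)
	)
	defer finished() // remove the entry from the process table once the goroutine exits

	// ... run the listener here ...
}()
```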
diff --git a/cmd/web_acme.go b/cmd/web_acme.go index 9a04274db5..7dbeb14a0e 100644 --- a/cmd/web_acme.go +++ b/cmd/web_acme.go @@ -15,6 +15,7 @@ import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/setting" "github.com/caddyserver/certmagic" @@ -107,6 +108,9 @@ func runACME(listenAddr string, m http.Handler) error { if enableHTTPChallenge { go func() { + _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "Web: ACME HTTP challenge server", process.SystemProcessType, true) + defer finished() + log.Info("Running Let's Encrypt handler on %s", setting.HTTPAddr+":"+setting.PortToRedirect) // all traffic coming into HTTP will be redirect to HTTPS automatically (LE HTTP-01 validation happens here) err := runHTTP("tcp", setting.HTTPAddr+":"+setting.PortToRedirect, "Let's Encrypt HTTP Challenge", myACME.HTTPChallengeHandler(http.HandlerFunc(runLetsEncryptFallbackHandler))) @@ -128,5 +132,5 @@ func runLetsEncryptFallbackHandler(w http.ResponseWriter, r *http.Request) { // URI always contains a leading slash, which would result in a double // slash target := strings.TrimSuffix(setting.AppURL, "/") + r.URL.RequestURI() - http.Redirect(w, r, target, http.StatusFound) + http.Redirect(w, r, target, http.StatusTemporaryRedirect) } diff --git a/contrib/pr/checkout.go b/contrib/pr/checkout.go index ceeba4de3b..f6d29f3c5b 100644 --- a/contrib/pr/checkout.go +++ b/contrib/pr/checkout.go @@ -24,12 +24,12 @@ import ( "strconv" "time" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unittest" gitea_git "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/markup" "code.gitea.io/gitea/modules/markup/external" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/routers" @@ -111,8 +111,8 @@ func runPR() { } unittest.LoadFixtures() util.RemoveAll(setting.RepoRootPath) - util.RemoveAll(models.LocalCopyPath()) - util.CopyDir(path.Join(curDir, "integrations/gitea-repositories-meta"), setting.RepoRootPath) + util.RemoveAll(repo_module.LocalCopyPath()) + unittest.CopyDir(path.Join(curDir, "integrations/gitea-repositories-meta"), setting.RepoRootPath) log.Printf("[PR] Setting up router\n") // routers.GlobalInit() diff --git a/contrib/upgrade.sh b/contrib/upgrade.sh index 171d9617c1..3a98c277d6 100755 --- a/contrib/upgrade.sh +++ b/contrib/upgrade.sh @@ -3,26 +3,33 @@ # from dl.gitea.io on linux as systemd service. It performs a backup and updates # Gitea in place. # NOTE: This adds the GPG Signing Key of the Gitea maintainers to the keyring. -# Depends on: bash, curl, xz, sha256sum, gpg. optionally jq. -# Usage: [environment vars] upgrade.sh [version] +# Depends on: bash, curl, xz, sha256sum. optionally jq, gpg # See section below for available environment vars. # When no version is specified, updates to the latest release. 
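A brief note on the web_acme.go hunk above: the Let's Encrypt fallback handler now responds with 307 instead of 302. As general HTTP semantics (not a claim about the authors' rationale), a 302 response is commonly followed with a GET regardless of the original method, while a 307 obliges the client to repeat the same method and body against the new URL:

```go
// 302 StatusFound: many clients rewrite the follow-up request to GET.
// 307 StatusTemporaryRedirect: method and body must be preserved on the redirect.
http.Redirect(w, r, target, http.StatusTemporaryRedirect)
```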
# Examples: # upgrade.sh 1.15.10 # giteahome=/opt/gitea giteaconf=$giteahome/app.ini upgrade.sh -while true; do - case "$1" in - -v | --version ) ver="$2"; shift 2 ;; - -y | --yes ) no_confirm="yes"; shift ;; - --ignore-gpg) ignore_gpg="yes"; shift ;; - -- ) shift; break ;; - * ) break ;; - esac -done - -set -euo pipefail +# apply variables from environment +: "${giteabin:="/usr/local/bin/gitea"}" +: "${giteahome:="/var/lib/gitea"}" +: "${giteaconf:="/etc/gitea/app.ini"}" +: "${giteauser:="git"}" +: "${sudocmd:="sudo"}" +: "${arch:="linux-amd64"}" +: "${service_start:="$sudocmd systemctl start gitea"}" +: "${service_stop:="$sudocmd systemctl stop gitea"}" +: "${service_status:="$sudocmd systemctl status gitea"}" +: "${backupopts:=""}" # see `gitea dump --help` for available options +function giteacmd { + if [[ $sudocmd = "su" ]]; then + # `-c` only accept one string as argument. + "$sudocmd" - "$giteauser" -c "$(printf "%q " "$giteabin" "--config" "$giteaconf" "--work-path" "$giteahome" "$@")" + else + "$sudocmd" --user "$giteauser" "$giteabin" --config "$giteaconf" --work-path "$giteahome" "$@" + fi +} function require { for exe in "$@"; do @@ -30,8 +37,19 @@ function require { done } +# parse command line arguments +while true; do + case "$1" in + -v | --version ) giteaversion="$2"; shift 2 ;; + -y | --yes ) no_confirm="yes"; shift ;; + --ignore-gpg) ignore_gpg="yes"; shift ;; + "" | -- ) shift; break ;; + * ) echo "Usage: [] upgrade.sh [-v ] [-y] [--ignore-gpg]"; exit 1;; + esac +done -require curl xz sha256sum gpg +# exit once any command fails. this means that each step should be idempotent! +set -euo pipefail if [[ -f /etc/os-release ]]; then os_release=$(cat /etc/os-release) @@ -46,38 +64,17 @@ if [[ -f /etc/os-release ]]; then fi fi - -# apply variables from environment -: "${giteabin:="/usr/local/bin/gitea"}" -: "${giteahome:="/var/lib/gitea"}" -: "${giteaconf:="/etc/gitea/app.ini"}" -: "${giteauser:="git"}" -: "${sudocmd:="sudo"}" -: "${arch:="linux-amd64"}" -: "${service_start:="$sudocmd systemctl start gitea"}" -: "${service_stop:="$sudocmd systemctl stop gitea"}" -: "${service_status:="$sudocmd systemctl status gitea"}" -: "${backupopts:=""}" # see `gitea dump --help` for available options - - -function giteacmd { - if [[ $sudocmd = "su" ]]; then - "$sudocmd" - "$giteauser" -c "$giteabin" --config "$giteaconf" --work-path "$giteahome" "$@" - else - "$sudocmd" --user "$giteauser" "$giteabin" --config "$giteaconf" --work-path "$giteahome" "$@" - fi -} +require curl xz sha256sum "$sudocmd" # select version to install -if [[ -z "${ver:-}" ]]; then +if [[ -z "${giteaversion:-}" ]]; then require jq giteaversion=$(curl --connect-timeout 10 -sL https://dl.gitea.io/gitea/version.json | jq -r .latest.version) -else - giteaversion="$ver" + echo "Latest available version is $giteaversion" fi - # confirm update +echo "Checking currently installed version..." current=$(giteacmd --version | cut -d ' ' -f 3) [[ "$current" == "$giteaversion" ]] && echo "$current is already installed, stopping." && exit 1 if [[ -z "${no_confirm:-}" ]]; then @@ -98,22 +95,24 @@ binurl="https://dl.gitea.io/gitea/${giteaversion}/${binname}.xz" echo "Downloading $binurl..." 
curl --connect-timeout 10 --silent --show-error --fail --location -O "$binurl{,.sha256,.asc}" -# validate checksum & gpg signature (exit script if error) +# validate checksum & gpg signature sha256sum -c "${binname}.xz.sha256" if [[ -z "${ignore_gpg:-}" ]]; then + require gpg gpg --keyserver keys.openpgp.org --recv 7C9E68152594688862D62AF62D9AE806EC1592E2 gpg --verify "${binname}.xz.asc" "${binname}.xz" || { echo 'Signature does not match'; exit 1; } fi rm "${binname}".xz.{sha256,asc} # unpack binary + make executable -xz --decompress "${binname}.xz" +xz --decompress --force "${binname}.xz" chown "$giteauser" "$binname" chmod +x "$binname" # stop gitea, create backup, replace binary, restart gitea -echo "Stopping gitea at $(date)" +echo "Flushing gitea queues at $(date)" giteacmd manager flush-queues +echo "Stopping gitea at $(date)" $service_stop echo "Creating backup in $giteahome" giteacmd dump $backupopts diff --git a/custom/conf/app.example.ini b/custom/conf/app.example.ini index ad58e6bda3..c53175b2e0 100644 --- a/custom/conf/app.example.ini +++ b/custom/conf/app.example.ini @@ -61,7 +61,7 @@ RUN_MODE = ; prod ;; SSL Cipher Suites ;SSL_CIPHER_SUITES=; Will default to "ecdhe_ecdsa_with_aes_256_gcm_sha384,ecdhe_rsa_with_aes_256_gcm_sha384,ecdhe_ecdsa_with_aes_128_gcm_sha256,ecdhe_rsa_with_aes_128_gcm_sha256,ecdhe_ecdsa_with_chacha20_poly1305,ecdhe_rsa_with_chacha20_poly1305" if aes is supported by hardware, otherwise chacha will be first. ;; -;; Timeout for any write to the connection. (Set to 0 to disable all timeouts.) +;; Timeout for any write to the connection. (Set to -1 to disable all timeouts.) ;PER_WRITE_TIMEOUT = 30s ;; ;; Timeout per Kb written to connections. @@ -117,7 +117,7 @@ RUN_MODE = ; prod ;; ;; For the built-in SSH server, choose the key exchange algorithms to support for SSH connections, ;; for system SSH this setting has no effect -;SSH_SERVER_KEY_EXCHANGES = curve25519-sha256@libssh.org, ecdh-sha2-nistp256, ecdh-sha2-nistp384, ecdh-sha2-nistp521, diffie-hellman-group14-sha1 +;SSH_SERVER_KEY_EXCHANGES = curve25519-sha256, ecdh-sha2-nistp256, ecdh-sha2-nistp384, ecdh-sha2-nistp521, diffie-hellman-group14-sha256, diffie-hellman-group14-sha1 ;; ;; For the built-in SSH server, choose the MACs to support for SSH connections, ;; for system SSH this setting has no effect @@ -163,7 +163,7 @@ RUN_MODE = ; prod ;; Enable exposure of SSH clone URL to anonymous visitors, default is false ;SSH_EXPOSE_ANONYMOUS = false ;; -;; Timeout for any write to ssh connections. (Set to 0 to disable all timeouts.) +;; Timeout for any write to ssh connections. (Set to -1 to disable all timeouts.) ;; Will default to the PER_WRITE_TIMEOUT. ;SSH_PER_WRITE_TIMEOUT = 30s ;; @@ -237,7 +237,7 @@ RUN_MODE = ; prod ;; PPROF_DATA_PATH, use an absolute path when you start gitea as service ;PPROF_DATA_PATH = data/tmp/pprof ;; -;; Landing page, can be "home", "explore", "organizations" or "login" +;; Landing page, can be "home", "explore", "organizations", "login", or any URL such as "/org/repo" or even "https://anotherwebsite.com" ;; The "login" choice is not a security measure but just a UI flow change, use REQUIRE_SIGNIN_VIEW to force users to log in. ;LANDING_PAGE = home ;; @@ -398,6 +398,7 @@ INTERNAL_TOKEN= ;; By modifying the Gitea database, users can gain Gitea administrator privileges. ;; It also enables them to access other resources available to the user on the operating system that is running the Gitea instance and perform arbitrary actions in the name of the Gitea OS user. 
;; WARNING: This maybe harmful to you website or your operating system. +;; WARNING: Setting this to true does not change existing hooks in git repos; adjust it before if necessary. ;DISABLE_GIT_HOOKS = true ;; ;; Set to true to disable webhooks feature. @@ -424,6 +425,23 @@ INTERNAL_TOKEN= ;; This cache will store the successfully hashed tokens in a LRU cache as a balance between performance and security. ;SUCCESSFUL_TOKENS_CACHE_SIZE = 20 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +[camo] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; At the moment we only support images +;; +;; if the camo is enabled +;ENABLED = false +;; url to a camo image proxy, it **is required** if camo is enabled. +;SERVER_URL = +;; HMAC to encode urls with, it **is required** if camo is enabled. +;HMAC_KEY = +;; Set to true to use camo for https too lese only non https urls are proxyed +;ALLWAYS = false + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; [oauth2] @@ -862,7 +880,7 @@ PATH = ;DISABLE_STARS = false ;; ;; The default branch name of new repositories -;DEFAULT_BRANCH = master +;DEFAULT_BRANCH = main ;; ;; Allow adoption of unadopted repositories ;ALLOW_ADOPTION_OF_UNADOPTED_REPOSITORIES = false @@ -1160,7 +1178,7 @@ PATH = ;; ;; Control how often the notification endpoint is polled to update the notification ;; The timeout will increase to MAX_TIMEOUT in TIMEOUT_STEPs if the notification count is unchanged -;; Set MIN_TIMEOUT to 0 to turn off +;; Set MIN_TIMEOUT to -1 to turn off ;MIN_TIMEOUT = 10s ;MAX_TIMEOUT = 60s ;TIMEOUT_STEP = 10s @@ -1245,7 +1263,7 @@ PATH = ;ISSUE_INDEXER_NAME = gitea_issues ;; ;; Timeout the indexer if it takes longer than this to start. -;; Set to zero to disable timeout. +;; Set to -1 to disable timeout. ;STARTUP_TIMEOUT = 30s ;; ;; Issue indexer queue, currently support: channel, levelqueue or redis, default is levelqueue (deprecated - use [queue.issue_indexer]) @@ -1533,6 +1551,7 @@ PATH = ;SENDMAIL_PATH = sendmail ;; ;; Specify any extra sendmail arguments +;; WARNING: if your sendmail program interprets options you should set this to "--" or terminate these args with "--" ;SENDMAIL_ARGS = ;; ;; Timeout for Sendmail @@ -1563,7 +1582,7 @@ PATH = ;HOST = ;; ;; Time to keep items in cache if not used, default is 16 hours. -;; Setting it to 0 disables caching +;; Setting it to -1 disables caching ;ITEM_TTL = 16h ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1577,7 +1596,7 @@ PATH = ;ENABLED = true ;; ;; Time to keep items in cache if not used, default is 8760 hours. -;; Setting it to 0 disables caching +;; Setting it to -1 disables caching ;ITEM_TTL = 8760h ;; ;; Only enable the cache when repository's commits count great than @@ -1751,8 +1770,8 @@ PATH = ;ENABLED = true ;; Whether to always run at least once at start up time (if ENABLED) ;RUN_AT_START = true -;; Notice if not success -;NO_SUCCESS_NOTICE = false +;; Whether to emit notice on successful execution too +;NOTICE_ON_SUCCESS = false ;; Time interval for job to run ;SCHEDULE = @midnight ;; Archives created more than OLDER_THAN ago are subject to deletion @@ -1771,7 +1790,7 @@ PATH = ;; Run Update mirrors task when Gitea starts. 
;RUN_AT_START = false ;; Notice if not success -;NO_SUCCESS_NOTICE = true +;NOTICE_ON_SUCCESS = false ;; Limit the number of mirrors added to the queue to this number ;; (negative values mean no limit, 0 will result in no result in no mirrors being queued effectively disabling pull mirror updating.) ;PULL_LIMIT=50 @@ -1792,7 +1811,7 @@ PATH = ;; Run Repository health check task when Gitea starts. ;RUN_AT_START = false ;; Notice if not success -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;TIMEOUT = 60s ;; Arguments for command 'git fsck', e.g. "--unreachable --tags" ;; see more on http://git-scm.com/docs/git-fsck @@ -1810,7 +1829,7 @@ PATH = ;; Run check repository statistics task when Gitea starts. ;RUN_AT_START = true ;; Notice if not success -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @midnight ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1823,7 +1842,7 @@ PATH = ;; Update migrated repositories' issues and comments' posterid when starting server (default true) ;RUN_AT_START = true ;; Notice if not success -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;; Interval as a duration between each synchronization. (default every 24h) ;SCHEDULE = @midnight @@ -1838,7 +1857,7 @@ PATH = ;; Synchronize external user data when starting server (default false) ;RUN_AT_START = false ;; Notice if not success -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;; Interval as a duration between each synchronization (default every 24h) ;SCHEDULE = @midnight ;; Create new users, update existing user data and disable users that are not in external source anymore (default) @@ -1856,7 +1875,7 @@ PATH = ;; Clean-up deleted branches when starting server (default true) ;RUN_AT_START = true ;; Notice if not success -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;; Interval as a duration between each synchronization (default every 24h) ;SCHEDULE = @midnight ;; deleted branches than OLDER_THAN ago are subject to deletion @@ -1882,6 +1901,24 @@ PATH = ;; If CLEANUP_TYPE is set to PerWebhook, this is number of hook_task records to keep for a webhook (i.e. keep the most recent x deliveries). 
;NUMBER_TO_KEEP = 10 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Cleanup expired packages +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.cleanup_packages] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Whether to enable the job +;ENABLED = true +;; Whether to always run at least once at start up time (if ENABLED) +;RUN_AT_START = true +;; Whether to emit notice on successful execution too +;NOTICE_ON_SUCCESS = false +;; Time interval for job to run +;SCHEDULE = @midnight +;; Unreferenced blobs created more than OLDER_THAN ago are subject to deletion +;OLDER_THAN = 24h + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1899,7 +1936,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @annually ;OLDER_THAN = 168h @@ -1912,7 +1949,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @annually; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1924,7 +1961,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @every 72h ;TIMEOUT = 60s ;; Arguments for command 'git gc' @@ -1940,7 +1977,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @every 72h ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1952,7 +1989,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @every 72h ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1964,7 +2001,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @every 72h ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1976,7 +2013,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @every 72h ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -1988,7 +2025,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @every 72h ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -2000,7 +2037,7 @@ PATH = ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;ENABLED = false ;RUN_AT_START = false -;NO_SUCCESS_NOTICE = false +;NOTICE_ON_SUCCESS = false ;SCHEDULE = @every 168h ;OLDER_THAN = 8760h @@ -2017,6 +2054,19 @@ PATH = ;SCHEDULE = @every 168h ;HTTP_ENDPOINT = https://dl.gitea.io/gitea/version.json +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Delete all old system notices from database +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[cron.delete_old_system_notices] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;ENABLED = false 
+;RUN_AT_START = false +;NO_SUCCESS_NOTICE = false +;SCHEDULE = @every 168h +;OLDER_THAN = 8760h + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Git Operation timeout in seconds @@ -2068,6 +2118,7 @@ PATH = ;[i18n] ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; The first locale will be used as the default if user browser's language doesn't match any locale in the list. ;LANGS = en-US,zh-CN,zh-HK,zh-TW,de-DE,fr-FR,nl-NL,lv-LV,ru-RU,uk-UA,ja-JP,es-ES,pt-BR,pt-PT,pl-PL,bg-BG,it-IT,fi-FI,tr-TR,cs-CZ,sr-SP,sv-SE,ko-KR,el-GR,fa-IR,hu-HU,id-ID,ml-IN ;NAMES = English,简体中文,繁體中文(香港),繁體中文(台灣),Deutsch,français,Nederlands,latviešu,русский,Українська,日本語,español,português do Brasil,Português de Portugal,polski,български,italiano,suomi,Türkçe,čeština,српски,svenska,한국어,ελληνικά,فارسی,magyar nyelv,bahasa Indonesia,മലയാളം @@ -2189,6 +2240,21 @@ PATH = ;; ;; Enable/Disable federation capabilities ; ENABLED = true +;; +;; Enable/Disable user statistics for nodeinfo if federation is enabled +; SHARE_USER_STATISTICS = true + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[packages] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; +;; Enable/Disable package registry capabilities +;ENABLED = true +;; +;; Path for chunked uploads. Defaults to APP_DATA_PATH + `tmp/package-upload` +;CHUNKED_UPLOAD_PATH = tmp/package-upload ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -2220,6 +2286,16 @@ PATH = ;; Where your lfs files reside, default is data/lfs. ;PATH = data/lfs +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; settings for packages, will override storage setting +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;[storage.packages] +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; storage type +;STORAGE_TYPE = local + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; customize storage diff --git a/docker/root/etc/s6/openssh/setup b/docker/root/etc/s6/openssh/setup index 89c03092be..f7843050c1 100755 --- a/docker/root/etc/s6/openssh/setup +++ b/docker/root/etc/s6/openssh/setup @@ -49,6 +49,7 @@ if [ -d /etc/ssh ]; then SSH_DSA_CERT="${SSH_DSA_CERT:+"HostCertificate "}${SSH_DSA_CERT}" \ SSH_MAX_STARTUPS="${SSH_MAX_STARTUPS:+"MaxStartups "}${SSH_MAX_STARTUPS}" \ SSH_MAX_SESSIONS="${SSH_MAX_SESSIONS:+"MaxSessions "}${SSH_MAX_SESSIONS}" \ + SSH_LOG_LEVEL=${SSH_LOG_LEVEL:-"INFO"} \ envsubst < /etc/templates/sshd_config > /etc/ssh/sshd_config chmod 0644 /etc/ssh/sshd_config diff --git a/docker/root/etc/templates/sshd_config b/docker/root/etc/templates/sshd_config index 8d336f3a8e..6f1a363045 100644 --- a/docker/root/etc/templates/sshd_config +++ b/docker/root/etc/templates/sshd_config @@ -8,7 +8,7 @@ ListenAddress :: ${SSH_MAX_STARTUPS} ${SSH_MAX_SESSIONS} -LogLevel INFO +LogLevel ${SSH_LOG_LEVEL} HostKey /data/ssh/ssh_host_ed25519_key ${SSH_ED25519_CERT} diff --git a/docs/config.yaml b/docs/config.yaml index 7dddf30969..41a1f7f022 100644 --- a/docs/config.yaml +++ b/docs/config.yaml @@ -18,10 +18,10 @@ params: description: Git with a cup of tea author: The Gitea Authors website: https://docs.gitea.io - version: 1.16.3 - minGoVersion: 1.16 - 
goVersion: 1.17 - minNodeVersion: 12.17 + version: 1.16.7 + minGoVersion: 1.17 + goVersion: 1.18 + minNodeVersion: 14 outputs: home: diff --git a/docs/content/doc/advanced/config-cheat-sheet.en-us.md b/docs/content/doc/advanced/config-cheat-sheet.en-us.md index 70bc2ee829..461795247d 100644 --- a/docs/content/doc/advanced/config-cheat-sheet.en-us.md +++ b/docs/content/doc/advanced/config-cheat-sheet.en-us.md @@ -75,7 +75,7 @@ Values containing `#` or `;` must be quoted using `` ` `` or `"""`. - `PREFIX_ARCHIVE_FILES`: **true**: Prefix archive files by placing them in a directory named after the repository. - `DISABLE_MIGRATIONS`: **false**: Disable migrating feature. - `DISABLE_STARS`: **false**: Disable stars feature. -- `DEFAULT_BRANCH`: **master**: Default branch name of all repositories. +- `DEFAULT_BRANCH`: **main**: Default branch name of all repositories. - `ALLOW_ADOPTION_OF_UNADOPTED_REPOSITORIES`: **false**: Allow non-admin users to adopt unadopted repositories - `ALLOW_DELETION_OF_UNADOPTED_REPOSITORIES`: **false**: Allow non-admin users to delete unadopted repositories @@ -206,7 +206,7 @@ The following configuration set `Content-Type: application/vnd.android.package-a ### UI - Notification (`ui.notification`) -- `MIN_TIMEOUT`: **10s**: These options control how often notification endpoint is polled to update the notification count. On page load the notification count will be checked after `MIN_TIMEOUT`. The timeout will increase to `MAX_TIMEOUT` by `TIMEOUT_STEP` if the notification count is unchanged. Set MIN_TIMEOUT to 0 to turn off. +- `MIN_TIMEOUT`: **10s**: These options control how often notification endpoint is polled to update the notification count. On page load the notification count will be checked after `MIN_TIMEOUT`. The timeout will increase to `MAX_TIMEOUT` by `TIMEOUT_STEP` if the notification count is unchanged. Set MIN_TIMEOUT to -1 to turn off. - `MAX_TIMEOUT`: **60s**. - `TIMEOUT_STEP`: **10s**. - `EVENT_SOURCE_UPDATE_TIME`: **10s**: This setting determines how often the database is queried to update notification counts. If the browser client supports `EventSource` and `SharedWorker`, a `SharedWorker` will be used in preference to polling notification endpoint. Set to **-1** to disable the `EventSource`. @@ -258,7 +258,7 @@ The following configuration set `Content-Type: application/vnd.android.package-a most cases you do not need to change the default value. Alter it only if your SSH server node is not the same as HTTP node. Do not set this variable if `PROTOCOL` is set to `http+unix`. -- `PER_WRITE_TIMEOUT`: **30s**: Timeout for any write to the connection. (Set to 0 to +- `PER_WRITE_TIMEOUT`: **30s**: Timeout for any write to the connection. (Set to -1 to disable all timeouts.) - `PER_WRITE_PER_KB_TIMEOUT`: **10s**: Timeout per Kb written to connections. @@ -280,14 +280,14 @@ The following configuration set `Content-Type: application/vnd.android.package-a - `SSH_AUTHORIZED_PRINCIPALS_BACKUP`: **false/true**: Enable SSH Authorized Principals Backup when rewriting all keys, default is true if `SSH_AUTHORIZED_PRINCIPALS_ALLOW` is not `off`. - `SSH_AUTHORIZED_KEYS_COMMAND_TEMPLATE`: **{{.AppPath}} --config={{.CustomConf}} serv key-{{.Key.ID}}**: Set the template for the command to passed on authorized keys. Possible keys are: AppPath, AppWorkPath, CustomConf, CustomPath, Key - where Key is a `models/asymkey.PublicKey` and the others are strings which are shellquoted. 
- `SSH_SERVER_CIPHERS`: **chacha20-poly1305@openssh.com, aes128-ctr, aes192-ctr, aes256-ctr, aes128-gcm@openssh.com, aes256-gcm@openssh.com**: For the built-in SSH server, choose the ciphers to support for SSH connections, for system SSH this setting has no effect. -- `SSH_SERVER_KEY_EXCHANGES`: **curve25519-sha256@libssh.org, ecdh-sha2-nistp256, ecdh-sha2-nistp384, ecdh-sha2-nistp521, diffie-hellman-group14-sha1**: For the built-in SSH server, choose the key exchange algorithms to support for SSH connections, for system SSH this setting has no effect. +- `SSH_SERVER_KEY_EXCHANGES`: **curve25519-sha256, ecdh-sha2-nistp256, ecdh-sha2-nistp384, ecdh-sha2-nistp521, diffie-hellman-group14-sha256, diffie-hellman-group14-sha1**: For the built-in SSH server, choose the key exchange algorithms to support for SSH connections, for system SSH this setting has no effect. - `SSH_SERVER_MACS`: **hmac-sha2-256-etm@openssh.com, hmac-sha2-256, hmac-sha1**: For the built-in SSH server, choose the MACs to support for SSH connections, for system SSH this setting has no effect - `SSH_SERVER_HOST_KEYS`: **ssh/gitea.rsa, ssh/gogs.rsa**: For the built-in SSH server, choose the keypairs to offer as the host key. The private key should be at `SSH_SERVER_HOST_KEY` and the public `SSH_SERVER_HOST_KEY.pub`. Relative paths are made absolute relative to the `APP_DATA_PATH`. If no key exists a 4096 bit RSA key will be created for you. - `SSH_KEY_TEST_PATH`: **/tmp**: Directory to create temporary files in when testing public keys using ssh-keygen, default is the system temporary directory. - `SSH_KEYGEN_PATH`: **ssh-keygen**: Path to ssh-keygen, default is 'ssh-keygen' which means the shell is responsible for finding out which one to call. - `SSH_EXPOSE_ANONYMOUS`: **false**: Enable exposure of SSH clone URL to anonymous visitors, default is false. - `SSH_PER_WRITE_TIMEOUT`: **30s**: Timeout for any write to the SSH connections. (Set to - 0 to disable all timeouts.) + -1 to disable all timeouts.) - `SSH_PER_WRITE_PER_KB_TIMEOUT`: **10s**: Timeout per Kb written to SSH connections. - `MINIMUM_KEY_SIZE_CHECK`: **true**: Indicate whether to check minimum key size with corresponding type. @@ -300,8 +300,7 @@ The following configuration set `Content-Type: application/vnd.android.package-a - `ENABLE_GZIP`: **false**: Enable gzip compression for runtime-generated content, static resources excluded. - `ENABLE_PPROF`: **false**: Application profiling (memory and cpu). For "web" command it listens on localhost:6060. For "serv" command it dumps to disk at `PPROF_DATA_PATH` as `(cpuprofile|memprofile)__` - `PPROF_DATA_PATH`: **data/tmp/pprof**: `PPROF_DATA_PATH`, use an absolute path when you start Gitea as service -- `LANDING_PAGE`: **home**: Landing page for unauthenticated users \[home, explore, organizations, login\]. - +- `LANDING_PAGE`: **home**: Landing page for unauthenticated users \[home, explore, organizations, login, **custom**\]. Where custom would instead be any URL such as "/org/repo" or even `https://anotherwebsite.com` - `LFS_START_SERVER`: **false**: Enables Git LFS support. - `LFS_CONTENT_PATH`: **%(APP_DATA_PATH)/lfs**: Default LFS content path. (if it is on local storage.) **DEPRECATED** use settings in `[lfs]`. - `LFS_JWT_SECRET`: **\**: LFS authentication secret, change this a unique string. @@ -416,7 +415,7 @@ relation to port exhaustion. - `REPO_INDEXER_EXCLUDE_VENDORED`: **true**: Exclude vendored files from index. - `UPDATE_BUFFER_LEN`: **20**: Buffer length of index request. 
**DEPRECATED** use settings in `[queue.issue_indexer]`. - `MAX_FILE_SIZE`: **1048576**: Maximum size in bytes of files to be indexed. -- `STARTUP_TIMEOUT`: **30s**: If the indexer takes longer than this timeout to start - fail. (This timeout will be added to the hammer time above for child processes - as bleve will not start until the previous parent is shutdown.) Set to zero to never timeout. +- `STARTUP_TIMEOUT`: **30s**: If the indexer takes longer than this timeout to start - fail. (This timeout will be added to the hammer time above for child processes - as bleve will not start until the previous parent is shutdown.) Set to -1 to never timeout. ## Queue (`queue` and `queue.*`) @@ -498,6 +497,7 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o It also enables them to access other resources available to the user on the operating system that is running the Gitea instance and perform arbitrary actions in the name of the Gitea OS user. This maybe harmful to you website or your operating system. + Setting this to true does not change existing hooks in git repos; adjust it before if necessary. - `DISABLE_WEBHOOKS`: **false**: Set to `true` to disable webhooks feature. - `ONLY_ALLOW_PUSH_IF_GITEA_ENVIRONMENT_SET`: **true**: Set to `false` to allow local users to push to gitea-repositories without setting up the Gitea environment. This is not recommended and if you want local users to push to Gitea repositories you should set the environment appropriately. - `IMPORT_LOCAL_PATHS`: **false**: Set to `false` to prevent all users (including admin) from importing local path on server. @@ -513,7 +513,14 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o - spec - use one or more special characters as ``!"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~`` - off - do not check password complexity - `PASSWORD_CHECK_PWN`: **false**: Check [HaveIBeenPwned](https://haveibeenpwned.com/Passwords) to see if a password has been exposed. -- `SUCCESSFUL_TOKENS_CACHE_SIZE`: **20**: Cache successful token hashes. API tokens are stored in the DB as pbkdf2 hashes however, this means that there is a potentially significant hashing load when there are multiple API operations. This cache will store the successfully hashed tokens in a LRU cache as a balance between performance and security. +- `SUCCESSFUL_TOKENS_CACHE_SIZE`: **20**: Cache successful token hashes. API tokens are stored in the DB as pbkdf2 hashes however, this means that there is a potentially significant hashing load when there are multiple API operations. This cache will store the successfully hashed tokens in a LRU cache as a balance between performance and security. + +## Camo (`camo`) + +- `ENABLED`: **false**: Enable media proxy, we support images only at the moment. +- `SERVER_URL`: ****: url of camo server, it **is required** if camo is enabled. +- `HMAC_KEY`: ****: Provide the HMAC key for encoding urls, it **is required** if camo is enabled. +- `ALLWAYS`: **false**: Set to true to use camo for https too lese only non https urls are proxyed ## OpenID (`openid`) @@ -666,7 +673,7 @@ Define allowed algorithms and their minimum key length (use -1 to disable a type - Enabling dummy will ignore all settings except `ENABLED`, `SUBJECT_PREFIX` and `FROM`. - `SENDMAIL_PATH`: **sendmail**: The location of sendmail on the operating system (can be command or full path). -- `SENDMAIL_ARGS`: **_empty_**: Specify any extra sendmail arguments. +- `SENDMAIL_ARGS`: **_empty_**: Specify any extra sendmail arguments. 
(NOTE: you should be aware that email addresses can look like options - if your `sendmail` command takes options you must set the option terminator `--`) - `SENDMAIL_TIMEOUT`: **5m**: default timeout for sending email through sendmail - `SENDMAIL_CONVERT_CRLF`: **true**: Most versions of sendmail prefer LF line endings rather than CRLF line endings. Set this to false if your version of sendmail requires CRLF line endings. - `SEND_BUFFER_LEN`: **100**: Buffer length of mailing queue. **DEPRECATED** use `LENGTH` in `[queue.mailer]` @@ -680,12 +687,12 @@ Define allowed algorithms and their minimum key length (use -1 to disable a type - Redis: `redis://:macaron@127.0.0.1:6379/0?pool_size=100&idle_timeout=180s` - Memcache: `127.0.0.1:9090;127.0.0.1:9091` - TwoQueue LRU cache: `{"size":50000,"recent_ratio":0.25,"ghost_ratio":0.5}` or `50000` representing the maximum number of objects stored in the cache. -- `ITEM_TTL`: **16h**: Time to keep items in cache if not used, Setting it to 0 disables caching. +- `ITEM_TTL`: **16h**: Time to keep items in cache if not used, Setting it to -1 disables caching. ## Cache - LastCommitCache settings (`cache.last_commit`) - `ENABLED`: **true**: Enable the cache. -- `ITEM_TTL`: **8760h**: Time to keep items in cache if not used, Setting it to 0 disables caching. +- `ITEM_TTL`: **8760h**: Time to keep items in cache if not used, Setting it to -1 disables caching. - `COMMITS_COUNT`: **1000**: Only enable the cache when repository's commits count great than. ## Session (`session`) @@ -816,7 +823,7 @@ Default templates for project boards: - `ENABLED`: **false**: Enable to run all cron tasks periodically with default settings. - `RUN_AT_START`: **false**: Run cron tasks at application start-up. -- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `NOTICE_ON_SUCCESS`: **false**: Set to true to switch on success notices. - `SCHEDULE` accept formats - Full crontab specs, e.g. `* * * * * ?` @@ -835,7 +842,6 @@ Default templates for project boards: #### Cron - Update Mirrors (`cron.update_mirrors`) - `SCHEDULE`: **@every 10m**: Cron syntax for scheduling update mirrors, e.g. `@every 3h`. -- `NO_SUCCESS_NOTICE`: **true**: The cron task for update mirrors success report is not very useful - as it just means that the mirrors have been queued. Therefore this is turned off by default. - `PULL_LIMIT`: **50**: Limit the number of mirrors added to the queue to this number (negative values mean no limit, 0 will result in no mirrors being queued effectively disabling pull mirror updating). - `PUSH_LIMIT`: **50**: Limit the number of mirrors added to the queue to this number (negative values mean no limit, 0 will result in no mirrors being queued effectively disabling push mirror updating). @@ -850,7 +856,7 @@ Default templates for project boards: - `RUN_AT_START`: **true**: Run repository statistics check at start time. - `SCHEDULE`: **@midnight**: Cron syntax for scheduling repository statistics check. -### Cron - Cleanup hook_task Table (`cron.cleanup_hook_task_table`) +#### Cron - Cleanup hook_task Table (`cron.cleanup_hook_task_table`) - `ENABLED`: **true**: Enable cleanup hook_task job. - `RUN_AT_START`: **false**: Run cleanup hook_task at start time (if ENABLED). @@ -859,6 +865,14 @@ Default templates for project boards: - `OLDER_THAN`: **168h**: If CLEANUP_TYPE is set to OlderThan, then any delivered hook_task records older than this expression will be deleted. 
- `NUMBER_TO_KEEP`: **10**: If CLEANUP_TYPE is set to PerWebhook, this is number of hook_task records to keep for a webhook (i.e. keep the most recent x deliveries). +#### Cron - Cleanup expired packages (`cron.cleanup_packages`) + +- `ENABLED`: **true**: Enable cleanup expired packages job. +- `RUN_AT_START`: **true**: Run job at start time (if ENABLED). +- `NOTICE_ON_SUCCESS`: **false**: Notify every time this job runs. +- `SCHEDULE`: **@midnight**: Cron syntax for the job. +- `OLDER_THAN`: **24h**: Unreferenced package data created more than OLDER_THAN ago is subject to deletion. + #### Cron - Update Migration Poster ID (`cron.update_migration_poster_id`) - `SCHEDULE`: **@midnight** : Interval as a duration between each synchronization, it will always attempt synchronization when the instance starts. @@ -875,43 +889,43 @@ Default templates for project boards: - `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED). - `SCHEDULE`: **@every 72h**: Cron syntax for scheduling repository archive cleanup, e.g. `@every 1h`. - `TIMEOUT`: **60s**: Time duration syntax for garbage collection execution timeout. -- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `NOTICE_ON_SUCCESS`: **false**: Set to true to switch on success notices. - `ARGS`: **\**: Arguments for command `git gc`, e.g. `--aggressive --auto`. The default value is same with [git] -> GC_ARGS #### Cron - Update the '.ssh/authorized_keys' file with Gitea SSH keys ('cron.resync_all_sshkeys') - `ENABLED`: **false**: Enable service. - `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED). -- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `NOTICE_ON_SUCCESS`: **false**: Set to true to switch on success notices. - `SCHEDULE`: **@every 72h**: Cron syntax for scheduling repository archive cleanup, e.g. `@every 1h`. #### Cron - Resynchronize pre-receive, update and post-receive hooks of all repositories ('cron.resync_all_hooks') - `ENABLED`: **false**: Enable service. - `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED). -- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `NOTICE_ON_SUCCESS`: **false**: Set to true to switch on success notices. - `SCHEDULE`: **@every 72h**: Cron syntax for scheduling repository archive cleanup, e.g. `@every 1h`. #### Cron - Reinitialize all missing Git repositories for which records exist ('cron.reinit_missing_repos') - `ENABLED`: **false**: Enable service. - `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED). -- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `NOTICE_ON_SUCCESS`: **false**: Set to true to switch on success notices. - `SCHEDULE`: **@every 72h**: Cron syntax for scheduling repository archive cleanup, e.g. `@every 1h`. #### Cron - Delete all repositories missing their Git files ('cron.delete_missing_repos') - `ENABLED`: **false**: Enable service. - `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED). -- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `NOTICE_ON_SUCCESS`: **false**: Set to true to switch on success notices. - `SCHEDULE`: **@every 72h**: Cron syntax for scheduling repository archive cleanup, e.g. `@every 1h`. #### Cron - Delete generated repository avatars ('cron.delete_generated_repository_avatars') - `ENABLED`: **false**: Enable service. - `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED). 
-- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `NOTICE_ON_SUCCESS`: **false**: Set to true to switch on success notices. - `SCHEDULE`: **@every 72h**: Cron syntax for scheduling repository archive cleanup, e.g. `@every 1h`. #### Cron - Delete all old actions from database ('cron.delete_old_actions') - `ENABLED`: **false**: Enable service. - `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED). -- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `NOTICE_ON_SUCCESS`: **false**: Set to true to switch on success notices. - `SCHEDULE`: **@every 168h**: Cron syntax to set how often to check. - `OLDER_THAN`: **@every 8760h**: any action older than this expression will be deleted from database, suggest using `8760h` (1 year) because that's the max length of heatmap. @@ -922,6 +936,13 @@ Default templates for project boards: - `SCHEDULE`: **@every 168h**: Cron syntax for scheduling a work, e.g. `@every 168h`. - `HTTP_ENDPOINT`: **https://dl.gitea.io/gitea/version.json**: the endpoint that Gitea will check for newer versions +#### Cron - Delete all old system notices from database ('cron.delete_old_system_notices') +- `ENABLED`: **false**: Enable service. +- `RUN_AT_START`: **false**: Run tasks at start up time (if ENABLED). +- `NO_SUCCESS_NOTICE`: **false**: Set to true to switch off success notices. +- `SCHEDULE`: **@every 168h**: Cron syntax to set how often to check. +- `OLDER_THAN`: **@every 8760h**: any system notice older than this expression will be deleted from database. + ## Git (`git`) - `PATH`: **""**: The path of Git executable. If empty, Gitea searches through the PATH environment. @@ -976,7 +997,8 @@ Default templates for project boards: ## i18n (`i18n`) -- `LANGS`: **en-US,zh-CN,zh-HK,zh-TW,de-DE,fr-FR,nl-NL,lv-LV,ru-RU,ja-JP,es-ES,pt-BR,pt-PT,pl-PL,bg-BG,it-IT,fi-FI,tr-TR,cs-CZ,sr-SP,sv-SE,ko-KR,el-GR,fa-IR,hu-HU,id-ID,ml-IN**: List of locales shown in language selector +- `LANGS`: **en-US,zh-CN,zh-HK,zh-TW,de-DE,fr-FR,nl-NL,lv-LV,ru-RU,ja-JP,es-ES,pt-BR,pt-PT,pl-PL,bg-BG,it-IT,fi-FI,tr-TR,cs-CZ,sr-SP,sv-SE,ko-KR,el-GR,fa-IR,hu-HU,id-ID,ml-IN**: + List of locales shown in language selector. The first locale will be used as the default if user browser's language doesn't match any locale in the list. - `NAMES`: **English,简体中文,繁體中文(香港),繁體中文(台灣),Deutsch,français,Nederlands,latviešu,русский,日本語,español,português do Brasil,Português de Portugal,polski,български,italiano,suomi,Türkçe,čeština,српски,svenska,한국어,ελληνικά,فارسی,magyar nyelv,bahasa Indonesia,മലയാളം**: Visible names corresponding to the locales ## U2F (`U2F`) **DEPRECATED** @@ -1063,6 +1085,12 @@ Task queue configuration has been moved to `queue.task`. However, the below conf ## Federation (`federation`) - `ENABLED`: **true**: Enable/Disable federation capabilities +- `SHARE_USER_STATISTICS`: **true**: Enable/Disable user statistics for nodeinfo if federation is enabled + +## Packages (`packages`) + +- `ENABLED`: **true**: Enable/Disable package registry capabilities +- `CHUNKED_UPLOAD_PATH`: **tmp/package-upload**: Path for chunked uploads. 
Defaults to `APP_DATA_PATH` + `tmp/package-upload` ## Mirror (`mirror`) diff --git a/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md b/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md index 600e54a85e..cc6e950fbd 100644 --- a/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md +++ b/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md @@ -175,12 +175,12 @@ menu: - `HOST`: **\**: 针对redis和memcache有效,主机地址和端口。 - Redis: `network=tcp,addr=127.0.0.1:6379,password=macaron,db=0,pool_size=100,idle_timeout=180` - Memache: `127.0.0.1:9090;127.0.0.1:9091` -- `ITEM_TTL`: **16h**: 缓存项目失效时间,设置为 0 则禁用缓存。 +- `ITEM_TTL`: **16h**: 缓存项目失效时间,设置为 -1 则禁用缓存。 ## Cache - LastCommitCache settings (`cache.last_commit`) - `ENABLED`: **true**: 是否启用。 -- `ITEM_TTL`: **8760h**: 缓存项目失效时间,设置为 0 则禁用缓存。 +- `ITEM_TTL`: **8760h**: 缓存项目失效时间,设置为 -1 则禁用缓存。 - `COMMITS_COUNT`: **1000**: 仅当仓库的提交数大于时才启用缓存。 ## Session (`session`) diff --git a/docs/content/doc/advanced/customizing-gitea.en-us.md b/docs/content/doc/advanced/customizing-gitea.en-us.md index 39a08308b8..ef798ddbf9 100644 --- a/docs/content/doc/advanced/customizing-gitea.en-us.md +++ b/docs/content/doc/advanced/customizing-gitea.en-us.md @@ -132,15 +132,18 @@ copy javascript files from https://gitea.com/davidsvantesson/plantuml-code-highl `$GITEA_CUSTOM/public` folder. Then add the following to `custom/footer.tmpl`: ```html -{{if .RequireHighlightJS}} - - - -{{end}} ``` You can then add blocks like the following to your markdown: @@ -299,6 +302,8 @@ LANGS = en-US,foo-BAR NAMES = English,FooBar ``` +The first locale will be used as the default if user browser's language doesn't match any locale in the list. + Locales may change between versions, so keeping track of your customized locales is highly encouraged. ### Readmes diff --git a/docs/content/doc/advanced/logging-documentation.en-us.md b/docs/content/doc/advanced/logging-documentation.en-us.md index dee1dbb6d6..bdde5bd8c4 100644 --- a/docs/content/doc/advanced/logging-documentation.en-us.md +++ b/docs/content/doc/advanced/logging-documentation.en-us.md @@ -287,6 +287,7 @@ MODE = console LEVEL = debug ; please set the level to debug when we are debugging a problem ROUTER = console COLORIZE = false ; this can be true if you can strip out the ansi coloring +ENABLE_SSH_LOG = true ; shows logs related to git over SSH. ``` Sometimes it will be helpful get some specific `TRACE` level logging restricted @@ -445,7 +446,7 @@ Gitea includes built-in log rotation, which should be enough for most deployment - Disable built-in log rotation by setting `LOG_ROTATE` to `false` in your `app.ini`. - Install `logrotate`. - Configure `logrotate` to match your deployment requirements, see `man 8 logrotate` for configuration syntax details. In the `postrotate/endscript` block send Gitea a `USR1` signal via `kill -USR1` or `kill -10` to the `gitea` process itself, or run `gitea manager logging release-and-reopen` (with the appropriate environment). Ensure that your configurations apply to all files emitted by Gitea loggers as described in the above sections. -- Always do `logrotate /etc/logrotate.conf --debug` to test your configurations. +- Always do `logrotate /etc/logrotate.conf --debug` to test your configurations. - If you are using docker and are running from outside of the container you can use `docker exec -u $OS_USER $CONTAINER_NAME sh -c 'gitea manager logging release-and-reopen'` or `docker exec $CONTAINER_NAME sh -c '/bin/s6-svc -1 /etc/s6/gitea/'` or send `USR1` directly to the Gitea process itself. 
The next `logrotate` jobs will include your configurations, so no restart is needed. You can also immediately reload `logrotate` with `logrotate /etc/logrotate.conf --force`. diff --git a/docs/content/doc/developers.en-us.md b/docs/content/doc/developers.en-us.md index c24a23dfae..917049e5df 100644 --- a/docs/content/doc/developers.en-us.md +++ b/docs/content/doc/developers.en-us.md @@ -8,6 +8,6 @@ draft: false menu: sidebar: name: "Developers" - weight: 50 + weight: 55 identifier: "developers" --- diff --git a/docs/content/doc/developers.zh-tw.md b/docs/content/doc/developers.zh-tw.md index e2fbd4a34f..c9ce6634ad 100644 --- a/docs/content/doc/developers.zh-tw.md +++ b/docs/content/doc/developers.zh-tw.md @@ -8,6 +8,6 @@ draft: false menu: sidebar: name: "開發人員" - weight: 50 + weight: 55 identifier: "developers" --- diff --git a/docs/content/doc/developers/guidelines-backend.md b/docs/content/doc/developers/guidelines-backend.md index d249465453..1248d41432 100644 --- a/docs/content/doc/developers/guidelines-backend.md +++ b/docs/content/doc/developers/guidelines-backend.md @@ -42,7 +42,7 @@ To maintain understandable code and avoid circular dependencies it is important - `modules/setting`: Store all system configurations read from ini files and has been referenced by everywhere. But they should be used as function parameters when possible. - `modules/git`: Package to interactive with `Git` command line or Gogit package. - `public`: Compiled frontend files (javascript, images, css, etc.) -- `routers`: Handling of server requests. As it uses other Gitea packages to serve the request, other packages (models, modules or services) shall not depend on routers. +- `routers`: Handling of server requests. As it uses other Gitea packages to serve the request, other packages (models, modules or services) must not depend on routers. - `routers/api` Contains routers for `/api/v1` aims to handle RESTful API requests. - `routers/install` Could only respond when system is in INSTALL mode (INSTALL_LOCK=false). - `routers/private` will only be invoked by internal sub commands, especially `serv` and `hooks`. @@ -106,10 +106,20 @@ i.e. `servcies/user`, `models/repository`. Since there are some packages which use the same package name, it is possible that you find packages like `modules/user`, `models/user`, and `services/user`. When these packages are imported in one Go file, it's difficult to know which package we are using and if it's a variable name or an import name. So, we always recommend to use import aliases. To differ from package variables which are commonly in camelCase, just use **snake_case** for import aliases. i.e. `import user_service "code.gitea.io/gitea/services/user"` +### Important Gotchas + +- Never write `x.Update(exemplar)` without an explicit `WHERE` clause: + - This will cause all rows in the table to be updated with the non-zero values of the exemplar - including IDs. + - You should usually write `x.ID(id).Update(exemplar)`. 
+- If during a migration you are inserting into a table using `x.Insert(exemplar)` where the ID is preset: + - You will need to ``SET IDENTITY_INSERT `table` ON`` for the MSSQL variant (the migration will fail otherwise) + - However, you will also need to update the id sequence for postgres - the migration will silently pass here but later insertions will fail: + ``SELECT setval('table_name_id_seq', COALESCE((SELECT MAX(id)+1 FROM `table_name`), 1), false)`` + ### Future Tasks Currently, we are creating some refactors to do the following things: - Correct that codes which doesn't follow the rules. - There are too many files in `models`, so we are moving some of them into a sub package `models/xxx`. -- Some `modules` sub packages should be moved to `services` because they depends on `models`. \ No newline at end of file +- Some `modules` sub packages should be moved to `services` because they depend on `models`. diff --git a/docs/content/doc/developers/guidelines-frontend.md b/docs/content/doc/developers/guidelines-frontend.md index 9fec5bd17e..874896c5dc 100644 --- a/docs/content/doc/developers/guidelines-frontend.md +++ b/docs/content/doc/developers/guidelines-frontend.md @@ -23,7 +23,13 @@ menu: Gitea uses [Less CSS](https://lesscss.org), [Fomantic-UI](https://fomantic-ui.com/introduction/getting-started.html) (based on [jQuery](https://api.jquery.com)) and [Vue2](https://vuejs.org/v2/guide/) for its frontend. -The HTML pages are rendered by [Go HTML Template](https://pkg.go.dev/html/template) +The HTML pages are rendered by [Go HTML Template](https://pkg.go.dev/html/template). + +The source files can be found in the following directories: +* **Less styles:** `web_src/less/` +* **Javascript files:** `web_src/js/` +* **Vue layouts:** `web_src/js/components/` +* **HTML templates:** `templates/` ## General Guidelines diff --git a/docs/content/doc/features/comparison.en-us.md b/docs/content/doc/features/comparison.en-us.md index 745c5d37bc..36180e3f5b 100644 --- a/docs/content/doc/features/comparison.en-us.md +++ b/docs/content/doc/features/comparison.en-us.md @@ -34,25 +34,25 @@ _Symbols used in table:_ ## General Features | Feature | Gitea | Gogs | GitHub EE | GitLab CE | GitLab EE | BitBucket | RhodeCode CE | -| ----------------------------------- | -------------------------------------------------- | ---- | --------- | --------- | --------- | -------------- | ------------ | -| Open source and free | ✓ | ✓ | ✘ | ✓ | ✘ | ✘ | ✓ | -| Low resource usage (RAM/CPU) | ✓ | ✓ | ✘ | ✘ | ✘ | ✘ | ✘ | -| Multiple database support | ✓ | ✓ | ✘ | ⁄ | ⁄ | ✓ | ✓ | -| Multiple OS support | ✓ | ✓ | ✘ | ✘ | ✘ | ✘ | ✓ | -| Easy upgrade process | ✓ | ✓ | ✘ | ✓ | ✓ | ✘ | ✓ | -| Markdown support | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | -| Orgmode support | ✓ | ✘ | ✓ | ✘ | ✘ | ✘ | ? | -| CSV support | ✓ | ✘ | ✓ | ✘ | ✘ | ✓ | ? | -| Third-party render tool support | ✓ | ✘ | ✘ | ✘ | ✘ | ✓ | ? | -| Static Git-powered pages | [✘](https://github.com/go-gitea/gitea/issues/302) | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ | -| Integrated Git-powered wiki | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ (cloud only) | ✘ | -| Deploy Tokens | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | -| Repository Tokens with write rights | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ | -| Built-in Container Registry | [✘](https://github.com/go-gitea/gitea/issues/2316) | ✘ | ✘ | ✓ | ✓ | ✘ | ✘ | -| External git mirroring | ✓ | ✓ | ✘ | ✘ | ✓ | ✓ | ✓ | -| WebAuthn (2FA) | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ? 
| -| Built-in CI/CD | ✘ | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ | -| Subgroups: groups within groups | ✘ | ✘ | ✘ | ✓ | ✓ | ✘ | ✓ | +| ----------------------------------- | ---------------------------------------------------| ---- | --------- | --------- | --------- | -------------- | ------------ | +| Open source and free | ✓ | ✓ | ✘ | ✓ | ✘ | ✘ | ✓ | +| Low resource usage (RAM/CPU) | ✓ | ✓ | ✘ | ✘ | ✘ | ✘ | ✘ | +| Multiple database support | ✓ | ✓ | ✘ | ⁄ | ⁄ | ✓ | ✓ | +| Multiple OS support | ✓ | ✓ | ✘ | ✘ | ✘ | ✘ | ✓ | +| Easy upgrade process | ✓ | ✓ | ✘ | ✓ | ✓ | ✘ | ✓ | +| Markdown support | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | +| Orgmode support | ✓ | ✘ | ✓ | ✘ | ✘ | ✘ | ? | +| CSV support | ✓ | ✘ | ✓ | ✘ | ✘ | ✓ | ? | +| Third-party render tool support | ✓ | ✘ | ✘ | ✘ | ✘ | ✓ | ? | +| Static Git-powered pages | [✘](https://github.com/go-gitea/gitea/issues/302) | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ | +| Integrated Git-powered wiki | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ (cloud only) | ✘ | +| Deploy Tokens | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | +| Repository Tokens with write rights | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ✓ | +| Built-in Package/Container Registry | ✓ | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ | +| External git mirroring | ✓ | ✓ | ✘ | ✘ | ✓ | ✓ | ✓ | +| WebAuthn (2FA) | ✓ | ✘ | ✓ | ✓ | ✓ | ✓ | ? | +| Built-in CI/CD | ✘ | ✘ | ✓ | ✓ | ✓ | ✘ | ✘ | +| Subgroups: groups within groups | ✘ | ✘ | ✘ | ✓ | ✓ | ✘ | ✓ | ## Code management diff --git a/docs/content/doc/installation/from-binary.en-us.md b/docs/content/doc/installation/from-binary.en-us.md index 59a92758e0..d3486d8150 100644 --- a/docs/content/doc/installation/from-binary.en-us.md +++ b/docs/content/doc/installation/from-binary.en-us.md @@ -50,7 +50,8 @@ Of note, configuring `GITEA_WORK_DIR` will tell Gitea where to base its working ### Prepare environment -Check that Git is installed on the server. If it is not, install it first. +Check that Git is installed on the server. If it is not, install it first. Gitea requires Git version >= 2.0. 
+ ```sh git --version ``` diff --git a/docs/content/doc/installation/on-kubernetes.en-us.md b/docs/content/doc/installation/on-kubernetes.en-us.md index 9fe869254c..abfbdf1679 100644 --- a/docs/content/doc/installation/on-kubernetes.en-us.md +++ b/docs/content/doc/installation/on-kubernetes.en-us.md @@ -25,3 +25,47 @@ helm install gitea gitea-charts/gitea ``` If you would like to customize your install, which includes kubernetes ingress, please refer to the complete [Gitea helm chart configuration details](https://gitea.com/gitea/helm-chart/) + +## Health check endpoint + +Gitea comes with a health check endpoint `/api/healthz`, you can configure it in kubernetes like this: + +```yaml + livenessProbe: + httpGet: + path: /api/healthz + port: http + initialDelaySeconds: 200 + timeoutSeconds: 5 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 10 +``` + +a successful health check response will respond with http code `200`, here's example: + +``` +HTTP/1.1 200 OK + + +{ + "status": "pass", + "description": "Gitea: Git with a cup of tea", + "checks": { + "cache:ping": [ + { + "status": "pass", + "time": "2022-02-19T09:16:08Z" + } + ], + "database:ping": [ + { + "status": "pass", + "time": "2022-02-19T09:16:08Z" + } + ] + } +} +``` + +for more information, please reference to kubernetes documentation [Define a liveness HTTP request](https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-a-liveness-http-request) diff --git a/docs/content/doc/installation/on-kubernetes.zh-tw.md b/docs/content/doc/installation/on-kubernetes.zh-tw.md index 9add5c4ee1..5ea412aa00 100644 --- a/docs/content/doc/installation/on-kubernetes.zh-tw.md +++ b/docs/content/doc/installation/on-kubernetes.zh-tw.md @@ -25,3 +25,47 @@ helm install gitea gitea-charts/gitea ``` 若您想自訂安裝(包括使用 kubernetes ingress),請前往完整的 [Gitea helm chart configuration details](https://gitea.com/gitea/helm-chart/) + +##運行狀況檢查終端節點 + +Gitea 附帶了一個運行狀況檢查端點 `/api/healthz`,你可以像這樣在 kubernetes 中配置它: + +```yaml + livenessProbe: + httpGet: + path: /api/healthz + port: http + initialDelaySeconds: 200 + timeoutSeconds: 5 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 10 +``` + +成功的運行狀況檢查回應將使用 HTTP 代碼 `200` 進行回應,下面是示例: + +``` +HTTP/1.1 200 OK + + +{ + "status": "pass", + "description": "Gitea: Git with a cup of tea", + "checks": { + "cache:ping": [ + { + "status": "pass", + "time": "2022-02-19T09:16:08Z" + } + ], + "database:ping": [ + { + "status": "pass", + "time": "2022-02-19T09:16:08Z" + } + ] + } +} +``` + +有關更多信息,請參考kubernetes文檔[定義一個存活態 HTTP請求接口](https://kubernetes.io/zh/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/) diff --git a/docs/content/doc/installation/with-docker-rootless.en-us.md b/docs/content/doc/installation/with-docker-rootless.en-us.md index e3de969122..634e08a72e 100644 --- a/docs/content/doc/installation/with-docker-rootless.en-us.md +++ b/docs/content/doc/installation/with-docker-rootless.en-us.md @@ -147,7 +147,7 @@ services: + - db + + db: -+ image: postgres:13 ++ image: postgres:14 + restart: always + environment: + - POSTGRES_USER=gitea diff --git a/docs/content/doc/installation/with-docker.en-us.md b/docs/content/doc/installation/with-docker.en-us.md index 77825729c2..66e596ea4d 100644 --- a/docs/content/doc/installation/with-docker.en-us.md +++ b/docs/content/doc/installation/with-docker.en-us.md @@ -187,7 +187,7 @@ services: + - db + + db: -+ image: postgres:13 ++ image: postgres:14 + restart: always + environment: + - 
POSTGRES_USER=gitea @@ -590,7 +590,7 @@ Add the following block to `/etc/ssh/sshd_config`, on the host: ```bash Match User git AuthorizedKeysCommandUser git - AuthorizedKeysCommand ssh -p 2222 -o StrictHostKeyChecking=no git@127.0.0.1 /usr/local/bin/gitea keys -c /data/gitea/conf/app.ini -e git -u %u -t %t -k %k + AuthorizedKeysCommand /usr/bin/ssh -p 2222 -o StrictHostKeyChecking=no git@127.0.0.1 /usr/local/bin/gitea keys -c /data/gitea/conf/app.ini -e git -u %u -t %t -k %k ``` (From 1.16.0 you will not need to set the `-c /data/gitea/conf/app.ini` option.) diff --git a/docs/content/doc/installation/with-docker.fr-fr.md b/docs/content/doc/installation/with-docker.fr-fr.md index 0011ba2ff4..176abf7a12 100644 --- a/docs/content/doc/installation/with-docker.fr-fr.md +++ b/docs/content/doc/installation/with-docker.fr-fr.md @@ -43,7 +43,7 @@ Vous devriez avoir une instance fonctionnelle de Gitea. Pour accèder à l'inter ## Named Volumes -Ce guide aboutira à une installation avec les données Gita et PostgreSQL stockées dans des volumes nommés. Cela permet une sauvegarde, une restauration et des mises à niveau en toute simplicité. +Ce guide aboutira à une installation avec les données Gitea et PostgreSQL stockées dans des volumes nommés. Cela permet une sauvegarde, une restauration et des mises à niveau en toute simplicité. ### The Database diff --git a/docs/content/doc/installation/with-docker.zh-cn.md b/docs/content/doc/installation/with-docker.zh-cn.md index 8461951f72..77577736f2 100644 --- a/docs/content/doc/installation/with-docker.zh-cn.md +++ b/docs/content/doc/installation/with-docker.zh-cn.md @@ -172,7 +172,7 @@ services: + - db + + db: -+ image: postgres:13 ++ image: postgres:14 + restart: always + environment: + - POSTGRES_USER=gitea diff --git a/docs/content/doc/packages.en-us.md b/docs/content/doc/packages.en-us.md new file mode 100644 index 0000000000..e613b6b250 --- /dev/null +++ b/docs/content/doc/packages.en-us.md @@ -0,0 +1,12 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "Package Registry" +slug: "packages" +toc: false +draft: false +menu: + sidebar: + name: "Package Registry" + weight: 45 + identifier: "packages" +--- diff --git a/docs/content/doc/packages/composer.en-us.md b/docs/content/doc/packages/composer.en-us.md new file mode 100644 index 0000000000..2502ee45b5 --- /dev/null +++ b/docs/content/doc/packages/composer.en-us.md @@ -0,0 +1,120 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "Composer Packages Repository" +slug: "packages/composer" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "Composer" + weight: 10 + identifier: "composer" +--- + +# Composer Packages Repository + +Publish [Composer](https://getcomposer.org/) packages for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the Composer package registry, you can use [Composer](https://getcomposer.org/download/) to consume and a HTTP upload client like `curl` to publish packages. + +## Publish a package + +To publish a Composer package perform a HTTP PUT operation with the package content in the request body. +The package content must be the zipped PHP project with the `composer.json` file. +You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first. + +``` +PUT https://gitea.example.com/api/packages/{owner}/composer +``` + +| Parameter | Description | +| ---------- | ----------- | +| `owner` | The owner of the package. 
| + +If the `composer.json` file does not contain a `version` property, you must provide it as a query parameter: + +``` +PUT https://gitea.example.com/api/packages/{owner}/composer?version={x.y.z} +``` + +Example request using HTTP Basic authentication: + +```shell +curl --user your_username:your_password_or_token \ + --upload-file path/to/project.zip \ + https://gitea.example.com/api/packages/testuser/composer +``` + +Or specify the package version as query parameter: + +```shell +curl --user your_username:your_password_or_token \ + --upload-file path/to/project.zip \ + https://gitea.example.com/api/packages/testuser/composer?version=1.0.3 +``` + +The server responds with the following HTTP Status codes. + +| HTTP Status Code | Meaning | +| ----------------- | ------- | +| `201 Created` | The package has been published. | +| `400 Bad Request` | The package name and/or version are invalid or a package with the same name and version already exist. | + +## Configuring the package registry + +To register the package registry you need to add it to the Composer `config.json` file (which can usually be found under `/.composer/config.json`): + +```json +{ + "repositories": [{ + "type": "composer", + "url": "https://gitea.example.com/api/packages/{owner}/composer" + } + ] +} +``` + +To access the package registry using credentials, you must specify them in the `auth.json` file as follows: + +```json +{ + "http-basic": { + "gitea.example.com": { + "username": "{username}", + "password": "{password}" + } + } +} +``` + +| Parameter | Description | +| ---------- | ----------- | +| `owner` | The owner of the package. | +| `username` | Your Gitea username. | +| `password` | Your Gitea password or a personal access token. | + +## Install a package + +To install a package from the package registry, execute the following command: + +```shell +composer require {package_name} +``` + +Optional you can specify the package version: + +```shell +composer require {package_name}:{package_version} +``` + +| Parameter | Description | +| ----------------- | ----------- | +| `package_name` | The package name. | +| `package_version` | The package version. | diff --git a/docs/content/doc/packages/conan.en-us.md b/docs/content/doc/packages/conan.en-us.md new file mode 100644 index 0000000000..c650e9d7ea --- /dev/null +++ b/docs/content/doc/packages/conan.en-us.md @@ -0,0 +1,101 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "Conan Packages Repository" +slug: "packages/conan" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "Conan" + weight: 20 + identifier: "conan" +--- + +# Conan Packages Repository + +Publish [Conan](https://conan.io/) packages for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the Conan package registry, you need to use the [conan](https://conan.io/downloads.html) command line tool to consume and publish packages. + +## Configuring the package registry + +To register the package registry you need to configure a new Conan remote: + +```shell +conan remote add {remote} https://gitea.example.com/api/packages/{owner}/conan +conan user --remote {remote} --password {password} {username} +``` + +| Parameter | Description | +| -----------| ----------- | +| `remote` | The remote name. | +| `username` | Your Gitea username. | +| `password` | Your Gitea password or a personal access token. | +| `owner` | The owner of the package. 
| + +For example: + +```shell +conan remote add gitea https://gitea.example.com/api/packages/testuser/conan +conan user --remote gitea --password password123 testuser +``` + +## Publish a package + +Publish a Conan package by running the following command: + +```shell +conan upload --remote={remote} {recipe} +``` + +| Parameter | Description | +| ----------| ----------- | +| `remote` | The remote name. | +| `recipe` | The recipe to upload. | + +For example: + +```shell +conan upload --remote=gitea ConanPackage/1.2@gitea/final +``` + +The Gitea Conan package registry has full [revision](https://docs.conan.io/en/latest/versioning/revisions.html) support. + +## Install a package + +To install a Conan package from the package registry, execute the following command: + +```shell +conan install --remote={remote} {recipe} +``` + +| Parameter | Description | +| ----------| ----------- | +| `remote` | The remote name. | +| `recipe` | The recipe to download. | + +For example: + +```shell +conan install --remote=gitea ConanPackage/1.2@gitea/final +``` + +## Supported commands + +``` +conan install +conan get +conan info +conan search +conan upload +conan user +conan download +conan remove +``` diff --git a/docs/content/doc/packages/container.en-us.md b/docs/content/doc/packages/container.en-us.md new file mode 100644 index 0000000000..28559eb22b --- /dev/null +++ b/docs/content/doc/packages/container.en-us.md @@ -0,0 +1,91 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "Container Registry" +slug: "packages/container" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "Container Registry" + weight: 30 + identifier: "container" +--- + +# Container Registry + +Publish [Open Container Initiative](https://opencontainers.org/) compliant images for your user or organization. +The container registry follows the OCI specs and supports all compatible images like [Docker](https://www.docker.com/) and [Helm Charts](https://helm.sh/). + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the Container registry, you can use the tools for your specific image type. +The following examples use the `docker` client. + +## Login to the container registry + +To push an image or if the image is in a private registry, you have to authenticate: + +```shell +docker login gitea.example.com +``` + +## Image naming convention + +Images must follow this naming convention: + +`{registry}/{owner}/{image}` + +For example, these are all valid image names for the owner `testuser`: + +`gitea.example.com/testuser/myimage` + +`gitea.example.com/testuser/my-image` + +`gitea.example.com/testuser/my/image` + +**NOTE:** The registry only supports case-insensitive tag names. So `image:tag` and `image:Tag` get treated as the same image and tag. + +## Push an image + +Push an image by executing the following command: + +```shell +docker push gitea.example.com/{owner}/{image}:{tag} +``` + +| Parameter | Description | +| ----------| ----------- | +| `owner` | The owner of the image. | +| `image` | The name of the image. | +| `tag` | The tag of the image. | + +For example: + +```shell +docker push gitea.example.com/testuser/myimage:latest +``` + +## Pull an image + +Pull an image by executing the following command: + +```shell +docker pull gitea.example.com/{owner}/{image}:{tag} +``` + +| Parameter | Description | +| ----------| ----------- | +| `owner` | The owner of the image. | +| `image` | The name of the image. | +| `tag` | The tag of the image. 
| + +For example: + +```shell +docker pull gitea.example.com/testuser/myimage:latest +``` diff --git a/docs/content/doc/packages/generic.en-us.md b/docs/content/doc/packages/generic.en-us.md new file mode 100644 index 0000000000..afef323938 --- /dev/null +++ b/docs/content/doc/packages/generic.en-us.md @@ -0,0 +1,80 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "Generic Packages Repository" +slug: "packages/generic" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "Generic" + weight: 40 + identifier: "generic" +--- + +# Generic Packages Repository + +Publish generic files, like release binaries or other output, for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Authenticate to the package registry + +To authenticate to the Package Registry, you need to provide [custom HTTP headers or use HTTP Basic authentication]({{< relref "doc/developers/api-usage.en-us.md#authentication" >}}). + +## Publish a package + +To publish a generic package perform a HTTP PUT operation with the package content in the request body. +You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first. + +``` +PUT https://gitea.example.com/api/packages/{owner}/generic/{package_name}/{package_version}/{file_name} +``` + +| Parameter | Description | +| ----------------- | ----------- | +| `owner` | The owner of the package. | +| `package_name` | The package name. It can contain only lowercase letters (`a-z`), uppercase letter (`A-Z`), numbers (`0-9`), dots (`.`), hyphens (`-`), or underscores (`_`). | +| `package_version` | The package version as described in the [SemVer](https://semver.org/) spec. | +| `file_name` | The filename. It can contain only lowercase letters (`a-z`), uppercase letter (`A-Z`), numbers (`0-9`), dots (`.`), hyphens (`-`), or underscores (`_`). | + +Example request using HTTP Basic authentication: + +```shell +curl --user your_username:your_password_or_token \ + --upload-file path/to/file.bin \ + https://gitea.example.com/api/packages/testuser/generic/test_package/1.0.0/file.bin +``` + +The server reponds with the following HTTP Status codes. + +| HTTP Status Code | Meaning | +| ----------------- | ------- | +| `201 Created` | The package has been published. | +| `400 Bad Request` | The package name and/or version are invalid or a package with the same name and version already exist. | + +## Download a package + +To download a generic package perform a HTTP GET operation. + +``` +GET https://gitea.example.com/api/packages/{owner}/generic/{package_name}/{package_version}/{file_name} +``` + +| Parameter | Description | +| ----------------- | ----------- | +| `owner` | The owner of the package. | +| `package_name` | The package name. | +| `package_version` | The package version. | +| `file_name` | The filename. | + +The file content is served in the response body. The response content type is `application/octet-stream`. 
+ +Example request using HTTP Basic authentication: + +```shell +curl --user your_username:your_token_or_password \ + https://gitea.example.com/api/packages/testuser/generic/test_package/1.0.0/file.bin +``` diff --git a/docs/content/doc/packages/helm.en-us.md b/docs/content/doc/packages/helm.en-us.md new file mode 100644 index 0000000000..9c43b08bf4 --- /dev/null +++ b/docs/content/doc/packages/helm.en-us.md @@ -0,0 +1,67 @@ +--- +date: "2022-04-14T00:00:00+00:00" +title: "Helm Chart Registry" +slug: "packages/helm" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "Helm" + weight: 50 + identifier: "helm" +--- + +# Helm Chart Registry + +Publish [Helm](https://helm.sh/) charts for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the Helm Chart registry use a simple HTTP client like `curl` or the [`helm cm-push`](https://github.com/chartmuseum/helm-push/) plugin. + +## Publish a package + +Publish a package by running the following command: + +```shell +curl --user {username}:{password} -X POST --upload-file ./{chart_file}.tgz https://gitea.example.com/api/packages/{owner}/helm/api/charts +``` + +or with the `helm cm-push` plugin: + +```shell +helm repo add --username {username} --password {password} {repo} https://gitea.example.com/api/packages/{owner}/helm +helm cm-push ./{chart_file}.tgz {repo} +``` + +| Parameter | Description | +| ------------ | ----------- | +| `username` | Your Gitea username. | +| `password` | Your Gitea password or a personal access token. | +| `repo` | The name for the repository. | +| `chart_file` | The Helm Chart archive. | +| `owner` | The owner of the package. | + +## Install a package + +To install a Helm char from the registry, execute the following command: + +```shell +helm repo add --username {username} --password {password} {repo} https://gitea.example.com/api/packages/{owner}/helm +helm repo update +helm install {name} {repo}/{chart} +``` + +| Parameter | Description | +| ---------- | ----------- | +| `username` | Your Gitea username. | +| `password` | Your Gitea password or a personal access token. | +| `repo` | The name for the repository. | +| `owner` | The owner of the package. | +| `name` | The local name. | +| `chart` | The name Helm Chart. | diff --git a/docs/content/doc/packages/maven.en-us.md b/docs/content/doc/packages/maven.en-us.md new file mode 100644 index 0000000000..837c8434ae --- /dev/null +++ b/docs/content/doc/packages/maven.en-us.md @@ -0,0 +1,110 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "Maven Packages Repository" +slug: "packages/maven" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "Maven" + weight: 60 + identifier: "maven" +--- + +# Maven Packages Repository + +Publish [Maven](https://maven.apache.org) packages for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the Maven package registry, you can use [Maven](https://maven.apache.org/install.html) or [Gradle](https://gradle.org/install/). +The following examples use `Maven`. 
+ +## Configuring the package registry + +To register the package registry you first need to add your access token to the [`settings.xml`](https://maven.apache.org/settings.html) file: + +```xml + + + + gitea + + + + Authorization + token {access_token} + + + + + + +``` + +Afterwards add the following sections to your project `pom.xml` file: + +```xml + + + gitea + https://gitea.example.com/api/packages/{owner}/maven + + + + + gitea + https://gitea.example.com/api/packages/{owner}/maven + + + gitea + https://gitea.example.com/api/packages/{owner}/maven + + +``` + +| Parameter | Description | +| -------------- | ----------- | +| `access_token` | Your [personal access token]({{< relref "doc/developers/api-usage.en-us.md#authentication" >}}). | +| `owner` | The owner of the package. | + +## Publish a package + +To publish a package simply run: + +```shell +mvn deploy +``` + +You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first. + +## Install a package + +To install a Maven package from the package registry, add a new dependency to your project `pom.xml` file: + +```xml + + com.test.package + test_project + 1.0.0 + +``` + +Afterwards run: + +```shell +mvn install +``` + +## Supported commands + +``` +mvn install +mvn deploy +mvn dependency:get: +``` \ No newline at end of file diff --git a/docs/content/doc/packages/npm.en-us.md b/docs/content/doc/packages/npm.en-us.md new file mode 100644 index 0000000000..9ab4ac900c --- /dev/null +++ b/docs/content/doc/packages/npm.en-us.md @@ -0,0 +1,118 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "npm Packages Repository" +slug: "packages/npm" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "npm" + weight: 70 + identifier: "npm" +--- + +# npm Packages Repository + +Publish [npm](https://www.npmjs.com/) packages for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the npm package registry, you need [Node.js](https://nodejs.org/en/download/) coupled with a package manager such as [Yarn](https://classic.yarnpkg.com/en/docs/install) or [npm](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm/) itself. + +The registry supports [scoped](https://docs.npmjs.com/misc/scope/) and unscoped packages. + +The following examples use the `npm` tool with the scope `@test`. + +## Configuring the package registry + +To register the package registry you need to configure a new package source. + +```shell +npm config set {scope}:registry https://gitea.example.com/api/packages/{owner}/npm/ +npm config set -- '//gitea.example.com/api/packages/{owner}/npm/:_authToken' "{token}" +``` + +| Parameter | Description | +| ------------ | ----------- | +| `scope` | The scope of the packages. | +| `owner` | The owner of the package. | +| `token` | Your [personal access token]({{< relref "doc/developers/api-usage.en-us.md#authentication" >}}). 
| + +For example: + +```shell +npm config set @test:registry https://gitea.example.com/api/packages/testuser/npm/ +npm config set -- '//gitea.example.com/api/packages/testuser/npm/:_authToken' "personal_access_token" +``` + +or without scope: + +```shell +npm config set registry https://gitea.example.com/api/packages/testuser/npm/ +npm config set -- '//gitea.example.com/api/packages/testuser/npm/:_authToken' "personal_access_token" +``` + +## Publish a package + +Publish a package by running the following command in your project: + +```shell +npm publish +``` + +You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first. + +## Install a package + +To install a package from the package registry, execute the following command: + +```shell +npm install {package_name} +``` + +| Parameter | Description | +| -------------- | ----------- | +| `package_name` | The package name. | + +For example: + +```shell +npm install @test/test_package +``` + +## Tag a package + +The registry supports [version tags](https://docs.npmjs.com/adding-dist-tags-to-packages/) which can be managed by `npm dist-tag`: + +```shell +npm dist-tag add {package_name}@{version} {tag} +``` + +| Parameter | Description | +| -------------- | ----------- | +| `package_name` | The package name. | +| `version` | The version of the package. | +| `tag` | The tag name. | + +For example: + +```shell +npm dist-tag add test_package@1.0.2 release +``` + +The tag name must not be a valid version. All tag names which are parsable as a version are rejected. + +## Supported commands + +``` +npm install +npm ci +npm publish +npm dist-tag +npm view +``` diff --git a/docs/content/doc/packages/nuget.en-us.md b/docs/content/doc/packages/nuget.en-us.md new file mode 100644 index 0000000000..0b92d85a3d --- /dev/null +++ b/docs/content/doc/packages/nuget.en-us.md @@ -0,0 +1,116 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "NuGet Packages Repository" +slug: "packages/nuget" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "NuGet" + weight: 80 + identifier: "nuget" +--- + +# NuGet Packages Repository + +Publish [NuGet](https://www.nuget.org/) packages for your user or organization. The package registry supports [NuGet Symbol Packages](https://docs.microsoft.com/en-us/nuget/create-packages/symbol-packages-snupkg) too. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the NuGet package registry, you can use command-line interface tools as well as NuGet features in various IDEs like Visual Studio. +More informations about NuGet clients can be found in [the official documentation](https://docs.microsoft.com/en-us/nuget/install-nuget-client-tools). +The following examples use the `dotnet nuget` tool. + +## Configuring the package registry + +To register the package registry you need to configure a new NuGet feed source: + +```shell +dotnet nuget add source --name {source_name} --username {username} --password {password} https://gitea.example.com/api/packages/{owner}/nuget/index.json +``` + +| Parameter | Description | +| ------------- | ----------- | +| `source_name` | The desired source name. | +| `username` | Your Gitea username. | +| `password` | Your Gitea password or a personal access token. | +| `owner` | The owner of the package. 
| + +For example: + +```shell +dotnet nuget add source --name gitea --username testuser --password password123 https://gitea.example.com/api/packages/testuser/nuget/index.json +``` + +## Publish a package + +Publish a package by running the following command: + +```shell +dotnet nuget push --source {source_name} {package_file} +``` + +| Parameter | Description | +| -------------- | ----------- | +| `source_name` | The desired source name. | +| `package_file` | Path to the package `.nupkg` file. | + +For example: + +```shell +dotnet nuget push --source gitea test_package.1.0.0.nupkg +``` + +You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first. + +### Symbol Packages + +The NuGet package registry has build support for a symbol server. The PDB files embedded in a symbol package (`.snupkg`) can get requested by clients. +To do so, register the NuGet package registry as symbol source: + +``` +https://gitea.example.com/api/packages/{owner}/nuget/symbols +``` + +| Parameter | Description | +| --------- | ----------- | +| `owner` | The owner of the package registry. | + +For example: + +``` +https://gitea.example.com/api/packages/testuser/nuget/symbols +``` + +## Install a package + +To install a NuGet package from the package registry, execute the following command: + +```shell +dotnet add package --source {source_name} --version {package_version} {package_name} +``` + +| Parameter | Description | +| ----------------- | ----------- | +| `source_name` | The desired source name. | +| `package_name` | The package name. | +| `package_version` | The package version. | + +For example: + +```shell +dotnet add package --source gitea --version 1.0.0 test_package +``` + +## Supported commands + +``` +dotnet add +dotnet nuget push +dotnet nuget delete +``` diff --git a/docs/content/doc/packages/overview.en-us.md b/docs/content/doc/packages/overview.en-us.md new file mode 100644 index 0000000000..10f2184bc9 --- /dev/null +++ b/docs/content/doc/packages/overview.en-us.md @@ -0,0 +1,100 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "Package Registry" +slug: "packages/overview" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "Overview" + weight: 1 + identifier: "overview" +--- + +# Package Registry + +The Package Registry can be used as a public or private registry for common package managers. 
+ +**Table of Contents** + +{{< toc >}} + +## Supported package managers + +The following package managers are currently supported: + +| Name | Language | Package client | +| ---- | -------- | -------------- | +| [Composer]({{< relref "doc/packages/composer.en-us.md" >}}) | PHP | `composer` | +| [Conan]({{< relref "doc/packages/conan.en-us.md" >}}) | C++ | `conan` | +| [Container]({{< relref "doc/packages/container.en-us.md" >}}) | - | any OCI compliant client | +| [Generic]({{< relref "doc/packages/generic.en-us.md" >}}) | - | any HTTP client | +| [Helm]({{< relref "doc/packages/helm.en-us.md" >}}) | - | any HTTP client, `cm-push` | +| [Maven]({{< relref "doc/packages/maven.en-us.md" >}}) | Java | `mvn`, `gradle` | +| [npm]({{< relref "doc/packages/npm.en-us.md" >}}) | JavaScript | `npm`, `yarn` | +| [NuGet]({{< relref "doc/packages/nuget.en-us.md" >}}) | .NET | `nuget` | +| [PyPI]({{< relref "doc/packages/pypi.en-us.md" >}}) | Python | `pip`, `twine` | +| [RubyGems]({{< relref "doc/packages/rubygems.en-us.md" >}}) | Ruby | `gem`, `Bundler` | + +**The following paragraphs only apply if Packages are not globally disabled!** + +## Repository-Packages + +A package always belongs to an owner (a user or organisation), not a repository. +To link an (already uploaded) package to a repository, open the settings page +on that package and choose a repository to link this package to. +The entire package will be linked, not just a single version. + +Linking a package results in showing that package in the repository's package list, +and shows a link to the repository on the package site (as well as a link to the repository issues). + +## Access Restrictions + +| Package owner type | User | Organization | +|--------------------|------|--------------| +| **read** access | public, if user is public too; otherwise for this user only | public, if org is public, otherwise org members only | +| **write** access | owner only | org members with admin or write access to the org | + +N.B.: These access restrictions are [subject to change](https://github.com/go-gitea/gitea/issues/19270), where more finegrained control will be added via a dedicated organization team permission. + +## Create or upload a package + +Depending on the type of package, use the respective package-manager for that. Check out the sub-page of a specific package manager for instructions. + +## View packages + +You can view the packages of a repository on the repository page. + +1. Go to the repository. +1. Go to **Packages** in the navigation bar. + +To view more details about a package, select the name of the package. + +## Download a package + +To download a package from your repository: + +1. Go to **Packages** in the navigation bar. +1. Select the name of the package to view the details. +1. In the **Assets** section, select the name of the package file you want to download. + +## Delete a package + +You cannot edit a package after you published it in the Package Registry. Instead, you +must delete and recreate it. + +To delete a package from your repository: + +1. Go to **Packages** in the navigation bar. +1. Select the name of the package to view the details. +1. Click **Delete package** to permanently delete the package. + +## Disable the Package Registry + +The Package Registry is automatically enabled. To disable it for a single repository: + +1. Go to **Settings** in the navigation bar. +1. Disable **Enable Repository Packages Registry**. + +Previously published packages are not deleted by disabling the Package Registry. 
diff --git a/docs/content/doc/packages/pypi.en-us.md b/docs/content/doc/packages/pypi.en-us.md new file mode 100644 index 0000000000..d9f4872dca --- /dev/null +++ b/docs/content/doc/packages/pypi.en-us.md @@ -0,0 +1,85 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "PyPI Packages Repository" +slug: "packages/pypi" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "PyPI" + weight: 90 + identifier: "pypi" +--- + +# PyPI Packages Repository + +Publish [PyPI](https://pypi.org/) packages for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the PyPI package registry, you need to use the tools [pip](https://pypi.org/project/pip/) to consume and [twine](https://pypi.org/project/twine/) to publish packages. + +## Configuring the package registry + +To register the package registry you need to edit your local `~/.pypirc` file. Add + +```ini +[distutils] +index-servers = gitea + +[gitea] +repository = https://gitea.example.com/api/packages/{owner}/pypi +username = {username} +password = {password} +``` + +| Placeholder | Description | +| ------------ | ----------- | +| `owner` | The owner of the package. | +| `username` | Your Gitea username. | +| `password` | Your Gitea password or a [personal access token]({{< relref "doc/developers/api-usage.en-us.md#authentication" >}}). | + +## Publish a package + +Publish a package by running the following command: + +```shell +python3 -m twine upload --repository gitea /path/to/files/* +``` + +The package files have the extensions `.tar.gz` and `.whl`. + +You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first. + +## Install a package + +To install a PyPI package from the package registry, execute the following command: + +```shell +pip install --index-url https://{username}:{password}@gitea.example.com/api/packages/{owner}/pypi/simple --no-deps {package_name} +``` + +| Parameter | Description | +| ----------------- | ----------- | +| `username` | Your Gitea username. | +| `password` | Your Gitea password or a personal access token. | +| `owner` | The owner of the package. | +| `package_name` | The package name. | + +For example: + +```shell +pip install --index-url https://testuser:password123@gitea.example.com/api/packages/testuser/pypi/simple --no-deps test_package +``` + +## Supported commands + +``` +pip install +twine upload +``` \ No newline at end of file diff --git a/docs/content/doc/packages/rubygems.en-us.md b/docs/content/doc/packages/rubygems.en-us.md new file mode 100644 index 0000000000..9d9ce09b1c --- /dev/null +++ b/docs/content/doc/packages/rubygems.en-us.md @@ -0,0 +1,127 @@ +--- +date: "2021-07-20T00:00:00+00:00" +title: "RubyGems Packages Repository" +slug: "packages/rubygems" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "RubyGems" + weight: 100 + identifier: "rubygems" +--- + +# RubyGems Packages Repository + +Publish [RubyGems](https://guides.rubygems.org/) packages for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the RubyGems package registry, you need to use the [gem](https://guides.rubygems.org/command-reference/) command line tool to consume and publish packages. 
+ +## Configuring the package registry + +To register the package registry, edit the `~/.gem/credentials` file and add: + +```ini +--- +https://gitea.example.com/api/packages/{owner}/rubygems: Bearer {token} +``` + +| Parameter | Description | +| ------------- | ----------- | +| `owner` | The owner of the package. | +| `token` | Your personal access token. | + +For example: + +``` +--- +https://gitea.example.com/api/packages/testuser/rubygems: Bearer 3bd626f84b01cd26b873931eace1e430a5773cc4 +``` + +## Publish a package + +Publish a package by running the following command: + +```shell +gem push --host {host} {package_file} +``` + +| Parameter | Description | +| -------------- | ----------- | +| `host` | URL to the package registry. | +| `package_file` | Path to the package `.gem` file. | + +For example: + +```shell +gem push --host https://gitea.example.com/api/packages/testuser/rubygems test_package-1.0.0.gem +``` + +You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first. + +## Install a package + +To install a package from the package registry, you can use [Bundler](https://bundler.io) or `gem`. + +### Bundler + +Add a new `source` block to your `Gemfile`: + +``` +source "https://gitea.example.com/api/packages/{owner}/rubygems" do + gem "{package_name}" +end +``` + +| Parameter | Description | +| ----------------- | ----------- | +| `owner` | The owner of the package. | +| `package_name` | The package name. | + +For example: + +``` +source "https://gitea.example.com/api/packages/testuser/rubygems" do + gem "test_package" +end +``` + +Afterwards, run the following command: + +```shell +bundle install +``` + +### gem + +Execute the following command: + +```shell +gem install --host https://gitea.example.com/api/packages/{owner}/rubygems {package_name} +``` + +| Parameter | Description | +| ----------------- | ----------- | +| `owner` | The owner of the package. | +| `package_name` | The package name.
| + +For example: + +```shell +gem install --host https://gitea.example.com/api/packages/testuser/rubygems test_package +``` + +## Supported commands + +``` +gem install +bundle install +gem push +``` \ No newline at end of file diff --git a/docs/content/doc/translation.de-de.md b/docs/content/doc/translation.de-de.md index 585783a706..3470faa59b 100644 --- a/docs/content/doc/translation.de-de.md +++ b/docs/content/doc/translation.de-de.md @@ -8,6 +8,6 @@ draft: false menu: sidebar: name: "Übersetzung" - weight: 45 + weight: 50 identifier: "translation" --- diff --git a/docs/content/doc/translation.en-us.md b/docs/content/doc/translation.en-us.md index 208eb32ab8..c281088503 100644 --- a/docs/content/doc/translation.en-us.md +++ b/docs/content/doc/translation.en-us.md @@ -8,6 +8,6 @@ draft: false menu: sidebar: name: "Translation" - weight: 45 + weight: 50 identifier: "translation" --- diff --git a/docs/content/doc/translation.zh-tw.md b/docs/content/doc/translation.zh-tw.md index ca820c093c..5374e87e89 100644 --- a/docs/content/doc/translation.zh-tw.md +++ b/docs/content/doc/translation.zh-tw.md @@ -8,6 +8,6 @@ draft: false menu: sidebar: name: "翻譯" - weight: 45 + weight: 50 identifier: "translation" --- diff --git a/docs/content/doc/usage/backup-and-restore.en-us.md b/docs/content/doc/usage/backup-and-restore.en-us.md index 8d90379abe..7cb4a6230f 100644 --- a/docs/content/doc/usage/backup-and-restore.en-us.md +++ b/docs/content/doc/usage/backup-and-restore.en-us.md @@ -57,7 +57,7 @@ The command has to be executed with the `RUN_USER = ` specified in Example: ```none -docker exec -u -it -w <--tempdir> $(docker ps -qf "name=") bash -c '/app/gitea/gitea dump -c ' +docker exec -u -it -w <--tempdir> $(docker ps -qf 'name=^$') bash -c '/app/gitea/gitea dump -c ' ``` \*Note: `--tempdir` refers to the temporary directory of the docker environment used by Gitea; if you have not specified a custom `--tempdir`, then Gitea uses `/tmp` or the `TMPDIR` environment variable of the docker container. For `--tempdir` adjust your `docker exec` command options accordingly. diff --git a/docs/content/doc/usage/command-line.en-us.md b/docs/content/doc/usage/command-line.en-us.md index 80a2c6716d..8cc420ed11 100644 --- a/docs/content/doc/usage/command-line.en-us.md +++ b/docs/content/doc/usage/command-line.en-us.md @@ -313,8 +313,13 @@ in the current directory. - `--tempdir path`, `-t path`: Path to the temporary directory used. Optional. (default: /tmp). - `--skip-repository`, `-R`: Skip the repository dumping. Optional. - `--skip-custom-dir`: Skip dumping of the custom dir. Optional. + - `--skip-lfs-data`: Skip dumping of LFS data. Optional. + - `--skip-attachment-data`: Skip dumping of attachment data. Optional. + - `--skip-package-data`: Skip dumping of package data. Optional. + - `--skip-log`: Skip dumping of log data. Optional. - `--database`, `-d`: Specify the database SQL syntax. Optional. - `--verbose`, `-V`: If provided, shows additional details. Optional. + - `--type`: Set the dump output format. Optional. 
(default: zip) - Examples: - `gitea dump` - `gitea dump --verbose` @@ -503,6 +508,13 @@ Manage running server operations: - `--host value`, `-H value`: Mail server host (defaults to: 127.0.0.1:25) - `--send-to value`, `-s value`: Email address(es) to send to - `--subject value`, `-S value`: Subject header of sent emails + - `processes`: Display Gitea processes and goroutine information + - Options: + - `--flat`: Show processes as flat table rather than as tree + - `--no-system`: Do not show system processes + - `--stacktraces`: Show stacktraces for goroutines associated with processes + - `--json`: Output as json + - `--cancel PID`: Send cancel to process with PID. (Only for non-system processes.) ### dump-repo diff --git a/docs/content/doc/usage/email-setup.en-us.md b/docs/content/doc/usage/email-setup.en-us.md index 390dc78e50..df1b8545af 100644 --- a/docs/content/doc/usage/email-setup.en-us.md +++ b/docs/content/doc/usage/email-setup.en-us.md @@ -35,6 +35,7 @@ ENABLED = true FROM = gitea@mydomain.com MAILER_TYPE = sendmail SENDMAIL_PATH = /usr/sbin/sendmail +SENDMAIL_ARGS = "--" ; most "sendmail" programs take options, "--" will prevent an email address being interpreted as an option. ``` ## Using SMTP diff --git a/docs/content/doc/usage/fail2ban-setup.en-us.md b/docs/content/doc/usage/fail2ban-setup.en-us.md index 0821b23a9f..d1ff633246 100644 --- a/docs/content/doc/usage/fail2ban-setup.en-us.md +++ b/docs/content/doc/usage/fail2ban-setup.en-us.md @@ -89,7 +89,7 @@ chain in **iptables**. Configure it in `/etc/fail2ban/jail.d/gitea-docker.conf`: [gitea-docker] enabled = true filter = gitea -logpath = /home/git/gitea/log/gitea.log +logpath = /var/lib/gitea/log/gitea.log maxretry = 10 findtime = 3600 bantime = 900 diff --git a/docs/content/doc/usage/issue-pull-request-templates.en-us.md b/docs/content/doc/usage/issue-pull-request-templates.en-us.md index 218b8a3642..65af260c31 100644 --- a/docs/content/doc/usage/issue-pull-request-templates.en-us.md +++ b/docs/content/doc/usage/issue-pull-request-templates.en-us.md @@ -43,6 +43,31 @@ Possible file names for PR templates: - `.github/PULL_REQUEST_TEMPLATE.md` - `.github/pull_request_template.md` +Possible file names for PR default merge message templates: + +- `.gitea/default_merge_message/MERGE_TEMPLATE.md` +- `.gitea/default_merge_message/REBASE_TEMPLATE.md` +- `.gitea/default_merge_message/REBASE-MERGE_TEMPLATE.md` +- `.gitea/default_merge_message/SQUASH_TEMPLATE.md` +- `.gitea/default_merge_message/MANUALLY-MERGED_TEMPLATE.md` +- `.gitea/default_merge_message/REBASE-UPDATE-ONLY_TEMPLATE.md` + +You can use the following variables enclosed in `${}` inside these templates which follow [os.Expand](https://pkg.go.dev/os#Expand) syntax: + +- BaseRepoOwnerName: Base repository owner name of this pull request +- BaseRepoName: Base repository name of this pull request +- BaseBranch: Base repository target branch name of this pull request +- HeadRepoOwnerName: Head repository owner name of this pull request +- HeadRepoName: Head repository name of this pull request +- HeadBranch: Head repository branch name of this pull request +-
PullRequestTitle: Pull request's title +- PullRequestDescription: Pull request's description +- PullRequestPosterName: Pull request's poster name +- PullRequestIndex: Pull request's index number +- PullRequestReference: Pull request's reference character with its index number, e.g. #1, !2 +- ClosingIssues: returns a string containing all issues which will be closed by this pull request, e.g. `close #1, close #2` + Additionally, the New Issue page URL can be suffixed with `?title=Issue+Title&body=Issue+Text` and the form will be populated with those strings. Those strings will be used instead of the template if there is one. ## Issue Template Directory diff --git a/docs/content/doc/usage/reverse-proxies.en-us.md b/docs/content/doc/usage/reverse-proxies.en-us.md index 90b2765daa..008704cdcf 100644 --- a/docs/content/doc/usage/reverse-proxies.en-us.md +++ b/docs/content/doc/usage/reverse-proxies.en-us.md @@ -348,3 +348,18 @@ The added http-request will automatically add a trailing slash if needed and int Then you **MUST** set something like `[server] ROOT_URL = http://example.com/gitea/` correctly in your configuration. +## Traefik + +If you want Traefik to serve your Gitea instance, you can add the following label section to your `docker-compose.yaml` (assuming the provider is Docker). + +```yaml +gitea: + image: gitea/gitea + ... + labels: + - "traefik.enable=true" + - "traefik.http.routers.gitea.rule=Host(`example.com`)" + - "traefik.http.services.gitea-websecure.loadbalancer.server.port=3000" +``` + +This config assumes that you are handling HTTPS on the Traefik side and using HTTP between Gitea and Traefik. \ No newline at end of file diff --git a/docs/content/doc/usage/reverse-proxies.zh-cn.md b/docs/content/doc/usage/reverse-proxies.zh-cn.md index 741f739dbd..88db0c3790 100644 --- a/docs/content/doc/usage/reverse-proxies.zh-cn.md +++ b/docs/content/doc/usage/reverse-proxies.zh-cn.md @@ -106,3 +106,19 @@ git.example.com { ``` 然后您**必须**在 Gitea 的配置文件中正确的添加类似 `[server] ROOT_URL = http://git.example.com/git/` 的配置项。 + +## 使用 Traefik 作为反向代理服务 + +如果您想使用 traefik 作为 Gitea 的反向代理服务，您可以在 `docker-compose.yaml` 中添加 label 部分(假设使用 docker 作为 traefik 的 provider): + +```yaml +gitea: + image: gitea/gitea + ...
+ labels: + - "traefik.enable=true" + - "traefik.http.routers.gitea.rule=Host(`example.com`)" + - "traefik.http.services.gitea-websecure.loadbalancer.server.port=3000" +``` + +这份配置假设您使用 traefik 来处理 HTTPS 服务,并在其和 Gitea 之间使用 HTTP 进行通信。 \ No newline at end of file diff --git a/go.mod b/go.mod index d744908c77..ceefebceea 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module code.gitea.io/gitea -go 1.16 +go 1.17 require ( code.gitea.io/gitea-vet v0.2.2-0.20220122151748-48ebc902541b @@ -11,22 +11,12 @@ require ( gitea.com/go-chi/session v0.0.0-20211218221615-e3605d8b28b8 gitea.com/lunny/levelqueue v0.4.1 github.com/42wim/sshsig v0.0.0-20211121163825-841cf5bbc121 - github.com/Azure/go-ntlmssp v0.0.0-20211209120228-48547f28849e // indirect - github.com/Microsoft/go-winio v0.5.2 // indirect github.com/NYTimes/gziphandler v1.1.1 - github.com/ProtonMail/go-crypto v0.0.0-20220113124808-70ae35bab23f // indirect github.com/PuerkitoBio/goquery v1.8.0 github.com/alecthomas/chroma v0.10.0 - github.com/andybalholm/brotli v1.0.4 // indirect - github.com/bits-and-blooms/bitset v1.2.1 // indirect github.com/blevesearch/bleve/v2 v2.3.1 - github.com/boombuler/barcode v1.0.1 // indirect - github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b // indirect github.com/caddyserver/certmagic v0.15.4 github.com/chi-middleware/proxy v1.1.1 - github.com/couchbase/go-couchbase v0.0.0-20210224140812-5740cd35f448 // indirect - github.com/couchbase/gomemcached v0.1.2 // indirect - github.com/couchbase/goutils v0.0.0-20210118111533-e33d3ffb5401 // indirect github.com/denisenkom/go-mssqldb v0.12.0 github.com/djherbis/buffer v1.2.0 github.com/djherbis/nio/v3 v3.0.1 @@ -36,7 +26,6 @@ require ( github.com/emirpasic/gods v1.12.0 github.com/ethantkoenig/rupture v1.0.1 github.com/gliderlabs/ssh v0.3.3 - github.com/go-asn1-ber/asn1-ber v1.5.3 // indirect github.com/go-chi/chi/v5 v5.0.7 github.com/go-chi/cors v1.2.0 github.com/go-enry/go-enry/v2 v2.8.0 @@ -45,7 +34,7 @@ require ( github.com/go-git/go-billy/v5 v5.3.1 github.com/go-git/go-git/v5 v5.4.3-0.20210630082519-b4368b2a2ca4 github.com/go-ldap/ldap/v3 v3.4.2 - github.com/go-redis/redis/v8 v8.11.4 + github.com/go-redis/redis/v8 v8.11.5 github.com/go-sql-driver/mysql v1.6.0 github.com/go-swagger/go-swagger v0.29.0 github.com/go-testfixtures/testfixtures/v3 v3.6.1 @@ -54,21 +43,17 @@ require ( github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14 github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85 github.com/golang-jwt/jwt/v4 v4.3.0 - github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect - github.com/golang/snappy v0.0.4 // indirect github.com/google/go-github/v39 v39.2.0 + github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 github.com/google/uuid v1.3.0 github.com/gorilla/feeds v1.1.1 - github.com/gorilla/mux v1.8.0 // indirect github.com/gorilla/sessions v1.2.1 - github.com/hashicorp/go-retryablehttp v0.7.0 // indirect github.com/hashicorp/go-version v1.4.0 github.com/hashicorp/golang-lru v0.5.4 github.com/huandu/xstrings v1.3.2 github.com/jaytaylor/html2text v0.0.0-20211105163654-bc68cce691ba github.com/json-iterator/go v1.1.12 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 - github.com/kevinburke/ssh_config v1.1.0 // indirect github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4 github.com/klauspost/compress v1.15.0 github.com/klauspost/cpuid/v2 v2.0.11 @@ -76,71 +61,229 @@ require ( github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96 github.com/markbates/goth v1.69.0 
github.com/mattn/go-isatty v0.0.14 - github.com/mattn/go-runewidth v0.0.13 // indirect github.com/mattn/go-sqlite3 v1.14.12 - github.com/mholt/acmez v1.0.2 // indirect github.com/mholt/archiver/v3 v3.5.1 github.com/microcosm-cc/bluemonday v1.0.18 - github.com/miekg/dns v1.1.46 // indirect - github.com/minio/md5-simd v1.1.2 // indirect github.com/minio/minio-go/v7 v7.0.23 - github.com/minio/sha256-simd v1.0.0 // indirect - github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 // indirect github.com/msteinert/pam v1.0.0 github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 github.com/niklasfasching/go-org v1.6.2 - github.com/nwaples/rardecode v1.1.3 // indirect github.com/oliamb/cutter v0.2.2 github.com/olivere/elastic/v7 v7.0.31 - github.com/pierrec/lz4/v4 v4.1.14 // indirect github.com/pkg/errors v0.9.1 github.com/pquerna/otp v1.3.0 github.com/prometheus/client_golang v1.12.1 github.com/quasoft/websspi v1.1.2 - github.com/rs/xid v1.3.0 // indirect github.com/santhosh-tekuri/jsonschema/v5 v5.0.0 github.com/sergi/go-diff v1.2.0 - github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 // indirect github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 - github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect github.com/stretchr/testify v1.7.0 github.com/syndtr/goleveldb v1.0.0 github.com/tstranex/u2f v1.0.0 - github.com/ulikunitz/xz v0.5.10 // indirect - github.com/unknwon/com v1.0.1 - github.com/unknwon/i18n v0.0.0-20210904045753-ff3a8617e361 - github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae github.com/unrolled/render v1.4.1 github.com/urfave/cli v1.22.5 github.com/xanzy/go-gitlab v0.58.0 - github.com/xanzy/ssh-agent v0.3.1 // indirect github.com/yohcop/openid-go v1.0.0 - github.com/yuin/goldmark v1.4.8 + github.com/yuin/goldmark v1.4.11 github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 github.com/yuin/goldmark-meta v1.1.0 - go.etcd.io/bbolt v1.3.6 // indirect go.jolheiser.com/hcaptcha v0.0.4 go.jolheiser.com/pwn v0.0.3 - go.uber.org/atomic v1.9.0 // indirect - go.uber.org/multierr v1.8.0 // indirect - go.uber.org/zap v1.21.0 // indirect - golang.org/x/crypto v0.0.0-20220214200702-86341886e292 + golang.org/x/crypto v0.0.0-20220315160706-3147a52a75dd golang.org/x/net v0.0.0-20220225172249-27dd8689420f golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9 golang.org/x/text v0.3.7 - golang.org/x/time v0.0.0-20220224211638-0e9765cccd65 // indirect golang.org/x/tools v0.1.9 - gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df gopkg.in/ini.v1 v1.66.4 gopkg.in/yaml.v2 v2.4.0 mvdan.cc/xurls/v2 v2.4.0 strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 - xorm.io/builder v0.3.9 + xorm.io/builder v0.3.10 xorm.io/xorm v1.2.5 ) +require ( + cloud.google.com/go v0.99.0 // indirect + github.com/Azure/go-ntlmssp v0.0.0-20211209120228-48547f28849e // indirect + github.com/Microsoft/go-winio v0.5.2 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20220113124808-70ae35bab23f // indirect + github.com/PuerkitoBio/purell v1.1.1 // indirect + github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect + github.com/RoaringBitmap/roaring v0.9.4 // indirect + github.com/acomagu/bufpipe v1.0.3 // indirect + github.com/andybalholm/brotli v1.0.4 // indirect + github.com/andybalholm/cascadia v1.3.1 // indirect + github.com/anmitsu/go-shlex 
v0.0.0-20200514113438-38f4b401e2be // indirect + github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect + github.com/aymerick/douceur v0.2.0 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bgentry/speakeasy v0.1.0 // indirect + github.com/bits-and-blooms/bitset v1.2.1 // indirect + github.com/blevesearch/bleve_index_api v1.0.1 // indirect + github.com/blevesearch/go-porterstemmer v1.0.3 // indirect + github.com/blevesearch/mmap-go v1.0.3 // indirect + github.com/blevesearch/scorch_segment_api/v2 v2.1.0 // indirect + github.com/blevesearch/segment v0.9.0 // indirect + github.com/blevesearch/snowballstem v0.9.0 // indirect + github.com/blevesearch/upsidedown_store_api v1.0.1 // indirect + github.com/blevesearch/vellum v1.0.7 // indirect + github.com/blevesearch/zapx/v11 v11.3.3 // indirect + github.com/blevesearch/zapx/v12 v12.3.3 // indirect + github.com/blevesearch/zapx/v13 v13.3.3 // indirect + github.com/blevesearch/zapx/v14 v14.3.3 // indirect + github.com/blevesearch/zapx/v15 v15.3.3 // indirect + github.com/boombuler/barcode v1.0.1 // indirect + github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b // indirect + github.com/census-instrumentation/opencensus-proto v0.3.0 // indirect + github.com/cespare/xxhash/v2 v2.1.2 // indirect + github.com/cloudflare/cfssl v1.6.1 // indirect + github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 // indirect + github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490 // indirect + github.com/coreos/go-semver v0.3.0 // indirect + github.com/coreos/go-systemd/v22 v22.3.2 // indirect + github.com/couchbase/go-couchbase v0.0.0-20210224140812-5740cd35f448 // indirect + github.com/couchbase/gomemcached v0.1.2 // indirect + github.com/couchbase/goutils v0.0.0-20210118111533-e33d3ffb5401 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/dlclark/regexp2 v1.4.0 // indirect + github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect + github.com/envoyproxy/go-control-plane v0.10.1 // indirect + github.com/envoyproxy/protoc-gen-validate v0.6.2 // indirect + github.com/felixge/httpsnoop v1.0.2 // indirect + github.com/form3tech-oss/jwt-go v3.2.3+incompatible // indirect + github.com/fsnotify/fsnotify v1.5.1 // indirect + github.com/fullstorydev/grpcurl v1.8.1 // indirect + github.com/fxamacker/cbor/v2 v2.4.0 // indirect + github.com/go-asn1-ber/asn1-ber v1.5.3 // indirect + github.com/go-enry/go-oniguruma v1.2.1 // indirect + github.com/go-git/gcfg v1.5.0 // indirect + github.com/go-openapi/analysis v0.21.2 // indirect + github.com/go-openapi/errors v0.20.2 // indirect + github.com/go-openapi/inflect v0.19.0 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.19.6 // indirect + github.com/go-openapi/loads v0.21.0 // indirect + github.com/go-openapi/runtime v0.21.1 // indirect + github.com/go-openapi/spec v0.20.4 // indirect + github.com/go-openapi/strfmt v0.21.1 // indirect + github.com/go-openapi/swag v0.19.15 // indirect + github.com/go-openapi/validate v0.20.3 // indirect + github.com/go-stack/stack v1.8.1 // indirect + github.com/goccy/go-json v0.9.5 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect + github.com/golang-sql/sqlexp v0.0.0-20170517235910-f1bb20e5a188 // indirect + 
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/golang/mock v1.6.0 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/golang/snappy v0.0.4 // indirect + github.com/google/btree v1.0.1 // indirect + github.com/google/certificate-transparency-go v1.1.2-0.20210511102531-373a877eec92 // indirect + github.com/google/go-querystring v1.1.0 // indirect + github.com/gorilla/css v1.0.0 // indirect + github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/mux v1.8.0 // indirect + github.com/gorilla/securecookie v1.1.1 // indirect + github.com/gorilla/websocket v1.4.2 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 // indirect + github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 // indirect + github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-retryablehttp v0.7.0 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/imdario/mergo v0.3.12 // indirect + github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/jessevdk/go-flags v1.5.0 // indirect + github.com/jhump/protoreflect v1.8.2 // indirect + github.com/jonboulle/clockwork v0.2.2 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect + github.com/klauspost/pgzip v1.2.5 // indirect + github.com/kr/pretty v0.3.0 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/libdns/libdns v0.2.1 // indirect + github.com/magiconair/properties v1.8.5 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/markbates/going v1.0.0 // indirect + github.com/mattn/go-runewidth v0.0.13 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect + github.com/mholt/acmez v1.0.2 // indirect + github.com/miekg/dns v1.1.46 // indirect + github.com/minio/md5-simd v1.1.2 // indirect + github.com/minio/sha256-simd v1.0.0 // indirect + github.com/mitchellh/go-homedir v1.1.0 // indirect + github.com/mitchellh/mapstructure v1.4.3 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 // indirect + github.com/mschoch/smat v0.2.0 // indirect + github.com/nwaples/rardecode v1.1.3 // indirect + github.com/oklog/ulid v1.3.1 // indirect + github.com/olekukonko/tablewriter v0.0.5 // indirect + github.com/pelletier/go-toml v1.9.4 // indirect + github.com/pierrec/lz4/v4 v4.1.14 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_model v0.2.0 // indirect + github.com/prometheus/common v0.32.1 // indirect + github.com/prometheus/procfs v0.7.3 // indirect + github.com/rivo/uniseg v0.2.0 // indirect + github.com/rogpeppe/go-internal v1.8.1 // indirect + github.com/rs/xid v1.3.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 // indirect + github.com/sirupsen/logrus v1.8.1 // indirect + github.com/soheilhy/cmux v0.1.5 // indirect + github.com/spf13/afero v1.8.0 // indirect + github.com/spf13/cast v1.4.1 // indirect + github.com/spf13/cobra v1.3.0 // indirect + github.com/spf13/jwalterweatherman v1.1.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/spf13/viper v1.10.1 // indirect + github.com/ssor/bom 
v0.0.0-20170718123548-6386211fdfcf // indirect + github.com/steveyen/gtreap v0.1.0 // indirect + github.com/subosito/gotenv v1.2.0 // indirect + github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802 // indirect + github.com/toqueteos/webbrowser v1.2.0 // indirect + github.com/ulikunitz/xz v0.5.10 // indirect + github.com/unknwon/com v1.0.1 // indirect + github.com/x448/float16 v0.8.4 // indirect + github.com/xanzy/ssh-agent v0.3.1 // indirect + github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect + github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2 // indirect + go.etcd.io/bbolt v1.3.6 // indirect + go.etcd.io/etcd/api/v3 v3.5.1 // indirect + go.etcd.io/etcd/client/pkg/v3 v3.5.1 // indirect + go.etcd.io/etcd/client/v2 v2.305.1 // indirect + go.etcd.io/etcd/client/v3 v3.5.0-alpha.0 // indirect + go.etcd.io/etcd/etcdctl/v3 v3.5.0-alpha.0 // indirect + go.etcd.io/etcd/pkg/v3 v3.5.0-alpha.0 // indirect + go.etcd.io/etcd/raft/v3 v3.5.0-alpha.0 // indirect + go.etcd.io/etcd/server/v3 v3.5.0-alpha.0 // indirect + go.etcd.io/etcd/tests/v3 v3.5.0-alpha.0 // indirect + go.etcd.io/etcd/v3 v3.5.0-alpha.0 // indirect + go.mongodb.org/mongo-driver v1.8.2 // indirect + go.uber.org/atomic v1.9.0 // indirect + go.uber.org/multierr v1.8.0 // indirect + go.uber.org/zap v1.21.0 // indirect + golang.org/x/mod v0.5.1 // indirect + golang.org/x/time v0.0.0-20220224211638-0e9765cccd65 // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa // indirect + google.golang.org/grpc v1.43.0 // indirect + google.golang.org/protobuf v1.27.1 // indirect + gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect + gopkg.in/cheggaaa/pb.v1 v1.0.28 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect + sigs.k8s.io/yaml v1.2.0 // indirect +) + replace github.com/hashicorp/go-version => github.com/6543/go-version v1.3.1 replace github.com/markbates/goth v1.68.0 => github.com/zeripath/goth v1.68.1-0.20220109111530-754359885dce diff --git a/go.sum b/go.sum index 6016617e59..40a4cbbb05 100644 --- a/go.sum +++ b/go.sum @@ -599,8 +599,8 @@ github.com/go-redis/redis v6.15.2+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8w github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg= github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-redis/redis/v8 v8.4.0/go.mod h1:A1tbYoHSa1fXwN+//ljcCYYJeLmVrwL9hbQN45Jdy0M= -github.com/go-redis/redis/v8 v8.11.4 h1:kHoYkfZP6+pe04aFTnhDH6GDROa5yJdHJVNxV3F46Tg= -github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w= +github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI= +github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= @@ -766,8 +766,10 @@ github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof 
v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/rpmpack v0.0.0-20191226140753-aa36bfddb3a0/go.mod h1:RaTPr0KUf2K7fnZYLNDrr8rxAamWs3iNywJLtQ2AzBg= @@ -1010,8 +1012,8 @@ github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNU github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= -github.com/kevinburke/ssh_config v1.1.0 h1:pH/t1WS9NzT8go394IqZeJTMHVm6Cr6ZJ6AQ+mdNo/o= -github.com/kevinburke/ssh_config v1.1.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4 h1:cTxwSmnaqLoo+4tLukHoB9iqHOu3LmLhRmgUxZo6Vp4= github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= @@ -1241,15 +1243,18 @@ github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= -github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/ginkgo/v2 v2.0.0/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= -github.com/onsi/gomega v1.16.0 h1:6gjqkI8iiRHMvdccRJM8rVKjCWk6ZIm6FTm3ddIe4/c= -github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega 
v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE= +github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= @@ -1506,10 +1511,6 @@ github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0o github.com/unknwon/com v0.0.0-20190804042917-757f69c95f3e/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM= github.com/unknwon/com v1.0.1 h1:3d1LTxD+Lnf3soQiD4Cp/0BRB+Rsa/+RTvz8GMMzIXs= github.com/unknwon/com v1.0.1/go.mod h1:tOOxU81rwgoCLoOVVPHb6T/wt8HZygqH5id+GNnlCXM= -github.com/unknwon/i18n v0.0.0-20210904045753-ff3a8617e361 h1:4Ij5sX4JEzCCY/CCl8trJHey1tPsIDomYTZf145GKk0= -github.com/unknwon/i18n v0.0.0-20210904045753-ff3a8617e361/go.mod h1:+5rDk6sDGpl3azws3O+f+GpFSyN9GVr0K8cvQLQM2ZQ= -github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae h1:ihaXiJkaca54IaCSnEXtE/uSZOmPxKZhDfVLrzZLFDs= -github.com/unknwon/paginater v0.0.0-20200328080006-042474bd0eae/go.mod h1:1fdkY6xxl6ExVs2QFv7R0F5IRZHKA8RahhB9fMC9RvM= github.com/unrolled/render v1.4.1 h1:VdpMc2YkAOWzbmC/P2yoHhRDXgsaCQHcTJ1KK6SNCA4= github.com/unrolled/render v1.4.1/go.mod h1:cK4RSTTVdND5j9EYEc0LAMOvdG11JeiKjyjfyZRvV2w= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= @@ -1553,8 +1554,8 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= github.com/yuin/goldmark v1.4.6/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= -github.com/yuin/goldmark v1.4.8 h1:zHPiabbIRssZOI0MAzJDHsyvG4MXCGqVaMOwR+HeoQQ= -github.com/yuin/goldmark v1.4.8/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= +github.com/yuin/goldmark v1.4.11 h1:i45YIzqLnUc2tGaTlJCyUxSG8TvgyGqhqOZOUKIjJ6w= +github.com/yuin/goldmark v1.4.11/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg= github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU= github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= @@ -1692,8 +1693,8 @@ golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220214200702-86341886e292 h1:f+lwQ+GtmgoY+A2YaQxlSOnDjXcQ7ZRLWOHbC6HtRqE= -golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220315160706-3147a52a75dd h1:XcWmESyNjXJMLahc3mqVQJcgSTDxFxhETVlfk9uGc38= +golang.org/x/crypto v0.0.0-20220315160706-3147a52a75dd/go.mod 
h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -2277,7 +2278,6 @@ gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AW gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw= gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/ini.v1 v1.44.2/go.mod h1:M3Cogqpuv0QCi3ExAY5V4uOt4qb/R3xZubo9m8lK5wg= -gopkg.in/ini.v1 v1.46.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.57.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= @@ -2358,7 +2358,8 @@ sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 h1:mUcz5b3FJbP5Cvdq7Khzn6J9OCUQJaBwgBkCR+MOwSs= strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251/go.mod h1:FJGmPh3vz9jSos1L/F91iAgnC/aejc0wIIrF2ZwJxdY= -xorm.io/builder v0.3.9 h1:Sd65/LdWyO7LR8+Cbd+e7mm3sK/7U9k0jS3999IDHMc= xorm.io/builder v0.3.9/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE= +xorm.io/builder v0.3.10 h1:Rvkncad3Lo9YIVqCbgIf6QnpR/HcW3IEr0AANNpuyMQ= +xorm.io/builder v0.3.10/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE= xorm.io/xorm v1.2.5 h1:tqN7OhN8P9xi52qBb76I8m5maAJMz/SSbgK2RGPCPbo= xorm.io/xorm v1.2.5/go.mod h1:fTG8tSjk6O1BYxwuohZUK+S1glnRycsCF05L1qQyEU0= diff --git a/integrations/README_ZH.md b/integrations/README_ZH.md index 39639f9b89..eebb14de73 100644 --- a/integrations/README_ZH.md +++ b/integrations/README_ZH.md @@ -36,7 +36,7 @@ TEST_MYSQL_HOST=localhost:3306 TEST_MYSQL_DBNAME=test TEST_MYSQL_USERNAME=root T ## 如何使用 pgsql 数据库进行集成测试 同上,首先在 docker 容器里部署一个 pgsql 数据库 ``` -docker run -e "POSTGRES_DB=test" -p 5432:5432 --rm --name pgsql postgres:13 #(just ctrl-c to stop db and clean the container) +docker run -e "POSTGRES_DB=test" -p 5432:5432 --rm --name pgsql postgres:14 #(just ctrl-c to stop db and clean the container) ``` 之后便可以基于这个数据库进行集成测试 ``` diff --git a/integrations/admin_user_test.go b/integrations/admin_user_test.go index d657f65fa5..59adac7ecc 100644 --- a/integrations/admin_user_test.go +++ b/integrations/admin_user_test.go @@ -46,7 +46,7 @@ func TestAdminEditUser(t *testing.T) { } func testSuccessfullEdit(t *testing.T, formData user_model.User) { - makeRequest(t, formData, http.StatusFound) + makeRequest(t, formData, http.StatusSeeOther) } func makeRequest(t *testing.T, formData user_model.User, headerCode int) { diff --git a/integrations/api_branch_test.go b/integrations/api_branch_test.go index 54fe4a6eda..e137331343 100644 --- a/integrations/api_branch_test.go +++ b/integrations/api_branch_test.go @@ -37,7 +37,7 @@ func testAPIGetBranchProtection(t *testing.T, branchName string, expectedHTTPSta req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo1/branch_protections/%s?token=%s", branchName, token) resp := session.MakeRequest(t, req, 
expectedHTTPStatus) - if resp.Code == 200 { + if resp.Code == http.StatusOK { var branchProtection api.BranchProtection DecodeJSON(t, resp, &branchProtection) assert.EqualValues(t, branchName, branchProtection.BranchName) @@ -52,7 +52,7 @@ func testAPICreateBranchProtection(t *testing.T, branchName string, expectedHTTP }) resp := session.MakeRequest(t, req, expectedHTTPStatus) - if resp.Code == 201 { + if resp.Code == http.StatusCreated { var branchProtection api.BranchProtection DecodeJSON(t, resp, &branchProtection) assert.EqualValues(t, branchName, branchProtection.BranchName) @@ -65,7 +65,7 @@ func testAPIEditBranchProtection(t *testing.T, branchName string, body *api.Bran req := NewRequestWithJSON(t, "PATCH", "/api/v1/repos/user2/repo1/branch_protections/"+branchName+"?token="+token, body) resp := session.MakeRequest(t, req, expectedHTTPStatus) - if resp.Code == 200 { + if resp.Code == http.StatusOK { var branchProtection api.BranchProtection DecodeJSON(t, resp, &branchProtection) assert.EqualValues(t, branchName, branchProtection.BranchName) diff --git a/integrations/api_helper_for_declarative_test.go b/integrations/api_helper_for_declarative_test.go index 7f2cd787c3..181a646946 100644 --- a/integrations/api_helper_for_declarative_test.go +++ b/integrations/api_helper_for_declarative_test.go @@ -227,7 +227,7 @@ func doAPICreatePullRequest(ctx APITestContext, owner, repo, baseBranch, headBra Title: fmt.Sprintf("create a pr from %s to %s", headBranch, baseBranch), }) - expected := 201 + expected := http.StatusCreated if ctx.ExpectedCode != 0 { expected = ctx.ExpectedCode } @@ -246,7 +246,7 @@ func doAPIGetPullRequest(ctx APITestContext, owner, repo string, index int64) fu owner, repo, index, ctx.Token) req := NewRequest(t, http.MethodGet, urlStr) - expected := 200 + expected := http.StatusOK if ctx.ExpectedCode != 0 { expected = ctx.ExpectedCode } @@ -287,7 +287,7 @@ func doAPIMergePullRequest(ctx APITestContext, owner, repo string, index int64) expected := ctx.ExpectedCode if expected == 0 { - expected = 200 + expected = http.StatusOK } if !assert.EqualValues(t, expected, resp.Code, @@ -306,6 +306,24 @@ func doAPIManuallyMergePullRequest(ctx APITestContext, owner, repo, commitID str MergeCommitID: commitID, }) + if ctx.ExpectedCode != 0 { + ctx.Session.MakeRequest(t, req, ctx.ExpectedCode) + return + } + ctx.Session.MakeRequest(t, req, http.StatusOK) + } +} + +func doAPIAutoMergePullRequest(ctx APITestContext, owner, repo string, index int64) func(*testing.T) { + return func(t *testing.T) { + urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/merge?token=%s", + owner, repo, index, ctx.Token) + req := NewRequestWithJSON(t, http.MethodPost, urlStr, &forms.MergePullRequestForm{ + MergeMessageField: "doAPIMergePullRequest Merge", + Do: string(repo_model.MergeStyleMerge), + MergeWhenChecksSucceed: true, + }) + if ctx.ExpectedCode != 0 { ctx.Session.MakeRequest(t, req, ctx.ExpectedCode) return @@ -314,6 +332,19 @@ func doAPIManuallyMergePullRequest(ctx APITestContext, owner, repo, commitID str } } +func doAPICancelAutoMergePullRequest(ctx APITestContext, owner, repo string, index int64) func(*testing.T) { + return func(t *testing.T) { + urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d/merge?token=%s", + owner, repo, index, ctx.Token) + req := NewRequest(t, http.MethodDelete, urlStr) + if ctx.ExpectedCode != 0 { + ctx.Session.MakeRequest(t, req, ctx.ExpectedCode) + return + } + ctx.Session.MakeRequest(t, req, 204) + } +} + func doAPIGetBranch(ctx APITestContext, branch string, callback 
...func(*testing.T, api.Branch)) func(*testing.T) { return func(t *testing.T) { req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/branches/%s?token=%s", ctx.Username, ctx.Reponame, branch, ctx.Token) diff --git a/integrations/api_issue_milestone_test.go b/integrations/api_issue_milestone_test.go index 60a6329424..a7f89721a5 100644 --- a/integrations/api_issue_milestone_test.go +++ b/integrations/api_issue_milestone_test.go @@ -9,7 +9,7 @@ import ( "net/http" "testing" - "code.gitea.io/gitea/models" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -21,7 +21,7 @@ import ( func TestAPIIssuesMilestone(t *testing.T) { defer prepareTestEnv(t)() - milestone := unittest.AssertExistsAndLoadBean(t, &models.Milestone{ID: 1}).(*models.Milestone) + milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}).(*issues_model.Milestone) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: milestone.RepoID}).(*repo_model.Repository) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID}).(*user_model.User) assert.Equal(t, int64(1), int64(milestone.NumIssues)) diff --git a/integrations/api_issue_reaction_test.go b/integrations/api_issue_reaction_test.go index aa6f46f8bd..4a063c8c68 100644 --- a/integrations/api_issue_reaction_test.go +++ b/integrations/api_issue_reaction_test.go @@ -11,6 +11,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/convert" @@ -23,7 +24,7 @@ func TestAPIIssuesReactions(t *testing.T) { defer prepareTestEnv(t)() issue := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 1}).(*models.Issue) - _ = issue.LoadRepo() + _ = issue.LoadRepo(db.DefaultContext) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: issue.Repo.OwnerID}).(*user_model.User) session := loginUser(t, owner.Name) @@ -82,7 +83,7 @@ func TestAPICommentReactions(t *testing.T) { comment := unittest.AssertExistsAndLoadBean(t, &models.Comment{ID: 2}).(*models.Comment) _ = comment.LoadIssue() issue := comment.Issue - _ = issue.LoadRepo() + _ = issue.LoadRepo(db.DefaultContext) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: issue.Repo.OwnerID}).(*user_model.User) session := loginUser(t, owner.Name) diff --git a/integrations/api_issue_stopwatch_test.go b/integrations/api_issue_stopwatch_test.go index 3f62e042cb..90098b9236 100644 --- a/integrations/api_issue_stopwatch_test.go +++ b/integrations/api_issue_stopwatch_test.go @@ -9,6 +9,7 @@ import ( "testing" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -45,7 +46,7 @@ func TestAPIStopStopWatches(t *testing.T) { defer prepareTestEnv(t)() issue := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 2}).(*models.Issue) - _ = issue.LoadRepo() + _ = issue.LoadRepo(db.DefaultContext) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: issue.Repo.OwnerID}).(*user_model.User) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) @@ -61,7 +62,7 @@ func TestAPICancelStopWatches(t *testing.T) { defer prepareTestEnv(t)() issue := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 1}).(*models.Issue) - _ = issue.LoadRepo() + 
_ = issue.LoadRepo(db.DefaultContext) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: issue.Repo.OwnerID}).(*user_model.User) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) @@ -77,7 +78,7 @@ func TestAPIStartStopWatches(t *testing.T) { defer prepareTestEnv(t)() issue := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 3}).(*models.Issue) - _ = issue.LoadRepo() + _ = issue.LoadRepo(db.DefaultContext) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: issue.Repo.OwnerID}).(*user_model.User) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) diff --git a/integrations/api_issue_test.go b/integrations/api_issue_test.go index 3957c10233..5ed5a0ad99 100644 --- a/integrations/api_issue_test.go +++ b/integrations/api_issue_test.go @@ -168,12 +168,11 @@ func TestAPIEditIssue(t *testing.T) { func TestAPISearchIssues(t *testing.T) { defer prepareTestEnv(t)() - session := loginUser(t, "user2") - token := getTokenForLoggedInUser(t, session) + token := getUserToken(t, "user2") link, _ := url.Parse("/api/v1/repos/issues/search") - req := NewRequest(t, "GET", link.String()) - resp := session.MakeRequest(t, req, http.StatusOK) + req := NewRequest(t, "GET", link.String()+"?token="+token) + resp := MakeRequest(t, req, http.StatusOK) var apiIssues []*api.Issue DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 10) @@ -181,7 +180,7 @@ func TestAPISearchIssues(t *testing.T) { query := url.Values{"token": {token}} link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 10) @@ -189,9 +188,10 @@ func TestAPISearchIssues(t *testing.T) { before := time.Unix(999307200, 0).Format(time.RFC3339) query.Add("since", since) query.Add("before", before) + query.Add("token", token) link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 8) query.Del("since") @@ -200,14 +200,14 @@ func TestAPISearchIssues(t *testing.T) { query.Add("state", "closed") link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 2) query.Set("state", "all") link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.EqualValues(t, "15", resp.Header().Get("X-Total-Count")) assert.Len(t, apiIssues, 10) // there are more but 10 is page item limit @@ -215,49 +215,49 @@ func TestAPISearchIssues(t *testing.T) { query.Add("limit", "20") link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 15) - query = url.Values{"assigned": {"true"}, "state": {"all"}} + query = url.Values{"assigned": {"true"}, "state": {"all"}, "token": {token}} link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, 
&apiIssues) assert.Len(t, apiIssues, 1) - query = url.Values{"milestones": {"milestone1"}, "state": {"all"}} + query = url.Values{"milestones": {"milestone1"}, "state": {"all"}, "token": {token}} link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 1) - query = url.Values{"milestones": {"milestone1,milestone3"}, "state": {"all"}} + query = url.Values{"milestones": {"milestone1,milestone3"}, "state": {"all"}, "token": {token}} link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 2) - query = url.Values{"owner": {"user2"}} // user + query = url.Values{"owner": {"user2"}, "token": {token}} // user link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 6) - query = url.Values{"owner": {"user3"}} // organization + query = url.Values{"owner": {"user3"}, "token": {token}} // organization link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 3) - query = url.Values{"owner": {"user3"}, "team": {"team1"}} // organization + team + query = url.Values{"owner": {"user3"}, "team": {"team1"}, "token": {token}} // organization + team link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 2) } @@ -265,12 +265,11 @@ func TestAPISearchIssues(t *testing.T) { func TestAPISearchIssuesWithLabels(t *testing.T) { defer prepareTestEnv(t)() - session := loginUser(t, "user1") - token := getTokenForLoggedInUser(t, session) + token := getUserToken(t, "user1") link, _ := url.Parse("/api/v1/repos/issues/search") - req := NewRequest(t, "GET", link.String()) - resp := session.MakeRequest(t, req, http.StatusOK) + req := NewRequest(t, "GET", link.String()+"?token="+token) + resp := MakeRequest(t, req, http.StatusOK) var apiIssues []*api.Issue DecodeJSON(t, resp, &apiIssues) @@ -280,14 +279,14 @@ func TestAPISearchIssuesWithLabels(t *testing.T) { query.Add("token", token) link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 10) query.Add("labels", "label1") link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 2) @@ -295,7 +294,7 @@ func TestAPISearchIssuesWithLabels(t *testing.T) { query.Set("labels", "label1,label2") link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 2) @@ -303,7 +302,7 @@ func TestAPISearchIssuesWithLabels(t *testing.T) { query.Set("labels", "orglabel4") 
link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 1) @@ -312,7 +311,7 @@ func TestAPISearchIssuesWithLabels(t *testing.T) { query.Add("state", "all") link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 2) @@ -320,7 +319,7 @@ func TestAPISearchIssuesWithLabels(t *testing.T) { query.Set("labels", "label1,orglabel4") link.RawQuery = query.Encode() req = NewRequest(t, "GET", link.String()) - resp = session.MakeRequest(t, req, http.StatusOK) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiIssues) assert.Len(t, apiIssues, 2) } diff --git a/integrations/api_issue_tracked_time_test.go b/integrations/api_issue_tracked_time_test.go index 335fd2929a..b6f7091013 100644 --- a/integrations/api_issue_tracked_time_test.go +++ b/integrations/api_issue_tracked_time_test.go @@ -11,6 +11,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" api "code.gitea.io/gitea/modules/structs" @@ -23,7 +24,7 @@ func TestAPIGetTrackedTimes(t *testing.T) { user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) issue2 := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 2}).(*models.Issue) - assert.NoError(t, issue2.LoadRepo()) + assert.NoError(t, issue2.LoadRepo(db.DefaultContext)) session := loginUser(t, user2.Name) token := getTokenForLoggedInUser(t, session) @@ -65,7 +66,7 @@ func TestAPIDeleteTrackedTime(t *testing.T) { time6 := unittest.AssertExistsAndLoadBean(t, &models.TrackedTime{ID: 6}).(*models.TrackedTime) issue2 := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 2}).(*models.Issue) - assert.NoError(t, issue2.LoadRepo()) + assert.NoError(t, issue2.LoadRepo(db.DefaultContext)) user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) session := loginUser(t, user2.Name) @@ -99,7 +100,7 @@ func TestAPIAddTrackedTimes(t *testing.T) { defer prepareTestEnv(t)() issue2 := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 2}).(*models.Issue) - assert.NoError(t, issue2.LoadRepo()) + assert.NoError(t, issue2.LoadRepo(db.DefaultContext)) user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) diff --git a/integrations/api_nodeinfo_test.go b/integrations/api_nodeinfo_test.go index 1d25dc0269..822dbf3f0e 100644 --- a/integrations/api_nodeinfo_test.go +++ b/integrations/api_nodeinfo_test.go @@ -26,6 +26,10 @@ func TestNodeinfo(t *testing.T) { resp := MakeRequest(t, req, http.StatusOK) var nodeinfo api.NodeInfo DecodeJSON(t, resp, &nodeinfo) + assert.True(t, nodeinfo.OpenRegistrations) assert.Equal(t, "gitea", nodeinfo.Software.Name) + assert.Equal(t, 23, nodeinfo.Usage.Users.Total) + assert.Equal(t, 15, nodeinfo.Usage.LocalPosts) + assert.Equal(t, 2, nodeinfo.Usage.LocalComments) }) } diff --git a/integrations/api_org_test.go b/integrations/api_org_test.go index e33c010e88..a3c1827e70 100644 --- a/integrations/api_org_test.go +++ b/integrations/api_org_test.go @@ -20,9 +20,8 @@ import ( func TestAPIOrgCreate(t *testing.T) { onGiteaRun(t, func(*testing.T, 
*url.URL) { - session := loginUser(t, "user1") + token := getUserToken(t, "user1") - token := getTokenForLoggedInUser(t, session) org := api.CreateOrgOption{ UserName: "user1_org", FullName: "User1's organization", @@ -32,7 +31,7 @@ func TestAPIOrgCreate(t *testing.T) { Visibility: "limited", } req := NewRequestWithJSON(t, "POST", "/api/v1/orgs?token="+token, &org) - resp := session.MakeRequest(t, req, http.StatusCreated) + resp := MakeRequest(t, req, http.StatusCreated) var apiOrg api.Organization DecodeJSON(t, resp, &apiOrg) @@ -50,13 +49,13 @@ func TestAPIOrgCreate(t *testing.T) { FullName: org.FullName, }) - req = NewRequestf(t, "GET", "/api/v1/orgs/%s", org.UserName) - resp = session.MakeRequest(t, req, http.StatusOK) + req = NewRequestf(t, "GET", "/api/v1/orgs/%s?token=%s", org.UserName, token) + resp = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &apiOrg) assert.EqualValues(t, org.UserName, apiOrg.UserName) - req = NewRequestf(t, "GET", "/api/v1/orgs/%s/repos", org.UserName) - resp = session.MakeRequest(t, req, http.StatusOK) + req = NewRequestf(t, "GET", "/api/v1/orgs/%s/repos?token=%s", org.UserName, token) + resp = MakeRequest(t, req, http.StatusOK) var repos []*api.Repository DecodeJSON(t, resp, &repos) @@ -64,8 +63,8 @@ func TestAPIOrgCreate(t *testing.T) { assert.False(t, repo.Private) } - req = NewRequestf(t, "GET", "/api/v1/orgs/%s/members", org.UserName) - resp = session.MakeRequest(t, req, http.StatusOK) + req = NewRequestf(t, "GET", "/api/v1/orgs/%s/members?token=%s", org.UserName, token) + resp = MakeRequest(t, req, http.StatusOK) // user1 on this org is public var users []*api.User diff --git a/integrations/api_packages_composer_test.go b/integrations/api_packages_composer_test.go new file mode 100644 index 0000000000..59b975408d --- /dev/null +++ b/integrations/api_packages_composer_test.go @@ -0,0 +1,214 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
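For context, the new Composer test below drives the registry purely over HTTP, so the same endpoints can be exercised with a standalone client. A minimal sketch, assuming a reachable instance; giteaURL, owner, the credentials, and the archive filename are placeholders, not values from this patch:

package main

import (
    "bytes"
    "fmt"
    "net/http"
    "os"
)

func main() {
    giteaURL := "https://gitea.example.com"  // assumption: your instance URL
    owner := "gitea"                         // assumption: package owner
    user, password := "user", "password"     // assumption: credentials

    // A Composer package is a zip archive containing composer.json.
    archive, err := os.ReadFile("composer-package.zip") // assumption: prebuilt archive
    if err != nil {
        panic(err)
    }

    // Upload: PUT /api/packages/{owner}/composer?version={version}
    uploadURL := fmt.Sprintf("%s/api/packages/%s/composer?version=1.0.3", giteaURL, owner)
    req, _ := http.NewRequest(http.MethodPut, uploadURL, bytes.NewReader(archive))
    req.SetBasicAuth(user, password)
    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    resp.Body.Close()
    fmt.Println("upload:", resp.Status) // the test expects 201 Created

    // Service index: GET /api/packages/{owner}/composer/packages.json
    indexURL := fmt.Sprintf("%s/api/packages/%s/composer/packages.json", giteaURL, owner)
    req, _ = http.NewRequest(http.MethodGet, indexURL, nil)
    req.SetBasicAuth(user, password)
    resp, err = http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    resp.Body.Close()
    fmt.Println("service index:", resp.Status)
}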
+ +package integrations + +import ( + "archive/zip" + "bytes" + "fmt" + "net/http" + neturl "net/url" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + composer_module "code.gitea.io/gitea/modules/packages/composer" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/routers/api/packages/composer" + + "github.com/stretchr/testify/assert" +) + +func TestPackageComposer(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + vendorName := "gitea" + projectName := "composer-package" + packageName := vendorName + "/" + projectName + packageVersion := "1.0.3" + packageDescription := "Package Description" + packageType := "composer-plugin" + packageAuthor := "Gitea Authors" + packageLicense := "MIT" + + var buf bytes.Buffer + archive := zip.NewWriter(&buf) + w, _ := archive.Create("composer.json") + w.Write([]byte(`{ + "name": "` + packageName + `", + "description": "` + packageDescription + `", + "type": "` + packageType + `", + "license": "` + packageLicense + `", + "authors": [ + { + "name": "` + packageAuthor + `" + } + ] + }`)) + archive.Close() + content := buf.Bytes() + + url := fmt.Sprintf("%sapi/packages/%s/composer", setting.AppURL, user.Name) + + t.Run("ServiceIndex", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/packages.json", url)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result composer.ServiceIndexResponse + DecodeJSON(t, resp, &result) + + assert.Equal(t, url+"/search.json?q=%query%&type=%type%", result.SearchTemplate) + assert.Equal(t, url+"/p2/%package%.json", result.MetadataTemplate) + assert.Equal(t, url+"/list.json", result.PackageList) + }) + + t.Run("Upload", func(t *testing.T) { + t.Run("MissingVersion", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", url, bytes.NewReader(content)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusBadRequest) + }) + + t.Run("Valid", func(t *testing.T) { + defer PrintCurrentTest(t)() + + uploadURL := url + "?version=" + packageVersion + + req := NewRequestWithBody(t, "PUT", uploadURL, bytes.NewReader(content)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeComposer) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &composer_module.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, fmt.Sprintf("%s-%s.%s.zip", vendorName, projectName, packageVersion), pfs[0].Name) + assert.True(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(len(content)), pb.Size) + + req = NewRequestWithBody(t, "PUT", uploadURL, bytes.NewReader(content)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusBadRequest) + }) + }) + + t.Run("Download", func(t *testing.T) { + defer 
PrintCurrentTest(t)() + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeComposer) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(0), pvs[0].DownloadCount) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + + req := NewRequest(t, "GET", fmt.Sprintf("%s/files/%s/%s/%s", url, neturl.PathEscape(packageName), neturl.PathEscape(pvs[0].LowerVersion), neturl.PathEscape(pfs[0].LowerName))) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, content, resp.Body.Bytes()) + + pvs, err = packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeComposer) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(1), pvs[0].DownloadCount) + }) + + t.Run("SearchService", func(t *testing.T) { + defer PrintCurrentTest(t)() + + cases := []struct { + Query string + Type string + Page int + PerPage int + ExpectedTotal int64 + ExpectedResults int + }{ + {"", "", 0, 0, 1, 1}, + {"", "", 1, 1, 1, 1}, + {"test", "", 1, 0, 0, 0}, + {"gitea", "", 1, 1, 1, 1}, + {"gitea", "", 2, 1, 1, 0}, + {"", packageType, 1, 1, 1, 1}, + {"gitea", packageType, 1, 1, 1, 1}, + {"gitea", "dummy", 1, 1, 0, 0}, + } + + for i, c := range cases { + req := NewRequest(t, "GET", fmt.Sprintf("%s/search.json?q=%s&type=%s&page=%d&per_page=%d", url, c.Query, c.Type, c.Page, c.PerPage)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result composer.SearchResultResponse + DecodeJSON(t, resp, &result) + + assert.Equal(t, c.ExpectedTotal, result.Total, "case %d: unexpected total hits", i) + assert.Len(t, result.Results, c.ExpectedResults, "case %d: unexpected result count", i) + } + }) + + t.Run("EnumeratePackages", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", url+"/list.json") + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result map[string][]string + DecodeJSON(t, resp, &result) + + assert.Contains(t, result, "packageNames") + names := result["packageNames"] + assert.Len(t, names, 1) + assert.Equal(t, packageName, names[0]) + }) + + t.Run("PackageMetadata", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/p2/%s/%s.json", url, vendorName, projectName)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result composer.PackageMetadataResponse + DecodeJSON(t, resp, &result) + + assert.Contains(t, result.Packages, packageName) + pkgs := result.Packages[packageName] + assert.Len(t, pkgs, 1) + assert.Equal(t, packageName, pkgs[0].Name) + assert.Equal(t, packageVersion, pkgs[0].Version) + assert.Equal(t, packageType, pkgs[0].Type) + assert.Equal(t, packageDescription, pkgs[0].Description) + assert.Len(t, pkgs[0].Authors, 1) + assert.Equal(t, packageAuthor, pkgs[0].Authors[0].Name) + assert.Equal(t, "zip", pkgs[0].Dist.Type) + assert.Equal(t, "7b40bfd6da811b2b78deec1e944f156dbb2c747b", pkgs[0].Dist.Checksum) + }) +} diff --git a/integrations/api_packages_conan_test.go b/integrations/api_packages_conan_test.go new file mode 100644 index 0000000000..65d16801fc --- /dev/null +++ b/integrations/api_packages_conan_test.go @@ -0,0 +1,724 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
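The Conan test below relies on a token handshake: Basic auth against /v1/users/authenticate returns a token body that later requests send as a Bearer Authorization header. A minimal client sketch of that flow, with host, owner, and credentials as placeholder assumptions:

package main

import (
    "fmt"
    "io"
    "net/http"
)

func main() {
    base := "https://gitea.example.com/api/packages/gitea/conan" // assumption: registry base URL
    user, password := "user", "password"                         // assumption: credentials

    // Step 1: exchange basic auth for a token.
    req, _ := http.NewRequest(http.MethodGet, base+"/v1/users/authenticate", nil)
    req.SetBasicAuth(user, password)
    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    body, _ := io.ReadAll(resp.Body)
    resp.Body.Close()
    token := string(body)

    // Step 2: send the token as a Bearer header on subsequent requests.
    req, _ = http.NewRequest(http.MethodGet, base+"/v1/users/check_credentials", nil)
    req.Header.Set("Authorization", "Bearer "+token)
    resp, err = http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    resp.Body.Close()
    fmt.Println("check_credentials:", resp.Status) // the test expects 200 OK
}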
+ +package integrations + +import ( + "fmt" + "net/http" + stdurl "net/url" + "strings" + "testing" + "time" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + conan_model "code.gitea.io/gitea/models/packages/conan" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + conan_module "code.gitea.io/gitea/modules/packages/conan" + "code.gitea.io/gitea/modules/setting" + conan_router "code.gitea.io/gitea/routers/api/packages/conan" + + "github.com/stretchr/testify/assert" +) + +const ( + conanfileName = "conanfile.py" + conaninfoName = "conaninfo.txt" + + conanLicense = "MIT" + conanAuthor = "Gitea " + conanHomepage = "https://gitea.io/" + conanURL = "https://gitea.com/" + conanDescription = "Description of ConanPackage" + conanTopic = "gitea" + + conanPackageReference = "dummyreference" + + contentConaninfo = `[settings] + arch=x84_64 + +[requires] + fmt/7.1.3 + +[options] + shared=False + +[full_settings] + arch=x84_64 + +[full_requires] + fmt/7.1.3 + +[full_options] + shared=False + +[recipe_hash] + 74714915a51073acb548ca1ce29afbac + +[env] +CC=gcc-10` +) + +func addTokenAuthHeader(request *http.Request, token string) *http.Request { + request.Header.Set("Authorization", token) + return request +} + +func buildConanfileContent(name, version string) string { + return `from conans import ConanFile, CMake, tools + +class ConanPackageConan(ConanFile): + name = "` + name + `" + version = "` + version + `" + license = "` + conanLicense + `" + author = "` + conanAuthor + `" + homepage = "` + conanHomepage + `" + url = "` + conanURL + `" + description = "` + conanDescription + `" + topics = ("` + conanTopic + `") + settings = "os", "compiler", "build_type", "arch" + options = {"shared": [True, False], "fPIC": [True, False]} + default_options = {"shared": False, "fPIC": True} + generators = "cmake"` +} + +func uploadConanPackageV1(t *testing.T, baseURL, token, name, version, user, channel string) { + contentConanfile := buildConanfileContent(name, version) + + recipeURL := fmt.Sprintf("%s/v1/conans/%s/%s/%s/%s", baseURL, name, version, user, channel) + + req := NewRequest(t, "GET", recipeURL) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/digest", recipeURL)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/download_urls", recipeURL)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "POST", fmt.Sprintf("%s/upload_urls", recipeURL)) + MakeRequest(t, req, http.StatusUnauthorized) + + req = NewRequestWithJSON(t, "POST", fmt.Sprintf("%s/upload_urls", recipeURL), map[string]int64{ + conanfileName: int64(len(contentConanfile)), + "removed.txt": 0, + }) + req = addTokenAuthHeader(req, token) + resp := MakeRequest(t, req, http.StatusOK) + + uploadURLs := make(map[string]string) + DecodeJSON(t, resp, &uploadURLs) + + assert.Contains(t, uploadURLs, conanfileName) + assert.NotContains(t, uploadURLs, "removed.txt") + + uploadURL := uploadURLs[conanfileName] + assert.NotEmpty(t, uploadURL) + + req = NewRequestWithBody(t, "PUT", uploadURL, strings.NewReader(contentConanfile)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusCreated) + + packageURL := fmt.Sprintf("%s/packages/%s", recipeURL, conanPackageReference) + + req = NewRequest(t, "GET", packageURL) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, 
http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/digest", packageURL)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/download_urls", packageURL)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "POST", fmt.Sprintf("%s/upload_urls", packageURL)) + MakeRequest(t, req, http.StatusUnauthorized) + + req = NewRequestWithJSON(t, "POST", fmt.Sprintf("%s/upload_urls", packageURL), map[string]int64{ + conaninfoName: int64(len(contentConaninfo)), + "removed.txt": 0, + }) + req = addTokenAuthHeader(req, token) + resp = MakeRequest(t, req, http.StatusOK) + + uploadURLs = make(map[string]string) + DecodeJSON(t, resp, &uploadURLs) + + assert.Contains(t, uploadURLs, conaninfoName) + assert.NotContains(t, uploadURLs, "removed.txt") + + uploadURL = uploadURLs[conaninfoName] + assert.NotEmpty(t, uploadURL) + + req = NewRequestWithBody(t, "PUT", uploadURL, strings.NewReader(contentConaninfo)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusCreated) +} + +func uploadConanPackageV2(t *testing.T, baseURL, token, name, version, user, channel, recipeRevision, packageRevision string) { + contentConanfile := buildConanfileContent(name, version) + + recipeURL := fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s/revisions/%s", baseURL, name, version, user, channel, recipeRevision) + + req := NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/files/%s", recipeURL, conanfileName), strings.NewReader(contentConanfile)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusCreated) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/files", recipeURL)) + req = addTokenAuthHeader(req, token) + resp := MakeRequest(t, req, http.StatusOK) + + var list *struct { + Files map[string]interface{} `json:"files"` + } + DecodeJSON(t, resp, &list) + assert.Len(t, list.Files, 1) + assert.Contains(t, list.Files, conanfileName) + + packageURL := fmt.Sprintf("%s/packages/%s/revisions/%s", recipeURL, conanPackageReference, packageRevision) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/files", packageURL)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/files/%s", packageURL, conaninfoName), strings.NewReader(contentConaninfo)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusCreated) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/files", packageURL)) + req = addTokenAuthHeader(req, token) + resp = MakeRequest(t, req, http.StatusOK) + + list = nil + DecodeJSON(t, resp, &list) + assert.Len(t, list.Files, 1) + assert.Contains(t, list.Files, conaninfoName) +} + +func TestPackageConan(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + name := "ConanPackage" + version1 := "1.2" + version2 := "1.3" + user1 := "dummy" + user2 := "gitea" + channel1 := "test" + channel2 := "final" + revision1 := "rev1" + revision2 := "rev2" + + url := fmt.Sprintf("%sapi/packages/%s/conan", setting.AppURL, user.Name) + + t.Run("v1", func(t *testing.T) { + t.Run("Ping", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/v1/ping", url)) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, "revisions", resp.Header().Get("X-Conan-Server-Capabilities")) + }) + + token := "" + + t.Run("Authenticate", func(t *testing.T) { + defer 
PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/v1/users/authenticate", url)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + body := resp.Body.String() + assert.NotEmpty(t, body) + + token = fmt.Sprintf("Bearer %s", body) + }) + + t.Run("CheckCredentials", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/v1/users/check_credentials", url)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + }) + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + uploadConanPackageV1(t, url, token, name, version1, user1, channel1) + + t.Run("Validate", func(t *testing.T) { + defer PrintCurrentTest(t)() + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeConan) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.Equal(t, name, pd.Package.Name) + assert.Equal(t, version1, pd.Version.Version) + assert.IsType(t, &conan_module.Metadata{}, pd.Metadata) + metadata := pd.Metadata.(*conan_module.Metadata) + assert.Equal(t, conanLicense, metadata.License) + assert.Equal(t, conanAuthor, metadata.Author) + assert.Equal(t, conanHomepage, metadata.ProjectURL) + assert.Equal(t, conanURL, metadata.RepositoryURL) + assert.Equal(t, conanDescription, metadata.Description) + assert.Equal(t, []string{conanTopic}, metadata.Keywords) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 2) + + for _, pf := range pfs { + pb, err := packages.GetBlobByID(db.DefaultContext, pf.BlobID) + assert.NoError(t, err) + + if pf.Name == conanfileName { + assert.True(t, pf.IsLead) + + assert.Equal(t, int64(len(buildConanfileContent(name, version1))), pb.Size) + } else if pf.Name == conaninfoName { + assert.False(t, pf.IsLead) + + assert.Equal(t, int64(len(contentConaninfo)), pb.Size) + } else { + assert.Fail(t, "unknown file: %s", pf.Name) + } + } + }) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + recipeURL := fmt.Sprintf("%s/v1/conans/%s/%s/%s/%s", url, name, version1, user1, channel1) + + req := NewRequest(t, "GET", recipeURL) + resp := MakeRequest(t, req, http.StatusOK) + + fileHashes := make(map[string]string) + DecodeJSON(t, resp, &fileHashes) + assert.Len(t, fileHashes, 1) + assert.Contains(t, fileHashes, conanfileName) + assert.Equal(t, "7abc52241c22090782c54731371847a8", fileHashes[conanfileName]) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/digest", recipeURL)) + resp = MakeRequest(t, req, http.StatusOK) + + downloadURLs := make(map[string]string) + DecodeJSON(t, resp, &downloadURLs) + assert.Contains(t, downloadURLs, conanfileName) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/download_urls", recipeURL)) + resp = MakeRequest(t, req, http.StatusOK) + + DecodeJSON(t, resp, &downloadURLs) + assert.Contains(t, downloadURLs, conanfileName) + + req = NewRequest(t, "GET", downloadURLs[conanfileName]) + resp = MakeRequest(t, req, http.StatusOK) + assert.Equal(t, buildConanfileContent(name, version1), resp.Body.String()) + + packageURL := fmt.Sprintf("%s/packages/%s", recipeURL, conanPackageReference) + + req = NewRequest(t, "GET", packageURL) + resp = MakeRequest(t, req, http.StatusOK) + + fileHashes = make(map[string]string) + DecodeJSON(t, resp, &fileHashes) + assert.Len(t, fileHashes, 1) + 
assert.Contains(t, fileHashes, conaninfoName) + assert.Equal(t, "7628bfcc5b17f1470c468621a78df394", fileHashes[conaninfoName]) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/digest", packageURL)) + resp = MakeRequest(t, req, http.StatusOK) + + downloadURLs = make(map[string]string) + DecodeJSON(t, resp, &downloadURLs) + assert.Contains(t, downloadURLs, conaninfoName) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/download_urls", packageURL)) + resp = MakeRequest(t, req, http.StatusOK) + + DecodeJSON(t, resp, &downloadURLs) + assert.Contains(t, downloadURLs, conaninfoName) + + req = NewRequest(t, "GET", downloadURLs[conaninfoName]) + resp = MakeRequest(t, req, http.StatusOK) + assert.Equal(t, contentConaninfo, resp.Body.String()) + }) + + t.Run("Search", func(t *testing.T) { + uploadConanPackageV1(t, url, token, name, version2, user1, channel1) + uploadConanPackageV1(t, url, token, name, version1, user1, channel2) + uploadConanPackageV1(t, url, token, name, version1, user2, channel1) + uploadConanPackageV1(t, url, token, name, version1, user2, channel2) + + t.Run("Recipe", func(t *testing.T) { + defer PrintCurrentTest(t)() + + cases := []struct { + Query string + Expected []string + }{ + {"ConanPackage", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1.2", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1.1", []string{}}, + {"Conan*", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/*", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1*", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/*2", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1*2", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1.2@", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1.2@du*", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@dummy/final"}}, + {"ConanPackage/1.2@du*/", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@dummy/final"}}, + {"ConanPackage/1.2@du*/*test", []string{"ConanPackage/1.2@dummy/test"}}, + {"ConanPackage/1.2@du*/*st", []string{"ConanPackage/1.2@dummy/test"}}, + {"ConanPackage/1.2@gitea/*", []string{"ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"*/*@dummy", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@dummy/final"}}, + {"*/*@*/final", []string{"ConanPackage/1.2@dummy/final", "ConanPackage/1.2@gitea/final"}}, + } + + for i, c := range cases { + req := NewRequest(t, "GET", 
fmt.Sprintf("%s/v1/conans/search?q=%s", url, stdurl.QueryEscape(c.Query))) + resp := MakeRequest(t, req, http.StatusOK) + + var result *conan_router.SearchResult + DecodeJSON(t, resp, &result) + + assert.ElementsMatch(t, c.Expected, result.Results, "case %d: unexpected result", i) + } + }) + + t.Run("Package", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/v1/conans/%s/%s/%s/%s/search", url, name, version1, user1, channel2)) + resp := MakeRequest(t, req, http.StatusOK) + + var result map[string]*conan_module.Conaninfo + DecodeJSON(t, resp, &result) + + assert.Contains(t, result, conanPackageReference) + info := result[conanPackageReference] + assert.NotEmpty(t, info.Settings) + }) + }) + + t.Run("Delete", func(t *testing.T) { + t.Run("Package", func(t *testing.T) { + defer PrintCurrentTest(t)() + + cases := []struct { + Channel string + References []string + }{ + {channel1, []string{conanPackageReference}}, + {channel2, []string{}}, + } + + for i, c := range cases { + rref, _ := conan_module.NewRecipeReference(name, version1, user1, c.Channel, conan_module.DefaultRevision) + references, err := conan_model.GetPackageReferences(db.DefaultContext, user.ID, rref) + assert.NoError(t, err) + assert.NotEmpty(t, references) + + req := NewRequestWithJSON(t, "POST", fmt.Sprintf("%s/v1/conans/%s/%s/%s/%s/packages/delete", url, name, version1, user1, c.Channel), map[string][]string{ + "package_ids": c.References, + }) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + + references, err = conan_model.GetPackageReferences(db.DefaultContext, user.ID, rref) + assert.NoError(t, err) + assert.Empty(t, references, "case %d: should be empty", i) + } + }) + + t.Run("Recipe", func(t *testing.T) { + defer PrintCurrentTest(t)() + + cases := []struct { + Channel string + }{ + {channel1}, + {channel2}, + } + + for i, c := range cases { + rref, _ := conan_module.NewRecipeReference(name, version1, user1, c.Channel, conan_module.DefaultRevision) + revisions, err := conan_model.GetRecipeRevisions(db.DefaultContext, user.ID, rref) + assert.NoError(t, err) + assert.NotEmpty(t, revisions) + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/v1/conans/%s/%s/%s/%s", url, name, version1, user1, c.Channel)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + + revisions, err = conan_model.GetRecipeRevisions(db.DefaultContext, user.ID, rref) + assert.NoError(t, err) + assert.Empty(t, revisions, "case %d: should be empty", i) + } + }) + }) + }) + + t.Run("v2", func(t *testing.T) { + t.Run("Ping", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/v2/ping", url)) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, "revisions", resp.Header().Get("X-Conan-Server-Capabilities")) + }) + + token := "" + + t.Run("Authenticate", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/v2/users/authenticate", url)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + body := resp.Body.String() + assert.NotEmpty(t, body) + + token = fmt.Sprintf("Bearer %s", body) + }) + + t.Run("CheckCredentials", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/v2/users/check_credentials", url)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + }) + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + 
uploadConanPackageV2(t, url, token, name, version1, user1, channel1, revision1, revision1) + + t.Run("Validate", func(t *testing.T) { + defer PrintCurrentTest(t)() + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeConan) + assert.NoError(t, err) + assert.Len(t, pvs, 2) + }) + }) + + t.Run("Latest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + recipeURL := fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s", url, name, version1, user1, channel1) + + req := NewRequest(t, "GET", fmt.Sprintf("%s/latest", recipeURL)) + resp := MakeRequest(t, req, http.StatusOK) + + obj := make(map[string]string) + DecodeJSON(t, resp, &obj) + assert.Contains(t, obj, "revision") + assert.Equal(t, revision1, obj["revision"]) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/revisions/%s/packages/%s/latest", recipeURL, revision1, conanPackageReference)) + resp = MakeRequest(t, req, http.StatusOK) + + obj = make(map[string]string) + DecodeJSON(t, resp, &obj) + assert.Contains(t, obj, "revision") + assert.Equal(t, revision1, obj["revision"]) + }) + + t.Run("ListRevisions", func(t *testing.T) { + defer PrintCurrentTest(t)() + + uploadConanPackageV2(t, url, token, name, version1, user1, channel1, revision1, revision2) + uploadConanPackageV2(t, url, token, name, version1, user1, channel1, revision2, revision1) + uploadConanPackageV2(t, url, token, name, version1, user1, channel1, revision2, revision2) + + recipeURL := fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s/revisions", url, name, version1, user1, channel1) + + req := NewRequest(t, "GET", recipeURL) + resp := MakeRequest(t, req, http.StatusOK) + + type RevisionInfo struct { + Revision string `json:"revision"` + Time time.Time `json:"time"` + } + + type RevisionList struct { + Revisions []*RevisionInfo `json:"revisions"` + } + + var list *RevisionList + DecodeJSON(t, resp, &list) + assert.Len(t, list.Revisions, 2) + revs := make([]string, 0, len(list.Revisions)) + for _, rev := range list.Revisions { + revs = append(revs, rev.Revision) + } + assert.ElementsMatch(t, []string{revision1, revision2}, revs) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/%s/packages/%s/revisions", recipeURL, revision1, conanPackageReference)) + resp = MakeRequest(t, req, http.StatusOK) + + DecodeJSON(t, resp, &list) + assert.Len(t, list.Revisions, 2) + revs = make([]string, 0, len(list.Revisions)) + for _, rev := range list.Revisions { + revs = append(revs, rev.Revision) + } + assert.ElementsMatch(t, []string{revision1, revision2}, revs) + }) + + t.Run("Search", func(t *testing.T) { + t.Run("Recipe", func(t *testing.T) { + defer PrintCurrentTest(t)() + + cases := []struct { + Query string + Expected []string + }{ + {"ConanPackage", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1.2", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1.1", []string{}}, + {"Conan*", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/*", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1*", []string{"ConanPackage/1.2@dummy/test", 
"ConanPackage/1.3@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/*2", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1*2", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1.2@", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"ConanPackage/1.2@du*", []string{"ConanPackage/1.2@dummy/test"}}, + {"ConanPackage/1.2@du*/", []string{"ConanPackage/1.2@dummy/test"}}, + {"ConanPackage/1.2@du*/*test", []string{"ConanPackage/1.2@dummy/test"}}, + {"ConanPackage/1.2@du*/*st", []string{"ConanPackage/1.2@dummy/test"}}, + {"ConanPackage/1.2@gitea/*", []string{"ConanPackage/1.2@gitea/test", "ConanPackage/1.2@gitea/final"}}, + {"*/*@dummy", []string{"ConanPackage/1.2@dummy/test", "ConanPackage/1.3@dummy/test"}}, + {"*/*@*/final", []string{"ConanPackage/1.2@gitea/final"}}, + } + + for i, c := range cases { + req := NewRequest(t, "GET", fmt.Sprintf("%s/v2/conans/search?q=%s", url, stdurl.QueryEscape(c.Query))) + resp := MakeRequest(t, req, http.StatusOK) + + var result *conan_router.SearchResult + DecodeJSON(t, resp, &result) + + assert.ElementsMatch(t, c.Expected, result.Results, "case %d: unexpected result", i) + } + }) + + t.Run("Package", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s/search", url, name, version1, user1, channel1)) + resp := MakeRequest(t, req, http.StatusOK) + + var result map[string]*conan_module.Conaninfo + DecodeJSON(t, resp, &result) + + assert.Contains(t, result, conanPackageReference) + info := result[conanPackageReference] + assert.NotEmpty(t, info.Settings) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s/revisions/%s/search", url, name, version1, user1, channel1, revision1)) + resp = MakeRequest(t, req, http.StatusOK) + + result = make(map[string]*conan_module.Conaninfo) + DecodeJSON(t, resp, &result) + + assert.Contains(t, result, conanPackageReference) + info = result[conanPackageReference] + assert.NotEmpty(t, info.Settings) + }) + }) + + t.Run("Delete", func(t *testing.T) { + t.Run("Package", func(t *testing.T) { + defer PrintCurrentTest(t)() + + rref, _ := conan_module.NewRecipeReference(name, version1, user1, channel1, revision1) + pref, _ := conan_module.NewPackageReference(rref, conanPackageReference, conan_module.DefaultRevision) + + checkPackageRevisionCount := func(count int) { + revisions, err := conan_model.GetPackageRevisions(db.DefaultContext, user.ID, pref) + assert.NoError(t, err) + assert.Len(t, revisions, count) + } + checkPackageReferenceCount := func(count int) { + references, err := conan_model.GetPackageReferences(db.DefaultContext, user.ID, rref) + assert.NoError(t, err) + assert.Len(t, references, count) + } + + checkPackageRevisionCount(2) + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s/revisions/%s/packages/%s/revisions/%s", url, name, version1, user1, channel1, revision1, conanPackageReference, revision1)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + + checkPackageRevisionCount(1) + + req = NewRequest(t, "DELETE", fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s/revisions/%s/packages/%s", url, name, version1, user1, channel1, revision1, conanPackageReference)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + + 
checkPackageRevisionCount(0) + + rref = rref.WithRevision(revision2) + + checkPackageReferenceCount(1) + + req = NewRequest(t, "DELETE", fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s/revisions/%s/packages", url, name, version1, user1, channel1, revision2)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + + checkPackageReferenceCount(0) + }) + + t.Run("Recipe", func(t *testing.T) { + defer PrintCurrentTest(t)() + + rref, _ := conan_module.NewRecipeReference(name, version1, user1, channel1, conan_module.DefaultRevision) + + checkRecipeRevisionCount := func(count int) { + revisions, err := conan_model.GetRecipeRevisions(db.DefaultContext, user.ID, rref) + assert.NoError(t, err) + assert.Len(t, revisions, count) + } + + checkRecipeRevisionCount(2) + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s/revisions/%s", url, name, version1, user1, channel1, revision1)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + + checkRecipeRevisionCount(1) + + req = NewRequest(t, "DELETE", fmt.Sprintf("%s/v2/conans/%s/%s/%s/%s", url, name, version1, user1, channel1)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusOK) + + checkRecipeRevisionCount(0) + }) + }) + }) +} diff --git a/integrations/api_packages_container_test.go b/integrations/api_packages_container_test.go new file mode 100644 index 0000000000..a8f49423e2 --- /dev/null +++ b/integrations/api_packages_container_test.go @@ -0,0 +1,534 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "bytes" + "encoding/base64" + "fmt" + "net/http" + "strings" + "testing" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + container_model "code.gitea.io/gitea/models/packages/container" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + container_module "code.gitea.io/gitea/modules/packages/container" + "code.gitea.io/gitea/modules/packages/container/oci" + "code.gitea.io/gitea/modules/setting" + + "github.com/stretchr/testify/assert" +) + +func TestPackageContainer(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + has := func(l packages_model.PackagePropertyList, name string) bool { + for _, pp := range l { + if pp.Name == name { + return true + } + } + return false + } + + images := []string{"test", "te/st"} + tags := []string{"latest", "main"} + multiTag := "multi" + + unknownDigest := "sha256:0000000000000000000000000000000000000000000000000000000000000000" + + blobDigest := "sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4" + blobContent, _ := base64.StdEncoding.DecodeString(`H4sIAAAJbogA/2IYBaNgFIxYAAgAAP//Lq+17wAEAAA=`) + + configDigest := "sha256:4607e093bec406eaadb6f3a340f63400c9d3a7038680744c406903766b938f0d" + configContent := `{"architecture":"amd64","config":{"Env":["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"],"Cmd":["/true"],"ArgsEscaped":true,"Image":"sha256:9bd8b88dc68b80cffe126cc820e4b52c6e558eb3b37680bfee8e5f3ed7b8c257"},"container":"b89fe92a887d55c0961f02bdfbfd8ac3ddf66167db374770d2d9e9fab3311510","container_config":{"Hostname":"b89fe92a887d","Env":["PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"],"Cmd":["/bin/sh","-c","#(nop) ","CMD 
[\"/true\"]"],"ArgsEscaped":true,"Image":"sha256:9bd8b88dc68b80cffe126cc820e4b52c6e558eb3b37680bfee8e5f3ed7b8c257"},"created":"2022-01-01T00:00:00.000000000Z","docker_version":"20.10.12","history":[{"created":"2022-01-01T00:00:00.000000000Z","created_by":"/bin/sh -c #(nop) COPY file:0e7589b0c800daaf6fa460d2677101e4676dd9491980210cb345480e513f3602 in /true "},{"created":"2022-01-01T00:00:00.000000001Z","created_by":"/bin/sh -c #(nop) CMD [\"/true\"]","empty_layer":true}],"os":"linux","rootfs":{"type":"layers","diff_ids":["sha256:0ff3b91bdf21ecdf2f2f3d4372c2098a14dbe06cd678e8f0a85fd4902d00e2e2"]}}` + + manifestDigest := "sha256:4f10484d1c1bb13e3956b4de1cd42db8e0f14a75be1617b60f2de3cd59c803c6" + manifestContent := `{"schemaVersion":2,"mediaType":"` + oci.MediaTypeDockerManifest + `","config":{"mediaType":"application/vnd.docker.container.image.v1+json","digest":"sha256:4607e093bec406eaadb6f3a340f63400c9d3a7038680744c406903766b938f0d","size":1069},"layers":[{"mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip","digest":"sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4","size":32}]}` + + untaggedManifestDigest := "sha256:4305f5f5572b9a426b88909b036e52ee3cf3d7b9c1b01fac840e90747f56623d" + untaggedManifestContent := `{"schemaVersion":2,"mediaType":"` + oci.MediaTypeImageManifest + `","config":{"mediaType":"application/vnd.docker.container.image.v1+json","digest":"sha256:4607e093bec406eaadb6f3a340f63400c9d3a7038680744c406903766b938f0d","size":1069},"layers":[{"mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip","digest":"sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4","size":32}]}` + + indexManifestDigest := "sha256:bab112d6efb9e7f221995caaaa880352feb5bd8b1faf52fae8d12c113aa123ec" + indexManifestContent := `{"schemaVersion":2,"mediaType":"` + oci.MediaTypeImageIndex + `","manifests":[{"mediaType":"` + oci.MediaTypeDockerManifest + `","digest":"` + manifestDigest + `","platform":{"os":"linux","architecture":"arm","variant":"v7"}},{"mediaType":"` + oci.MediaTypeImageManifest + `","digest":"` + untaggedManifestDigest + `","platform":{"os":"linux","architecture":"arm64","variant":"v8"}}]}` + + anonymousToken := "" + userToken := "" + + t.Run("Authenticate", func(t *testing.T) { + type TokenResponse struct { + Token string `json:"token"` + } + + authenticate := []string{ + `Bearer realm="` + setting.AppURL + `v2/token"`, + `Basic`, + } + + t.Run("Anonymous", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%sv2", setting.AppURL)) + resp := MakeRequest(t, req, http.StatusUnauthorized) + + assert.ElementsMatch(t, authenticate, resp.Header().Values("WWW-Authenticate")) + + req = NewRequest(t, "GET", fmt.Sprintf("%sv2/token", setting.AppURL)) + resp = MakeRequest(t, req, http.StatusOK) + + tokenResponse := &TokenResponse{} + DecodeJSON(t, resp, &tokenResponse) + + assert.NotEmpty(t, tokenResponse.Token) + + anonymousToken = fmt.Sprintf("Bearer %s", tokenResponse.Token) + + req = NewRequest(t, "GET", fmt.Sprintf("%sv2", setting.AppURL)) + addTokenAuthHeader(req, anonymousToken) + resp = MakeRequest(t, req, http.StatusOK) + }) + + t.Run("User", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%sv2", setting.AppURL)) + resp := MakeRequest(t, req, http.StatusUnauthorized) + + assert.ElementsMatch(t, authenticate, resp.Header().Values("WWW-Authenticate")) + + req = NewRequest(t, "GET", fmt.Sprintf("%sv2/token", setting.AppURL)) + req = 
AddBasicAuthHeader(req, user.Name) + resp = MakeRequest(t, req, http.StatusOK) + + tokenResponse := &TokenResponse{} + DecodeJSON(t, resp, &tokenResponse) + + assert.NotEmpty(t, tokenResponse.Token) + + userToken = fmt.Sprintf("Bearer %s", tokenResponse.Token) + + req = NewRequest(t, "GET", fmt.Sprintf("%sv2", setting.AppURL)) + addTokenAuthHeader(req, userToken) + resp = MakeRequest(t, req, http.StatusOK) + }) + }) + + t.Run("DetermineSupport", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%sv2", setting.AppURL)) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusOK) + assert.Equal(t, "registry/2.0", resp.Header().Get("Docker-Distribution-Api-Version")) + }) + + for _, image := range images { + t.Run(fmt.Sprintf("[Image:%s]", image), func(t *testing.T) { + url := fmt.Sprintf("%sv2/%s/%s", setting.AppURL, user.Name, image) + + t.Run("UploadBlob/Monolithic", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads", url)) + addTokenAuthHeader(req, anonymousToken) + MakeRequest(t, req, http.StatusUnauthorized) + + req = NewRequestWithBody(t, "POST", fmt.Sprintf("%s/blobs/uploads?digest=%s", url, unknownDigest), bytes.NewReader(blobContent)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusBadRequest) + + req = NewRequestWithBody(t, "POST", fmt.Sprintf("%s/blobs/uploads?digest=%s", url, blobDigest), bytes.NewReader(blobContent)) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusCreated) + + assert.Equal(t, fmt.Sprintf("/v2/%s/%s/blobs/%s", user.Name, image, blobDigest), resp.Header().Get("Location")) + assert.Equal(t, blobDigest, resp.Header().Get("Docker-Content-Digest")) + + pv, err := packages_model.GetInternalVersionByNameAndVersion(db.DefaultContext, user.ID, packages_model.TypeContainer, image, container_model.UploadVersion) + assert.NoError(t, err) + + pfs, err := packages_model.GetFilesByVersionID(db.DefaultContext, pv.ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + + pb, err := packages_model.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.EqualValues(t, len(blobContent), pb.Size) + }) + + t.Run("UploadBlob/Chunked", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads", url)) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusAccepted) + + uuid := resp.Header().Get("Docker-Upload-Uuid") + assert.NotEmpty(t, uuid) + + pbu, err := packages_model.GetBlobUploadByID(db.DefaultContext, uuid) + assert.NoError(t, err) + assert.EqualValues(t, 0, pbu.BytesReceived) + + uploadURL := resp.Header().Get("Location") + assert.NotEmpty(t, uploadURL) + + req = NewRequestWithBody(t, "PATCH", setting.AppURL+uploadURL[1:]+"000", bytes.NewReader(blobContent)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequestWithBody(t, "PATCH", setting.AppURL+uploadURL[1:], bytes.NewReader(blobContent)) + addTokenAuthHeader(req, userToken) + + req.Header.Set("Content-Range", "1-10") + MakeRequest(t, req, http.StatusRequestedRangeNotSatisfiable) + + contentRange := fmt.Sprintf("0-%d", len(blobContent)-1) + req.Header.Set("Content-Range", contentRange) + resp = MakeRequest(t, req, http.StatusAccepted) + + assert.Equal(t, uuid, resp.Header().Get("Docker-Upload-Uuid")) + assert.Equal(t, contentRange, resp.Header().Get("Range")) + + pbu, err = 
packages_model.GetBlobUploadByID(db.DefaultContext, uuid) + assert.NoError(t, err) + assert.EqualValues(t, len(blobContent), pbu.BytesReceived) + + uploadURL = resp.Header().Get("Location") + + req = NewRequest(t, "PUT", fmt.Sprintf("%s?digest=%s", setting.AppURL+uploadURL[1:], blobDigest)) + addTokenAuthHeader(req, userToken) + resp = MakeRequest(t, req, http.StatusCreated) + + assert.Equal(t, fmt.Sprintf("/v2/%s/%s/blobs/%s", user.Name, image, blobDigest), resp.Header().Get("Location")) + assert.Equal(t, blobDigest, resp.Header().Get("Docker-Content-Digest")) + }) + + for _, tag := range tags { + t.Run(fmt.Sprintf("[Tag:%s]", tag), func(t *testing.T) { + t.Run("UploadManifest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "POST", fmt.Sprintf("%s/blobs/uploads?digest=%s", url, configDigest), strings.NewReader(configContent)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusCreated) + + req = NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/manifests/%s", url, tag), strings.NewReader(manifestContent)) + addTokenAuthHeader(req, anonymousToken) + req.Header.Set("Content-Type", oci.MediaTypeDockerManifest) + MakeRequest(t, req, http.StatusUnauthorized) + + req = NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/manifests/%s", url, tag), strings.NewReader(manifestContent)) + addTokenAuthHeader(req, userToken) + req.Header.Set("Content-Type", oci.MediaTypeDockerManifest) + resp := MakeRequest(t, req, http.StatusCreated) + + assert.Equal(t, manifestDigest, resp.Header().Get("Docker-Content-Digest")) + + pv, err := packages_model.GetVersionByNameAndVersion(db.DefaultContext, user.ID, packages_model.TypeContainer, image, tag) + assert.NoError(t, err) + + pd, err := packages_model.GetPackageDescriptor(db.DefaultContext, pv) + assert.NoError(t, err) + assert.Nil(t, pd.SemVer) + assert.Equal(t, image, pd.Package.Name) + assert.Equal(t, tag, pd.Version.Version) + assert.True(t, has(pd.Properties, container_module.PropertyManifestTagged)) + + assert.IsType(t, &container_module.Metadata{}, pd.Metadata) + metadata := pd.Metadata.(*container_module.Metadata) + assert.Equal(t, container_module.TypeOCI, metadata.Type) + assert.Len(t, metadata.ImageLayers, 2) + assert.Empty(t, metadata.MultiArch) + + assert.Len(t, pd.Files, 3) + for _, pfd := range pd.Files { + switch pfd.File.Name { + case container_model.ManifestFilename: + assert.True(t, pfd.File.IsLead) + assert.Equal(t, oci.MediaTypeDockerManifest, pfd.Properties.GetByName(container_module.PropertyMediaType)) + assert.Equal(t, manifestDigest, pfd.Properties.GetByName(container_module.PropertyDigest)) + case strings.Replace(configDigest, ":", "_", 1): + assert.False(t, pfd.File.IsLead) + assert.Equal(t, "application/vnd.docker.container.image.v1+json", pfd.Properties.GetByName(container_module.PropertyMediaType)) + assert.Equal(t, configDigest, pfd.Properties.GetByName(container_module.PropertyDigest)) + case strings.Replace(blobDigest, ":", "_", 1): + assert.False(t, pfd.File.IsLead) + assert.Equal(t, "application/vnd.docker.image.rootfs.diff.tar.gzip", pfd.Properties.GetByName(container_module.PropertyMediaType)) + assert.Equal(t, blobDigest, pfd.Properties.GetByName(container_module.PropertyDigest)) + default: + assert.Fail(t, "unknown file: %s", pfd.File.Name) + } + } + + // Overwrite existing tag + req = NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/manifests/%s", url, tag), strings.NewReader(manifestContent)) + addTokenAuthHeader(req, userToken) + req.Header.Set("Content-Type", 
oci.MediaTypeDockerManifest) + MakeRequest(t, req, http.StatusCreated) + }) + + t.Run("HeadManifest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "HEAD", fmt.Sprintf("%s/manifests/unknown-tag", url)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "HEAD", fmt.Sprintf("%s/manifests/%s", url, tag)) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, fmt.Sprintf("%d", len(manifestContent)), resp.Header().Get("Content-Length")) + assert.Equal(t, manifestDigest, resp.Header().Get("Docker-Content-Digest")) + }) + + t.Run("GetManifest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/manifests/unknown-tag", url)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/manifests/%s", url, tag)) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, fmt.Sprintf("%d", len(manifestContent)), resp.Header().Get("Content-Length")) + assert.Equal(t, oci.MediaTypeDockerManifest, resp.Header().Get("Content-Type")) + assert.Equal(t, manifestDigest, resp.Header().Get("Docker-Content-Digest")) + assert.Equal(t, manifestContent, resp.Body.String()) + }) + }) + } + + t.Run("UploadUntaggedManifest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/manifests/%s", url, untaggedManifestDigest), strings.NewReader(untaggedManifestContent)) + addTokenAuthHeader(req, userToken) + req.Header.Set("Content-Type", oci.MediaTypeImageManifest) + resp := MakeRequest(t, req, http.StatusCreated) + + assert.Equal(t, untaggedManifestDigest, resp.Header().Get("Docker-Content-Digest")) + + req = NewRequest(t, "HEAD", fmt.Sprintf("%s/manifests/%s", url, untaggedManifestDigest)) + addTokenAuthHeader(req, userToken) + resp = MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, fmt.Sprintf("%d", len(untaggedManifestContent)), resp.Header().Get("Content-Length")) + assert.Equal(t, untaggedManifestDigest, resp.Header().Get("Docker-Content-Digest")) + + pv, err := packages_model.GetVersionByNameAndVersion(db.DefaultContext, user.ID, packages_model.TypeContainer, image, untaggedManifestDigest) + assert.NoError(t, err) + + pd, err := packages_model.GetPackageDescriptor(db.DefaultContext, pv) + assert.NoError(t, err) + assert.Nil(t, pd.SemVer) + assert.Equal(t, image, pd.Package.Name) + assert.Equal(t, untaggedManifestDigest, pd.Version.Version) + assert.False(t, has(pd.Properties, container_module.PropertyManifestTagged)) + + assert.IsType(t, &container_module.Metadata{}, pd.Metadata) + + assert.Len(t, pd.Files, 3) + for _, pfd := range pd.Files { + if pfd.File.Name == container_model.ManifestFilename { + assert.True(t, pfd.File.IsLead) + assert.Equal(t, oci.MediaTypeImageManifest, pfd.Properties.GetByName(container_module.PropertyMediaType)) + assert.Equal(t, untaggedManifestDigest, pfd.Properties.GetByName(container_module.PropertyDigest)) + } + } + }) + + t.Run("UploadIndexManifest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/manifests/%s", url, multiTag), strings.NewReader(indexManifestContent)) + addTokenAuthHeader(req, userToken) + req.Header.Set("Content-Type", oci.MediaTypeImageIndex) + resp := MakeRequest(t, req, http.StatusCreated) + + assert.Equal(t, indexManifestDigest, 
resp.Header().Get("Docker-Content-Digest")) + + pv, err := packages_model.GetVersionByNameAndVersion(db.DefaultContext, user.ID, packages_model.TypeContainer, image, multiTag) + assert.NoError(t, err) + + pd, err := packages_model.GetPackageDescriptor(db.DefaultContext, pv) + assert.NoError(t, err) + assert.Nil(t, pd.SemVer) + assert.Equal(t, image, pd.Package.Name) + assert.Equal(t, multiTag, pd.Version.Version) + assert.True(t, has(pd.Properties, container_module.PropertyManifestTagged)) + + getAllByName := func(l packages_model.PackagePropertyList, name string) []string { + values := make([]string, 0, len(l)) + for _, pp := range l { + if pp.Name == name { + values = append(values, pp.Value) + } + } + return values + } + assert.ElementsMatch(t, []string{manifestDigest, untaggedManifestDigest}, getAllByName(pd.Properties, container_module.PropertyManifestReference)) + + assert.IsType(t, &container_module.Metadata{}, pd.Metadata) + metadata := pd.Metadata.(*container_module.Metadata) + assert.Equal(t, container_module.TypeOCI, metadata.Type) + assert.Contains(t, metadata.MultiArch, "linux/arm/v7") + assert.Equal(t, manifestDigest, metadata.MultiArch["linux/arm/v7"]) + assert.Contains(t, metadata.MultiArch, "linux/arm64/v8") + assert.Equal(t, untaggedManifestDigest, metadata.MultiArch["linux/arm64/v8"]) + + assert.Len(t, pd.Files, 1) + assert.True(t, pd.Files[0].File.IsLead) + assert.Equal(t, oci.MediaTypeImageIndex, pd.Files[0].Properties.GetByName(container_module.PropertyMediaType)) + assert.Equal(t, indexManifestDigest, pd.Files[0].Properties.GetByName(container_module.PropertyDigest)) + }) + + t.Run("UploadBlob/Mount", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads?mount=%s", url, unknownDigest)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusAccepted) + + req = NewRequest(t, "POST", fmt.Sprintf("%s/blobs/uploads?mount=%s", url, blobDigest)) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusCreated) + + assert.Equal(t, fmt.Sprintf("/v2/%s/%s/blobs/%s", user.Name, image, blobDigest), resp.Header().Get("Location")) + assert.Equal(t, blobDigest, resp.Header().Get("Docker-Content-Digest")) + }) + + t.Run("HeadBlob", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "HEAD", fmt.Sprintf("%s/blobs/%s", url, unknownDigest)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "HEAD", fmt.Sprintf("%s/blobs/%s", url, blobDigest)) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, fmt.Sprintf("%d", len(blobContent)), resp.Header().Get("Content-Length")) + assert.Equal(t, blobDigest, resp.Header().Get("Docker-Content-Digest")) + }) + + t.Run("GetBlob", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/blobs/%s", url, unknownDigest)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/blobs/%s", url, blobDigest)) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, fmt.Sprintf("%d", len(blobContent)), resp.Header().Get("Content-Length")) + assert.Equal(t, blobDigest, resp.Header().Get("Docker-Content-Digest")) + assert.Equal(t, blobContent, resp.Body.Bytes()) + }) + + t.Run("GetTagList", func(t *testing.T) { + defer PrintCurrentTest(t)() + + cases := []struct { + URL string + 
ExpectedTags []string + ExpectedLink string + }{ + { + URL: fmt.Sprintf("%s/tags/list", url), + ExpectedTags: []string{"latest", "main", "multi"}, + ExpectedLink: fmt.Sprintf(`; rel="next"`, user.Name, image), + }, + { + URL: fmt.Sprintf("%s/tags/list?n=0", url), + ExpectedTags: []string{}, + ExpectedLink: "", + }, + { + URL: fmt.Sprintf("%s/tags/list?n=2", url), + ExpectedTags: []string{"latest", "main"}, + ExpectedLink: fmt.Sprintf(`; rel="next"`, user.Name, image), + }, + { + URL: fmt.Sprintf("%s/tags/list?last=main", url), + ExpectedTags: []string{"multi"}, + ExpectedLink: fmt.Sprintf(`; rel="next"`, user.Name, image), + }, + { + URL: fmt.Sprintf("%s/tags/list?n=1&last=latest", url), + ExpectedTags: []string{"main"}, + ExpectedLink: fmt.Sprintf(`; rel="next"`, user.Name, image), + }, + } + + for _, c := range cases { + req := NewRequest(t, "GET", c.URL) + addTokenAuthHeader(req, userToken) + resp := MakeRequest(t, req, http.StatusOK) + + type TagList struct { + Name string `json:"name"` + Tags []string `json:"tags"` + } + + tagList := &TagList{} + DecodeJSON(t, resp, &tagList) + + assert.Equal(t, user.Name+"/"+image, tagList.Name) + assert.Equal(t, c.ExpectedTags, tagList.Tags) + assert.Equal(t, c.ExpectedLink, resp.Header().Get("Link")) + } + }) + + t.Run("Delete", func(t *testing.T) { + t.Run("Blob", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/blobs/%s", url, blobDigest)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusAccepted) + + req = NewRequest(t, "HEAD", fmt.Sprintf("%s/blobs/%s", url, blobDigest)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusNotFound) + }) + + t.Run("ManifestByDigest", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/manifests/%s", url, untaggedManifestDigest)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusAccepted) + + req = NewRequest(t, "HEAD", fmt.Sprintf("%s/manifests/%s", url, untaggedManifestDigest)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusNotFound) + }) + + t.Run("ManifestByTag", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/manifests/%s", url, multiTag)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusAccepted) + + req = NewRequest(t, "HEAD", fmt.Sprintf("%s/manifests/%s", url, multiTag)) + addTokenAuthHeader(req, userToken) + MakeRequest(t, req, http.StatusNotFound) + }) + }) + }) + } +} diff --git a/integrations/api_packages_generic_test.go b/integrations/api_packages_generic_test.go new file mode 100644 index 0000000000..c507702eaa --- /dev/null +++ b/integrations/api_packages_generic_test.go @@ -0,0 +1,109 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
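The generic-package test below addresses a single route, /api/packages/{owner}/generic/{name}/{version}/{filename}: PUT uploads, GET downloads, DELETE removes. A minimal client sketch of that round trip; the URL and credentials are placeholder assumptions:

package main

import (
    "bytes"
    "fmt"
    "io"
    "net/http"
)

func main() {
    url := "https://gitea.example.com/api/packages/gitea/generic/te-st_pac.kage/1.0.3/fi-le_na.me" // assumption
    user, password := "user", "password"                                                           // assumption

    // Upload the raw file content (authenticated).
    req, _ := http.NewRequest(http.MethodPut, url, bytes.NewReader([]byte{1, 2, 3}))
    req.SetBasicAuth(user, password)
    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    resp.Body.Close()
    fmt.Println("upload:", resp.Status) // 201 Created; 400 if the file already exists

    // Download it again; the test performs this without authentication.
    resp, err = http.Get(url)
    if err != nil {
        panic(err)
    }
    data, _ := io.ReadAll(resp.Body)
    resp.Body.Close()
    fmt.Println("download:", resp.Status, len(data), "bytes")
}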
+ +package integrations + +import ( + "bytes" + "fmt" + "net/http" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + + "github.com/stretchr/testify/assert" +) + +func TestPackageGeneric(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + packageName := "te-st_pac.kage" + packageVersion := "1.0.3" + filename := "fi-le_na.me" + content := []byte{1, 2, 3} + + url := fmt.Sprintf("/api/packages/%s/generic/%s/%s/%s", user.Name, packageName, packageVersion, filename) + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", url, bytes.NewReader(content)) + AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeGeneric) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.Nil(t, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, filename, pfs[0].Name) + assert.True(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(len(content)), pb.Size) + }) + + t.Run("UploadExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", url, bytes.NewReader(content)) + AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusBadRequest) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", url) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, content, resp.Body.Bytes()) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeGeneric) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(1), pvs[0].DownloadCount) + }) + + t.Run("Delete", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", url) + AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusOK) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeGeneric) + assert.NoError(t, err) + assert.Empty(t, pvs) + }) + + t.Run("DownloadNotExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", url) + MakeRequest(t, req, http.StatusNotFound) + }) + + t.Run("DeleteNotExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", url) + AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusNotFound) + }) +} diff --git a/integrations/api_packages_helm_test.go b/integrations/api_packages_helm_test.go new file mode 100644 index 0000000000..fcf5d2f762 --- /dev/null +++ b/integrations/api_packages_helm_test.go @@ -0,0 +1,166 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
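+
+// TestPackageHelm (below) builds a minimal chart archive in memory (a gzipped
+// tarball containing only Chart.yaml), uploads it, downloads it back, and
+// verifies that the generated index.yaml lists the chart with its maintainer,
+// dependency, download URL and server contextPath.
+//
+// A rough sketch of how a client could talk to this endpoint; the instance URL
+// and the user2/password credentials are assumptions for illustration:
+//
+//	curl -u user2:password --data-binary @test-chart-1.0.3.tgz \
+//	  http://localhost:3000/api/packages/user2/helm/api/charts
+//	helm repo add gitea http://localhost:3000/api/packages/user2/helm \
+//	  --username user2 --password password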
+ +package integrations + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "fmt" + "net/http" + "testing" + "time" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + helm_module "code.gitea.io/gitea/modules/packages/helm" + "code.gitea.io/gitea/modules/setting" + + "github.com/stretchr/testify/assert" + "gopkg.in/yaml.v2" +) + +func TestPackageHelm(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + packageName := "test-chart" + packageVersion := "1.0.3" + packageAuthor := "KN4CK3R" + packageDescription := "Gitea Test Package" + + filename := fmt.Sprintf("%s-%s.tgz", packageName, packageVersion) + + chartContent := `apiVersion: v2 +description: ` + packageDescription + ` +name: ` + packageName + ` +type: application +version: ` + packageVersion + ` +maintainers: +- name: ` + packageAuthor + ` +dependencies: +- name: dep1 + repository: https://example.com/ + version: 1.0.0` + + var buf bytes.Buffer + zw := gzip.NewWriter(&buf) + archive := tar.NewWriter(zw) + archive.WriteHeader(&tar.Header{ + Name: fmt.Sprintf("%s/Chart.yaml", packageName), + Mode: 0o600, + Size: int64(len(chartContent)), + }) + archive.Write([]byte(chartContent)) + archive.Close() + zw.Close() + content := buf.Bytes() + + url := fmt.Sprintf("/api/packages/%s/helm", user.Name) + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + uploadURL := url + "/api/charts" + + req := NewRequestWithBody(t, "POST", uploadURL, bytes.NewReader(content)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeHelm) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &helm_module.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, filename, pfs[0].Name) + assert.True(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(len(content)), pb.Size) + + req = NewRequestWithBody(t, "POST", uploadURL, bytes.NewReader(content)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusCreated) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + checkDownloadCount := func(count int64) { + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeHelm) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, count, pvs[0].DownloadCount) + } + + checkDownloadCount(0) + + req := NewRequest(t, "GET", fmt.Sprintf("%s/%s", url, filename)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, content, resp.Body.Bytes()) + + checkDownloadCount(1) + }) + + t.Run("Index", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/index.yaml", url)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + type ChartVersion struct { + helm_module.Metadata `yaml:",inline"` + 
URLs []string `yaml:"urls"` + Created time.Time `yaml:"created,omitempty"` + Removed bool `yaml:"removed,omitempty"` + Digest string `yaml:"digest,omitempty"` + } + + type ServerInfo struct { + ContextPath string `yaml:"contextPath,omitempty"` + } + + type Index struct { + APIVersion string `yaml:"apiVersion"` + Entries map[string][]*ChartVersion `yaml:"entries"` + Generated time.Time `yaml:"generated,omitempty"` + ServerInfo *ServerInfo `yaml:"serverInfo,omitempty"` + } + + var result Index + assert.NoError(t, yaml.NewDecoder(resp.Body).Decode(&result)) + assert.NotEmpty(t, result.Entries) + assert.Contains(t, result.Entries, packageName) + + cvs := result.Entries[packageName] + assert.Len(t, cvs, 1) + + cv := cvs[0] + assert.Equal(t, packageName, cv.Name) + assert.Equal(t, packageVersion, cv.Version) + assert.Equal(t, packageDescription, cv.Description) + assert.Len(t, cv.Maintainers, 1) + assert.Equal(t, packageAuthor, cv.Maintainers[0].Name) + assert.Len(t, cv.Dependencies, 1) + assert.ElementsMatch(t, []string{fmt.Sprintf("%s%s/%s", setting.AppURL, url[1:], filename)}, cv.URLs) + + assert.Equal(t, url, result.ServerInfo.ContextPath) + }) +} diff --git a/integrations/api_packages_maven_test.go b/integrations/api_packages_maven_test.go new file mode 100644 index 0000000000..c7c4542685 --- /dev/null +++ b/integrations/api_packages_maven_test.go @@ -0,0 +1,205 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "fmt" + "net/http" + "strings" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/packages/maven" + + "github.com/stretchr/testify/assert" +) + +func TestPackageMaven(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + groupID := "com.gitea" + artifactID := "test-project" + packageName := groupID + "-" + artifactID + packageVersion := "1.0.1" + packageDescription := "Test Description" + + root := fmt.Sprintf("/api/packages/%s/maven/%s/%s", user.Name, strings.ReplaceAll(groupID, ".", "/"), artifactID) + filename := fmt.Sprintf("%s-%s.jar", packageName, packageVersion) + + putFile := func(t *testing.T, path, content string, expectedStatus int) { + req := NewRequestWithBody(t, "PUT", root+path, strings.NewReader(content)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, expectedStatus) + } + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + putFile(t, fmt.Sprintf("/%s/%s", packageVersion, filename), "test", http.StatusCreated) + putFile(t, "/maven-metadata.xml", "test", http.StatusOK) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeMaven) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.Nil(t, pd.SemVer) + assert.Nil(t, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, filename, pfs[0].Name) + assert.False(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + 
assert.NoError(t, err) + assert.Equal(t, int64(4), pb.Size) + }) + + t.Run("UploadExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + putFile(t, fmt.Sprintf("/%s/%s", packageVersion, filename), "test", http.StatusBadRequest) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/%s/%s", root, packageVersion, filename)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, []byte("test"), resp.Body.Bytes()) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeMaven) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(0), pvs[0].DownloadCount) + }) + + t.Run("UploadVerifySHA1", func(t *testing.T) { + defer PrintCurrentTest(t)() + + t.Run("Missmatch", func(t *testing.T) { + defer PrintCurrentTest(t)() + + putFile(t, fmt.Sprintf("/%s/%s.sha1", packageVersion, filename), "test", http.StatusBadRequest) + }) + t.Run("Valid", func(t *testing.T) { + defer PrintCurrentTest(t)() + + putFile(t, fmt.Sprintf("/%s/%s.sha1", packageVersion, filename), "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", http.StatusOK) + }) + }) + + pomContent := ` + + ` + groupID + ` + ` + artifactID + ` + ` + packageVersion + ` + ` + packageDescription + ` +` + + t.Run("UploadPOM", func(t *testing.T) { + defer PrintCurrentTest(t)() + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeMaven) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.Nil(t, pd.Metadata) + + putFile(t, fmt.Sprintf("/%s/%s.pom", packageVersion, filename), pomContent, http.StatusCreated) + + pvs, err = packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeMaven) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err = packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.IsType(t, &maven.Metadata{}, pd.Metadata) + assert.Equal(t, packageDescription, pd.Metadata.(*maven.Metadata).Description) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 2) + i := 0 + if strings.HasSuffix(pfs[1].Name, ".pom") { + i = 1 + } + assert.Equal(t, filename+".pom", pfs[i].Name) + assert.True(t, pfs[i].IsLead) + }) + + t.Run("DownloadPOM", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/%s/%s.pom", root, packageVersion, filename)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, []byte(pomContent), resp.Body.Bytes()) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeMaven) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(1), pvs[0].DownloadCount) + }) + + t.Run("DownloadChecksums", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/1.2.3/%s", root, filename)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusNotFound) + + for key, checksum := range map[string]string{ + "md5": "098f6bcd4621d373cade4e832627b4f6", + "sha1": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", + "sha256": "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08", + "sha512": 
"ee26b0dd4af7e749aa1a8ee3c10ae9923f618980772e473f8819a5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5fa9ad8e6f57f50028a8ff", + } { + req := NewRequest(t, "GET", fmt.Sprintf("%s/%s/%s.%s", root, packageVersion, filename, key)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, checksum, resp.Body.String()) + } + }) + + t.Run("DownloadMetadata", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", root+"/maven-metadata.xml") + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + expectedMetadata := `` + "\ncom.giteatest-project1.0.11.0.11.0.1" + assert.Equal(t, expectedMetadata, resp.Body.String()) + + for key, checksum := range map[string]string{ + "md5": "6bee0cebaaa686d658adf3e7e16371a0", + "sha1": "8696abce499fe84d9ea93e5492abe7147e195b6c", + "sha256": "3f48322f81c4b2c3bb8649ae1e5c9801476162b520e1c2734ac06b2c06143208", + "sha512": "cb075aa2e2ef1a83cdc14dd1e08c505b72d633399b39e73a21f00f0deecb39a3e2c79f157c1163f8a3854828750706e0dec3a0f5e4778e91f8ec2cf351a855f2", + } { + req := NewRequest(t, "GET", fmt.Sprintf("%s/maven-metadata.xml.%s", root, key)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, checksum, resp.Body.String()) + } + }) +} diff --git a/integrations/api_packages_npm_test.go b/integrations/api_packages_npm_test.go new file mode 100644 index 0000000000..28a3711939 --- /dev/null +++ b/integrations/api_packages_npm_test.go @@ -0,0 +1,222 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "encoding/base64" + "fmt" + "net/http" + "net/url" + "strings" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/packages/npm" + "code.gitea.io/gitea/modules/setting" + + "github.com/stretchr/testify/assert" +) + +func TestPackageNpm(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + token := fmt.Sprintf("Bearer %s", getTokenForLoggedInUser(t, loginUser(t, user.Name))) + + packageName := "@scope/test-package" + packageVersion := "1.0.1-pre" + packageTag := "latest" + packageTag2 := "release" + packageAuthor := "KN4CK3R" + packageDescription := "Test Description" + + data := "H4sIAAAAAAAA/ytITM5OTE/VL4DQelnF+XkMVAYGBgZmJiYK2MRBwNDcSIHB2NTMwNDQzMwAqA7IMDUxA9LUdgg2UFpcklgEdAql5kD8ogCnhwio5lJQUMpLzE1VslJQcihOzi9I1S9JLS7RhSYIJR2QgrLUouLM/DyQGkM9Az1D3YIiqExKanFyUWZBCVQ2BKhVwQVJDKwosbQkI78IJO/tZ+LsbRykxFXLNdA+HwWjYBSMgpENACgAbtAACAAA" + upload := `{ + "_id": "` + packageName + `", + "name": "` + packageName + `", + "description": "` + packageDescription + `", + "dist-tags": { + "` + packageTag + `": "` + packageVersion + `" + }, + "versions": { + "` + packageVersion + `": { + "name": "` + packageName + `", + "version": "` + packageVersion + `", + "description": "` + packageDescription + `", + "author": { + "name": "` + packageAuthor + `" + }, + "dist": { + "integrity": "sha512-yA4FJsVhetynGfOC1jFf79BuS+jrHbm0fhh+aHzCQkOaOBXKf9oBnC4a6DnLLnEsHQDRLYd00cwj8sCXpC+wIg==", + "shasum": "aaa7eaf852a948b0aa05afeda35b1badca155d90" + } + } + }, + "_attachments": { + "` + packageName + `-` + packageVersion + `.tgz": { + "data": "` + data + 
`" + } + } + }` + + root := fmt.Sprintf("/api/packages/%s/npm/%s", user.Name, url.QueryEscape(packageName)) + tagsRoot := fmt.Sprintf("/api/packages/%s/npm/-/package/%s/dist-tags", user.Name, url.QueryEscape(packageName)) + filename := fmt.Sprintf("%s-%s.tgz", strings.Split(packageName, "/")[1], packageVersion) + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", root, strings.NewReader(upload)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeNpm) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &npm.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + assert.Len(t, pd.Properties, 1) + assert.Equal(t, npm.TagProperty, pd.Properties[0].Name) + assert.Equal(t, packageTag, pd.Properties[0].Value) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, filename, pfs[0].Name) + assert.True(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(192), pb.Size) + }) + + t.Run("UploadExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", root, strings.NewReader(upload)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusBadRequest) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/-/%s/%s", root, packageVersion, filename)) + req = addTokenAuthHeader(req, token) + resp := MakeRequest(t, req, http.StatusOK) + + b, _ := base64.StdEncoding.DecodeString(data) + assert.Equal(t, b, resp.Body.Bytes()) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeNpm) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(1), pvs[0].DownloadCount) + }) + + t.Run("PackageMetadata", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("/api/packages/%s/npm/%s", user.Name, "does-not-exist")) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", root) + req = addTokenAuthHeader(req, token) + resp := MakeRequest(t, req, http.StatusOK) + + var result npm.PackageMetadata + DecodeJSON(t, resp, &result) + + assert.Equal(t, packageName, result.ID) + assert.Equal(t, packageName, result.Name) + assert.Equal(t, packageDescription, result.Description) + assert.Contains(t, result.DistTags, packageTag) + assert.Equal(t, packageVersion, result.DistTags[packageTag]) + assert.Equal(t, packageAuthor, result.Author.Name) + assert.Contains(t, result.Versions, packageVersion) + pmv := result.Versions[packageVersion] + assert.Equal(t, fmt.Sprintf("%s@%s", packageName, packageVersion), pmv.ID) + assert.Equal(t, packageName, pmv.Name) + assert.Equal(t, packageDescription, pmv.Description) + assert.Equal(t, packageAuthor, pmv.Author.Name) + assert.Equal(t, "sha512-yA4FJsVhetynGfOC1jFf79BuS+jrHbm0fhh+aHzCQkOaOBXKf9oBnC4a6DnLLnEsHQDRLYd00cwj8sCXpC+wIg==", pmv.Dist.Integrity) + assert.Equal(t, "aaa7eaf852a948b0aa05afeda35b1badca155d90", pmv.Dist.Shasum) + assert.Equal(t, 
fmt.Sprintf("%s%s/-/%s/%s", setting.AppURL, root[1:], packageVersion, filename), pmv.Dist.Tarball) + }) + + t.Run("AddTag", func(t *testing.T) { + defer PrintCurrentTest(t)() + + test := func(t *testing.T, status int, tag, version string) { + req := NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/%s", tagsRoot, tag), strings.NewReader(`"`+version+`"`)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, status) + } + + test(t, http.StatusBadRequest, "1.0", packageVersion) + test(t, http.StatusBadRequest, "v1.0", packageVersion) + test(t, http.StatusNotFound, packageTag2, "1.2") + test(t, http.StatusOK, packageTag, packageVersion) + test(t, http.StatusOK, packageTag2, packageVersion) + }) + + t.Run("ListTags", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", tagsRoot) + req = addTokenAuthHeader(req, token) + resp := MakeRequest(t, req, http.StatusOK) + + var result map[string]string + DecodeJSON(t, resp, &result) + + assert.Len(t, result, 2) + assert.Contains(t, result, packageTag) + assert.Equal(t, packageVersion, result[packageTag]) + assert.Contains(t, result, packageTag2) + assert.Equal(t, packageVersion, result[packageTag2]) + }) + + t.Run("PackageMetadataDistTags", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", root) + req = addTokenAuthHeader(req, token) + resp := MakeRequest(t, req, http.StatusOK) + + var result npm.PackageMetadata + DecodeJSON(t, resp, &result) + + assert.Len(t, result.DistTags, 2) + assert.Contains(t, result.DistTags, packageTag) + assert.Equal(t, packageVersion, result.DistTags[packageTag]) + assert.Contains(t, result.DistTags, packageTag2) + assert.Equal(t, packageVersion, result.DistTags[packageTag2]) + }) + + t.Run("DeleteTag", func(t *testing.T) { + defer PrintCurrentTest(t)() + + test := func(t *testing.T, status int, tag string) { + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/%s", tagsRoot, tag)) + req = addTokenAuthHeader(req, token) + MakeRequest(t, req, status) + } + + test(t, http.StatusBadRequest, "v1.0") + test(t, http.StatusBadRequest, "1.0") + test(t, http.StatusOK, "dummy") + test(t, http.StatusOK, packageTag2) + }) +} diff --git a/integrations/api_packages_nuget_test.go b/integrations/api_packages_nuget_test.go new file mode 100644 index 0000000000..e69dd0ff9b --- /dev/null +++ b/integrations/api_packages_nuget_test.go @@ -0,0 +1,381 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package integrations + +import ( + "archive/zip" + "bytes" + "encoding/base64" + "fmt" + "io" + "net/http" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + nuget_module "code.gitea.io/gitea/modules/packages/nuget" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/routers/api/packages/nuget" + + "github.com/stretchr/testify/assert" +) + +func TestPackageNuGet(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + packageName := "test.package" + packageVersion := "1.0.3" + packageAuthors := "KN4CK3R" + packageDescription := "Gitea Test Package" + symbolFilename := "test.pdb" + symbolID := "d910bb6948bd4c6cb40155bcf52c3c94" + + var buf bytes.Buffer + archive := zip.NewWriter(&buf) + w, _ := archive.Create("package.nuspec") + w.Write([]byte(` + + + ` + packageName + ` + ` + packageVersion + ` + ` + packageAuthors + ` + ` + packageDescription + ` + + + + + `)) + archive.Close() + content := buf.Bytes() + + url := fmt.Sprintf("/api/packages/%s/nuget", user.Name) + + t.Run("ServiceIndex", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/index.json", url)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result nuget.ServiceIndexResponse + DecodeJSON(t, resp, &result) + + assert.Equal(t, "3.0.0", result.Version) + assert.NotEmpty(t, result.Resources) + + root := setting.AppURL + url[1:] + for _, r := range result.Resources { + switch r.Type { + case "SearchQueryService": + fallthrough + case "SearchQueryService/3.0.0-beta": + fallthrough + case "SearchQueryService/3.0.0-rc": + assert.Equal(t, root+"/query", r.ID) + case "RegistrationsBaseUrl": + fallthrough + case "RegistrationsBaseUrl/3.0.0-beta": + fallthrough + case "RegistrationsBaseUrl/3.0.0-rc": + assert.Equal(t, root+"/registration", r.ID) + case "PackageBaseAddress/3.0.0": + assert.Equal(t, root+"/package", r.ID) + case "PackagePublish/2.0.0": + assert.Equal(t, root, r.ID) + } + } + }) + + t.Run("Upload", func(t *testing.T) { + t.Run("DependencyPackage", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", url, bytes.NewReader(content)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeNuGet) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &nuget_module.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, fmt.Sprintf("%s.%s.nupkg", packageName, packageVersion), pfs[0].Name) + assert.True(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(len(content)), pb.Size) + + req = NewRequestWithBody(t, "PUT", url, bytes.NewReader(content)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusBadRequest) + }) + + t.Run("SymbolPackage", func(t *testing.T) { + defer PrintCurrentTest(t)() + + createPackage := 
func(id, packageType string) io.Reader { + var buf bytes.Buffer + archive := zip.NewWriter(&buf) + + w, _ := archive.Create("package.nuspec") + w.Write([]byte(` + + + ` + id + ` + ` + packageVersion + ` + ` + packageAuthors + ` + ` + packageDescription + ` + + + `)) + + w, _ = archive.Create(symbolFilename) + b, _ := base64.StdEncoding.DecodeString(`QlNKQgEAAQAAAAAADAAAAFBEQiB2MS4wAAAAAAAABgB8AAAAWAAAACNQZGIAAAAA1AAAAAgBAAAj +fgAA3AEAAAQAAAAjU3RyaW5ncwAAAADgAQAABAAAACNVUwDkAQAAMAAAACNHVUlEAAAAFAIAACgB +AAAjQmxvYgAAAGm7ENm9SGxMtAFVvPUsPJTF6PbtAAAAAFcVogEJAAAAAQAAAA==`) + w.Write(b) + + archive.Close() + return &buf + } + + req := NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/symbolpackage", url), createPackage("unknown-package", "SymbolsPackage")) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/symbolpackage", url), createPackage(packageName, "DummyPackage")) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusBadRequest) + + req = NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/symbolpackage", url), createPackage(packageName, "SymbolsPackage")) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeNuGet) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &nuget_module.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 3) + for _, pf := range pfs { + switch pf.Name { + case fmt.Sprintf("%s.%s.nupkg", packageName, packageVersion): + case fmt.Sprintf("%s.%s.snupkg", packageName, packageVersion): + assert.False(t, pf.IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pf.BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(616), pb.Size) + case symbolFilename: + assert.False(t, pf.IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pf.BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(160), pb.Size) + + pps, err := packages.GetProperties(db.DefaultContext, packages.PropertyTypeFile, pf.ID) + assert.NoError(t, err) + assert.Len(t, pps, 1) + assert.Equal(t, nuget_module.PropertySymbolID, pps[0].Name) + assert.Equal(t, symbolID, pps[0].Value) + default: + assert.Fail(t, "unexpected file: %v", pf.Name) + } + } + + req = NewRequestWithBody(t, "PUT", fmt.Sprintf("%s/symbolpackage", url), createPackage(packageName, "SymbolsPackage")) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusBadRequest) + }) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + checkDownloadCount := func(count int64) { + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeNuGet) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, count, pvs[0].DownloadCount) + } + + checkDownloadCount(0) + + req := NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s.%s.nupkg", url, packageName, packageVersion, packageName, packageVersion)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, content, resp.Body.Bytes()) + + checkDownloadCount(1) + + req = NewRequest(t, "GET", 
fmt.Sprintf("%s/package/%s/%s/%s.%s.snupkg", url, packageName, packageVersion, packageName, packageVersion)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusOK) + + checkDownloadCount(1) + + t.Run("Symbol", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/symbols/%s/%sFFFFFFFF/gitea.pdb", url, symbolFilename, symbolID)) + MakeRequest(t, req, http.StatusBadRequest) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/symbols/%s/%sFFFFFFFF/%s", url, symbolFilename, "00000000000000000000000000000000", symbolFilename)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/symbols/%s/%sFFFFFFFF/%s", url, symbolFilename, symbolID, symbolFilename)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusOK) + + checkDownloadCount(1) + }) + }) + + t.Run("SearchService", func(t *testing.T) { + defer PrintCurrentTest(t)() + + cases := []struct { + Query string + Skip int + Take int + ExpectedTotal int64 + ExpectedResults int + }{ + {"", 0, 0, 1, 1}, + {"", 0, 10, 1, 1}, + {"gitea", 0, 10, 0, 0}, + {"test", 0, 10, 1, 1}, + {"test", 1, 10, 1, 0}, + } + + for i, c := range cases { + req := NewRequest(t, "GET", fmt.Sprintf("%s/query?q=%s&skip=%d&take=%d", url, c.Query, c.Skip, c.Take)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result nuget.SearchResultResponse + DecodeJSON(t, resp, &result) + + assert.Equal(t, c.ExpectedTotal, result.TotalHits, "case %d: unexpected total hits", i) + assert.Len(t, result.Data, c.ExpectedResults, "case %d: unexpected result count", i) + } + }) + + t.Run("RegistrationService", func(t *testing.T) { + indexURL := fmt.Sprintf("%s%s/registration/%s/index.json", setting.AppURL, url[1:], packageName) + leafURL := fmt.Sprintf("%s%s/registration/%s/%s.json", setting.AppURL, url[1:], packageName, packageVersion) + contentURL := fmt.Sprintf("%s%s/package/%s/%s/%s.%s.nupkg", setting.AppURL, url[1:], packageName, packageVersion, packageName, packageVersion) + + t.Run("RegistrationIndex", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/registration/%s/index.json", url, packageName)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result nuget.RegistrationIndexResponse + DecodeJSON(t, resp, &result) + + assert.Equal(t, indexURL, result.RegistrationIndexURL) + assert.Equal(t, 1, result.Count) + assert.Len(t, result.Pages, 1) + assert.Equal(t, indexURL, result.Pages[0].RegistrationPageURL) + assert.Equal(t, packageVersion, result.Pages[0].Lower) + assert.Equal(t, packageVersion, result.Pages[0].Upper) + assert.Equal(t, 1, result.Pages[0].Count) + assert.Len(t, result.Pages[0].Items, 1) + assert.Equal(t, packageName, result.Pages[0].Items[0].CatalogEntry.ID) + assert.Equal(t, packageVersion, result.Pages[0].Items[0].CatalogEntry.Version) + assert.Equal(t, packageAuthors, result.Pages[0].Items[0].CatalogEntry.Authors) + assert.Equal(t, packageDescription, result.Pages[0].Items[0].CatalogEntry.Description) + assert.Equal(t, leafURL, result.Pages[0].Items[0].CatalogEntry.CatalogLeafURL) + assert.Equal(t, contentURL, result.Pages[0].Items[0].CatalogEntry.PackageContentURL) + }) + + t.Run("RegistrationLeaf", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/registration/%s/%s.json", url, packageName, packageVersion)) + req = 
AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result nuget.RegistrationLeafResponse + DecodeJSON(t, resp, &result) + + assert.Equal(t, leafURL, result.RegistrationLeafURL) + assert.Equal(t, contentURL, result.PackageContentURL) + assert.Equal(t, indexURL, result.RegistrationIndexURL) + }) + }) + + t.Run("PackageService", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/index.json", url, packageName)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result nuget.PackageVersionsResponse + DecodeJSON(t, resp, &result) + + assert.Len(t, result.Versions, 1) + assert.Equal(t, packageVersion, result.Versions[0]) + }) + + t.Run("Delete", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/%s/%s", url, packageName, packageVersion)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusOK) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeNuGet) + assert.NoError(t, err) + assert.Empty(t, pvs) + }) + + t.Run("DownloadNotExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s.%s.nupkg", url, packageName, packageVersion, packageName, packageVersion)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("%s/package/%s/%s/%s.%s.snupkg", url, packageName, packageVersion, packageName, packageVersion)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusNotFound) + }) + + t.Run("DeleteNotExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/package/%s/%s", url, packageName, packageVersion)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusNotFound) + }) +} diff --git a/integrations/api_packages_pypi_test.go b/integrations/api_packages_pypi_test.go new file mode 100644 index 0000000000..5d610df39d --- /dev/null +++ b/integrations/api_packages_pypi_test.go @@ -0,0 +1,181 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
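+
+// TestPackagePyPI (below) uploads a wheel and an sdist for the same version via
+// the multipart upload API, rejects a checksum mismatch and duplicate files,
+// downloads both files, and checks the generated PEP 503 style simple index
+// page (sha256 fragment and data-requires-python attribute).
+//
+// Sketch of client usage; the instance URL and the user2/password credentials
+// are assumptions for illustration:
+//
+//	twine upload --repository-url http://localhost:3000/api/packages/user2/pypi \
+//	  -u user2 -p password dist/*
+//	pip install --index-url http://localhost:3000/api/packages/user2/pypi/simple/ test-package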
+ +package integrations + +import ( + "bytes" + "fmt" + "io" + "mime/multipart" + "net/http" + "regexp" + "strings" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/packages/pypi" + + "github.com/stretchr/testify/assert" +) + +func TestPackagePyPI(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + packageName := "test-package" + packageVersion := "1.0.1" + packageAuthor := "KN4CK3R" + packageDescription := "Test Description" + + content := "test" + hashSHA256 := "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08" + + root := fmt.Sprintf("/api/packages/%s/pypi", user.Name) + + uploadFile := func(t *testing.T, filename, content string, expectedStatus int) { + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + part, _ := writer.CreateFormFile("content", filename) + _, _ = io.Copy(part, strings.NewReader(content)) + + writer.WriteField("name", packageName) + writer.WriteField("version", packageVersion) + writer.WriteField("author", packageAuthor) + writer.WriteField("summary", packageDescription) + writer.WriteField("description", packageDescription) + writer.WriteField("sha256_digest", hashSHA256) + writer.WriteField("requires_python", "3.6") + + _ = writer.Close() + + req := NewRequestWithBody(t, "POST", root, body) + req.Header.Add("Content-Type", writer.FormDataContentType()) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, expectedStatus) + } + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + filename := "test.whl" + uploadFile(t, filename, content, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypePyPI) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &pypi.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, filename, pfs[0].Name) + assert.True(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(4), pb.Size) + }) + + t.Run("UploadAddFile", func(t *testing.T) { + defer PrintCurrentTest(t)() + + filename := "test.tar.gz" + uploadFile(t, filename, content, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypePyPI) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &pypi.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 2) + + pf, err := packages.GetFileForVersionByName(db.DefaultContext, pvs[0].ID, filename, packages.EmptyFileKey) + assert.NoError(t, err) + assert.Equal(t, filename, pf.Name) + assert.True(t, pf.IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pf.BlobID) + 
assert.NoError(t, err) + assert.Equal(t, int64(4), pb.Size) + }) + + t.Run("UploadHashMismatch", func(t *testing.T) { + defer PrintCurrentTest(t)() + + filename := "test2.whl" + uploadFile(t, filename, "dummy", http.StatusBadRequest) + }) + + t.Run("UploadExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + uploadFile(t, "test.whl", content, http.StatusBadRequest) + uploadFile(t, "test.tar.gz", content, http.StatusBadRequest) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + downloadFile := func(filename string) { + req := NewRequest(t, "GET", fmt.Sprintf("%s/files/%s/%s/%s", root, packageName, packageVersion, filename)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, []byte(content), resp.Body.Bytes()) + } + + downloadFile("test.whl") + downloadFile("test.tar.gz") + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypePyPI) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(2), pvs[0].DownloadCount) + }) + + t.Run("PackageMetadata", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/simple/%s", root, packageName)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + htmlDoc := NewHTMLParser(t, resp.Body) + nodes := htmlDoc.doc.Find("a").Nodes + assert.Len(t, nodes, 2) + + hrefMatcher := regexp.MustCompile(fmt.Sprintf(`%s/files/%s/%s/test\..+#sha256-%s`, root, packageName, packageVersion, hashSHA256)) + + for _, a := range nodes { + for _, att := range a.Attr { + switch att.Key { + case "href": + assert.Regexp(t, hrefMatcher, att.Val) + case "data-requires-python": + assert.Equal(t, "3.6", att.Val) + default: + t.Fail() + } + } + } + }) +} diff --git a/integrations/api_packages_rubygems_test.go b/integrations/api_packages_rubygems_test.go new file mode 100644 index 0000000000..269bc953b4 --- /dev/null +++ b/integrations/api_packages_rubygems_test.go @@ -0,0 +1,226 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
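+
+// TestPackageRubyGems (below) pushes a prebuilt .gem file, downloads the gem and
+// its compressed gemspec, serves the specs.4.8.gz / latest_specs.4.8.gz /
+// prerelease_specs.4.8.gz enumeration endpoints, and yanks the version again.
+//
+// Sketch of client usage; the instance URL is an assumption, and gem push
+// expects the registry credentials to be configured (e.g. in ~/.gem/credentials):
+//
+//	gem push --host http://localhost:3000/api/packages/user2/rubygems gitea-1.0.5.gem
+//	gem install gitea --version 1.0.5 \
+//	  --clear-sources --source http://localhost:3000/api/packages/user2/rubygems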
+ +package integrations + +import ( + "bytes" + "encoding/base64" + "fmt" + "mime/multipart" + "net/http" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/packages/rubygems" + + "github.com/stretchr/testify/assert" +) + +func TestPackageRubyGems(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + packageName := "gitea" + packageVersion := "1.0.5" + packageFilename := "gitea-1.0.5.gem" + + gemContent, _ := base64.StdEncoding.DecodeString(`bWV0YWRhdGEuZ3oAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADAwMDA0NDQAMDAwMDAw +MAAwMDAwMDAwADAwMDAwMDAxMDQxADE0MTEwNzcyMzY2ADAxMzQ0MQAgMAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB1c3RhcgAwMHdoZWVsAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAd2hlZWwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwADAwMDAw +MDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf +iwgA9vQjYQID1VVNb9QwEL37V5he9pRsmlJAFlQckCoOXAriQIUix5nNmsYf2JOqKwS/nYmz2d3Q +qqCCKpFdadfjmfdm5nmcLMv4k9DXm6Wrv4BCcQ5GiPcelF5pJVE7y6w0IHirESS7hhDJJu4I+jhu +Mc53Tsd5kZ8y30lcuWAEH2KY7HHtQhQs4+cJkwwuwNdeB6JhtbaNDoLTL1MQsFJrqQnr8jNrJJJH +WZTHWfEiK094UYj0zYvp4Z9YAx5sA1ZpSCS3M30zeWwo2bG60FvUBjIKJts2GwMW76r0Yr9NzjN3 +YhwsGX2Ozl4dpcWwvK9d43PQtDIv9igvHwSyIIwFmXHjqTqxLY8MPkCADmQk80p2EfZ6VbM6/ue6 +/1D0Bq7/qeA/zh6W82leHmhFWUHn/JbsEfT6q7QbiCpoj8l0QcEUFLmX6kq2wBEiMjBSd+Pwt7T5 +Ot0kuXYMbkD1KOuOBnWYb7hBsAP4bhlkFRqnqpWefMZ/pHCn6+WIFGq2dgY8EQq+RvRRLJcTyZJ1 +WhHqGPTu7QdmACXdJFLwb9+ZdxErbSPKrqsMxJhAWCJ1qaqRdtu6yktcT/STsamG0qp7rsa5EL/K +MBua30uw4ynzExqYWRJDfx8/kQWN3PwsDh2jYLr1W+pZcAmCs9splvnz/Flesqhbq21bXcGG/OLh ++2fv/JTF3hgZyCW9OaZjxoZjdnBGfgKpxZyJ1QYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZGF0 +YS50YXIuZ3oAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADAwMDA0NDQAMDAwMDAwMAAw +MDAwMDAwADAwMDAwMDAwMjQyADE0MTEwNzcyMzY2ADAxMzM2MQAgMAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB1c3RhcgAwMHdoZWVsAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAd2hlZWwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwADAwMDAwMDAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAfiwgA 
+9vQjYQID7M/NCsMgDABgz32KrA/QxersK/Q17ExXIcyhlr7+HLv1sJ02KPhBCPk5JOyn881nsl2c +xI+gRDRaC3zbZ8RBCamlxGHolTFlX11kLwDFH6wp21hO2RYi/rD3bb5/7iCubFOCMbBtABzNkIjn +bvGlAnisOUE7EnOALUR2p7b06e6aV4iqqqrquJ4AAAD//wMA+sA/NQAIAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGNoZWNr +c3Vtcy55YW1sLmd6AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwMDAwNDQ0ADAwMDAwMDAAMDAw +MDAwMAAwMDAwMDAwMDQ1MAAxNDExMDc3MjM2NgAwMTQ2MTIAIDAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAdXN0YXIAMDB3aGVlbAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAHdoZWVsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMDAwMDAwMAAwMDAwMDAwAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH4sIAPb0 +I2ECA2WQOa4UQAxE8znFXGCQ21vbPyMj5wRuL0Qk6EecnmZCyKyy9FSvXq/X4/u3ryj68Xg+f/Zn +VHzGlx+/P57qvU4XxWalBKftSXOgCjNYkdRycrC5Axem+W4HqS12PNEv7836jF9vnlHxwSyxKY+y +go0cPblyHzkrZ4HF1GSVhe7mOOoasXNk2fnbUxb+19Pp9tobD/QlJKMX7y204PREh6nQ5hG9Alw6 +x4TnmtA+aekGfm6wAseog2LSgpR4Q7cYnAH3K4qAQa6A6JCC1gpuY7P+9YxE5SZ+j0eVGbaBTwBQ +iIqRUyyzLCoFCBdYNWxniapTavD97blXTzFvgoVoAsKBAtlU48cdaOmeZDpwV01OtcGwjscfeUrY +B9QBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA`) + + root := fmt.Sprintf("/api/packages/%s/rubygems", 
user.Name) + + uploadFile := func(t *testing.T, expectedStatus int) { + req := NewRequestWithBody(t, "POST", fmt.Sprintf("%s/api/v1/gems", root), bytes.NewReader(gemContent)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, expectedStatus) + } + + t.Run("Upload", func(t *testing.T) { + defer PrintCurrentTest(t)() + + uploadFile(t, http.StatusCreated) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeRubyGems) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &rubygems.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, packageFilename, pfs[0].Name) + assert.True(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.Equal(t, int64(4608), pb.Size) + }) + + t.Run("UploadExists", func(t *testing.T) { + defer PrintCurrentTest(t)() + + uploadFile(t, http.StatusBadRequest) + }) + + t.Run("Download", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/gems/%s", root, packageFilename)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, gemContent, resp.Body.Bytes()) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeRubyGems) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(1), pvs[0].DownloadCount) + }) + + t.Run("DownloadGemspec", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/quick/Marshal.4.8/%sspec.rz", root, packageFilename)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + b, _ := base64.StdEncoding.DecodeString(`eJxi4Si1EndPzbWyCi5ITc5My0xOLMnMz2M8zMIRLeGpxGWsZ6RnzGbF5hqSyempxJWeWZKayGbN +EBJqJQjWFZZaVJyZnxfN5qnEZahnoGcKkjTwVBJyB6lUKEhMzk5MTwULGngqcRaVJlWCONEMBp5K +DGAWSKc7zFhPJamg0qRK99TcYphehZLU4hKInFhGSUlBsZW+PtgZepn5+iDxECRzDUDGcfh6hoA4 +gAAAAP//MS06Gw==`) + assert.Equal(t, b, resp.Body.Bytes()) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeRubyGems) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + assert.Equal(t, int64(1), pvs[0].DownloadCount) + }) + + t.Run("EnumeratePackages", func(t *testing.T) { + defer PrintCurrentTest(t)() + + enumeratePackages := func(t *testing.T, endpoint string, expectedContent []byte) { + req := NewRequest(t, "GET", fmt.Sprintf("%s/%s", root, endpoint)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, expectedContent, resp.Body.Bytes()) + } + + b, _ := base64.StdEncoding.DecodeString(`H4sICAAAAAAA/3NwZWNzLjQuOABi4Yhmi+bwVOJKzyxJTWSzYnMNCbUSdE/NtbIKSy0qzszPi2bzVOIy1DPQM2WzZgjxVOIsKk2qBDEBAQAA///xOEYKOwAAAA==`) + enumeratePackages(t, "specs.4.8.gz", b) + b, _ = base64.StdEncoding.DecodeString(`H4sICAAAAAAA/2xhdGVzdF9zcGVjcy40LjgAYuGIZovm8FTiSs8sSU1ks2JzDQm1EnRPzbWyCkstKs7Mz4tm81TiMtQz0DNls2YI8VTiLCpNqgQxAQEAAP//8ThGCjsAAAA=`) + enumeratePackages(t, "latest_specs.4.8.gz", b) + b, _ = base64.StdEncoding.DecodeString(`H4sICAAAAAAA/3ByZXJlbGVhc2Vfc3BlY3MuNC44AGLhiGYABAAA//9snXr5BAAAAA==`) + enumeratePackages(t, 
"prerelease_specs.4.8.gz", b) + }) + + t.Run("Delete", func(t *testing.T) { + defer PrintCurrentTest(t)() + + body := bytes.Buffer{} + writer := multipart.NewWriter(&body) + writer.WriteField("gem_name", packageName) + writer.WriteField("version", packageVersion) + writer.Close() + + req := NewRequestWithBody(t, "DELETE", fmt.Sprintf("%s/api/v1/gems/yank", root), &body) + req.Header.Add("Content-Type", writer.FormDataContentType()) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusOK) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeRubyGems) + assert.NoError(t, err) + assert.Empty(t, pvs) + }) +} diff --git a/integrations/api_packages_test.go b/integrations/api_packages_test.go new file mode 100644 index 0000000000..1f24807060 --- /dev/null +++ b/integrations/api_packages_test.go @@ -0,0 +1,167 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "bytes" + "fmt" + "net/http" + "testing" + "time" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + container_model "code.gitea.io/gitea/models/packages/container" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + api "code.gitea.io/gitea/modules/structs" + packages_service "code.gitea.io/gitea/services/packages" + + "github.com/stretchr/testify/assert" +) + +func TestPackageAPI(t *testing.T) { + defer prepareTestEnv(t)() + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}).(*user_model.User) + session := loginUser(t, user.Name) + token := getTokenForLoggedInUser(t, session) + + packageName := "test-package" + packageVersion := "1.0.3" + filename := "file.bin" + + url := fmt.Sprintf("/api/packages/%s/generic/%s/%s/%s", user.Name, packageName, packageVersion, filename) + req := NewRequestWithBody(t, "PUT", url, bytes.NewReader([]byte{})) + AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusCreated) + + t.Run("ListPackages", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("/api/v1/packages/%s?token=%s", user.Name, token)) + resp := MakeRequest(t, req, http.StatusOK) + + var apiPackages []*api.Package + DecodeJSON(t, resp, &apiPackages) + + assert.Len(t, apiPackages, 1) + assert.Equal(t, string(packages_model.TypeGeneric), apiPackages[0].Type) + assert.Equal(t, packageName, apiPackages[0].Name) + assert.Equal(t, packageVersion, apiPackages[0].Version) + assert.NotNil(t, apiPackages[0].Creator) + assert.Equal(t, user.Name, apiPackages[0].Creator.UserName) + }) + + t.Run("GetPackage", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("/api/v1/packages/%s/dummy/%s/%s?token=%s", user.Name, packageName, packageVersion, token)) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/packages/%s/generic/%s/%s?token=%s", user.Name, packageName, packageVersion, token)) + resp := MakeRequest(t, req, http.StatusOK) + + var p *api.Package + DecodeJSON(t, resp, &p) + + assert.Equal(t, string(packages_model.TypeGeneric), p.Type) + assert.Equal(t, packageName, p.Name) + assert.Equal(t, packageVersion, p.Version) + assert.NotNil(t, p.Creator) + assert.Equal(t, user.Name, p.Creator.UserName) + + t.Run("RepositoryLink", func(t *testing.T) { + defer PrintCurrentTest(t)() + + p, err := 
packages_model.GetPackageByName(db.DefaultContext, user.ID, packages_model.TypeGeneric, packageName) + assert.NoError(t, err) + + // no repository link + req := NewRequest(t, "GET", fmt.Sprintf("/api/v1/packages/%s/generic/%s/%s?token=%s", user.Name, packageName, packageVersion, token)) + resp := MakeRequest(t, req, http.StatusOK) + + var ap1 *api.Package + DecodeJSON(t, resp, &ap1) + assert.Nil(t, ap1.Repository) + + // link to public repository + assert.NoError(t, packages_model.SetRepositoryLink(db.DefaultContext, p.ID, 1)) + + req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/packages/%s/generic/%s/%s?token=%s", user.Name, packageName, packageVersion, token)) + resp = MakeRequest(t, req, http.StatusOK) + + var ap2 *api.Package + DecodeJSON(t, resp, &ap2) + assert.NotNil(t, ap2.Repository) + assert.EqualValues(t, 1, ap2.Repository.ID) + + // link to private repository + assert.NoError(t, packages_model.SetRepositoryLink(db.DefaultContext, p.ID, 2)) + + req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/packages/%s/generic/%s/%s?token=%s", user.Name, packageName, packageVersion, token)) + resp = MakeRequest(t, req, http.StatusOK) + + var ap3 *api.Package + DecodeJSON(t, resp, &ap3) + assert.Nil(t, ap3.Repository) + + assert.NoError(t, packages_model.UnlinkRepositoryFromAllPackages(db.DefaultContext, 2)) + }) + }) + + t.Run("ListPackageFiles", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("/api/v1/packages/%s/dummy/%s/%s/files?token=%s", user.Name, packageName, packageVersion, token)) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("/api/v1/packages/%s/generic/%s/%s/files?token=%s", user.Name, packageName, packageVersion, token)) + resp := MakeRequest(t, req, http.StatusOK) + + var files []*api.PackageFile + DecodeJSON(t, resp, &files) + + assert.Len(t, files, 1) + assert.Equal(t, int64(0), files[0].Size) + assert.Equal(t, filename, files[0].Name) + assert.Equal(t, "d41d8cd98f00b204e9800998ecf8427e", files[0].HashMD5) + assert.Equal(t, "da39a3ee5e6b4b0d3255bfef95601890afd80709", files[0].HashSHA1) + assert.Equal(t, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", files[0].HashSHA256) + assert.Equal(t, "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", files[0].HashSHA512) + }) + + t.Run("DeletePackage", func(t *testing.T) { + defer PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", fmt.Sprintf("/api/v1/packages/%s/dummy/%s/%s?token=%s", user.Name, packageName, packageVersion, token)) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "DELETE", fmt.Sprintf("/api/v1/packages/%s/generic/%s/%s?token=%s", user.Name, packageName, packageVersion, token)) + MakeRequest(t, req, http.StatusNoContent) + }) +} + +func TestPackageCleanup(t *testing.T) { + defer prepareTestEnv(t)() + + time.Sleep(time.Second) + + pbs, err := packages_model.FindExpiredUnreferencedBlobs(db.DefaultContext, time.Duration(0)) + assert.NoError(t, err) + assert.NotEmpty(t, pbs) + + _, err = packages_model.GetInternalVersionByNameAndVersion(db.DefaultContext, 2, packages_model.TypeContainer, "test", container_model.UploadVersion) + assert.NoError(t, err) + + err = packages_service.Cleanup(nil, time.Duration(0)) + assert.NoError(t, err) + + pbs, err = packages_model.FindExpiredUnreferencedBlobs(db.DefaultContext, time.Duration(0)) + assert.NoError(t, err) + assert.Empty(t, pbs) + + _, err = 
packages_model.GetInternalVersionByNameAndVersion(db.DefaultContext, 2, packages_model.TypeContainer, "test", container_model.UploadVersion) + assert.ErrorIs(t, err, packages_model.ErrPackageNotExist) +} diff --git a/integrations/api_private_serv_test.go b/integrations/api_private_serv_test.go index a58d927cb9..fd3cb25ef2 100644 --- a/integrations/api_private_serv_test.go +++ b/integrations/api_private_serv_test.go @@ -47,7 +47,7 @@ func TestAPIPrivateServ(t *testing.T) { results, err := private.ServCommand(ctx, 1, "user2", "repo1", perm.AccessModeWrite, "git-upload-pack", "") assert.NoError(t, err) assert.False(t, results.IsWiki) - assert.False(t, results.IsDeployKey) + assert.Zero(t, results.DeployKeyID) assert.Equal(t, int64(1), results.KeyID) assert.Equal(t, "user2@localhost", results.KeyName) assert.Equal(t, "user2", results.UserName) @@ -70,7 +70,7 @@ func TestAPIPrivateServ(t *testing.T) { results, err = private.ServCommand(ctx, 1, "user15", "big_test_public_1", perm.AccessModeRead, "git-upload-pack", "") assert.NoError(t, err) assert.False(t, results.IsWiki) - assert.False(t, results.IsDeployKey) + assert.Zero(t, results.DeployKeyID) assert.Equal(t, int64(1), results.KeyID) assert.Equal(t, "user2@localhost", results.KeyName) assert.Equal(t, "user2", results.UserName) @@ -92,7 +92,7 @@ func TestAPIPrivateServ(t *testing.T) { results, err = private.ServCommand(ctx, deployKey.KeyID, "user15", "big_test_private_1", perm.AccessModeRead, "git-upload-pack", "") assert.NoError(t, err) assert.False(t, results.IsWiki) - assert.True(t, results.IsDeployKey) + assert.NotZero(t, results.DeployKeyID) assert.Equal(t, deployKey.KeyID, results.KeyID) assert.Equal(t, "test-deploy", results.KeyName) assert.Equal(t, "user15", results.UserName) @@ -129,7 +129,7 @@ func TestAPIPrivateServ(t *testing.T) { results, err = private.ServCommand(ctx, deployKey.KeyID, "user15", "big_test_private_2", perm.AccessModeRead, "git-upload-pack", "") assert.NoError(t, err) assert.False(t, results.IsWiki) - assert.True(t, results.IsDeployKey) + assert.NotZero(t, results.DeployKeyID) assert.Equal(t, deployKey.KeyID, results.KeyID) assert.Equal(t, "test-deploy", results.KeyName) assert.Equal(t, "user15", results.UserName) @@ -142,7 +142,7 @@ func TestAPIPrivateServ(t *testing.T) { results, err = private.ServCommand(ctx, deployKey.KeyID, "user15", "big_test_private_2", perm.AccessModeWrite, "git-upload-pack", "") assert.NoError(t, err) assert.False(t, results.IsWiki) - assert.True(t, results.IsDeployKey) + assert.NotZero(t, results.DeployKeyID) assert.Equal(t, deployKey.KeyID, results.KeyID) assert.Equal(t, "test-deploy", results.KeyName) assert.Equal(t, "user15", results.UserName) diff --git a/integrations/api_pull_test.go b/integrations/api_pull_test.go index b6b8ad8734..a1c2a4c3e6 100644 --- a/integrations/api_pull_test.go +++ b/integrations/api_pull_test.go @@ -77,7 +77,7 @@ func TestAPICreatePullSuccess(t *testing.T) { Base: "master", Title: "create a failure pr", }) - session.MakeRequest(t, req, 201) + session.MakeRequest(t, req, http.StatusCreated) session.MakeRequest(t, req, http.StatusUnprocessableEntity) // second request should fail } @@ -105,7 +105,7 @@ func TestAPICreatePullWithFieldsSuccess(t *testing.T) { req := NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls?token=%s", owner10.Name, repo10.Name, token), opts) - res := session.MakeRequest(t, req, 201) + res := session.MakeRequest(t, req, http.StatusCreated) pull := new(api.PullRequest) DecodeJSON(t, res, pull) @@ -165,7 +165,7 @@ 
func TestAPIEditPull(t *testing.T) { Title: "create a success pr", }) pull := new(api.PullRequest) - resp := session.MakeRequest(t, req, 201) + resp := session.MakeRequest(t, req, http.StatusCreated) DecodeJSON(t, resp, pull) assert.EqualValues(t, "master", pull.Base.Name) @@ -173,12 +173,12 @@ func TestAPIEditPull(t *testing.T) { Base: "feature/1", Title: "edit a this pr", }) - resp = session.MakeRequest(t, req, 201) + resp = session.MakeRequest(t, req, http.StatusCreated) DecodeJSON(t, resp, pull) assert.EqualValues(t, "feature/1", pull.Base.Name) req = NewRequestWithJSON(t, http.MethodPatch, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d?token=%s", owner10.Name, repo10.Name, pull.Index, token), &api.EditPullRequestOption{ Base: "not-exist", }) - session.MakeRequest(t, req, 404) + session.MakeRequest(t, req, http.StatusNotFound) } diff --git a/integrations/api_releases_test.go b/integrations/api_releases_test.go index 815b749110..ebb76cc163 100644 --- a/integrations/api_releases_test.go +++ b/integrations/api_releases_test.go @@ -25,12 +25,11 @@ func TestAPIListReleases(t *testing.T) { repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - session := loginUser(t, user2.LowerName) - token := getTokenForLoggedInUser(t, session) + token := getUserToken(t, user2.LowerName) link, _ := url.Parse(fmt.Sprintf("/api/v1/repos/%s/%s/releases", user2.Name, repo.Name)) link.RawQuery = url.Values{"token": {token}}.Encode() - resp := session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) + resp := MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) var apiReleases []*api.Release DecodeJSON(t, resp, &apiReleases) if assert.Len(t, apiReleases, 3) { @@ -53,13 +52,11 @@ func TestAPIListReleases(t *testing.T) { // test filter testFilterByLen := func(auth bool, query url.Values, expectedLength int, msgAndArgs ...string) { - link.RawQuery = query.Encode() if auth { query.Set("token", token) - resp = session.MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) - } else { - resp = MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) } + link.RawQuery = query.Encode() + resp = MakeRequest(t, NewRequest(t, "GET", link.String()), http.StatusOK) DecodeJSON(t, resp, &apiReleases) assert.Len(t, apiReleases, expectedLength, msgAndArgs) } @@ -105,7 +102,7 @@ func TestAPICreateAndUpdateRelease(t *testing.T) { session := loginUser(t, owner.LowerName) token := getTokenForLoggedInUser(t, session) - gitRepo, err := git.OpenRepository(repo.RepoPath()) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo.RepoPath()) assert.NoError(t, err) defer gitRepo.Close() @@ -167,7 +164,7 @@ func TestAPICreateReleaseToDefaultBranchOnExistingTag(t *testing.T) { session := loginUser(t, owner.LowerName) token := getTokenForLoggedInUser(t, session) - gitRepo, err := git.OpenRepository(repo.RepoPath()) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo.RepoPath()) assert.NoError(t, err) defer gitRepo.Close() diff --git a/integrations/api_repo_collaborator_test.go b/integrations/api_repo_collaborator_test.go new file mode 100644 index 0000000000..fdca1d9150 --- /dev/null +++ b/integrations/api_repo_collaborator_test.go @@ -0,0 +1,131 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package integrations + +import ( + "net/http" + "net/url" + "testing" + + "code.gitea.io/gitea/models/perm" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + api "code.gitea.io/gitea/modules/structs" + + "github.com/stretchr/testify/assert" +) + +func TestAPIRepoCollaboratorPermission(t *testing.T) { + onGiteaRun(t, func(t *testing.T, u *url.URL) { + repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}).(*repo_model.Repository) + repo2Owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo2.OwnerID}).(*user_model.User) + + user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}).(*user_model.User) + user5 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}).(*user_model.User) + user10 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 10}).(*user_model.User) + user11 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 11}).(*user_model.User) + + session := loginUser(t, repo2Owner.Name) + testCtx := NewAPITestContext(t, repo2Owner.Name, repo2.Name) + + t.Run("RepoOwnerShouldBeOwner", func(t *testing.T) { + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/collaborators/%s/permission?token=%s", repo2Owner.Name, repo2.Name, repo2Owner.Name, testCtx.Token) + resp := session.MakeRequest(t, req, http.StatusOK) + + var repoPermission api.RepoCollaboratorPermission + DecodeJSON(t, resp, &repoPermission) + + assert.Equal(t, "owner", repoPermission.Permission) + }) + + t.Run("CollaboratorWithReadAccess", func(t *testing.T) { + t.Run("AddUserAsCollaboratorWithReadAccess", doAPIAddCollaborator(testCtx, user4.Name, perm.AccessModeRead)) + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/collaborators/%s/permission?token=%s", repo2Owner.Name, repo2.Name, user4.Name, testCtx.Token) + resp := session.MakeRequest(t, req, http.StatusOK) + + var repoPermission api.RepoCollaboratorPermission + DecodeJSON(t, resp, &repoPermission) + + assert.Equal(t, "read", repoPermission.Permission) + }) + + t.Run("CollaboratorWithWriteAccess", func(t *testing.T) { + t.Run("AddUserAsCollaboratorWithWriteAccess", doAPIAddCollaborator(testCtx, user4.Name, perm.AccessModeWrite)) + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/collaborators/%s/permission?token=%s", repo2Owner.Name, repo2.Name, user4.Name, testCtx.Token) + resp := session.MakeRequest(t, req, http.StatusOK) + + var repoPermission api.RepoCollaboratorPermission + DecodeJSON(t, resp, &repoPermission) + + assert.Equal(t, "write", repoPermission.Permission) + }) + + t.Run("CollaboratorWithAdminAccess", func(t *testing.T) { + t.Run("AddUserAsCollaboratorWithAdminAccess", doAPIAddCollaborator(testCtx, user4.Name, perm.AccessModeAdmin)) + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/collaborators/%s/permission?token=%s", repo2Owner.Name, repo2.Name, user4.Name, testCtx.Token) + resp := session.MakeRequest(t, req, http.StatusOK) + + var repoPermission api.RepoCollaboratorPermission + DecodeJSON(t, resp, &repoPermission) + + assert.Equal(t, "admin", repoPermission.Permission) + }) + + t.Run("CollaboratorNotFound", func(t *testing.T) { + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/collaborators/%s/permission?token=%s", repo2Owner.Name, repo2.Name, "non-existent-user", testCtx.Token) + session.MakeRequest(t, req, http.StatusNotFound) + }) + + t.Run("CollaboratorCanQueryItsPermissions", func(t *testing.T) { + t.Run("AddUserAsCollaboratorWithReadAccess", doAPIAddCollaborator(testCtx, 
user5.Name, perm.AccessModeRead)) + + _session := loginUser(t, user5.Name) + _testCtx := NewAPITestContext(t, user5.Name, repo2.Name) + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/collaborators/%s/permission?token=%s", repo2Owner.Name, repo2.Name, user5.Name, _testCtx.Token) + resp := _session.MakeRequest(t, req, http.StatusOK) + + var repoPermission api.RepoCollaboratorPermission + DecodeJSON(t, resp, &repoPermission) + + assert.Equal(t, "read", repoPermission.Permission) + }) + + t.Run("CollaboratorCanQueryItsPermissions", func(t *testing.T) { + t.Run("AddUserAsCollaboratorWithReadAccess", doAPIAddCollaborator(testCtx, user5.Name, perm.AccessModeRead)) + + _session := loginUser(t, user5.Name) + _testCtx := NewAPITestContext(t, user5.Name, repo2.Name) + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/collaborators/%s/permission?token=%s", repo2Owner.Name, repo2.Name, user5.Name, _testCtx.Token) + resp := _session.MakeRequest(t, req, http.StatusOK) + + var repoPermission api.RepoCollaboratorPermission + DecodeJSON(t, resp, &repoPermission) + + assert.Equal(t, "read", repoPermission.Permission) + }) + + t.Run("RepoAdminCanQueryACollaboratorsPermissions", func(t *testing.T) { + t.Run("AddUserAsCollaboratorWithAdminAccess", doAPIAddCollaborator(testCtx, user10.Name, perm.AccessModeAdmin)) + t.Run("AddUserAsCollaboratorWithReadAccess", doAPIAddCollaborator(testCtx, user11.Name, perm.AccessModeRead)) + + _session := loginUser(t, user10.Name) + _testCtx := NewAPITestContext(t, user10.Name, repo2.Name) + + req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/collaborators/%s/permission?token=%s", repo2Owner.Name, repo2.Name, user11.Name, _testCtx.Token) + resp := _session.MakeRequest(t, req, http.StatusOK) + + var repoPermission api.RepoCollaboratorPermission + DecodeJSON(t, resp, &repoPermission) + + assert.Equal(t, "read", repoPermission.Permission) + }) + }) +} diff --git a/integrations/api_repo_file_create_test.go b/integrations/api_repo_file_create_test.go index ba6c1a56af..eb550f1d28 100644 --- a/integrations/api_repo_file_create_test.go +++ b/integrations/api_repo_file_create_test.go @@ -5,6 +5,7 @@ package integrations import ( + stdCtx "context" "encoding/base64" "fmt" "net/http" @@ -49,14 +50,14 @@ func getCreateFileOptions() api.CreateFileOptions { } } -func getExpectedFileResponseForCreate(commitID, treePath string) *api.FileResponse { +func getExpectedFileResponseForCreate(repoFullName, commitID, treePath string) *api.FileResponse { sha := "a635aa942442ddfdba07468cf9661c08fbdf0ebf" encoding := "base64" content := "VGhpcyBpcyBuZXcgdGV4dA==" - selfURL := setting.AppURL + "api/v1/repos/user2/repo1/contents/" + treePath + "?ref=master" - htmlURL := setting.AppURL + "user2/repo1/src/branch/master/" + treePath - gitURL := setting.AppURL + "api/v1/repos/user2/repo1/git/blobs/" + sha - downloadURL := setting.AppURL + "user2/repo1/raw/branch/master/" + treePath + selfURL := setting.AppURL + "api/v1/repos/" + repoFullName + "/contents/" + treePath + "?ref=master" + htmlURL := setting.AppURL + repoFullName + "/src/branch/master/" + treePath + gitURL := setting.AppURL + "api/v1/repos/" + repoFullName + "/git/blobs/" + sha + downloadURL := setting.AppURL + repoFullName + "/raw/branch/master/" + treePath return &api.FileResponse{ Content: &api.ContentsResponse{ Name: filepath.Base(treePath), @@ -78,10 +79,10 @@ func getExpectedFileResponseForCreate(commitID, treePath string) *api.FileRespon }, Commit: &api.FileCommitResponse{ CommitMeta: api.CommitMeta{ - URL: setting.AppURL + 
"api/v1/repos/user2/repo1/git/commits/" + commitID, + URL: setting.AppURL + "api/v1/repos/" + repoFullName + "/git/commits/" + commitID, SHA: commitID, }, - HTMLURL: setting.AppURL + "user2/repo1/commit/" + commitID, + HTMLURL: setting.AppURL + repoFullName + "/commit/" + commitID, Author: &api.CommitUser{ Identity: api.Identity{ Name: "Anne Doe", @@ -167,9 +168,9 @@ func TestAPICreateFile(t *testing.T) { url := fmt.Sprintf("/api/v1/repos/%s/%s/contents/%s?token=%s", user2.Name, repo1.Name, treePath, token2) req := NewRequestWithJSON(t, "POST", url, &createFileOptions) resp := session.MakeRequest(t, req, http.StatusCreated) - gitRepo, _ := git.OpenRepository(repo1.RepoPath()) + gitRepo, _ := git.OpenRepository(stdCtx.Background(), repo1.RepoPath()) commitID, _ := gitRepo.GetBranchCommitID(createFileOptions.NewBranchName) - expectedFileResponse := getExpectedFileResponseForCreate(commitID, treePath) + expectedFileResponse := getExpectedFileResponseForCreate("user2/repo1", commitID, treePath) var fileResponse api.FileResponse DecodeJSON(t, resp, &fileResponse) assert.EqualValues(t, expectedFileResponse.Content, fileResponse.Content) @@ -276,5 +277,29 @@ func TestAPICreateFile(t *testing.T) { url = fmt.Sprintf("/api/v1/repos/%s/%s/contents/%s?token=%s", user2.Name, repo1.Name, treePath, token4) req = NewRequestWithJSON(t, "POST", url, &createFileOptions) session.MakeRequest(t, req, http.StatusForbidden) + + // Test creating a file in an empty repository + doAPICreateRepository(NewAPITestContext(t, "user2", "empty-repo"), true)(t) + createFileOptions = getCreateFileOptions() + fileID++ + treePath = fmt.Sprintf("new/file%d.txt", fileID) + url = fmt.Sprintf("/api/v1/repos/%s/%s/contents/%s?token=%s", user2.Name, "empty-repo", treePath, token2) + req = NewRequestWithJSON(t, "POST", url, &createFileOptions) + resp = session.MakeRequest(t, req, http.StatusCreated) + emptyRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerName: "user2", Name: "empty-repo"}).(*repo_model.Repository) // public repo + gitRepo, _ := git.OpenRepository(stdCtx.Background(), emptyRepo.RepoPath()) + commitID, _ := gitRepo.GetBranchCommitID(createFileOptions.NewBranchName) + expectedFileResponse := getExpectedFileResponseForCreate("user2/empty-repo", commitID, treePath) + DecodeJSON(t, resp, &fileResponse) + assert.EqualValues(t, expectedFileResponse.Content, fileResponse.Content) + assert.EqualValues(t, expectedFileResponse.Commit.SHA, fileResponse.Commit.SHA) + assert.EqualValues(t, expectedFileResponse.Commit.HTMLURL, fileResponse.Commit.HTMLURL) + assert.EqualValues(t, expectedFileResponse.Commit.Author.Email, fileResponse.Commit.Author.Email) + assert.EqualValues(t, expectedFileResponse.Commit.Author.Name, fileResponse.Commit.Author.Name) + assert.EqualValues(t, expectedFileResponse.Commit.Author.Date, fileResponse.Commit.Author.Date) + assert.EqualValues(t, expectedFileResponse.Commit.Committer.Email, fileResponse.Commit.Committer.Email) + assert.EqualValues(t, expectedFileResponse.Commit.Committer.Name, fileResponse.Commit.Committer.Name) + assert.EqualValues(t, expectedFileResponse.Commit.Committer.Date, fileResponse.Commit.Committer.Date) + gitRepo.Close() }) } diff --git a/integrations/api_repo_file_update_test.go b/integrations/api_repo_file_update_test.go index 63bee11458..0c9c0763f4 100644 --- a/integrations/api_repo_file_update_test.go +++ b/integrations/api_repo_file_update_test.go @@ -5,6 +5,7 @@ package integrations import ( + stdCtx "context" "encoding/base64" "fmt" "net/http" @@ -134,7 
+135,7 @@ func TestAPIUpdateFile(t *testing.T) { url := fmt.Sprintf("/api/v1/repos/%s/%s/contents/%s?token=%s", user2.Name, repo1.Name, treePath, token2) req := NewRequestWithJSON(t, "PUT", url, &updateFileOptions) resp := session.MakeRequest(t, req, http.StatusOK) - gitRepo, _ := git.OpenRepository(repo1.RepoPath()) + gitRepo, _ := git.OpenRepository(stdCtx.Background(), repo1.RepoPath()) commitID, _ := gitRepo.GetBranchCommitID(updateFileOptions.NewBranchName) expectedFileResponse := getExpectedFileResponseForUpdate(commitID, treePath) var fileResponse api.FileResponse diff --git a/integrations/api_repo_get_contents_list_test.go b/integrations/api_repo_get_contents_list_test.go index b8822d13ec..42227a9c4b 100644 --- a/integrations/api_repo_get_contents_list_test.go +++ b/integrations/api_repo_get_contents_list_test.go @@ -75,7 +75,7 @@ func testAPIGetContentsList(t *testing.T, u *url.URL) { err := repo_service.CreateNewBranch(git.DefaultContext, user2, repo1, repo1.DefaultBranch, newBranch) assert.NoError(t, err) // Get the commit ID of the default branch - gitRepo, err := git.OpenRepository(repo1.RepoPath()) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo1.RepoPath()) assert.NoError(t, err) defer gitRepo.Close() diff --git a/integrations/api_repo_get_contents_test.go b/integrations/api_repo_get_contents_test.go index bc8471c70d..67f2cb8362 100644 --- a/integrations/api_repo_get_contents_test.go +++ b/integrations/api_repo_get_contents_test.go @@ -76,7 +76,7 @@ func testAPIGetContents(t *testing.T, u *url.URL) { err := repo_service.CreateNewBranch(git.DefaultContext, user2, repo1, repo1.DefaultBranch, newBranch) assert.NoError(t, err) // Get the commit ID of the default branch - gitRepo, err := git.OpenRepository(repo1.RepoPath()) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo1.RepoPath()) assert.NoError(t, err) defer gitRepo.Close() diff --git a/integrations/api_repo_git_tags_test.go b/integrations/api_repo_git_tags_test.go index 6aa2f9f642..9e870d2489 100644 --- a/integrations/api_repo_git_tags_test.go +++ b/integrations/api_repo_git_tags_test.go @@ -28,10 +28,10 @@ func TestAPIGitTags(t *testing.T) { token := getTokenForLoggedInUser(t, session) // Set up git config for the tagger - git.NewCommand(git.DefaultContext, "config", "user.name", user.Name).RunInDir(repo.RepoPath()) - git.NewCommand(git.DefaultContext, "config", "user.email", user.Email).RunInDir(repo.RepoPath()) + _ = git.NewCommand(git.DefaultContext, "config", "user.name", user.Name).Run(&git.RunOpts{Dir: repo.RepoPath()}) + _ = git.NewCommand(git.DefaultContext, "config", "user.email", user.Email).Run(&git.RunOpts{Dir: repo.RepoPath()}) - gitRepo, _ := git.OpenRepository(repo.RepoPath()) + gitRepo, _ := git.OpenRepository(git.DefaultContext, repo.RepoPath()) defer gitRepo.Close() commit, _ := gitRepo.GetBranchCommit("master") diff --git a/integrations/api_repo_languages_test.go b/integrations/api_repo_languages_test.go index ea20e6d542..8775d25029 100644 --- a/integrations/api_repo_languages_test.go +++ b/integrations/api_repo_languages_test.go @@ -33,7 +33,7 @@ func TestRepoLanguages(t *testing.T) { "content": "package main", "commit_choice": "direct", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) // let gitea calculate language stats time.Sleep(time.Second) diff --git a/integrations/api_repo_teams_test.go b/integrations/api_repo_teams_test.go index a3baeba63c..efd6ddb457 100644 --- a/integrations/api_repo_teams_test.go +++ 
b/integrations/api_repo_teams_test.go @@ -37,7 +37,7 @@ func TestAPIRepoTeams(t *testing.T) { DecodeJSON(t, res, &teams) if assert.Len(t, teams, 2) { assert.EqualValues(t, "Owners", teams[0].Name) - assert.False(t, teams[0].CanCreateOrgRepo) + assert.True(t, teams[0].CanCreateOrgRepo) assert.True(t, util.IsEqualSlice(unit.AllUnitKeyNames(), teams[0].Units), fmt.Sprintf("%v == %v", unit.AllUnitKeyNames(), teams[0].Units)) assert.EqualValues(t, "owner", teams[0].Permission) diff --git a/integrations/api_repo_test.go b/integrations/api_repo_test.go index ce1ecb1d43..b585ad15e3 100644 --- a/integrations/api_repo_test.go +++ b/integrations/api_repo_test.go @@ -405,6 +405,27 @@ func testAPIRepoMigrateConflict(t *testing.T, u *url.URL) { }) } +// mirror-sync must fail with "400 (Bad Request)" when an attempt is made to +// sync a non-mirror repository. +func TestAPIMirrorSyncNonMirrorRepo(t *testing.T) { + defer prepareTestEnv(t)() + + session := loginUser(t, "user2") + token := getTokenForLoggedInUser(t, session) + + var repo api.Repository + req := NewRequest(t, "GET", "/api/v1/repos/user2/repo1") + resp := MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &repo) + assert.EqualValues(t, false, repo.Mirror) + + req = NewRequestf(t, "POST", "/api/v1/repos/user2/repo1/mirror-sync?token=%s", token) + resp = session.MakeRequest(t, req, http.StatusBadRequest) + errRespJSON := map[string]string{} + DecodeJSON(t, resp, &errRespJSON) + assert.Equal(t, "Repository is not a mirror", errRespJSON["message"]) +} + func TestAPIOrgRepoCreate(t *testing.T) { testCases := []struct { ctxUserID int64 diff --git a/integrations/api_repo_topic_test.go b/integrations/api_repo_topic_test.go index b7f9a5a5a6..04295724a7 100644 --- a/integrations/api_repo_topic_test.go +++ b/integrations/api_repo_topic_test.go @@ -59,36 +59,34 @@ func TestAPIRepoTopic(t *testing.T) { repo3 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 3}).(*repo_model.Repository) // Get user2's token - session := loginUser(t, user2.Name) - token2 := getTokenForLoggedInUser(t, session) + token2 := getUserToken(t, user2.Name) // Test read topics using login url := fmt.Sprintf("/api/v1/repos/%s/%s/topics", user2.Name, repo2.Name) - req := NewRequest(t, "GET", url) - res := session.MakeRequest(t, req, http.StatusOK) + req := NewRequest(t, "GET", url+"?token="+token2) + res := MakeRequest(t, req, http.StatusOK) var topics *api.TopicName DecodeJSON(t, res, &topics) assert.ElementsMatch(t, []string{"topicname1", "topicname2"}, topics.TopicNames) // Log out user2 - session = emptyTestSession(t) url = fmt.Sprintf("/api/v1/repos/%s/%s/topics?token=%s", user2.Name, repo2.Name, token2) // Test delete a topic req = NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/topics/%s?token=%s", user2.Name, repo2.Name, "Topicname1", token2) - session.MakeRequest(t, req, http.StatusNoContent) + MakeRequest(t, req, http.StatusNoContent) // Test add an existing topic req = NewRequestf(t, "PUT", "/api/v1/repos/%s/%s/topics/%s?token=%s", user2.Name, repo2.Name, "Golang", token2) - session.MakeRequest(t, req, http.StatusNoContent) + MakeRequest(t, req, http.StatusNoContent) // Test add a topic req = NewRequestf(t, "PUT", "/api/v1/repos/%s/%s/topics/%s?token=%s", user2.Name, repo2.Name, "topicName3", token2) - session.MakeRequest(t, req, http.StatusNoContent) + MakeRequest(t, req, http.StatusNoContent) // Test read topics using token req = NewRequest(t, "GET", url) - res = session.MakeRequest(t, req, http.StatusOK) + res = MakeRequest(t, req, http.StatusOK) 
DecodeJSON(t, res, &topics) assert.ElementsMatch(t, []string{"topicname2", "golang", "topicname3"}, topics.TopicNames) @@ -97,9 +95,9 @@ func TestAPIRepoTopic(t *testing.T) { req = NewRequestWithJSON(t, "PUT", url, &api.RepoTopicOptions{ Topics: newTopics, }) - session.MakeRequest(t, req, http.StatusNoContent) + MakeRequest(t, req, http.StatusNoContent) req = NewRequest(t, "GET", url) - res = session.MakeRequest(t, req, http.StatusOK) + res = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, res, &topics) assert.ElementsMatch(t, []string{"windows", "mac"}, topics.TopicNames) @@ -108,9 +106,9 @@ func TestAPIRepoTopic(t *testing.T) { req = NewRequestWithJSON(t, "PUT", url, &api.RepoTopicOptions{ Topics: newTopics, }) - session.MakeRequest(t, req, http.StatusUnprocessableEntity) + MakeRequest(t, req, http.StatusUnprocessableEntity) req = NewRequest(t, "GET", url) - res = session.MakeRequest(t, req, http.StatusOK) + res = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, res, &topics) assert.ElementsMatch(t, []string{"windows", "mac"}, topics.TopicNames) @@ -119,9 +117,9 @@ func TestAPIRepoTopic(t *testing.T) { req = NewRequestWithJSON(t, "PUT", url, &api.RepoTopicOptions{ Topics: newTopics, }) - session.MakeRequest(t, req, http.StatusNoContent) + MakeRequest(t, req, http.StatusNoContent) req = NewRequest(t, "GET", url) - res = session.MakeRequest(t, req, http.StatusOK) + res = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, res, &topics) assert.Len(t, topics.TopicNames, 25) @@ -130,29 +128,27 @@ func TestAPIRepoTopic(t *testing.T) { req = NewRequestWithJSON(t, "PUT", url, &api.RepoTopicOptions{ Topics: newTopics, }) - session.MakeRequest(t, req, http.StatusUnprocessableEntity) + MakeRequest(t, req, http.StatusUnprocessableEntity) // Test add a topic when there is already maximum req = NewRequestf(t, "PUT", "/api/v1/repos/%s/%s/topics/%s?token=%s", user2.Name, repo2.Name, "t26", token2) - session.MakeRequest(t, req, http.StatusUnprocessableEntity) + MakeRequest(t, req, http.StatusUnprocessableEntity) // Test delete a topic that repo doesn't have req = NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/topics/%s?token=%s", user2.Name, repo2.Name, "Topicname1", token2) - session.MakeRequest(t, req, http.StatusNotFound) + MakeRequest(t, req, http.StatusNotFound) // Get user4's token - session = loginUser(t, user4.Name) - token4 := getTokenForLoggedInUser(t, session) - session = emptyTestSession(t) + token4 := getUserToken(t, user4.Name) // Test read topics with write access url = fmt.Sprintf("/api/v1/repos/%s/%s/topics?token=%s", user3.Name, repo3.Name, token4) req = NewRequest(t, "GET", url) - res = session.MakeRequest(t, req, http.StatusOK) + res = MakeRequest(t, req, http.StatusOK) DecodeJSON(t, res, &topics) assert.Empty(t, topics.TopicNames) // Test add a topic to repo with write access (requires repo admin access) req = NewRequestf(t, "PUT", "/api/v1/repos/%s/%s/topics/%s?token=%s", user3.Name, repo3.Name, "topicName", token4) - session.MakeRequest(t, req, http.StatusForbidden) + MakeRequest(t, req, http.StatusForbidden) } diff --git a/integrations/api_team_test.go b/integrations/api_team_test.go index a622c63145..412fd4c73d 100644 --- a/integrations/api_team_test.go +++ b/integrations/api_team_test.go @@ -10,7 +10,8 @@ import ( "sort" "testing" - "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" + "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -23,8 +24,8 @@ import ( 
func TestAPITeam(t *testing.T) { defer prepareTestEnv(t)() - teamUser := unittest.AssertExistsAndLoadBean(t, &models.TeamUser{}).(*models.TeamUser) - team := unittest.AssertExistsAndLoadBean(t, &models.Team{ID: teamUser.TeamID}).(*models.Team) + teamUser := unittest.AssertExistsAndLoadBean(t, &organization.TeamUser{}).(*organization.TeamUser) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamUser.TeamID}).(*organization.Team) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: teamUser.UID}).(*user_model.User) session := loginUser(t, user.Name) @@ -38,7 +39,7 @@ func TestAPITeam(t *testing.T) { assert.Equal(t, team.Name, apiTeam.Name) // non team member user will not access the teams details - teamUser2 := unittest.AssertExistsAndLoadBean(t, &models.TeamUser{ID: 3}).(*models.TeamUser) + teamUser2 := unittest.AssertExistsAndLoadBean(t, &organization.TeamUser{ID: 3}).(*organization.TeamUser) user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: teamUser2.UID}).(*user_model.User) session = loginUser(t, user2.Name) @@ -107,7 +108,7 @@ func TestAPITeam(t *testing.T) { teamToEdit.Permission, unit.AllUnitKeyNames(), nil) // Read team. - teamRead := unittest.AssertExistsAndLoadBean(t, &models.Team{ID: teamID}).(*models.Team) + teamRead := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) assert.NoError(t, teamRead.GetUnits()) req = NewRequestf(t, "GET", "/api/v1/teams/%d?token="+token, teamID) resp = session.MakeRequest(t, req, http.StatusOK) @@ -119,7 +120,7 @@ func TestAPITeam(t *testing.T) { // Delete team. req = NewRequestf(t, "DELETE", "/api/v1/teams/%d?token="+token, teamID) session.MakeRequest(t, req, http.StatusNoContent) - unittest.AssertNotExistsBean(t, &models.Team{ID: teamID}) + unittest.AssertNotExistsBean(t, &organization.Team{ID: teamID}) // create team again via UnitsMap // Create team. @@ -173,7 +174,7 @@ func TestAPITeam(t *testing.T) { "read", nil, teamToEdit.UnitsMap) // Read team. - teamRead = unittest.AssertExistsAndLoadBean(t, &models.Team{ID: teamID}).(*models.Team) + teamRead = unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) req = NewRequestf(t, "GET", "/api/v1/teams/%d?token="+token, teamID) resp = session.MakeRequest(t, req, http.StatusOK) apiTeam = api.Team{} @@ -185,7 +186,7 @@ func TestAPITeam(t *testing.T) { // Delete team. 
req = NewRequestf(t, "DELETE", "/api/v1/teams/%d?token="+token, teamID) session.MakeRequest(t, req, http.StatusNoContent) - unittest.AssertNotExistsBean(t, &models.Team{ID: teamID}) + unittest.AssertNotExistsBean(t, &organization.Team{ID: teamID}) } func checkTeamResponse(t *testing.T, apiTeam *api.Team, name, description string, includesAllRepositories bool, permission string, units []string, unitsMap map[string]string) { @@ -206,7 +207,7 @@ func checkTeamResponse(t *testing.T, apiTeam *api.Team, name, description string } func checkTeamBean(t *testing.T, id int64, name, description string, includesAllRepositories bool, permission string, units []string, unitsMap map[string]string) { - team := unittest.AssertExistsAndLoadBean(t, &models.Team{ID: id}).(*models.Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: id}).(*organization.Team) assert.NoError(t, team.GetUnits(), "GetUnits") checkTeamResponse(t, convert.ToTeam(team), name, description, includesAllRepositories, permission, units, unitsMap) } @@ -224,11 +225,9 @@ func TestAPITeamSearch(t *testing.T) { var results TeamSearchResults - session := loginUser(t, user.Name) - csrf := GetCSRF(t, session, "/"+org.Name) - req := NewRequestf(t, "GET", "/api/v1/orgs/%s/teams/search?q=%s", org.Name, "_team") - req.Header.Add("X-Csrf-Token", csrf) - resp := session.MakeRequest(t, req, http.StatusOK) + token := getUserToken(t, user.Name) + req := NewRequestf(t, "GET", "/api/v1/orgs/%s/teams/search?q=%s&token=%s", org.Name, "_team", token) + resp := MakeRequest(t, req, http.StatusOK) DecodeJSON(t, resp, &results) assert.NotEmpty(t, results.Data) assert.Len(t, results.Data, 1) @@ -236,9 +235,31 @@ func TestAPITeamSearch(t *testing.T) { // no access if not organization member user5 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}).(*user_model.User) - session = loginUser(t, user5.Name) - csrf = GetCSRF(t, session, "/"+org.Name) - req = NewRequestf(t, "GET", "/api/v1/orgs/%s/teams/search?q=%s", org.Name, "team") - req.Header.Add("X-Csrf-Token", csrf) - session.MakeRequest(t, req, http.StatusForbidden) + token5 := getUserToken(t, user5.Name) + + req = NewRequestf(t, "GET", "/api/v1/orgs/%s/teams/search?q=%s&token=%s", org.Name, "team", token5) + MakeRequest(t, req, http.StatusForbidden) +} + +func TestAPIGetTeamRepo(t *testing.T) { + defer prepareTestEnv(t)() + + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15}).(*user_model.User) + teamRepo := unittest.AssertExistsAndLoadBean(t, &repo.Repository{ID: 24}).(*repo.Repository) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 5}).(*organization.Team) + + var results api.Repository + + token := getUserToken(t, user.Name) + req := NewRequestf(t, "GET", "/api/v1/teams/%d/repos/%s/?token=%s", team.ID, teamRepo.FullName(), token) + resp := MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &results) + assert.Equal(t, "big_test_private_4", teamRepo.Name) + + // no access if not organization member + user5 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}).(*user_model.User) + token5 := getUserToken(t, user5.Name) + + req = NewRequestf(t, "GET", "/api/v1/teams/%d/repos/%s/?token=%s", team.ID, teamRepo.FullName(), token5) + MakeRequest(t, req, http.StatusNotFound) } diff --git a/integrations/api_user_email_test.go b/integrations/api_user_email_test.go index 9d2b7485d8..08d236df30 100644 --- a/integrations/api_user_email_test.go +++ b/integrations/api_user_email_test.go @@ -69,6 +69,12 @@ func TestAPIAddEmail(t 
*testing.T) { Primary: false, }, }, emails) + + opts = api.CreateEmailOption{ + Emails: []string{"notAEmail"}, + } + req = NewRequestWithJSON(t, "POST", "/api/v1/user/emails?token="+token, &opts) + session.MakeRequest(t, req, http.StatusUnprocessableEntity) } func TestAPIDeleteEmail(t *testing.T) { diff --git a/integrations/api_user_heatmap_test.go b/integrations/api_user_heatmap_test.go index 69f4ff2249..62e70d4c3d 100644 --- a/integrations/api_user_heatmap_test.go +++ b/integrations/api_user_heatmap_test.go @@ -20,15 +20,15 @@ func TestUserHeatmap(t *testing.T) { defer prepareTestEnv(t)() adminUsername := "user1" normalUsername := "user2" - session := loginUser(t, adminUsername) + token := getUserToken(t, adminUsername) fakeNow := time.Date(2011, 10, 20, 0, 0, 0, 0, time.Local) timeutil.Set(fakeNow) defer timeutil.Unset() - urlStr := fmt.Sprintf("/api/v1/users/%s/heatmap", normalUsername) + urlStr := fmt.Sprintf("/api/v1/users/%s/heatmap?token=%s", normalUsername, token) req := NewRequest(t, "GET", urlStr) - resp := session.MakeRequest(t, req, http.StatusOK) + resp := MakeRequest(t, req, http.StatusOK) var heatmap []*models.UserHeatmapData DecodeJSON(t, resp, &heatmap) var dummyheatmap []*models.UserHeatmapData diff --git a/integrations/api_user_org_perm_test.go b/integrations/api_user_org_perm_test.go index 0dcdbd77ad..f4047e72be 100644 --- a/integrations/api_user_org_perm_test.go +++ b/integrations/api_user_org_perm_test.go @@ -133,7 +133,7 @@ func TestUnknowUser(t *testing.T) { var apiError api.APIError DecodeJSON(t, resp, &apiError) - assert.Equal(t, "GetUserByName", apiError.Message) + assert.Equal(t, "user redirect does not exist [name: unknow]", apiError.Message) } func TestUnknowOrganization(t *testing.T) { diff --git a/integrations/attachment_test.go b/integrations/attachment_test.go index 25243feb3c..00e5b9de54 100644 --- a/integrations/attachment_test.go +++ b/integrations/attachment_test.go @@ -59,7 +59,7 @@ func createAttachment(t *testing.T, session *TestSession, repoURL, filename stri func TestCreateAnonymousAttachment(t *testing.T) { defer prepareTestEnv(t)() session := emptyTestSession(t) - createAttachment(t, session, "user2/repo1", "image.png", generateImg(), http.StatusFound) + createAttachment(t, session, "user2/repo1", "image.png", generateImg(), http.StatusSeeOther) } func TestCreateIssueAttachment(t *testing.T) { @@ -83,7 +83,7 @@ func TestCreateIssueAttachment(t *testing.T) { } req = NewRequestWithValues(t, "POST", link, postData) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) test.RedirectURL(resp) // check that redirect URL exists // Validate that attachment is available diff --git a/integrations/auth_ldap_test.go b/integrations/auth_ldap_test.go index ef0fafc93d..0eee5ae0cd 100644 --- a/integrations/auth_ldap_test.go +++ b/integrations/auth_ldap_test.go @@ -12,12 +12,14 @@ import ( "testing" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/translation/i18n" "code.gitea.io/gitea/services/auth" "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) type ldapUser struct { @@ -135,7 +137,7 @@ func addAuthSourceLDAP(t *testing.T, sshKeyAttribute string, groupMapParams ...s "group_team_map_removal": groupTeamMapRemoval, "user_uid": "DN", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, 
http.StatusSeeOther) } func TestLDAPUserSignin(t *testing.T) { @@ -202,7 +204,7 @@ func TestLDAPAuthChange(t *testing.T) { "is_sync_enabled": "on", "is_active": "on", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) req = NewRequest(t, "GET", href) resp = session.MakeRequest(t, req, http.StatusOK) @@ -317,37 +319,37 @@ func TestLDAPGroupTeamSyncAddMember(t *testing.T) { } defer prepareTestEnv(t)() addAuthSourceLDAP(t, "", "on", `{"cn=ship_crew,ou=people,dc=planetexpress,dc=com":{"org26": ["team11"]},"cn=admin_staff,ou=people,dc=planetexpress,dc=com": {"non-existent": ["non-existent"]}}`) - org, err := models.GetOrgByName("org26") + org, err := organization.GetOrgByName("org26") assert.NoError(t, err) - team, err := models.GetTeam(org.ID, "team11") + team, err := organization.GetTeam(org.ID, "team11") assert.NoError(t, err) auth.SyncExternalUsers(context.Background(), true) for _, gitLDAPUser := range gitLDAPUsers { user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ Name: gitLDAPUser.UserName, }).(*user_model.User) - usersOrgs, err := models.FindOrgs(models.FindOrgOptions{ + usersOrgs, err := organization.FindOrgs(organization.FindOrgOptions{ UserID: user.ID, IncludePrivate: true, }) assert.NoError(t, err) - allOrgTeams, err := models.GetUserOrgTeams(org.ID, user.ID) + allOrgTeams, err := organization.GetUserOrgTeams(db.DefaultContext, org.ID, user.ID) assert.NoError(t, err) if user.Name == "fry" || user.Name == "leela" || user.Name == "bender" { // assert members of LDAP group "cn=ship_crew" are added to mapped teams assert.Equal(t, len(usersOrgs), 1, "User [%s] should be member of one organization", user.Name) assert.Equal(t, usersOrgs[0].Name, "org26", "Membership should be added to the right organization") - isMember, err := models.IsTeamMember(usersOrgs[0].ID, team.ID, user.ID) + isMember, err := organization.IsTeamMember(db.DefaultContext, usersOrgs[0].ID, team.ID, user.ID) assert.NoError(t, err) assert.True(t, isMember, "Membership should be added to the right team") - err = team.RemoveMember(user.ID) + err = models.RemoveTeamMember(team, user.ID) assert.NoError(t, err) - err = usersOrgs[0].RemoveMember(user.ID) + err = models.RemoveOrgUser(usersOrgs[0].ID, user.ID) assert.NoError(t, err) } else { // assert members of LDAP group "cn=admin_staff" keep initial team membership since mapped team does not exist assert.Empty(t, usersOrgs, "User should be member of no organization") - isMember, err := models.IsTeamMember(org.ID, team.ID, user.ID) + isMember, err := organization.IsTeamMember(db.DefaultContext, org.ID, team.ID, user.ID) assert.NoError(t, err) assert.False(t, isMember, "User should no be added to this team") assert.Empty(t, allOrgTeams, "User should not be added to any team") @@ -362,30 +364,30 @@ func TestLDAPGroupTeamSyncRemoveMember(t *testing.T) { } defer prepareTestEnv(t)() addAuthSourceLDAP(t, "", "on", `{"cn=dispatch,ou=people,dc=planetexpress,dc=com": {"org26": ["team11"]}}`) - org, err := models.GetOrgByName("org26") + org, err := organization.GetOrgByName("org26") assert.NoError(t, err) - team, err := models.GetTeam(org.ID, "team11") + team, err := organization.GetTeam(org.ID, "team11") assert.NoError(t, err) loginUserWithPassword(t, gitLDAPUsers[0].UserName, gitLDAPUsers[0].Password) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ Name: gitLDAPUsers[0].UserName, }).(*user_model.User) - err = org.AddMember(user.ID) + err = organization.AddOrgUser(org.ID, user.ID) assert.NoError(t, err) - 
err = team.AddMember(user.ID) + err = models.AddTeamMember(team, user.ID) assert.NoError(t, err) - isMember, err := models.IsOrganizationMember(org.ID, user.ID) + isMember, err := organization.IsOrganizationMember(db.DefaultContext, org.ID, user.ID) assert.NoError(t, err) assert.True(t, isMember, "User should be member of this organization") - isMember, err = models.IsTeamMember(org.ID, team.ID, user.ID) + isMember, err = organization.IsTeamMember(db.DefaultContext, org.ID, team.ID, user.ID) assert.NoError(t, err) assert.True(t, isMember, "User should be member of this team") // assert team member "professor" gets removed from org26 team11 loginUserWithPassword(t, gitLDAPUsers[0].UserName, gitLDAPUsers[0].Password) - isMember, err = models.IsOrganizationMember(org.ID, user.ID) + isMember, err = organization.IsOrganizationMember(db.DefaultContext, org.ID, user.ID) assert.NoError(t, err) assert.False(t, isMember, "User membership should have been removed from organization") - isMember, err = models.IsTeamMember(org.ID, team.ID, user.ID) + isMember, err = organization.IsTeamMember(db.DefaultContext, org.ID, team.ID, user.ID) assert.NoError(t, err) assert.False(t, isMember, "User membership should have been removed from team") } diff --git a/integrations/branches_test.go b/integrations/branches_test.go index aa4df6ac6a..551c5f8af8 100644 --- a/integrations/branches_test.go +++ b/integrations/branches_test.go @@ -9,8 +9,9 @@ import ( "net/url" "testing" + "code.gitea.io/gitea/modules/translation/i18n" + "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) func TestViewBranches(t *testing.T) { diff --git a/integrations/change_default_branch_test.go b/integrations/change_default_branch_test.go index af5542e0b2..096afa28f4 100644 --- a/integrations/change_default_branch_test.go +++ b/integrations/change_default_branch_test.go @@ -28,7 +28,7 @@ func TestChangeDefaultBranch(t *testing.T) { "action": "default_branch", "branch": "DefaultBranch", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) csrf = GetCSRF(t, session, branchesURL) req = NewRequestWithValues(t, "POST", branchesURL, map[string]string{ diff --git a/integrations/create_no_session_test.go b/integrations/create_no_session_test.go index a76ff1eaaf..49234c1e95 100644 --- a/integrations/create_no_session_test.go +++ b/integrations/create_no_session_test.go @@ -110,7 +110,7 @@ func TestSessionFileCreation(t *testing.T) { "user_name": "user2", "password": userPassword, }) - resp = MakeRequest(t, req, http.StatusFound) + resp = MakeRequest(t, req, http.StatusSeeOther) sessionID = getSessionID(t, resp) assert.FileExists(t, sessionFile(tmpDir, sessionID)) diff --git a/integrations/csrf_test.go b/integrations/csrf_test.go new file mode 100644 index 0000000000..5bfc97bbd1 --- /dev/null +++ b/integrations/csrf_test.go @@ -0,0 +1,52 @@ +// Copyright 2017 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package integrations + +import ( + "net/http" + "strings" + "testing" + + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/setting" + + "github.com/stretchr/testify/assert" +) + +func TestCsrfProtection(t *testing.T) { + defer prepareTestEnv(t)() + + // test web form csrf via form + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + session := loginUser(t, user.Name) + req := NewRequestWithValues(t, "POST", "/user/settings", map[string]string{ + "_csrf": "fake_csrf", + }) + session.MakeRequest(t, req, http.StatusSeeOther) + + resp := session.MakeRequest(t, req, http.StatusSeeOther) + loc := resp.Header().Get("Location") + assert.Equal(t, setting.AppSubURL+"/", loc) + resp = session.MakeRequest(t, NewRequest(t, "GET", loc), http.StatusOK) + htmlDoc := NewHTMLParser(t, resp.Body) + assert.Equal(t, "Bad Request: invalid CSRF token", + strings.TrimSpace(htmlDoc.doc.Find(".ui.message").Text()), + ) + + // test web form csrf via header. TODO: should use an UI api to test + req = NewRequest(t, "POST", "/user/settings") + req.Header.Add("X-Csrf-Token", "fake_csrf") + session.MakeRequest(t, req, http.StatusSeeOther) + + resp = session.MakeRequest(t, req, http.StatusSeeOther) + loc = resp.Header().Get("Location") + assert.Equal(t, setting.AppSubURL+"/", loc) + resp = session.MakeRequest(t, NewRequest(t, "GET", loc), http.StatusOK) + htmlDoc = NewHTMLParser(t, resp.Body) + assert.Equal(t, "Bad Request: invalid CSRF token", + strings.TrimSpace(htmlDoc.doc.Find(".ui.message").Text()), + ) +} diff --git a/integrations/delete_user_test.go b/integrations/delete_user_test.go index e44d2e7bd3..4b67c05951 100644 --- a/integrations/delete_user_test.go +++ b/integrations/delete_user_test.go @@ -10,6 +10,7 @@ import ( "testing" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -21,9 +22,9 @@ func assertUserDeleted(t *testing.T, userID int64) { unittest.AssertNotExistsBean(t, &user_model.Follow{FollowID: userID}) unittest.AssertNotExistsBean(t, &repo_model.Repository{OwnerID: userID}) unittest.AssertNotExistsBean(t, &models.Access{UserID: userID}) - unittest.AssertNotExistsBean(t, &models.OrgUser{UID: userID}) + unittest.AssertNotExistsBean(t, &organization.OrgUser{UID: userID}) unittest.AssertNotExistsBean(t, &models.IssueUser{UID: userID}) - unittest.AssertNotExistsBean(t, &models.TeamUser{UID: userID}) + unittest.AssertNotExistsBean(t, &organization.TeamUser{UID: userID}) unittest.AssertNotExistsBean(t, &repo_model.Star{UID: userID}) } @@ -36,7 +37,7 @@ func TestUserDeleteAccount(t *testing.T) { req := NewRequestWithValues(t, "POST", urlStr, map[string]string{ "_csrf": csrf, }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) assertUserDeleted(t, 8) unittest.CheckConsistencyFor(t, &user_model.User{}) @@ -51,7 +52,7 @@ func TestUserDeleteAccountStillOwnRepos(t *testing.T) { req := NewRequestWithValues(t, "POST", urlStr, map[string]string{ "_csrf": csrf, }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) // user should not have been deleted, because the user still owns repos unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) diff --git a/integrations/editor_test.go b/integrations/editor_test.go index 05892aa906..3ed0e510c4 100644 --- 
a/integrations/editor_test.go +++ b/integrations/editor_test.go @@ -34,7 +34,7 @@ func TestCreateFile(t *testing.T) { "content": "Content", "commit_choice": "direct", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) }) } @@ -48,7 +48,7 @@ func TestCreateFileOnProtectedBranch(t *testing.T) { "_csrf": csrf, "protected": "on", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) // Check if master branch has been locked successfully flashCookie := session.GetCookie("macaron_flash") assert.NotNil(t, flashCookie) @@ -82,7 +82,7 @@ func TestCreateFileOnProtectedBranch(t *testing.T) { "_csrf": csrf, "protected": "off", }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) // Check if master branch has been locked successfully flashCookie = session.GetCookie("macaron_flash") assert.NotNil(t, flashCookie) @@ -109,7 +109,7 @@ func testEditFile(t *testing.T, session *TestSession, user, repo, branch, filePa "commit_choice": "direct", }, ) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) // Verify the change req = NewRequest(t, "GET", path.Join(user, repo, "raw/branch", branch, filePath)) @@ -139,7 +139,7 @@ func testEditFileToNewBranch(t *testing.T, session *TestSession, user, repo, bra "new_branch_name": targetBranch, }, ) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) // Verify the change req = NewRequest(t, "GET", path.Join(user, repo, "raw/branch", targetBranch, filePath)) diff --git a/integrations/git_helper_for_declarative_test.go b/integrations/git_helper_for_declarative_test.go index 674fad5f18..1ea594b739 100644 --- a/integrations/git_helper_for_declarative_test.go +++ b/integrations/git_helper_for_declarative_test.go @@ -134,7 +134,7 @@ func doGitInitTestRepository(dstPath string) func(*testing.T) { // Init repository in dstPath assert.NoError(t, git.InitRepository(git.DefaultContext, dstPath, false)) // forcibly set default branch to master - _, err := git.NewCommand(git.DefaultContext, "symbolic-ref", "HEAD", git.BranchPrefix+"master").RunInDir(dstPath) + _, _, err := git.NewCommand(git.DefaultContext, "symbolic-ref", "HEAD", git.BranchPrefix+"master").RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) assert.NoError(t, os.WriteFile(filepath.Join(dstPath, "README.md"), []byte(fmt.Sprintf("# Testing Repository\n\nOriginally created in: %s", dstPath)), 0o644)) assert.NoError(t, git.AddChanges(dstPath, true)) @@ -153,49 +153,49 @@ func doGitInitTestRepository(dstPath string) func(*testing.T) { func doGitAddRemote(dstPath, remoteName string, u *url.URL) func(*testing.T) { return func(t *testing.T) { - _, err := git.NewCommand(git.DefaultContext, "remote", "add", remoteName, u.String()).RunInDir(dstPath) + _, _, err := git.NewCommand(git.DefaultContext, "remote", "add", remoteName, u.String()).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } func doGitPushTestRepository(dstPath string, args ...string) func(*testing.T) { return func(t *testing.T) { - _, err := git.NewCommand(git.DefaultContext, append([]string{"push", "-u"}, args...)...).RunInDir(dstPath) + _, _, err := git.NewCommand(git.DefaultContext, append([]string{"push", "-u"}, args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } func doGitPushTestRepositoryFail(dstPath string, args 
...string) func(*testing.T) { return func(t *testing.T) { - _, err := git.NewCommand(git.DefaultContext, append([]string{"push"}, args...)...).RunInDir(dstPath) + _, _, err := git.NewCommand(git.DefaultContext, append([]string{"push"}, args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.Error(t, err) } } func doGitCreateBranch(dstPath, branch string) func(*testing.T) { return func(t *testing.T) { - _, err := git.NewCommand(git.DefaultContext, "checkout", "-b", branch).RunInDir(dstPath) + _, _, err := git.NewCommand(git.DefaultContext, "checkout", "-b", branch).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } func doGitCheckoutBranch(dstPath string, args ...string) func(*testing.T) { return func(t *testing.T) { - _, err := git.NewCommandNoGlobals(append(append(git.AllowLFSFiltersArgs(), "checkout"), args...)...).RunInDir(dstPath) + _, _, err := git.NewCommandNoGlobals(append(append(git.AllowLFSFiltersArgs(), "checkout"), args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } func doGitMerge(dstPath string, args ...string) func(*testing.T) { return func(t *testing.T) { - _, err := git.NewCommand(git.DefaultContext, append([]string{"merge"}, args...)...).RunInDir(dstPath) + _, _, err := git.NewCommand(git.DefaultContext, append([]string{"merge"}, args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } func doGitPull(dstPath string, args ...string) func(*testing.T) { return func(t *testing.T) { - _, err := git.NewCommandNoGlobals(append(append(git.AllowLFSFiltersArgs(), "pull"), args...)...).RunInDir(dstPath) + _, _, err := git.NewCommandNoGlobals(append(append(git.AllowLFSFiltersArgs(), "pull"), args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } diff --git a/integrations/git_smart_http_test.go b/integrations/git_smart_http_test.go index b6043fe706..a6baafe374 100644 --- a/integrations/git_smart_http_test.go +++ b/integrations/git_smart_http_test.go @@ -24,31 +24,31 @@ func testGitSmartHTTP(t *testing.T, u *url.URL) { }{ { p: "user2/repo1/info/refs", - code: 200, + code: http.StatusOK, }, { p: "user2/repo1/HEAD", - code: 200, + code: http.StatusOK, }, { p: "user2/repo1/objects/info/alternates", - code: 404, + code: http.StatusNotFound, }, { p: "user2/repo1/objects/info/http-alternates", - code: 404, + code: http.StatusNotFound, }, { p: "user2/repo1/../../custom/conf/app.ini", - code: 404, + code: http.StatusNotFound, }, { p: "user2/repo1/objects/info/../../../../custom/conf/app.ini", - code: 404, + code: http.StatusNotFound, }, { p: `user2/repo1/objects/info/..\..\..\..\custom\conf\app.ini`, - code: 400, + code: http.StatusBadRequest, }, } diff --git a/integrations/git_test.go b/integrations/git_test.go index e1df8ac546..04cdf633bd 100644 --- a/integrations/git_test.go +++ b/integrations/git_test.go @@ -82,6 +82,7 @@ func testGit(t *testing.T, u *url.URL) { t.Run("CreateAgitFlowPull", doCreateAgitFlowPull(dstPath, &httpContext, "master", "test/head")) t.Run("BranchProtectMerge", doBranchProtectPRMerge(&httpContext, dstPath)) + t.Run("AutoMerge", doAutoPRMerge(&httpContext, dstPath)) t.Run("CreatePRAndSetManuallyMerged", doCreatePRAndSetManuallyMerged(httpContext, httpContext, dstPath, "master", "test-manually-merge")) t.Run("MergeFork", func(t *testing.T) { defer PrintCurrentTest(t)() @@ -160,9 +161,9 @@ func lfsCommitAndPushTest(t *testing.T, dstPath string) (littleLFS, bigLFS strin return } prefix := "lfs-data-file-" - _, err := git.NewCommand(git.DefaultContext, 
"lfs").AddArguments("install").RunInDir(dstPath) + err := git.NewCommand(git.DefaultContext, "lfs").AddArguments("install").Run(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) - _, err = git.NewCommand(git.DefaultContext, "lfs").AddArguments("track", prefix+"*").RunInDir(dstPath) + _, _, err = git.NewCommand(git.DefaultContext, "lfs").AddArguments("track", prefix+"*").RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) err = git.AddChanges(dstPath, false, ".gitattributes") assert.NoError(t, err) @@ -292,20 +293,20 @@ func lockTest(t *testing.T, repoPath string) { } func lockFileTest(t *testing.T, filename, repoPath string) { - _, err := git.NewCommand(git.DefaultContext, "lfs").AddArguments("locks").RunInDir(repoPath) + _, _, err := git.NewCommand(git.DefaultContext, "lfs").AddArguments("locks").RunStdString(&git.RunOpts{Dir: repoPath}) assert.NoError(t, err) - _, err = git.NewCommand(git.DefaultContext, "lfs").AddArguments("lock", filename).RunInDir(repoPath) + _, _, err = git.NewCommand(git.DefaultContext, "lfs").AddArguments("lock", filename).RunStdString(&git.RunOpts{Dir: repoPath}) assert.NoError(t, err) - _, err = git.NewCommand(git.DefaultContext, "lfs").AddArguments("locks").RunInDir(repoPath) + _, _, err = git.NewCommand(git.DefaultContext, "lfs").AddArguments("locks").RunStdString(&git.RunOpts{Dir: repoPath}) assert.NoError(t, err) - _, err = git.NewCommand(git.DefaultContext, "lfs").AddArguments("unlock", filename).RunInDir(repoPath) + _, _, err = git.NewCommand(git.DefaultContext, "lfs").AddArguments("unlock", filename).RunStdString(&git.RunOpts{Dir: repoPath}) assert.NoError(t, err) } func doCommitAndPush(t *testing.T, size int, repoPath, prefix string) string { name, err := generateCommitWithNewData(size, repoPath, "user2@example.com", "User Two", prefix) assert.NoError(t, err) - _, err = git.NewCommand(git.DefaultContext, "push", "origin", "master").RunInDir(repoPath) // Push + _, _, err = git.NewCommand(git.DefaultContext, "push", "origin", "master").RunStdString(&git.RunOpts{Dir: repoPath}) // Push assert.NoError(t, err) return name } @@ -435,7 +436,7 @@ func doProtectBranch(ctx APITestContext, branch, userToWhitelist, unprotectedFil "protected": "on", "unprotected_file_patterns": unprotectedFilePatterns, }) - ctx.Session.MakeRequest(t, req, http.StatusFound) + ctx.Session.MakeRequest(t, req, http.StatusSeeOther) } else { user, err := user_model.GetUserByName(userToWhitelist) assert.NoError(t, err) @@ -448,7 +449,7 @@ func doProtectBranch(ctx APITestContext, branch, userToWhitelist, unprotectedFil "whitelist_users": strconv.FormatInt(user.ID, 10), "unprotected_file_patterns": unprotectedFilePatterns, }) - ctx.Session.MakeRequest(t, req, http.StatusFound) + ctx.Session.MakeRequest(t, req, http.StatusSeeOther) } // Check if master branch has been locked successfully flashCookie := ctx.Session.GetCookie("macaron_flash") @@ -615,6 +616,88 @@ func doBranchDelete(ctx APITestContext, owner, repo, branch string) func(*testin } } +func doAutoPRMerge(baseCtx *APITestContext, dstPath string) func(t *testing.T) { + return func(t *testing.T) { + defer PrintCurrentTest(t)() + + ctx := NewAPITestContext(t, baseCtx.Username, baseCtx.Reponame) + + t.Run("CheckoutProtected", doGitCheckoutBranch(dstPath, "protected")) + t.Run("PullProtected", doGitPull(dstPath, "origin", "protected")) + t.Run("GenerateCommit", func(t *testing.T) { + _, err := generateCommitWithNewData(littleSize, dstPath, "user2@example.com", "User Two", "branch-data-file-") + assert.NoError(t, err) + }) + 
t.Run("PushToUnprotectedBranch", doGitPushTestRepository(dstPath, "origin", "protected:unprotected3")) + var pr api.PullRequest + var err error + t.Run("CreatePullRequest", func(t *testing.T) { + pr, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, "protected", "unprotected3")(t) + assert.NoError(t, err) + }) + + // Request repository commits page + req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits", baseCtx.Username, baseCtx.Reponame, pr.Index)) + resp := ctx.Session.MakeRequest(t, req, http.StatusOK) + doc := NewHTMLParser(t, resp.Body) + + // Get first commit URL + commitURL, exists := doc.doc.Find("#commits-table tbody tr td.sha a").Last().Attr("href") + assert.True(t, exists) + assert.NotEmpty(t, commitURL) + + commitID := path.Base(commitURL) + + // Call API to add Pending status for commit + t.Run("CreateStatus", doAPICreateCommitStatus(ctx, commitID, api.CommitStatusPending)) + + // Cancel not existing auto merge + ctx.ExpectedCode = http.StatusNotFound + t.Run("CancelAutoMergePR", doAPICancelAutoMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)) + + // Add auto merge request + ctx.ExpectedCode = http.StatusCreated + t.Run("AutoMergePR", doAPIAutoMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)) + + // Can not create schedule twice + ctx.ExpectedCode = http.StatusConflict + t.Run("AutoMergePRTwice", doAPIAutoMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)) + + // Cancel auto merge request + ctx.ExpectedCode = http.StatusNoContent + t.Run("CancelAutoMergePR", doAPICancelAutoMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)) + + // Add auto merge request + ctx.ExpectedCode = http.StatusCreated + t.Run("AutoMergePR", doAPIAutoMergePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)) + + // Check pr status + ctx.ExpectedCode = 0 + pr, err = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t) + assert.NoError(t, err) + assert.False(t, pr.HasMerged) + + // Call API to add Failure status for commit + t.Run("CreateStatus", doAPICreateCommitStatus(ctx, commitID, api.CommitStatusFailure)) + + // Check pr status + pr, err = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t) + assert.NoError(t, err) + assert.False(t, pr.HasMerged) + + // Call API to add Success status for commit + t.Run("CreateStatus", doAPICreateCommitStatus(ctx, commitID, api.CommitStatusSuccess)) + + // wait to let gitea merge stuff + time.Sleep(time.Second) + + // test pr status + pr, err = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t) + assert.NoError(t, err) + assert.True(t, pr.HasMerged) + } +} + func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headBranch string) func(t *testing.T) { return func(t *testing.T) { defer PrintCurrentTest(t)() @@ -624,7 +707,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB return } - gitRepo, err := git.OpenRepository(dstPath) + gitRepo, err := git.OpenRepository(git.DefaultContext, dstPath) if !assert.NoError(t, err) { return } @@ -671,7 +754,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB }) t.Run("Push", func(t *testing.T) { - _, err := git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master", "-o", "topic="+headBranch).RunInDir(dstPath) + err := git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master", "-o", "topic="+headBranch).Run(&git.RunOpts{Dir: dstPath}) 
if !assert.NoError(t, err) { return } @@ -692,7 +775,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB assert.Contains(t, "Testing commit 1", prMsg.Body) assert.Equal(t, commit, prMsg.Head.Sha) - _, err = git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master/test/"+headBranch).RunInDir(dstPath) + _, _, err = git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master/test/"+headBranch).RunStdString(&git.RunOpts{Dir: dstPath}) if !assert.NoError(t, err) { return } @@ -745,7 +828,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB }) t.Run("Push2", func(t *testing.T) { - _, err := git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master", "-o", "topic="+headBranch).RunInDir(dstPath) + err := git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master", "-o", "topic="+headBranch).Run(&git.RunOpts{Dir: dstPath}) if !assert.NoError(t, err) { return } @@ -757,7 +840,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, baseBranch, headB assert.Equal(t, false, prMsg.HasMerged) assert.Equal(t, commit, prMsg.Head.Sha) - _, err = git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master/test/"+headBranch).RunInDir(dstPath) + _, _, err = git.NewCommand(git.DefaultContext, "push", "origin", "HEAD:refs/for/master/test/"+headBranch).RunStdString(&git.RunOpts{Dir: dstPath}) if !assert.NoError(t, err) { return } diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. 
-# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/fsmonitor-watchman.sample deleted file mode 100755 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . 
- "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-commit.sample deleted file mode 100755 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. 
-allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. 
-# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. 
More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. 
Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/update.sample b/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/private_repo_on_limited_org.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." 
>&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. 
- exit 1 -} diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/fsmonitor-watchman.sample deleted file mode 100755 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? 
!= 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-commit.sample deleted file mode 100755 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. 
- test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. 
- -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". 
- -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/update.sample b/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/limited_org/public_repo_on_limited_org.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." 
>&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/applypatch-msg.sample deleted file mode 100644 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/commit-msg.sample deleted file mode 100644 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. 
- exit 1 -} diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/fsmonitor-watchman.sample deleted file mode 100644 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? 
!= 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/post-update.sample deleted file mode 100644 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-applypatch.sample deleted file mode 100644 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-commit.sample deleted file mode 100644 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. 
- test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-push.sample deleted file mode 100644 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-rebase.sample deleted file mode 100644 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. 
- ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. 
- - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-receive.sample deleted file mode 100644 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/prepare-commit-msg.sample deleted file mode 100644 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/update.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/update.sample deleted file mode 100644 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." 
>&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/applypatch-msg.sample deleted file mode 100644 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/commit-msg.sample deleted file mode 100644 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. 
- exit 1 -} diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/fsmonitor-watchman.sample deleted file mode 100644 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? 
!= 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/post-update.sample deleted file mode 100644 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-applypatch.sample deleted file mode 100644 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-commit.sample deleted file mode 100644 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. 
- test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-push.sample deleted file mode 100644 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-rebase.sample deleted file mode 100644 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. 
- ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. 
- - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-receive.sample deleted file mode 100644 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/prepare-commit-msg.sample deleted file mode 100644 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/update.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/update.sample deleted file mode 100644 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_alpha.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." 
>&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/applypatch-msg.sample deleted file mode 100644 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/commit-msg.sample deleted file mode 100644 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. 
- exit 1 -} diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/fsmonitor-watchman.sample deleted file mode 100644 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? 
!= 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/post-update.sample deleted file mode 100644 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-applypatch.sample deleted file mode 100644 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-commit.sample deleted file mode 100644 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. 
- test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-push.sample deleted file mode 100644 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-rebase.sample deleted file mode 100644 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. 
- -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". 
- -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-receive.sample deleted file mode 100644 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/prepare-commit-msg.sample deleted file mode 100644 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/update.sample b/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/update.sample deleted file mode 100644 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/org26/repo_external_tracker_numeric.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." 
>&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. 
- exit 1 -} diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/fsmonitor-watchman.sample deleted file mode 100755 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? 
!= 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-commit.sample deleted file mode 100755 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. 
- test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. 
- -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". 
- -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/update.sample b/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/private_repo_on_private_org.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." 
>&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. 
- exit 1 -} diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/fsmonitor-watchman.sample deleted file mode 100755 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? 
!= 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-commit.sample deleted file mode 100755 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. 
- test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. 
- -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". 
- -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/update.sample b/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/privated_org/public_repo_on_private_org.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." 
>&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". 
- -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. 
-# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. 
- else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. 
-# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/update.sample b/integrations/gitea-repositories-meta/user12/repo10.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user12/repo10.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. 
-zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. 
-# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. 
- -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." 
- exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". 
- -DOC_END diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/update.sample b/integrations/gitea-repositories-meta/user13/repo11.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user13/repo11.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." 
>&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. 
git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". 
- -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). 
- -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. 
- -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. 
This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/commits_search_test.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." 
>&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. 
-# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. 
If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. 
- else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. 
-# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/git_hooks_test.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. 
-zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. 
-# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/fsmonitor-watchman.sample deleted file mode 100755 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . 
- "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-commit.sample deleted file mode 100755 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. 
We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. 
- -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". 
- -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/user2/glob.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/glob.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/glob.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." 
>&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". 
- -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. 
-# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. 
- else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. 
-# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/repo1.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. 
-zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. 
-# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. 
- -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? 
-not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. 
- -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo1.wiki.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." 
>&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. 
git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". 
- -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). 
- -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). 
- - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/repo15.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo15.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." 
>&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". 
- -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. 
-# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-rebase.sample deleted file mode 100755 index 9773ed4cb2..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. 
- else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -exit 0 - -################################################################ - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. 
-# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/repo16.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo16.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." 
>&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. 
git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". 
- -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). 
- -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). 
- - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/repo2.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo2.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." 
>&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/fsmonitor-watchman.sample deleted file mode 100755 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. 
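The fsmonitor-watchman file being removed here is the stock upstream sample; for reference, it only takes effect in a working clone where Watchman is installed and the hook is wired up the way the sample's own comments go on to describe. A minimal sketch, assuming Watchman is on PATH:

    # file name follows the sample's own "query-watchman" suggestion;
    # nothing in this patch configures it
    cp .git/hooks/fsmonitor-watchman.sample .git/hooks/query-watchman
    git config core.fsmonitor .git/hooks/query-watchman
    git status    # later status/diff calls can now skip full worktree scans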
-# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . 
- "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. 
-exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. 
-only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". 
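As the sample's closing notes point out, the hard-coded "next" publish branch could instead be read from $GIT_DIR/config; a minimal sketch of that change, using a hypothetical hooks.publishbranch key, would be:

    # hypothetical key name, shown only for illustration; falls back to the
    # sample's hard-coded "next" when the key is not set
    publish=$(git config --get hooks.publishbranch || echo next)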
- -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/repo20.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/repo20.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. 
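The pre-receive sample deleted just above reacts to push options; a client would exercise it roughly like this (remote and branch names are placeholders, and the receiving side must advertise push options):

    git push -o echoback=hello origin main   # hook echoes "hello" back to the client
    git push -o reject origin main           # hook exits 1 and the push is refused
    # the server side needs: git config receive.advertisePushOptions true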
-# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) 
- echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. 
git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). 
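Each line the loop below reads from stdin carries four whitespace-separated fields (local ref, local sha1, remote ref, remote sha1); a quick way to see the WIP check fire, assuming the sample were installed as pre-push in a clone, is:

    git commit --allow-empty -m "WIP: not ready yet"
    git push origin HEAD    # hook spots the WIP commit and aborts the push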
- -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). 
- - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/update.sample b/integrations/gitea-repositories-meta/user2/utf8.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user2/utf8.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." 
>&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/applypatch-msg.sample deleted file mode 100644 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/commit-msg.sample deleted file mode 100644 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/fsmonitor-watchman.sample deleted file mode 100644 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. 
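The commit-msg sample removed a little further up rejects repeated Signed-off-by trailers; a hypothetical commit that would trip it, assuming that sample were enabled as commit-msg (the identity is a placeholder):

    git commit --allow-empty \
        -m "fix build" \
        -m "Signed-off-by: A U Thor <author@example.com>" \
        -m "Signed-off-by: A U Thor <author@example.com>"
    # -> "Duplicate Signed-off-by lines." and the commit is aborted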
-# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . 
- "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/post-update.sample deleted file mode 100644 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-applypatch.sample deleted file mode 100644 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-commit.sample deleted file mode 100644 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. 
-exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-push.sample deleted file mode 100644 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-rebase.sample deleted file mode 100644 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. 
-only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". 
- -DOC_END diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-receive.sample deleted file mode 100644 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/prepare-commit-msg.sample deleted file mode 100644 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/update.sample b/integrations/gitea-repositories-meta/user27/repo49.git/hooks/update.sample deleted file mode 100644 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user27/repo49.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. 
-# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) 
- echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/applypatch-msg.sample deleted file mode 100644 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/commit-msg.sample deleted file mode 100644 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/fsmonitor-watchman.sample deleted file mode 100644 index e673bb3980..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . 
- "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/post-update.sample deleted file mode 100644 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". 
- -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-applypatch.sample deleted file mode 100644 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-commit.sample deleted file mode 100644 index 6a75641638..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-push.sample deleted file mode 100644 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. 
-# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-rebase.sample deleted file mode 100644 index 6cbef5c370..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. 
- else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-receive.sample deleted file mode 100644 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. 
-# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/prepare-commit-msg.sample deleted file mode 100644 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/user27/template1.git/hooks/update.sample b/integrations/gitea-repositories-meta/user27/template1.git/hooks/update.sample deleted file mode 100644 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user27/template1.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. 
-# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. 
-# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". 
- -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). 
- -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). 
- - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. 
-# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/update.sample b/integrations/gitea-repositories-meta/user3/repo3.git/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/integrations/gitea-repositories-meta/user3/repo3.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. 
-zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/applypatch-msg.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/commit-msg.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. 
-# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/fsmonitor-watchman.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/fsmonitor-watchman.sample deleted file mode 100755 index 14ed0aa42d..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,173 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 2) and last update token -# formatted as a string and outputs to stdout a new update token and -# all files that have been modified since the update token. Paths must -# be relative to the root of the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $last_update_token) = @ARGV; - -# Uncomment for debugging -# print STDERR "$0 $version $last_update_token\n"; - -# Check the hook interface version -if ($version ne 2) { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree = get_working_dir(); - -my $retry = 1; - -my $json_pkg; -eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; -} or do { - require JSON::PP; - $json_pkg = "JSON::PP"; -}; - -launch_watchman(); - -sub launch_watchman { - my $o = watchman_query(); - if (is_work_tree_watched($o)) { - output_result($o->{clock}, @{$o->{files}}); - } -} - -sub output_result { - my ($clockid, @files) = @_; - - # Uncomment for debugging watchman output - # open (my $fh, ">", ".git/watchman-output.out"); - # binmode $fh, ":utf8"; - # print $fh "$clockid\n@files\n"; - # close $fh; - - binmode STDOUT, ":utf8"; - print $clockid; - print "\0"; - local $, = "\0"; - print @files; -} - -sub watchman_clock { - my $response = qx/watchman clock "$git_work_tree"/; - die "Failed to get clock id on '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - return $json_pkg->new->utf8->decode($response); -} - -sub watchman_query { - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $last_update_token but not from the .git folder. - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. 
- if (substr($last_update_token, 0, 1) eq "c") { - $last_update_token = "\"$last_update_token\""; - } - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $last_update_token, - "fields": ["name"], - "expression": ["not", ["dirname", ".git"]] - }] - END - - # Uncomment for debugging the watchman query - # open (my $fh, ">", ".git/watchman-query.json"); - # print $fh $query; - # close $fh; - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - # Uncomment for debugging the watch response - # open ($fh, ">", ".git/watchman-response.json"); - # print $fh $response; - # close $fh; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - return $json_pkg->new->utf8->decode($response); -} - -sub is_work_tree_watched { - my ($output) = @_; - my $error = $output->{error}; - if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) { - $retry--; - my $response = qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - $output = $json_pkg->new->utf8->decode($response); - $error = $output->{error}; - die "Watchman: $error.\n" . - "Falling back to scanning...\n" if $error; - - # Uncomment for debugging watchman output - # open (my $fh, ">", ".git/watchman-output.out"); - # close $fh; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - my $o = watchman_clock(); - $error = $output->{error}; - - die "Watchman: $error.\n" . - "Falling back to scanning...\n" if $error; - - output_result($o->{clock}, ("/")); - $last_update_token = $o->{clock}; - - eval { launch_watchman() }; - return 0; - } - - die "Watchman: $error.\n" . - "Falling back to scanning...\n" if $error; - - return 1; -} - -sub get_working_dir { - my $working_dir; - if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $working_dir = Win32::GetCwd(); - $working_dir =~ tr/\\/\//; - } else { - require Cwd; - $working_dir = Cwd::cwd(); - } - - return $working_dir; -} diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/post-update.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-applypatch.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. 
-# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-commit.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-commit.sample deleted file mode 100755 index e144712c85..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --type=bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-merge-commit.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-merge-commit.sample deleted file mode 100755 index 399eab1924..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-merge-commit.sample +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git merge" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message to -# stderr if it wants to stop the merge commit. -# -# To enable this hook, rename this file to "pre-merge-commit". - -. git-sh-setup -test -x "$GIT_DIR/hooks/pre-commit" && - exec "$GIT_DIR/hooks/pre-commit" -: diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-push.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. 
If this script exits with a non-zero status nothing will be pushed.
-#
-# This hook is called with the following parameters:
-#
-# $1 -- Name of the remote to which the push is being done
-# $2 -- URL to which the push is being done
-#
-# If pushing without using a named remote those arguments will be equal.
-#
-# Information about the commits which are being pushed is supplied as lines to
-# the standard input in the form:
-#
-#   <local ref> <local sha1> <remote ref> <remote sha1>
-#
-# This sample shows how to prevent push of commits where the log message starts
-# with "WIP" (work in progress).
-
-remote="$1"
-url="$2"
-
-z40=0000000000000000000000000000000000000000
-
-while read local_ref local_sha remote_ref remote_sha
-do
-	if [ "$local_sha" = $z40 ]
-	then
-		# Handle delete
-		:
-	else
-		if [ "$remote_sha" = $z40 ]
-		then
-			# New branch, examine all commits
-			range="$local_sha"
-		else
-			# Update to existing branch, examine new commits
-			range="$remote_sha..$local_sha"
-		fi
-
-		# Check for WIP commit
-		commit=`git rev-list -n 1 --grep '^WIP' "$range"`
-		if [ -n "$commit" ]
-		then
-			echo >&2 "Found WIP commit in $local_ref, not pushing"
-			exit 1
-		fi
-	fi
-done
-
-exit 0
diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-rebase.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-rebase.sample
deleted file mode 100755
index 6cbef5c370..0000000000
--- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-rebase.sample
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/bin/sh
-#
-# Copyright (c) 2006, 2008 Junio C Hamano
-#
-# The "pre-rebase" hook is run just before "git rebase" starts doing
-# its job, and can prevent the command from running by exiting with
-# non-zero status.
-#
-# The hook is called with the following parameters:
-#
-# $1 -- the upstream the series was forked from.
-# $2 -- the branch being rebased (or empty when rebasing the current branch).
-#
-# This sample shows how to prevent topic branches that are already
-# merged to 'next' branch from getting rebased, because allowing it
-# would result in rebasing already published history.

-publish=next
-basebranch="$1"
-if test "$#" = 2
-then
-	topic="refs/heads/$2"
-else
-	topic=`git symbolic-ref HEAD` ||
-	exit 0 ;# we do not interrupt rebasing detached HEAD
-fi
-
-case "$topic" in
-refs/heads/??/*)
-	;;
-*)
-	exit 0 ;# we do not interrupt others.
-	;;
-esac
-
-# Now we are dealing with a topic branch being rebased
-# on top of master. Is it OK to rebase it?
-
-# Does the topic really exist?
-git show-ref -q "$topic" || {
-	echo >&2 "No such branch $topic"
-	exit 1
-}
-
-# Is topic fully merged to master?
-not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
-if test -z "$not_in_master"
-then
-	echo >&2 "$topic is fully merged to master; better remove it."
-	exit 1 ;# we could allow it, but there is no point.
-fi
-
-# Is topic ever merged to next? If so you should not be rebasing it.
-only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
-only_next_2=`git rev-list ^master ${publish} | sort`
-if test "$only_next_1" = "$only_next_2"
-then
-	not_in_topic=`git rev-list "^$topic" master`
-	if test -z "$not_in_topic"
-	then
-		echo >&2 "$topic is already up to date with master"
-		exit 1 ;# we could allow it, but there is no point.
- else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-receive.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. 
-# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/prepare-commit-msg.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/update.sample b/integrations/gitea-repositories-meta/user5/repo4.git/hooks/update.sample deleted file mode 100755 index 5014c4b31c..0000000000 --- a/integrations/gitea-repositories-meta/user5/repo4.git/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. 
-#
-
-# --- Command line
-refname="$1"
-oldrev="$2"
-newrev="$3"
-
-# --- Safety check
-if [ -z "$GIT_DIR" ]; then
-	echo "Don't run this script from the command line." >&2
-	echo " (if you want, you could supply GIT_DIR then run" >&2
-	echo "  $0 <ref> <oldrev> <newrev>)" >&2
-	exit 1
-fi
-
-if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
-	echo "usage: $0 <ref> <oldrev> <newrev>" >&2
-	exit 1
-fi
-
-# --- Config
-allowunannotated=$(git config --type=bool hooks.allowunannotated)
-allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
-denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
-allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
-allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
-
-# check for no description
-projectdesc=$(sed -e '1q' "$GIT_DIR/description")
-case "$projectdesc" in
-"Unnamed repository"* | "")
-	echo "*** Project description file hasn't been set" >&2
-	exit 1
-	;;
-esac
-
-# --- Check types
-# if $newrev is 0000...0000, it's a commit to delete a ref.
-zero="0000000000000000000000000000000000000000"
-if [ "$newrev" = "$zero" ]; then
-	newrev_type=delete
-else
-	newrev_type=$(git cat-file -t $newrev)
-fi
-
-case "$refname","$newrev_type" in
-	refs/tags/*,commit)
-		# un-annotated tag
-		short_refname=${refname##refs/tags/}
-		if [ "$allowunannotated" != "true" ]; then
-			echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
-			echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
-			exit 1
-		fi
-		;;
-	refs/tags/*,delete)
-		# delete tag
-		if [ "$allowdeletetag" != "true" ]; then
-			echo "*** Deleting a tag is not allowed in this repository" >&2
-			exit 1
-		fi
-		;;
-	refs/tags/*,tag)
-		# annotated tag
-		if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
-		then
-			echo "*** Tag '$refname' already exists." >&2
-			echo "*** Modifying a tag is not allowed in this repository." >&2
-			exit 1
-		fi
-		;;
-	refs/heads/*,commit)
-		# branch
-		if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
-			echo "*** Creating a branch is not allowed in this repository" >&2
-			exit 1
-		fi
-		;;
-	refs/heads/*,delete)
-		# delete branch
-		if [ "$allowdeletebranch" != "true" ]; then
-			echo "*** Deleting a branch is not allowed in this repository" >&2
-			exit 1
-		fi
-		;;
-	refs/remotes/*,commit)
-		# tracking branch
-		;;
-	refs/remotes/*,delete)
-		# delete tracking branch
-		if [ "$allowdeletebranch" != "true" ]; then
-			echo "*** Deleting a tracking branch is not allowed in this repository" >&2
-			exit 1
-		fi
-		;;
-	*)
-		# Anything else (is there anything else?)
- echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/integrations/goget_test.go b/integrations/goget_test.go index 5dc9c5e0a8..504d869990 100644 --- a/integrations/goget_test.go +++ b/integrations/goget_test.go @@ -29,8 +29,7 @@ func TestGoGet(t *testing.T) { go get --insecure %[1]s:%[2]s/blah/glah - -`, setting.Domain, setting.HTTPPort, setting.AppURL) +`, setting.Domain, setting.HTTPPort, setting.AppURL) assert.Equal(t, expected, resp.Body.String()) } diff --git a/integrations/integration_test.go b/integrations/integration_test.go index dfa5bade78..4df485a6e8 100644 --- a/integrations/integration_test.go +++ b/integrations/integration_test.go @@ -24,7 +24,6 @@ import ( "testing" "time" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/unittest" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/git" @@ -32,6 +31,7 @@ import ( "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/queue" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/util" @@ -112,6 +112,13 @@ func TestMain(m *testing.M) { } } + os.Unsetenv("GIT_AUTHOR_NAME") + os.Unsetenv("GIT_AUTHOR_EMAIL") + os.Unsetenv("GIT_AUTHOR_DATE") + os.Unsetenv("GIT_COMMITTER_NAME") + os.Unsetenv("GIT_COMMITTER_EMAIL") + os.Unsetenv("GIT_COMMITTER_DATE") + err := unittest.InitFixtures( unittest.FixturesOptions{ Dir: filepath.Join(filepath.Dir(setting.AppPath), "models/fixtures/"), @@ -165,7 +172,8 @@ func initIntegrationTest() { setting.SetCustomPathAndConf("", "", "") setting.LoadForTest() - _ = util.RemoveAll(models.LocalCopyPath()) + setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master" + _ = util.RemoveAll(repo_module.LocalCopyPath()) git.CheckLFSVersion() setting.InitDBConfig() if err := storage.Init(); err != nil { @@ -254,7 +262,7 @@ func prepareTestEnv(t testing.TB, skip ...int) func() { assert.NoError(t, unittest.LoadFixtures()) assert.NoError(t, util.RemoveAll(setting.RepoRootPath)) - assert.NoError(t, util.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) + assert.NoError(t, unittest.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) ownerDirs, err := os.ReadDir(setting.RepoRootPath) if err != nil { assert.NoError(t, err, "unable to read the new repo root: %v\n", err) @@ -359,6 +367,10 @@ func emptyTestSession(t testing.TB) *TestSession { return &TestSession{jar: jar} } +func getUserToken(t testing.TB, userName string) string { + return getTokenForLoggedInUser(t, loginUser(t, userName)) +} + func loginUser(t testing.TB, userName string) *TestSession { t.Helper() if session, ok := loginSessionCache[userName]; ok { @@ -380,7 +392,7 @@ func loginUserWithPassword(t testing.TB, userName, password string) *TestSession "user_name": userName, "password": password, }) - resp = MakeRequest(t, req, http.StatusFound) + resp = MakeRequest(t, req, http.StatusSeeOther) ch := http.Header{} ch.Add("Cookie", strings.Join(resp.Header()["Set-Cookie"], ";")) @@ -408,7 +420,7 @@ func getTokenForLoggedInUser(t testing.TB, session *TestSession) string { "_csrf": doc.GetCSRF(), "name": fmt.Sprintf("api-testing-token-%d", tokenCounter), }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = 
session.MakeRequest(t, req, http.StatusSeeOther) req = NewRequest(t, "GET", "/user/settings/applications") resp = session.MakeRequest(t, req, http.StatusOK) htmlDoc := NewHTMLParser(t, resp.Body) @@ -550,7 +562,7 @@ func resetFixtures(t *testing.T) { assert.NoError(t, queue.GetManager().FlushAll(context.Background(), -1)) assert.NoError(t, unittest.LoadFixtures()) assert.NoError(t, util.RemoveAll(setting.RepoRootPath)) - assert.NoError(t, util.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) + assert.NoError(t, unittest.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) ownerDirs, err := os.ReadDir(setting.RepoRootPath) if err != nil { assert.NoError(t, err, "unable to read the new repo root: %v\n", err) diff --git a/integrations/issue_test.go b/integrations/issue_test.go index 29de774ee4..c6b801c9a6 100644 --- a/integrations/issue_test.go +++ b/integrations/issue_test.go @@ -7,6 +7,7 @@ package integrations import ( "fmt" "net/http" + "net/url" "path" "strconv" "strings" @@ -20,6 +21,7 @@ import ( "code.gitea.io/gitea/modules/indexer/issues" "code.gitea.io/gitea/modules/references" "code.gitea.io/gitea/modules/setting" + api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/test" "github.com/PuerkitoBio/goquery" @@ -132,7 +134,7 @@ func testNewIssue(t *testing.T, session *TestSession, user, repo, title, content "title": title, "content": content, }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) issueURL := test.RedirectURL(resp) req = NewRequest(t, "GET", issueURL) @@ -162,7 +164,7 @@ func testIssueAddComment(t *testing.T, session *TestSession, issueURL, content, "content": content, "status": status, }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) req = NewRequest(t, "GET", test.RedirectURL(resp)) resp = session.MakeRequest(t, req, http.StatusOK) @@ -334,16 +336,224 @@ func TestIssueRedirect(t *testing.T) { // Test external tracker where style not set (shall default numeric) req := NewRequest(t, "GET", path.Join("org26", "repo_external_tracker", "issues", "1")) - resp := session.MakeRequest(t, req, http.StatusFound) + resp := session.MakeRequest(t, req, http.StatusSeeOther) assert.Equal(t, "https://tracker.com/org26/repo_external_tracker/issues/1", test.RedirectURL(resp)) // Test external tracker with numeric style req = NewRequest(t, "GET", path.Join("org26", "repo_external_tracker_numeric", "issues", "1")) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) assert.Equal(t, "https://tracker.com/org26/repo_external_tracker_numeric/issues/1", test.RedirectURL(resp)) // Test external tracker with alphanumeric style (for a pull request) req = NewRequest(t, "GET", path.Join("org26", "repo_external_tracker_alpha", "issues", "1")) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) assert.Equal(t, "/"+path.Join("org26", "repo_external_tracker_alpha", "pulls", "1"), test.RedirectURL(resp)) } + +func TestSearchIssues(t *testing.T) { + defer prepareTestEnv(t)() + + session := loginUser(t, "user2") + + link, _ := url.Parse("/issues/search") + req := NewRequest(t, "GET", link.String()) + resp := session.MakeRequest(t, req, http.StatusOK) + var apiIssues []*api.Issue + DecodeJSON(t, resp, &apiIssues) + 
assert.Len(t, apiIssues, 10) + + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 10) + + since := "2000-01-01T00%3A50%3A01%2B00%3A00" // 946687801 + before := time.Unix(999307200, 0).Format(time.RFC3339) + query := url.Values{} + query.Add("since", since) + query.Add("before", before) + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 8) + query.Del("since") + query.Del("before") + + query.Add("state", "closed") + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 2) + + query.Set("state", "all") + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.EqualValues(t, "15", resp.Header().Get("X-Total-Count")) + assert.Len(t, apiIssues, 10) // there are more but 10 is page item limit + + query.Add("limit", "20") + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 15) + + query = url.Values{"assigned": {"true"}, "state": {"all"}} + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 1) + + query = url.Values{"milestones": {"milestone1"}, "state": {"all"}} + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 1) + + query = url.Values{"milestones": {"milestone1,milestone3"}, "state": {"all"}} + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 2) + + query = url.Values{"owner": {"user2"}} // user + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 6) + + query = url.Values{"owner": {"user3"}} // organization + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 3) + + query = url.Values{"owner": {"user3"}, "team": {"team1"}} // organization + team + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 2) +} + +func TestSearchIssuesWithLabels(t *testing.T) { + defer prepareTestEnv(t)() + + token := getUserToken(t, "user1") + + link, _ := url.Parse("/api/v1/repos/issues/search?token=" + token) + req := NewRequest(t, "GET", link.String()) + resp := MakeRequest(t, req, http.StatusOK) + var apiIssues []*api.Issue + DecodeJSON(t, resp, &apiIssues) + + assert.Len(t, apiIssues, 10) + + query := url.Values{ + "token": []string{token}, + } + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, 
resp, &apiIssues) + assert.Len(t, apiIssues, 10) + + query.Add("labels", "label1") + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 2) + + // multiple labels + query.Set("labels", "label1,label2") + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 2) + + // an org label + query.Set("labels", "orglabel4") + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 1) + + // org and repo label + query.Set("labels", "label2,orglabel4") + query.Add("state", "all") + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 2) + + // org and repo label which share the same issue + query.Set("labels", "label1,orglabel4") + link.RawQuery = query.Encode() + req = NewRequest(t, "GET", link.String()) + resp = MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &apiIssues) + assert.Len(t, apiIssues, 2) +} + +func TestGetIssueInfo(t *testing.T) { + defer prepareTestEnv(t)() + + issue := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue) + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue.RepoID}).(*repo_model.Repository) + owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID}).(*user_model.User) + assert.NoError(t, issue.LoadAttributes()) + assert.Equal(t, int64(1019307200), int64(issue.DeadlineUnix)) + assert.Equal(t, api.StateOpen, issue.State()) + + session := loginUser(t, owner.Name) + + urlStr := fmt.Sprintf("/%s/%s/issues/%d/info", owner.Name, repo.Name, issue.Index) + req := NewRequest(t, "GET", urlStr) + resp := session.MakeRequest(t, req, http.StatusOK) + var apiIssue api.Issue + DecodeJSON(t, resp, &apiIssue) + + assert.EqualValues(t, issue.ID, apiIssue.ID) +} + +func TestUpdateIssueDeadline(t *testing.T) { + defer prepareTestEnv(t)() + + issueBefore := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 10}).(*models.Issue) + repoBefore := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issueBefore.RepoID}).(*repo_model.Repository) + owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repoBefore.OwnerID}).(*user_model.User) + assert.NoError(t, issueBefore.LoadAttributes()) + assert.Equal(t, int64(1019307200), int64(issueBefore.DeadlineUnix)) + assert.Equal(t, api.StateOpen, issueBefore.State()) + + session := loginUser(t, owner.Name) + + issueURL := fmt.Sprintf("%s/%s/issues/%d", owner.Name, repoBefore.Name, issueBefore.Index) + req := NewRequest(t, "GET", issueURL) + resp := session.MakeRequest(t, req, http.StatusOK) + htmlDoc := NewHTMLParser(t, resp.Body) + + urlStr := issueURL + "/deadline?_csrf=" + htmlDoc.GetCSRF() + req = NewRequestWithJSON(t, "POST", urlStr, map[string]string{ + "due_date": "2022-04-06T00:00:00.000Z", + }) + + resp = session.MakeRequest(t, req, http.StatusCreated) + var apiIssue api.IssueDeadline + DecodeJSON(t, resp, &apiIssue) + + assert.EqualValues(t, "2022-04-06", apiIssue.Deadline.Format("2006-01-02")) +} diff --git a/integrations/links_test.go b/integrations/links_test.go index f514aa7757..bc87ffad83 100644 --- a/integrations/links_test.go +++ 
b/integrations/links_test.go @@ -59,7 +59,7 @@ func TestRedirectsNoLogin(t *testing.T) { } for link, redirectLink := range redirects { req := NewRequest(t, "GET", link) - resp := MakeRequest(t, req, http.StatusFound) + resp := MakeRequest(t, req, http.StatusSeeOther) assert.EqualValues(t, path.Join(setting.AppSubURL, redirectLink), test.RedirectURL(resp)) } } diff --git a/integrations/migration-test/migration_test.go b/integrations/migration-test/migration_test.go index 0518dd1179..6e55807c27 100644 --- a/integrations/migration-test/migration_test.go +++ b/integrations/migration-test/migration_test.go @@ -21,6 +21,7 @@ import ( "code.gitea.io/gitea/integrations" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/migrations" + "code.gitea.io/gitea/models/unittest" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/git" @@ -60,7 +61,7 @@ func initMigrationTest(t *testing.T) func() { assert.True(t, len(setting.RepoRootPath) != 0) assert.NoError(t, util.RemoveAll(setting.RepoRootPath)) - assert.NoError(t, util.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) + assert.NoError(t, unittest.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) ownerDirs, err := os.ReadDir(setting.RepoRootPath) if err != nil { assert.NoError(t, err, "unable to read the new repo root: %v\n", err) diff --git a/integrations/mirror_pull_test.go b/integrations/mirror_pull_test.go index eb798fe2ce..dd66974e04 100644 --- a/integrations/mirror_pull_test.go +++ b/integrations/mirror_pull_test.go @@ -46,13 +46,14 @@ func TestMirrorPull(t *testing.T) { Status: repo_model.RepositoryBeingMigrated, }) assert.NoError(t, err) + assert.True(t, mirrorRepo.IsMirror, "expected pull-mirror repo to be marked as a mirror immediately after its creation") ctx := context.Background() mirror, err := repository.MigrateRepositoryGitData(ctx, user, mirrorRepo, opts, nil) assert.NoError(t, err) - gitRepo, err := git.OpenRepository(repoPath) + gitRepo, err := git.OpenRepository(git.DefaultContext, repoPath) assert.NoError(t, err) defer gitRepo.Close() diff --git a/integrations/mirror_push_test.go b/integrations/mirror_push_test.go index b231b7a919..a73b69e786 100644 --- a/integrations/mirror_push_test.go +++ b/integrations/mirror_push_test.go @@ -54,14 +54,14 @@ func testMirrorPush(t *testing.T, u *url.URL) { ok := mirror_service.SyncPushMirror(context.Background(), mirrors[0].ID) assert.True(t, ok) - srcGitRepo, err := git.OpenRepository(srcRepo.RepoPath()) + srcGitRepo, err := git.OpenRepository(git.DefaultContext, srcRepo.RepoPath()) assert.NoError(t, err) defer srcGitRepo.Close() srcCommit, err := srcGitRepo.GetBranchCommit("master") assert.NoError(t, err) - mirrorGitRepo, err := git.OpenRepository(mirrorRepo.RepoPath()) + mirrorGitRepo, err := git.OpenRepository(git.DefaultContext, mirrorRepo.RepoPath()) assert.NoError(t, err) defer mirrorGitRepo.Close() @@ -89,7 +89,7 @@ func doCreatePushMirror(ctx APITestContext, address, username, password string) "push_mirror_password": password, "push_mirror_interval": "0", }) - ctx.Session.MakeRequest(t, req, http.StatusFound) + ctx.Session.MakeRequest(t, req, http.StatusSeeOther) flashCookie := ctx.Session.GetCookie("macaron_flash") assert.NotNil(t, flashCookie) @@ -110,7 +110,7 @@ func doRemovePushMirror(ctx APITestContext, address, username, password string, "push_mirror_password": password, "push_mirror_interval": "0", 
}) - ctx.Session.MakeRequest(t, req, http.StatusFound) + ctx.Session.MakeRequest(t, req, http.StatusSeeOther) flashCookie := ctx.Session.GetCookie("macaron_flash") assert.NotNil(t, flashCookie) diff --git a/integrations/mssql.ini.tmpl b/integrations/mssql.ini.tmpl index 00f55ec620..b076bb863c 100644 --- a/integrations/mssql.ini.tmpl +++ b/integrations/mssql.ini.tmpl @@ -102,3 +102,6 @@ INTERNAL_TOKEN = eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYmYiOjE0OTU1NTE2MTh9.h [lfs] PATH = integrations/gitea-integration-mssql/data/lfs + +[packages] +ENABLED = true diff --git a/integrations/mysql.ini.tmpl b/integrations/mysql.ini.tmpl index c1780110a3..6a0fd4ab86 100644 --- a/integrations/mysql.ini.tmpl +++ b/integrations/mysql.ini.tmpl @@ -118,3 +118,6 @@ DISABLE_GIT_HOOKS = false INSTALL_LOCK = true SECRET_KEY = 9pCviYTWSb INTERNAL_TOKEN = eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYmYiOjE0OTU1NTE2MTh9.hhSVGOANkaKk3vfCd2jDOIww4pUk0xtg9JRde5UogyQ + +[packages] +ENABLED = true diff --git a/integrations/mysql8.ini.tmpl b/integrations/mysql8.ini.tmpl index 0d898ac13d..16f8851fe5 100644 --- a/integrations/mysql8.ini.tmpl +++ b/integrations/mysql8.ini.tmpl @@ -99,3 +99,6 @@ INTERNAL_TOKEN = eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYmYiOjE0OTU1NTE2MTh9.h [lfs] PATH = integrations/gitea-integration-mysql8/data/lfs + +[packages] +ENABLED = true diff --git a/integrations/nonascii_branches_test.go b/integrations/nonascii_branches_test.go index f2992ecc1f..5ab7b8526a 100644 --- a/integrations/nonascii_branches_test.go +++ b/integrations/nonascii_branches_test.go @@ -18,7 +18,7 @@ func testSrcRouteRedirect(t *testing.T, session *TestSession, user, repo, route, // Make request req := NewRequest(t, "GET", path.Join(prefix, route)) - resp := session.MakeRequest(t, req, http.StatusFound) + resp := session.MakeRequest(t, req, http.StatusSeeOther) // Check Location header location := resp.HeaderMap.Get("Location") @@ -37,7 +37,7 @@ func setDefaultBranch(t *testing.T, session *TestSession, user, repo, branch str "action": "default_branch", "branch": branch, }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) } func TestNonasciiBranches(t *testing.T) { diff --git a/integrations/oauth_test.go b/integrations/oauth_test.go index c36aab652b..678dfbae2d 100644 --- a/integrations/oauth_test.go +++ b/integrations/oauth_test.go @@ -7,6 +7,7 @@ package integrations import ( "bytes" "io" + "net/http" "testing" "code.gitea.io/gitea/modules/json" @@ -21,20 +22,20 @@ func TestNoClientID(t *testing.T) { defer prepareTestEnv(t)() req := NewRequest(t, "GET", "/login/oauth/authorize") ctx := loginUser(t, "user2") - ctx.MakeRequest(t, req, 400) + ctx.MakeRequest(t, req, http.StatusBadRequest) } func TestLoginRedirect(t *testing.T) { defer prepareTestEnv(t)() req := NewRequest(t, "GET", "/login/oauth/authorize") - assert.Contains(t, MakeRequest(t, req, 302).Body.String(), "/user/login") + assert.Contains(t, MakeRequest(t, req, http.StatusSeeOther).Body.String(), "/user/login") } func TestShowAuthorize(t *testing.T) { defer prepareTestEnv(t)() req := NewRequest(t, "GET", defaultAuthorize) ctx := loginUser(t, "user4") - resp := ctx.MakeRequest(t, req, 200) + resp := ctx.MakeRequest(t, req, http.StatusOK) htmlDoc := NewHTMLParser(t, resp.Body) htmlDoc.AssertElement(t, "#authorize-app", true) @@ -45,7 +46,7 @@ func TestRedirectWithExistingGrant(t *testing.T) { defer prepareTestEnv(t)() req := NewRequest(t, "GET", defaultAuthorize) ctx := loginUser(t, "user1") - resp := ctx.MakeRequest(t, req, 
302) + resp := ctx.MakeRequest(t, req, http.StatusSeeOther) u, err := resp.Result().Location() assert.NoError(t, err) assert.Equal(t, "thestate", u.Query().Get("state")) @@ -62,7 +63,7 @@ func TestAccessTokenExchange(t *testing.T) { "code": "authcode", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - resp := MakeRequest(t, req, 200) + resp := MakeRequest(t, req, http.StatusOK) type response struct { AccessToken string `json:"access_token"` TokenType string `json:"token_type"` @@ -86,7 +87,7 @@ func TestAccessTokenExchangeWithoutPKCE(t *testing.T) { "code": "authcode", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - resp := MakeRequest(t, req, 200) + resp := MakeRequest(t, req, http.StatusOK) type response struct { AccessToken string `json:"access_token"` TokenType string `json:"token_type"` @@ -109,7 +110,7 @@ func TestAccessTokenExchangeJSON(t *testing.T) { "redirect_uri": "a", "code": "authcode", }) - MakeRequest(t, req, 400) + MakeRequest(t, req, http.StatusBadRequest) } func TestAccessTokenExchangeWithInvalidCredentials(t *testing.T) { @@ -123,7 +124,7 @@ func TestAccessTokenExchangeWithInvalidCredentials(t *testing.T) { "code": "authcode", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - MakeRequest(t, req, 400) + MakeRequest(t, req, http.StatusBadRequest) // invalid client secret req = NewRequestWithValues(t, "POST", "/login/oauth/access_token", map[string]string{ "grant_type": "authorization_code", @@ -133,7 +134,7 @@ func TestAccessTokenExchangeWithInvalidCredentials(t *testing.T) { "code": "authcode", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - MakeRequest(t, req, 400) + MakeRequest(t, req, http.StatusBadRequest) // invalid redirect uri req = NewRequestWithValues(t, "POST", "/login/oauth/access_token", map[string]string{ "grant_type": "authorization_code", @@ -143,7 +144,7 @@ func TestAccessTokenExchangeWithInvalidCredentials(t *testing.T) { "code": "authcode", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - MakeRequest(t, req, 400) + MakeRequest(t, req, http.StatusBadRequest) // invalid authorization code req = NewRequestWithValues(t, "POST", "/login/oauth/access_token", map[string]string{ "grant_type": "authorization_code", @@ -153,7 +154,7 @@ func TestAccessTokenExchangeWithInvalidCredentials(t *testing.T) { "code": "???", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - MakeRequest(t, req, 400) + MakeRequest(t, req, http.StatusBadRequest) // invalid grant_type req = NewRequestWithValues(t, "POST", "/login/oauth/access_token", map[string]string{ "grant_type": "???", @@ -163,7 +164,7 @@ func TestAccessTokenExchangeWithInvalidCredentials(t *testing.T) { "code": "authcode", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - MakeRequest(t, req, 400) + MakeRequest(t, req, http.StatusBadRequest) } func TestAccessTokenExchangeWithBasicAuth(t *testing.T) { @@ -175,7 +176,7 @@ func TestAccessTokenExchangeWithBasicAuth(t *testing.T) { "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) req.Header.Add("Authorization", "Basic ZGE3ZGEzYmEtOWExMy00MTY3LTg1NmYtMzg5OWRlMGIwMTM4OjRNSzhOYTZSNTVzbWRDWTBXdUNDdW1aNmhqUlBuR1k1c2FXVlJISGpKaUE9") - resp := MakeRequest(t, req, 200) + resp := 
MakeRequest(t, req, http.StatusOK) type response struct { AccessToken string `json:"access_token"` TokenType string `json:"token_type"` @@ -196,7 +197,7 @@ func TestAccessTokenExchangeWithBasicAuth(t *testing.T) { "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) req.Header.Add("Authorization", "Basic ZGE3ZGEzYmEtOWExMy00MTY3LTg1NmYtMzg5OWRlMGIwMTM4OmJsYWJsYQ==") - resp = MakeRequest(t, req, 400) + resp = MakeRequest(t, req, http.StatusBadRequest) // missing header req = NewRequestWithValues(t, "POST", "/login/oauth/access_token", map[string]string{ @@ -205,7 +206,7 @@ func TestAccessTokenExchangeWithBasicAuth(t *testing.T) { "code": "authcode", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - resp = MakeRequest(t, req, 400) + resp = MakeRequest(t, req, http.StatusBadRequest) } func TestRefreshTokenInvalidation(t *testing.T) { @@ -218,7 +219,7 @@ func TestRefreshTokenInvalidation(t *testing.T) { "code": "authcode", "code_verifier": "N1Zo9-8Rfwhkt68r1r29ty8YwIraXR8eh_1Qwxg7yQXsonBt", // test PKCE additionally }) - resp := MakeRequest(t, req, 200) + resp := MakeRequest(t, req, http.StatusOK) type response struct { AccessToken string `json:"access_token"` TokenType string `json:"token_type"` @@ -244,16 +245,16 @@ func TestRefreshTokenInvalidation(t *testing.T) { assert.NoError(t, err) refreshReq.Body = io.NopCloser(bytes.NewReader(bs)) - MakeRequest(t, refreshReq, 200) + MakeRequest(t, refreshReq, http.StatusOK) refreshReq.Body = io.NopCloser(bytes.NewReader(bs)) - MakeRequest(t, refreshReq, 200) + MakeRequest(t, refreshReq, http.StatusOK) // test with invalidation setting.OAuth2.InvalidateRefreshTokens = true refreshReq.Body = io.NopCloser(bytes.NewReader(bs)) - MakeRequest(t, refreshReq, 200) + MakeRequest(t, refreshReq, http.StatusOK) refreshReq.Body = io.NopCloser(bytes.NewReader(bs)) - MakeRequest(t, refreshReq, 400) + MakeRequest(t, refreshReq, http.StatusBadRequest) } diff --git a/integrations/org_count_test.go b/integrations/org_count_test.go index b7d0da2f4b..eca51eb0f6 100644 --- a/integrations/org_count_test.go +++ b/integrations/org_count_test.go @@ -9,7 +9,7 @@ import ( "strings" "testing" - "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" api "code.gitea.io/gitea/modules/structs" @@ -117,7 +117,7 @@ func doCheckOrgCounts(username string, orgCounts map[string]int, strict bool, ca Name: username, }).(*user_model.User) - orgs, err := models.FindOrgs(models.FindOrgOptions{ + orgs, err := organization.FindOrgs(organization.FindOrgOptions{ UserID: user.ID, IncludePrivate: true, }) diff --git a/integrations/org_test.go b/integrations/org_test.go index 794475a924..227a1b8d40 100644 --- a/integrations/org_test.go +++ b/integrations/org_test.go @@ -10,6 +10,8 @@ import ( "strings" "testing" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" api "code.gitea.io/gitea/modules/structs" "github.com/stretchr/testify/assert" @@ -173,3 +175,30 @@ func TestOrgRestrictedUser(t *testing.T) { req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s", orgName, repoName)) restrictedSession.MakeRequest(t, req, http.StatusOK) } + +func TestTeamSearch(t *testing.T) { + defer prepareTestEnv(t)() + + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + org := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) + + 
var results TeamSearchResults + + session := loginUser(t, user.Name) + csrf := GetCSRF(t, session, "/"+org.Name) + req := NewRequestf(t, "GET", "/org/%s/teams/-/search?q=%s", org.Name, "_team") + req.Header.Add("X-Csrf-Token", csrf) + resp := session.MakeRequest(t, req, http.StatusOK) + DecodeJSON(t, resp, &results) + assert.NotEmpty(t, results.Data) + assert.Len(t, results.Data, 1) + assert.Equal(t, "test_team", results.Data[0].Name) + + // no access if not organization member + user5 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}).(*user_model.User) + session = loginUser(t, user5.Name) + csrf = GetCSRF(t, session, "/"+org.Name) + req = NewRequestf(t, "GET", "/org/%s/teams/-/search?q=%s", org.Name, "team") + req.Header.Add("X-Csrf-Token", csrf) + session.MakeRequest(t, req, http.StatusNotFound) +} diff --git a/integrations/pgsql.ini.tmpl b/integrations/pgsql.ini.tmpl index 1c4d843712..5c4fbcc829 100644 --- a/integrations/pgsql.ini.tmpl +++ b/integrations/pgsql.ini.tmpl @@ -103,3 +103,6 @@ INTERNAL_TOKEN = eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYmYiOjE0OTU1NTE2MTh9.h [lfs] PATH = integrations/gitea-integration-pgsql/data/lfs + +[packages] +ENABLED = true diff --git a/integrations/privateactivity_test.go b/integrations/privateactivity_test.go index 44df39b6e0..c5cdc27d6e 100644 --- a/integrations/privateactivity_test.go +++ b/integrations/privateactivity_test.go @@ -53,7 +53,7 @@ func testPrivateActivityHelperEnablePrivateActivity(t *testing.T) { "language": "en-US", "keep_activity_private": "1", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) } func testPrivateActivityHelperHasVisibleActivitiesInHTMLDoc(htmlDoc *HTMLDoc) bool { diff --git a/integrations/pull_create_test.go b/integrations/pull_create_test.go index 948c0b9ce2..671b5e7551 100644 --- a/integrations/pull_create_test.go +++ b/integrations/pull_create_test.go @@ -38,7 +38,7 @@ func testPullCreate(t *testing.T, session *TestSession, user, repo, branch, titl "_csrf": htmlDoc.GetCSRF(), "title": title, }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) return resp } @@ -130,7 +130,7 @@ func testDeleteRepository(t *testing.T, session *TestSession, ownerName, repoNam "_csrf": htmlDoc.GetCSRF(), "repo_name": repoName, }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) } func TestPullBranchDelete(t *testing.T) { @@ -139,7 +139,7 @@ func TestPullBranchDelete(t *testing.T) { session := loginUser(t, "user1") testRepoFork(t, session, "user2", "repo1", "user1", "repo1") - testCreateBranch(t, session, "user1", "repo1", "branch/master", "master1", http.StatusFound) + testCreateBranch(t, session, "user1", "repo1", "branch/master", "master1", http.StatusSeeOther) testEditFile(t, session, "user1", "repo1", "master1", "README.md", "Hello, World (Edited)\n") resp := testPullCreate(t, session, "user1", "repo1", "master1", "This is a pull title") diff --git a/integrations/pull_merge_test.go b/integrations/pull_merge_test.go index 8aded910d4..c50913383c 100644 --- a/integrations/pull_merge_test.go +++ b/integrations/pull_merge_test.go @@ -6,6 +6,7 @@ package integrations import ( "bytes" + "context" "fmt" "net/http" "net/http/httptest" @@ -24,10 +25,12 @@ import ( "code.gitea.io/gitea/modules/git" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/test" + "code.gitea.io/gitea/modules/translation/i18n" "code.gitea.io/gitea/services/pull" + 
repo_service "code.gitea.io/gitea/services/repository" + files_service "code.gitea.io/gitea/services/repository/files" "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) func testPullMerge(t *testing.T, session *TestSession, user, repo, pullnum string, mergeStyle repo_model.MergeStyle) *httptest.ResponseRecorder { @@ -42,7 +45,7 @@ func testPullMerge(t *testing.T, session *TestSession, user, repo, pullnum strin "_csrf": htmlDoc.GetCSRF(), "do": string(mergeStyle), }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) return resp } @@ -220,7 +223,7 @@ func TestCantMergeConflict(t *testing.T) { Base: "base", Title: "create a conflicting pr", }) - session.MakeRequest(t, req, 201) + session.MakeRequest(t, req, http.StatusCreated) // Now this PR will be marked conflict - or at least a race will do - so drop down to pure code at this point... user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ @@ -238,14 +241,14 @@ func TestCantMergeConflict(t *testing.T) { BaseBranch: "base", }).(*models.PullRequest) - gitRepo, err := git.OpenRepository(repo_model.RepoPath(user1.Name, repo1.Name)) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo_model.RepoPath(user1.Name, repo1.Name)) assert.NoError(t, err) - err = pull.Merge(git.DefaultContext, pr, user1, gitRepo, repo_model.MergeStyleMerge, "", "CONFLICT") + err = pull.Merge(context.Background(), pr, user1, gitRepo, repo_model.MergeStyleMerge, "", "CONFLICT") assert.Error(t, err, "Merge should return an error due to conflict") assert.True(t, models.IsErrMergeConflicts(err), "Merge error is not a conflict error") - err = pull.Merge(git.DefaultContext, pr, user1, gitRepo, repo_model.MergeStyleRebase, "", "CONFLICT") + err = pull.Merge(context.Background(), pr, user1, gitRepo, repo_model.MergeStyleRebase, "", "CONFLICT") assert.Error(t, err, "Merge should return an error due to conflict") assert.True(t, models.IsErrRebaseConflicts(err), "Merge error is not a conflict error") gitRepo.Close() @@ -269,25 +272,24 @@ func TestCantMergeUnrelated(t *testing.T) { }).(*repo_model.Repository) path := repo_model.RepoPath(user1.Name, repo1.Name) - _, err := git.NewCommand(git.DefaultContext, "read-tree", "--empty").RunInDir(path) + err := git.NewCommand(git.DefaultContext, "read-tree", "--empty").Run(&git.RunOpts{Dir: path}) assert.NoError(t, err) stdin := bytes.NewBufferString("Unrelated File") var stdout strings.Builder - err = git.NewCommand(git.DefaultContext, "hash-object", "-w", "--stdin").RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: path, - Stdin: stdin, - Stdout: &stdout, + err = git.NewCommand(git.DefaultContext, "hash-object", "-w", "--stdin").Run(&git.RunOpts{ + Dir: path, + Stdin: stdin, + Stdout: &stdout, }) assert.NoError(t, err) sha := strings.TrimSpace(stdout.String()) - _, err = git.NewCommand(git.DefaultContext, "update-index", "--add", "--replace", "--cacheinfo", "100644", sha, "somewher-over-the-rainbow").RunInDir(path) + _, _, err = git.NewCommand(git.DefaultContext, "update-index", "--add", "--replace", "--cacheinfo", "100644", sha, "somewher-over-the-rainbow").RunStdString(&git.RunOpts{Dir: path}) assert.NoError(t, err) - treeSha, err := git.NewCommand(git.DefaultContext, "write-tree").RunInDir(path) + treeSha, _, err := git.NewCommand(git.DefaultContext, "write-tree").RunStdString(&git.RunOpts{Dir: path}) assert.NoError(t, err) treeSha = strings.TrimSpace(treeSha) @@ -308,17 +310,16 @@ func TestCantMergeUnrelated(t *testing.T) { 
stdout.Reset() err = git.NewCommand(git.DefaultContext, "commit-tree", treeSha). - RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: path, - Stdin: messageBytes, - Stdout: &stdout, + Run(&git.RunOpts{ + Env: env, + Dir: path, + Stdin: messageBytes, + Stdout: &stdout, }) assert.NoError(t, err) commitSha := strings.TrimSpace(stdout.String()) - _, err = git.NewCommand(git.DefaultContext, "branch", "unrelated", commitSha).RunInDir(path) + _, _, err = git.NewCommand(git.DefaultContext, "branch", "unrelated", commitSha).RunStdString(&git.RunOpts{Dir: path}) assert.NoError(t, err) testEditFileToNewBranch(t, session, "user1", "repo1", "master", "conflict", "README.md", "Hello, World (Edited Once)\n") @@ -330,10 +331,10 @@ func TestCantMergeUnrelated(t *testing.T) { Base: "base", Title: "create an unrelated pr", }) - session.MakeRequest(t, req, 201) + session.MakeRequest(t, req, http.StatusCreated) // Now this PR could be marked conflict - or at least a race may occur - so drop down to pure code at this point... - gitRepo, err := git.OpenRepository(path) + gitRepo, err := git.OpenRepository(git.DefaultContext, path) assert.NoError(t, err) pr := unittest.AssertExistsAndLoadBean(t, &models.PullRequest{ HeadRepoID: repo1.ID, @@ -342,9 +343,80 @@ func TestCantMergeUnrelated(t *testing.T) { BaseBranch: "base", }).(*models.PullRequest) - err = pull.Merge(git.DefaultContext, pr, user1, gitRepo, repo_model.MergeStyleMerge, "", "UNRELATED") + err = pull.Merge(context.Background(), pr, user1, gitRepo, repo_model.MergeStyleMerge, "", "UNRELATED") assert.Error(t, err, "Merge should return an error due to unrelated") assert.True(t, models.IsErrMergeUnrelatedHistories(err), "Merge error is not a unrelated histories error") gitRepo.Close() }) } + +func TestConflictChecking(t *testing.T) { + onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) { + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + // Create new clean repo to test conflict checking. + baseRepo, err := repo_service.CreateRepository(user, user, models.CreateRepoOptions{ + Name: "conflict-checking", + Description: "Tempo repo", + AutoInit: true, + Readme: "Default", + DefaultBranch: "main", + }) + assert.NoError(t, err) + assert.NotEmpty(t, baseRepo) + + // create a commit on new branch. + _, err = files_service.CreateOrUpdateRepoFile(git.DefaultContext, baseRepo, user, &files_service.UpdateRepoFileOptions{ + TreePath: "important_file", + Message: "Add a important file", + Content: "Just a non-important file", + IsNewFile: true, + OldBranch: "main", + NewBranch: "important-secrets", + }) + assert.NoError(t, err) + + // create a commit on main branch. + _, err = files_service.CreateOrUpdateRepoFile(git.DefaultContext, baseRepo, user, &files_service.UpdateRepoFileOptions{ + TreePath: "important_file", + Message: "Add a important file", + Content: "Not the same content :P", + IsNewFile: true, + OldBranch: "main", + NewBranch: "main", + }) + assert.NoError(t, err) + + // create Pull to merge the important-secrets branch into main branch. 
+ pullIssue := &models.Issue{ + RepoID: baseRepo.ID, + Title: "PR with conflict!", + PosterID: user.ID, + Poster: user, + IsPull: true, + } + + pullRequest := &models.PullRequest{ + HeadRepoID: baseRepo.ID, + BaseRepoID: baseRepo.ID, + HeadBranch: "important-secrets", + BaseBranch: "main", + HeadRepo: baseRepo, + BaseRepo: baseRepo, + Type: models.PullRequestGitea, + } + err = pull.NewPullRequest(git.DefaultContext, baseRepo, pullIssue, nil, nil, pullRequest, nil) + assert.NoError(t, err) + + issue := unittest.AssertExistsAndLoadBean(t, &models.Issue{Title: "PR with conflict!"}).(*models.Issue) + conflictingPR, err := models.GetPullRequestByIssueID(issue.ID) + assert.NoError(t, err) + + // Ensure conflictedFiles is populated. + assert.Equal(t, 1, len(conflictingPR.ConflictedFiles)) + // Check if status is correct. + assert.Equal(t, models.PullRequestStatusConflict, conflictingPR.Status) + // Ensure that mergeable returns false + assert.False(t, conflictingPR.Mergeable()) + }) +} diff --git a/integrations/pull_status_test.go b/integrations/pull_status_test.go index f818643005..a5247f56ec 100644 --- a/integrations/pull_status_test.go +++ b/integrations/pull_status_test.go @@ -29,7 +29,7 @@ func TestPullCreate_CommitStatus(t *testing.T) { "title": "pull request from status1", }, ) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) req = NewRequest(t, "GET", "/user1/repo1/pulls") resp := session.MakeRequest(t, req, http.StatusOK) @@ -63,20 +63,13 @@ func TestPullCreate_CommitStatus(t *testing.T) { api.CommitStatusWarning: "warning sign icon yellow", } + testCtx := NewAPITestContext(t, "user1", "repo1") + // Update commit status, and check if icon is updated as well for _, status := range statusList { // Call API to add status for commit - token := getTokenForLoggedInUser(t, session) - req = NewRequestWithJSON(t, "POST", fmt.Sprintf("/api/v1/repos/user1/repo1/statuses/%s?token=%s", commitID, token), - api.CreateStatusOption{ - State: status, - TargetURL: "http://test.ci/", - Description: "", - Context: "testci", - }, - ) - session.MakeRequest(t, req, http.StatusCreated) + t.Run("CreateStatus", doAPICreateCommitStatus(testCtx, commitID, status)) req = NewRequestf(t, "GET", "/user1/repo1/pulls/1/commits") resp = session.MakeRequest(t, req, http.StatusOK) @@ -94,6 +87,24 @@ func TestPullCreate_CommitStatus(t *testing.T) { }) } +func doAPICreateCommitStatus(ctx APITestContext, commitID string, status api.CommitStatusState) func(*testing.T) { + return func(t *testing.T) { + req := NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/statuses/%s?token=%s", ctx.Username, ctx.Reponame, commitID, ctx.Token), + api.CreateStatusOption{ + State: status, + TargetURL: "http://test.ci/", + Description: "", + Context: "testci", + }, + ) + if ctx.ExpectedCode != 0 { + ctx.Session.MakeRequest(t, req, ctx.ExpectedCode) + return + } + ctx.Session.MakeRequest(t, req, http.StatusCreated) + } +} + func TestPullCreate_EmptyChangesWithCommits(t *testing.T) { onGiteaRun(t, func(t *testing.T, u *url.URL) { session := loginUser(t, "user1") @@ -108,7 +119,7 @@ func TestPullCreate_EmptyChangesWithCommits(t *testing.T) { "title": "pull request from status1", }, ) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) req = NewRequest(t, "GET", "/user1/repo1/pulls/1") resp := session.MakeRequest(t, req, http.StatusOK) diff --git a/integrations/pull_update_test.go b/integrations/pull_update_test.go index 
dc2803aca0..20b4eaeb4a 100644 --- a/integrations/pull_update_test.go +++ b/integrations/pull_update_test.go @@ -90,7 +90,7 @@ func createOutdatedPR(t *testing.T, actor, forkOrg *user_model.User) *models.Pul assert.NoError(t, err) assert.NotEmpty(t, baseRepo) - headRepo, err := repo_service.ForkRepository(actor, forkOrg, repo_service.ForkRepoOptions{ + headRepo, err := repo_service.ForkRepository(git.DefaultContext, actor, forkOrg, repo_service.ForkRepoOptions{ BaseRepo: baseRepo, Name: "repo-pr-update", Description: "desc", diff --git a/integrations/release_test.go b/integrations/release_test.go index 28e0347791..d75d74956e 100644 --- a/integrations/release_test.go +++ b/integrations/release_test.go @@ -14,10 +14,10 @@ import ( "code.gitea.io/gitea/models/unittest" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/test" + "code.gitea.io/gitea/modules/translation/i18n" "github.com/PuerkitoBio/goquery" "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) func createNewRelease(t *testing.T, session *TestSession, repoURL, tag, title string, preRelease, draft bool) { @@ -43,7 +43,7 @@ func createNewRelease(t *testing.T, session *TestSession, repoURL, tag, title st } req = NewRequestWithValues(t, "POST", link, postData) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) test.RedirectURL(resp) // check that redirect URL exists } diff --git a/integrations/rename_branch_test.go b/integrations/rename_branch_test.go index e856214f3c..7760a2d946 100644 --- a/integrations/rename_branch_test.go +++ b/integrations/rename_branch_test.go @@ -27,7 +27,7 @@ func TestRenameBranch(t *testing.T) { "to": "main", } req = NewRequestWithValues(t, "POST", "/user2/repo1/settings/rename_branch", postData) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) // check new branch link req = NewRequestWithValues(t, "GET", "/user2/repo1/src/branch/main/README.md", postData) @@ -35,7 +35,7 @@ func TestRenameBranch(t *testing.T) { // check old branch link req = NewRequestWithValues(t, "GET", "/user2/repo1/src/branch/master/README.md", postData) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) location := resp.HeaderMap.Get("Location") assert.Equal(t, "/user2/repo1/src/branch/main/README.md", location) diff --git a/integrations/repo_branch_test.go b/integrations/repo_branch_test.go index aef28515e7..30a446ccec 100644 --- a/integrations/repo_branch_test.go +++ b/integrations/repo_branch_test.go @@ -13,9 +13,9 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/test" + "code.gitea.io/gitea/modules/translation/i18n" "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) func testCreateBranch(t testing.TB, session *TestSession, user, repo, oldRefSubURL, newBranchName string, expectedStatus int) string { @@ -30,7 +30,7 @@ func testCreateBranch(t testing.TB, session *TestSession, user, repo, oldRefSubU "new_branch_name": newBranchName, }) resp := session.MakeRequest(t, req, expectedStatus) - if expectedStatus != http.StatusFound { + if expectedStatus != http.StatusSeeOther { return "" } return test.RedirectURL(resp) @@ -51,37 +51,37 @@ func testCreateBranches(t *testing.T, giteaURL *url.URL) { { OldRefSubURL: "branch/master", NewBranch: "feature/test1", - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "repo.branch.create_success", 
"feature/test1"), }, { OldRefSubURL: "branch/master", NewBranch: "", - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "form.NewBranchName") + i18n.Tr("en", "form.require_error"), }, { OldRefSubURL: "branch/master", NewBranch: "feature=test1", - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "repo.branch.create_success", "feature=test1"), }, { OldRefSubURL: "branch/master", NewBranch: strings.Repeat("b", 101), - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "form.NewBranchName") + i18n.Tr("en", "form.max_size_error", "100"), }, { OldRefSubURL: "branch/master", NewBranch: "master", - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "repo.branch.branch_already_exists", "master"), }, { OldRefSubURL: "branch/master", NewBranch: "master/test", - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "repo.branch.branch_name_conflict", "master/test", "master"), }, { @@ -92,21 +92,21 @@ func testCreateBranches(t *testing.T, giteaURL *url.URL) { { OldRefSubURL: "commit/65f1bf27bc3bf70f64657658635e66094edbcb4d", NewBranch: "feature/test3", - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "repo.branch.create_success", "feature/test3"), }, { OldRefSubURL: "branch/master", NewBranch: "v1.0.0", CreateRelease: "v1.0.0", - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "repo.branch.tag_collision", "v1.0.0"), }, { OldRefSubURL: "tag/v1.0.0", NewBranch: "feature/test4", CreateRelease: "v1.0.1", - ExpectedStatus: http.StatusFound, + ExpectedStatus: http.StatusSeeOther, FlashMessage: i18n.Tr("en", "repo.branch.create_success", "feature/test4"), }, } @@ -116,7 +116,7 @@ func testCreateBranches(t *testing.T, giteaURL *url.URL) { createNewRelease(t, session, "/user2/repo1", test.CreateRelease, test.CreateRelease, false, false) } redirectURL := testCreateBranch(t, session, "user2", "repo1", test.OldRefSubURL, test.NewBranch, test.ExpectedStatus) - if test.ExpectedStatus == http.StatusFound { + if test.ExpectedStatus == http.StatusSeeOther { req := NewRequest(t, "GET", redirectURL) resp := session.MakeRequest(t, req, http.StatusOK) htmlDoc := NewHTMLParser(t, resp.Body) @@ -135,7 +135,7 @@ func TestCreateBranchInvalidCSRF(t *testing.T) { "_csrf": "fake_csrf", "new_branch_name": "test", }) - resp := session.MakeRequest(t, req, http.StatusFound) + resp := session.MakeRequest(t, req, http.StatusSeeOther) loc := resp.Header().Get("Location") assert.Equal(t, setting.AppSubURL+"/", loc) resp = session.MakeRequest(t, NewRequest(t, "GET", loc), http.StatusOK) diff --git a/integrations/repo_commits_test.go b/integrations/repo_commits_test.go index b53d988c58..7107f43b0f 100644 --- a/integrations/repo_commits_test.go +++ b/integrations/repo_commits_test.go @@ -36,7 +36,6 @@ func doTestRepoCommitWithStatus(t *testing.T, state string, classes ...string) { defer prepareTestEnv(t)() session := loginUser(t, "user2") - token := getTokenForLoggedInUser(t, session) // Request repository commits page req := NewRequest(t, "GET", "/user2/repo1/commits/branch/master") @@ -49,16 +48,7 @@ func doTestRepoCommitWithStatus(t *testing.T, state string, classes ...string) { assert.NotEmpty(t, commitURL) // Call API to add status for commit - req = NewRequestWithJSON(t, "POST", 
"/api/v1/repos/user2/repo1/statuses/"+path.Base(commitURL)+"?token="+token, - api.CreateStatusOption{ - State: api.CommitStatusState(state), - TargetURL: "http://test.ci/", - Description: "", - Context: "testci", - }, - ) - - resp = session.MakeRequest(t, req, http.StatusCreated) + t.Run("CreateStatus", doAPICreateCommitStatus(NewAPITestContext(t, "user2", "repo1"), path.Base(commitURL), api.CommitStatusState(state))) req = NewRequest(t, "GET", "/user2/repo1/commits/branch/master") resp = session.MakeRequest(t, req, http.StatusOK) diff --git a/integrations/repo_fork_test.go b/integrations/repo_fork_test.go index 27b62d2646..d701850f14 100644 --- a/integrations/repo_fork_test.go +++ b/integrations/repo_fork_test.go @@ -45,7 +45,7 @@ func testRepoFork(t *testing.T, session *TestSession, ownerName, repoName, forkO "uid": fmt.Sprintf("%d", forkOwner.ID), "repo_name": forkRepoName, }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) // Step4: check the existence of the forked repo req = NewRequestf(t, "GET", "/%s/%s", forkOwnerName, forkRepoName) diff --git a/integrations/repo_generate_test.go b/integrations/repo_generate_test.go index b70a4a4797..4fbbb56c50 100644 --- a/integrations/repo_generate_test.go +++ b/integrations/repo_generate_test.go @@ -46,7 +46,7 @@ func testRepoGenerate(t *testing.T, session *TestSession, templateOwnerName, tem "repo_name": generateRepoName, "git_content": "true", }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) // Step4: check the existence of the generated repo req = NewRequestf(t, "GET", "/%s/%s", generateOwnerName, generateRepoName) diff --git a/integrations/repo_migrate_test.go b/integrations/repo_migrate_test.go index e6ba15b137..4e6923dd6f 100644 --- a/integrations/repo_migrate_test.go +++ b/integrations/repo_migrate_test.go @@ -33,7 +33,7 @@ func testRepoMigrate(t testing.TB, session *TestSession, cloneAddr, repoName str "repo_name": repoName, "service": fmt.Sprintf("%d", structs.PlainGitService), }) - resp = session.MakeRequest(t, req, http.StatusFound) + resp = session.MakeRequest(t, req, http.StatusSeeOther) return resp } diff --git a/integrations/repo_tag_test.go b/integrations/repo_tag_test.go index 20bfc555b8..ef7a11422e 100644 --- a/integrations/repo_tag_test.go +++ b/integrations/repo_tag_test.go @@ -66,10 +66,10 @@ func TestCreateNewTagProtected(t *testing.T) { doGitClone(dstPath, u)(t) - _, err = git.NewCommand(git.DefaultContext, "tag", "v-2").RunInDir(dstPath) + _, _, err = git.NewCommand(git.DefaultContext, "tag", "v-2").RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) - _, err = git.NewCommand(git.DefaultContext, "push", "--tags").RunInDir(dstPath) + _, _, err = git.NewCommand(git.DefaultContext, "push", "--tags").RunStdString(&git.RunOpts{Dir: dstPath}) assert.Error(t, err) assert.Contains(t, err.Error(), "Tag v-2 is protected") }) diff --git a/integrations/repo_test.go b/integrations/repo_test.go index 677ba57f80..872d3f24d1 100644 --- a/integrations/repo_test.go +++ b/integrations/repo_test.go @@ -155,7 +155,7 @@ func TestViewRepoWithSymlinks(t *testing.T) { return fmt.Sprintf("%s: %s", file, cls) }) assert.Len(t, items, 5) - assert.Equal(t, "a: svg octicon-file-directory", items[0]) + assert.Equal(t, "a: svg octicon-file-directory-fill", items[0]) assert.Equal(t, "link_b: svg octicon-file-submodule", items[1]) assert.Equal(t, "link_d: svg octicon-file-symlink-file", items[2]) assert.Equal(t, 
"link_hi: svg octicon-file-symlink-file", items[3]) diff --git a/integrations/repo_topic_test.go b/integrations/repo_topic_test.go new file mode 100644 index 0000000000..e049afdd7c --- /dev/null +++ b/integrations/repo_topic_test.go @@ -0,0 +1,47 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integrations + +import ( + "net/http" + "net/url" + "testing" + + api "code.gitea.io/gitea/modules/structs" + + "github.com/stretchr/testify/assert" +) + +func TestTopicSearch(t *testing.T) { + defer prepareTestEnv(t)() + searchURL, _ := url.Parse("/explore/topics/search") + var topics struct { + TopicNames []*api.TopicResponse `json:"topics"` + } + + query := url.Values{"page": []string{"1"}, "limit": []string{"4"}} + + searchURL.RawQuery = query.Encode() + res := MakeRequest(t, NewRequest(t, "GET", searchURL.String()), http.StatusOK) + DecodeJSON(t, res, &topics) + assert.Len(t, topics.TopicNames, 4) + assert.EqualValues(t, "6", res.Header().Get("x-total-count")) + + query.Add("q", "topic") + searchURL.RawQuery = query.Encode() + res = MakeRequest(t, NewRequest(t, "GET", searchURL.String()), http.StatusOK) + DecodeJSON(t, res, &topics) + assert.Len(t, topics.TopicNames, 2) + + query.Set("q", "database") + searchURL.RawQuery = query.Encode() + res = MakeRequest(t, NewRequest(t, "GET", searchURL.String()), http.StatusOK) + DecodeJSON(t, res, &topics) + if assert.Len(t, topics.TopicNames, 1) { + assert.EqualValues(t, 2, topics.TopicNames[0].ID) + assert.EqualValues(t, "database", topics.TopicNames[0].Name) + assert.EqualValues(t, 1, topics.TopicNames[0].RepoCount) + } +} diff --git a/integrations/repofiles_delete_test.go b/integrations/repofiles_delete_test.go index 09794f8c9f..a6c2484a56 100644 --- a/integrations/repofiles_delete_test.go +++ b/integrations/repofiles_delete_test.go @@ -77,7 +77,7 @@ func testDeleteRepoFile(t *testing.T, u *url.URL) { test.LoadGitRepo(t, ctx) defer ctx.Repo.GitRepo.Close() repo := ctx.Repo.Repository - doer := ctx.User + doer := ctx.Doer opts := getDeleteRepoFileOptions(repo) t.Run("Delete README.md file", func(t *testing.T) { @@ -117,7 +117,7 @@ func testDeleteRepoFileWithoutBranchNames(t *testing.T, u *url.URL) { defer ctx.Repo.GitRepo.Close() repo := ctx.Repo.Repository - doer := ctx.User + doer := ctx.Doer opts := getDeleteRepoFileOptions(repo) opts.OldBranch = "" opts.NewBranch = "" @@ -147,7 +147,7 @@ func TestDeleteRepoFileErrors(t *testing.T) { defer ctx.Repo.GitRepo.Close() repo := ctx.Repo.Repository - doer := ctx.User + doer := ctx.Doer t.Run("Bad branch", func(t *testing.T) { opts := getDeleteRepoFileOptions(repo) diff --git a/integrations/repofiles_update_test.go b/integrations/repofiles_update_test.go index 46c73b48f6..2add99cc86 100644 --- a/integrations/repofiles_update_test.go +++ b/integrations/repofiles_update_test.go @@ -194,7 +194,7 @@ func TestCreateOrUpdateRepoFileForCreate(t *testing.T) { defer ctx.Repo.GitRepo.Close() repo := ctx.Repo.Repository - doer := ctx.User + doer := ctx.Doer opts := getCreateRepoFileOptions(repo) // test @@ -202,7 +202,7 @@ func TestCreateOrUpdateRepoFileForCreate(t *testing.T) { // asserts assert.NoError(t, err) - gitRepo, _ := git.OpenRepository(repo.RepoPath()) + gitRepo, _ := git.OpenRepository(git.DefaultContext, repo.RepoPath()) defer gitRepo.Close() commitID, _ := gitRepo.GetBranchCommitID(opts.NewBranch) @@ -230,7 +230,7 @@ func TestCreateOrUpdateRepoFileForUpdate(t *testing.T) { 
defer ctx.Repo.GitRepo.Close() repo := ctx.Repo.Repository - doer := ctx.User + doer := ctx.Doer opts := getUpdateRepoFileOptions(repo) // test @@ -238,7 +238,7 @@ func TestCreateOrUpdateRepoFileForUpdate(t *testing.T) { // asserts assert.NoError(t, err) - gitRepo, _ := git.OpenRepository(repo.RepoPath()) + gitRepo, _ := git.OpenRepository(git.DefaultContext, repo.RepoPath()) defer gitRepo.Close() commitID, _ := gitRepo.GetBranchCommitID(opts.NewBranch) @@ -263,7 +263,7 @@ func TestCreateOrUpdateRepoFileForUpdateWithFileMove(t *testing.T) { defer ctx.Repo.GitRepo.Close() repo := ctx.Repo.Repository - doer := ctx.User + doer := ctx.Doer opts := getUpdateRepoFileOptions(repo) opts.FromTreePath = "README.md" opts.TreePath = "README_new.md" // new file name, README_new.md @@ -273,7 +273,7 @@ func TestCreateOrUpdateRepoFileForUpdateWithFileMove(t *testing.T) { // asserts assert.NoError(t, err) - gitRepo, _ := git.OpenRepository(repo.RepoPath()) + gitRepo, _ := git.OpenRepository(git.DefaultContext, repo.RepoPath()) defer gitRepo.Close() commit, _ := gitRepo.GetBranchCommit(opts.NewBranch) @@ -313,7 +313,7 @@ func TestCreateOrUpdateRepoFileWithoutBranchNames(t *testing.T) { defer ctx.Repo.GitRepo.Close() repo := ctx.Repo.Repository - doer := ctx.User + doer := ctx.Doer opts := getUpdateRepoFileOptions(repo) opts.OldBranch = "" opts.NewBranch = "" @@ -323,7 +323,7 @@ func TestCreateOrUpdateRepoFileWithoutBranchNames(t *testing.T) { // asserts assert.NoError(t, err) - gitRepo, _ := git.OpenRepository(repo.RepoPath()) + gitRepo, _ := git.OpenRepository(git.DefaultContext, repo.RepoPath()) defer gitRepo.Close() commitID, _ := gitRepo.GetBranchCommitID(repo.DefaultBranch) @@ -344,7 +344,7 @@ func TestCreateOrUpdateRepoFileErrors(t *testing.T) { defer ctx.Repo.GitRepo.Close() repo := ctx.Repo.Repository - doer := ctx.User + doer := ctx.Doer t.Run("bad branch", func(t *testing.T) { opts := getUpdateRepoFileOptions(repo) diff --git a/integrations/setting_test.go b/integrations/setting_test.go index c0455a4520..3852eb4955 100644 --- a/integrations/setting_test.go +++ b/integrations/setting_test.go @@ -90,17 +90,17 @@ func TestSettingLandingPage(t *testing.T) { setting.LandingPageURL = setting.LandingPageExplore req = NewRequest(t, "GET", "/") - resp := MakeRequest(t, req, http.StatusFound) + resp := MakeRequest(t, req, http.StatusSeeOther) assert.Equal(t, "/explore", resp.Header().Get("Location")) setting.LandingPageURL = setting.LandingPageOrganizations req = NewRequest(t, "GET", "/") - resp = MakeRequest(t, req, http.StatusFound) + resp = MakeRequest(t, req, http.StatusSeeOther) assert.Equal(t, "/explore/organizations", resp.Header().Get("Location")) setting.LandingPageURL = setting.LandingPageLogin req = NewRequest(t, "GET", "/") - resp = MakeRequest(t, req, http.StatusFound) + resp = MakeRequest(t, req, http.StatusSeeOther) assert.Equal(t, "/user/login", resp.Header().Get("Location")) setting.LandingPageURL = landingPage diff --git a/integrations/signin_test.go b/integrations/signin_test.go index a6e4b7d4d2..811f9326ec 100644 --- a/integrations/signin_test.go +++ b/integrations/signin_test.go @@ -11,9 +11,9 @@ import ( "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/translation/i18n" "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) func testLoginFailed(t *testing.T, username, password, message string) { diff --git a/integrations/signout_test.go b/integrations/signout_test.go index b54e7ee9ee..8ef97e89c5 100644 --- 
a/integrations/signout_test.go +++ b/integrations/signout_test.go @@ -15,7 +15,7 @@ func TestSignOut(t *testing.T) { session := loginUser(t, "user2") req := NewRequest(t, "POST", "/user/logout") - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) // try to view a private repo, should fail req = NewRequest(t, "GET", "/user2/repo2") diff --git a/integrations/signup_test.go b/integrations/signup_test.go index 93e384076f..7b45674376 100644 --- a/integrations/signup_test.go +++ b/integrations/signup_test.go @@ -13,9 +13,9 @@ import ( "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/translation/i18n" "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) func TestSignup(t *testing.T) { @@ -29,7 +29,7 @@ func TestSignup(t *testing.T) { "password": "examplePassword!1", "retype": "examplePassword!1", }) - MakeRequest(t, req, http.StatusFound) + MakeRequest(t, req, http.StatusSeeOther) // should be able to view new user's page req = NewRequest(t, "GET", "/exampleUser") @@ -48,7 +48,7 @@ func TestSignupAsRestricted(t *testing.T) { "password": "examplePassword!1", "retype": "examplePassword!1", }) - MakeRequest(t, req, http.StatusFound) + MakeRequest(t, req, http.StatusSeeOther) // should be able to view new user's page req = NewRequest(t, "GET", "/restrictedUser") @@ -68,10 +68,10 @@ func TestSignupEmail(t *testing.T) { wantStatus int wantMsg string }{ - {"exampleUser@example.com\r\n", http.StatusOK, i18n.Tr("en", "form.email_invalid", nil)}, - {"exampleUser@example.com\r", http.StatusOK, i18n.Tr("en", "form.email_invalid", nil)}, - {"exampleUser@example.com\n", http.StatusOK, i18n.Tr("en", "form.email_invalid", nil)}, - {"exampleUser@example.com", http.StatusFound, ""}, + {"exampleUser@example.com\r\n", http.StatusOK, i18n.Tr("en", "form.email_invalid")}, + {"exampleUser@example.com\r", http.StatusOK, i18n.Tr("en", "form.email_invalid")}, + {"exampleUser@example.com\n", http.StatusOK, i18n.Tr("en", "form.email_invalid")}, + {"exampleUser@example.com", http.StatusSeeOther, ""}, } for i, test := range tests { diff --git a/integrations/sqlite.ini.tmpl b/integrations/sqlite.ini.tmpl index e2d999666a..c180148891 100644 --- a/integrations/sqlite.ini.tmpl +++ b/integrations/sqlite.ini.tmpl @@ -101,3 +101,6 @@ JWT_SECRET = KZb_QLUd4fYVyxetjxC4eZkrBgWM2SndOOWDNtgUUko [lfs] PATH = integrations/gitea-integration-sqlite/data/lfs + +[packages] +ENABLED = true diff --git a/integrations/user_avatar_test.go b/integrations/user_avatar_test.go index 7c2267885a..2bf6fde5ff 100644 --- a/integrations/user_avatar_test.go +++ b/integrations/user_avatar_test.go @@ -70,7 +70,7 @@ func TestUserAvatar(t *testing.T) { req.Header.Add("X-Csrf-Token", csrf) req.Header.Add("Content-Type", writer.FormDataContentType()) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) user2 = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) // owner of the repo3, is an org diff --git a/integrations/user_test.go b/integrations/user_test.go index 4cfe7700e1..d0523d8b3a 100644 --- a/integrations/user_test.go +++ b/integrations/user_test.go @@ -8,12 +8,15 @@ import ( "net/http" "testing" + "code.gitea.io/gitea/models" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + api "code.gitea.io/gitea/modules/structs" 
"code.gitea.io/gitea/modules/test" + "code.gitea.io/gitea/modules/translation/i18n" "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) func TestViewUser(t *testing.T) { @@ -33,7 +36,7 @@ func TestRenameUsername(t *testing.T) { "email": "user2@example.com", "language": "en-US", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: "newUsername"}) unittest.AssertNotExistsBean(t, &user_model.User{Name: "user2"}) @@ -75,23 +78,41 @@ func TestRenameReservedUsername(t *testing.T) { defer prepareTestEnv(t)() reservedUsernames := []string{ + ".", + "..", + ".well-known", "admin", "api", + "assets", "attachments", + "avatar", "avatars", + "captcha", + "commits", + "debug", + "error", "explore", - "help", - "install", + "favicon.ico", + "ghost", "issues", "login", + "manifest.json", "metrics", + "milestones", + "new", "notifications", "org", "pulls", + "raw", "repo", - "template", - "user", + "repo-avatars", + "robots.txt", "search", + "serviceworker.js", + "ssh_info", + "swagger.v1.json", + "user", + "v2", } session := loginUser(t, "user2") @@ -103,7 +124,7 @@ func TestRenameReservedUsername(t *testing.T) { "email": "user2@example.com", "language": "en-US", }) - resp := session.MakeRequest(t, req, http.StatusFound) + resp := session.MakeRequest(t, req, http.StatusSeeOther) req = NewRequest(t, "GET", test.RedirectURL(resp)) resp = session.MakeRequest(t, req, http.StatusOK) @@ -204,3 +225,26 @@ func testExportUserGPGKeys(t *testing.T, user, expected string) { // t.Log(resp.Body.String()) assert.Equal(t, expected, resp.Body.String()) } + +func TestListStopWatches(t *testing.T) { + defer prepareTestEnv(t)() + + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) + owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID}).(*user_model.User) + + session := loginUser(t, owner.Name) + req := NewRequestf(t, "GET", "/user/stopwatches") + resp := session.MakeRequest(t, req, http.StatusOK) + var apiWatches []*api.StopWatch + DecodeJSON(t, resp, &apiWatches) + stopwatch := unittest.AssertExistsAndLoadBean(t, &models.Stopwatch{UserID: owner.ID}).(*models.Stopwatch) + issue := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: stopwatch.IssueID}).(*models.Issue) + if assert.Len(t, apiWatches, 1) { + assert.EqualValues(t, stopwatch.CreatedUnix.AsTime().Unix(), apiWatches[0].Created.Unix()) + assert.EqualValues(t, issue.Index, apiWatches[0].IssueIndex) + assert.EqualValues(t, issue.Title, apiWatches[0].IssueTitle) + assert.EqualValues(t, repo.Name, apiWatches[0].RepoName) + assert.EqualValues(t, repo.OwnerName, apiWatches[0].RepoOwnerName) + assert.Greater(t, int64(apiWatches[0].Seconds), int64(0)) + } +} diff --git a/integrations/webfinger_test.go b/integrations/webfinger_test.go new file mode 100644 index 0000000000..8ba93c3f20 --- /dev/null +++ b/integrations/webfinger_test.go @@ -0,0 +1,68 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package integrations + +import ( + "fmt" + "net/http" + "net/url" + "testing" + + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/setting" + + "github.com/stretchr/testify/assert" +) + +func TestWebfinger(t *testing.T) { + defer prepareTestEnv(t)() + + setting.Federation.Enabled = true + defer func() { + setting.Federation.Enabled = false + }() + + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + + appURL, _ := url.Parse(setting.AppURL) + + type webfingerLink struct { + Rel string `json:"rel,omitempty"` + Type string `json:"type,omitempty"` + Href string `json:"href,omitempty"` + Titles map[string]string `json:"titles,omitempty"` + Properties map[string]interface{} `json:"properties,omitempty"` + } + + type webfingerJRD struct { + Subject string `json:"subject,omitempty"` + Aliases []string `json:"aliases,omitempty"` + Properties map[string]interface{} `json:"properties,omitempty"` + Links []*webfingerLink `json:"links,omitempty"` + } + + session := loginUser(t, "user1") + + req := NewRequest(t, "GET", fmt.Sprintf("/.well-known/webfinger?resource=acct:%s@%s", user.LowerName, appURL.Host)) + resp := MakeRequest(t, req, http.StatusOK) + + var jrd webfingerJRD + DecodeJSON(t, resp, &jrd) + assert.Equal(t, "acct:user2@"+appURL.Host, jrd.Subject) + assert.ElementsMatch(t, []string{user.HTMLURL()}, jrd.Aliases) + + req = NewRequest(t, "GET", fmt.Sprintf("/.well-known/webfinger?resource=acct:%s@%s", user.LowerName, "unknown.host")) + MakeRequest(t, req, http.StatusBadRequest) + + req = NewRequest(t, "GET", fmt.Sprintf("/.well-known/webfinger?resource=acct:%s@%s", "user31", appURL.Host)) + MakeRequest(t, req, http.StatusNotFound) + + req = NewRequest(t, "GET", fmt.Sprintf("/.well-known/webfinger?resource=acct:%s@%s", "user31", appURL.Host)) + session.MakeRequest(t, req, http.StatusOK) + + req = NewRequest(t, "GET", fmt.Sprintf("/.well-known/webfinger?resource=mailto:%s", user.Email)) + MakeRequest(t, req, http.StatusNotFound) +} diff --git a/integrations/xss_test.go b/integrations/xss_test.go index 4c2e60e799..1ce25e1bf5 100644 --- a/integrations/xss_test.go +++ b/integrations/xss_test.go @@ -27,7 +27,7 @@ func TestXSSUserFullName(t *testing.T) { "email": user.Email, "language": "en-US", }) - session.MakeRequest(t, req, http.StatusFound) + session.MakeRequest(t, req, http.StatusSeeOther) req = NewRequestf(t, "GET", "/%s", user.Name) resp := session.MakeRequest(t, req, http.StatusOK) diff --git a/models/access.go b/models/access.go index 48b65c2c0f..d49d3430dc 100644 --- a/models/access.go +++ b/models/access.go @@ -10,6 +10,7 @@ import ( "fmt" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -149,7 +150,7 @@ func recalculateTeamAccesses(ctx context.Context, repo *repo_model.Repository, i return fmt.Errorf("refreshCollaboratorAccesses: %v", err) } - teams, err := OrgFromUser(repo.Owner).loadTeams(e) + teams, err := organization.FindOrgTeams(ctx, repo.Owner.ID) if err != nil { return err } @@ -163,11 +164,11 @@ func recalculateTeamAccesses(ctx context.Context, repo *repo_model.Repository, i // have relations with repository. 
if t.IsOwnerTeam() { t.AccessMode = perm.AccessModeOwner - } else if !t.hasRepository(e, repo.ID) { + } else if !hasRepository(ctx, t, repo.ID) { continue } - if err = t.getMembers(e); err != nil { + if err = t.GetMembersCtx(ctx); err != nil { return fmt.Errorf("getMembers '%d': %v", t.ID, err) } for _, m := range t.Members { @@ -198,7 +199,7 @@ func recalculateUserAccess(ctx context.Context, repo *repo_model.Repository, uid if err = repo.GetOwner(ctx); err != nil { return err } else if repo.Owner.IsOrganization() { - var teams []Team + var teams []organization.Team if err := e.Join("INNER", "team_repo", "team_repo.team_id = team.id"). Join("INNER", "team_user", "team_user.team_id = team.id"). Where("team.org_id = ?", repo.OwnerID). diff --git a/models/access_test.go b/models/access_test.go index 43e61e812b..7533381dca 100644 --- a/models/access_test.go +++ b/models/access_test.go @@ -8,6 +8,7 @@ import ( "testing" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" @@ -127,7 +128,7 @@ func TestRepository_RecalculateAccesses2(t *testing.T) { func TestRepository_RecalculateAccesses3(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - team5 := unittest.AssertExistsAndLoadBean(t, &Team{ID: 5}).(*Team) + team5 := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 5}).(*organization.Team) user29 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 29}).(*user_model.User) has, err := db.GetEngine(db.DefaultContext).Get(&Access{UserID: 29, RepoID: 23}) diff --git a/models/action.go b/models/action.go index 26d05730c5..7055ce81d6 100644 --- a/models/action.go +++ b/models/action.go @@ -15,6 +15,7 @@ import ( "time" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -22,6 +23,7 @@ import ( "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" @@ -241,7 +243,7 @@ func (a *Action) getCommentLink(ctx context.Context) string { return "#" } - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return "#" } @@ -315,19 +317,21 @@ func (a *Action) GetIssueContent() string { // GetFeedsOptions options for retrieving feeds type GetFeedsOptions struct { - RequestedUser *user_model.User // the user we want activity for - RequestedTeam *Team // the team we want activity for - Actor *user_model.User // the user viewing the activity - IncludePrivate bool // include private actions - OnlyPerformedBy bool // only actions performed by requested user - IncludeDeleted bool // include deleted actions - Date string // the day we want activity for: YYYY-MM-DD + db.ListOptions + RequestedUser *user_model.User // the user we want activity for + RequestedTeam *organization.Team // the team we want activity for + RequestedRepo *repo_model.Repository // the repo we want activity for + Actor *user_model.User // the user viewing the activity + IncludePrivate bool // include private actions + OnlyPerformedBy bool // only actions performed by requested user + IncludeDeleted bool // include deleted actions + Date string // the day we want activity for: YYYY-MM-DD } // GetFeeds returns actions according to the 
provided options -func GetFeeds(opts GetFeedsOptions) ([]*Action, error) { - if !activityReadable(opts.RequestedUser, opts.Actor) { - return make([]*Action, 0), nil +func GetFeeds(ctx context.Context, opts GetFeedsOptions) (ActionList, error) { + if opts.RequestedUser == nil && opts.RequestedTeam == nil && opts.RequestedRepo == nil { + return nil, fmt.Errorf("need at least one of these filters: RequestedUser, RequestedTeam, RequestedRepo") } cond, err := activityQueryCondition(opts) @@ -335,13 +339,21 @@ func GetFeeds(opts GetFeedsOptions) ([]*Action, error) { return nil, err } - actions := make([]*Action, 0, setting.UI.FeedPagingNum) + e := db.GetEngine(ctx) + sess := e.Where(cond). + Select("`action`.*"). // this line will avoid select other joined table's columns + Join("INNER", "repository", "`repository`.id = `action`.repo_id") - if err := db.GetEngine(db.DefaultContext).Limit(setting.UI.FeedPagingNum).Desc("created_unix").Where(cond).Find(&actions); err != nil { + opts.SetDefaultValues() + sess = db.SetSessionPagination(sess, &opts) + + actions := make([]*Action, 0, opts.PageSize) + + if err := sess.Desc("`action`.created_unix").Find(&actions); err != nil { return nil, fmt.Errorf("Find: %v", err) } - if err := ActionList(actions).LoadAttributes(); err != nil { + if err := ActionList(actions).loadAttributes(e); err != nil { return nil, fmt.Errorf("LoadAttributes: %v", err) } @@ -349,45 +361,48 @@ func GetFeeds(opts GetFeedsOptions) ([]*Action, error) { } func activityReadable(user, doer *user_model.User) bool { - var doerID int64 - if doer != nil { - doerID = doer.ID - } - if doer == nil || !doer.IsAdmin { - if user.KeepActivityPrivate && doerID != user.ID { - return false - } - } - return true + return !user.KeepActivityPrivate || + doer != nil && (doer.IsAdmin || user.ID == doer.ID) } func activityQueryCondition(opts GetFeedsOptions) (builder.Cond, error) { cond := builder.NewCond() - var repoIDs []int64 - var actorID int64 - if opts.Actor != nil { - actorID = opts.Actor.ID + if opts.RequestedTeam != nil && opts.RequestedUser == nil { + org, err := user_model.GetUserByID(opts.RequestedTeam.OrgID) + if err != nil { + return nil, err + } + opts.RequestedUser = org + } + + // check activity visibility for actor ( similar to activityReadable() ) + if opts.Actor == nil { + cond = cond.And(builder.In("act_user_id", + builder.Select("`user`.id").Where( + builder.Eq{"keep_activity_private": false, "visibility": structs.VisibleTypePublic}, + ).From("`user`"), + )) + } else if !opts.Actor.IsAdmin { + cond = cond.And(builder.In("act_user_id", + builder.Select("`user`.id").Where( + builder.Eq{"keep_activity_private": false}. + And(builder.In("visibility", structs.VisibleTypePublic, structs.VisibleTypeLimited))). 
+ Or(builder.Eq{"id": opts.Actor.ID}).From("`user`"), + )) } // check readable repositories by doer/actor if opts.Actor == nil || !opts.Actor.IsAdmin { - if opts.RequestedUser.IsOrganization() { - env, err := OrgFromUser(opts.RequestedUser).AccessibleReposEnv(actorID) - if err != nil { - return nil, fmt.Errorf("AccessibleReposEnv: %v", err) - } - if repoIDs, err = env.RepoIDs(1, opts.RequestedUser.NumRepos); err != nil { - return nil, fmt.Errorf("GetUserRepositories: %v", err) - } - cond = cond.And(builder.In("repo_id", repoIDs)) - } else { - cond = cond.And(builder.In("repo_id", AccessibleRepoIDsQuery(opts.Actor))) - } + cond = cond.And(builder.In("repo_id", AccessibleRepoIDsQuery(opts.Actor))) + } + + if opts.RequestedRepo != nil { + cond = cond.And(builder.Eq{"repo_id": opts.RequestedRepo.ID}) } if opts.RequestedTeam != nil { - env := OrgFromUser(opts.RequestedUser).AccessibleTeamReposEnv(opts.RequestedTeam) + env := organization.OrgFromUser(opts.RequestedUser).AccessibleTeamReposEnv(opts.RequestedTeam) teamRepoIDs, err := env.RepoIDs(1, opts.RequestedUser.NumRepos) if err != nil { return nil, fmt.Errorf("GetTeamRepositories: %v", err) @@ -395,13 +410,16 @@ func activityQueryCondition(opts GetFeedsOptions) (builder.Cond, error) { cond = cond.And(builder.In("repo_id", teamRepoIDs)) } - cond = cond.And(builder.Eq{"user_id": opts.RequestedUser.ID}) + if opts.RequestedUser != nil { + cond = cond.And(builder.Eq{"user_id": opts.RequestedUser.ID}) - if opts.OnlyPerformedBy { - cond = cond.And(builder.Eq{"act_user_id": opts.RequestedUser.ID}) + if opts.OnlyPerformedBy { + cond = cond.And(builder.Eq{"act_user_id": opts.RequestedUser.ID}) + } } + if !opts.IncludePrivate { - cond = cond.And(builder.Eq{"is_private": false}) + cond = cond.And(builder.Eq{"`action`.is_private": false}) } if !opts.IncludeDeleted { cond = cond.And(builder.Eq{"is_deleted": false}) @@ -414,8 +432,8 @@ func activityQueryCondition(opts GetFeedsOptions) (builder.Cond, error) { } else { dateHigh := dateLow.Add(86399000000000) // 23h59m59s - cond = cond.And(builder.Gte{"created_unix": dateLow.Unix()}) - cond = cond.And(builder.Lte{"created_unix": dateHigh.Unix()}) + cond = cond.And(builder.Gte{"`action`.created_unix": dateLow.Unix()}) + cond = cond.And(builder.Lte{"`action`.created_unix": dateHigh.Unix()}) } } @@ -492,7 +510,7 @@ func notifyWatchers(ctx context.Context, actions ...*Action) error { permPR[i] = false continue } - perm, err := getUserRepoPermission(ctx, repo, user) + perm, err := GetUserRepoPermission(ctx, repo, user) if err != nil { permCode[i] = false permIssue[i] = false diff --git a/models/action_list.go b/models/action_list.go index 3f52d3cd5e..5f7b17b9de 100644 --- a/models/action_list.go +++ b/models/action_list.go @@ -10,6 +10,7 @@ import ( "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" ) // ActionList defines a list of actions @@ -22,10 +23,10 @@ func (actions ActionList) getUserIDs() []int64 { userIDs[action.ActUserID] = struct{}{} } } - return keysInt64(userIDs) + return container.KeysInt64(userIDs) } -func (actions ActionList) loadUsers(e db.Engine) ([]*user_model.User, error) { +func (actions ActionList) loadUsers(e db.Engine) (map[int64]*user_model.User, error) { if len(actions) == 0 { return nil, nil } @@ -42,12 +43,7 @@ func (actions ActionList) loadUsers(e db.Engine) ([]*user_model.User, error) { for _, action := range actions { action.ActUser = userMaps[action.ActUserID] } - 
return valuesUser(userMaps), nil -} - -// LoadUsers loads actions' all users -func (actions ActionList) LoadUsers() ([]*user_model.User, error) { - return actions.loadUsers(db.GetEngine(db.DefaultContext)) + return userMaps, nil } func (actions ActionList) getRepoIDs() []int64 { @@ -57,48 +53,63 @@ func (actions ActionList) getRepoIDs() []int64 { repoIDs[action.RepoID] = struct{}{} } } - return keysInt64(repoIDs) + return container.KeysInt64(repoIDs) } -func (actions ActionList) loadRepositories(e db.Engine) ([]*repo_model.Repository, error) { +func (actions ActionList) loadRepositories(e db.Engine) error { if len(actions) == 0 { - return nil, nil + return nil } repoIDs := actions.getRepoIDs() repoMaps := make(map[int64]*repo_model.Repository, len(repoIDs)) - err := e. - In("id", repoIDs). - Find(&repoMaps) + err := e.In("id", repoIDs).Find(&repoMaps) if err != nil { - return nil, fmt.Errorf("find repository: %v", err) + return fmt.Errorf("find repository: %v", err) } for _, action := range actions { action.Repo = repoMaps[action.RepoID] } - return valuesRepository(repoMaps), nil + return nil } -// LoadRepositories loads actions' all repositories -func (actions ActionList) LoadRepositories() ([]*repo_model.Repository, error) { - return actions.loadRepositories(db.GetEngine(db.DefaultContext)) -} - -// loadAttributes loads all attributes -func (actions ActionList) loadAttributes(e db.Engine) (err error) { - if _, err = actions.loadUsers(e); err != nil { - return +func (actions ActionList) loadRepoOwner(e db.Engine, userMap map[int64]*user_model.User) (err error) { + if userMap == nil { + userMap = make(map[int64]*user_model.User) } - if _, err = actions.loadRepositories(e); err != nil { - return + for _, action := range actions { + if action.Repo == nil { + continue + } + repoOwner, ok := userMap[action.Repo.OwnerID] + if !ok { + repoOwner, err = user_model.GetUserByID(action.Repo.OwnerID) + if err != nil { + if user_model.IsErrUserNotExist(err) { + continue + } + return err + } + userMap[repoOwner.ID] = repoOwner + } + action.Repo.Owner = repoOwner } return nil } -// LoadAttributes loads attributes of the actions -func (actions ActionList) LoadAttributes() error { - return actions.loadAttributes(db.GetEngine(db.DefaultContext)) +// loadAttributes loads all attributes +func (actions ActionList) loadAttributes(e db.Engine) error { + userMap, err := actions.loadUsers(e) + if err != nil { + return err + } + + if err := actions.loadRepositories(e); err != nil { + return err + } + + return actions.loadRepoOwner(e, userMap) } diff --git a/models/action_test.go b/models/action_test.go index 306d382364..fb8a6c2686 100644 --- a/models/action_test.go +++ b/models/action_test.go @@ -8,6 +8,7 @@ import ( "path" "testing" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -39,7 +40,7 @@ func TestGetFeeds(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - actions, err := GetFeeds(GetFeedsOptions{ + actions, err := GetFeeds(db.DefaultContext, GetFeedsOptions{ RequestedUser: user, Actor: user, IncludePrivate: true, @@ -52,7 +53,7 @@ func TestGetFeeds(t *testing.T) { assert.EqualValues(t, user.ID, actions[0].UserID) } - actions, err = GetFeeds(GetFeedsOptions{ + actions, err = GetFeeds(db.DefaultContext, GetFeedsOptions{ RequestedUser: user, Actor: user, IncludePrivate: false, @@ -62,13 
+63,54 @@ func TestGetFeeds(t *testing.T) { assert.Len(t, actions, 0) } +func TestGetFeedsForRepos(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + privRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2}).(*repo_model.Repository) + pubRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 8}).(*repo_model.Repository) + + // private repo & no login + actions, err := GetFeeds(db.DefaultContext, GetFeedsOptions{ + RequestedRepo: privRepo, + IncludePrivate: true, + }) + assert.NoError(t, err) + assert.Len(t, actions, 0) + + // public repo & no login + actions, err = GetFeeds(db.DefaultContext, GetFeedsOptions{ + RequestedRepo: pubRepo, + IncludePrivate: true, + }) + assert.NoError(t, err) + assert.Len(t, actions, 1) + + // private repo and login + actions, err = GetFeeds(db.DefaultContext, GetFeedsOptions{ + RequestedRepo: privRepo, + IncludePrivate: true, + Actor: user, + }) + assert.NoError(t, err) + assert.Len(t, actions, 1) + + // public repo & login + actions, err = GetFeeds(db.DefaultContext, GetFeedsOptions{ + RequestedRepo: pubRepo, + IncludePrivate: true, + Actor: user, + }) + assert.NoError(t, err) + assert.Len(t, actions, 1) +} + func TestGetFeeds2(t *testing.T) { // test with an organization user assert.NoError(t, unittest.PrepareTestDatabase()) org := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - actions, err := GetFeeds(GetFeedsOptions{ + actions, err := GetFeeds(db.DefaultContext, GetFeedsOptions{ RequestedUser: org, Actor: user, IncludePrivate: true, @@ -82,7 +124,7 @@ func TestGetFeeds2(t *testing.T) { assert.EqualValues(t, org.ID, actions[0].UserID) } - actions, err = GetFeeds(GetFeedsOptions{ + actions, err = GetFeeds(db.DefaultContext, GetFeedsOptions{ RequestedUser: org, Actor: user, IncludePrivate: false, @@ -93,6 +135,46 @@ func TestGetFeeds2(t *testing.T) { assert.Len(t, actions, 0) } +func TestActivityReadable(t *testing.T) { + tt := []struct { + desc string + user *user_model.User + doer *user_model.User + result bool + }{{ + desc: "user should see own activity", + user: &user_model.User{ID: 1}, + doer: &user_model.User{ID: 1}, + result: true, + }, { + desc: "anon should see activity if public", + user: &user_model.User{ID: 1}, + result: true, + }, { + desc: "anon should NOT see activity", + user: &user_model.User{ID: 1, KeepActivityPrivate: true}, + result: false, + }, { + desc: "user should see own activity if private too", + user: &user_model.User{ID: 1, KeepActivityPrivate: true}, + doer: &user_model.User{ID: 1}, + result: true, + }, { + desc: "other user should NOT see activity", + user: &user_model.User{ID: 1, KeepActivityPrivate: true}, + doer: &user_model.User{ID: 2}, + result: false, + }, { + desc: "admin should see activity", + user: &user_model.User{ID: 1, KeepActivityPrivate: true}, + doer: &user_model.User{ID: 2, IsAdmin: true}, + result: true, + }} + for _, test := range tt { + assert.Equal(t, test.result, activityReadable(test.user, test.doer), test.desc) + } +} + func TestNotifyWatchers(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) @@ -129,3 +211,20 @@ func TestNotifyWatchers(t *testing.T) { OpType: action.OpType, }) } + +func TestGetFeedsCorrupted(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + user := unittest.AssertExistsAndLoadBean(t, 
&user_model.User{ID: 1}).(*user_model.User) + unittest.AssertExistsAndLoadBean(t, &Action{ + ID: 8, + RepoID: 1700, + }) + + actions, err := GetFeeds(db.DefaultContext, GetFeedsOptions{ + RequestedUser: user, + Actor: user, + IncludePrivate: true, + }) + assert.NoError(t, err) + assert.Len(t, actions, 0) +} diff --git a/models/admin/main_test.go b/models/admin/main_test.go index f6c9dce57a..693b70fbf7 100644 --- a/models/admin/main_test.go +++ b/models/admin/main_test.go @@ -12,7 +12,8 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", ".."), - "notice.yml", - ) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{"notice.yml"}, + }) } diff --git a/models/admin/notice.go b/models/admin/notice.go index daf095f680..77277e4b2d 100644 --- a/models/admin/notice.go +++ b/models/admin/notice.go @@ -7,6 +7,7 @@ package admin import ( "context" "fmt" + "time" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/log" @@ -133,3 +134,13 @@ func DeleteNoticesByIDs(ids []int64) error { Delete(new(Notice)) return err } + +// DeleteOldSystemNotices deletes all old system notices from database. +func DeleteOldSystemNotices(olderThan time.Duration) (err error) { + if olderThan <= 0 { + return nil + } + + _, err = db.GetEngine(db.DefaultContext).Where("created_unix < ?", time.Now().Add(-olderThan).Unix()).Delete(&Notice{}) + return +} diff --git a/models/asymkey/main_test.go b/models/asymkey/main_test.go index 1c4f7752e2..9f53e335fe 100644 --- a/models/asymkey/main_test.go +++ b/models/asymkey/main_test.go @@ -18,12 +18,15 @@ func init() { } func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", ".."), - "gpg_key.yml", - "public_key.yml", - "deploy_key.yml", - "gpg_key_import.yml", - "user.yml", - "email_address.yml", - ) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{ + "gpg_key.yml", + "public_key.yml", + "deploy_key.yml", + "gpg_key_import.yml", + "user.yml", + "email_address.yml", + }, + }) } diff --git a/models/asymkey/ssh_key_deploy.go b/models/asymkey/ssh_key_deploy.go index fc6324792a..fe2ade43ae 100644 --- a/models/asymkey/ssh_key_deploy.go +++ b/models/asymkey/ssh_key_deploy.go @@ -58,7 +58,7 @@ func (key *DeployKey) GetContent() error { return nil } -// IsReadOnly checks if the key can only be used for read operations +// IsReadOnly checks if the key can only be used for read operations, used by template func (key *DeployKey) IsReadOnly() bool { return key.Mode == perm.AccessModeRead } @@ -203,12 +203,6 @@ func UpdateDeployKeyCols(key *DeployKey, cols ...string) error { return err } -// UpdateDeployKey updates deploy key information. 
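The models/admin/notice.go hunk above also adds DeleteOldSystemNotices, which deletes notices whose created_unix is older than now minus the given duration and does nothing for durations <= 0. A minimal usage sketch follows; the helper name and the 90-day retention value are arbitrary examples, not something configured by this patch.

    // Sketch only: pruning old admin notices with the new helper.
    package sketch

    import (
        "time"

        "code.gitea.io/gitea/models/admin"
    )

    func pruneOldNotices() error {
        // Notices created more than 90 days ago are deleted; olderThan <= 0 is a no-op.
        return admin.DeleteOldSystemNotices(90 * 24 * time.Hour)
    }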
-func UpdateDeployKey(key *DeployKey) error { - _, err := db.GetEngine(db.DefaultContext).ID(key.ID).AllCols().Update(key) - return err -} - // ListDeployKeysOptions are options for ListDeployKeys type ListDeployKeysOptions struct { db.ListOptions diff --git a/models/auth/main_test.go b/models/auth/main_test.go index 4255f488fe..ccbdd4e81c 100644 --- a/models/auth/main_test.go +++ b/models/auth/main_test.go @@ -12,11 +12,14 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", ".."), - "login_source.yml", - "oauth2_application.yml", - "oauth2_authorization_code.yml", - "oauth2_grant.yml", - "webauthn_credential.yml", - ) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{ + "login_source.yml", + "oauth2_application.yml", + "oauth2_authorization_code.yml", + "oauth2_grant.yml", + "webauthn_credential.yml", + }, + }) } diff --git a/models/auth/oauth2.go b/models/auth/oauth2.go index 2341e08620..4d44a8842a 100644 --- a/models/auth/oauth2.go +++ b/models/auth/oauth2.go @@ -245,7 +245,7 @@ func deleteOAuth2Application(sess db.Engine, id, userid int64) error { "oauth2_authorization_code.grant_id = oauth2_grant.id AND oauth2_grant.application_id = ?", id).Find(&codes); err != nil { return err } - codeIDs := make([]int64, 0) + codeIDs := make([]int64, 0, len(codes)) for _, grant := range codes { codeIDs = append(codeIDs, grant.ID) } diff --git a/models/branches.go b/models/branches.go index e6d8b7441a..47fd3dc0a4 100644 --- a/models/branches.go +++ b/models/branches.go @@ -11,6 +11,7 @@ import ( "time" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -94,7 +95,7 @@ func (protectBranch *ProtectedBranch) CanUserPush(userID int64) bool { return false } - in, err := IsUserInTeams(userID, protectBranch.WhitelistTeamIDs) + in, err := organization.IsUserInTeams(db.DefaultContext, userID, protectBranch.WhitelistTeamIDs) if err != nil { log.Error("IsUserInTeams: %v", err) return false @@ -103,7 +104,7 @@ func (protectBranch *ProtectedBranch) CanUserPush(userID int64) bool { } // IsUserMergeWhitelisted checks if some user is whitelisted to merge to this branch -func IsUserMergeWhitelisted(protectBranch *ProtectedBranch, userID int64, permissionInRepo Permission) bool { +func IsUserMergeWhitelisted(ctx context.Context, protectBranch *ProtectedBranch, userID int64, permissionInRepo Permission) bool { if !protectBranch.EnableMergeWhitelist { // Then we need to fall back on whether the user has write permission return permissionInRepo.CanWrite(unit.TypeCode) @@ -117,7 +118,7 @@ func IsUserMergeWhitelisted(protectBranch *ProtectedBranch, userID int64, permis return false } - in, err := IsUserInTeams(userID, protectBranch.MergeWhitelistTeamIDs) + in, err := organization.IsUserInTeams(ctx, userID, protectBranch.MergeWhitelistTeamIDs) if err != nil { log.Error("IsUserInTeams: %v", err) return false @@ -149,7 +150,7 @@ func isUserOfficialReviewer(ctx context.Context, protectBranch *ProtectedBranch, return true, nil } - inTeam, err := isUserInTeams(db.GetEngine(ctx), user.ID, protectBranch.ApprovalsWhitelistTeamIDs) + inTeam, err := organization.IsUserInTeams(ctx, user.ID, protectBranch.ApprovalsWhitelistTeamIDs) if err != nil { return false, err } @@ -158,16 +159,16 @@ func isUserOfficialReviewer(ctx context.Context, protectBranch *ProtectedBranch, } // HasEnoughApprovals returns 
true if pr has enough granted approvals. -func (protectBranch *ProtectedBranch) HasEnoughApprovals(pr *PullRequest) bool { +func (protectBranch *ProtectedBranch) HasEnoughApprovals(ctx context.Context, pr *PullRequest) bool { if protectBranch.RequiredApprovals == 0 { return true } - return protectBranch.GetGrantedApprovalsCount(pr) >= protectBranch.RequiredApprovals + return protectBranch.GetGrantedApprovalsCount(ctx, pr) >= protectBranch.RequiredApprovals } // GetGrantedApprovalsCount returns the number of granted approvals for pr. A granted approval must be authored by a user in an approval whitelist. -func (protectBranch *ProtectedBranch) GetGrantedApprovalsCount(pr *PullRequest) int64 { - sess := db.GetEngine(db.DefaultContext).Where("issue_id = ?", pr.IssueID). +func (protectBranch *ProtectedBranch) GetGrantedApprovalsCount(ctx context.Context, pr *PullRequest) int64 { + sess := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID). And("type = ?", ReviewTypeApprove). And("official = ?", true). And("dismissed = ?", false) @@ -184,11 +185,11 @@ func (protectBranch *ProtectedBranch) GetGrantedApprovalsCount(pr *PullRequest) } // MergeBlockedByRejectedReview returns true if merge is blocked by rejected reviews -func (protectBranch *ProtectedBranch) MergeBlockedByRejectedReview(pr *PullRequest) bool { +func (protectBranch *ProtectedBranch) MergeBlockedByRejectedReview(ctx context.Context, pr *PullRequest) bool { if !protectBranch.BlockOnRejectedReviews { return false } - rejectExist, err := db.GetEngine(db.DefaultContext).Where("issue_id = ?", pr.IssueID). + rejectExist, err := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID). And("type = ?", ReviewTypeReject). And("official = ?", true). And("dismissed = ?", false). @@ -203,11 +204,11 @@ func (protectBranch *ProtectedBranch) MergeBlockedByRejectedReview(pr *PullReque // MergeBlockedByOfficialReviewRequests block merge because of some review request to official reviewer // of from official review -func (protectBranch *ProtectedBranch) MergeBlockedByOfficialReviewRequests(pr *PullRequest) bool { +func (protectBranch *ProtectedBranch) MergeBlockedByOfficialReviewRequests(ctx context.Context, pr *PullRequest) bool { if !protectBranch.BlockOnOfficialReviewRequests { return false } - has, err := db.GetEngine(db.DefaultContext).Where("issue_id = ?", pr.IssueID). + has, err := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID). And("type = ?", ReviewTypeRequest). And("official = ?", true). Exist(new(Review)) @@ -336,43 +337,43 @@ type WhitelistOptions struct { // If ID is 0, it creates a new record. Otherwise, updates existing record. // This function also performs check if whitelist user and team's IDs have been changed // to avoid unnecessary whitelist delete and regenerate. 
-func UpdateProtectBranch(repo *repo_model.Repository, protectBranch *ProtectedBranch, opts WhitelistOptions) (err error) { - if err = repo.GetOwner(db.DefaultContext); err != nil { +func UpdateProtectBranch(ctx context.Context, repo *repo_model.Repository, protectBranch *ProtectedBranch, opts WhitelistOptions) (err error) { + if err = repo.GetOwner(ctx); err != nil { return fmt.Errorf("GetOwner: %v", err) } - whitelist, err := updateUserWhitelist(repo, protectBranch.WhitelistUserIDs, opts.UserIDs) + whitelist, err := updateUserWhitelist(ctx, repo, protectBranch.WhitelistUserIDs, opts.UserIDs) if err != nil { return err } protectBranch.WhitelistUserIDs = whitelist - whitelist, err = updateUserWhitelist(repo, protectBranch.MergeWhitelistUserIDs, opts.MergeUserIDs) + whitelist, err = updateUserWhitelist(ctx, repo, protectBranch.MergeWhitelistUserIDs, opts.MergeUserIDs) if err != nil { return err } protectBranch.MergeWhitelistUserIDs = whitelist - whitelist, err = updateApprovalWhitelist(repo, protectBranch.ApprovalsWhitelistUserIDs, opts.ApprovalsUserIDs) + whitelist, err = updateApprovalWhitelist(ctx, repo, protectBranch.ApprovalsWhitelistUserIDs, opts.ApprovalsUserIDs) if err != nil { return err } protectBranch.ApprovalsWhitelistUserIDs = whitelist // if the repo is in an organization - whitelist, err = updateTeamWhitelist(repo, protectBranch.WhitelistTeamIDs, opts.TeamIDs) + whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.WhitelistTeamIDs, opts.TeamIDs) if err != nil { return err } protectBranch.WhitelistTeamIDs = whitelist - whitelist, err = updateTeamWhitelist(repo, protectBranch.MergeWhitelistTeamIDs, opts.MergeTeamIDs) + whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.MergeWhitelistTeamIDs, opts.MergeTeamIDs) if err != nil { return err } protectBranch.MergeWhitelistTeamIDs = whitelist - whitelist, err = updateTeamWhitelist(repo, protectBranch.ApprovalsWhitelistTeamIDs, opts.ApprovalsTeamIDs) + whitelist, err = updateTeamWhitelist(ctx, repo, protectBranch.ApprovalsWhitelistTeamIDs, opts.ApprovalsTeamIDs) if err != nil { return err } @@ -380,13 +381,13 @@ func UpdateProtectBranch(repo *repo_model.Repository, protectBranch *ProtectedBr // Make sure protectBranch.ID is not 0 for whitelists if protectBranch.ID == 0 { - if _, err = db.GetEngine(db.DefaultContext).Insert(protectBranch); err != nil { + if _, err = db.GetEngine(ctx).Insert(protectBranch); err != nil { return fmt.Errorf("Insert: %v", err) } return nil } - if _, err = db.GetEngine(db.DefaultContext).ID(protectBranch.ID).AllCols().Update(protectBranch); err != nil { + if _, err = db.GetEngine(ctx).ID(protectBranch.ID).AllCols().Update(protectBranch); err != nil { return fmt.Errorf("Update: %v", err) } @@ -415,7 +416,7 @@ func IsProtectedBranch(repoID int64, branchName string) (bool, error) { // updateApprovalWhitelist checks whether the user whitelist changed and returns a whitelist with // the users from newWhitelist which have explicit read or write access to the repo. 
-func updateApprovalWhitelist(repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) { +func updateApprovalWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) { hasUsersChanged := !util.IsSliceInt64Eq(currentWhitelist, newWhitelist) if !hasUsersChanged { return currentWhitelist, nil @@ -423,7 +424,7 @@ func updateApprovalWhitelist(repo *repo_model.Repository, currentWhitelist, newW whitelist = make([]int64, 0, len(newWhitelist)) for _, userID := range newWhitelist { - if reader, err := IsRepoReader(repo, userID); err != nil { + if reader, err := IsRepoReader(ctx, repo, userID); err != nil { return nil, err } else if !reader { continue @@ -436,7 +437,7 @@ func updateApprovalWhitelist(repo *repo_model.Repository, currentWhitelist, newW // updateUserWhitelist checks whether the user whitelist changed and returns a whitelist with // the users from newWhitelist which have write access to the repo. -func updateUserWhitelist(repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) { +func updateUserWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) { hasUsersChanged := !util.IsSliceInt64Eq(currentWhitelist, newWhitelist) if !hasUsersChanged { return currentWhitelist, nil @@ -444,11 +445,11 @@ func updateUserWhitelist(repo *repo_model.Repository, currentWhitelist, newWhite whitelist = make([]int64, 0, len(newWhitelist)) for _, userID := range newWhitelist { - user, err := user_model.GetUserByID(userID) + user, err := user_model.GetUserByIDCtx(ctx, userID) if err != nil { return nil, fmt.Errorf("GetUserByID [user_id: %d, repo_id: %d]: %v", userID, repo.ID, err) } - perm, err := GetUserRepoPermission(repo, user) + perm, err := GetUserRepoPermission(ctx, repo, user) if err != nil { return nil, fmt.Errorf("GetUserRepoPermission [user_id: %d, repo_id: %d]: %v", userID, repo.ID, err) } @@ -465,13 +466,13 @@ func updateUserWhitelist(repo *repo_model.Repository, currentWhitelist, newWhite // updateTeamWhitelist checks whether the team whitelist changed and returns a whitelist with // the teams from newWhitelist which have write access to the repo. 
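UpdateProtectBranch and the whitelist helpers around it (updateApprovalWhitelist, updateUserWhitelist, updateTeamWhitelist) now take an explicit context.Context instead of hard-coding db.DefaultContext, so callers can compose them inside a transaction. A minimal sketch of the calling pattern, modelled on the updated models/branches_test.go later in this patch; the wrapper function and its error handling are illustrative only.

    // Sketch only: driving the context-aware UpdateProtectBranch inside a transaction.
    package sketch

    import (
        "code.gitea.io/gitea/models"
        "code.gitea.io/gitea/models/db"
        repo_model "code.gitea.io/gitea/models/repo"
    )

    // protectMasterBranch is an illustrative helper, not part of this patch.
    func protectMasterBranch(repo *repo_model.Repository) error {
        ctx, committer, err := db.TxContext()
        if err != nil {
            return err
        }
        defer committer.Close()

        if err := models.UpdateProtectBranch(ctx, repo, &models.ProtectedBranch{
            RepoID:     repo.ID,
            BranchName: "master",
        }, models.WhitelistOptions{}); err != nil {
            return err
        }
        return committer.Commit()
    }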
-func updateTeamWhitelist(repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) { +func updateTeamWhitelist(ctx context.Context, repo *repo_model.Repository, currentWhitelist, newWhitelist []int64) (whitelist []int64, err error) { hasTeamsChanged := !util.IsSliceInt64Eq(currentWhitelist, newWhitelist) if !hasTeamsChanged { return currentWhitelist, nil } - teams, err := GetTeamsWithAccessToRepo(repo.OwnerID, repo.ID, perm.AccessModeRead) + teams, err := organization.GetTeamsWithAccessToRepo(ctx, repo.OwnerID, repo.ID, perm.AccessModeRead) if err != nil { return nil, fmt.Errorf("GetTeamsWithAccessToRepo [org_id: %d, repo_id: %d]: %v", repo.OwnerID, repo.ID, err) } diff --git a/models/branches_test.go b/models/branches_test.go index e1a71853f2..0a0f125cc6 100644 --- a/models/branches_test.go +++ b/models/branches_test.go @@ -7,6 +7,7 @@ package models import ( "testing" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" @@ -99,11 +100,14 @@ func TestRenameBranch(t *testing.T) { repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) _isDefault := false - err := UpdateProtectBranch(repo1, &ProtectedBranch{ + ctx, committer, err := db.TxContext() + defer committer.Close() + assert.NoError(t, err) + assert.NoError(t, UpdateProtectBranch(ctx, repo1, &ProtectedBranch{ RepoID: repo1.ID, BranchName: "master", - }, WhitelistOptions{}) - assert.NoError(t, err) + }, WhitelistOptions{})) + assert.NoError(t, committer.Commit()) assert.NoError(t, RenameBranch(repo1, "master", "main", func(isDefault bool) error { _isDefault = isDefault diff --git a/models/commit_status.go b/models/commit_status.go index cd7497eed8..cf2143d30f 100644 --- a/models/commit_status.go +++ b/models/commit_status.go @@ -232,12 +232,13 @@ type CommitStatusIndex struct { // GetLatestCommitStatus returns all statuses with a unique context for a given commit. func GetLatestCommitStatus(repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, int64, error) { - return getLatestCommitStatus(db.GetEngine(db.DefaultContext), repoID, sha, listOptions) + return GetLatestCommitStatusCtx(db.DefaultContext, repoID, sha, listOptions) } -func getLatestCommitStatus(e db.Engine, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, int64, error) { +// GetLatestCommitStatusCtx returns all statuses with a unique context for a given commit. +func GetLatestCommitStatusCtx(ctx context.Context, repoID int64, sha string, listOptions db.ListOptions) ([]*CommitStatus, int64, error) { ids := make([]int64, 0, 10) - sess := e.Table(&CommitStatus{}). + sess := db.GetEngine(ctx).Table(&CommitStatus{}). Where("repo_id = ?", repoID).And("sha = ?", sha). Select("max( id ) as id"). 
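GetLatestCommitStatus keeps its old signature but now just delegates to GetLatestCommitStatusCtx, which takes the context explicitly. A hedged usage sketch; the paging values are placeholders:

package example

import (
	"context"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/db"
)

// latestStatuses is a sketch: it loads the first page of unique-context
// commit statuses for one commit, reusing whatever context the caller holds.
func latestStatuses(ctx context.Context, repoID int64, sha string) ([]*models.CommitStatus, int64, error) {
	return models.GetLatestCommitStatusCtx(ctx, repoID, sha, db.ListOptions{
		Page:     1,
		PageSize: 10,
	})
}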
GroupBy("context_hash").OrderBy("max( id ) desc") @@ -252,7 +253,7 @@ func getLatestCommitStatus(e db.Engine, repoID int64, sha string, listOptions db if len(ids) == 0 { return statuses, count, nil } - return statuses, count, e.In("id", ids).Find(&statuses) + return statuses, count, db.GetEngine(ctx).In("id", ids).Find(&statuses) } // FindRepoRecentCommitStatusContexts returns repository's recent commit status contexts diff --git a/models/consistency_test.go b/models/consistency_test.go index d49a0132f0..1593500361 100644 --- a/models/consistency_test.go +++ b/models/consistency_test.go @@ -8,7 +8,10 @@ import ( "testing" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" ) @@ -33,3 +36,70 @@ func TestDeleteOrphanedObjects(t *testing.T) { assert.NoError(t, err) assert.EqualValues(t, countBefore, countAfter) } + +func TestNewMilestone(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + milestone := &issues_model.Milestone{ + RepoID: 1, + Name: "milestoneName", + Content: "milestoneContent", + } + + assert.NoError(t, issues_model.NewMilestone(milestone)) + unittest.AssertExistsAndLoadBean(t, milestone) + unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &issues_model.Milestone{}) +} + +func TestChangeMilestoneStatus(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}).(*issues_model.Milestone) + + assert.NoError(t, issues_model.ChangeMilestoneStatus(milestone, true)) + unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}, "is_closed=1") + unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &issues_model.Milestone{}) + + assert.NoError(t, issues_model.ChangeMilestoneStatus(milestone, false)) + unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}, "is_closed=0") + unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &issues_model.Milestone{}) +} + +func TestDeleteMilestoneByRepoID(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + assert.NoError(t, issues_model.DeleteMilestoneByRepoID(1, 1)) + unittest.AssertNotExistsBean(t, &issues_model.Milestone{ID: 1}) + unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: 1}) + + assert.NoError(t, issues_model.DeleteMilestoneByRepoID(unittest.NonexistentID, unittest.NonexistentID)) +} + +func TestUpdateMilestone(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}).(*issues_model.Milestone) + milestone.Name = " newMilestoneName " + milestone.Content = "newMilestoneContent" + assert.NoError(t, issues_model.UpdateMilestone(milestone, milestone.IsClosed)) + milestone = unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}).(*issues_model.Milestone) + assert.EqualValues(t, "newMilestoneName", milestone.Name) + unittest.CheckConsistencyFor(t, &issues_model.Milestone{}) +} + +func TestUpdateMilestoneCounters(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + issue := unittest.AssertExistsAndLoadBean(t, &Issue{MilestoneID: 1}, + "is_closed=0").(*Issue) + + issue.IsClosed = true + issue.ClosedUnix = timeutil.TimeStampNow() + _, err := db.GetEngine(db.DefaultContext).ID(issue.ID).Cols("is_closed", 
"closed_unix").Update(issue) + assert.NoError(t, err) + assert.NoError(t, issues_model.UpdateMilestoneCounters(db.DefaultContext, issue.MilestoneID)) + unittest.CheckConsistencyFor(t, &issues_model.Milestone{}) + + issue.IsClosed = false + issue.ClosedUnix = 0 + _, err = db.GetEngine(db.DefaultContext).ID(issue.ID).Cols("is_closed", "closed_unix").Update(issue) + assert.NoError(t, err) + assert.NoError(t, issues_model.UpdateMilestoneCounters(db.DefaultContext, issue.MilestoneID)) + unittest.CheckConsistencyFor(t, &issues_model.Milestone{}) +} diff --git a/models/db/context.go b/models/db/context.go index 1cd23d453c..c823952cf6 100644 --- a/models/db/context.go +++ b/models/db/context.go @@ -103,7 +103,14 @@ func WithContext(f func(ctx *Context) error) error { } // WithTx represents executing database operations on a transaction -func WithTx(f func(ctx context.Context) error) error { +// you can optionally change the context to a parrent one +func WithTx(f func(ctx context.Context) error, stdCtx ...context.Context) error { + parentCtx := DefaultContext + if len(stdCtx) != 0 && stdCtx[0] != nil { + // TODO: make sure parent context has no open session + parentCtx = stdCtx[0] + } + sess := x.NewSession() defer sess.Close() if err := sess.Begin(); err != nil { @@ -111,7 +118,7 @@ func WithTx(f func(ctx context.Context) error) error { } if err := f(&Context{ - Context: DefaultContext, + Context: parentCtx, e: sess, }); err != nil { return err diff --git a/models/db/error.go b/models/db/error.go index f20cc9b4cb..6557229943 100644 --- a/models/db/error.go +++ b/models/db/error.go @@ -42,3 +42,18 @@ func IsErrSSHDisabled(err error) bool { func (err ErrSSHDisabled) Error() string { return "SSH is disabled" } + +// ErrNotExist represents a non-exist error. +type ErrNotExist struct { + ID int64 +} + +// IsErrNotExist checks if an error is an ErrNotExist +func IsErrNotExist(err error) bool { + _, ok := err.(ErrNotExist) + return ok +} + +func (err ErrNotExist) Error() string { + return fmt.Sprintf("record does not exist [id: %d]", err.ID) +} diff --git a/models/db/paginator/main_test.go b/models/db/paginator/main_test.go index 601ed89710..1127987e53 100644 --- a/models/db/paginator/main_test.go +++ b/models/db/paginator/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } diff --git a/models/error.go b/models/error.go index f0e8751d75..0dc14c3e31 100644 --- a/models/error.go +++ b/models/error.go @@ -13,21 +13,6 @@ import ( "code.gitea.io/gitea/modules/git" ) -// ErrNotExist represents a non-exist error. -type ErrNotExist struct { - ID int64 -} - -// IsErrNotExist checks if an error is an ErrNotExist -func IsErrNotExist(err error) bool { - _, ok := err.(ErrNotExist) - return ok -} - -func (err ErrNotExist) Error() string { - return fmt.Sprintf("record does not exist [id: %d]", err.ID) -} - // ErrUserOwnRepos represents a "UserOwnRepos" kind of error. type ErrUserOwnRepos struct { UID int64 @@ -58,17 +43,19 @@ func (err ErrUserHasOrgs) Error() string { return fmt.Sprintf("user still has membership of organizations [uid: %d]", err.UID) } -// ErrUserNotAllowedCreateOrg represents a "UserNotAllowedCreateOrg" kind of error. -type ErrUserNotAllowedCreateOrg struct{} +// ErrUserOwnPackages notifies that the user (still) owns the packages. 
+type ErrUserOwnPackages struct { + UID int64 +} -// IsErrUserNotAllowedCreateOrg checks if an error is an ErrUserNotAllowedCreateOrg. -func IsErrUserNotAllowedCreateOrg(err error) bool { - _, ok := err.(ErrUserNotAllowedCreateOrg) +// IsErrUserOwnPackages checks if an error is an ErrUserOwnPackages. +func IsErrUserOwnPackages(err error) bool { + _, ok := err.(ErrUserOwnPackages) return ok } -func (err ErrUserNotAllowedCreateOrg) Error() string { - return "user is not allowed to create organizations" +func (err ErrUserOwnPackages) Error() string { + return fmt.Sprintf("user still has ownership of packages [uid: %d]", err.UID) } // __ __.__ __ .__ @@ -158,44 +145,6 @@ func (err ErrAccessTokenEmpty) Error() string { return "access token is empty" } -// ________ .__ __ .__ -// \_____ \_______ _________ ____ |__|____________ _/ |_|__| ____ ____ -// / | \_ __ \/ ___\__ \ / \| \___ /\__ \\ __\ |/ _ \ / \ -// / | \ | \/ /_/ > __ \| | \ |/ / / __ \| | | ( <_> ) | \ -// \_______ /__| \___ (____ /___| /__/_____ \(____ /__| |__|\____/|___| / -// \/ /_____/ \/ \/ \/ \/ \/ - -// ErrOrgNotExist represents a "OrgNotExist" kind of error. -type ErrOrgNotExist struct { - ID int64 - Name string -} - -// IsErrOrgNotExist checks if an error is a ErrOrgNotExist. -func IsErrOrgNotExist(err error) bool { - _, ok := err.(ErrOrgNotExist) - return ok -} - -func (err ErrOrgNotExist) Error() string { - return fmt.Sprintf("org does not exist [id: %d, name: %s]", err.ID, err.Name) -} - -// ErrLastOrgOwner represents a "LastOrgOwner" kind of error. -type ErrLastOrgOwner struct { - UID int64 -} - -// IsErrLastOrgOwner checks if an error is a ErrLastOrgOwner. -func IsErrLastOrgOwner(err error) bool { - _, ok := err.(ErrLastOrgOwner) - return ok -} - -func (err ErrLastOrgOwner) Error() string { - return fmt.Sprintf("user is the last member of owner team [uid: %d]", err.UID) -} - //.____ ____________________ //| | \_ _____/ _____/ //| | | __) \_____ \ @@ -332,7 +281,6 @@ type ErrInvalidCloneAddr struct { IsProtocolInvalid bool IsPermissionDenied bool LocalPath bool - NotResolvedIP bool } // IsErrInvalidCloneAddr checks if an error is a ErrInvalidCloneAddr. @@ -342,9 +290,6 @@ func IsErrInvalidCloneAddr(err error) bool { } func (err *ErrInvalidCloneAddr) Error() string { - if err.NotResolvedIP { - return fmt.Sprintf("migration/cloning from '%s' is not allowed: unknown hostname", err.Host) - } if err.IsInvalidPath { return fmt.Sprintf("migration/cloning from '%s' is not allowed: the provided path is invalid", err.Host) } @@ -622,18 +567,18 @@ func (err ErrBranchesEqual) Error() string { return fmt.Sprintf("branches are equal [head: %sm base: %s]", err.HeadBranchName, err.BaseBranchName) } -// ErrNotAllowedToMerge represents an error that a branch is protected and the current user is not allowed to modify it. -type ErrNotAllowedToMerge struct { +// ErrDisallowedToMerge represents an error that a branch is protected and the current user is not allowed to modify it. +type ErrDisallowedToMerge struct { Reason string } -// IsErrNotAllowedToMerge checks if an error is an ErrNotAllowedToMerge. -func IsErrNotAllowedToMerge(err error) bool { - _, ok := err.(ErrNotAllowedToMerge) +// IsErrDisallowedToMerge checks if an error is an ErrDisallowedToMerge. 
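The db.WithTx change shown a little earlier keeps the zero-argument form working as before while letting callers pass a parent context as an optional trailing argument. A small sketch of both call shapes; the work function is a placeholder for real statements run through db.GetEngine(ctx):

package example

import (
	"context"

	"code.gitea.io/gitea/models/db"
)

// doWork stands in for database statements executed via db.GetEngine(ctx).
func doWork(ctx context.Context) error {
	return nil
}

func withTxExamples(parent context.Context) error {
	// previous behaviour: the transaction context chains off db.DefaultContext
	if err := db.WithTx(doWork); err != nil {
		return err
	}
	// new behaviour: the transaction context chains off the caller's context
	return db.WithTx(doWork, parent)
}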
+func IsErrDisallowedToMerge(err error) bool { + _, ok := err.(ErrDisallowedToMerge) return ok } -func (err ErrNotAllowedToMerge) Error() string { +func (err ErrDisallowedToMerge) Error() string { return fmt.Sprintf("not allowed to merge [reason: %s]", err.Reason) } @@ -754,22 +699,6 @@ func (err ErrIssueIsClosed) Error() string { return fmt.Sprintf("issue is closed [id: %d, repo_id: %d, index: %d]", err.ID, err.RepoID, err.Index) } -// ErrIssueLabelTemplateLoad represents a "ErrIssueLabelTemplateLoad" kind of error. -type ErrIssueLabelTemplateLoad struct { - TemplateFile string - OriginalError error -} - -// IsErrIssueLabelTemplateLoad checks if an error is a ErrIssueLabelTemplateLoad. -func IsErrIssueLabelTemplateLoad(err error) bool { - _, ok := err.(ErrIssueLabelTemplateLoad) - return ok -} - -func (err ErrIssueLabelTemplateLoad) Error() string { - return fmt.Sprintf("Failed to load label template file '%s': %v", err.TemplateFile, err.OriginalError) -} - // ErrNewIssueInsert is used when the INSERT statement in newIssue fails type ErrNewIssueInsert struct { OriginalError error @@ -817,36 +746,6 @@ func (err ErrPullWasClosed) Error() string { return fmt.Sprintf("Pull request [%d] %d was already closed", err.ID, err.Index) } -// ErrForbiddenIssueReaction is used when a forbidden reaction was try to created -type ErrForbiddenIssueReaction struct { - Reaction string -} - -// IsErrForbiddenIssueReaction checks if an error is a ErrForbiddenIssueReaction. -func IsErrForbiddenIssueReaction(err error) bool { - _, ok := err.(ErrForbiddenIssueReaction) - return ok -} - -func (err ErrForbiddenIssueReaction) Error() string { - return fmt.Sprintf("'%s' is not an allowed reaction", err.Reaction) -} - -// ErrReactionAlreadyExist is used when a existing reaction was try to created -type ErrReactionAlreadyExist struct { - Reaction string -} - -// IsErrReactionAlreadyExist checks if an error is a ErrReactionAlreadyExist. -func IsErrReactionAlreadyExist(err error) bool { - _, ok := err.(ErrReactionAlreadyExist) - return ok -} - -func (err ErrReactionAlreadyExist) Error() string { - return fmt.Sprintf("reaction '%s' already exists", err.Reaction) -} - // __________ .__ .__ __________ __ // \______ \__ __| | | |\______ \ ____ ________ __ ____ _______/ |_ // | ___/ | \ | | | | _// __ \/ ____/ | \_/ __ \ / ___/\ __\ @@ -1130,111 +1029,6 @@ func (err ErrLabelNotExist) Error() string { return fmt.Sprintf("label does not exist [label_id: %d]", err.LabelID) } -// __________ __ __ -// \______ \_______ ____ |__| ____ _____/ |_ ______ -// | ___/\_ __ \/ _ \ | |/ __ \_/ ___\ __\/ ___/ -// | | | | \( <_> ) | \ ___/\ \___| | \___ \ -// |____| |__| \____/\__| |\___ >\___ >__| /____ > -// \______| \/ \/ \/ - -// ErrProjectNotExist represents a "ProjectNotExist" kind of error. -type ErrProjectNotExist struct { - ID int64 - RepoID int64 -} - -// IsErrProjectNotExist checks if an error is a ErrProjectNotExist -func IsErrProjectNotExist(err error) bool { - _, ok := err.(ErrProjectNotExist) - return ok -} - -func (err ErrProjectNotExist) Error() string { - return fmt.Sprintf("projects does not exist [id: %d]", err.ID) -} - -// ErrProjectBoardNotExist represents a "ProjectBoardNotExist" kind of error. 
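ErrNotAllowedToMerge is renamed to ErrDisallowedToMerge and its predicate follows. A hedged sketch of a caller branching on the renamed type; mergeHypothetical is a stand-in for whatever merge routine returns it and is not part of this patch:

package example

import (
	"log"

	"code.gitea.io/gitea/models"
)

// mergeHypothetical is a placeholder for a merge call that can fail with
// models.ErrDisallowedToMerge when branch protection blocks the merge.
func mergeHypothetical(prID int64) error {
	return models.ErrDisallowedToMerge{Reason: "approvals missing"}
}

func tryMerge(prID int64) error {
	err := mergeHypothetical(prID)
	if models.IsErrDisallowedToMerge(err) {
		// protection rules refused the merge; report the reason instead of failing hard
		log.Printf("merge blocked: %v", err)
		return nil
	}
	return err
}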
-type ErrProjectBoardNotExist struct { - BoardID int64 -} - -// IsErrProjectBoardNotExist checks if an error is a ErrProjectBoardNotExist -func IsErrProjectBoardNotExist(err error) bool { - _, ok := err.(ErrProjectBoardNotExist) - return ok -} - -func (err ErrProjectBoardNotExist) Error() string { - return fmt.Sprintf("project board does not exist [id: %d]", err.BoardID) -} - -// _____ .__.__ __ -// / \ |__| | ____ _______/ |_ ____ ____ ____ -// / \ / \| | | _/ __ \ / ___/\ __\/ _ \ / \_/ __ \ -// / Y \ | |_\ ___/ \___ \ | | ( <_> ) | \ ___/ -// \____|__ /__|____/\___ >____ > |__| \____/|___| /\___ > -// \/ \/ \/ \/ \/ - -// ErrMilestoneNotExist represents a "MilestoneNotExist" kind of error. -type ErrMilestoneNotExist struct { - ID int64 - RepoID int64 - Name string -} - -// IsErrMilestoneNotExist checks if an error is a ErrMilestoneNotExist. -func IsErrMilestoneNotExist(err error) bool { - _, ok := err.(ErrMilestoneNotExist) - return ok -} - -func (err ErrMilestoneNotExist) Error() string { - if len(err.Name) > 0 { - return fmt.Sprintf("milestone does not exist [name: %s, repo_id: %d]", err.Name, err.RepoID) - } - return fmt.Sprintf("milestone does not exist [id: %d, repo_id: %d]", err.ID, err.RepoID) -} - -// ___________ -// \__ ___/___ _____ _____ -// | |_/ __ \\__ \ / \ -// | |\ ___/ / __ \| Y Y \ -// |____| \___ >____ /__|_| / -// \/ \/ \/ - -// ErrTeamAlreadyExist represents a "TeamAlreadyExist" kind of error. -type ErrTeamAlreadyExist struct { - OrgID int64 - Name string -} - -// IsErrTeamAlreadyExist checks if an error is a ErrTeamAlreadyExist. -func IsErrTeamAlreadyExist(err error) bool { - _, ok := err.(ErrTeamAlreadyExist) - return ok -} - -func (err ErrTeamAlreadyExist) Error() string { - return fmt.Sprintf("team already exists [org_id: %d, name: %s]", err.OrgID, err.Name) -} - -// ErrTeamNotExist represents a "TeamNotExist" error -type ErrTeamNotExist struct { - OrgID int64 - TeamID int64 - Name string -} - -// IsErrTeamNotExist checks if an error is a ErrTeamNotExist. 
-func IsErrTeamNotExist(err error) bool { - _, ok := err.(ErrTeamNotExist) - return ok -} - -func (err ErrTeamNotExist) Error() string { - return fmt.Sprintf("team does not exist [org_id %d, team_id %d, name: %s]", err.OrgID, err.TeamID, err.Name) -} - // ____ ___ .__ .___ // | | \______ | | _________ __| _/ // | | /\____ \| | / _ \__ \ / __ | diff --git a/models/fixtures/action.yml b/models/fixtures/action.yml index e3f3d2a971..a75092feb0 100644 --- a/models/fixtures/action.yml +++ b/models/fixtures/action.yml @@ -3,7 +3,7 @@ user_id: 2 op_type: 12 # close issue act_user_id: 2 - repo_id: 2 + repo_id: 2 # private is_private: true created_unix: 1603228283 @@ -12,7 +12,7 @@ user_id: 3 op_type: 2 # rename repo act_user_id: 2 - repo_id: 3 + repo_id: 3 # private is_private: true content: oldRepoName @@ -21,7 +21,7 @@ user_id: 11 op_type: 1 # create repo act_user_id: 11 - repo_id: 9 + repo_id: 9 # public is_private: false - @@ -29,7 +29,7 @@ user_id: 16 op_type: 12 # close issue act_user_id: 16 - repo_id: 22 + repo_id: 22 # private is_private: true created_unix: 1603267920 @@ -37,7 +37,7 @@ user_id: 10 op_type: 1 # create repo act_user_id: 10 - repo_id: 6 + repo_id: 6 # private is_private: true created_unix: 1603010100 @@ -45,7 +45,7 @@ user_id: 10 op_type: 1 # create repo act_user_id: 10 - repo_id: 7 + repo_id: 7 # private is_private: true created_unix: 1603011300 @@ -53,6 +53,14 @@ user_id: 10 op_type: 1 # create repo act_user_id: 10 - repo_id: 8 + repo_id: 8 # public is_private: false created_unix: 1603011540 # grouped with id:7 + +- id: 8 + user_id: 1 + op_type: 12 # close issue + act_user_id: 1 + repo_id: 1700 # dangling intentional + is_private: false + created_unix: 1603011541 diff --git a/models/fixtures/foreign_reference.yml b/models/fixtures/foreign_reference.yml new file mode 100644 index 0000000000..ca780a73aa --- /dev/null +++ b/models/fixtures/foreign_reference.yml @@ -0,0 +1 @@ +[] # empty diff --git a/models/fixtures/team.yml b/models/fixtures/team.yml index 9a8b0aff76..f6dfd1e9d0 100644 --- a/models/fixtures/team.yml +++ b/models/fixtures/team.yml @@ -6,6 +6,7 @@ authorize: 4 # owner num_repos: 3 num_members: 1 + can_create_org_repo: true - id: 2 @@ -15,6 +16,7 @@ authorize: 2 # write num_repos: 1 num_members: 2 + can_create_org_repo: false - id: 3 @@ -24,6 +26,7 @@ authorize: 4 # owner num_repos: 0 num_members: 1 + can_create_org_repo: true - id: 4 @@ -33,6 +36,7 @@ authorize: 4 # owner num_repos: 0 num_members: 1 + can_create_org_repo: true - id: 5 @@ -42,6 +46,7 @@ authorize: 4 # owner num_repos: 2 num_members: 2 + can_create_org_repo: true - id: 6 @@ -51,6 +56,7 @@ authorize: 4 # owner num_repos: 2 num_members: 1 + can_create_org_repo: true - id: 7 @@ -60,6 +66,7 @@ authorize: 2 # write num_repos: 1 num_members: 1 + can_create_org_repo: false - id: 8 @@ -69,6 +76,7 @@ authorize: 2 # write num_repos: 1 num_members: 1 + can_create_org_repo: false - id: 9 @@ -78,6 +86,7 @@ authorize: 1 # read num_repos: 1 num_members: 2 + can_create_org_repo: false - id: 10 @@ -87,6 +96,7 @@ authorize: 1 # owner num_repos: 0 num_members: 1 + can_create_org_repo: false - id: 11 @@ -96,6 +106,7 @@ authorize: 1 # read num_repos: 0 num_members: 0 + can_create_org_repo: false - id: 12 diff --git a/models/fixtures/user.yml b/models/fixtures/user.yml index 670b305621..67ba869c76 100644 --- a/models/fixtures/user.yml +++ b/models/fixtures/user.yml @@ -4,6 +4,7 @@ id: 1 lower_name: user1 name: user1 + login_name: user1 full_name: User One email: user1@example.com email_notifications_preference: enabled 
@@ -21,6 +22,7 @@ id: 2 lower_name: user2 name: user2 + login_name: user2 full_name: " < Ur Tw >< " email: user2@example.com keep_email_private: true @@ -42,6 +44,7 @@ id: 3 lower_name: user3 name: user3 + login_name: user3 full_name: " <<<< >> >> > >> > >>> >> " email: user3@example.com email_notifications_preference: onmention @@ -60,6 +63,7 @@ id: 4 lower_name: user4 name: user4 + login_name: user4 full_name: " " email: user4@example.com email_notifications_preference: onmention @@ -78,6 +82,7 @@ id: 5 lower_name: user5 name: user5 + login_name: user5 full_name: User Five email: user5@example.com email_notifications_preference: enabled @@ -97,6 +102,7 @@ id: 6 lower_name: user6 name: user6 + login_name: user6 full_name: User Six email: user6@example.com email_notifications_preference: enabled @@ -115,6 +121,7 @@ id: 7 lower_name: user7 name: user7 + login_name: user7 full_name: User Seven email: user7@example.com email_notifications_preference: disabled @@ -133,6 +140,7 @@ id: 8 lower_name: user8 name: user8 + login_name: user8 full_name: User Eight email: user8@example.com email_notifications_preference: enabled @@ -152,6 +160,7 @@ id: 9 lower_name: user9 name: user9 + login_name: user9 full_name: User Nine email: user9@example.com email_notifications_preference: onmention @@ -169,6 +178,7 @@ id: 10 lower_name: user10 name: user10 + login_name: user10 full_name: User Ten email: user10@example.com passwd_hash_algo: argon2 @@ -185,6 +195,7 @@ id: 11 lower_name: user11 name: user11 + login_name: user11 full_name: User Eleven email: user11@example.com passwd_hash_algo: argon2 @@ -201,6 +212,7 @@ id: 12 lower_name: user12 name: user12 + login_name: user12 full_name: User 12 email: user12@example.com passwd_hash_algo: argon2 @@ -217,6 +229,7 @@ id: 13 lower_name: user13 name: user13 + login_name: user13 full_name: User 13 email: user13@example.com passwd_hash_algo: argon2 @@ -233,6 +246,7 @@ id: 14 lower_name: user14 name: user14 + login_name: user14 full_name: User 14 email: user14@example.com passwd_hash_algo: argon2 @@ -249,6 +263,7 @@ id: 15 lower_name: user15 name: user15 + login_name: user15 full_name: User 15 email: user15@example.com passwd_hash_algo: argon2 @@ -265,6 +280,7 @@ id: 16 lower_name: user16 name: user16 + login_name: user16 full_name: User 16 email: user16@example.com passwd_hash_algo: argon2 @@ -281,6 +297,7 @@ id: 17 lower_name: user17 name: user17 + login_name: user17 full_name: User 17 email: user17@example.com passwd_hash_algo: argon2 @@ -299,6 +316,7 @@ id: 18 lower_name: user18 name: user18 + login_name: user18 full_name: User 18 email: user18@example.com passwd_hash_algo: argon2 @@ -315,6 +333,7 @@ id: 19 lower_name: user19 name: user19 + login_name: user19 full_name: User 19 email: user19@example.com passwd_hash_algo: argon2 @@ -333,6 +352,7 @@ id: 20 lower_name: user20 name: user20 + login_name: user20 full_name: User 20 email: user20@example.com passwd_hash_algo: argon2 @@ -349,6 +369,7 @@ id: 21 lower_name: user21 name: user21 + login_name: user21 full_name: User 21 email: user21@example.com passwd_hash_algo: argon2 @@ -365,6 +386,7 @@ id: 22 lower_name: limited_org name: limited_org + login_name: limited_org full_name: Limited Org email: limited_org@example.com passwd_hash_algo: argon2 @@ -384,6 +406,7 @@ id: 23 lower_name: privated_org name: privated_org + login_name: privated_org full_name: Privated Org email: privated_org@example.com passwd_hash_algo: argon2 @@ -403,6 +426,7 @@ id: 24 lower_name: user24 name: user24 + login_name: user24 full_name: 
"user24" email: user24@example.com keep_email_private: true @@ -423,6 +447,7 @@ id: 25 lower_name: org25 name: org25 + login_name: org25 full_name: "org25" email: org25@example.com passwd_hash_algo: argon2 @@ -440,6 +465,7 @@ id: 26 lower_name: org26 name: org26 + login_name: org26 full_name: "Org26" email: org26@example.com email_notifications_preference: onmention @@ -459,6 +485,7 @@ id: 27 lower_name: user27 name: user27 + login_name: user27 full_name: User Twenty-Seven email: user27@example.com email_notifications_preference: enabled @@ -475,6 +502,7 @@ id: 28 lower_name: user28 name: user28 + login_name: user28 full_name: "user27" email: user28@example.com keep_email_private: true @@ -495,6 +523,7 @@ id: 29 lower_name: user29 name: user29 + login_name: user29 full_name: User 29 email: user29@example.com passwd_hash_algo: argon2 @@ -512,6 +541,7 @@ id: 30 lower_name: user30 name: user30 + login_name: user30 full_name: User Thirty email: user30@example.com passwd_hash_algo: argon2 @@ -530,6 +560,7 @@ id: 31 lower_name: user31 name: user31 + login_name: user31 full_name: "user31" email: user31@example.com passwd_hash_algo: argon2 @@ -547,6 +578,7 @@ id: 32 lower_name: user32 name: user32 + login_name: user32 full_name: User 32 (U2F test) email: user32@example.com passwd: 7d93daa0d1e6f2305cc8fa496847d61dc7320bb16262f9c55dd753480207234cdd96a93194e408341971742f4701772a025a # password diff --git a/models/foreignreference/error.go b/models/foreignreference/error.go new file mode 100644 index 0000000000..d783a08730 --- /dev/null +++ b/models/foreignreference/error.go @@ -0,0 +1,43 @@ +// Copyright 2022 Gitea. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package foreignreference + +import ( + "fmt" +) + +// ErrLocalIndexNotExist represents a "LocalIndexNotExist" kind of error. +type ErrLocalIndexNotExist struct { + RepoID int64 + ForeignIndex int64 + Type string +} + +// ErrLocalIndexNotExist checks if an error is a ErrLocalIndexNotExist. +func IsErrLocalIndexNotExist(err error) bool { + _, ok := err.(ErrLocalIndexNotExist) + return ok +} + +func (err ErrLocalIndexNotExist) Error() string { + return fmt.Sprintf("repository %d has no LocalIndex for ForeignIndex %d of type %s", err.RepoID, err.ForeignIndex, err.Type) +} + +// ErrForeignIndexNotExist represents a "ForeignIndexNotExist" kind of error. +type ErrForeignIndexNotExist struct { + RepoID int64 + LocalIndex int64 + Type string +} + +// ErrForeignIndexNotExist checks if an error is a ErrForeignIndexNotExist. +func IsErrForeignIndexNotExist(err error) bool { + _, ok := err.(ErrForeignIndexNotExist) + return ok +} + +func (err ErrForeignIndexNotExist) Error() string { + return fmt.Sprintf("repository %d has no ForeignIndex for LocalIndex %d of type %s", err.RepoID, err.LocalIndex, err.Type) +} diff --git a/models/foreignreference/foreignreference.go b/models/foreignreference/foreignreference.go new file mode 100644 index 0000000000..cc657a0fbe --- /dev/null +++ b/models/foreignreference/foreignreference.go @@ -0,0 +1,32 @@ +// Copyright 2022 Gitea. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package foreignreference + +import ( + "code.gitea.io/gitea/models/db" +) + +// Type* are valid values for the Type field of ForeignReference +const ( + TypeIssue = "issue" + TypePullRequest = "pull_request" + TypeComment = "comment" + TypeReview = "review" + TypeReviewComment = "review_comment" + TypeRelease = "release" +) + +// ForeignReference represents external references +type ForeignReference struct { + // RepoID is the first column in all indices. now we only need 2 indices: (repo, local) and (repo, foreign, type) + RepoID int64 `xorm:"UNIQUE(repo_foreign_type) INDEX(repo_local)" ` + LocalIndex int64 `xorm:"INDEX(repo_local)"` // the resource key inside Gitea, it can be IssueIndex, or some model ID. + ForeignIndex string `xorm:"INDEX UNIQUE(repo_foreign_type)"` + Type string `xorm:"VARCHAR(16) INDEX UNIQUE(repo_foreign_type)"` +} + +func init() { + db.RegisterModel(new(ForeignReference)) +} diff --git a/models/helper.go b/models/helper.go deleted file mode 100644 index 15df424539..0000000000 --- a/models/helper.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2017 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -package models - -import ( - repo_model "code.gitea.io/gitea/models/repo" - user_model "code.gitea.io/gitea/models/user" -) - -func keysInt64(m map[int64]struct{}) []int64 { - keys := make([]int64, 0, len(m)) - for k := range m { - keys = append(keys, k) - } - return keys -} - -func valuesRepository(m map[int64]*repo_model.Repository) []*repo_model.Repository { - values := make([]*repo_model.Repository, 0, len(m)) - for _, v := range m { - values = append(values, v) - } - return values -} - -func valuesUser(m map[int64]*user_model.User) []*user_model.User { - values := make([]*user_model.User, 0, len(m)) - for _, v := range m { - values = append(values, v) - } - return values -} diff --git a/models/issue.go b/models/issue.go index fd59ac0a4b..98e64adafd 100644 --- a/models/issue.go +++ b/models/issue.go @@ -15,8 +15,11 @@ import ( admin_model "code.gitea.io/gitea/models/admin" "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/foreignreference" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" + project_model "code.gitea.io/gitea/models/project" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -43,14 +46,14 @@ type Issue struct { PosterID int64 `xorm:"INDEX"` Poster *user_model.User `xorm:"-"` OriginalAuthor string - OriginalAuthorID int64 `xorm:"index"` - Title string `xorm:"name"` - Content string `xorm:"LONGTEXT"` - RenderedContent string `xorm:"-"` - Labels []*Label `xorm:"-"` - MilestoneID int64 `xorm:"INDEX"` - Milestone *Milestone `xorm:"-"` - Project *Project `xorm:"-"` + OriginalAuthorID int64 `xorm:"index"` + Title string `xorm:"name"` + Content string `xorm:"LONGTEXT"` + RenderedContent string `xorm:"-"` + Labels []*Label `xorm:"-"` + MilestoneID int64 `xorm:"INDEX"` + Milestone *issues_model.Milestone `xorm:"-"` + Project *project_model.Project `xorm:"-"` Priority int AssigneeID int64 `xorm:"-"` Assignee *user_model.User `xorm:"-"` @@ -67,11 +70,12 @@ type Issue struct { UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` ClosedUnix timeutil.TimeStamp `xorm:"INDEX"` - Attachments []*repo_model.Attachment `xorm:"-"` - Comments []*Comment `xorm:"-"` - 
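ForeignReference maps an identifier from an external tracker onto a local index inside a repository. A minimal sketch of recording one mapping, assuming the engine-insert pattern used elsewhere in this patch; the concrete index values are placeholders:

package example

import (
	"context"
	"strconv"

	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/models/foreignreference"
)

// recordImportedIssue is a sketch: it remembers that foreign issue #42
// became local issue index 7 in the given repository.
func recordImportedIssue(ctx context.Context, repoID int64) error {
	_, err := db.GetEngine(ctx).Insert(&foreignreference.ForeignReference{
		RepoID:       repoID,
		LocalIndex:   7,
		ForeignIndex: strconv.FormatInt(42, 10),
		Type:         foreignreference.TypeIssue,
	})
	return err
}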
Reactions ReactionList `xorm:"-"` - TotalTrackedTime int64 `xorm:"-"` - Assignees []*user_model.User `xorm:"-"` + Attachments []*repo_model.Attachment `xorm:"-"` + Comments []*Comment `xorm:"-"` + Reactions issues_model.ReactionList `xorm:"-"` + TotalTrackedTime int64 `xorm:"-"` + Assignees []*user_model.User `xorm:"-"` + ForeignReference *foreignreference.ForeignReference `xorm:"-"` // IsLocked limits commenting abilities to users on an issue // with write access @@ -120,11 +124,7 @@ func (issue *Issue) IsOverdue() bool { } // LoadRepo loads issue's repository -func (issue *Issue) LoadRepo() error { - return issue.loadRepo(db.DefaultContext) -} - -func (issue *Issue) loadRepo(ctx context.Context) (err error) { +func (issue *Issue) LoadRepo(ctx context.Context) (err error) { if issue.Repo == nil { issue.Repo, err = repo_model.GetRepositoryByIDCtx(ctx, issue.RepoID) if err != nil { @@ -140,7 +140,7 @@ func (issue *Issue) IsTimetrackerEnabled() bool { } func (issue *Issue) isTimetrackerEnabled(ctx context.Context) bool { - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { log.Error(fmt.Sprintf("loadRepo: %v", err)) return false } @@ -162,13 +162,9 @@ func (issue *Issue) GetPullRequest() (pr *PullRequest, err error) { } // LoadLabels loads labels -func (issue *Issue) LoadLabels() error { - return issue.loadLabels(db.GetEngine(db.DefaultContext)) -} - -func (issue *Issue) loadLabels(e db.Engine) (err error) { +func (issue *Issue) LoadLabels(ctx context.Context) (err error) { if issue.Labels == nil { - issue.Labels, err = getLabelsByIssueID(e, issue.ID) + issue.Labels, err = getLabelsByIssueID(db.GetEngine(ctx), issue.ID) if err != nil { return fmt.Errorf("getLabelsByIssueID [%d]: %v", issue.ID, err) } @@ -240,18 +236,17 @@ func (issue *Issue) loadReactions(ctx context.Context) (err error) { if issue.Reactions != nil { return nil } - e := db.GetEngine(ctx) - reactions, _, err := findReactions(e, FindReactionsOptions{ + reactions, _, err := issues_model.FindReactions(ctx, issues_model.FindReactionsOptions{ IssueID: issue.ID, }) if err != nil { return err } - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return err } // Load reaction user data - if _, err := ReactionList(reactions).loadUsers(e, issue.Repo); err != nil { + if _, err := issues_model.ReactionList(reactions).LoadUsers(ctx, issue.Repo); err != nil { return err } @@ -271,10 +266,33 @@ func (issue *Issue) loadReactions(ctx context.Context) (err error) { return nil } -func (issue *Issue) loadMilestone(e db.Engine) (err error) { +func (issue *Issue) loadForeignReference(ctx context.Context) (err error) { + if issue.ForeignReference != nil { + return nil + } + reference := &foreignreference.ForeignReference{ + RepoID: issue.RepoID, + LocalIndex: issue.Index, + Type: foreignreference.TypeIssue, + } + has, err := db.GetEngine(ctx).Get(reference) + if err != nil { + return err + } else if !has { + return foreignreference.ErrForeignIndexNotExist{ + RepoID: issue.RepoID, + LocalIndex: issue.Index, + Type: foreignreference.TypeIssue, + } + } + issue.ForeignReference = reference + return nil +} + +func (issue *Issue) loadMilestone(ctx context.Context) (err error) { if (issue.Milestone == nil || issue.Milestone.ID != issue.MilestoneID) && issue.MilestoneID > 0 { - issue.Milestone, err = getMilestoneByRepoID(e, issue.RepoID, issue.MilestoneID) - if err != nil && !IsErrMilestoneNotExist(err) { + issue.Milestone, err = issues_model.GetMilestoneByRepoID(ctx, 
issue.RepoID, issue.MilestoneID) + if err != nil && !issues_model.IsErrMilestoneNotExist(err) { return fmt.Errorf("getMilestoneByRepoID [repo_id: %d, milestone_id: %d]: %v", issue.RepoID, issue.MilestoneID, err) } } @@ -283,7 +301,7 @@ func (issue *Issue) loadMilestone(e db.Engine) (err error) { func (issue *Issue) loadAttributes(ctx context.Context) (err error) { e := db.GetEngine(ctx) - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return } @@ -291,11 +309,11 @@ func (issue *Issue) loadAttributes(ctx context.Context) (err error) { return } - if err = issue.loadLabels(e); err != nil { + if err = issue.LoadLabels(ctx); err != nil { return } - if err = issue.loadMilestone(e); err != nil { + if err = issue.loadMilestone(ctx); err != nil { return } @@ -332,6 +350,10 @@ func (issue *Issue) loadAttributes(ctx context.Context) (err error) { } } + if err = issue.loadForeignReference(ctx); err != nil && !foreignreference.IsErrForeignIndexNotExist(err) { + return err + } + return issue.loadReactions(ctx) } @@ -342,7 +364,7 @@ func (issue *Issue) LoadAttributes() error { // LoadMilestone load milestone of this issue. func (issue *Issue) LoadMilestone() error { - return issue.loadMilestone(db.GetEngine(db.DefaultContext)) + return issue.loadMilestone(db.DefaultContext) } // GetIsRead load the `IsRead` field of the issue @@ -361,7 +383,7 @@ func (issue *Issue) GetIsRead(userID int64) error { // APIURL returns the absolute APIURL to this issue. func (issue *Issue) APIURL() string { if issue.Repo == nil { - err := issue.LoadRepo() + err := issue.LoadRepo(db.DefaultContext) if err != nil { log.Error("Issue[%d].APIURL(): %v", issue.ID, err) return "" @@ -426,23 +448,6 @@ func (issue *Issue) IsPoster(uid int64) bool { return issue.OriginalAuthorID == 0 && issue.PosterID == uid } -func (issue *Issue) hasLabel(e db.Engine, labelID int64) bool { - return hasIssueLabel(e, issue.ID, labelID) -} - -// HasLabel returns true if issue has been labeled by given ID. -func (issue *Issue) HasLabel(labelID int64) bool { - return issue.hasLabel(db.GetEngine(db.DefaultContext), labelID) -} - -func (issue *Issue) addLabel(ctx context.Context, label *Label, doer *user_model.User) error { - return newIssueLabel(ctx, issue, label, doer) -} - -func (issue *Issue) addLabels(ctx context.Context, labels []*Label, doer *user_model.User) error { - return newIssueLabels(ctx, issue, labels, doer) -} - func (issue *Issue) getLabels(e db.Engine) (err error) { if len(issue.Labels) > 0 { return nil @@ -455,17 +460,13 @@ func (issue *Issue) getLabels(e db.Engine) (err error) { return nil } -func (issue *Issue) removeLabel(ctx context.Context, doer *user_model.User, label *Label) error { - return deleteIssueLabel(ctx, issue, label, doer) -} - -func (issue *Issue) clearLabels(ctx context.Context, doer *user_model.User) (err error) { +func clearIssueLabels(ctx context.Context, issue *Issue, doer *user_model.User) (err error) { if err = issue.getLabels(db.GetEngine(ctx)); err != nil { return fmt.Errorf("getLabels: %v", err) } for i := range issue.Labels { - if err = issue.removeLabel(ctx, doer, issue.Labels[i]); err != nil { + if err = deleteIssueLabel(ctx, issue, issue.Labels[i], doer); err != nil { return fmt.Errorf("removeLabel: %v", err) } } @@ -473,22 +474,22 @@ func (issue *Issue) clearLabels(ctx context.Context, doer *user_model.User) (err return nil } -// ClearLabels removes all issue labels as the given user. +// ClearIssueLabels removes all issue labels as the given user. 
// Triggers appropriate WebHooks, if any. -func (issue *Issue) ClearLabels(doer *user_model.User) (err error) { +func ClearIssueLabels(issue *Issue, doer *user_model.User) (err error) { ctx, committer, err := db.TxContext() if err != nil { return err } defer committer.Close() - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return err } else if err = issue.loadPullRequest(db.GetEngine(ctx)); err != nil { return err } - perm, err := getUserRepoPermission(ctx, issue.Repo, doer) + perm, err := GetUserRepoPermission(ctx, issue.Repo, doer) if err != nil { return err } @@ -496,7 +497,7 @@ func (issue *Issue) ClearLabels(doer *user_model.User) (err error) { return ErrRepoLabelNotExist{} } - if err = issue.clearLabels(ctx, doer); err != nil { + if err = clearIssueLabels(ctx, issue, doer); err != nil { return err } @@ -521,20 +522,20 @@ func (ts labelSorter) Swap(i, j int) { []*Label(ts)[i], []*Label(ts)[j] = []*Label(ts)[j], []*Label(ts)[i] } -// ReplaceLabels removes all current labels and add new labels to the issue. +// ReplaceIssueLabels removes all current labels and add new labels to the issue. // Triggers appropriate WebHooks, if any. -func (issue *Issue) ReplaceLabels(labels []*Label, doer *user_model.User) (err error) { +func ReplaceIssueLabels(issue *Issue, labels []*Label, doer *user_model.User) (err error) { ctx, committer, err := db.TxContext() if err != nil { return err } defer committer.Close() - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return err } - if err = issue.loadLabels(db.GetEngine(ctx)); err != nil { + if err = issue.LoadLabels(ctx); err != nil { return err } @@ -570,19 +571,19 @@ func (issue *Issue) ReplaceLabels(labels []*Label, doer *user_model.User) (err e toRemove = append(toRemove, issue.Labels[removeIndex:]...) if len(toAdd) > 0 { - if err = issue.addLabels(ctx, toAdd, doer); err != nil { + if err = newIssueLabels(ctx, issue, toAdd, doer); err != nil { return fmt.Errorf("addLabels: %v", err) } } for _, l := range toRemove { - if err = issue.removeLabel(ctx, doer, l); err != nil { + if err = deleteIssueLabel(ctx, issue, l, doer); err != nil { return fmt.Errorf("removeLabel: %v", err) } } issue.Labels = nil - if err = issue.loadLabels(db.GetEngine(ctx)); err != nil { + if err = issue.LoadLabels(ctx); err != nil { return err } @@ -590,7 +591,7 @@ func (issue *Issue) ReplaceLabels(labels []*Label, doer *user_model.User) (err e } // ReadBy sets issue to be read by given user. 
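ClearIssueLabels and ReplaceIssueLabels are now package-level functions that receive the issue instead of being methods on *Issue; both still manage their own transaction. A hedged call-site sketch:

package example

import (
	"code.gitea.io/gitea/models"
	user_model "code.gitea.io/gitea/models/user"
)

// wipeLabels is a sketch: it removes every label from the issue on behalf of doer.
func wipeLabels(issue *models.Issue, doer *user_model.User) error {
	return models.ClearIssueLabels(issue, doer)
}

// setLabels is a sketch: ReplaceIssueLabels diffs the current and desired
// sets itself, so the caller passes the full target list, not a delta.
func setLabels(issue *models.Issue, labels []*models.Label, doer *user_model.User) error {
	return models.ReplaceIssueLabels(issue, labels, doer)
}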
-func (issue *Issue) ReadBy(userID int64) error { +func (issue *Issue) ReadBy(ctx context.Context, userID int64) error { if err := UpdateIssueUserByRead(userID, issue.ID); err != nil { return err } @@ -598,14 +599,15 @@ func (issue *Issue) ReadBy(userID int64) error { return setIssueNotificationStatusReadIfUnread(db.GetEngine(db.DefaultContext), userID, issue.ID) } -func updateIssueCols(ctx context.Context, issue *Issue, cols ...string) error { +// UpdateIssueCols updates cols of issue +func UpdateIssueCols(ctx context.Context, issue *Issue, cols ...string) error { if _, err := db.GetEngine(ctx).ID(issue.ID).Cols(cols...).Update(issue); err != nil { return err } return nil } -func (issue *Issue) changeStatus(ctx context.Context, doer *user_model.User, isClosed, isMergePull bool) (*Comment, error) { +func changeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User, isClosed, isMergePull bool) (*Comment, error) { // Reload the issue currentIssue, err := getIssueByID(db.GetEngine(ctx), issue.ID) if err != nil { @@ -625,15 +627,15 @@ func (issue *Issue) changeStatus(ctx context.Context, doer *user_model.User, isC } issue.IsClosed = isClosed - return issue.doChangeStatus(ctx, doer, isMergePull) + return doChangeIssueStatus(ctx, issue, doer, isMergePull) } -func (issue *Issue) doChangeStatus(ctx context.Context, doer *user_model.User, isMergePull bool) (*Comment, error) { +func doChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User, isMergePull bool) (*Comment, error) { e := db.GetEngine(ctx) // Check for open dependencies if issue.IsClosed && issue.Repo.IsDependenciesEnabledCtx(ctx) { // only check if dependencies are enabled and we're about to close an issue, otherwise reopening an issue would fail when there are unsatisfied dependencies - noDeps, err := issueNoDependenciesLeft(e, issue) + noDeps, err := IssueNoDependenciesLeft(ctx, issue) if err != nil { return nil, err } @@ -649,7 +651,7 @@ func (issue *Issue) doChangeStatus(ctx context.Context, doer *user_model.User, i issue.ClosedUnix = 0 } - if err := updateIssueCols(ctx, issue, "is_closed", "closed_unix"); err != nil { + if err := UpdateIssueCols(ctx, issue, "is_closed", "closed_unix"); err != nil { return nil, err } @@ -665,12 +667,12 @@ func (issue *Issue) doChangeStatus(ctx context.Context, doer *user_model.User, i // Update issue count of milestone if issue.MilestoneID > 0 { - if err := updateMilestoneCounters(ctx, issue.MilestoneID); err != nil { + if err := issues_model.UpdateMilestoneCounters(ctx, issue.MilestoneID); err != nil { return nil, err } } - if err := issue.updateClosedNum(ctx); err != nil { + if err := updateIssueClosedNum(ctx, issue); err != nil { return nil, err } @@ -682,7 +684,7 @@ func (issue *Issue) doChangeStatus(ctx context.Context, doer *user_model.User, i cmtType = CommentTypeMergePull } - return createComment(ctx, &CreateCommentOptions{ + return CreateCommentCtx(ctx, &CreateCommentOptions{ Type: cmtType, Doer: doer, Repo: issue.Repo, @@ -690,46 +692,31 @@ func (issue *Issue) doChangeStatus(ctx context.Context, doer *user_model.User, i }) } -// ChangeStatus changes issue status to open or closed. -func (issue *Issue) ChangeStatus(doer *user_model.User, isClosed bool) (*Comment, error) { - ctx, committer, err := db.TxContext() - if err != nil { - return nil, err - } - defer committer.Close() - - if err := issue.loadRepo(ctx); err != nil { +// ChangeIssueStatus changes issue status to open or closed. 
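updateIssueCols is exported as UpdateIssueCols so other packages can persist just a subset of an issue's columns under a caller-supplied context. A minimal sketch that writes only the deadline column, mirroring how UpdateIssueDeadline uses it elsewhere in this patch:

package example

import (
	"context"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/timeutil"
)

// setDeadline is a sketch: it updates the deadline_unix column of one issue
// row and leaves every other field untouched.
func setDeadline(ctx context.Context, issueID int64, deadline timeutil.TimeStamp) error {
	return models.UpdateIssueCols(ctx, &models.Issue{ID: issueID, DeadlineUnix: deadline}, "deadline_unix")
}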
+func ChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User, isClosed bool) (*Comment, error) { + if err := issue.LoadRepo(ctx); err != nil { return nil, err } if err := issue.loadPoster(db.GetEngine(ctx)); err != nil { return nil, err } - comment, err := issue.changeStatus(ctx, doer, isClosed, false) - if err != nil { - return nil, err - } - - if err = committer.Commit(); err != nil { - return nil, fmt.Errorf("Commit: %v", err) - } - - return comment, nil + return changeIssueStatus(ctx, issue, doer, isClosed, false) } -// ChangeTitle changes the title of this issue, as the given user. -func (issue *Issue) ChangeTitle(doer *user_model.User, oldTitle string) (err error) { +// ChangeIssueTitle changes the title of this issue, as the given user. +func ChangeIssueTitle(issue *Issue, doer *user_model.User, oldTitle string) (err error) { ctx, committer, err := db.TxContext() if err != nil { return err } defer committer.Close() - if err = updateIssueCols(ctx, issue, "name"); err != nil { + if err = UpdateIssueCols(ctx, issue, "name"); err != nil { return fmt.Errorf("updateIssueCols: %v", err) } - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return fmt.Errorf("loadRepo: %v", err) } @@ -741,7 +728,7 @@ func (issue *Issue) ChangeTitle(doer *user_model.User, oldTitle string) (err err OldTitle: oldTitle, NewTitle: issue.Title, } - if _, err = createComment(ctx, opts); err != nil { + if _, err = CreateCommentCtx(ctx, opts); err != nil { return fmt.Errorf("createComment: %v", err) } if err = issue.addCrossReferences(ctx, doer, true); err != nil { @@ -751,19 +738,19 @@ func (issue *Issue) ChangeTitle(doer *user_model.User, oldTitle string) (err err return committer.Commit() } -// ChangeRef changes the branch of this issue, as the given user. -func (issue *Issue) ChangeRef(doer *user_model.User, oldRef string) (err error) { +// ChangeIssueRef changes the branch of this issue, as the given user. 
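ChangeIssueStatus no longer opens and commits its own transaction; it simply runs on the context it is given, so a caller that needs atomicity supplies the transaction itself. A hedged sketch of a closing helper built on db.TxContext, ignoring the returned status-change comment for brevity:

package example

import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/db"
	user_model "code.gitea.io/gitea/models/user"
)

// closeIssue is a sketch: it closes the issue inside one transaction and only
// commits when the status change and its side effects all succeeded.
func closeIssue(issue *models.Issue, doer *user_model.User) error {
	ctx, committer, err := db.TxContext()
	if err != nil {
		return err
	}
	defer committer.Close()

	if _, err := models.ChangeIssueStatus(ctx, issue, doer, true); err != nil {
		return err
	}
	return committer.Commit()
}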
+func ChangeIssueRef(issue *Issue, doer *user_model.User, oldRef string) (err error) { ctx, committer, err := db.TxContext() if err != nil { return err } defer committer.Close() - if err = updateIssueCols(ctx, issue, "ref"); err != nil { + if err = UpdateIssueCols(ctx, issue, "ref"); err != nil { return fmt.Errorf("updateIssueCols: %v", err) } - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return fmt.Errorf("loadRepo: %v", err) } oldRefFriendly := strings.TrimPrefix(oldRef, git.BranchPrefix) @@ -777,7 +764,7 @@ func (issue *Issue) ChangeRef(doer *user_model.User, oldRef string) (err error) OldRef: oldRefFriendly, NewRef: newRefFriendly, } - if _, err = createComment(ctx, opts); err != nil { + if _, err = CreateCommentCtx(ctx, opts); err != nil { return fmt.Errorf("createComment: %v", err) } @@ -785,16 +772,11 @@ func (issue *Issue) ChangeRef(doer *user_model.User, oldRef string) (err error) } // AddDeletePRBranchComment adds delete branch comment for pull request issue -func AddDeletePRBranchComment(doer *user_model.User, repo *repo_model.Repository, issueID int64, branchName string) error { - issue, err := getIssueByID(db.GetEngine(db.DefaultContext), issueID) +func AddDeletePRBranchComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issueID int64, branchName string) error { + issue, err := getIssueByID(db.GetEngine(ctx), issueID) if err != nil { return err } - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() opts := &CreateCommentOptions{ Type: CommentTypeDeleteBranch, Doer: doer, @@ -802,15 +784,12 @@ func AddDeletePRBranchComment(doer *user_model.User, repo *repo_model.Repository Issue: issue, OldRef: branchName, } - if _, err = createComment(ctx, opts); err != nil { - return err - } - - return committer.Commit() + _, err = CreateCommentCtx(ctx, opts) + return err } -// UpdateAttachments update attachments by UUIDs for the issue -func (issue *Issue) UpdateAttachments(uuids []string) (err error) { +// UpdateIssueAttachments update attachments by UUIDs for the issue +func UpdateIssueAttachments(issueID int64, uuids []string) (err error) { ctx, committer, err := db.TxContext() if err != nil { return err @@ -821,7 +800,7 @@ func (issue *Issue) UpdateAttachments(uuids []string) (err error) { return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %v", uuids, err) } for i := 0; i < len(attachments); i++ { - attachments[i].IssueID = issue.ID + attachments[i].IssueID = issueID if err := repo_model.UpdateAttachmentCtx(ctx, attachments[i]); err != nil { return fmt.Errorf("update attachment [id: %d]: %v", attachments[i].ID, err) } @@ -829,20 +808,20 @@ func (issue *Issue) UpdateAttachments(uuids []string) (err error) { return committer.Commit() } -// ChangeContent changes issue content, as the given user. -func (issue *Issue) ChangeContent(doer *user_model.User, content string) (err error) { +// ChangeIssueContent changes issue content, as the given user. 
+func ChangeIssueContent(issue *Issue, doer *user_model.User, content string) (err error) { ctx, committer, err := db.TxContext() if err != nil { return err } defer committer.Close() - hasContentHistory, err := issues.HasIssueContentHistory(ctx, issue.ID, 0) + hasContentHistory, err := issues_model.HasIssueContentHistory(ctx, issue.ID, 0) if err != nil { return fmt.Errorf("HasIssueContentHistory: %v", err) } if !hasContentHistory { - if err = issues.SaveIssueContentHistory(db.GetEngine(ctx), issue.PosterID, issue.ID, 0, + if err = issues_model.SaveIssueContentHistory(db.GetEngine(ctx), issue.PosterID, issue.ID, 0, issue.CreatedUnix, issue.Content, true); err != nil { return fmt.Errorf("SaveIssueContentHistory: %v", err) } @@ -850,11 +829,11 @@ func (issue *Issue) ChangeContent(doer *user_model.User, content string) (err er issue.Content = content - if err = updateIssueCols(ctx, issue, "content"); err != nil { + if err = UpdateIssueCols(ctx, issue, "content"); err != nil { return fmt.Errorf("UpdateIssueCols: %v", err) } - if err = issues.SaveIssueContentHistory(db.GetEngine(ctx), doer.ID, issue.ID, 0, + if err = issues_model.SaveIssueContentHistory(db.GetEngine(ctx), doer.ID, issue.ID, 0, timeutil.TimeStampNow(), issue.Content, false); err != nil { return fmt.Errorf("SaveIssueContentHistory: %v", err) } @@ -934,8 +913,8 @@ func newIssue(ctx context.Context, doer *user_model.User, opts NewIssueOptions) opts.Issue.Title = strings.TrimSpace(opts.Issue.Title) if opts.Issue.MilestoneID > 0 { - milestone, err := getMilestoneByRepoID(e, opts.Issue.RepoID, opts.Issue.MilestoneID) - if err != nil && !IsErrMilestoneNotExist(err) { + milestone, err := issues_model.GetMilestoneByRepoID(ctx, opts.Issue.RepoID, opts.Issue.MilestoneID) + if err != nil && !issues_model.IsErrMilestoneNotExist(err) { return fmt.Errorf("getMilestoneByID: %v", err) } @@ -959,7 +938,7 @@ func newIssue(ctx context.Context, doer *user_model.User, opts NewIssueOptions) } if opts.Issue.MilestoneID > 0 { - if err := updateMilestoneCounters(ctx, opts.Issue.MilestoneID); err != nil { + if err := issues_model.UpdateMilestoneCounters(ctx, opts.Issue.MilestoneID); err != nil { return err } @@ -971,7 +950,7 @@ func newIssue(ctx context.Context, doer *user_model.User, opts NewIssueOptions) OldMilestoneID: 0, MilestoneID: opts.Issue.MilestoneID, } - if _, err = createComment(ctx, opts); err != nil { + if _, err = CreateCommentCtx(ctx, opts); err != nil { return err } } @@ -1003,7 +982,7 @@ func newIssue(ctx context.Context, doer *user_model.User, opts NewIssueOptions) continue } - if err = opts.Issue.addLabel(ctx, label, opts.Issue.Poster); err != nil { + if err = newIssueLabel(ctx, opts.Issue, label, opts.Issue.Poster); err != nil { return fmt.Errorf("addLabel [id: %d]: %v", label.ID, err) } } @@ -1110,6 +1089,26 @@ func GetIssueByIndex(repoID, index int64) (*Issue, error) { return issue, nil } +// GetIssueByForeignIndex returns raw issue by foreign ID +func GetIssueByForeignIndex(ctx context.Context, repoID, foreignIndex int64) (*Issue, error) { + reference := &foreignreference.ForeignReference{ + RepoID: repoID, + ForeignIndex: strconv.FormatInt(foreignIndex, 10), + Type: foreignreference.TypeIssue, + } + has, err := db.GetEngine(ctx).Get(reference) + if err != nil { + return nil, err + } else if !has { + return nil, foreignreference.ErrLocalIndexNotExist{ + RepoID: repoID, + ForeignIndex: foreignIndex, + Type: foreignreference.TypeIssue, + } + } + return GetIssueByIndex(repoID, reference.LocalIndex) +} + // GetIssueWithAttrsByIndex 
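GetIssueByForeignIndex resolves an external issue number to the local issue through the new foreign_reference table and returns foreignreference.ErrLocalIndexNotExist when no mapping exists. A hedged lookup sketch that treats a missing mapping as "not migrated" rather than a hard failure:

package example

import (
	"context"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/foreignreference"
)

// findMigratedIssue is a sketch: ok is false when the repository has no
// recorded mapping for the given foreign index.
func findMigratedIssue(ctx context.Context, repoID, foreignIndex int64) (issue *models.Issue, ok bool, err error) {
	issue, err = models.GetIssueByForeignIndex(ctx, repoID, foreignIndex)
	if foreignreference.IsErrLocalIndexNotExist(err) {
		return nil, false, nil
	}
	if err != nil {
		return nil, false, err
	}
	return issue, true, nil
}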
returns issue by index in a repository. func GetIssueWithAttrsByIndex(repoID, index int64) (*Issue, error) { issue, err := GetIssueByIndex(repoID, index) @@ -1168,7 +1167,8 @@ func GetIssuesByIDs(issueIDs []int64) ([]*Issue, error) { // IssuesOptions represents options of an issue. type IssuesOptions struct { db.ListOptions - RepoIDs []int64 // include all repos if empty + RepoID int64 // overwrites RepoCond if not 0 + RepoCond builder.Cond AssigneeID int64 PosterID int64 MentionedID int64 @@ -1189,9 +1189,9 @@ type IssuesOptions struct { // prioritize issues from this repo PriorityRepoID int64 IsArchived util.OptionalBool - Org *Organization // issues permission scope - Team *Team // issues permission scope - User *user_model.User // issues permission scope + Org *organization.Organization // issues permission scope + Team *organization.Team // issues permission scope + User *user_model.User // issues permission scope } // sortIssuesSession sort an issues-related session based on the provided @@ -1241,7 +1241,7 @@ func sortIssuesSession(sess *xorm.Session, sortType string, priorityRepoID int64 } } -func (opts *IssuesOptions) setupSession(sess *xorm.Session) { +func (opts *IssuesOptions) setupSessionWithLimit(sess *xorm.Session) { if opts.Page >= 0 && opts.PageSize > 0 { var start int if opts.Page == 0 { @@ -1251,20 +1251,23 @@ func (opts *IssuesOptions) setupSession(sess *xorm.Session) { } sess.Limit(opts.PageSize, start) } + opts.setupSessionNoLimit(sess) +} +func (opts *IssuesOptions) setupSessionNoLimit(sess *xorm.Session) { if len(opts.IssueIDs) > 0 { sess.In("issue.id", opts.IssueIDs) } - if len(opts.RepoIDs) > 0 { - applyReposCondition(sess, opts.RepoIDs) + if opts.RepoID != 0 { + opts.RepoCond = builder.Eq{"issue.repo_id": opts.RepoID} + } + if opts.RepoCond != nil { + sess.And(opts.RepoCond) } - switch opts.IsClosed { - case util.OptionalBoolTrue: - sess.And("issue.is_closed=?", true) - case util.OptionalBoolFalse: - sess.And("issue.is_closed=?", false) + if !opts.IsClosed.IsNone() { + sess.And("issue.is_closed=?", opts.IsClosed.IsTrue()) } if opts.AssigneeID > 0 { @@ -1352,7 +1355,7 @@ func (opts *IssuesOptions) setupSession(sess *xorm.Session) { } // issuePullAccessibleRepoCond userID must not be zero, this condition require join repository table -func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *Organization, team *Team, isPull bool) builder.Cond { +func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *organization.Organization, team *organization.Team, isPull bool) builder.Cond { cond := builder.NewCond() unitType := unit.TypeIssues if isPull { @@ -1383,10 +1386,6 @@ func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *Organizati return cond } -func applyReposCondition(sess *xorm.Session, repoIDs []int64) *xorm.Session { - return sess.In("issue.repo_id", repoIDs) -} - func applyAssigneeCondition(sess *xorm.Session, assigneeID int64) *xorm.Session { return sess.Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id"). And("issue_assignees.assignee_id = ?", assigneeID) @@ -1417,7 +1416,7 @@ func CountIssuesByRepo(opts *IssuesOptions) (map[int64]int64, error) { sess := e.Join("INNER", "repository", "`issue`.repo_id = `repository`.id") - opts.setupSession(sess) + opts.setupSessionNoLimit(sess) countsSlice := make([]*struct { RepoID int64 @@ -1427,7 +1426,7 @@ func CountIssuesByRepo(opts *IssuesOptions) (map[int64]int64, error) { Select("issue.repo_id AS repo_id, COUNT(*) AS count"). Table("issue"). 
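IssuesOptions drops the RepoIDs slice in favour of a single RepoID plus a free-form RepoCond, so multi-repository filters are now expressed as an xorm builder condition. A sketch of both forms as they would be passed to the Issues helper updated later in this patch; the ID values are placeholders and the builder import path is assumed to be xorm's usual xorm.io/builder:

package example

import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/db"

	"xorm.io/builder"
)

func listIssueExamples() ([]*models.Issue, error) {
	// single repository: a non-zero RepoID overrides RepoCond
	if _, err := models.Issues(&models.IssuesOptions{
		ListOptions: db.ListOptions{Page: 1, PageSize: 20},
		RepoID:      1,
	}); err != nil {
		return nil, err
	}
	// several repositories: express the filter as a builder.Cond
	return models.Issues(&models.IssuesOptions{
		ListOptions: db.ListOptions{Page: 1, PageSize: 20},
		RepoCond:    builder.In("issue.repo_id", []int64{1, 2, 3}),
	})
}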
Find(&countsSlice); err != nil { - return nil, err + return nil, fmt.Errorf("unable to CountIssuesByRepo: %w", err) } countMap := make(map[int64]int64, len(countsSlice)) @@ -1444,14 +1443,14 @@ func GetRepoIDsForIssuesOptions(opts *IssuesOptions, user *user_model.User) ([]i sess := e.Join("INNER", "repository", "`issue`.repo_id = `repository`.id") - opts.setupSession(sess) + opts.setupSessionNoLimit(sess) accessCond := accessibleRepositoryCondition(user) if err := sess.Where(accessCond). Distinct("issue.repo_id"). Table("issue"). Find(&repoIDs); err != nil { - return nil, err + return nil, fmt.Errorf("unable to GetRepoIDsForIssuesOptions: %w", err) } return repoIDs, nil @@ -1462,17 +1461,16 @@ func Issues(opts *IssuesOptions) ([]*Issue, error) { e := db.GetEngine(db.DefaultContext) sess := e.Join("INNER", "repository", "`issue`.repo_id = `repository`.id") - opts.setupSession(sess) + opts.setupSessionWithLimit(sess) sortIssuesSession(sess, opts.SortType, opts.PriorityRepoID) issues := make([]*Issue, 0, opts.ListOptions.PageSize) if err := sess.Find(&issues); err != nil { - return nil, fmt.Errorf("Find: %v", err) + return nil, fmt.Errorf("unable to query Issues: %w", err) } - sess.Close() if err := IssueList(issues).LoadAttributes(); err != nil { - return nil, fmt.Errorf("LoadAttributes: %v", err) + return nil, fmt.Errorf("unable to LoadAttributes for Issues: %w", err) } return issues, nil @@ -1482,21 +1480,10 @@ func Issues(opts *IssuesOptions) ([]*Issue, error) { func CountIssues(opts *IssuesOptions) (int64, error) { e := db.GetEngine(db.DefaultContext) - countsSlice := make([]*struct { - RepoID int64 - Count int64 - }, 0, 1) - sess := e.Select("COUNT(issue.id) AS count").Table("issue") sess.Join("INNER", "repository", "`issue`.repo_id = `repository`.id") - opts.setupSession(sess) - if err := sess.Find(&countsSlice); err != nil { - return 0, fmt.Errorf("Find: %v", err) - } - if len(countsSlice) < 1 { - return 0, fmt.Errorf("there is less than one result sql record") - } - return countsSlice[0].Count, nil + opts.setupSessionNoLimit(sess) + return sess.Count() } // GetParticipantsIDsByIssueID returns the IDs of all users who participated in comments of an issue, @@ -1554,6 +1541,7 @@ const ( FilterModeCreate FilterModeMention FilterModeReviewRequested + FilterModeYourRepositories ) func parseCountResult(results []map[string][]byte) int64 { @@ -1698,8 +1686,9 @@ type UserIssueStatsOptions struct { IssueIDs []int64 IsArchived util.OptionalBool LabelIDs []int64 - Org *Organization - Team *Team + RepoCond builder.Cond + Org *organization.Organization + Team *organization.Team } // GetUserIssueStats returns issue statistic information for dashboard by given conditions. @@ -1715,6 +1704,9 @@ func GetUserIssueStats(opts UserIssueStatsOptions) (*IssueStats, error) { if len(opts.IssueIDs) > 0 { cond = cond.And(builder.In("issue.id", opts.IssueIDs)) } + if opts.RepoCond != nil { + cond = cond.And(opts.RepoCond) + } if opts.UserID > 0 { cond = cond.And(issuePullAccessibleRepoCond("issue.repo_id", opts.UserID, opts.Org, opts.Team, opts.IsPull)) @@ -1736,7 +1728,7 @@ func GetUserIssueStats(opts UserIssueStatsOptions) (*IssueStats, error) { } switch opts.FilterMode { - case FilterModeAll: + case FilterModeAll, FilterModeYourRepositories: stats.OpenCount, err = sess(cond). And("issue.is_closed = ?", false). 
Count(new(Issue)) @@ -1921,7 +1913,7 @@ func UpdateIssueByAPI(issue *Issue, doer *user_model.User) (statusChangeComment defer committer.Close() sess := db.GetEngine(ctx) - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return nil, false, fmt.Errorf("loadRepo: %v", err) } @@ -1948,14 +1940,14 @@ func UpdateIssueByAPI(issue *Issue, doer *user_model.User) (statusChangeComment OldTitle: currentIssue.Title, NewTitle: issue.Title, } - _, err := createComment(ctx, opts) + _, err := CreateCommentCtx(ctx, opts) if err != nil { return nil, false, fmt.Errorf("createComment: %v", err) } } if currentIssue.IsClosed != issue.IsClosed { - statusChangeComment, err = issue.doChangeStatus(ctx, doer, false) + statusChangeComment, err = doChangeIssueStatus(ctx, issue, doer, false) if err != nil { return nil, false, err } @@ -1980,7 +1972,7 @@ func UpdateIssueDeadline(issue *Issue, deadlineUnix timeutil.TimeStamp, doer *us defer committer.Close() // Update the deadline - if err = updateIssueCols(ctx, &Issue{ID: issue.ID, DeadlineUnix: deadlineUnix}, "deadline_unix"); err != nil { + if err = UpdateIssueCols(ctx, &Issue{ID: issue.ID, DeadlineUnix: deadlineUnix}, "deadline_unix"); err != nil { return err } @@ -2069,17 +2061,18 @@ func deleteIssue(ctx context.Context, issue *Issue) error { // delete all database data still assigned to this issue if err := deleteInIssue(e, issue.ID, - &issues.ContentHistory{}, + &issues_model.ContentHistory{}, &Comment{}, &IssueLabel{}, &IssueDependency{}, &IssueAssignees{}, &IssueUser{}, - &Reaction{}, + &Notification{}, + &issues_model.Reaction{}, &IssueWatch{}, &Stopwatch{}, &TrackedTime{}, - &ProjectIssue{}, + &project_model.ProjectIssue{}, &repo_model.Attachment{}, &PullRequest{}, ); err != nil { @@ -2178,7 +2171,7 @@ func (issue *Issue) BlockingDependencies() ([]*DependencyInfo, error) { return issue.getBlockingDependencies(db.GetEngine(db.DefaultContext)) } -func (issue *Issue) updateClosedNum(ctx context.Context) (err error) { +func updateIssueClosedNum(ctx context.Context, issue *Issue) (err error) { if issue.IsPull { err = repoStatsCorrectNumClosed(ctx, issue.RepoID, true, "num_closed_pulls") } else { @@ -2188,9 +2181,9 @@ func (issue *Issue) updateClosedNum(ctx context.Context) (err error) { } // FindAndUpdateIssueMentions finds users mentioned in the given content string, and saves them in the database. -func (issue *Issue) FindAndUpdateIssueMentions(ctx context.Context, doer *user_model.User, content string) (mentions []*user_model.User, err error) { +func FindAndUpdateIssueMentions(ctx context.Context, issue *Issue, doer *user_model.User, content string) (mentions []*user_model.User, err error) { rawMentions := references.FindAllMentionsMarkdown(content) - mentions, err = issue.ResolveMentionsByVisibility(ctx, doer, rawMentions) + mentions, err = ResolveIssueMentionsByVisibility(ctx, issue, doer, rawMentions) if err != nil { return nil, fmt.Errorf("UpdateIssueMentions [%d]: %v", issue.ID, err) } @@ -2200,13 +2193,13 @@ func (issue *Issue) FindAndUpdateIssueMentions(ctx context.Context, doer *user_m return } -// ResolveMentionsByVisibility returns the users mentioned in an issue, removing those that +// ResolveIssueMentionsByVisibility returns the users mentioned in an issue, removing those that // don't have access to reading it. Teams are expanded into their users, but organizations are ignored. 
-func (issue *Issue) ResolveMentionsByVisibility(ctx context.Context, doer *user_model.User, mentions []string) (users []*user_model.User, err error) { +func ResolveIssueMentionsByVisibility(ctx context.Context, issue *Issue, doer *user_model.User, mentions []string) (users []*user_model.User, err error) { if len(mentions) == 0 { return } - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return } @@ -2241,7 +2234,7 @@ func (issue *Issue) ResolveMentionsByVisibility(ctx context.Context, doer *user_ } if issue.Repo.Owner.IsOrganization() && len(mentionTeams) > 0 { - teams := make([]*Team, 0, len(mentionTeams)) + teams := make([]*organization.Team, 0, len(mentionTeams)) if err := db.GetEngine(ctx). Join("INNER", "team_repo", "team_repo.team_id = team.id"). Where("team_repo.repo_id=?", issue.Repo.ID). @@ -2261,7 +2254,7 @@ func (issue *Issue) ResolveMentionsByVisibility(ctx context.Context, doer *user_ resolved[issue.Repo.Owner.LowerName+"/"+team.LowerName] = true continue } - has, err := db.GetEngine(ctx).Get(&TeamUnit{OrgID: issue.Repo.Owner.ID, TeamID: team.ID, Type: unittype}) + has, err := db.GetEngine(ctx).Get(&organization.TeamUnit{OrgID: issue.Repo.Owner.ID, TeamID: team.ID, Type: unittype}) if err != nil { return nil, fmt.Errorf("get team units (%d): %v", team.ID, err) } @@ -2321,9 +2314,9 @@ func (issue *Issue) ResolveMentionsByVisibility(ctx context.Context, doer *user_ continue } // Normal users must have read access to the referencing issue - perm, err := getUserRepoPermission(ctx, issue.Repo, user) + perm, err := GetUserRepoPermission(ctx, issue.Repo, user) if err != nil { - return nil, fmt.Errorf("getUserRepoPermission [%d]: %v", user.ID, err) + return nil, fmt.Errorf("GetUserRepoPermission [%d]: %v", user.ID, err) } if !perm.CanReadIssuesOrPulls(issue.IsPull) { continue @@ -2365,7 +2358,7 @@ func deleteIssuesByRepoID(sess db.Engine, repoID int64) (attachmentPaths []strin // Delete content histories if _, err = sess.In("issue_id", deleteCond). - Delete(&issues.ContentHistory{}); err != nil { + Delete(&issues_model.ContentHistory{}); err != nil { return } @@ -2393,7 +2386,7 @@ func deleteIssuesByRepoID(sess db.Engine, repoID int64) (attachmentPaths []strin } if _, err = sess.In("issue_id", deleteCond). - Delete(&Reaction{}); err != nil { + Delete(&issues_model.Reaction{}); err != nil { return } @@ -2413,7 +2406,7 @@ func deleteIssuesByRepoID(sess db.Engine, repoID int64) (attachmentPaths []strin } if _, err = sess.In("issue_id", deleteCond). - Delete(&ProjectIssue{}); err != nil { + Delete(&project_model.ProjectIssue{}); err != nil { return } diff --git a/models/issue_assignees.go b/models/issue_assignees.go index b3511f8b59..0f1f7b6576 100644 --- a/models/issue_assignees.go +++ b/models/issue_assignees.go @@ -92,15 +92,15 @@ func clearAssigneeByUserID(sess db.Engine, userID int64) (err error) { return } -// ToggleAssignee changes a user between assigned and not assigned for this issue, and make issue comment for it. -func (issue *Issue) ToggleAssignee(doer *user_model.User, assigneeID int64) (removed bool, comment *Comment, err error) { +// ToggleIssueAssignee changes a user between assigned and not assigned for this issue, and make issue comment for it. 
+func ToggleIssueAssignee(issue *Issue, doer *user_model.User, assigneeID int64) (removed bool, comment *Comment, err error) { ctx, committer, err := db.TxContext() if err != nil { return false, nil, err } defer committer.Close() - removed, comment, err = issue.toggleAssignee(ctx, doer, assigneeID, false) + removed, comment, err = toggleIssueAssignee(ctx, issue, doer, assigneeID, false) if err != nil { return false, nil, err } @@ -112,7 +112,7 @@ func (issue *Issue) ToggleAssignee(doer *user_model.User, assigneeID int64) (rem return removed, comment, nil } -func (issue *Issue) toggleAssignee(ctx context.Context, doer *user_model.User, assigneeID int64, isCreate bool) (removed bool, comment *Comment, err error) { +func toggleIssueAssignee(ctx context.Context, issue *Issue, doer *user_model.User, assigneeID int64, isCreate bool) (removed bool, comment *Comment, err error) { sess := db.GetEngine(ctx) removed, err = toggleUserAssignee(sess, issue, assigneeID) if err != nil { @@ -120,7 +120,7 @@ func (issue *Issue) toggleAssignee(ctx context.Context, doer *user_model.User, a } // Repo infos - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return false, nil, fmt.Errorf("loadRepo: %v", err) } @@ -133,7 +133,7 @@ func (issue *Issue) toggleAssignee(ctx context.Context, doer *user_model.User, a AssigneeID: assigneeID, } // Comment - comment, err = createComment(ctx, opts) + comment, err = CreateCommentCtx(ctx, opts) if err != nil { return false, nil, fmt.Errorf("createComment: %v", err) } diff --git a/models/issue_assignees_test.go b/models/issue_assignees_test.go index dd9a42b572..41a3ad86e3 100644 --- a/models/issue_assignees_test.go +++ b/models/issue_assignees_test.go @@ -23,17 +23,17 @@ func TestUpdateAssignee(t *testing.T) { // Assign multiple users user2, err := user_model.GetUserByID(2) assert.NoError(t, err) - _, _, err = issue.ToggleAssignee(&user_model.User{ID: 1}, user2.ID) + _, _, err = ToggleIssueAssignee(issue, &user_model.User{ID: 1}, user2.ID) assert.NoError(t, err) user3, err := user_model.GetUserByID(3) assert.NoError(t, err) - _, _, err = issue.ToggleAssignee(&user_model.User{ID: 1}, user3.ID) + _, _, err = ToggleIssueAssignee(issue, &user_model.User{ID: 1}, user3.ID) assert.NoError(t, err) user1, err := user_model.GetUserByID(1) // This user is already assigned (see the definition in fixtures), so running UpdateAssignee should unassign him assert.NoError(t, err) - _, _, err = issue.ToggleAssignee(&user_model.User{ID: 1}, user1.ID) + _, _, err = ToggleIssueAssignee(issue, &user_model.User{ID: 1}, user1.ID) assert.NoError(t, err) // Check if he got removed diff --git a/models/issue_comment.go b/models/issue_comment.go index 0af45e80e8..2cf3d5a61d 100644 --- a/models/issue_comment.go +++ b/models/issue_comment.go @@ -15,7 +15,9 @@ import ( "unicode/utf8" "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/models/issues" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" + project_model "code.gitea.io/gitea/models/project" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" @@ -108,6 +110,10 @@ const ( CommentTypeDismissReview // 33 Change issue ref CommentTypeChangeIssueRef + // 34 pr was scheduled to auto merge when checks succeed + CommentTypePRScheduledToAutoMerge + // 35 pr was un scheduled to auto merge when checks succeed + CommentTypePRUnScheduledToAutoMerge ) var commentStrings = []string{ @@ -145,6 +151,8 @@ var 
commentStrings = []string{ "project_board", "dismiss_review", "change_issue_ref", + "pull_scheduled_merge", + "pull_cancel_scheduled_merge", } func (t CommentType) String() string { @@ -203,19 +211,19 @@ type Comment struct { RemovedLabels []*Label `xorm:"-"` OldProjectID int64 ProjectID int64 - OldProject *Project `xorm:"-"` - Project *Project `xorm:"-"` + OldProject *project_model.Project `xorm:"-"` + Project *project_model.Project `xorm:"-"` OldMilestoneID int64 MilestoneID int64 - OldMilestone *Milestone `xorm:"-"` - Milestone *Milestone `xorm:"-"` + OldMilestone *issues_model.Milestone `xorm:"-"` + Milestone *issues_model.Milestone `xorm:"-"` TimeID int64 Time *TrackedTime `xorm:"-"` AssigneeID int64 RemovedAssignee bool - Assignee *user_model.User `xorm:"-"` - AssigneeTeamID int64 `xorm:"NOT NULL DEFAULT 0"` - AssigneeTeam *Team `xorm:"-"` + Assignee *user_model.User `xorm:"-"` + AssigneeTeamID int64 `xorm:"NOT NULL DEFAULT 0"` + AssigneeTeam *organization.Team `xorm:"-"` ResolveDoerID int64 ResolveDoer *user_model.User `xorm:"-"` OldTitle string @@ -241,8 +249,8 @@ type Comment struct { // Reference issue in commit message CommitSHA string `xorm:"VARCHAR(40)"` - Attachments []*repo_model.Attachment `xorm:"-"` - Reactions ReactionList `xorm:"-"` + Attachments []*repo_model.Attachment `xorm:"-"` + Reactions issues_model.ReactionList `xorm:"-"` // For view issue page. ShowRole RoleDescriptor `xorm:"-"` @@ -282,14 +290,15 @@ type PushActionContent struct { // LoadIssue loads issue from database func (c *Comment) LoadIssue() (err error) { - return c.loadIssue(db.GetEngine(db.DefaultContext)) + return c.LoadIssueCtx(db.DefaultContext) } -func (c *Comment) loadIssue(e db.Engine) (err error) { +// LoadIssueCtx loads issue from database +func (c *Comment) LoadIssueCtx(ctx context.Context) (err error) { if c.Issue != nil { return nil } - c.Issue, err = getIssueByID(e, c.IssueID) + c.Issue, err = getIssueByID(db.GetEngine(ctx), c.IssueID) return } @@ -356,7 +365,7 @@ func (c *Comment) HTMLURL() string { log.Error("LoadIssue(%d): %v", c.IssueID, err) return "" } - err = c.Issue.loadRepo(db.DefaultContext) + err = c.Issue.LoadRepo(db.DefaultContext) if err != nil { // Silently dropping errors :unamused: log.Error("loadRepo(%d): %v", c.Issue.RepoID, err) return "" @@ -385,7 +394,7 @@ func (c *Comment) APIURL() string { log.Error("LoadIssue(%d): %v", c.IssueID, err) return "" } - err = c.Issue.loadRepo(db.DefaultContext) + err = c.Issue.LoadRepo(db.DefaultContext) if err != nil { // Silently dropping errors :unamused: log.Error("loadRepo(%d): %v", c.Issue.RepoID, err) return "" @@ -406,7 +415,7 @@ func (c *Comment) IssueURL() string { return "" } - err = c.Issue.loadRepo(db.DefaultContext) + err = c.Issue.LoadRepo(db.DefaultContext) if err != nil { // Silently dropping errors :unamused: log.Error("loadRepo(%d): %v", c.Issue.RepoID, err) return "" @@ -422,7 +431,7 @@ func (c *Comment) PRURL() string { return "" } - err = c.Issue.loadRepo(db.DefaultContext) + err = c.Issue.LoadRepo(db.DefaultContext) if err != nil { // Silently dropping errors :unamused: log.Error("loadRepo(%d): %v", c.Issue.RepoID, err) return "" @@ -468,7 +477,7 @@ func (c *Comment) LoadLabel() error { // LoadProject if comment.Type is CommentTypeProject, then load project. 
func (c *Comment) LoadProject() error { if c.OldProjectID > 0 { - var oldProject Project + var oldProject project_model.Project has, err := db.GetEngine(db.DefaultContext).ID(c.OldProjectID).Get(&oldProject) if err != nil { return err @@ -478,7 +487,7 @@ func (c *Comment) LoadProject() error { } if c.ProjectID > 0 { - var project Project + var project project_model.Project has, err := db.GetEngine(db.DefaultContext).ID(c.ProjectID).Get(&project) if err != nil { return err @@ -493,7 +502,7 @@ func (c *Comment) LoadProject() error { // LoadMilestone if comment.Type is CommentTypeMilestone, then load milestone func (c *Comment) LoadMilestone() error { if c.OldMilestoneID > 0 { - var oldMilestone Milestone + var oldMilestone issues_model.Milestone has, err := db.GetEngine(db.DefaultContext).ID(c.OldMilestoneID).Get(&oldMilestone) if err != nil { return err @@ -503,7 +512,7 @@ func (c *Comment) LoadMilestone() error { } if c.MilestoneID > 0 { - var milestone Milestone + var milestone issues_model.Milestone has, err := db.GetEngine(db.DefaultContext).ID(c.MilestoneID).Get(&milestone) if err != nil { return err @@ -572,7 +581,7 @@ func (c *Comment) LoadAssigneeUserAndTeam() error { return err } - if err = c.Issue.LoadRepo(); err != nil { + if err = c.Issue.LoadRepo(db.DefaultContext); err != nil { return err } @@ -581,8 +590,8 @@ func (c *Comment) LoadAssigneeUserAndTeam() error { } if c.Issue.Repo.Owner.IsOrganization() { - c.AssigneeTeam, err = GetTeamByID(c.AssigneeTeamID) - if err != nil && !IsErrTeamNotExist(err) { + c.AssigneeTeam, err = organization.GetTeamByID(c.AssigneeTeamID) + if err != nil && !organization.IsErrTeamNotExist(err) { return err } } @@ -629,11 +638,11 @@ func (c *Comment) LoadTime() error { return err } -func (c *Comment) loadReactions(e db.Engine, repo *repo_model.Repository) (err error) { +func (c *Comment) loadReactions(ctx context.Context, repo *repo_model.Repository) (err error) { if c.Reactions != nil { return nil } - c.Reactions, _, err = findReactions(e, FindReactionsOptions{ + c.Reactions, _, err = issues_model.FindReactions(ctx, issues_model.FindReactionsOptions{ IssueID: c.IssueID, CommentID: c.ID, }) @@ -641,7 +650,7 @@ func (c *Comment) loadReactions(e db.Engine, repo *repo_model.Repository) (err e return err } // Load reaction user data - if _, err := c.Reactions.loadUsers(e, repo); err != nil { + if _, err := c.Reactions.LoadUsers(ctx, repo); err != nil { return err } return nil @@ -649,7 +658,7 @@ func (c *Comment) loadReactions(e db.Engine, repo *repo_model.Repository) (err e // LoadReactions loads comment reactions func (c *Comment) LoadReactions(repo *repo_model.Repository) error { - return c.loadReactions(db.GetEngine(db.DefaultContext), repo) + return c.loadReactions(db.DefaultContext, repo) } func (c *Comment) loadReview(e db.Engine) (err error) { @@ -715,7 +724,7 @@ func (c *Comment) CodeCommentURL() string { log.Error("LoadIssue(%d): %v", c.IssueID, err) return "" } - err = c.Issue.loadRepo(db.DefaultContext) + err = c.Issue.LoadRepo(db.DefaultContext) if err != nil { // Silently dropping errors :unamused: log.Error("loadRepo(%d): %v", c.Issue.RepoID, err) return "" @@ -759,7 +768,8 @@ func (c *Comment) LoadPushCommits(ctx context.Context) (err error) { return err } -func createComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) { +// CreateCommentCtx creates comment with context +func CreateCommentCtx(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) { e := db.GetEngine(ctx) var LabelID int64 if 
opts.Label != nil { @@ -856,12 +866,12 @@ func updateCommentInfos(ctx context.Context, opts *CreateCommentOptions, comment } } case CommentTypeReopen, CommentTypeClose: - if err = opts.Issue.updateClosedNum(ctx); err != nil { + if err = updateIssueClosedNum(ctx, opts.Issue); err != nil { return err } } // update the issue's updated_unix column - return updateIssueCols(ctx, opts.Issue, "updated_unix") + return UpdateIssueCols(ctx, opts.Issue, "updated_unix") } func createDeadlineComment(ctx context.Context, doer *user_model.User, issue *Issue, newDeadlineUnix timeutil.TimeStamp) (*Comment, error) { @@ -882,7 +892,7 @@ func createDeadlineComment(ctx context.Context, doer *user_model.User, issue *Is content = newDeadlineUnix.Format("2006-01-02") + "|" + issue.DeadlineUnix.Format("2006-01-02") } - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return nil, err } @@ -893,7 +903,7 @@ func createDeadlineComment(ctx context.Context, doer *user_model.User, issue *Is Issue: issue, Content: content, } - comment, err := createComment(ctx, opts) + comment, err := CreateCommentCtx(ctx, opts) if err != nil { return nil, err } @@ -906,7 +916,7 @@ func createIssueDependencyComment(ctx context.Context, doer *user_model.User, is if !add { cType = CommentTypeRemoveDependency } - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return } @@ -918,7 +928,7 @@ func createIssueDependencyComment(ctx context.Context, doer *user_model.User, is Issue: issue, DependentIssueID: dependentIssue.ID, } - if _, err = createComment(ctx, opts); err != nil { + if _, err = CreateCommentCtx(ctx, opts); err != nil { return } @@ -929,7 +939,7 @@ func createIssueDependencyComment(ctx context.Context, doer *user_model.User, is Issue: dependentIssue, DependentIssueID: issue.ID, } - _, err = createComment(ctx, opts) + _, err = CreateCommentCtx(ctx, opts) return } @@ -979,7 +989,7 @@ func CreateComment(opts *CreateCommentOptions) (comment *Comment, err error) { } defer committer.Close() - comment, err = createComment(ctx, opts) + comment, err = CreateCommentCtx(ctx, opts) if err != nil { return nil, err } @@ -1123,7 +1133,7 @@ func UpdateComment(c *Comment, doer *user_model.User) error { if _, err := sess.ID(c.ID).AllCols().Update(c); err != nil { return err } - if err := c.loadIssue(sess); err != nil { + if err := c.LoadIssueCtx(ctx); err != nil { return err } if err := c.addCrossReferences(ctx, doer, true); err != nil { @@ -1144,26 +1154,27 @@ func DeleteComment(comment *Comment) error { } defer committer.Close() - if err := deleteComment(db.GetEngine(ctx), comment); err != nil { + if err := deleteComment(ctx, comment); err != nil { return err } return committer.Commit() } -func deleteComment(e db.Engine, comment *Comment) error { +func deleteComment(ctx context.Context, comment *Comment) error { + e := db.GetEngine(ctx) if _, err := e.ID(comment.ID).NoAutoCondition().Delete(comment); err != nil { return err } - if _, err := e.Delete(&issues.ContentHistory{ + if _, err := e.Delete(&issues_model.ContentHistory{ CommentID: comment.ID, }); err != nil { return err } if comment.Type == CommentTypeComment { - if _, err := e.Exec("UPDATE `issue` SET num_comments = num_comments - 1 WHERE id = ?", comment.IssueID); err != nil { + if _, err := e.ID(comment.IssueID).Decr("num_comments").Update(new(Issue)); err != nil { return err } } @@ -1175,7 +1186,7 @@ func deleteComment(e db.Engine, comment *Comment) error { return err } - return deleteReaction(e, 
&ReactionOptions{Comment: comment}) + return issues_model.DeleteReaction(ctx, &issues_model.ReactionOptions{CommentID: comment.ID}) } // CodeComments represents comments on code by using this structure: FILENAME -> LINE (+ == proposed; - == previous) -> COMMENTS @@ -1227,7 +1238,7 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu return nil, err } - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return nil, err } @@ -1349,6 +1360,28 @@ func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *Pul return } +// CreateAutoMergeComment is a internal function, only use it for CommentTypePRScheduledToAutoMerge and CommentTypePRUnScheduledToAutoMerge CommentTypes +func CreateAutoMergeComment(ctx context.Context, typ CommentType, pr *PullRequest, doer *user_model.User) (comment *Comment, err error) { + if typ != CommentTypePRScheduledToAutoMerge && typ != CommentTypePRUnScheduledToAutoMerge { + return nil, fmt.Errorf("comment type %d cannot be used to create an auto merge comment", typ) + } + if err = pr.LoadIssueCtx(ctx); err != nil { + return + } + + if err = pr.LoadBaseRepoCtx(ctx); err != nil { + return + } + + comment, err = CreateCommentCtx(ctx, &CreateCommentOptions{ + Type: typ, + Doer: doer, + Repo: pr.BaseRepo, + Issue: pr.Issue, + }) + return +} + // getCommitsFromRepo get commit IDs from repo in between oldCommitID and newCommitID // isForcePush will be true if oldCommit isn't on the branch // Commit on baseBranch will skip diff --git a/models/issue_comment_list.go b/models/issue_comment_list.go index 23a2756dcf..4133fc8761 100644 --- a/models/issue_comment_list.go +++ b/models/issue_comment_list.go @@ -8,8 +8,10 @@ import ( "context" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" ) // CommentList defines a list of comments @@ -22,7 +24,7 @@ func (comments CommentList) getPosterIDs() []int64 { posterIDs[comment.PosterID] = struct{}{} } } - return keysInt64(posterIDs) + return container.KeysInt64(posterIDs) } func (comments CommentList) loadPosters(e db.Engine) error { @@ -75,7 +77,7 @@ func (comments CommentList) getLabelIDs() []int64 { ids[comment.LabelID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } func (comments CommentList) loadLabels(e db.Engine) error { @@ -125,7 +127,7 @@ func (comments CommentList) getMilestoneIDs() []int64 { ids[comment.MilestoneID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } func (comments CommentList) loadMilestones(e db.Engine) error { @@ -138,7 +140,7 @@ func (comments CommentList) loadMilestones(e db.Engine) error { return nil } - milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs)) + milestoneMaps := make(map[int64]*issues_model.Milestone, len(milestoneIDs)) left := len(milestoneIDs) for left > 0 { limit := defaultMaxInSize @@ -168,7 +170,7 @@ func (comments CommentList) getOldMilestoneIDs() []int64 { ids[comment.OldMilestoneID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } func (comments CommentList) loadOldMilestones(e db.Engine) error { @@ -181,7 +183,7 @@ func (comments CommentList) loadOldMilestones(e db.Engine) error { return nil } - milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs)) + milestoneMaps := make(map[int64]*issues_model.Milestone, len(milestoneIDs)) left := 
len(milestoneIDs) for left > 0 { limit := defaultMaxInSize @@ -211,7 +213,7 @@ func (comments CommentList) getAssigneeIDs() []int64 { ids[comment.AssigneeID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } func (comments CommentList) loadAssignees(e db.Engine) error { @@ -267,7 +269,7 @@ func (comments CommentList) getIssueIDs() []int64 { ids[comment.IssueID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } // Issues returns all the issues of comments @@ -342,7 +344,7 @@ func (comments CommentList) getDependentIssueIDs() []int64 { ids[comment.DependentIssueID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } func (comments CommentList) loadDependentIssues(ctx context.Context) error { @@ -386,7 +388,7 @@ func (comments CommentList) loadDependentIssues(ctx context.Context) error { if comment.DependentIssue == nil { comment.DependentIssue = issues[comment.DependentIssueID] if comment.DependentIssue != nil { - if err := comment.DependentIssue.loadRepo(ctx); err != nil { + if err := comment.DependentIssue.LoadRepo(ctx); err != nil { return err } } @@ -444,7 +446,7 @@ func (comments CommentList) getReviewIDs() []int64 { ids[comment.ReviewID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } func (comments CommentList) loadReviews(e db.Engine) error { diff --git a/models/issue_dependency.go b/models/issue_dependency.go index d2c5785b90..b292db57e0 100644 --- a/models/issue_dependency.go +++ b/models/issue_dependency.go @@ -5,6 +5,8 @@ package models import ( + "context" + "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/timeutil" @@ -117,12 +119,8 @@ func issueDepExists(e db.Engine, issueID, depID int64) (bool, error) { } // IssueNoDependenciesLeft checks if issue can be closed -func IssueNoDependenciesLeft(issue *Issue) (bool, error) { - return issueNoDependenciesLeft(db.GetEngine(db.DefaultContext), issue) -} - -func issueNoDependenciesLeft(e db.Engine, issue *Issue) (bool, error) { - exists, err := e. +func IssueNoDependenciesLeft(ctx context.Context, issue *Issue) (bool, error) { + exists, err := db.GetEngine(ctx). Table("issue_dependency"). Select("issue.*"). Join("INNER", "issue", "issue.id = issue_dependency.dependency_id"). 
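With this change IssueNoDependenciesLeft takes a context.Context instead of binding to an engine, so it can share a transaction with the status change that usually follows it. A minimal caller-side sketch under that assumption — the helper name and error message are illustrative and not part of this diff:

func closeIfNoDependencies(issue *Issue, doer *user_model.User) error {
	ctx, committer, err := db.TxContext()
	if err != nil {
		return err
	}
	defer committer.Close()

	// Reuse the transaction context for both the dependency check and the close.
	noDeps, err := IssueNoDependenciesLeft(ctx, issue)
	if err != nil {
		return err
	}
	if !noDeps {
		return fmt.Errorf("issue %d still has open dependencies", issue.ID)
	}
	if _, err := ChangeIssueStatus(ctx, issue, doer, true); err != nil {
		return err
	}
	return committer.Commit()
}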
diff --git a/models/issue_dependency_test.go b/models/issue_dependency_test.go index 61215dedef..345a9077cd 100644 --- a/models/issue_dependency_test.go +++ b/models/issue_dependency_test.go @@ -7,6 +7,7 @@ package models import ( "testing" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -43,15 +44,15 @@ func TestCreateIssueDependency(t *testing.T) { _ = unittest.AssertExistsAndLoadBean(t, &Comment{Type: CommentTypeAddDependency, PosterID: user1.ID, IssueID: issue1.ID}) // Check if dependencies left is correct - left, err := IssueNoDependenciesLeft(issue1) + left, err := IssueNoDependenciesLeft(db.DefaultContext, issue1) assert.NoError(t, err) assert.False(t, left) // Close #2 and check again - _, err = issue2.ChangeStatus(user1, true) + _, err = ChangeIssueStatus(db.DefaultContext, issue2, user1, true) assert.NoError(t, err) - left, err = IssueNoDependenciesLeft(issue1) + left, err = IssueNoDependenciesLeft(db.DefaultContext, issue1) assert.NoError(t, err) assert.True(t, left) diff --git a/models/issue_label.go b/models/issue_label.go index 0aea620773..d069153939 100644 --- a/models/issue_label.go +++ b/models/issue_label.go @@ -50,50 +50,6 @@ func init() { db.RegisterModel(new(IssueLabel)) } -// GetLabelTemplateFile loads the label template file by given name, -// then parses and returns a list of name-color pairs and optionally description. -func GetLabelTemplateFile(name string) ([][3]string, error) { - data, err := GetRepoInitFile("label", name) - if err != nil { - return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("GetRepoInitFile: %v", err)} - } - - lines := strings.Split(string(data), "\n") - list := make([][3]string, 0, len(lines)) - for i := 0; i < len(lines); i++ { - line := strings.TrimSpace(lines[i]) - if len(line) == 0 { - continue - } - - parts := strings.SplitN(line, ";", 2) - - fields := strings.SplitN(parts[0], " ", 2) - if len(fields) != 2 { - return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("line is malformed: %s", line)} - } - - color := strings.Trim(fields[0], " ") - if len(color) == 6 { - color = "#" + color - } - if !LabelColorPattern.MatchString(color) { - return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("bad HTML color code in line: %s", line)} - } - - var description string - - if len(parts) > 1 { - description = strings.TrimSpace(parts[1]) - } - - fields[1] = strings.TrimSpace(fields[1]) - list = append(list, [3]string{fields[1], color, description}) - } - - return list, nil -} - // CalOpenIssues sets the number of open issues of a label based on the already stored number of closed issues. func (label *Label) CalOpenIssues() { label.NumOpenIssues = label.NumIssues - label.NumClosedIssues @@ -101,12 +57,9 @@ func (label *Label) CalOpenIssues() { // CalOpenOrgIssues calculates the open issues of a label for a specific repo func (label *Label) CalOpenOrgIssues(repoID, labelID int64) { - repoIDs := []int64{repoID} - labelIDs := []int64{labelID} - counts, _ := CountIssuesByRepo(&IssuesOptions{ - RepoIDs: repoIDs, - LabelIDs: labelIDs, + RepoID: repoID, + LabelIDs: []int64{labelID}, }) for _, count := range counts { @@ -191,70 +144,8 @@ func (label *Label) ForegroundColor() template.CSS { return template.CSS("#000") } -// .____ ___. 
.__ -// | | _____ \_ |__ ____ | | -// | | \__ \ | __ \_/ __ \| | -// | |___ / __ \| \_\ \ ___/| |__ -// >_______ (____ /___ /\___ >____/ - -func loadLabels(labelTemplate string) ([]string, error) { - list, err := GetLabelTemplateFile(labelTemplate) - if err != nil { - return nil, err - } - - labels := make([]string, len(list)) - for i := 0; i < len(list); i++ { - labels[i] = list[i][0] - } - return labels, nil -} - -// LoadLabelsFormatted loads the labels' list of a template file as a string separated by comma -func LoadLabelsFormatted(labelTemplate string) (string, error) { - labels, err := loadLabels(labelTemplate) - return strings.Join(labels, ", "), err -} - -func initializeLabels(e db.Engine, id int64, labelTemplate string, isOrg bool) error { - list, err := GetLabelTemplateFile(labelTemplate) - if err != nil { - return err - } - - labels := make([]*Label, len(list)) - for i := 0; i < len(list); i++ { - labels[i] = &Label{ - Name: list[i][0], - Description: list[i][2], - Color: list[i][1], - } - if isOrg { - labels[i].OrgID = id - } else { - labels[i].RepoID = id - } - } - for _, label := range labels { - if err = newLabel(e, label); err != nil { - return err - } - } - return nil -} - -// InitializeLabels adds a label set to a repository using a template -func InitializeLabels(ctx context.Context, repoID int64, labelTemplate string, isOrg bool) error { - return initializeLabels(db.GetEngine(ctx), repoID, labelTemplate, isOrg) -} - -func newLabel(e db.Engine, label *Label) error { - _, err := e.Insert(label) - return err -} - // NewLabel creates a new label -func NewLabel(label *Label) error { +func NewLabel(ctx context.Context, label *Label) error { if !LabelColorPattern.MatchString(label.Color) { return fmt.Errorf("bad color code: %s", label.Color) } @@ -275,7 +166,7 @@ func NewLabel(label *Label) error { label.Color = fmt.Sprintf("#%c%c%c%c%c%c", r, r, g, g, b, b) } - return newLabel(db.GetEngine(db.DefaultContext), label) + return db.Insert(ctx, label) } // NewLabels creates new labels @@ -290,7 +181,7 @@ func NewLabels(labels ...*Label) error { if !LabelColorPattern.MatchString(label.Color) { return fmt.Errorf("bad color code: %s", label.Color) } - if err := newLabel(db.GetEngine(ctx), label); err != nil { + if err := db.Insert(ctx, label); err != nil { return err } } @@ -692,7 +583,7 @@ func newIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_m return err } - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return } @@ -704,7 +595,7 @@ func newIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_m Label: label, Content: "1", } - if _, err = createComment(ctx, opts); err != nil { + if _, err = CreateCommentCtx(ctx, opts); err != nil { return err } @@ -722,9 +613,8 @@ func NewIssueLabel(issue *Issue, label *Label, doer *user_model.User) (err error return err } defer committer.Close() - sess := db.GetEngine(ctx) - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return err } @@ -738,7 +628,7 @@ func NewIssueLabel(issue *Issue, label *Label, doer *user_model.User) (err error } issue.Labels = nil - if err = issue.loadLabels(sess); err != nil { + if err = issue.LoadLabels(ctx); err != nil { return err } @@ -748,7 +638,7 @@ func NewIssueLabel(issue *Issue, label *Label, doer *user_model.User) (err error // newIssueLabels add labels to an issue. 
It will check if the labels are valid for the issue func newIssueLabels(ctx context.Context, issue *Issue, labels []*Label, doer *user_model.User) (err error) { e := db.GetEngine(ctx) - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return err } for _, label := range labels { @@ -779,7 +669,7 @@ func NewIssueLabels(issue *Issue, labels []*Label, doer *user_model.User) (err e } issue.Labels = nil - if err = issue.loadLabels(db.GetEngine(ctx)); err != nil { + if err = issue.LoadLabels(ctx); err != nil { return err } @@ -797,7 +687,7 @@ func deleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *use return nil } - if err = issue.loadRepo(ctx); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return } @@ -808,7 +698,7 @@ func deleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *use Issue: issue, Label: label, } - if _, err = createComment(ctx, opts); err != nil { + if _, err = CreateCommentCtx(ctx, opts); err != nil { return err } @@ -816,23 +706,13 @@ func deleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *use } // DeleteIssueLabel deletes issue-label relation. -func DeleteIssueLabel(issue *Issue, label *Label, doer *user_model.User) (err error) { - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() - - if err = deleteIssueLabel(ctx, issue, label, doer); err != nil { +func DeleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_model.User) error { + if err := deleteIssueLabel(ctx, issue, label, doer); err != nil { return err } issue.Labels = nil - if err = issue.loadLabels(db.GetEngine(ctx)); err != nil { - return err - } - - return committer.Commit() + return issue.LoadLabels(ctx) } func deleteLabelsByRepoID(sess db.Engine, repoID int64) error { diff --git a/models/issue_label_test.go b/models/issue_label_test.go index 68281dd7ad..2dd0cf98e0 100644 --- a/models/issue_label_test.go +++ b/models/issue_label_test.go @@ -42,11 +42,11 @@ func TestNewLabels(t *testing.T) { {RepoID: 4, Name: "labelName4", Color: "ABCDEF"}, {RepoID: 5, Name: "labelName5", Color: "DEF"}, } - assert.Error(t, NewLabel(&Label{RepoID: 3, Name: "invalid Color", Color: ""})) - assert.Error(t, NewLabel(&Label{RepoID: 3, Name: "invalid Color", Color: "#45G"})) - assert.Error(t, NewLabel(&Label{RepoID: 3, Name: "invalid Color", Color: "#12345G"})) - assert.Error(t, NewLabel(&Label{RepoID: 3, Name: "invalid Color", Color: "45G"})) - assert.Error(t, NewLabel(&Label{RepoID: 3, Name: "invalid Color", Color: "12345G"})) + assert.Error(t, NewLabel(db.DefaultContext, &Label{RepoID: 3, Name: "invalid Color", Color: ""})) + assert.Error(t, NewLabel(db.DefaultContext, &Label{RepoID: 3, Name: "invalid Color", Color: "#45G"})) + assert.Error(t, NewLabel(db.DefaultContext, &Label{RepoID: 3, Name: "invalid Color", Color: "#12345G"})) + assert.Error(t, NewLabel(db.DefaultContext, &Label{RepoID: 3, Name: "invalid Color", Color: "45G"})) + assert.Error(t, NewLabel(db.DefaultContext, &Label{RepoID: 3, Name: "invalid Color", Color: "12345G"})) for _, label := range labels { unittest.AssertNotExistsBean(t, label) } @@ -369,7 +369,12 @@ func TestDeleteIssueLabel(t *testing.T) { } } - assert.NoError(t, DeleteIssueLabel(issue, label, doer)) + ctx, committer, err := db.TxContext() + defer committer.Close() + assert.NoError(t, err) + assert.NoError(t, DeleteIssueLabel(ctx, issue, label, doer)) + assert.NoError(t, committer.Commit()) + unittest.AssertNotExistsBean(t, 
&IssueLabel{IssueID: issueID, LabelID: labelID}) unittest.AssertExistsAndLoadBean(t, &Comment{ Type: CommentTypeLabel, diff --git a/models/issue_list.go b/models/issue_list.go index b516e7336e..3116b49d8a 100644 --- a/models/issue_list.go +++ b/models/issue_list.go @@ -8,8 +8,10 @@ import ( "fmt" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" "xorm.io/builder" ) @@ -32,7 +34,7 @@ func (issues IssueList) getRepoIDs() []int64 { repoIDs[issue.RepoID] = struct{}{} } } - return keysInt64(repoIDs) + return container.KeysInt64(repoIDs) } func (issues IssueList) loadRepositories(e db.Engine) ([]*repo_model.Repository, error) { @@ -83,7 +85,7 @@ func (issues IssueList) getPosterIDs() []int64 { posterIDs[issue.PosterID] = struct{}{} } } - return keysInt64(posterIDs) + return container.KeysInt64(posterIDs) } func (issues IssueList) loadPosters(e db.Engine) error { @@ -189,7 +191,7 @@ func (issues IssueList) getMilestoneIDs() []int64 { ids[issue.MilestoneID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } func (issues IssueList) loadMilestones(e db.Engine) error { @@ -198,7 +200,7 @@ func (issues IssueList) loadMilestones(e db.Engine) error { return nil } - milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs)) + milestoneMaps := make(map[int64]*issues_model.Milestone, len(milestoneIDs)) left := len(milestoneIDs) for left > 0 { limit := defaultMaxInSize diff --git a/models/issue_lock.go b/models/issue_lock.go index 20e94c7b21..a122f618d0 100644 --- a/models/issue_lock.go +++ b/models/issue_lock.go @@ -46,7 +46,7 @@ func updateIssueLock(opts *IssueLockOptions, lock bool) error { } defer committer.Close() - if err := updateIssueCols(ctx, opts.Issue, "is_locked"); err != nil { + if err := UpdateIssueCols(ctx, opts.Issue, "is_locked"); err != nil { return err } @@ -57,7 +57,7 @@ func updateIssueLock(opts *IssueLockOptions, lock bool) error { Type: commentType, Content: opts.Reason, } - if _, err := createComment(ctx, opt); err != nil { + if _, err := CreateCommentCtx(ctx, opt); err != nil { return err } diff --git a/models/issue_project.go b/models/issue_project.go new file mode 100644 index 0000000000..0e993b39c5 --- /dev/null +++ b/models/issue_project.go @@ -0,0 +1,181 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package models + +import ( + "context" + "fmt" + + "code.gitea.io/gitea/models/db" + project_model "code.gitea.io/gitea/models/project" + user_model "code.gitea.io/gitea/models/user" +) + +// LoadProject load the project the issue was assigned to +func (i *Issue) LoadProject() (err error) { + return i.loadProject(db.GetEngine(db.DefaultContext)) +} + +func (i *Issue) loadProject(e db.Engine) (err error) { + if i.Project == nil { + var p project_model.Project + if _, err = e.Table("project"). + Join("INNER", "project_issue", "project.id=project_issue.project_id"). + Where("project_issue.issue_id = ?", i.ID). 
+ Get(&p); err != nil { + return err + } + i.Project = &p + } + return +} + +// ProjectID return project id if issue was assigned to one +func (i *Issue) ProjectID() int64 { + return i.projectID(db.GetEngine(db.DefaultContext)) +} + +func (i *Issue) projectID(e db.Engine) int64 { + var ip project_model.ProjectIssue + has, err := e.Where("issue_id=?", i.ID).Get(&ip) + if err != nil || !has { + return 0 + } + return ip.ProjectID +} + +// ProjectBoardID return project board id if issue was assigned to one +func (i *Issue) ProjectBoardID() int64 { + return i.projectBoardID(db.GetEngine(db.DefaultContext)) +} + +func (i *Issue) projectBoardID(e db.Engine) int64 { + var ip project_model.ProjectIssue + has, err := e.Where("issue_id=?", i.ID).Get(&ip) + if err != nil || !has { + return 0 + } + return ip.ProjectBoardID +} + +// LoadIssuesFromBoard load issues assigned to this board +func LoadIssuesFromBoard(b *project_model.Board) (IssueList, error) { + issueList := make([]*Issue, 0, 10) + + if b.ID != 0 { + issues, err := Issues(&IssuesOptions{ + ProjectBoardID: b.ID, + ProjectID: b.ProjectID, + }) + if err != nil { + return nil, err + } + issueList = issues + } + + if b.Default { + issues, err := Issues(&IssuesOptions{ + ProjectBoardID: -1, // Issues without ProjectBoardID + ProjectID: b.ProjectID, + }) + if err != nil { + return nil, err + } + issueList = append(issueList, issues...) + } + + if err := IssueList(issueList).LoadComments(); err != nil { + return nil, err + } + + return issueList, nil +} + +// LoadIssuesFromBoardList load issues assigned to the boards +func LoadIssuesFromBoardList(bs project_model.BoardList) (map[int64]IssueList, error) { + issuesMap := make(map[int64]IssueList, len(bs)) + for i := range bs { + il, err := LoadIssuesFromBoard(bs[i]) + if err != nil { + return nil, err + } + issuesMap[bs[i].ID] = il + } + return issuesMap, nil +} + +// ChangeProjectAssign changes the project associated with an issue +func ChangeProjectAssign(issue *Issue, doer *user_model.User, newProjectID int64) error { + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + if err := addUpdateIssueProject(ctx, issue, doer, newProjectID); err != nil { + return err + } + + return committer.Commit() +} + +func addUpdateIssueProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectID int64) error { + e := db.GetEngine(ctx) + oldProjectID := issue.projectID(e) + + if _, err := e.Where("project_issue.issue_id=?", issue.ID).Delete(&project_model.ProjectIssue{}); err != nil { + return err + } + + if err := issue.LoadRepo(ctx); err != nil { + return err + } + + if oldProjectID > 0 || newProjectID > 0 { + if _, err := CreateCommentCtx(ctx, &CreateCommentOptions{ + Type: CommentTypeProject, + Doer: doer, + Repo: issue.Repo, + Issue: issue, + OldProjectID: oldProjectID, + ProjectID: newProjectID, + }); err != nil { + return err + } + } + + _, err := e.Insert(&project_model.ProjectIssue{ + IssueID: issue.ID, + ProjectID: newProjectID, + }) + return err +} + +// MoveIssueAcrossProjectBoards move a card from one board to another +func MoveIssueAcrossProjectBoards(issue *Issue, board *project_model.Board) error { + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + sess := db.GetEngine(ctx) + + var pis project_model.ProjectIssue + has, err := sess.Where("issue_id=?", issue.ID).Get(&pis) + if err != nil { + return err + } + + if !has { + return fmt.Errorf("issue has to be added to a project first") 
+ } + + pis.ProjectBoardID = board.ID + if _, err := sess.ID(pis.ID).Cols("project_board_id").Update(&pis); err != nil { + return err + } + + return committer.Commit() +} diff --git a/models/issue_stopwatch.go b/models/issue_stopwatch.go index 3be9ad4e3f..81459ba446 100644 --- a/models/issue_stopwatch.go +++ b/models/issue_stopwatch.go @@ -66,6 +66,38 @@ func getStopwatch(ctx context.Context, userID, issueID int64) (sw *Stopwatch, ex return } +// UserStopwatch is a simple grouping of a user ID and that user's stopwatches +type UserStopwatch struct { + UserID int64 + StopWatches []*Stopwatch +} + +// GetUIDsAndStopwatch returns all stopwatches grouped by user ID +func GetUIDsAndStopwatch() ([]*UserStopwatch, error) { + sws := []*Stopwatch{} + if err := db.GetEngine(db.DefaultContext).Find(&sws); err != nil { + return nil, err + } + if len(sws) == 0 { + return []*UserStopwatch{}, nil + } + + lastUserID := int64(-1) + res := []*UserStopwatch{} + for _, sw := range sws { + if lastUserID == sw.UserID { + lastUserStopwatch := res[len(res)-1] + lastUserStopwatch.StopWatches = append(lastUserStopwatch.StopWatches, sw) + } else { + res = append(res, &UserStopwatch{ + UserID: sw.UserID, + StopWatches: []*Stopwatch{sw}, + }) + } + } + return res, nil +} + // GetUserStopwatches return list of all stopwatches of a user func GetUserStopwatches(userID int64, listOptions db.ListOptions) ([]*Stopwatch, error) { sws := make([]*Stopwatch, 0, 8) @@ -157,11 +189,11 @@ func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Iss return err } - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return err } - if _, err := createComment(ctx, &CreateCommentOptions{ + if _, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Doer: user, Issue: issue, Repo: issue.Repo, @@ -178,7 +210,7 @@ func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Iss // CreateIssueStopwatch creates a stopwatch if not exist, otherwise return an error func CreateIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error { e := db.GetEngine(ctx) - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return err } @@ -208,11 +240,11 @@ func CreateIssueStopwatch(ctx context.Context, user *user_model.User, issue *Iss return err } - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return err } - if _, err := createComment(ctx, &CreateCommentOptions{ + if _, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Doer: user, Issue: issue, Repo: issue.Repo, @@ -249,11 +281,11 @@ func cancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) e return err } - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return err } - if _, err := createComment(ctx, &CreateCommentOptions{ + if _, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Doer: user, Issue: issue, Repo: issue.Repo, diff --git a/models/issue_test.go b/models/issue_test.go index 7cc0aa61b0..9b82fc80fb 100644 --- a/models/issue_test.go +++ b/models/issue_test.go @@ -8,16 +8,20 @@ import ( "context" "fmt" "sort" + "strconv" "sync" "testing" "time" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/foreignreference" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "github.com/stretchr/testify/assert" + "xorm.io/builder" ) func
TestIssue_ReplaceLabels(t *testing.T) { @@ -32,7 +36,7 @@ func TestIssue_ReplaceLabels(t *testing.T) { for i, labelID := range labelIDs { labels[i] = unittest.AssertExistsAndLoadBean(t, &Label{ID: labelID, RepoID: repo.ID}).(*Label) } - assert.NoError(t, issue.ReplaceLabels(labels, doer)) + assert.NoError(t, ReplaceIssueLabels(issue, labels, doer)) unittest.AssertCount(t, &IssueLabel{IssueID: issueID}, len(labelIDs)) for _, labelID := range labelIDs { unittest.AssertExistsAndLoadBean(t, &IssueLabel{IssueID: issueID, LabelID: labelID}) @@ -114,7 +118,7 @@ func TestIssue_ClearLabels(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) issue := unittest.AssertExistsAndLoadBean(t, &Issue{ID: test.issueID}).(*Issue) doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: test.doerID}).(*user_model.User) - assert.NoError(t, issue.ClearLabels(doer)) + assert.NoError(t, ClearIssueLabels(issue, doer)) unittest.AssertNotExistsBean(t, &IssueLabel{IssueID: test.issueID}) } } @@ -130,7 +134,7 @@ func TestUpdateIssueCols(t *testing.T) { issue.Content = "This should have no effect" now := time.Now().Unix() - assert.NoError(t, updateIssueCols(db.DefaultContext, issue, "name")) + assert.NoError(t, UpdateIssueCols(db.DefaultContext, issue, "name")) then := time.Now().Unix() updatedIssue := unittest.AssertExistsAndLoadBean(t, &Issue{ID: issue.ID}).(*Issue) @@ -154,7 +158,7 @@ func TestIssues(t *testing.T) { }, { IssuesOptions{ - RepoIDs: []int64{1, 3}, + RepoCond: builder.In("repo_id", 1, 3), SortType: "oldest", ListOptions: db.ListOptions{ Page: 1, @@ -341,7 +345,7 @@ func TestGetRepoIDsForIssuesOptions(t *testing.T) { }, { IssuesOptions{ - RepoIDs: []int64{1, 2}, + RepoCond: builder.In("repo_id", 1, 2), }, []int64{1, 2}, }, @@ -439,12 +443,12 @@ func TestIssue_DeleteIssue(t *testing.T) { assert.NoError(t, err) err = CreateIssueDependency(user, issue1, issue2) assert.NoError(t, err) - left, err := IssueNoDependenciesLeft(issue1) + left, err := IssueNoDependenciesLeft(db.DefaultContext, issue1) assert.NoError(t, err) assert.False(t, left) err = DeleteIssue(&Issue{ID: 2}) assert.NoError(t, err) - left, err = IssueNoDependenciesLeft(issue1) + left, err = IssueNoDependenciesLeft(db.DefaultContext, issue1) assert.NoError(t, err) assert.True(t, left) } @@ -457,7 +461,7 @@ func TestIssue_ResolveMentions(t *testing.T) { r := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: o.ID, LowerName: repo}).(*repo_model.Repository) issue := &Issue{RepoID: r.ID} d := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: doer}).(*user_model.User) - resolved, err := issue.ResolveMentionsByVisibility(db.DefaultContext, d, mentions) + resolved, err := ResolveIssueMentionsByVisibility(db.DefaultContext, issue, d, mentions) assert.NoError(t, err) ids := make([]int64, len(resolved)) for i, user := range resolved { @@ -534,3 +538,62 @@ func TestCorrectIssueStats(t *testing.T) { assert.NoError(t, err) assert.EqualValues(t, issueStats.OpenCount, issueAmount) } + +func TestIssueForeignReference(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + issue := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 4}).(*Issue) + assert.NotEqualValues(t, issue.Index, issue.ID) // make sure they are different to avoid false positive + + // it is fine for an issue to not have a foreign reference + err := issue.LoadAttributes() + assert.NoError(t, err) + assert.Nil(t, issue.ForeignReference) + + var foreignIndex int64 = 12345 + _, err = GetIssueByForeignIndex(context.Background(), 
issue.RepoID, foreignIndex) + assert.True(t, foreignreference.IsErrLocalIndexNotExist(err)) + + _, err = db.GetEngine(db.DefaultContext).Insert(&foreignreference.ForeignReference{ + LocalIndex: issue.Index, + ForeignIndex: strconv.FormatInt(foreignIndex, 10), + RepoID: issue.RepoID, + Type: foreignreference.TypeIssue, + }) + assert.NoError(t, err) + + err = issue.LoadAttributes() + assert.NoError(t, err) + + assert.EqualValues(t, issue.ForeignReference.ForeignIndex, strconv.FormatInt(foreignIndex, 10)) + + found, err := GetIssueByForeignIndex(context.Background(), issue.RepoID, foreignIndex) + assert.NoError(t, err) + assert.EqualValues(t, found.Index, issue.Index) +} + +func TestMilestoneList_LoadTotalTrackedTimes(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + miles := issues_model.MilestoneList{ + unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}).(*issues_model.Milestone), + } + + assert.NoError(t, miles.LoadTotalTrackedTimes()) + + assert.Equal(t, int64(3682), miles[0].TotalTrackedTime) +} + +func TestLoadTotalTrackedTime(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}).(*issues_model.Milestone) + + assert.NoError(t, milestone.LoadTotalTrackedTime()) + + assert.Equal(t, int64(3682), milestone.TotalTrackedTime) +} + +func TestCountIssues(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + count, err := CountIssues(&IssuesOptions{}) + assert.NoError(t, err) + assert.EqualValues(t, 15, count) +} diff --git a/models/issue_tracked_time.go b/models/issue_tracked_time.go index 2d7bef19e1..76ff874c59 100644 --- a/models/issue_tracked_time.go +++ b/models/issue_tracked_time.go @@ -53,7 +53,7 @@ func (t *TrackedTime) loadAttributes(ctx context.Context) (err error) { if err != nil { return } - err = t.Issue.loadRepo(ctx) + err = t.Issue.LoadRepo(ctx) if err != nil { return } @@ -170,11 +170,11 @@ func AddTime(user *user_model.User, issue *Issue, amount int64, created time.Tim return nil, err } - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return nil, err } - if _, err := createComment(ctx, &CreateCommentOptions{ + if _, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Issue: issue, Repo: issue.Repo, Doer: user, @@ -251,13 +251,13 @@ func DeleteIssueUserTimes(issue *Issue, user *user_model.User) error { return err } if removedTime == 0 { - return ErrNotExist{} + return db.ErrNotExist{} } - if err := issue.loadRepo(ctx); err != nil { + if err := issue.LoadRepo(ctx); err != nil { return err } - if _, err := createComment(ctx, &CreateCommentOptions{ + if _, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Issue: issue, Repo: issue.Repo, Doer: user, @@ -286,7 +286,7 @@ func DeleteTime(t *TrackedTime) error { return err } - if _, err := createComment(ctx, &CreateCommentOptions{ + if _, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Issue: t.Issue, Repo: t.Issue.Repo, Doer: t.User, @@ -311,7 +311,7 @@ func deleteTimes(e db.Engine, opts FindTrackedTimesOptions) (removedTime int64, func deleteTime(e db.Engine, t *TrackedTime) error { if t.Deleted { - return ErrNotExist{ID: t.ID} + return db.ErrNotExist{ID: t.ID} } t.Deleted = true _, err := e.ID(t.ID).Cols("deleted").Update(t) @@ -325,7 +325,7 @@ func GetTrackedTimeByID(id int64) (*TrackedTime, error) { if err != nil { return nil, err } else if !has { - return nil, ErrNotExist{ID: id} + return nil, db.ErrNotExist{ID: id} } return time, nil } 
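In the tracked-time hunks above, the package-local ErrNotExist is replaced by db.ErrNotExist. A rough caller-side sketch of the new check, assuming the db package exposes the usual IsErrNotExist helper (the helper itself does not appear in this diff):

if err := DeleteIssueUserTimes(issue, user); err != nil {
	if db.IsErrNotExist(err) {
		// the user had no tracked time on this issue; nothing to delete
		return nil
	}
	return err
}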
diff --git a/models/issue_xref.go b/models/issue_xref.go index 7b2f097c1c..cd1c122252 100644 --- a/models/issue_xref.go +++ b/models/issue_xref.go @@ -129,7 +129,7 @@ func (issue *Issue) createCrossReferences(stdCtx context.Context, ctx *crossRefe RefAction: xref.Action, RefIsPull: ctx.OrigIssue.IsPull, } - _, err := createComment(stdCtx, opts) + _, err := CreateCommentCtx(stdCtx, opts) if err != nil { return err } @@ -150,7 +150,7 @@ func (issue *Issue) getCrossReferences(stdCtx context.Context, ctx *crossReferen for _, ref := range allrefs { if ref.Owner == "" && ref.Name == "" { // Issues in the same repository - if err := ctx.OrigIssue.loadRepo(stdCtx); err != nil { + if err := ctx.OrigIssue.LoadRepo(stdCtx); err != nil { return nil, err } refRepo = ctx.OrigIssue.Repo @@ -204,7 +204,7 @@ func (issue *Issue) verifyReferencedIssue(stdCtx context.Context, ctx *crossRefe if has, _ := e.Get(refIssue); !has { return nil, references.XRefActionNone, nil } - if err := refIssue.loadRepo(stdCtx); err != nil { + if err := refIssue.LoadRepo(stdCtx); err != nil { return nil, references.XRefActionNone, err } @@ -215,7 +215,7 @@ func (issue *Issue) verifyReferencedIssue(stdCtx context.Context, ctx *crossRefe // Check doer permissions; set action to None if the doer can't change the destination if refIssue.RepoID != ctx.OrigIssue.RepoID || ref.Action != references.XRefActionNone { - perm, err := getUserRepoPermission(stdCtx, refIssue.Repo, ctx.Doer) + perm, err := GetUserRepoPermission(stdCtx, refIssue.Repo, ctx.Doer) if err != nil { return nil, references.XRefActionNone, err } @@ -249,7 +249,7 @@ func (comment *Comment) addCrossReferences(stdCtx context.Context, doer *user_mo if comment.Type != CommentTypeCode && comment.Type != CommentTypeComment { return nil } - if err := comment.loadIssue(db.GetEngine(stdCtx)); err != nil { + if err := comment.LoadIssueCtx(stdCtx); err != nil { return err } ctx := &crossReferencesContext{ @@ -282,7 +282,7 @@ func (comment *Comment) LoadRefIssue() (err error) { } comment.RefIssue, err = GetIssueByID(comment.RefIssueID) if err == nil { - err = comment.RefIssue.loadRepo(db.DefaultContext) + err = comment.RefIssue.LoadRepo(db.DefaultContext) } return } @@ -340,9 +340,9 @@ func (comment *Comment) RefIssueIdent() string { // \/ \/ |__| \/ \/ // ResolveCrossReferences will return the list of references to close/reopen by this PR -func (pr *PullRequest) ResolveCrossReferences() ([]*Comment, error) { +func (pr *PullRequest) ResolveCrossReferences(ctx context.Context) ([]*Comment, error) { unfiltered := make([]*Comment, 0, 5) - if err := db.GetEngine(db.DefaultContext). + if err := db.GetEngine(ctx). Where("ref_repo_id = ? AND ref_issue_id = ?", pr.Issue.RepoID, pr.Issue.ID). In("ref_action", []references.XRefAction{references.XRefActionCloses, references.XRefActionReopens}). OrderBy("id"). 
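Editorial note on the cross-reference hunks above: ResolveCrossReferences now takes a context.Context, and permission checks go through the exported GetUserRepoPermission with that same context. A short migration sketch for an existing call site, assuming no ambient context is available and db.DefaultContext is acceptable (mirroring the updated test in issue_xref_test.go):

package models

import (
	"context"
)

// closeOrReopenReferences is a hypothetical caller (not part of the patch)
// showing the new signature: the context is passed in explicitly instead of
// db.DefaultContext being hard-coded inside ResolveCrossReferences.
func closeOrReopenReferences(ctx context.Context, pr *PullRequest) error {
	refs, err := pr.ResolveCrossReferences(ctx) // was pr.ResolveCrossReferences()
	if err != nil {
		return err
	}
	for _, c := range refs {
		// Each entry is a *Comment carrying RefIssueID and a closes/reopens
		// RefAction; acting on it is outside the scope of this sketch.
		_ = c
	}
	return nil
}

Because GetUserRepoPermission is now exported and context-aware, the same context flows into the check of whether the doer may change the referenced issue.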
diff --git a/models/issue_xref_test.go b/models/issue_xref_test.go index 1deeb44ad5..677caa48ba 100644 --- a/models/issue_xref_test.go +++ b/models/issue_xref_test.go @@ -83,7 +83,7 @@ func TestXRef_NeuterCrossReferences(t *testing.T) { d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) i.Title = "title2, no mentions" - assert.NoError(t, i.ChangeTitle(d, title)) + assert.NoError(t, ChangeIssueTitle(i, d, title)) ref = unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment) assert.Equal(t, CommentTypeIssueRef, ref.Type) @@ -98,7 +98,7 @@ func TestXRef_ResolveCrossReferences(t *testing.T) { i1 := testCreateIssue(t, 1, 2, "title1", "content1", false) i2 := testCreateIssue(t, 1, 2, "title2", "content2", false) i3 := testCreateIssue(t, 1, 2, "title3", "content3", false) - _, err := i3.ChangeStatus(d, true) + _, err := ChangeIssueStatus(db.DefaultContext, i3, d, true) assert.NoError(t, err) pr := testCreatePR(t, 1, 2, "titlepr", fmt.Sprintf("closes #%d", i1.Index)) @@ -118,7 +118,7 @@ func TestXRef_ResolveCrossReferences(t *testing.T) { c4 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index)) r4 := unittest.AssertExistsAndLoadBean(t, &Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c4.ID}).(*Comment) - refs, err := pr.ResolveCrossReferences() + refs, err := pr.ResolveCrossReferences(db.DefaultContext) assert.NoError(t, err) assert.Len(t, refs, 3) assert.Equal(t, rp.ID, refs[0].ID, "bad ref rp: %+v", refs[0]) diff --git a/models/issues/main_test.go b/models/issues/main_test.go index af71f038d6..30f6ff02fb 100644 --- a/models/issues/main_test.go +++ b/models/issues/main_test.go @@ -9,8 +9,22 @@ import ( "testing" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/setting" ) -func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", ".."), "") +func init() { + setting.SetCustomPathAndConf("", "", "") + setting.LoadForTest() +} + +func TestMain(m *testing.M) { + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{ + "reaction.yml", + "user.yml", + "repository.yml", + "milestone.yml", + }, + }) } diff --git a/models/issue_milestone.go b/models/issues/milestone.go similarity index 90% rename from models/issue_milestone.go rename to models/issues/milestone.go index a321718513..07c38754d4 100644 --- a/models/issue_milestone.go +++ b/models/issues/milestone.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package models +package issues import ( "context" @@ -12,7 +12,6 @@ import ( "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" - user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" @@ -20,6 +19,26 @@ import ( "xorm.io/builder" ) +// ErrMilestoneNotExist represents a "MilestoneNotExist" kind of error. +type ErrMilestoneNotExist struct { + ID int64 + RepoID int64 + Name string +} + +// IsErrMilestoneNotExist checks if an error is a ErrMilestoneNotExist. 
+func IsErrMilestoneNotExist(err error) bool { + _, ok := err.(ErrMilestoneNotExist) + return ok +} + +func (err ErrMilestoneNotExist) Error() string { + if len(err.Name) > 0 { + return fmt.Sprintf("milestone does not exist [name: %s, repo_id: %d]", err.Name, err.RepoID) + } + return fmt.Sprintf("milestone does not exist [id: %d, repo_id: %d]", err.ID, err.RepoID) +} + // Milestone represents a milestone of repository. type Milestone struct { ID int64 `xorm:"pk autoincr"` @@ -105,9 +124,10 @@ func NewMilestone(m *Milestone) (err error) { return committer.Commit() } -func getMilestoneByRepoID(e db.Engine, repoID, id int64) (*Milestone, error) { +// GetMilestoneByRepoID returns the milestone in a repository. +func GetMilestoneByRepoID(ctx context.Context, repoID, id int64) (*Milestone, error) { m := new(Milestone) - has, err := e.ID(id).Where("repo_id=?", repoID).Get(m) + has, err := db.GetEngine(ctx).ID(id).Where("repo_id=?", repoID).Get(m) if err != nil { return nil, err } else if !has { @@ -116,11 +136,6 @@ func getMilestoneByRepoID(e db.Engine, repoID, id int64) (*Milestone, error) { return m, nil } -// GetMilestoneByRepoID returns the milestone in a repository. -func GetMilestoneByRepoID(repoID, id int64) (*Milestone, error) { - return getMilestoneByRepoID(db.GetEngine(db.DefaultContext), repoID, id) -} - // GetMilestoneByRepoIDANDName return a milestone if one exist by name and repo func GetMilestoneByRepoIDANDName(repoID int64, name string) (*Milestone, error) { var mile Milestone @@ -166,11 +181,11 @@ func updateMilestone(ctx context.Context, m *Milestone) error { if err != nil { return err } - return updateMilestoneCounters(ctx, m.ID) + return UpdateMilestoneCounters(ctx, m.ID) } -// updateMilestoneCounters calculates NumIssues, NumClosesIssues and Completeness -func updateMilestoneCounters(ctx context.Context, id int64) error { +// UpdateMilestoneCounters calculates NumIssues, NumClosesIssues and Completeness +func UpdateMilestoneCounters(ctx context.Context, id int64) error { e := db.GetEngine(ctx) _, err := e.ID(id). SetExpr("num_issues", builder.Select("count(*)").From("issue").Where( @@ -250,65 +265,9 @@ func changeMilestoneStatus(ctx context.Context, m *Milestone, isClosed bool) err return updateRepoMilestoneNum(ctx, m.RepoID) } -func changeMilestoneAssign(ctx context.Context, doer *user_model.User, issue *Issue, oldMilestoneID int64) error { - if err := updateIssueCols(ctx, issue, "milestone_id"); err != nil { - return err - } - - if oldMilestoneID > 0 { - if err := updateMilestoneCounters(ctx, oldMilestoneID); err != nil { - return err - } - } - - if issue.MilestoneID > 0 { - if err := updateMilestoneCounters(ctx, issue.MilestoneID); err != nil { - return err - } - } - - if oldMilestoneID > 0 || issue.MilestoneID > 0 { - if err := issue.loadRepo(ctx); err != nil { - return err - } - - opts := &CreateCommentOptions{ - Type: CommentTypeMilestone, - Doer: doer, - Repo: issue.Repo, - Issue: issue, - OldMilestoneID: oldMilestoneID, - MilestoneID: issue.MilestoneID, - } - if _, err := createComment(ctx, opts); err != nil { - return err - } - } - - return nil -} - -// ChangeMilestoneAssign changes assignment of milestone for issue. 
-func ChangeMilestoneAssign(issue *Issue, doer *user_model.User, oldMilestoneID int64) (err error) { - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() - - if err = changeMilestoneAssign(ctx, doer, issue, oldMilestoneID); err != nil { - return err - } - - if err = committer.Commit(); err != nil { - return fmt.Errorf("Commit: %v", err) - } - return nil -} - // DeleteMilestoneByRepoID deletes a milestone from a repository. func DeleteMilestoneByRepoID(repoID, id int64) error { - m, err := GetMilestoneByRepoID(repoID, id) + m, err := GetMilestoneByRepoID(db.DefaultContext, repoID, id) if err != nil { if IsErrMilestoneNotExist(err) { return nil diff --git a/models/issue_milestone_test.go b/models/issues/milestone_test.go similarity index 67% rename from models/issue_milestone_test.go rename to models/issues/milestone_test.go index 6593f78fa1..09f51de45c 100644 --- a/models/issue_milestone_test.go +++ b/models/issues/milestone_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package models +package issues import ( "sort" @@ -11,10 +11,8 @@ import ( "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" - user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/modules/timeutil" "github.com/stretchr/testify/assert" "xorm.io/builder" @@ -25,28 +23,15 @@ func TestMilestone_State(t *testing.T) { assert.Equal(t, api.StateClosed, (&Milestone{IsClosed: true}).State()) } -func TestNewMilestone(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - milestone := &Milestone{ - RepoID: 1, - Name: "milestoneName", - Content: "milestoneContent", - } - - assert.NoError(t, NewMilestone(milestone)) - unittest.AssertExistsAndLoadBean(t, milestone) - unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &Milestone{}) -} - func TestGetMilestoneByRepoID(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - milestone, err := GetMilestoneByRepoID(1, 1) + milestone, err := GetMilestoneByRepoID(db.DefaultContext, 1, 1) assert.NoError(t, err) assert.EqualValues(t, 1, milestone.ID) assert.EqualValues(t, 1, milestone.RepoID) - _, err = GetMilestoneByRepoID(unittest.NonexistentID, unittest.NonexistentID) + _, err = GetMilestoneByRepoID(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID) assert.True(t, IsErrMilestoneNotExist(err)) } @@ -160,18 +145,6 @@ func TestGetMilestones(t *testing.T) { }) } -func TestUpdateMilestone(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - milestone := unittest.AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone) - milestone.Name = " newMilestoneName " - milestone.Content = "newMilestoneContent" - assert.NoError(t, UpdateMilestone(milestone, milestone.IsClosed)) - milestone = unittest.AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone) - assert.EqualValues(t, "newMilestoneName", milestone.Name) - unittest.CheckConsistencyFor(t, &Milestone{}) -} - func TestCountRepoMilestones(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) test := func(repoID int64) { @@ -206,78 +179,6 @@ func TestCountRepoClosedMilestones(t *testing.T) { assert.EqualValues(t, 0, count) } -func TestChangeMilestoneStatus(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - milestone := 
unittest.AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone) - - assert.NoError(t, ChangeMilestoneStatus(milestone, true)) - unittest.AssertExistsAndLoadBean(t, &Milestone{ID: 1}, "is_closed=1") - unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &Milestone{}) - - assert.NoError(t, ChangeMilestoneStatus(milestone, false)) - unittest.AssertExistsAndLoadBean(t, &Milestone{ID: 1}, "is_closed=0") - unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &Milestone{}) -} - -func TestUpdateMilestoneCounters(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - issue := unittest.AssertExistsAndLoadBean(t, &Issue{MilestoneID: 1}, - "is_closed=0").(*Issue) - - issue.IsClosed = true - issue.ClosedUnix = timeutil.TimeStampNow() - _, err := db.GetEngine(db.DefaultContext).ID(issue.ID).Cols("is_closed", "closed_unix").Update(issue) - assert.NoError(t, err) - assert.NoError(t, updateMilestoneCounters(db.DefaultContext, issue.MilestoneID)) - unittest.CheckConsistencyFor(t, &Milestone{}) - - issue.IsClosed = false - issue.ClosedUnix = 0 - _, err = db.GetEngine(db.DefaultContext).ID(issue.ID).Cols("is_closed", "closed_unix").Update(issue) - assert.NoError(t, err) - assert.NoError(t, updateMilestoneCounters(db.DefaultContext, issue.MilestoneID)) - unittest.CheckConsistencyFor(t, &Milestone{}) -} - -func TestChangeMilestoneAssign(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - issue := unittest.AssertExistsAndLoadBean(t, &Issue{RepoID: 1}).(*Issue) - doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - assert.NotNil(t, issue) - assert.NotNil(t, doer) - - oldMilestoneID := issue.MilestoneID - issue.MilestoneID = 2 - assert.NoError(t, ChangeMilestoneAssign(issue, doer, oldMilestoneID)) - unittest.AssertExistsAndLoadBean(t, &Comment{ - IssueID: issue.ID, - Type: CommentTypeMilestone, - MilestoneID: issue.MilestoneID, - OldMilestoneID: oldMilestoneID, - }) - unittest.CheckConsistencyFor(t, &Milestone{}, &Issue{}) -} - -func TestDeleteMilestoneByRepoID(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - assert.NoError(t, DeleteMilestoneByRepoID(1, 1)) - unittest.AssertNotExistsBean(t, &Milestone{ID: 1}) - unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: 1}) - - assert.NoError(t, DeleteMilestoneByRepoID(unittest.NonexistentID, unittest.NonexistentID)) -} - -func TestMilestoneList_LoadTotalTrackedTimes(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - miles := MilestoneList{ - unittest.AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone), - } - - assert.NoError(t, miles.LoadTotalTrackedTimes()) - - assert.Equal(t, int64(3682), miles[0].TotalTrackedTime) -} - func TestCountMilestonesByRepoIDs(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) milestonesCount := func(repoID int64) (int, int) { @@ -343,15 +244,6 @@ func TestGetMilestonesByRepoIDs(t *testing.T) { }) } -func TestLoadTotalTrackedTime(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - milestone := unittest.AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone) - - assert.NoError(t, milestone.LoadTotalTrackedTime()) - - assert.Equal(t, int64(3682), milestone.TotalTrackedTime) -} - func TestGetMilestonesStats(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) diff --git a/models/issue_reaction.go b/models/issues/reaction.go similarity index 68% rename from models/issue_reaction.go rename to models/issues/reaction.go index 
45b1d64fe1..87d6ff4310 100644 --- a/models/issue_reaction.go +++ b/models/issues/reaction.go @@ -2,21 +2,53 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package models +package issues import ( "bytes" + "context" "fmt" "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "xorm.io/builder" ) +// ErrForbiddenIssueReaction is used when a forbidden reaction was try to created +type ErrForbiddenIssueReaction struct { + Reaction string +} + +// IsErrForbiddenIssueReaction checks if an error is a ErrForbiddenIssueReaction. +func IsErrForbiddenIssueReaction(err error) bool { + _, ok := err.(ErrForbiddenIssueReaction) + return ok +} + +func (err ErrForbiddenIssueReaction) Error() string { + return fmt.Sprintf("'%s' is not an allowed reaction", err.Reaction) +} + +// ErrReactionAlreadyExist is used when a existing reaction was try to created +type ErrReactionAlreadyExist struct { + Reaction string +} + +// IsErrReactionAlreadyExist checks if an error is a ErrReactionAlreadyExist. +func IsErrReactionAlreadyExist(err error) bool { + _, ok := err.(ErrReactionAlreadyExist) + return ok +} + +func (err ErrReactionAlreadyExist) Error() string { + return fmt.Sprintf("reaction '%s' already exists", err.Reaction) +} + // Reaction represents a reactions on issues and comments. type Reaction struct { ID int64 `xorm:"pk autoincr"` @@ -30,6 +62,36 @@ type Reaction struct { CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` } +// LoadUser load user of reaction +func (r *Reaction) LoadUser() (*user_model.User, error) { + if r.User != nil { + return r.User, nil + } + user, err := user_model.GetUserByIDCtx(db.DefaultContext, r.UserID) + if err != nil { + return nil, err + } + r.User = user + return user, nil +} + +// RemapExternalUser ExternalUserRemappable interface +func (r *Reaction) RemapExternalUser(externalName string, externalID, userID int64) error { + r.OriginalAuthor = externalName + r.OriginalAuthorID = externalID + r.UserID = userID + return nil +} + +// GetUserID ExternalUserRemappable interface +func (r *Reaction) GetUserID() int64 { return r.UserID } + +// GetExternalName ExternalUserRemappable interface +func (r *Reaction) GetExternalName() string { return r.OriginalAuthor } + +// GetExternalID ExternalUserRemappable interface +func (r *Reaction) GetExternalID() int64 { return r.OriginalAuthorID } + func init() { db.RegisterModel(new(Reaction)) } @@ -71,24 +133,25 @@ func (opts *FindReactionsOptions) toConds() builder.Cond { } // FindCommentReactions returns a ReactionList of all reactions from an comment -func FindCommentReactions(comment *Comment) (ReactionList, int64, error) { - return findReactions(db.GetEngine(db.DefaultContext), FindReactionsOptions{ - IssueID: comment.IssueID, - CommentID: comment.ID, +func FindCommentReactions(issueID, commentID int64) (ReactionList, int64, error) { + return FindReactions(db.DefaultContext, FindReactionsOptions{ + IssueID: issueID, + CommentID: commentID, }) } // FindIssueReactions returns a ReactionList of all reactions from an issue -func FindIssueReactions(issue *Issue, listOptions db.ListOptions) (ReactionList, int64, error) { - return findReactions(db.GetEngine(db.DefaultContext), FindReactionsOptions{ +func FindIssueReactions(issueID int64, listOptions db.ListOptions) (ReactionList, int64, error) { + 
return FindReactions(db.DefaultContext, FindReactionsOptions{ ListOptions: listOptions, - IssueID: issue.ID, + IssueID: issueID, CommentID: -1, }) } -func findReactions(e db.Engine, opts FindReactionsOptions) ([]*Reaction, int64, error) { - sess := e. +// FindReactions returns a ReactionList of all reactions from an issue or a comment +func FindReactions(ctx context.Context, opts FindReactionsOptions) (ReactionList, int64, error) { + sess := db.GetEngine(ctx). Where(opts.toConds()). In("reaction.`type`", setting.UI.Reactions). Asc("reaction.issue_id", "reaction.comment_id", "reaction.created_unix", "reaction.id") @@ -105,24 +168,21 @@ func findReactions(e db.Engine, opts FindReactionsOptions) ([]*Reaction, int64, return reactions, count, err } -func createReaction(e db.Engine, opts *ReactionOptions) (*Reaction, error) { +func createReaction(ctx context.Context, opts *ReactionOptions) (*Reaction, error) { reaction := &Reaction{ - Type: opts.Type, - UserID: opts.Doer.ID, - IssueID: opts.Issue.ID, + Type: opts.Type, + UserID: opts.DoerID, + IssueID: opts.IssueID, + CommentID: opts.CommentID, } findOpts := FindReactionsOptions{ - IssueID: opts.Issue.ID, - CommentID: -1, // reaction to issue only + IssueID: opts.IssueID, + CommentID: opts.CommentID, Reaction: opts.Type, - UserID: opts.Doer.ID, - } - if opts.Comment != nil { - reaction.CommentID = opts.Comment.ID - findOpts.CommentID = opts.Comment.ID + UserID: opts.DoerID, } - existingR, _, err := findReactions(e, findOpts) + existingR, _, err := FindReactions(ctx, findOpts) if err != nil { return nil, err } @@ -130,7 +190,7 @@ func createReaction(e db.Engine, opts *ReactionOptions) (*Reaction, error) { return existingR[0], ErrReactionAlreadyExist{Reaction: opts.Type} } - if _, err := e.Insert(reaction); err != nil { + if err := db.Insert(ctx, reaction); err != nil { return nil, err } @@ -139,10 +199,10 @@ func createReaction(e db.Engine, opts *ReactionOptions) (*Reaction, error) { // ReactionOptions defines options for creating or deleting reactions type ReactionOptions struct { - Type string - Doer *user_model.User - Issue *Issue - Comment *Comment + Type string + DoerID int64 + IssueID int64 + CommentID int64 } // CreateReaction creates reaction for issue or comment. @@ -157,7 +217,7 @@ func CreateReaction(opts *ReactionOptions) (*Reaction, error) { } defer committer.Close() - reaction, err := createReaction(db.GetEngine(ctx), opts) + reaction, err := createReaction(ctx, opts) if err != nil { return reaction, err } @@ -169,88 +229,56 @@ func CreateReaction(opts *ReactionOptions) (*Reaction, error) { } // CreateIssueReaction creates a reaction on issue. -func CreateIssueReaction(doer *user_model.User, issue *Issue, content string) (*Reaction, error) { +func CreateIssueReaction(doerID, issueID int64, content string) (*Reaction, error) { return CreateReaction(&ReactionOptions{ - Type: content, - Doer: doer, - Issue: issue, + Type: content, + DoerID: doerID, + IssueID: issueID, }) } // CreateCommentReaction creates a reaction on comment. 
-func CreateCommentReaction(doer *user_model.User, issue *Issue, comment *Comment, content string) (*Reaction, error) { +func CreateCommentReaction(doerID, issueID, commentID int64, content string) (*Reaction, error) { return CreateReaction(&ReactionOptions{ - Type: content, - Doer: doer, - Issue: issue, - Comment: comment, + Type: content, + DoerID: doerID, + IssueID: issueID, + CommentID: commentID, }) } -func deleteReaction(e db.Engine, opts *ReactionOptions) error { +// DeleteReaction deletes reaction for issue or comment. +func DeleteReaction(ctx context.Context, opts *ReactionOptions) error { reaction := &Reaction{ - Type: opts.Type, + Type: opts.Type, + UserID: opts.DoerID, + IssueID: opts.IssueID, + CommentID: opts.CommentID, } - if opts.Doer != nil { - reaction.UserID = opts.Doer.ID - } - if opts.Issue != nil { - reaction.IssueID = opts.Issue.ID - } - if opts.Comment != nil { - reaction.CommentID = opts.Comment.ID - } - _, err := e.Where("original_author_id = 0").Delete(reaction) + + _, err := db.GetEngine(ctx).Where("original_author_id = 0").Delete(reaction) return err } -// DeleteReaction deletes reaction for issue or comment. -func DeleteReaction(opts *ReactionOptions) error { - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() - - if err := deleteReaction(db.GetEngine(ctx), opts); err != nil { - return err - } - - return committer.Commit() -} - // DeleteIssueReaction deletes a reaction on issue. -func DeleteIssueReaction(doer *user_model.User, issue *Issue, content string) error { - return DeleteReaction(&ReactionOptions{ - Type: content, - Doer: doer, - Issue: issue, +func DeleteIssueReaction(doerID, issueID int64, content string) error { + return DeleteReaction(db.DefaultContext, &ReactionOptions{ + Type: content, + DoerID: doerID, + IssueID: issueID, }) } // DeleteCommentReaction deletes a reaction on comment. -func DeleteCommentReaction(doer *user_model.User, issue *Issue, comment *Comment, content string) error { - return DeleteReaction(&ReactionOptions{ - Type: content, - Doer: doer, - Issue: issue, - Comment: comment, +func DeleteCommentReaction(doerID, issueID, commentID int64, content string) error { + return DeleteReaction(db.DefaultContext, &ReactionOptions{ + Type: content, + DoerID: doerID, + IssueID: issueID, + CommentID: commentID, }) } -// LoadUser load user of reaction -func (r *Reaction) LoadUser() (*user_model.User, error) { - if r.User != nil { - return r.User, nil - } - user, err := user_model.GetUserByIDCtx(db.DefaultContext, r.UserID) - if err != nil { - return nil, err - } - r.User = user - return user, nil -} - // ReactionList represents list of reactions type ReactionList []*Reaction @@ -286,17 +314,26 @@ func (list ReactionList) getUserIDs() []int64 { userIDs[reaction.UserID] = struct{}{} } } - return keysInt64(userIDs) + return container.KeysInt64(userIDs) } -func (list ReactionList) loadUsers(e db.Engine, repo *repo_model.Repository) ([]*user_model.User, error) { +func valuesUser(m map[int64]*user_model.User) []*user_model.User { + values := make([]*user_model.User, 0, len(m)) + for _, v := range m { + values = append(values, v) + } + return values +} + +// LoadUsers loads reactions' all users +func (list ReactionList) LoadUsers(ctx context.Context, repo *repo_model.Repository) ([]*user_model.User, error) { if len(list) == 0 { return nil, nil } userIDs := list.getUserIDs() userMaps := make(map[int64]*user_model.User, len(userIDs)) - err := e. + err := db.GetEngine(ctx). In("id", userIDs). 
Find(&userMaps) if err != nil { @@ -315,11 +352,6 @@ func (list ReactionList) loadUsers(e db.Engine, repo *repo_model.Repository) ([] return valuesUser(userMaps), nil } -// LoadUsers loads reactions' all users -func (list ReactionList) LoadUsers(repo *repo_model.Repository) ([]*user_model.User, error) { - return list.loadUsers(db.GetEngine(db.DefaultContext), repo) -} - // GetFirstUsers returns first reacted user display names separated by comma func (list ReactionList) GetFirstUsers() string { var buffer bytes.Buffer @@ -343,20 +375,3 @@ func (list ReactionList) GetMoreUserCount() int { } return len(list) - setting.UI.ReactionMaxUserNum } - -// RemapExternalUser ExternalUserRemappable interface -func (r *Reaction) RemapExternalUser(externalName string, externalID, userID int64) error { - r.OriginalAuthor = externalName - r.OriginalAuthorID = externalID - r.UserID = userID - return nil -} - -// GetUserID ExternalUserRemappable interface -func (r *Reaction) GetUserID() int64 { return r.UserID } - -// GetExternalName ExternalUserRemappable interface -func (r *Reaction) GetExternalName() string { return r.OriginalAuthor } - -// GetExternalID ExternalUserRemappable interface -func (r *Reaction) GetExternalID() int64 { return r.OriginalAuthorID } diff --git a/models/issue_reaction_test.go b/models/issues/reaction_test.go similarity index 59% rename from models/issue_reaction_test.go rename to models/issues/reaction_test.go index 886d19e55f..b1216a3a69 100644 --- a/models/issue_reaction_test.go +++ b/models/issues/reaction_test.go @@ -1,7 +1,8 @@ // Copyright 2017 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package models + +package issues import ( "testing" @@ -15,13 +16,13 @@ import ( "github.com/stretchr/testify/assert" ) -func addReaction(t *testing.T, doer *user_model.User, issue *Issue, comment *Comment, content string) { +func addReaction(t *testing.T, doerID, issueID, commentID int64, content string) { var reaction *Reaction var err error - if comment == nil { - reaction, err = CreateIssueReaction(doer, issue, content) + if commentID == 0 { + reaction, err = CreateIssueReaction(doerID, issueID, content) } else { - reaction, err = CreateCommentReaction(doer, issue, comment, content) + reaction, err = CreateCommentReaction(doerID, issueID, commentID, content) } assert.NoError(t, err) assert.NotNil(t, reaction) @@ -32,11 +33,11 @@ func TestIssueAddReaction(t *testing.T) { user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - issue1 := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue) + var issue1ID int64 = 1 - addReaction(t, user1, issue1, nil, "heart") + addReaction(t, user1.ID, issue1ID, 0, "heart") - unittest.AssertExistsAndLoadBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1.ID}) + unittest.AssertExistsAndLoadBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID}) } func TestIssueAddDuplicateReaction(t *testing.T) { @@ -44,19 +45,19 @@ func TestIssueAddDuplicateReaction(t *testing.T) { user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - issue1 := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue) + var issue1ID int64 = 1 - addReaction(t, user1, issue1, nil, "heart") + addReaction(t, user1.ID, issue1ID, 0, "heart") reaction, err := CreateReaction(&ReactionOptions{ - Doer: user1, - Issue: issue1, - Type: "heart", + DoerID: user1.ID, + IssueID: issue1ID, + Type: 
"heart", }) assert.Error(t, err) assert.Equal(t, ErrReactionAlreadyExist{Reaction: "heart"}, err) - existingR := unittest.AssertExistsAndLoadBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1.ID}).(*Reaction) + existingR := unittest.AssertExistsAndLoadBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID}).(*Reaction) assert.Equal(t, existingR.ID, reaction.ID) } @@ -65,14 +66,14 @@ func TestIssueDeleteReaction(t *testing.T) { user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - issue1 := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue) + var issue1ID int64 = 1 - addReaction(t, user1, issue1, nil, "heart") + addReaction(t, user1.ID, issue1ID, 0, "heart") - err := DeleteIssueReaction(user1, issue1, "heart") + err := DeleteIssueReaction(user1.ID, issue1ID, "heart") assert.NoError(t, err) - unittest.AssertNotExistsBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1.ID}) + unittest.AssertNotExistsBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID}) } func TestIssueReactionCount(t *testing.T) { @@ -86,22 +87,26 @@ func TestIssueReactionCount(t *testing.T) { user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}).(*user_model.User) ghost := user_model.NewGhostUser() - issue := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 2}).(*Issue) + var issueID int64 = 2 + repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) - addReaction(t, user1, issue, nil, "heart") - addReaction(t, user2, issue, nil, "heart") - addReaction(t, user3, issue, nil, "heart") - addReaction(t, user3, issue, nil, "+1") - addReaction(t, user4, issue, nil, "+1") - addReaction(t, user4, issue, nil, "heart") - addReaction(t, ghost, issue, nil, "-1") + addReaction(t, user1.ID, issueID, 0, "heart") + addReaction(t, user2.ID, issueID, 0, "heart") + addReaction(t, user3.ID, issueID, 0, "heart") + addReaction(t, user3.ID, issueID, 0, "+1") + addReaction(t, user4.ID, issueID, 0, "+1") + addReaction(t, user4.ID, issueID, 0, "heart") + addReaction(t, ghost.ID, issueID, 0, "-1") - err := issue.loadReactions(db.DefaultContext) + reactionsList, _, err := FindReactions(db.DefaultContext, FindReactionsOptions{ + IssueID: issueID, + }) + assert.NoError(t, err) + assert.Len(t, reactionsList, 7) + _, err = reactionsList.LoadUsers(db.DefaultContext, repo) assert.NoError(t, err) - assert.Len(t, issue.Reactions, 7) - - reactions := issue.Reactions.GroupByType() + reactions := reactionsList.GroupByType() assert.Len(t, reactions["heart"], 4) assert.Equal(t, 2, reactions["heart"].GetMoreUserCount()) assert.Equal(t, user1.DisplayName()+", "+user2.DisplayName(), reactions["heart"].GetFirstUsers()) @@ -118,13 +123,12 @@ func TestIssueCommentAddReaction(t *testing.T) { user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - issue1 := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue) + var issue1ID int64 = 1 + var comment1ID int64 = 1 - comment1 := unittest.AssertExistsAndLoadBean(t, &Comment{ID: 1}).(*Comment) + addReaction(t, user1.ID, issue1ID, comment1ID, "heart") - addReaction(t, user1, issue1, comment1, "heart") - - unittest.AssertExistsAndLoadBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1.ID, CommentID: comment1.ID}) + unittest.AssertExistsAndLoadBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID, CommentID: comment1ID}) } func TestIssueCommentDeleteReaction(t *testing.T) { @@ -135,21 +139,22 @@ func 
TestIssueCommentDeleteReaction(t *testing.T) { user3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}).(*user_model.User) - issue1 := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue) - repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue1.RepoID}).(*repo_model.Repository) + var issue1ID int64 = 1 + var comment1ID int64 = 1 - comment1 := unittest.AssertExistsAndLoadBean(t, &Comment{ID: 1}).(*Comment) + addReaction(t, user1.ID, issue1ID, comment1ID, "heart") + addReaction(t, user2.ID, issue1ID, comment1ID, "heart") + addReaction(t, user3.ID, issue1ID, comment1ID, "heart") + addReaction(t, user4.ID, issue1ID, comment1ID, "+1") - addReaction(t, user1, issue1, comment1, "heart") - addReaction(t, user2, issue1, comment1, "heart") - addReaction(t, user3, issue1, comment1, "heart") - addReaction(t, user4, issue1, comment1, "+1") - - err := comment1.LoadReactions(repo1) + reactionsList, _, err := FindReactions(db.DefaultContext, FindReactionsOptions{ + IssueID: issue1ID, + CommentID: comment1ID, + }) assert.NoError(t, err) - assert.Len(t, comment1.Reactions, 4) + assert.Len(t, reactionsList, 4) - reactions := comment1.Reactions.GroupByType() + reactions := reactionsList.GroupByType() assert.Len(t, reactions["heart"], 3) assert.Len(t, reactions["+1"], 1) } @@ -159,12 +164,11 @@ func TestIssueCommentReactionCount(t *testing.T) { user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - issue1 := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue) + var issue1ID int64 = 1 + var comment1ID int64 = 1 - comment1 := unittest.AssertExistsAndLoadBean(t, &Comment{ID: 1}).(*Comment) + addReaction(t, user1.ID, issue1ID, comment1ID, "heart") + assert.NoError(t, DeleteCommentReaction(user1.ID, issue1ID, comment1ID, "heart")) - addReaction(t, user1, issue1, comment1, "heart") - assert.NoError(t, DeleteCommentReaction(user1, issue1, comment1, "heart")) - - unittest.AssertNotExistsBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1.ID, CommentID: comment1.ID}) + unittest.AssertNotExistsBean(t, &Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID, CommentID: comment1ID}) } diff --git a/models/lfs_lock.go b/models/lfs_lock.go index a77dd24e9f..b5f8e4907f 100644 --- a/models/lfs_lock.go +++ b/models/lfs_lock.go @@ -5,6 +5,7 @@ package models import ( + "context" "fmt" "path" "strings" @@ -42,15 +43,20 @@ func cleanPath(p string) string { // CreateLFSLock creates a new lock. func CreateLFSLock(repo *repo_model.Repository, lock *LFSLock) (*LFSLock, error) { - err := CheckLFSAccessForRepo(lock.OwnerID, repo, perm.AccessModeWrite) + dbCtx, committer, err := db.TxContext() if err != nil { return nil, err } + defer committer.Close() + + if err := CheckLFSAccessForRepo(dbCtx, lock.OwnerID, repo, perm.AccessModeWrite); err != nil { + return nil, err + } lock.Path = cleanPath(lock.Path) lock.RepoID = repo.ID - l, err := GetLFSLock(repo, lock.Path) + l, err := GetLFSLock(dbCtx, repo, lock.Path) if err == nil { return l, ErrLFSLockAlreadyExist{lock.RepoID, lock.Path} } @@ -58,15 +64,18 @@ func CreateLFSLock(repo *repo_model.Repository, lock *LFSLock) (*LFSLock, error) return nil, err } - err = db.Insert(db.DefaultContext, lock) - return lock, err + if err := db.Insert(dbCtx, lock); err != nil { + return nil, err + } + + return lock, committer.Commit() } // GetLFSLock returns release by given path. 
-func GetLFSLock(repo *repo_model.Repository, path string) (*LFSLock, error) { +func GetLFSLock(ctx context.Context, repo *repo_model.Repository, path string) (*LFSLock, error) { path = cleanPath(path) rel := &LFSLock{RepoID: repo.ID} - has, err := db.GetEngine(db.DefaultContext).Where("lower(path) = ?", strings.ToLower(path)).Get(rel) + has, err := db.GetEngine(ctx).Where("lower(path) = ?", strings.ToLower(path)).Get(rel) if err != nil { return nil, err } @@ -77,9 +86,9 @@ func GetLFSLock(repo *repo_model.Repository, path string) (*LFSLock, error) { } // GetLFSLockByID returns release by given id. -func GetLFSLockByID(id int64) (*LFSLock, error) { +func GetLFSLockByID(ctx context.Context, id int64) (*LFSLock, error) { lock := new(LFSLock) - has, err := db.GetEngine(db.DefaultContext).ID(id).Get(lock) + has, err := db.GetEngine(ctx).ID(id).Get(lock) if err != nil { return nil, err } else if !has { @@ -127,13 +136,18 @@ func CountLFSLockByRepoID(repoID int64) (int64, error) { // DeleteLFSLockByID deletes a lock by given ID. func DeleteLFSLockByID(id int64, repo *repo_model.Repository, u *user_model.User, force bool) (*LFSLock, error) { - lock, err := GetLFSLockByID(id) + dbCtx, committer, err := db.TxContext() + if err != nil { + return nil, err + } + defer committer.Close() + + lock, err := GetLFSLockByID(dbCtx, id) if err != nil { return nil, err } - err = CheckLFSAccessForRepo(u.ID, repo, perm.AccessModeWrite) - if err != nil { + if err := CheckLFSAccessForRepo(dbCtx, u.ID, repo, perm.AccessModeWrite); err != nil { return nil, err } @@ -141,20 +155,23 @@ func DeleteLFSLockByID(id int64, repo *repo_model.Repository, u *user_model.User return nil, fmt.Errorf("user doesn't own lock and force flag is not set") } - _, err = db.GetEngine(db.DefaultContext).ID(id).Delete(new(LFSLock)) - return lock, err + if _, err := db.GetEngine(dbCtx).ID(id).Delete(new(LFSLock)); err != nil { + return nil, err + } + + return lock, committer.Commit() } // CheckLFSAccessForRepo check needed access mode base on action -func CheckLFSAccessForRepo(ownerID int64, repo *repo_model.Repository, mode perm.AccessMode) error { +func CheckLFSAccessForRepo(ctx context.Context, ownerID int64, repo *repo_model.Repository, mode perm.AccessMode) error { if ownerID == 0 { return ErrLFSUnauthorizedAction{repo.ID, "undefined", mode} } - u, err := user_model.GetUserByID(ownerID) + u, err := user_model.GetUserByIDCtx(ctx, ownerID) if err != nil { return err } - perm, err := GetUserRepoPermission(repo, u) + perm, err := GetUserRepoPermission(ctx, repo, u) if err != nil { return err } diff --git a/models/main_test.go b/models/main_test.go index 8d5291a8aa..96231e4704 100644 --- a/models/main_test.go +++ b/models/main_test.go @@ -7,6 +7,8 @@ package models import ( "testing" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -28,12 +30,14 @@ func TestFixturesAreConsistent(t *testing.T) { &repo_model.Repository{}, &Issue{}, &PullRequest{}, - &Milestone{}, + &issues_model.Milestone{}, &Label{}, - &Team{}, + &organization.Team{}, &Action{}) } func TestMain(m *testing.M) { - unittest.MainTest(m, "..") + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: "..", + }) } diff --git a/models/migrate.go b/models/migrate.go index bbfba1fa1e..7b12bc9c93 100644 --- a/models/migrate.go +++ b/models/migrate.go @@ -8,13 +8,14 @@ import ( "context" 
"code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/modules/structs" "xorm.io/builder" ) // InsertMilestones creates milestones of repository. -func InsertMilestones(ms ...*Milestone) (err error) { +func InsertMilestones(ms ...*issues_model.Milestone) (err error) { if len(ms) == 0 { return nil } @@ -83,6 +84,13 @@ func insertIssue(ctx context.Context, issue *Issue) error { } } + if issue.ForeignReference != nil { + issue.ForeignReference.LocalIndex = issue.Index + if _, err := sess.Insert(issue.ForeignReference); err != nil { + return err + } + } + return nil } diff --git a/models/migrate_test.go b/models/migrate_test.go index d85dcbfeef..ce28b3ca7c 100644 --- a/models/migrate_test.go +++ b/models/migrate_test.go @@ -5,8 +5,12 @@ package models import ( + "strconv" "testing" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/foreignreference" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -19,7 +23,7 @@ func TestMigrate_InsertMilestones(t *testing.T) { reponame := "repo1" repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{Name: reponame}).(*repo_model.Repository) name := "milestonetest1" - ms := &Milestone{ + ms := &issues_model.Milestone{ RepoID: repo.ID, Name: name, } @@ -29,7 +33,7 @@ func TestMigrate_InsertMilestones(t *testing.T) { repoModified := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo.ID}).(*repo_model.Repository) assert.EqualValues(t, repo.NumMilestones+1, repoModified.NumMilestones) - unittest.CheckConsistencyFor(t, &Milestone{}) + unittest.CheckConsistencyFor(t, &issues_model.Milestone{}) } func assertCreateIssues(t *testing.T, isPull bool) { @@ -38,13 +42,14 @@ func assertCreateIssues(t *testing.T, isPull bool) { repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{Name: reponame}).(*repo_model.Repository) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID}).(*user_model.User) label := unittest.AssertExistsAndLoadBean(t, &Label{ID: 1}).(*Label) - milestone := unittest.AssertExistsAndLoadBean(t, &Milestone{ID: 1}).(*Milestone) + milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}).(*issues_model.Milestone) assert.EqualValues(t, milestone.ID, 1) - reaction := &Reaction{ + reaction := &issues_model.Reaction{ Type: "heart", UserID: owner.ID, } + foreignIndex := int64(12345) title := "issuetitle1" is := &Issue{ RepoID: repo.ID, @@ -57,13 +62,22 @@ func assertCreateIssues(t *testing.T, isPull bool) { Poster: owner, IsClosed: true, Labels: []*Label{label}, - Reactions: []*Reaction{reaction}, + Reactions: []*issues_model.Reaction{reaction}, + ForeignReference: &foreignreference.ForeignReference{ + ForeignIndex: strconv.FormatInt(foreignIndex, 10), + RepoID: repo.ID, + Type: foreignreference.TypeIssue, + }, } err := InsertIssues(is) assert.NoError(t, err) i := unittest.AssertExistsAndLoadBean(t, &Issue{Title: title}).(*Issue) - unittest.AssertExistsAndLoadBean(t, &Reaction{Type: "heart", UserID: owner.ID, IssueID: i.ID}) + assert.Nil(t, i.ForeignReference) + err = i.LoadAttributes() + assert.NoError(t, err) + assert.EqualValues(t, strconv.FormatInt(foreignIndex, 10), i.ForeignReference.ForeignIndex) + unittest.AssertExistsAndLoadBean(t, &issues_model.Reaction{Type: "heart", UserID: owner.ID, IssueID: i.ID}) } func TestMigrate_CreateIssuesIsPullFalse(t *testing.T) { @@ -77,9 +91,9 
@@ func TestMigrate_CreateIssuesIsPullTrue(t *testing.T) { func TestMigrate_InsertIssueComments(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) issue := unittest.AssertExistsAndLoadBean(t, &Issue{ID: 1}).(*Issue) - _ = issue.LoadRepo() + _ = issue.LoadRepo(db.DefaultContext) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: issue.Repo.OwnerID}).(*user_model.User) - reaction := &Reaction{ + reaction := &issues_model.Reaction{ Type: "heart", UserID: owner.ID, } @@ -89,7 +103,7 @@ func TestMigrate_InsertIssueComments(t *testing.T) { Poster: owner, IssueID: issue.ID, Issue: issue, - Reactions: []*Reaction{reaction}, + Reactions: []*issues_model.Reaction{reaction}, } err := InsertIssueComments([]*Comment{comment}) diff --git a/models/migrations/migrations.go b/models/migrations/migrations.go index 31b172a68d..5a2297ac0d 100644 --- a/models/migrations/migrations.go +++ b/models/migrations/migrations.go @@ -61,6 +61,7 @@ type Version struct { // update minDBVersion accordingly var migrations = []Migration{ // Gitea 1.5.0 ends at v69 + // v70 -> v71 NewMigration("add issue_dependencies", addIssueDependencies), // v71 -> v72 @@ -373,6 +374,19 @@ var migrations = []Migration{ NewMigration("Increase WebAuthentication CredentialID size to 410 - NO-OPED", increaseCredentialIDTo410), // v210 -> v211 NewMigration("v208 was completely broken - remigrate", remigrateU2FCredentials), + + // Gitea 1.16.2 ends at v211 + + // v211 -> v212 + NewMigration("Create ForeignReference table", createForeignReferenceTable), + // v212 -> v213 + NewMigration("Add package tables", addPackageTables), + // v213 -> v214 + NewMigration("Add allow edits from maintainers to PullRequest table", addAllowMaintainerEdit), + // v214 -> v215 + NewMigration("Add auto merge table", addAutoMergeTable), + // v215 -> v216 + NewMigration("allow to view files in PRs", addReviewViewedFiles), } // GetCurrentDBVersion returns the current db version @@ -453,7 +467,7 @@ Please try upgrading to a lower version first (suggested v1.6.4), then upgrade t // Downgrading Gitea's database version not supported if int(v-minDBVersion) > len(migrations) { - msg := fmt.Sprintf("Your database (migration version: %d) is for a newer Gita, you can not use the newer database for this old Gitea release (%d).", v, minDBVersion+len(migrations)) + msg := fmt.Sprintf("Your database (migration version: %d) is for a newer Gitea, you can not use the newer database for this old Gitea release (%d).", v, minDBVersion+len(migrations)) msg += "\nGitea will exit to keep your database safe and unchanged. Please use the correct Gitea release, do not change the migration version manually (incorrect manual operation may lose data)." 
if !setting.IsProd { msg += fmt.Sprintf("\nIf you are in development and really know what you're doing, you can force changing the migration version by executing: UPDATE version SET version=%d WHERE id=1;", minDBVersion+len(migrations)) diff --git a/models/migrations/migrations_test.go b/models/migrations/migrations_test.go index f798d50117..a1fd49a8b9 100644 --- a/models/migrations/migrations_test.go +++ b/models/migrations/migrations_test.go @@ -24,7 +24,6 @@ import ( "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" - "github.com/unknwon/com" "xorm.io/xorm" "xorm.io/xorm/names" ) @@ -204,7 +203,7 @@ func prepareTestEnv(t *testing.T, skip int, syncModels ...interface{}) (*xorm.En deferFn := PrintCurrentTest(t, ourSkip) assert.NoError(t, os.RemoveAll(setting.RepoRootPath)) - assert.NoError(t, com.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), + assert.NoError(t, unittest.CopyDir(path.Join(filepath.Dir(setting.AppPath), "integrations/gitea-repositories-meta"), setting.RepoRootPath)) ownerDirs, err := os.ReadDir(setting.RepoRootPath) if err != nil { diff --git a/models/migrations/v128.go b/models/migrations/v128.go index 1454088c89..2aa68f9b64 100644 --- a/models/migrations/v128.go +++ b/models/migrations/v128.go @@ -83,17 +83,17 @@ func fixMergeBase(x *xorm.Engine) error { if !pr.HasMerged { var err error - pr.MergeBase, err = git.NewCommand(git.DefaultContext, "merge-base", "--", pr.BaseBranch, gitRefName).RunInDir(repoPath) + pr.MergeBase, _, err = git.NewCommand(git.DefaultContext, "merge-base", "--", pr.BaseBranch, gitRefName).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil { var err2 error - pr.MergeBase, err2 = git.NewCommand(git.DefaultContext, "rev-parse", git.BranchPrefix+pr.BaseBranch).RunInDir(repoPath) + pr.MergeBase, _, err2 = git.NewCommand(git.DefaultContext, "rev-parse", git.BranchPrefix+pr.BaseBranch).RunStdString(&git.RunOpts{Dir: repoPath}) if err2 != nil { log.Error("Unable to get merge base for PR ID %d, Index %d in %s/%s. Error: %v & %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err, err2) continue } } } else { - parentsString, err := git.NewCommand(git.DefaultContext, "rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunInDir(repoPath) + parentsString, _, err := git.NewCommand(git.DefaultContext, "rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil { log.Error("Unable to get parents for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err) continue @@ -106,7 +106,7 @@ func fixMergeBase(x *xorm.Engine) error { args := append([]string{"merge-base", "--"}, parents[1:]...) args = append(args, gitRefName) - pr.MergeBase, err = git.NewCommand(git.DefaultContext, args...).RunInDir(repoPath) + pr.MergeBase, _, err = git.NewCommand(git.DefaultContext, args...).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil { log.Error("Unable to get merge base for merged PR ID %d, Index %d in %s/%s. 
Error: %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err) continue diff --git a/models/migrations/v134.go b/models/migrations/v134.go index 3a8fd96b7c..16e8ec8e94 100644 --- a/models/migrations/v134.go +++ b/models/migrations/v134.go @@ -80,7 +80,7 @@ func refixMergeBase(x *xorm.Engine) error { gitRefName := fmt.Sprintf("refs/pull/%d/head", pr.Index) - parentsString, err := git.NewCommand(git.DefaultContext, "rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunInDir(repoPath) + parentsString, _, err := git.NewCommand(git.DefaultContext, "rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil { log.Error("Unable to get parents for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err) continue @@ -94,7 +94,7 @@ func refixMergeBase(x *xorm.Engine) error { args := append([]string{"merge-base", "--"}, parents[1:]...) args = append(args, gitRefName) - pr.MergeBase, err = git.NewCommand(git.DefaultContext, args...).RunInDir(repoPath) + pr.MergeBase, _, err = git.NewCommand(git.DefaultContext, args...).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil { log.Error("Unable to get merge base for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, baseRepo.OwnerName, baseRepo.Name, err) continue diff --git a/models/migrations/v156.go b/models/migrations/v156.go index 77aed3ff57..26f97fe984 100644 --- a/models/migrations/v156.go +++ b/models/migrations/v156.go @@ -109,7 +109,7 @@ func fixPublisherIDforTagReleases(x *xorm.Engine) error { return err } } - gitRepo, err = git.OpenRepositoryCtx(git.DefaultContext, repoPath(repo.OwnerName, repo.Name)) + gitRepo, err = git.OpenRepository(git.DefaultContext, repoPath(repo.OwnerName, repo.Name)) if err != nil { log.Error("Error whilst opening git repo for [%d]%s/%s. Error: %v", repo.ID, repo.OwnerName, repo.Name, err) return err diff --git a/models/migrations/v180.go b/models/migrations/v180.go index 1b735c2035..492c91f1b9 100644 --- a/models/migrations/v180.go +++ b/models/migrations/v180.go @@ -112,7 +112,7 @@ func removeCredentials(payload string) (string, error) { opts.AuthPassword = "" opts.AuthToken = "" - opts.CloneAddr = util.NewStringURLSanitizer(opts.CloneAddr, true).Replace(opts.CloneAddr) + opts.CloneAddr = util.SanitizeCredentialURLs(opts.CloneAddr) confBytes, err := json.Marshal(opts) if err != nil { diff --git a/models/migrations/v210.go b/models/migrations/v210.go index dafe355fe2..f32fae77ba 100644 --- a/models/migrations/v210.go +++ b/models/migrations/v210.go @@ -174,5 +174,11 @@ func remigrateU2FCredentials(x *xorm.Engine) error { regs = regs[:0] } + if x.Dialect().URI().DBType == schemas.POSTGRES { + if _, err := x.Exec("SELECT setval('webauthn_credential_id_seq', COALESCE((SELECT MAX(id)+1 FROM `webauthn_credential`), 1), false)"); err != nil { + return err + } + } + return nil } diff --git a/models/migrations/v211.go b/models/migrations/v211.go new file mode 100644 index 0000000000..26ccfd2037 --- /dev/null +++ b/models/migrations/v211.go @@ -0,0 +1,26 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import ( + "fmt" + + "xorm.io/xorm" +) + +func createForeignReferenceTable(x *xorm.Engine) error { + type ForeignReference struct { + // RepoID is the first column in all indices. 
now we only need 2 indices: (repo, local) and (repo, foreign, type) + RepoID int64 `xorm:"UNIQUE(repo_foreign_type) INDEX(repo_local)" ` + LocalIndex int64 `xorm:"INDEX(repo_local)"` // the resource key inside Gitea, it can be IssueIndex, or some model ID. + ForeignIndex string `xorm:"INDEX UNIQUE(repo_foreign_type)"` + Type string `xorm:"VARCHAR(16) INDEX UNIQUE(repo_foreign_type)"` + } + + if err := x.Sync2(new(ForeignReference)); err != nil { + return fmt.Errorf("Sync2: %v", err) + } + return nil +} diff --git a/models/migrations/v212.go b/models/migrations/v212.go new file mode 100644 index 0000000000..9d16f0556c --- /dev/null +++ b/models/migrations/v212.go @@ -0,0 +1,94 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import ( + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/xorm" +) + +func addPackageTables(x *xorm.Engine) error { + type Package struct { + ID int64 `xorm:"pk autoincr"` + OwnerID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"` + RepoID int64 `xorm:"INDEX"` + Type string `xorm:"UNIQUE(s) INDEX NOT NULL"` + Name string `xorm:"NOT NULL"` + LowerName string `xorm:"UNIQUE(s) INDEX NOT NULL"` + SemverCompatible bool `xorm:"NOT NULL DEFAULT false"` + } + + if err := x.Sync2(new(Package)); err != nil { + return err + } + + type PackageVersion struct { + ID int64 `xorm:"pk autoincr"` + PackageID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"` + CreatorID int64 `xorm:"NOT NULL DEFAULT 0"` + Version string `xorm:"NOT NULL"` + LowerVersion string `xorm:"UNIQUE(s) INDEX NOT NULL"` + CreatedUnix timeutil.TimeStamp `xorm:"created INDEX NOT NULL"` + IsInternal bool `xorm:"INDEX NOT NULL DEFAULT false"` + MetadataJSON string `xorm:"metadata_json TEXT"` + DownloadCount int64 `xorm:"NOT NULL DEFAULT 0"` + } + + if err := x.Sync2(new(PackageVersion)); err != nil { + return err + } + + type PackageProperty struct { + ID int64 `xorm:"pk autoincr"` + RefType int64 `xorm:"INDEX NOT NULL"` + RefID int64 `xorm:"INDEX NOT NULL"` + Name string `xorm:"INDEX NOT NULL"` + Value string `xorm:"TEXT NOT NULL"` + } + + if err := x.Sync2(new(PackageProperty)); err != nil { + return err + } + + type PackageFile struct { + ID int64 `xorm:"pk autoincr"` + VersionID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"` + BlobID int64 `xorm:"INDEX NOT NULL"` + Name string `xorm:"NOT NULL"` + LowerName string `xorm:"UNIQUE(s) INDEX NOT NULL"` + CompositeKey string `xorm:"UNIQUE(s) INDEX"` + IsLead bool `xorm:"NOT NULL DEFAULT false"` + CreatedUnix timeutil.TimeStamp `xorm:"created INDEX NOT NULL"` + } + + if err := x.Sync2(new(PackageFile)); err != nil { + return err + } + + type PackageBlob struct { + ID int64 `xorm:"pk autoincr"` + Size int64 `xorm:"NOT NULL DEFAULT 0"` + HashMD5 string `xorm:"hash_md5 char(32) UNIQUE(md5) INDEX NOT NULL"` + HashSHA1 string `xorm:"hash_sha1 char(40) UNIQUE(sha1) INDEX NOT NULL"` + HashSHA256 string `xorm:"hash_sha256 char(64) UNIQUE(sha256) INDEX NOT NULL"` + HashSHA512 string `xorm:"hash_sha512 char(128) UNIQUE(sha512) INDEX NOT NULL"` + CreatedUnix timeutil.TimeStamp `xorm:"created INDEX NOT NULL"` + } + + if err := x.Sync2(new(PackageBlob)); err != nil { + return err + } + + type PackageBlobUpload struct { + ID string `xorm:"pk"` + BytesReceived int64 `xorm:"NOT NULL DEFAULT 0"` + HashStateBytes []byte `xorm:"BLOB"` + CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"` + UpdatedUnix timeutil.TimeStamp `xorm:"updated INDEX NOT NULL"` + } + + return 
x.Sync2(new(PackageBlobUpload)) +} diff --git a/models/migrations/v213.go b/models/migrations/v213.go new file mode 100644 index 0000000000..b1dbf98d1e --- /dev/null +++ b/models/migrations/v213.go @@ -0,0 +1,18 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import ( + "xorm.io/xorm" +) + +func addAllowMaintainerEdit(x *xorm.Engine) error { + // PullRequest represents relation between pull request and repositories. + type PullRequest struct { + AllowMaintainerEdit bool `xorm:"NOT NULL DEFAULT false"` + } + + return x.Sync2(new(PullRequest)) +} diff --git a/models/migrations/v214.go b/models/migrations/v214.go new file mode 100644 index 0000000000..dfe5d776a0 --- /dev/null +++ b/models/migrations/v214.go @@ -0,0 +1,23 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package migrations + +import ( + "xorm.io/xorm" +) + +func addAutoMergeTable(x *xorm.Engine) error { + type MergeStyle string + type PullAutoMerge struct { + ID int64 `xorm:"pk autoincr"` + PullID int64 `xorm:"UNIQUE"` + DoerID int64 `xorm:"NOT NULL"` + MergeStyle MergeStyle `xorm:"varchar(30)"` + Message string `xorm:"LONGTEXT"` + CreatedUnix int64 `xorm:"created"` + } + + return x.Sync2(&PullAutoMerge{}) +} diff --git a/models/migrations/v215.go b/models/migrations/v215.go new file mode 100644 index 0000000000..d65488a181 --- /dev/null +++ b/models/migrations/v215.go @@ -0,0 +1,25 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
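Note: the new migration files added in this patch (v212 through v215) share one convention — each declares a trimmed local copy of the model with only the columns the migration needs, then calls x.Sync2 so xorm creates or alters the table without depending on the live model definitions. A minimal sketch of that pattern; the migration name and struct below are illustrative only and not part of this patch:

package migrations

import (
	"code.gitea.io/gitea/modules/timeutil"

	"xorm.io/xorm"
)

// addExampleTable is a hypothetical migration showing the Sync2 pattern
// used by v212-v215: declare only the columns the migration cares about
// and let Sync2 reconcile the schema.
func addExampleTable(x *xorm.Engine) error {
	type ExampleRecord struct {
		ID          int64              `xorm:"pk autoincr"`
		OwnerID     int64              `xorm:"INDEX NOT NULL"`
		CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"`
	}
	return x.Sync2(new(ExampleRecord))
}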
+ +package migrations + +import ( + "code.gitea.io/gitea/models/pull" + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/xorm" +) + +func addReviewViewedFiles(x *xorm.Engine) error { + type ReviewState struct { + ID int64 `xorm:"pk autoincr"` + UserID int64 `xorm:"NOT NULL UNIQUE(pull_commit_user)"` + PullID int64 `xorm:"NOT NULL INDEX UNIQUE(pull_commit_user) DEFAULT 0"` + CommitSHA string `xorm:"NOT NULL VARCHAR(40) UNIQUE(pull_commit_user)"` + UpdatedFiles map[string]pull.ViewedState `xorm:"NOT NULL LONGTEXT JSON"` + UpdatedUnix timeutil.TimeStamp `xorm:"updated"` + } + + return x.Sync2(new(ReviewState)) +} diff --git a/models/migrations/v82.go b/models/migrations/v82.go index 7f9f979d95..8e07e633c9 100644 --- a/models/migrations/v82.go +++ b/models/migrations/v82.go @@ -99,7 +99,7 @@ func fixReleaseSha1OnReleaseTable(x *xorm.Engine) error { userCache[repo.OwnerID] = user } - gitRepo, err = git.OpenRepositoryCtx(git.DefaultContext, RepoPath(user.Name, repo.Name)) + gitRepo, err = git.OpenRepository(git.DefaultContext, RepoPath(user.Name, repo.Name)) if err != nil { return err } diff --git a/models/notification.go b/models/notification.go index b53d236e43..d0b7852cd2 100644 --- a/models/notification.go +++ b/models/notification.go @@ -11,9 +11,11 @@ import ( "strconv" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -151,7 +153,7 @@ func CreateRepoTransferNotification(doer, newOwner *user_model.User, repo *repo_ var notify []*Notification if newOwner.IsOrganization() { - users, err := getUsersWhoCanCreateOrgRepo(db.GetEngine(ctx), newOwner.ID) + users, err := organization.GetUsersWhoCanCreateOrgRepo(ctx, newOwner.ID) if err != nil || len(users) == 0 { return err } @@ -253,7 +255,7 @@ func createOrUpdateIssueNotifications(ctx context.Context, issueID, commentID, n } } - err = issue.loadRepo(ctx) + err = issue.LoadRepo(ctx) if err != nil { return err } @@ -519,7 +521,7 @@ func (nl NotificationList) getPendingRepoIDs() []int64 { ids[notification.RepoID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } // LoadRepos loads repositories from database @@ -595,7 +597,7 @@ func (nl NotificationList) getPendingIssueIDs() []int64 { ids[notification.IssueID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } // LoadIssues loads issues from database @@ -681,7 +683,7 @@ func (nl NotificationList) getPendingCommentIDs() []int64 { ids[notification.CommentID] = struct{}{} } } - return keysInt64(ids) + return container.KeysInt64(ids) } // LoadComments loads comments from database @@ -823,7 +825,7 @@ func getNotificationByID(e db.Engine, notificationID int64) (*Notification, erro } if !ok { - return nil, ErrNotExist{ID: notificationID} + return nil, db.ErrNotExist{ID: notificationID} } return notification, nil diff --git a/models/org.go b/models/org.go index 70b3fe27b9..1e5b403f12 100644 --- a/models/org.go +++ b/models/org.go @@ -11,443 +11,15 @@ import ( "strings" "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/models/perm" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" - "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" - "code.gitea.io/gitea/modules/log" - 
"code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/structs" "xorm.io/builder" ) -// Organization represents an organization -type Organization user_model.User - -// OrgFromUser converts user to organization -func OrgFromUser(user *user_model.User) *Organization { - return (*Organization)(user) -} - -// TableName represents the real table name of Organization -func (Organization) TableName() string { - return "user" -} - -// IsOwnedBy returns true if given user is in the owner team. -func (org *Organization) IsOwnedBy(uid int64) (bool, error) { - return IsOrganizationOwner(org.ID, uid) -} - -// IsOrgMember returns true if given user is member of organization. -func (org *Organization) IsOrgMember(uid int64) (bool, error) { - return IsOrganizationMember(org.ID, uid) -} - -// CanCreateOrgRepo returns true if given user can create repo in organization -func (org *Organization) CanCreateOrgRepo(uid int64) (bool, error) { - return CanCreateOrgRepo(org.ID, uid) -} - -func (org *Organization) getTeam(e db.Engine, name string) (*Team, error) { - return getTeam(e, org.ID, name) -} - -// GetTeam returns named team of organization. -func (org *Organization) GetTeam(name string) (*Team, error) { - return org.getTeam(db.GetEngine(db.DefaultContext), name) -} - -func (org *Organization) getOwnerTeam(e db.Engine) (*Team, error) { - return org.getTeam(e, ownerTeamName) -} - -// GetOwnerTeam returns owner team of organization. -func (org *Organization) GetOwnerTeam() (*Team, error) { - return org.getOwnerTeam(db.GetEngine(db.DefaultContext)) -} - -func (org *Organization) loadTeams(e db.Engine) ([]*Team, error) { - var teams []*Team - return teams, e. - Where("org_id=?", org.ID). - OrderBy("CASE WHEN name LIKE '" + ownerTeamName + "' THEN '' ELSE name END"). - Find(&teams) -} - -// LoadTeams load teams if not loaded. -func (org *Organization) LoadTeams() ([]*Team, error) { - return org.loadTeams(db.GetEngine(db.DefaultContext)) -} - -// GetMembers returns all members of organization. -func (org *Organization) GetMembers() (user_model.UserList, map[int64]bool, error) { - return FindOrgMembers(&FindOrgMembersOpts{ - OrgID: org.ID, - }) -} - -// HasMemberWithUserID returns true if user with userID is part of the u organisation. -func (org *Organization) HasMemberWithUserID(userID int64) bool { - return org.hasMemberWithUserID(db.GetEngine(db.DefaultContext), userID) -} - -func (org *Organization) hasMemberWithUserID(e db.Engine, userID int64) bool { - isMember, err := isOrganizationMember(e, org.ID, userID) - if err != nil { - log.Error("IsOrganizationMember: %v", err) - return false - } - return isMember -} - -// AvatarLink returns the full avatar link with http host -func (org *Organization) AvatarLink() string { - return org.AsUser().AvatarLink() -} - -// HTMLURL returns the organization's full link. -func (org *Organization) HTMLURL() string { - return org.AsUser().HTMLURL() -} - -// OrganisationLink returns the organization sub page link. -func (org *Organization) OrganisationLink() string { - return org.AsUser().OrganisationLink() -} - -// ShortName ellipses username to length -func (org *Organization) ShortName(length int) string { - return org.AsUser().ShortName(length) -} - -// HomeLink returns the user or organization home page link. 
-func (org *Organization) HomeLink() string { - return org.AsUser().HomeLink() -} - -// CanCreateRepo returns if user login can create a repository -// NOTE: functions calling this assume a failure due to repository count limit; if new checks are added, those functions should be revised -func (org *Organization) CanCreateRepo() bool { - return org.AsUser().CanCreateRepo() -} - -// FindOrgMembersOpts represensts find org members conditions -type FindOrgMembersOpts struct { - db.ListOptions - OrgID int64 - PublicOnly bool -} - -// CountOrgMembers counts the organization's members -func CountOrgMembers(opts *FindOrgMembersOpts) (int64, error) { - sess := db.GetEngine(db.DefaultContext).Where("org_id=?", opts.OrgID) - if opts.PublicOnly { - sess.And("is_public = ?", true) - } - return sess.Count(new(OrgUser)) -} - -// FindOrgMembers loads organization members according conditions -func FindOrgMembers(opts *FindOrgMembersOpts) (user_model.UserList, map[int64]bool, error) { - ous, err := GetOrgUsersByOrgID(opts) - if err != nil { - return nil, nil, err - } - - ids := make([]int64, len(ous)) - idsIsPublic := make(map[int64]bool, len(ous)) - for i, ou := range ous { - ids[i] = ou.UID - idsIsPublic[ou.UID] = ou.IsPublic - } - - users, err := user_model.GetUsersByIDs(ids) - if err != nil { - return nil, nil, err - } - return users, idsIsPublic, nil -} - -// AddMember adds new member to organization. -func (org *Organization) AddMember(uid int64) error { - return AddOrgUser(org.ID, uid) -} - -// RemoveMember removes member from organization. -func (org *Organization) RemoveMember(uid int64) error { - return RemoveOrgUser(org.ID, uid) -} - -func (org *Organization) removeOrgRepo(e db.Engine, repoID int64) error { - return removeOrgRepo(e, org.ID, repoID) -} - -// RemoveOrgRepo removes all team-repository relations of organization. -func (org *Organization) RemoveOrgRepo(repoID int64) error { - return org.removeOrgRepo(db.GetEngine(db.DefaultContext), repoID) -} - -// AsUser returns the org as user object -func (org *Organization) AsUser() *user_model.User { - return (*user_model.User)(org) -} - -// DisplayName returns full name if it's not empty, -// returns username otherwise. -func (org *Organization) DisplayName() string { - return org.AsUser().DisplayName() -} - -// CustomAvatarRelativePath returns user custom avatar relative path. -func (org *Organization) CustomAvatarRelativePath() string { - return org.Avatar -} - -// CreateOrganization creates record of a new organization. 
-func CreateOrganization(org *Organization, owner *user_model.User) (err error) { - if !owner.CanCreateOrganization() { - return ErrUserNotAllowedCreateOrg{} - } - - if err = user_model.IsUsableUsername(org.Name); err != nil { - return err - } - - isExist, err := user_model.IsUserExist(0, org.Name) - if err != nil { - return err - } else if isExist { - return user_model.ErrUserAlreadyExist{Name: org.Name} - } - - org.LowerName = strings.ToLower(org.Name) - if org.Rands, err = user_model.GetUserSalt(); err != nil { - return err - } - if org.Salt, err = user_model.GetUserSalt(); err != nil { - return err - } - org.UseCustomAvatar = true - org.MaxRepoCreation = -1 - org.NumTeams = 1 - org.NumMembers = 1 - org.Type = user_model.UserTypeOrganization - - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() - - if err = user_model.DeleteUserRedirect(ctx, org.Name); err != nil { - return err - } - - if err = db.Insert(ctx, org); err != nil { - return fmt.Errorf("insert organization: %v", err) - } - if err = user_model.GenerateRandomAvatarCtx(ctx, org.AsUser()); err != nil { - return fmt.Errorf("generate random avatar: %v", err) - } - - // Add initial creator to organization and owner team. - if err = db.Insert(ctx, &OrgUser{ - UID: owner.ID, - OrgID: org.ID, - }); err != nil { - return fmt.Errorf("insert org-user relation: %v", err) - } - - // Create default owner team. - t := &Team{ - OrgID: org.ID, - LowerName: strings.ToLower(ownerTeamName), - Name: ownerTeamName, - AccessMode: perm.AccessModeOwner, - NumMembers: 1, - IncludesAllRepositories: true, - CanCreateOrgRepo: true, - } - if err = db.Insert(ctx, t); err != nil { - return fmt.Errorf("insert owner team: %v", err) - } - - // insert units for team - units := make([]TeamUnit, 0, len(unit.AllRepoUnitTypes)) - for _, tp := range unit.AllRepoUnitTypes { - units = append(units, TeamUnit{ - OrgID: org.ID, - TeamID: t.ID, - Type: tp, - }) - } - - if err = db.Insert(ctx, &units); err != nil { - return err - } - - if err = db.Insert(ctx, &TeamUser{ - UID: owner.ID, - OrgID: org.ID, - TeamID: t.ID, - }); err != nil { - return fmt.Errorf("insert team-user relation: %v", err) - } - - return committer.Commit() -} - -// GetOrgByName returns organization by given name. -func GetOrgByName(name string) (*Organization, error) { - if len(name) == 0 { - return nil, ErrOrgNotExist{0, name} - } - u := &Organization{ - LowerName: strings.ToLower(name), - Type: user_model.UserTypeOrganization, - } - has, err := db.GetEngine(db.DefaultContext).Get(u) - if err != nil { - return nil, err - } else if !has { - return nil, ErrOrgNotExist{0, name} - } - return u, nil -} - -// CountOrganizations returns number of organizations. -func CountOrganizations() int64 { - count, _ := db.GetEngine(db.DefaultContext). - Where("type=1"). - Count(new(Organization)) - return count -} - -// DeleteOrganization deletes models associated to an organization. 
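Note: the large deletion from models/org.go is a relocation rather than a removal — these organization helpers move into the new models/organization package, and call sites in this patch switch to it (removeOrgUser below already uses organization.GetOrgByIDCtx and organization.IsOrganizationOwner). A hedged caller-side sketch of the relocated API; the package and wrapper name here are illustrative:

package example // illustrative, not part of this patch

import (
	"code.gitea.io/gitea/models/db"
	"code.gitea.io/gitea/models/organization"
)

// isOwner shows the shape of the relocated call: the old
// models.IsOrganizationOwner(orgID, uid) becomes a context-aware call
// into the organization package, as removeOrgUser below does.
func isOwner(orgID, uid int64) (bool, error) {
	return organization.IsOrganizationOwner(db.DefaultContext, orgID, uid)
}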
-func DeleteOrganization(ctx context.Context, org *Organization) error { - if org.Type != user_model.UserTypeOrganization { - return fmt.Errorf("%s is a user not an organization", org.Name) - } - - if err := db.DeleteBeans(ctx, - &Team{OrgID: org.ID}, - &OrgUser{OrgID: org.ID}, - &TeamUser{OrgID: org.ID}, - &TeamUnit{OrgID: org.ID}, - ); err != nil { - return fmt.Errorf("deleteBeans: %v", err) - } - - if _, err := db.GetEngine(ctx).ID(org.ID).Delete(new(user_model.User)); err != nil { - return fmt.Errorf("Delete: %v", err) - } - - return nil -} - -// ________ ____ ___ -// \_____ \_______ ____ | | \______ ___________ -// / | \_ __ \/ ___\| | / ___// __ \_ __ \ -// / | \ | \/ /_/ > | /\___ \\ ___/| | \/ -// \_______ /__| \___ /|______//____ >\___ >__| -// \/ /_____/ \/ \/ - -// OrgUser represents an organization-user relation. -type OrgUser struct { - ID int64 `xorm:"pk autoincr"` - UID int64 `xorm:"INDEX UNIQUE(s)"` - OrgID int64 `xorm:"INDEX UNIQUE(s)"` - IsPublic bool `xorm:"INDEX"` -} - -func init() { - db.RegisterModel(new(OrgUser)) -} - -func isOrganizationOwner(e db.Engine, orgID, uid int64) (bool, error) { - ownerTeam, err := getOwnerTeam(e, orgID) - if err != nil { - if IsErrTeamNotExist(err) { - log.Error("Organization does not have owner team: %d", orgID) - return false, nil - } - return false, err - } - return isTeamMember(e, orgID, ownerTeam.ID, uid) -} - -// IsOrganizationOwner returns true if given user is in the owner team. -func IsOrganizationOwner(orgID, uid int64) (bool, error) { - return isOrganizationOwner(db.GetEngine(db.DefaultContext), orgID, uid) -} - -// IsOrganizationMember returns true if given user is member of organization. -func IsOrganizationMember(orgID, uid int64) (bool, error) { - return isOrganizationMember(db.GetEngine(db.DefaultContext), orgID, uid) -} - -func isOrganizationMember(e db.Engine, orgID, uid int64) (bool, error) { - return e. - Where("uid=?", uid). - And("org_id=?", orgID). - Table("org_user"). - Exist() -} - -// IsPublicMembership returns true if given user public his/her membership. -func IsPublicMembership(orgID, uid int64) (bool, error) { - return db.GetEngine(db.DefaultContext). - Where("uid=?", uid). - And("org_id=?", orgID). - And("is_public=?", true). - Table("org_user"). - Exist() -} - -// CanCreateOrgRepo returns true if user can create repo in organization -func CanCreateOrgRepo(orgID, uid int64) (bool, error) { - if owner, err := IsOrganizationOwner(orgID, uid); owner || err != nil { - return owner, err - } - return db.GetEngine(db.DefaultContext). - Where(builder.Eq{"team.can_create_org_repo": true}). - Join("INNER", "team_user", "team_user.team_id = team.id"). - And("team_user.uid = ?", uid). - And("team_user.org_id = ?", orgID). - Exist(new(Team)) -} - -// GetOrgUserMaxAuthorizeLevel returns highest authorize level of user in an organization -func (org *Organization) GetOrgUserMaxAuthorizeLevel(uid int64) (perm.AccessMode, error) { - var authorize perm.AccessMode - _, err := db.GetEngine(db.DefaultContext). - Select("max(team.authorize)"). - Table("team"). - Join("INNER", "team_user", "team_user.team_id = team.id"). - Where("team_user.uid = ?", uid). - And("team_user.org_id = ?", org.ID). 
- Get(&authorize) - return authorize, err -} - -// GetUsersWhoCanCreateOrgRepo returns users which are able to create repo in organization -func GetUsersWhoCanCreateOrgRepo(orgID int64) ([]*user_model.User, error) { - return getUsersWhoCanCreateOrgRepo(db.GetEngine(db.DefaultContext), orgID) -} - -func getUsersWhoCanCreateOrgRepo(e db.Engine, orgID int64) ([]*user_model.User, error) { - users := make([]*user_model.User, 0, 10) - return users, e. - Join("INNER", "`team_user`", "`team_user`.uid=`user`.id"). - Join("INNER", "`team`", "`team`.id=`team_user`.team_id"). - Where(builder.Eq{"team.can_create_org_repo": true}.Or(builder.Eq{"team.authorize": perm.AccessModeOwner})). - And("team_user.org_id = ?", orgID).Asc("`user`.name").Find(&users) -} - // MinimalOrg represents a simple orgnization with only needed columns -type MinimalOrg = Organization +type MinimalOrg = organization.Organization // GetUserOrgsList returns one user's all orgs list func GetUserOrgsList(user *user_model.User) ([]*MinimalOrg, error) { @@ -486,8 +58,8 @@ func GetUserOrgsList(user *user_model.User) ([]*MinimalOrg, error) { GroupBy(groupByStr) type OrgCount struct { - Organization `xorm:"extends"` - OrgCount int + organization.Organization `xorm:"extends"` + OrgCount int } orgCounts := make([]*OrgCount, 0, 10) @@ -507,239 +79,8 @@ func GetUserOrgsList(user *user_model.User) ([]*MinimalOrg, error) { return orgs, nil } -// SearchOrganizationsOptions options to filter organizations -type SearchOrganizationsOptions struct { - db.ListOptions - All bool -} - -// FindOrgOptions finds orgs options -type FindOrgOptions struct { - db.ListOptions - UserID int64 - IncludePrivate bool -} - -func queryUserOrgIDs(userID int64, includePrivate bool) *builder.Builder { - cond := builder.Eq{"uid": userID} - if !includePrivate { - cond["is_public"] = true - } - return builder.Select("org_id").From("org_user").Where(cond) -} - -func (opts FindOrgOptions) toConds() builder.Cond { - cond := builder.NewCond() - if opts.UserID > 0 { - cond = cond.And(builder.In("`user`.`id`", queryUserOrgIDs(opts.UserID, opts.IncludePrivate))) - } - if !opts.IncludePrivate { - cond = cond.And(builder.Eq{"`user`.visibility": structs.VisibleTypePublic}) - } - return cond -} - -// FindOrgs returns a list of organizations according given conditions -func FindOrgs(opts FindOrgOptions) ([]*Organization, error) { - orgs := make([]*Organization, 0, 10) - sess := db.GetEngine(db.DefaultContext). - Where(opts.toConds()). - Asc("`user`.name") - if opts.Page > 0 && opts.PageSize > 0 { - sess.Limit(opts.PageSize, opts.PageSize*(opts.Page-1)) - } - return orgs, sess.Find(&orgs) -} - -// CountOrgs returns total count organizations according options -func CountOrgs(opts FindOrgOptions) (int64, error) { - return db.GetEngine(db.DefaultContext). - Where(opts.toConds()). - Count(new(user_model.User)) -} - -func getOwnedOrgsByUserID(sess db.Engine, userID int64) ([]*Organization, error) { - orgs := make([]*Organization, 0, 10) - return orgs, sess. - Join("INNER", "`team_user`", "`team_user`.org_id=`user`.id"). - Join("INNER", "`team`", "`team`.id=`team_user`.team_id"). - Where("`team_user`.uid=?", userID). - And("`team`.authorize=?", perm.AccessModeOwner). - Asc("`user`.name"). 
- Find(&orgs) -} - -// HasOrgOrUserVisible tells if the given user can see the given org or user -func HasOrgOrUserVisible(org, user *user_model.User) bool { - return hasOrgOrUserVisible(db.GetEngine(db.DefaultContext), org, user) -} - -func hasOrgOrUserVisible(e db.Engine, orgOrUser, user *user_model.User) bool { - // Not SignedUser - if user == nil { - return orgOrUser.Visibility == structs.VisibleTypePublic - } - - if user.IsAdmin || orgOrUser.ID == user.ID { - return true - } - - if (orgOrUser.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !OrgFromUser(orgOrUser).hasMemberWithUserID(e, user.ID) { - return false - } - return true -} - -// HasOrgsVisible tells if the given user can see at least one of the orgs provided -func HasOrgsVisible(orgs []*Organization, user *user_model.User) bool { - if len(orgs) == 0 { - return false - } - - for _, org := range orgs { - if HasOrgOrUserVisible(org.AsUser(), user) { - return true - } - } - return false -} - -// GetOwnedOrgsByUserID returns a list of organizations are owned by given user ID. -func GetOwnedOrgsByUserID(userID int64) ([]*Organization, error) { - return getOwnedOrgsByUserID(db.GetEngine(db.DefaultContext), userID) -} - -// GetOwnedOrgsByUserIDDesc returns a list of organizations are owned by -// given user ID, ordered descending by the given condition. -func GetOwnedOrgsByUserIDDesc(userID int64, desc string) ([]*Organization, error) { - return getOwnedOrgsByUserID(db.GetEngine(db.DefaultContext).Desc(desc), userID) -} - -// GetOrgsCanCreateRepoByUserID returns a list of organizations where given user ID -// are allowed to create repos. -func GetOrgsCanCreateRepoByUserID(userID int64) ([]*Organization, error) { - orgs := make([]*Organization, 0, 10) - - return orgs, db.GetEngine(db.DefaultContext).Where(builder.In("id", builder.Select("`user`.id").From("`user`"). - Join("INNER", "`team_user`", "`team_user`.org_id = `user`.id"). - Join("INNER", "`team`", "`team`.id = `team_user`.team_id"). - Where(builder.Eq{"`team_user`.uid": userID}). - And(builder.Eq{"`team`.authorize": perm.AccessModeOwner}.Or(builder.Eq{"`team`.can_create_org_repo": true})))). - Asc("`user`.name"). - Find(&orgs) -} - -// GetOrgUsersByUserID returns all organization-user relations by user ID. -func GetOrgUsersByUserID(uid int64, opts *SearchOrganizationsOptions) ([]*OrgUser, error) { - ous := make([]*OrgUser, 0, 10) - sess := db.GetEngine(db.DefaultContext). - Join("LEFT", "`user`", "`org_user`.org_id=`user`.id"). - Where("`org_user`.uid=?", uid) - if !opts.All { - // Only show public organizations - sess.And("is_public=?", true) - } - - if opts.PageSize != 0 { - sess = db.SetSessionPagination(sess, opts) - } - - err := sess. - Asc("`user`.name"). - Find(&ous) - return ous, err -} - -// GetOrgUsersByOrgID returns all organization-user relations by organization ID. -func GetOrgUsersByOrgID(opts *FindOrgMembersOpts) ([]*OrgUser, error) { - return getOrgUsersByOrgID(db.GetEngine(db.DefaultContext), opts) -} - -func getOrgUsersByOrgID(e db.Engine, opts *FindOrgMembersOpts) ([]*OrgUser, error) { - sess := e.Where("org_id=?", opts.OrgID) - if opts.PublicOnly { - sess.And("is_public = ?", true) - } - if opts.ListOptions.PageSize > 0 { - sess = db.SetSessionPagination(sess, opts) - - ous := make([]*OrgUser, 0, opts.PageSize) - return ous, sess.Find(&ous) - } - - var ous []*OrgUser - return ous, sess.Find(&ous) -} - -// ChangeOrgUserStatus changes public or private membership status. 
-func ChangeOrgUserStatus(orgID, uid int64, public bool) error { - ou := new(OrgUser) - has, err := db.GetEngine(db.DefaultContext). - Where("uid=?", uid). - And("org_id=?", orgID). - Get(ou) - if err != nil { - return err - } else if !has { - return nil - } - - ou.IsPublic = public - _, err = db.GetEngine(db.DefaultContext).ID(ou.ID).Cols("is_public").Update(ou) - return err -} - -// AddOrgUser adds new user to given organization. -func AddOrgUser(orgID, uid int64) error { - isAlreadyMember, err := IsOrganizationMember(orgID, uid) - if err != nil || isAlreadyMember { - return err - } - - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() - - ou := &OrgUser{ - UID: uid, - OrgID: orgID, - IsPublic: setting.Service.DefaultOrgMemberVisible, - } - - if err := db.Insert(ctx, ou); err != nil { - return err - } else if _, err = db.Exec(ctx, "UPDATE `user` SET num_members = num_members + 1 WHERE id = ?", orgID); err != nil { - return err - } - - return committer.Commit() -} - -// GetOrgByIDCtx returns the user object by given ID if exists. -func GetOrgByIDCtx(ctx context.Context, id int64) (*Organization, error) { - u := new(Organization) - has, err := db.GetEngine(ctx).ID(id).Get(u) - if err != nil { - return nil, err - } else if !has { - return nil, user_model.ErrUserNotExist{ - UID: id, - Name: "", - KeyID: 0, - } - } - return u, nil -} - -// GetOrgByID returns the user object by given ID if exists. -func GetOrgByID(id int64) (*Organization, error) { - return GetOrgByIDCtx(db.DefaultContext, id) -} - func removeOrgUser(ctx context.Context, orgID, userID int64) error { - ou := new(OrgUser) + ou := new(organization.OrgUser) sess := db.GetEngine(ctx) @@ -753,25 +94,25 @@ func removeOrgUser(ctx context.Context, orgID, userID int64) error { return nil } - org, err := GetOrgByIDCtx(ctx, orgID) + org, err := organization.GetOrgByIDCtx(ctx, orgID) if err != nil { return fmt.Errorf("GetUserByID [%d]: %v", orgID, err) } // Check if the user to delete is the last member in owner team. - if isOwner, err := isOrganizationOwner(sess, orgID, userID); err != nil { + if isOwner, err := organization.IsOrganizationOwner(ctx, orgID, userID); err != nil { return err } else if isOwner { - t, err := org.getOwnerTeam(sess) + t, err := organization.GetOwnerTeam(ctx, org.ID) if err != nil { return err } if t.NumMembers == 1 { - if err := t.getMembers(sess); err != nil { + if err := t.GetMembersCtx(ctx); err != nil { return err } if t.Members[0].ID == userID { - return ErrLastOrgOwner{UID: userID} + return organization.ErrLastOrgOwner{UID: userID} } } } @@ -783,7 +124,7 @@ func removeOrgUser(ctx context.Context, orgID, userID int64) error { } // Delete all repository accesses and unwatch them. - env, err := org.accessibleReposEnv(sess, userID) + env, err := organization.AccessibleReposEnv(ctx, org, userID) if err != nil { return fmt.Errorf("AccessibleReposEnv: %v", err) } @@ -807,7 +148,7 @@ func removeOrgUser(ctx context.Context, orgID, userID int64) error { } // Delete member in his/her teams. 
- teams, err := getUserOrgTeams(sess, org.ID, userID) + teams, err := organization.GetUserOrgTeams(ctx, org.ID, userID) if err != nil { return err } @@ -832,233 +173,3 @@ func RemoveOrgUser(orgID, userID int64) error { } return committer.Commit() } - -func removeOrgRepo(e db.Engine, orgID, repoID int64) error { - teamRepos := make([]*TeamRepo, 0, 10) - if err := e.Find(&teamRepos, &TeamRepo{OrgID: orgID, RepoID: repoID}); err != nil { - return err - } - - if len(teamRepos) == 0 { - return nil - } - - if _, err := e.Delete(&TeamRepo{ - OrgID: orgID, - RepoID: repoID, - }); err != nil { - return err - } - - teamIDs := make([]int64, len(teamRepos)) - for i, teamRepo := range teamRepos { - teamIDs[i] = teamRepo.TeamID - } - - _, err := e.Decr("num_repos").In("id", teamIDs).Update(new(Team)) - return err -} - -func (org *Organization) getUserTeams(e db.Engine, userID int64, cols ...string) ([]*Team, error) { - teams := make([]*Team, 0, org.NumTeams) - return teams, e. - Where("`team_user`.org_id = ?", org.ID). - Join("INNER", "team_user", "`team_user`.team_id = team.id"). - Join("INNER", "`user`", "`user`.id=team_user.uid"). - And("`team_user`.uid = ?", userID). - Asc("`user`.name"). - Cols(cols...). - Find(&teams) -} - -func (org *Organization) getUserTeamIDs(e db.Engine, userID int64) ([]int64, error) { - teamIDs := make([]int64, 0, org.NumTeams) - return teamIDs, e. - Table("team"). - Cols("team.id"). - Where("`team_user`.org_id = ?", org.ID). - Join("INNER", "team_user", "`team_user`.team_id = team.id"). - And("`team_user`.uid = ?", userID). - Find(&teamIDs) -} - -// TeamsWithAccessToRepo returns all teams that have given access level to the repository. -func (org *Organization) TeamsWithAccessToRepo(repoID int64, mode perm.AccessMode) ([]*Team, error) { - return GetTeamsWithAccessToRepo(org.ID, repoID, mode) -} - -// GetUserTeamIDs returns of all team IDs of the organization that user is member of. -func (org *Organization) GetUserTeamIDs(userID int64) ([]int64, error) { - return org.getUserTeamIDs(db.GetEngine(db.DefaultContext), userID) -} - -// GetUserTeams returns all teams that belong to user, -// and that the user has joined. -func (org *Organization) GetUserTeams(userID int64) ([]*Team, error) { - return org.getUserTeams(db.GetEngine(db.DefaultContext), userID) -} - -// AccessibleReposEnvironment operations involving the repositories that are -// accessible to a particular user -type AccessibleReposEnvironment interface { - CountRepos() (int64, error) - RepoIDs(page, pageSize int) ([]int64, error) - Repos(page, pageSize int) ([]*repo_model.Repository, error) - MirrorRepos() ([]*repo_model.Repository, error) - AddKeyword(keyword string) - SetSort(db.SearchOrderBy) -} - -type accessibleReposEnv struct { - org *Organization - user *user_model.User - team *Team - teamIDs []int64 - e db.Engine - keyword string - orderBy db.SearchOrderBy -} - -// AccessibleReposEnv builds an AccessibleReposEnvironment for the repositories in `org` -// that are accessible to the specified user. 
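Note: the accessible-repositories environment removed below also moves to the organization package; the hunk in removeOrgUser above already calls organization.AccessibleReposEnv(ctx, org, userID). Assuming the relocated constructor keeps the AccessibleReposEnvironment interface that is deleted here, a usage sketch (package and function name illustrative) would look like:

package example // illustrative, not part of this patch

import (
	"context"

	"code.gitea.io/gitea/models/organization"
	repo_model "code.gitea.io/gitea/models/repo"
)

// firstAccessiblePage lists the first ten repositories in org visible to
// userID, using the relocated constructor called in removeOrgUser above.
func firstAccessiblePage(ctx context.Context, org *organization.Organization, userID int64) ([]*repo_model.Repository, error) {
	env, err := organization.AccessibleReposEnv(ctx, org, userID)
	if err != nil {
		return nil, err
	}
	return env.Repos(1, 10)
}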
-func (org *Organization) AccessibleReposEnv(userID int64) (AccessibleReposEnvironment, error) { - return org.accessibleReposEnv(db.GetEngine(db.DefaultContext), userID) -} - -func (org *Organization) accessibleReposEnv(e db.Engine, userID int64) (AccessibleReposEnvironment, error) { - var user *user_model.User - - if userID > 0 { - u, err := user_model.GetUserByIDEngine(e, userID) - if err != nil { - return nil, err - } - user = u - } - - teamIDs, err := org.getUserTeamIDs(e, userID) - if err != nil { - return nil, err - } - return &accessibleReposEnv{ - org: org, - user: user, - teamIDs: teamIDs, - e: e, - orderBy: db.SearchOrderByRecentUpdated, - }, nil -} - -// AccessibleTeamReposEnv an AccessibleReposEnvironment for the repositories in `org` -// that are accessible to the specified team. -func (org *Organization) AccessibleTeamReposEnv(team *Team) AccessibleReposEnvironment { - return &accessibleReposEnv{ - org: org, - team: team, - e: db.GetEngine(db.DefaultContext), - orderBy: db.SearchOrderByRecentUpdated, - } -} - -func (env *accessibleReposEnv) cond() builder.Cond { - cond := builder.NewCond() - if env.team != nil { - cond = cond.And(builder.Eq{"team_repo.team_id": env.team.ID}) - } else { - if env.user == nil || !env.user.IsRestricted { - cond = cond.Or(builder.Eq{ - "`repository`.owner_id": env.org.ID, - "`repository`.is_private": false, - }) - } - if len(env.teamIDs) > 0 { - cond = cond.Or(builder.In("team_repo.team_id", env.teamIDs)) - } - } - if env.keyword != "" { - cond = cond.And(builder.Like{"`repository`.lower_name", strings.ToLower(env.keyword)}) - } - return cond -} - -func (env *accessibleReposEnv) CountRepos() (int64, error) { - repoCount, err := env.e. - Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id"). - Where(env.cond()). - Distinct("`repository`.id"). - Count(&repo_model.Repository{}) - if err != nil { - return 0, fmt.Errorf("count user repositories in organization: %v", err) - } - return repoCount, nil -} - -func (env *accessibleReposEnv) RepoIDs(page, pageSize int) ([]int64, error) { - if page <= 0 { - page = 1 - } - - repoIDs := make([]int64, 0, pageSize) - return repoIDs, env.e. - Table("repository"). - Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id"). - Where(env.cond()). - GroupBy("`repository`.id,`repository`."+strings.Fields(string(env.orderBy))[0]). - OrderBy(string(env.orderBy)). - Limit(pageSize, (page-1)*pageSize). - Cols("`repository`.id"). - Find(&repoIDs) -} - -func (env *accessibleReposEnv) Repos(page, pageSize int) ([]*repo_model.Repository, error) { - repoIDs, err := env.RepoIDs(page, pageSize) - if err != nil { - return nil, fmt.Errorf("GetUserRepositoryIDs: %v", err) - } - - repos := make([]*repo_model.Repository, 0, len(repoIDs)) - if len(repoIDs) == 0 { - return repos, nil - } - - return repos, env.e. - In("`repository`.id", repoIDs). - OrderBy(string(env.orderBy)). - Find(&repos) -} - -func (env *accessibleReposEnv) MirrorRepoIDs() ([]int64, error) { - repoIDs := make([]int64, 0, 10) - return repoIDs, env.e. - Table("repository"). - Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id AND `repository`.is_mirror=?", true). - Where(env.cond()). - GroupBy("`repository`.id, `repository`.updated_unix"). - OrderBy(string(env.orderBy)). - Cols("`repository`.id"). 
- Find(&repoIDs) -} - -func (env *accessibleReposEnv) MirrorRepos() ([]*repo_model.Repository, error) { - repoIDs, err := env.MirrorRepoIDs() - if err != nil { - return nil, fmt.Errorf("MirrorRepoIDs: %v", err) - } - - repos := make([]*repo_model.Repository, 0, len(repoIDs)) - if len(repoIDs) == 0 { - return repos, nil - } - - return repos, env.e. - In("`repository`.id", repoIDs). - Find(&repos) -} - -func (env *accessibleReposEnv) AddKeyword(keyword string) { - env.keyword = keyword -} - -func (env *accessibleReposEnv) SetSort(orderBy db.SearchOrderBy) { - env.orderBy = orderBy -} diff --git a/models/org_team.go b/models/org_team.go index 17f95bb5b0..695f803dbf 100644 --- a/models/org_team.go +++ b/models/org_team.go @@ -9,287 +9,25 @@ import ( "context" "errors" "fmt" - "sort" "strings" "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/models/perm" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" - "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "xorm.io/builder" ) -const ownerTeamName = "Owners" - -// Team represents a organization team. -type Team struct { - ID int64 `xorm:"pk autoincr"` - OrgID int64 `xorm:"INDEX"` - LowerName string - Name string - Description string - AccessMode perm.AccessMode `xorm:"'authorize'"` - Repos []*repo_model.Repository `xorm:"-"` - Members []*user_model.User `xorm:"-"` - NumRepos int - NumMembers int - Units []*TeamUnit `xorm:"-"` - IncludesAllRepositories bool `xorm:"NOT NULL DEFAULT false"` - CanCreateOrgRepo bool `xorm:"NOT NULL DEFAULT false"` -} - -func init() { - db.RegisterModel(new(Team)) - db.RegisterModel(new(TeamUser)) - db.RegisterModel(new(TeamRepo)) - db.RegisterModel(new(TeamUnit)) -} - -// SearchOrgTeamOptions holds the search options -type SearchOrgTeamOptions struct { - db.ListOptions - Keyword string - OrgID int64 - IncludeDesc bool -} - -// GetUserTeamOptions holds the search options. -type GetUserTeamOptions struct { - db.ListOptions - UserID int64 -} - -// SearchMembersOptions holds the search options -type SearchMembersOptions struct { - db.ListOptions -} - -// GetUserTeams search for org teams. Caller is responsible to check permissions. -func GetUserTeams(opts *GetUserTeamOptions) ([]*Team, int64, error) { - if opts.Page <= 0 { - opts.Page = 1 - } - if opts.PageSize == 0 { - // Default limit - opts.PageSize = 10 - } - - sess := db.GetEngine(db.DefaultContext) - - sess = sess.Join("INNER", "team_user", "team_user.team_id = team.id"). - And("team_user.uid=?", opts.UserID) - - count, err := sess. - Count(new(Team)) - if err != nil { - return nil, 0, err - } - - if opts.PageSize == -1 { - opts.PageSize = int(count) - } else { - sess = sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) - } - - sess = sess.Join("INNER", "team_user", "team_user.team_id = team.id"). - And("team_user.uid=?", opts.UserID) - - teams := make([]*Team, 0, opts.PageSize) - if err = sess. - OrderBy("lower_name"). - Find(&teams); err != nil { - return nil, 0, err - } - - return teams, count, nil -} - -// SearchOrgTeams search for org teams. Caller is responsible to check permissions. 
-func SearchOrgTeams(opts *SearchOrgTeamOptions) ([]*Team, int64, error) { - if opts.Page <= 0 { - opts.Page = 1 - } - if opts.PageSize == 0 { - // Default limit - opts.PageSize = 10 - } - - cond := builder.NewCond() - - if len(opts.Keyword) > 0 { - lowerKeyword := strings.ToLower(opts.Keyword) - var keywordCond builder.Cond = builder.Like{"lower_name", lowerKeyword} - if opts.IncludeDesc { - keywordCond = keywordCond.Or(builder.Like{"LOWER(description)", lowerKeyword}) - } - cond = cond.And(keywordCond) - } - - cond = cond.And(builder.Eq{"org_id": opts.OrgID}) - - sess := db.GetEngine(db.DefaultContext) - - count, err := sess. - Where(cond). - Count(new(Team)) - if err != nil { - return nil, 0, err - } - - sess = sess.Where(cond) - if opts.PageSize == -1 { - opts.PageSize = int(count) - } else { - sess = sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) - } - - teams := make([]*Team, 0, opts.PageSize) - if err = sess. - OrderBy("lower_name"). - Find(&teams); err != nil { - return nil, 0, err - } - - return teams, count, nil -} - -// ColorFormat provides a basic color format for a Team -func (t *Team) ColorFormat(s fmt.State) { - if t == nil { - log.ColorFprintf(s, "%d:%s (OrgID: %d) %-v", - log.NewColoredIDValue(0), - "", - log.NewColoredIDValue(0), - 0) - return - } - log.ColorFprintf(s, "%d:%s (OrgID: %d) %-v", - log.NewColoredIDValue(t.ID), - t.Name, - log.NewColoredIDValue(t.OrgID), - t.AccessMode) -} - -// GetUnits return a list of available units for a team -func (t *Team) GetUnits() error { - return t.getUnits(db.GetEngine(db.DefaultContext)) -} - -func (t *Team) getUnits(e db.Engine) (err error) { - if t.Units != nil { - return nil - } - - t.Units, err = getUnitsByTeamID(e, t.ID) - return err -} - -// GetUnitNames returns the team units names -func (t *Team) GetUnitNames() (res []string) { - if t.AccessMode >= perm.AccessModeAdmin { - return unit.AllUnitKeyNames() - } - - for _, u := range t.Units { - res = append(res, unit.Units[u.Type].NameKey) - } - return -} - -// GetUnitsMap returns the team units permissions -func (t *Team) GetUnitsMap() map[string]string { - m := make(map[string]string) - if t.AccessMode >= perm.AccessModeAdmin { - for _, u := range unit.Units { - m[u.NameKey] = t.AccessMode.String() - } - } else { - for _, u := range t.Units { - m[u.Unit().NameKey] = u.AccessMode.String() - } - } - return m -} - -// IsOwnerTeam returns true if team is owner team. -func (t *Team) IsOwnerTeam() bool { - return t.Name == ownerTeamName -} - -// IsMember returns true if given user is a member of team. -func (t *Team) IsMember(userID int64) bool { - isMember, err := IsTeamMember(t.OrgID, t.ID, userID) - if err != nil { - log.Error("IsMember: %v", err) - return false - } - return isMember -} - -func (t *Team) getRepositories(e db.Engine) error { - if t.Repos != nil { - return nil - } - return e.Join("INNER", "team_repo", "repository.id = team_repo.repo_id"). - Where("team_repo.team_id=?", t.ID). - OrderBy("repository.name"). - Find(&t.Repos) -} - -// GetRepositories returns paginated repositories in team of organization. -func (t *Team) GetRepositories(opts *SearchOrgTeamOptions) error { - if opts.Page == 0 { - return t.getRepositories(db.GetEngine(db.DefaultContext)) - } - - return t.getRepositories(db.GetPaginatedSession(opts)) -} - -func (t *Team) getMembers(e db.Engine) (err error) { - t.Members, err = getTeamMembers(e, t.ID) - return err -} - -// GetMembers returns paginated members in team of organization. 
-func (t *Team) GetMembers(opts *SearchMembersOptions) (err error) { - if opts.Page == 0 { - return t.getMembers(db.GetEngine(db.DefaultContext)) - } - - return t.getMembers(db.GetPaginatedSession(opts)) -} - -// AddMember adds new membership of the team to the organization, -// the user will have membership to the organization automatically when needed. -func (t *Team) AddMember(userID int64) error { - return AddTeamMember(t, userID) -} - -// RemoveMember removes member from team of organization. -func (t *Team) RemoveMember(userID int64) error { - return RemoveTeamMember(t, userID) -} - -func (t *Team) hasRepository(e db.Engine, repoID int64) bool { - return hasTeamRepo(e, t.OrgID, t.ID, repoID) -} - -// HasRepository returns true if given repository belong to team. -func (t *Team) HasRepository(repoID int64) bool { - return t.hasRepository(db.GetEngine(db.DefaultContext), repoID) -} - -func (t *Team) addRepository(ctx context.Context, repo *repo_model.Repository) (err error) { - e := db.GetEngine(ctx) - if err = addTeamRepo(e, t.OrgID, t.ID, repo.ID); err != nil { +func addRepository(ctx context.Context, t *organization.Team, repo *repo_model.Repository) (err error) { + if err = organization.AddTeamRepo(ctx, t.OrgID, t.ID, repo.ID); err != nil { return err } - if _, err = e.Incr("num_repos").ID(t.ID).Update(new(Team)); err != nil { + if _, err = db.GetEngine(ctx).Incr("num_repos").ID(t.ID).Update(new(organization.Team)); err != nil { return fmt.Errorf("update team: %v", err) } @@ -301,7 +39,7 @@ func (t *Team) addRepository(ctx context.Context, repo *repo_model.Repository) ( // Make all team members watch this repo if enabled in global settings if setting.Service.AutoWatchNewRepos { - if err = t.getMembers(e); err != nil { + if err = t.GetMembersCtx(ctx); err != nil { return fmt.Errorf("getMembers: %v", err) } for _, u := range t.Members { @@ -316,7 +54,7 @@ func (t *Team) addRepository(ctx context.Context, repo *repo_model.Repository) ( // addAllRepositories adds all repositories to the team. // If the team already has some repositories they will be left unchanged. -func (t *Team) addAllRepositories(ctx context.Context) error { +func addAllRepositories(ctx context.Context, t *organization.Team) error { var orgRepos []repo_model.Repository e := db.GetEngine(ctx) if err := e.Where("owner_id = ?", t.OrgID).Find(&orgRepos); err != nil { @@ -324,8 +62,8 @@ func (t *Team) addAllRepositories(ctx context.Context) error { } for _, repo := range orgRepos { - if !t.hasRepository(e, repo.ID) { - if err := t.addRepository(ctx, &repo); err != nil { + if !hasRepository(ctx, t, repo.ID) { + if err := addRepository(ctx, t, &repo); err != nil { return fmt.Errorf("addRepository: %v", err) } } @@ -335,14 +73,14 @@ func (t *Team) addAllRepositories(ctx context.Context) error { } // AddAllRepositories adds all repositories to the team -func (t *Team) AddAllRepositories() (err error) { +func AddAllRepositories(t *organization.Team) (err error) { ctx, committer, err := db.TxContext() if err != nil { return err } defer committer.Close() - if err = t.addAllRepositories(ctx); err != nil { + if err = addAllRepositories(ctx, t); err != nil { return err } @@ -350,10 +88,10 @@ func (t *Team) AddAllRepositories() (err error) { } // AddRepository adds new repository to team of organization. 
-func (t *Team) AddRepository(repo *repo_model.Repository) (err error) { +func AddRepository(t *organization.Team, repo *repo_model.Repository) (err error) { if repo.OwnerID != t.OrgID { return errors.New("Repository does not belong to organization") - } else if t.HasRepository(repo.ID) { + } else if HasRepository(t, repo.ID) { return nil } @@ -363,15 +101,20 @@ func (t *Team) AddRepository(repo *repo_model.Repository) (err error) { } defer committer.Close() - if err = t.addRepository(ctx, repo); err != nil { + if err = addRepository(ctx, t, repo); err != nil { return err } return committer.Commit() } +// HasRepository returns true if given repository belong to team. +func HasRepository(t *organization.Team, repoID int64) bool { + return hasRepository(db.DefaultContext, t, repoID) +} + // RemoveAllRepositories removes all repositories from team and recalculates access -func (t *Team) RemoveAllRepositories() (err error) { +func RemoveAllRepositories(t *organization.Team) (err error) { if t.IncludesAllRepositories { return nil } @@ -382,7 +125,7 @@ func (t *Team) RemoveAllRepositories() (err error) { } defer committer.Close() - if err = t.removeAllRepositories(ctx); err != nil { + if err = removeAllRepositories(ctx, t); err != nil { return err } @@ -391,7 +134,7 @@ func (t *Team) RemoveAllRepositories() (err error) { // removeAllRepositories removes all repositories from team and recalculates access // Note: Shall not be called if team includes all repositories -func (t *Team) removeAllRepositories(ctx context.Context) (err error) { +func removeAllRepositories(ctx context.Context, t *organization.Team) (err error) { e := db.GetEngine(ctx) // Delete all accesses. for _, repo := range t.Repos { @@ -422,7 +165,7 @@ func (t *Team) removeAllRepositories(ctx context.Context) (err error) { // Delete team-repo if _, err := e. Where("team_id=?", t.ID). - Delete(new(TeamRepo)); err != nil { + Delete(new(organization.TeamRepo)); err != nil { return err } @@ -434,11 +177,15 @@ func (t *Team) removeAllRepositories(ctx context.Context) (err error) { return nil } +func hasRepository(ctx context.Context, t *organization.Team, repoID int64) bool { + return organization.HasTeamRepo(ctx, t.OrgID, t.ID, repoID) +} + // removeRepository removes a repository from a team and recalculates access // Note: Repository shall not be removed from team if it includes all repositories (unless the repository is deleted) -func (t *Team) removeRepository(ctx context.Context, repo *repo_model.Repository, recalculate bool) (err error) { +func removeRepository(ctx context.Context, t *organization.Team, repo *repo_model.Repository, recalculate bool) (err error) { e := db.GetEngine(ctx) - if err = removeTeamRepo(e, t.ID, repo.ID); err != nil { + if err = organization.RemoveTeamRepo(ctx, t.ID, repo.ID); err != nil { return err } @@ -454,7 +201,7 @@ func (t *Team) removeRepository(ctx context.Context, repo *repo_model.Repository } } - teamUsers, err := getTeamUsersByTeamID(e, t.ID) + teamUsers, err := organization.GetTeamUsersByTeamID(ctx, t.ID) if err != nil { return fmt.Errorf("getTeamUsersByTeamID: %v", err) } @@ -481,8 +228,8 @@ func (t *Team) removeRepository(ctx context.Context, repo *repo_model.Repository // RemoveRepository removes repository from team of organization. // If the team shall include all repositories the request is ignored. 
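Note: the same refactoring runs through org_team.go — receiver methods on *Team become package-level functions taking an *organization.Team (AddRepository, HasRepository, RemoveAllRepositories, and RemoveRepository below). An illustrative helper built on the new signatures introduced in this hunk; the wrapper name is not part of the patch:

package models

import (
	"code.gitea.io/gitea/models/organization"
	repo_model "code.gitea.io/gitea/models/repo"
)

// ensureTeamRepo is illustrative only: it composes the new package-level
// HasRepository and AddRepository functions that replace the old
// t.HasRepository / t.AddRepository receiver methods in this hunk.
func ensureTeamRepo(t *organization.Team, repo *repo_model.Repository) error {
	if HasRepository(t, repo.ID) {
		return nil
	}
	return AddRepository(t, repo)
}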
-func (t *Team) RemoveRepository(repoID int64) error { - if !t.HasRepository(repoID) { +func RemoveRepository(t *organization.Team, repoID int64) error { + if !HasRepository(t, repoID) { return nil } @@ -501,58 +248,21 @@ func (t *Team) RemoveRepository(repoID int64) error { } defer committer.Close() - if err = t.removeRepository(ctx, repo, true); err != nil { + if err = removeRepository(ctx, t, repo, true); err != nil { return err } return committer.Commit() } -// UnitEnabled returns if the team has the given unit type enabled -func (t *Team) UnitEnabled(tp unit.Type) bool { - return t.unitEnabled(db.GetEngine(db.DefaultContext), tp) -} - -func (t *Team) unitEnabled(e db.Engine, tp unit.Type) bool { - return t.unitAccessMode(e, tp) > perm.AccessModeNone -} - -// UnitAccessMode returns if the team has the given unit type enabled -func (t *Team) UnitAccessMode(tp unit.Type) perm.AccessMode { - return t.unitAccessMode(db.GetEngine(db.DefaultContext), tp) -} - -func (t *Team) unitAccessMode(e db.Engine, tp unit.Type) perm.AccessMode { - if err := t.getUnits(e); err != nil { - log.Warn("Error loading team (ID: %d) units: %s", t.ID, err.Error()) - } - - for _, unit := range t.Units { - if unit.Type == tp { - return unit.AccessMode - } - } - return perm.AccessModeNone -} - -// IsUsableTeamName tests if a name could be as team name -func IsUsableTeamName(name string) error { - switch name { - case "new": - return db.ErrNameReserved{Name: name} - default: - return nil - } -} - // NewTeam creates a record of new team. // It's caller's responsibility to assign organization ID. -func NewTeam(t *Team) (err error) { +func NewTeam(t *organization.Team) (err error) { if len(t.Name) == 0 { return errors.New("empty team name") } - if err = IsUsableTeamName(t.Name); err != nil { + if err = organization.IsUsableTeamName(t.Name); err != nil { return err } @@ -561,19 +271,19 @@ func NewTeam(t *Team) (err error) { return err } if !has { - return ErrOrgNotExist{t.OrgID, ""} + return organization.ErrOrgNotExist{ID: t.OrgID} } t.LowerName = strings.ToLower(t.Name) has, err = db.GetEngine(db.DefaultContext). Where("org_id=?", t.OrgID). And("lower_name=?", t.LowerName). - Get(new(Team)) + Get(new(organization.Team)) if err != nil { return err } if has { - return ErrTeamAlreadyExist{t.OrgID, t.LowerName} + return organization.ErrTeamAlreadyExist{OrgID: t.OrgID, Name: t.LowerName} } ctx, committer, err := db.TxContext() @@ -598,7 +308,7 @@ func NewTeam(t *Team) (err error) { // Add all repositories to the team if it has access to all of them. if t.IncludesAllRepositories { - err = t.addAllRepositories(ctx) + err = addAllRepositories(ctx, t) if err != nil { return fmt.Errorf("addAllRepositories: %v", err) } @@ -611,81 +321,8 @@ func NewTeam(t *Team) (err error) { return committer.Commit() } -func getTeam(e db.Engine, orgID int64, name string) (*Team, error) { - t := &Team{ - OrgID: orgID, - LowerName: strings.ToLower(name), - } - has, err := e.Get(t) - if err != nil { - return nil, err - } else if !has { - return nil, ErrTeamNotExist{orgID, 0, name} - } - return t, nil -} - -// GetTeam returns team by given team name and organization. -func GetTeam(orgID int64, name string) (*Team, error) { - return getTeam(db.GetEngine(db.DefaultContext), orgID, name) -} - -// GetTeamIDsByNames returns a slice of team ids corresponds to names. 
-func GetTeamIDsByNames(orgID int64, names []string, ignoreNonExistent bool) ([]int64, error) { - ids := make([]int64, 0, len(names)) - for _, name := range names { - u, err := GetTeam(orgID, name) - if err != nil { - if ignoreNonExistent { - continue - } else { - return nil, err - } - } - ids = append(ids, u.ID) - } - return ids, nil -} - -// getOwnerTeam returns team by given team name and organization. -func getOwnerTeam(e db.Engine, orgID int64) (*Team, error) { - return getTeam(e, orgID, ownerTeamName) -} - -func getTeamByID(e db.Engine, teamID int64) (*Team, error) { - t := new(Team) - has, err := e.ID(teamID).Get(t) - if err != nil { - return nil, err - } else if !has { - return nil, ErrTeamNotExist{0, teamID, ""} - } - return t, nil -} - -// GetTeamByID returns team by given ID. -func GetTeamByID(teamID int64) (*Team, error) { - return getTeamByID(db.GetEngine(db.DefaultContext), teamID) -} - -// GetTeamNamesByID returns team's lower name from a list of team ids. -func GetTeamNamesByID(teamIDs []int64) ([]string, error) { - if len(teamIDs) == 0 { - return []string{}, nil - } - - var teamNames []string - err := db.GetEngine(db.DefaultContext).Table("team"). - Select("lower_name"). - In("id", teamIDs). - Asc("name"). - Find(&teamNames) - - return teamNames, err -} - // UpdateTeam updates information of team. -func UpdateTeam(t *Team, authChanged, includeAllChanged bool) (err error) { +func UpdateTeam(t *organization.Team, authChanged, includeAllChanged bool) (err error) { if len(t.Name) == 0 { return errors.New("empty team name") } @@ -706,11 +343,11 @@ func UpdateTeam(t *Team, authChanged, includeAllChanged bool) (err error) { Where("org_id=?", t.OrgID). And("lower_name=?", t.LowerName). And("id!=?", t.ID). - Get(new(Team)) + Get(new(organization.Team)) if err != nil { return err } else if has { - return ErrTeamAlreadyExist{t.OrgID, t.LowerName} + return organization.ErrTeamAlreadyExist{OrgID: t.OrgID, Name: t.LowerName} } if _, err = sess.ID(t.ID).Cols("name", "lower_name", "description", @@ -726,7 +363,7 @@ func UpdateTeam(t *Team, authChanged, includeAllChanged bool) (err error) { // Delete team-unit. if _, err := sess. Where("team_id=?", t.ID). - Delete(new(TeamUnit)); err != nil { + Delete(new(organization.TeamUnit)); err != nil { return err } if _, err = sess.Cols("org_id", "team_id", "type", "access_mode").Insert(&t.Units); err != nil { @@ -736,7 +373,7 @@ func UpdateTeam(t *Team, authChanged, includeAllChanged bool) (err error) { // Update access for team members if needed. if authChanged { - if err = t.getRepositories(sess); err != nil { + if err = t.GetRepositoriesCtx(ctx); err != nil { return fmt.Errorf("getRepositories: %v", err) } @@ -749,7 +386,7 @@ func UpdateTeam(t *Team, authChanged, includeAllChanged bool) (err error) { // Add all repositories to the team if it has access to all of them. if includeAllChanged && t.IncludesAllRepositories { - err = t.addAllRepositories(ctx) + err = addAllRepositories(ctx, t) if err != nil { return fmt.Errorf("addAllRepositories: %v", err) } @@ -760,11 +397,7 @@ func UpdateTeam(t *Team, authChanged, includeAllChanged bool) (err error) { // DeleteTeam deletes given team. // It's caller's responsibility to assign organization ID. 
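Note: the rewritten DeleteTeam below picks up two behaviour changes — it scrubs the deleted team's ID out of branch-protection whitelists via util.RemoveIDFromList, and it only detaches repositories when the team does not include all of them. The util helper itself is not shown in this diff; a hedged sketch of what it plausibly does, based only on how it is called here:

// removeIDFromList is an assumed stand-in for util.RemoveIDFromList, inferred
// from its call sites in this hunk: it returns list without id and reports
// whether anything was dropped.
func removeIDFromList(list []int64, id int64) ([]int64, bool) {
	out := make([]int64, 0, len(list))
	removed := false
	for _, v := range list {
		if v == id {
			removed = true
			continue
		}
		out = append(out, v)
	}
	return out, removed
}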
-func DeleteTeam(t *Team) error { - if err := t.GetRepositories(&SearchOrgTeamOptions{}); err != nil { - return err - } - +func DeleteTeam(t *organization.Team) error { ctx, committer, err := db.TxContext() if err != nil { return err @@ -772,31 +405,72 @@ func DeleteTeam(t *Team) error { defer committer.Close() sess := db.GetEngine(ctx) - if err := t.getMembers(sess); err != nil { + if err := t.GetRepositoriesCtx(ctx); err != nil { return err } - if err := t.removeAllRepositories(ctx); err != nil { + if err := t.GetMembersCtx(ctx); err != nil { return err } + // update branch protections + { + protections := make([]*ProtectedBranch, 0, 10) + err := sess.In("repo_id", + builder.Select("id").From("repository").Where(builder.Eq{"owner_id": t.OrgID})). + Find(&protections) + if err != nil { + return fmt.Errorf("findProtectedBranches: %v", err) + } + for _, p := range protections { + var matched1, matched2, matched3 bool + if len(p.WhitelistTeamIDs) != 0 { + p.WhitelistTeamIDs, matched1 = util.RemoveIDFromList( + p.WhitelistTeamIDs, t.ID) + } + if len(p.ApprovalsWhitelistTeamIDs) != 0 { + p.ApprovalsWhitelistTeamIDs, matched2 = util.RemoveIDFromList( + p.ApprovalsWhitelistTeamIDs, t.ID) + } + if len(p.MergeWhitelistTeamIDs) != 0 { + p.MergeWhitelistTeamIDs, matched3 = util.RemoveIDFromList( + p.MergeWhitelistTeamIDs, t.ID) + } + if matched1 || matched2 || matched3 { + if _, err = sess.ID(p.ID).Cols( + "whitelist_team_i_ds", + "merge_whitelist_team_i_ds", + "approvals_whitelist_team_i_ds", + ).Update(p); err != nil { + return fmt.Errorf("updateProtectedBranches: %v", err) + } + } + } + } + + if !t.IncludesAllRepositories { + if err := removeAllRepositories(ctx, t); err != nil { + return err + } + } + // Delete team-user. if _, err := sess. Where("org_id=?", t.OrgID). Where("team_id=?", t.ID). - Delete(new(TeamUser)); err != nil { + Delete(new(organization.TeamUser)); err != nil { return err } // Delete team-unit. if _, err := sess. Where("team_id=?", t.ID). - Delete(new(TeamUnit)); err != nil { + Delete(new(organization.TeamUnit)); err != nil { return err } // Delete team. - if _, err := sess.ID(t.ID).Delete(new(Team)); err != nil { + if _, err := sess.ID(t.ID).Delete(new(organization.Team)); err != nil { return err } // Update organization number of teams. @@ -807,103 +481,15 @@ func DeleteTeam(t *Team) error { return committer.Commit() } -// ___________ ____ ___ -// \__ ___/___ _____ _____ | | \______ ___________ -// | |_/ __ \\__ \ / \| | / ___// __ \_ __ \ -// | |\ ___/ / __ \| Y Y \ | /\___ \\ ___/| | \/ -// |____| \___ >____ /__|_| /______//____ >\___ >__| -// \/ \/ \/ \/ \/ - -// TeamUser represents an team-user relation. -type TeamUser struct { - ID int64 `xorm:"pk autoincr"` - OrgID int64 `xorm:"INDEX"` - TeamID int64 `xorm:"UNIQUE(s)"` - UID int64 `xorm:"UNIQUE(s)"` -} - -func isTeamMember(e db.Engine, orgID, teamID, userID int64) (bool, error) { - return e. - Where("org_id=?", orgID). - And("team_id=?", teamID). - And("uid=?", userID). - Table("team_user"). - Exist() -} - -// IsTeamMember returns true if given user is a member of team. -func IsTeamMember(orgID, teamID, userID int64) (bool, error) { - return isTeamMember(db.GetEngine(db.DefaultContext), orgID, teamID, userID) -} - -func getTeamUsersByTeamID(e db.Engine, teamID int64) ([]*TeamUser, error) { - teamUsers := make([]*TeamUser, 0, 10) - return teamUsers, e. - Where("team_id=?", teamID). 
- Find(&teamUsers) -} - -func getTeamMembers(e db.Engine, teamID int64) (_ []*user_model.User, err error) { - teamUsers, err := getTeamUsersByTeamID(e, teamID) - if err != nil { - return nil, fmt.Errorf("get team-users: %v", err) - } - members := make([]*user_model.User, len(teamUsers)) - for i, teamUser := range teamUsers { - member, err := user_model.GetUserByIDEngine(e, teamUser.UID) - if err != nil { - return nil, fmt.Errorf("get user '%d': %v", teamUser.UID, err) - } - members[i] = member - } - sort.Slice(members, func(i, j int) bool { - return members[i].DisplayName() < members[j].DisplayName() - }) - return members, nil -} - -// GetTeamMembers returns all members in given team of organization. -func GetTeamMembers(teamID int64) ([]*user_model.User, error) { - return getTeamMembers(db.GetEngine(db.DefaultContext), teamID) -} - -func getUserOrgTeams(e db.Engine, orgID, userID int64) (teams []*Team, err error) { - return teams, e. - Join("INNER", "team_user", "team_user.team_id = team.id"). - Where("team.org_id = ?", orgID). - And("team_user.uid=?", userID). - Find(&teams) -} - -func getUserRepoTeams(e db.Engine, orgID, userID, repoID int64) (teams []*Team, err error) { - return teams, e. - Join("INNER", "team_user", "team_user.team_id = team.id"). - Join("INNER", "team_repo", "team_repo.team_id = team.id"). - Where("team.org_id = ?", orgID). - And("team_user.uid=?", userID). - And("team_repo.repo_id=?", repoID). - Find(&teams) -} - -// GetUserOrgTeams returns all teams that user belongs to in given organization. -func GetUserOrgTeams(orgID, userID int64) ([]*Team, error) { - return getUserOrgTeams(db.GetEngine(db.DefaultContext), orgID, userID) -} - // AddTeamMember adds new membership of given team to given organization, // the user will have membership to given organization automatically when needed. -func AddTeamMember(team *Team, userID int64) error { - isAlreadyMember, err := IsTeamMember(team.OrgID, team.ID, userID) +func AddTeamMember(team *organization.Team, userID int64) error { + isAlreadyMember, err := organization.IsTeamMember(db.DefaultContext, team.OrgID, team.ID, userID) if err != nil || isAlreadyMember { return err } - if err := AddOrgUser(team.OrgID, userID); err != nil { - return err - } - - // Get team and its repositories. - if err := team.GetRepositories(&SearchOrgTeamOptions{}); err != nil { + if err := organization.AddOrgUser(team.OrgID, userID); err != nil { return err } @@ -915,52 +501,86 @@ func AddTeamMember(team *Team, userID int64) error { sess := db.GetEngine(ctx) - if err := db.Insert(ctx, &TeamUser{ + if err := db.Insert(ctx, &organization.TeamUser{ UID: userID, OrgID: team.OrgID, TeamID: team.ID, }); err != nil { return err - } else if _, err := sess.Incr("num_members").ID(team.ID).Update(new(Team)); err != nil { + } else if _, err := sess.Incr("num_members").ID(team.ID).Update(new(organization.Team)); err != nil { return err } team.NumMembers++ // Give access to team repositories. - for _, repo := range team.Repos { - if err := recalculateUserAccess(ctx, repo, userID); err != nil { - return err - } - if setting.Service.AutoWatchNewRepos { - if err = repo_model.WatchRepoCtx(ctx, userID, repo.ID, true); err != nil { - return err + // update exist access if mode become bigger + subQuery := builder.Select("repo_id").From("team_repo"). + Where(builder.Eq{"team_id": team.ID}) + + if _, err := sess.Where("user_id=?", userID). + In("repo_id", subQuery). + And("mode < ?", team.AccessMode). + SetExpr("mode", team.AccessMode). 
+ Update(new(Access)); err != nil { + return fmt.Errorf("update user accesses: %v", err) + } + + // for not exist access + var repoIDs []int64 + accessSubQuery := builder.Select("repo_id").From("access").Where(builder.Eq{"user_id": userID}) + if err := sess.SQL(subQuery.And(builder.NotIn("repo_id", accessSubQuery))).Find(&repoIDs); err != nil { + return fmt.Errorf("select id accesses: %v", err) + } + + accesses := make([]*Access, 0, 100) + for i, repoID := range repoIDs { + accesses = append(accesses, &Access{RepoID: repoID, UserID: userID, Mode: team.AccessMode}) + if (i%100 == 0 || i == len(repoIDs)-1) && len(accesses) > 0 { + if err = db.Insert(ctx, accesses); err != nil { + return fmt.Errorf("insert new user accesses: %v", err) } + accesses = accesses[:0] } } + // watch could be failed, so run it in a goroutine + if setting.Service.AutoWatchNewRepos { + // Get team and its repositories. + if err := team.GetRepositoriesCtx(db.DefaultContext); err != nil { + log.Error("getRepositories failed: %v", err) + } + go func(repos []*repo_model.Repository) { + for _, repo := range repos { + if err = repo_model.WatchRepoCtx(db.DefaultContext, userID, repo.ID, true); err != nil { + log.Error("watch repo failed: %v", err) + } + } + }(team.Repos) + } + return committer.Commit() } -func removeTeamMember(ctx context.Context, team *Team, userID int64) error { +func removeTeamMember(ctx context.Context, team *organization.Team, userID int64) error { e := db.GetEngine(ctx) - isMember, err := isTeamMember(e, team.OrgID, team.ID, userID) + isMember, err := organization.IsTeamMember(ctx, team.OrgID, team.ID, userID) if err != nil || !isMember { return err } // Check if the user to delete is the last member in owner team. if team.IsOwnerTeam() && team.NumMembers == 1 { - return ErrLastOrgOwner{UID: userID} + return organization.ErrLastOrgOwner{UID: userID} } team.NumMembers-- - if err := team.getRepositories(e); err != nil { + if err := team.GetRepositoriesCtx(ctx); err != nil { return err } - if _, err := e.Delete(&TeamUser{ + if _, err := e.Delete(&organization.TeamUser{ UID: userID, OrgID: team.OrgID, TeamID: team.ID, @@ -991,7 +611,7 @@ func removeTeamMember(ctx context.Context, team *Team, userID int64) error { } // Check if the user is a member of any team in the organization. - if count, err := e.Count(&TeamUser{ + if count, err := e.Count(&organization.TeamUser{ UID: userID, OrgID: team.OrgID, }); err != nil { @@ -1004,7 +624,7 @@ func removeTeamMember(ctx context.Context, team *Team, userID int64) error { } // RemoveTeamMember removes member from given team of given organization. -func RemoveTeamMember(team *Team, userID int64) error { +func RemoveTeamMember(team *organization.Team, userID int64) error { ctx, committer, err := db.TxContext() if err != nil { return err @@ -1015,125 +635,3 @@ func RemoveTeamMember(team *Team, userID int64) error { } return committer.Commit() } - -// IsUserInTeams returns if a user in some teams -func IsUserInTeams(userID int64, teamIDs []int64) (bool, error) { - return isUserInTeams(db.GetEngine(db.DefaultContext), userID, teamIDs) -} - -func isUserInTeams(e db.Engine, userID int64, teamIDs []int64) (bool, error) { - return e.Where("uid=?", userID).In("team_id", teamIDs).Exist(new(TeamUser)) -} - -// UsersInTeamsCount counts the number of users which are in userIDs and teamIDs -func UsersInTeamsCount(userIDs, teamIDs []int64) (int64, error) { - var ids []int64 - if err := db.GetEngine(db.DefaultContext).In("uid", userIDs).In("team_id", teamIDs). - Table("team_user"). 
- Cols("uid").GroupBy("uid").Find(&ids); err != nil { - return 0, err - } - return int64(len(ids)), nil -} - -// ___________ __________ -// \__ ___/___ _____ _____\______ \ ____ ______ ____ -// | |_/ __ \\__ \ / \| _// __ \\____ \ / _ \ -// | |\ ___/ / __ \| Y Y \ | \ ___/| |_> > <_> ) -// |____| \___ >____ /__|_| /____|_ /\___ > __/ \____/ -// \/ \/ \/ \/ \/|__| - -// TeamRepo represents an team-repository relation. -type TeamRepo struct { - ID int64 `xorm:"pk autoincr"` - OrgID int64 `xorm:"INDEX"` - TeamID int64 `xorm:"UNIQUE(s)"` - RepoID int64 `xorm:"UNIQUE(s)"` -} - -func hasTeamRepo(e db.Engine, orgID, teamID, repoID int64) bool { - has, _ := e. - Where("org_id=?", orgID). - And("team_id=?", teamID). - And("repo_id=?", repoID). - Get(new(TeamRepo)) - return has -} - -// HasTeamRepo returns true if given repository belongs to team. -func HasTeamRepo(orgID, teamID, repoID int64) bool { - return hasTeamRepo(db.GetEngine(db.DefaultContext), orgID, teamID, repoID) -} - -func addTeamRepo(e db.Engine, orgID, teamID, repoID int64) error { - _, err := e.InsertOne(&TeamRepo{ - OrgID: orgID, - TeamID: teamID, - RepoID: repoID, - }) - return err -} - -func removeTeamRepo(e db.Engine, teamID, repoID int64) error { - _, err := e.Delete(&TeamRepo{ - TeamID: teamID, - RepoID: repoID, - }) - return err -} - -// GetTeamsWithAccessToRepo returns all teams in an organization that have given access level to the repository. -func GetTeamsWithAccessToRepo(orgID, repoID int64, mode perm.AccessMode) ([]*Team, error) { - teams := make([]*Team, 0, 5) - return teams, db.GetEngine(db.DefaultContext).Where("team.authorize >= ?", mode). - Join("INNER", "team_repo", "team_repo.team_id = team.id"). - And("team_repo.org_id = ?", orgID). - And("team_repo.repo_id = ?", repoID). 
- Find(&teams) -} - -// ___________ ____ ___ .__ __ -// \__ ___/___ _____ _____ | | \____ |__|/ |_ -// | |_/ __ \\__ \ / \| | / \| \ __\ -// | |\ ___/ / __ \| Y Y \ | / | \ || | -// |____| \___ >____ /__|_| /______/|___| /__||__| -// \/ \/ \/ \/ - -// TeamUnit describes all units of a repository -type TeamUnit struct { - ID int64 `xorm:"pk autoincr"` - OrgID int64 `xorm:"INDEX"` - TeamID int64 `xorm:"UNIQUE(s)"` - Type unit.Type `xorm:"UNIQUE(s)"` - AccessMode perm.AccessMode -} - -// Unit returns Unit -func (t *TeamUnit) Unit() unit.Unit { - return unit.Units[t.Type] -} - -func getUnitsByTeamID(e db.Engine, teamID int64) (units []*TeamUnit, err error) { - return units, e.Where("team_id = ?", teamID).Find(&units) -} - -// UpdateTeamUnits updates a teams's units -func UpdateTeamUnits(team *Team, units []TeamUnit) (err error) { - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() - - if _, err = db.GetEngine(ctx).Where("team_id = ?", team.ID).Delete(new(TeamUnit)); err != nil { - return err - } - - if len(units) > 0 { - if err = db.Insert(ctx, units); err != nil { - return err - } - } - - return committer.Commit() -} diff --git a/models/org_team_test.go b/models/org_team_test.go index cf3a797991..e125f3c65b 100644 --- a/models/org_team_test.go +++ b/models/org_team_test.go @@ -9,6 +9,7 @@ import ( "testing" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" @@ -17,68 +18,14 @@ import ( "github.com/stretchr/testify/assert" ) -func TestTeam_IsOwnerTeam(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 1}).(*Team) - assert.True(t, team.IsOwnerTeam()) - - team = unittest.AssertExistsAndLoadBean(t, &Team{ID: 2}).(*Team) - assert.False(t, team.IsOwnerTeam()) -} - -func TestTeam_IsMember(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 1}).(*Team) - assert.True(t, team.IsMember(2)) - assert.False(t, team.IsMember(4)) - assert.False(t, team.IsMember(unittest.NonexistentID)) - - team = unittest.AssertExistsAndLoadBean(t, &Team{ID: 2}).(*Team) - assert.True(t, team.IsMember(2)) - assert.True(t, team.IsMember(4)) - assert.False(t, team.IsMember(unittest.NonexistentID)) -} - -func TestTeam_GetRepositories(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - test := func(teamID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) - assert.NoError(t, team.GetRepositories(&SearchOrgTeamOptions{})) - assert.Len(t, team.Repos, team.NumRepos) - for _, repo := range team.Repos { - unittest.AssertExistsAndLoadBean(t, &TeamRepo{TeamID: teamID, RepoID: repo.ID}) - } - } - test(1) - test(3) -} - -func TestTeam_GetMembers(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - test := func(teamID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) - assert.NoError(t, team.GetMembers(&SearchMembersOptions{})) - assert.Len(t, team.Members, team.NumMembers) - for _, member := range team.Members { - unittest.AssertExistsAndLoadBean(t, &TeamUser{UID: member.ID, TeamID: teamID}) - } - } - test(1) - test(3) -} - func TestTeam_AddMember(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) test := func(teamID, userID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 
teamID}).(*Team) - assert.NoError(t, team.AddMember(userID)) - unittest.AssertExistsAndLoadBean(t, &TeamUser{UID: userID, TeamID: teamID}) - unittest.CheckConsistencyFor(t, &Team{ID: teamID}, &user_model.User{ID: team.OrgID}) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) + assert.NoError(t, AddTeamMember(team, userID)) + unittest.AssertExistsAndLoadBean(t, &organization.TeamUser{UID: userID, TeamID: teamID}) + unittest.CheckConsistencyFor(t, &organization.Team{ID: teamID}, &user_model.User{ID: team.OrgID}) } test(1, 2) test(1, 4) @@ -89,27 +36,27 @@ func TestTeam_RemoveMember(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) testSuccess := func(teamID, userID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) - assert.NoError(t, team.RemoveMember(userID)) - unittest.AssertNotExistsBean(t, &TeamUser{UID: userID, TeamID: teamID}) - unittest.CheckConsistencyFor(t, &Team{ID: teamID}) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) + assert.NoError(t, RemoveTeamMember(team, userID)) + unittest.AssertNotExistsBean(t, &organization.TeamUser{UID: userID, TeamID: teamID}) + unittest.CheckConsistencyFor(t, &organization.Team{ID: teamID}) } testSuccess(1, 4) testSuccess(2, 2) testSuccess(3, 2) testSuccess(3, unittest.NonexistentID) - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 1}).(*Team) - err := team.RemoveMember(2) - assert.True(t, IsErrLastOrgOwner(err)) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 1}).(*organization.Team) + err := RemoveTeamMember(team, 2) + assert.True(t, organization.IsErrLastOrgOwner(err)) } func TestTeam_HasRepository(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) test := func(teamID, repoID int64, expected bool) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) - assert.Equal(t, expected, team.HasRepository(repoID)) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) + assert.Equal(t, expected, HasRepository(team, repoID)) } test(1, 1, false) test(1, 3, true) @@ -124,29 +71,29 @@ func TestTeam_AddRepository(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) testSuccess := func(teamID, repoID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID}).(*repo_model.Repository) - assert.NoError(t, team.AddRepository(repo)) - unittest.AssertExistsAndLoadBean(t, &TeamRepo{TeamID: teamID, RepoID: repoID}) - unittest.CheckConsistencyFor(t, &Team{ID: teamID}, &repo_model.Repository{ID: repoID}) + assert.NoError(t, AddRepository(team, repo)) + unittest.AssertExistsAndLoadBean(t, &organization.TeamRepo{TeamID: teamID, RepoID: repoID}) + unittest.CheckConsistencyFor(t, &organization.Team{ID: teamID}, &repo_model.Repository{ID: repoID}) } testSuccess(2, 3) testSuccess(2, 5) - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 1}).(*Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 1}).(*organization.Team) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) - assert.Error(t, team.AddRepository(repo)) - unittest.CheckConsistencyFor(t, &Team{ID: 1}, &repo_model.Repository{ID: 1}) + assert.Error(t, AddRepository(team, repo)) + 
unittest.CheckConsistencyFor(t, &organization.Team{ID: 1}, &repo_model.Repository{ID: 1}) } func TestTeam_RemoveRepository(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) testSuccess := func(teamID, repoID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) - assert.NoError(t, team.RemoveRepository(repoID)) - unittest.AssertNotExistsBean(t, &TeamRepo{TeamID: teamID, RepoID: repoID}) - unittest.CheckConsistencyFor(t, &Team{ID: teamID}, &repo_model.Repository{ID: repoID}) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) + assert.NoError(t, RemoveRepository(team, repoID)) + unittest.AssertNotExistsBean(t, &organization.TeamRepo{TeamID: teamID, RepoID: repoID}) + unittest.CheckConsistencyFor(t, &organization.Team{ID: teamID}, &repo_model.Repository{ID: repoID}) } testSuccess(2, 3) testSuccess(2, 5) @@ -154,97 +101,62 @@ func TestTeam_RemoveRepository(t *testing.T) { } func TestIsUsableTeamName(t *testing.T) { - assert.NoError(t, IsUsableTeamName("usable")) - assert.True(t, db.IsErrNameReserved(IsUsableTeamName("new"))) + assert.NoError(t, organization.IsUsableTeamName("usable")) + assert.True(t, db.IsErrNameReserved(organization.IsUsableTeamName("new"))) } func TestNewTeam(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) const teamName = "newTeamName" - team := &Team{Name: teamName, OrgID: 3} + team := &organization.Team{Name: teamName, OrgID: 3} assert.NoError(t, NewTeam(team)) - unittest.AssertExistsAndLoadBean(t, &Team{Name: teamName}) - unittest.CheckConsistencyFor(t, &Team{}, &user_model.User{ID: team.OrgID}) -} - -func TestGetTeam(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - testSuccess := func(orgID int64, name string) { - team, err := GetTeam(orgID, name) - assert.NoError(t, err) - assert.EqualValues(t, orgID, team.OrgID) - assert.Equal(t, name, team.Name) - } - testSuccess(3, "Owners") - testSuccess(3, "team1") - - _, err := GetTeam(3, "nonexistent") - assert.Error(t, err) - _, err = GetTeam(unittest.NonexistentID, "Owners") - assert.Error(t, err) -} - -func TestGetTeamByID(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - testSuccess := func(teamID int64) { - team, err := GetTeamByID(teamID) - assert.NoError(t, err) - assert.EqualValues(t, teamID, team.ID) - } - testSuccess(1) - testSuccess(2) - testSuccess(3) - testSuccess(4) - - _, err := GetTeamByID(unittest.NonexistentID) - assert.Error(t, err) + unittest.AssertExistsAndLoadBean(t, &organization.Team{Name: teamName}) + unittest.CheckConsistencyFor(t, &organization.Team{}, &user_model.User{ID: team.OrgID}) } func TestUpdateTeam(t *testing.T) { // successful update assert.NoError(t, unittest.PrepareTestDatabase()) - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 2}).(*Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 2}).(*organization.Team) team.LowerName = "newname" team.Name = "newName" team.Description = strings.Repeat("A long description!", 100) team.AccessMode = perm.AccessModeAdmin assert.NoError(t, UpdateTeam(team, true, false)) - team = unittest.AssertExistsAndLoadBean(t, &Team{Name: "newName"}).(*Team) + team = unittest.AssertExistsAndLoadBean(t, &organization.Team{Name: "newName"}).(*organization.Team) assert.True(t, strings.HasPrefix(team.Description, "A long description!")) access := unittest.AssertExistsAndLoadBean(t, &Access{UserID: 4, RepoID: 3}).(*Access) assert.EqualValues(t, perm.AccessModeAdmin, access.Mode) - 
unittest.CheckConsistencyFor(t, &Team{ID: team.ID}) + unittest.CheckConsistencyFor(t, &organization.Team{ID: team.ID}) } func TestUpdateTeam2(t *testing.T) { // update to already-existing team assert.NoError(t, unittest.PrepareTestDatabase()) - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 2}).(*Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 2}).(*organization.Team) team.LowerName = "owners" team.Name = "Owners" team.Description = strings.Repeat("A long description!", 100) err := UpdateTeam(team, true, false) - assert.True(t, IsErrTeamAlreadyExist(err)) + assert.True(t, organization.IsErrTeamAlreadyExist(err)) - unittest.CheckConsistencyFor(t, &Team{ID: team.ID}) + unittest.CheckConsistencyFor(t, &organization.Team{ID: team.ID}) } func TestDeleteTeam(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 2}).(*Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 2}).(*organization.Team) assert.NoError(t, DeleteTeam(team)) - unittest.AssertNotExistsBean(t, &Team{ID: team.ID}) - unittest.AssertNotExistsBean(t, &TeamRepo{TeamID: team.ID}) - unittest.AssertNotExistsBean(t, &TeamUser{TeamID: team.ID}) + unittest.AssertNotExistsBean(t, &organization.Team{ID: team.ID}) + unittest.AssertNotExistsBean(t, &organization.TeamRepo{TeamID: team.ID}) + unittest.AssertNotExistsBean(t, &organization.TeamUser{TeamID: team.ID}) // check that team members don't have "leftover" access to repos user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}).(*user_model.User) @@ -254,78 +166,14 @@ func TestDeleteTeam(t *testing.T) { assert.True(t, accessMode < perm.AccessModeWrite) } -func TestIsTeamMember(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - test := func(orgID, teamID, userID int64, expected bool) { - isMember, err := IsTeamMember(orgID, teamID, userID) - assert.NoError(t, err) - assert.Equal(t, expected, isMember) - } - - test(3, 1, 2, true) - test(3, 1, 4, false) - test(3, 1, unittest.NonexistentID, false) - - test(3, 2, 2, true) - test(3, 2, 4, true) - - test(3, unittest.NonexistentID, unittest.NonexistentID, false) - test(unittest.NonexistentID, unittest.NonexistentID, unittest.NonexistentID, false) -} - -func TestGetTeamMembers(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - test := func(teamID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) - members, err := GetTeamMembers(teamID) - assert.NoError(t, err) - assert.Len(t, members, team.NumMembers) - for _, member := range members { - unittest.AssertExistsAndLoadBean(t, &TeamUser{UID: member.ID, TeamID: teamID}) - } - } - test(1) - test(3) -} - -func TestGetUserTeams(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - test := func(userID int64) { - teams, _, err := GetUserTeams(&GetUserTeamOptions{UserID: userID}) - assert.NoError(t, err) - for _, team := range teams { - unittest.AssertExistsAndLoadBean(t, &TeamUser{TeamID: team.ID, UID: userID}) - } - } - test(2) - test(5) - test(unittest.NonexistentID) -} - -func TestGetUserOrgTeams(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - test := func(orgID, userID int64) { - teams, err := GetUserOrgTeams(orgID, userID) - assert.NoError(t, err) - for _, team := range teams { - assert.EqualValues(t, orgID, team.OrgID) - unittest.AssertExistsAndLoadBean(t, &TeamUser{TeamID: team.ID, UID: userID}) - } - } - test(3, 2) - test(3, 4) - test(3, 
unittest.NonexistentID) -} - func TestAddTeamMember(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) test := func(teamID, userID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) assert.NoError(t, AddTeamMember(team, userID)) - unittest.AssertExistsAndLoadBean(t, &TeamUser{UID: userID, TeamID: teamID}) - unittest.CheckConsistencyFor(t, &Team{ID: teamID}, &user_model.User{ID: team.OrgID}) + unittest.AssertExistsAndLoadBean(t, &organization.TeamUser{UID: userID, TeamID: teamID}) + unittest.CheckConsistencyFor(t, &organization.Team{ID: teamID}, &user_model.User{ID: team.OrgID}) } test(1, 2) test(1, 4) @@ -336,47 +184,17 @@ func TestRemoveTeamMember(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) testSuccess := func(teamID, userID int64) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) assert.NoError(t, RemoveTeamMember(team, userID)) - unittest.AssertNotExistsBean(t, &TeamUser{UID: userID, TeamID: teamID}) - unittest.CheckConsistencyFor(t, &Team{ID: teamID}) + unittest.AssertNotExistsBean(t, &organization.TeamUser{UID: userID, TeamID: teamID}) + unittest.CheckConsistencyFor(t, &organization.Team{ID: teamID}) } testSuccess(1, 4) testSuccess(2, 2) testSuccess(3, 2) testSuccess(3, unittest.NonexistentID) - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 1}).(*Team) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 1}).(*organization.Team) err := RemoveTeamMember(team, 2) - assert.True(t, IsErrLastOrgOwner(err)) -} - -func TestHasTeamRepo(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - test := func(teamID, repoID int64, expected bool) { - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) - assert.Equal(t, expected, HasTeamRepo(team.OrgID, teamID, repoID)) - } - test(1, 1, false) - test(1, 3, true) - test(1, 5, true) - test(1, unittest.NonexistentID, false) - - test(2, 3, true) - test(2, 5, false) -} - -func TestUsersInTeamsCount(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - test := func(teamIDs, userIDs []int64, expected int64) { - count, err := UsersInTeamsCount(teamIDs, userIDs) - assert.NoError(t, err) - assert.Equal(t, expected, count) - } - - test([]int64{2}, []int64{1, 2, 3, 4}, 1) // only userid 2 - test([]int64{1, 2, 3, 4, 5}, []int64{2, 5}, 2) // userid 2,4 - test([]int64{1, 2, 3, 4, 5}, []int64{2, 3, 5}, 3) // userid 2,4,5 + assert.True(t, organization.IsErrLastOrgOwner(err)) } diff --git a/models/org_test.go b/models/org_test.go index ec324cb71a..4d8831858c 100644 --- a/models/org_test.go +++ b/models/org_test.go @@ -8,176 +8,81 @@ import ( "testing" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/structs" "github.com/stretchr/testify/assert" ) -func TestUser_IsOwnedBy(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - for _, testCase := range []struct { - OrgID int64 - UserID int64 - ExpectedOwner bool - }{ - {3, 2, true}, - {3, 1, false}, - {3, 3, false}, - {3, 4, false}, - {2, 2, false}, // user2 is not an organization - {2, 3, false}, - } { - org := 
unittest.AssertExistsAndLoadBean(t, &Organization{ID: testCase.OrgID}).(*Organization) - isOwner, err := org.IsOwnedBy(testCase.UserID) - assert.NoError(t, err) - assert.Equal(t, testCase.ExpectedOwner, isOwner) - } -} - -func TestUser_IsOrgMember(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - for _, testCase := range []struct { - OrgID int64 - UserID int64 - ExpectedMember bool - }{ - {3, 2, true}, - {3, 4, true}, - {3, 1, false}, - {3, 3, false}, - {2, 2, false}, // user2 is not an organization - {2, 3, false}, - } { - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: testCase.OrgID}).(*Organization) - isMember, err := org.IsOrgMember(testCase.UserID) - assert.NoError(t, err) - assert.Equal(t, testCase.ExpectedMember, isMember) - } -} - -func TestUser_GetTeam(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - team, err := org.GetTeam("team1") - assert.NoError(t, err) - assert.Equal(t, org.ID, team.OrgID) - assert.Equal(t, "team1", team.LowerName) - - _, err = org.GetTeam("does not exist") - assert.True(t, IsErrTeamNotExist(err)) - - nonOrg := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 2}).(*Organization) - _, err = nonOrg.GetTeam("team") - assert.True(t, IsErrTeamNotExist(err)) -} - -func TestUser_GetOwnerTeam(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - team, err := org.GetOwnerTeam() - assert.NoError(t, err) - assert.Equal(t, org.ID, team.OrgID) - - nonOrg := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 2}).(*Organization) - _, err = nonOrg.GetOwnerTeam() - assert.True(t, IsErrTeamNotExist(err)) -} - -func TestUser_GetTeams(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - teams, err := org.LoadTeams() - assert.NoError(t, err) - if assert.Len(t, teams, 4) { - assert.Equal(t, int64(1), teams[0].ID) - assert.Equal(t, int64(2), teams[1].ID) - assert.Equal(t, int64(12), teams[2].ID) - assert.Equal(t, int64(7), teams[3].ID) - } -} - -func TestUser_GetMembers(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - members, _, err := org.GetMembers() - assert.NoError(t, err) - if assert.Len(t, members, 3) { - assert.Equal(t, int64(2), members[0].ID) - assert.Equal(t, int64(28), members[1].ID) - assert.Equal(t, int64(4), members[2].ID) - } -} - -func TestUser_AddMember(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - - // add a user that is not a member - unittest.AssertNotExistsBean(t, &OrgUser{UID: 5, OrgID: 3}) - prevNumMembers := org.NumMembers - assert.NoError(t, org.AddMember(5)) - unittest.AssertExistsAndLoadBean(t, &OrgUser{UID: 5, OrgID: 3}) - org = unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - assert.Equal(t, prevNumMembers+1, org.NumMembers) - - // add a user that is already a member - unittest.AssertExistsAndLoadBean(t, &OrgUser{UID: 4, OrgID: 3}) - prevNumMembers = org.NumMembers - assert.NoError(t, org.AddMember(4)) - unittest.AssertExistsAndLoadBean(t, &OrgUser{UID: 4, OrgID: 3}) - org = unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - assert.Equal(t, 
prevNumMembers, org.NumMembers) - - unittest.CheckConsistencyFor(t, &user_model.User{}) -} - func TestUser_RemoveMember(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}).(*organization.Organization) // remove a user that is a member - unittest.AssertExistsAndLoadBean(t, &OrgUser{UID: 4, OrgID: 3}) + unittest.AssertExistsAndLoadBean(t, &organization.OrgUser{UID: 4, OrgID: 3}) prevNumMembers := org.NumMembers - assert.NoError(t, org.RemoveMember(4)) - unittest.AssertNotExistsBean(t, &OrgUser{UID: 4, OrgID: 3}) - org = unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + assert.NoError(t, RemoveOrgUser(org.ID, 4)) + unittest.AssertNotExistsBean(t, &organization.OrgUser{UID: 4, OrgID: 3}) + org = unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}).(*organization.Organization) assert.Equal(t, prevNumMembers-1, org.NumMembers) // remove a user that is not a member - unittest.AssertNotExistsBean(t, &OrgUser{UID: 5, OrgID: 3}) + unittest.AssertNotExistsBean(t, &organization.OrgUser{UID: 5, OrgID: 3}) prevNumMembers = org.NumMembers - assert.NoError(t, org.RemoveMember(5)) - unittest.AssertNotExistsBean(t, &OrgUser{UID: 5, OrgID: 3}) - org = unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + assert.NoError(t, RemoveOrgUser(org.ID, 5)) + unittest.AssertNotExistsBean(t, &organization.OrgUser{UID: 5, OrgID: 3}) + org = unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}).(*organization.Organization) assert.Equal(t, prevNumMembers, org.NumMembers) - unittest.CheckConsistencyFor(t, &user_model.User{}, &Team{}) + unittest.CheckConsistencyFor(t, &user_model.User{}, &organization.Team{}) +} + +func TestRemoveOrgUser(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + testSuccess := func(orgID, userID int64) { + org := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: orgID}).(*user_model.User) + expectedNumMembers := org.NumMembers + if unittest.BeanExists(t, &organization.OrgUser{OrgID: orgID, UID: userID}) { + expectedNumMembers-- + } + assert.NoError(t, RemoveOrgUser(orgID, userID)) + unittest.AssertNotExistsBean(t, &organization.OrgUser{OrgID: orgID, UID: userID}) + org = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: orgID}).(*user_model.User) + assert.EqualValues(t, expectedNumMembers, org.NumMembers) + } + testSuccess(3, 4) + testSuccess(3, 4) + + err := RemoveOrgUser(7, 5) + assert.Error(t, err) + assert.True(t, organization.IsErrLastOrgOwner(err)) + unittest.AssertExistsAndLoadBean(t, &organization.OrgUser{OrgID: 7, UID: 5}) + unittest.CheckConsistencyFor(t, &user_model.User{}, &organization.Team{}) } func TestUser_RemoveOrgRepo(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}).(*organization.Organization) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: org.ID}).(*repo_model.Repository) // remove a repo that does belong to org - unittest.AssertExistsAndLoadBean(t, &TeamRepo{RepoID: repo.ID, OrgID: org.ID}) - assert.NoError(t, org.RemoveOrgRepo(repo.ID)) - unittest.AssertNotExistsBean(t, &TeamRepo{RepoID: repo.ID, OrgID: org.ID}) + unittest.AssertExistsAndLoadBean(t, &organization.TeamRepo{RepoID: 
repo.ID, OrgID: org.ID}) + assert.NoError(t, organization.RemoveOrgRepo(db.DefaultContext, org.ID, repo.ID)) + unittest.AssertNotExistsBean(t, &organization.TeamRepo{RepoID: repo.ID, OrgID: org.ID}) unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo.ID}) // repo should still exist // remove a repo that does not belong to org - assert.NoError(t, org.RemoveOrgRepo(repo.ID)) - unittest.AssertNotExistsBean(t, &TeamRepo{RepoID: repo.ID, OrgID: org.ID}) + assert.NoError(t, organization.RemoveOrgRepo(db.DefaultContext, org.ID, repo.ID)) + unittest.AssertNotExistsBean(t, &organization.TeamRepo{RepoID: repo.ID, OrgID: org.ID}) - assert.NoError(t, org.RemoveOrgRepo(unittest.NonexistentID)) + assert.NoError(t, organization.RemoveOrgRepo(db.DefaultContext, org.ID, unittest.NonexistentID)) unittest.CheckConsistencyFor(t, &user_model.User{ID: org.ID}, - &Team{OrgID: org.ID}, + &organization.Team{OrgID: org.ID}, &repo_model.Repository{ID: repo.ID}) } @@ -187,18 +92,18 @@ func TestCreateOrganization(t *testing.T) { owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) const newOrgName = "neworg" - org := &Organization{ + org := &organization.Organization{ Name: newOrgName, } unittest.AssertNotExistsBean(t, &user_model.User{Name: newOrgName, Type: user_model.UserTypeOrganization}) - assert.NoError(t, CreateOrganization(org, owner)) + assert.NoError(t, organization.CreateOrganization(org, owner)) org = unittest.AssertExistsAndLoadBean(t, - &Organization{Name: newOrgName, Type: user_model.UserTypeOrganization}).(*Organization) + &organization.Organization{Name: newOrgName, Type: user_model.UserTypeOrganization}).(*organization.Organization) ownerTeam := unittest.AssertExistsAndLoadBean(t, - &Team{Name: ownerTeamName, OrgID: org.ID}).(*Team) - unittest.AssertExistsAndLoadBean(t, &TeamUser{UID: owner.ID, TeamID: ownerTeam.ID}) - unittest.CheckConsistencyFor(t, &user_model.User{}, &Team{}) + &organization.Team{Name: organization.OwnerTeamName, OrgID: org.ID}).(*organization.Team) + unittest.AssertExistsAndLoadBean(t, &organization.TeamUser{UID: owner.ID, TeamID: ownerTeam.ID}) + unittest.CheckConsistencyFor(t, &user_model.User{}, &organization.Team{}) } func TestCreateOrganization2(t *testing.T) { @@ -207,16 +112,16 @@ func TestCreateOrganization2(t *testing.T) { owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}).(*user_model.User) const newOrgName = "neworg" - org := &Organization{ + org := &organization.Organization{ Name: newOrgName, } - unittest.AssertNotExistsBean(t, &Organization{Name: newOrgName, Type: user_model.UserTypeOrganization}) - err := CreateOrganization(org, owner) + unittest.AssertNotExistsBean(t, &organization.Organization{Name: newOrgName, Type: user_model.UserTypeOrganization}) + err := organization.CreateOrganization(org, owner) assert.Error(t, err) - assert.True(t, IsErrUserNotAllowedCreateOrg(err)) - unittest.AssertNotExistsBean(t, &Organization{Name: newOrgName, Type: user_model.UserTypeOrganization}) - unittest.CheckConsistencyFor(t, &Organization{}, &Team{}) + assert.True(t, organization.IsErrUserNotAllowedCreateOrg(err)) + unittest.AssertNotExistsBean(t, &organization.Organization{Name: newOrgName, Type: user_model.UserTypeOrganization}) + unittest.CheckConsistencyFor(t, &organization.Organization{}, &organization.Team{}) } func TestCreateOrganization3(t *testing.T) { @@ -224,12 +129,12 @@ func TestCreateOrganization3(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) owner := 
unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - org := &Organization{Name: "user3"} // should already exist + org := &organization.Organization{Name: "user3"} // should already exist unittest.AssertExistsAndLoadBean(t, &user_model.User{Name: org.Name}) // sanity check - err := CreateOrganization(org, owner) + err := organization.CreateOrganization(org, owner) assert.Error(t, err) assert.True(t, user_model.IsErrUserAlreadyExist(err)) - unittest.CheckConsistencyFor(t, &user_model.User{}, &Team{}) + unittest.CheckConsistencyFor(t, &user_model.User{}, &organization.Team{}) } func TestCreateOrganization4(t *testing.T) { @@ -237,210 +142,10 @@ func TestCreateOrganization4(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - err := CreateOrganization(&Organization{Name: "assets"}, owner) + err := organization.CreateOrganization(&organization.Organization{Name: "assets"}, owner) assert.Error(t, err) assert.True(t, db.IsErrNameReserved(err)) - unittest.CheckConsistencyFor(t, &Organization{}, &Team{}) -} - -func TestGetOrgByName(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - org, err := GetOrgByName("user3") - assert.NoError(t, err) - assert.EqualValues(t, 3, org.ID) - assert.Equal(t, "user3", org.Name) - - _, err = GetOrgByName("user2") // user2 is an individual - assert.True(t, IsErrOrgNotExist(err)) - - _, err = GetOrgByName("") // corner case - assert.True(t, IsErrOrgNotExist(err)) -} - -func TestCountOrganizations(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - expected, err := db.GetEngine(db.DefaultContext).Where("type=?", user_model.UserTypeOrganization).Count(&user_model.User{}) - assert.NoError(t, err) - assert.Equal(t, expected, CountOrganizations()) -} - -func TestIsOrganizationOwner(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - test := func(orgID, userID int64, expected bool) { - isOwner, err := IsOrganizationOwner(orgID, userID) - assert.NoError(t, err) - assert.EqualValues(t, expected, isOwner) - } - test(3, 2, true) - test(3, 3, false) - test(6, 5, true) - test(6, 4, false) - test(unittest.NonexistentID, unittest.NonexistentID, false) -} - -func TestIsOrganizationMember(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - test := func(orgID, userID int64, expected bool) { - isMember, err := IsOrganizationMember(orgID, userID) - assert.NoError(t, err) - assert.EqualValues(t, expected, isMember) - } - test(3, 2, true) - test(3, 3, false) - test(3, 4, true) - test(6, 5, true) - test(6, 4, false) - test(unittest.NonexistentID, unittest.NonexistentID, false) -} - -func TestIsPublicMembership(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - test := func(orgID, userID int64, expected bool) { - isMember, err := IsPublicMembership(orgID, userID) - assert.NoError(t, err) - assert.EqualValues(t, expected, isMember) - } - test(3, 2, true) - test(3, 3, false) - test(3, 4, false) - test(6, 5, true) - test(6, 4, false) - test(unittest.NonexistentID, unittest.NonexistentID, false) -} - -func TestFindOrgs(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - orgs, err := FindOrgs(FindOrgOptions{ - UserID: 4, - IncludePrivate: true, - }) - assert.NoError(t, err) - if assert.Len(t, orgs, 1) { - assert.EqualValues(t, 3, orgs[0].ID) - } - - orgs, err = FindOrgs(FindOrgOptions{ - UserID: 4, - IncludePrivate: false, - }) - assert.NoError(t, err) 
- assert.Len(t, orgs, 0) - - total, err := CountOrgs(FindOrgOptions{ - UserID: 4, - IncludePrivate: true, - }) - assert.NoError(t, err) - assert.EqualValues(t, 1, total) -} - -func TestGetOwnedOrgsByUserID(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - orgs, err := GetOwnedOrgsByUserID(2) - assert.NoError(t, err) - if assert.Len(t, orgs, 1) { - assert.EqualValues(t, 3, orgs[0].ID) - } - - orgs, err = GetOwnedOrgsByUserID(4) - assert.NoError(t, err) - assert.Len(t, orgs, 0) -} - -func TestGetOwnedOrgsByUserIDDesc(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - orgs, err := GetOwnedOrgsByUserIDDesc(5, "id") - assert.NoError(t, err) - if assert.Len(t, orgs, 2) { - assert.EqualValues(t, 7, orgs[0].ID) - assert.EqualValues(t, 6, orgs[1].ID) - } - - orgs, err = GetOwnedOrgsByUserIDDesc(4, "id") - assert.NoError(t, err) - assert.Len(t, orgs, 0) -} - -func TestGetOrgUsersByUserID(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - orgUsers, err := GetOrgUsersByUserID(5, &SearchOrganizationsOptions{All: true}) - assert.NoError(t, err) - if assert.Len(t, orgUsers, 2) { - assert.Equal(t, OrgUser{ - ID: orgUsers[0].ID, - OrgID: 6, - UID: 5, - IsPublic: true, - }, *orgUsers[0]) - assert.Equal(t, OrgUser{ - ID: orgUsers[1].ID, - OrgID: 7, - UID: 5, - IsPublic: false, - }, *orgUsers[1]) - } - - publicOrgUsers, err := GetOrgUsersByUserID(5, &SearchOrganizationsOptions{All: false}) - assert.NoError(t, err) - assert.Len(t, publicOrgUsers, 1) - assert.Equal(t, *orgUsers[0], *publicOrgUsers[0]) - - orgUsers, err = GetOrgUsersByUserID(1, &SearchOrganizationsOptions{All: true}) - assert.NoError(t, err) - assert.Len(t, orgUsers, 0) -} - -func TestGetOrgUsersByOrgID(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - orgUsers, err := GetOrgUsersByOrgID(&FindOrgMembersOpts{ - ListOptions: db.ListOptions{}, - OrgID: 3, - PublicOnly: false, - }) - assert.NoError(t, err) - if assert.Len(t, orgUsers, 3) { - assert.Equal(t, OrgUser{ - ID: orgUsers[0].ID, - OrgID: 3, - UID: 2, - IsPublic: true, - }, *orgUsers[0]) - assert.Equal(t, OrgUser{ - ID: orgUsers[1].ID, - OrgID: 3, - UID: 4, - IsPublic: false, - }, *orgUsers[1]) - } - - orgUsers, err = GetOrgUsersByOrgID(&FindOrgMembersOpts{ - ListOptions: db.ListOptions{}, - OrgID: unittest.NonexistentID, - PublicOnly: false, - }) - assert.NoError(t, err) - assert.Len(t, orgUsers, 0) -} - -func TestChangeOrgUserStatus(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - testSuccess := func(orgID, userID int64, public bool) { - assert.NoError(t, ChangeOrgUserStatus(orgID, userID, public)) - orgUser := unittest.AssertExistsAndLoadBean(t, &OrgUser{OrgID: orgID, UID: userID}).(*OrgUser) - assert.Equal(t, public, orgUser.IsPublic) - } - - testSuccess(3, 2, false) - testSuccess(3, 2, false) - testSuccess(3, 4, true) - assert.NoError(t, ChangeOrgUserStatus(unittest.NonexistentID, unittest.NonexistentID, true)) + unittest.CheckConsistencyFor(t, &organization.Organization{}, &organization.Team{}) } func TestAddOrgUser(t *testing.T) { @@ -448,11 +153,11 @@ func TestAddOrgUser(t *testing.T) { testSuccess := func(orgID, userID int64, isPublic bool) { org := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: orgID}).(*user_model.User) expectedNumMembers := org.NumMembers - if !unittest.BeanExists(t, &OrgUser{OrgID: orgID, UID: userID}) { + if !unittest.BeanExists(t, &organization.OrgUser{OrgID: orgID, UID: userID}) { expectedNumMembers++ } - assert.NoError(t, AddOrgUser(orgID, 
userID)) - ou := &OrgUser{OrgID: orgID, UID: userID} + assert.NoError(t, organization.AddOrgUser(orgID, userID)) + ou := &organization.OrgUser{OrgID: orgID, UID: userID} unittest.AssertExistsAndLoadBean(t, ou) assert.Equal(t, isPublic, ou.IsPublic) org = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: orgID}).(*user_model.User) @@ -467,194 +172,5 @@ func TestAddOrgUser(t *testing.T) { setting.Service.DefaultOrgMemberVisible = true testSuccess(6, 3, true) - unittest.CheckConsistencyFor(t, &user_model.User{}, &Team{}) -} - -func TestRemoveOrgUser(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - testSuccess := func(orgID, userID int64) { - org := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: orgID}).(*user_model.User) - expectedNumMembers := org.NumMembers - if unittest.BeanExists(t, &OrgUser{OrgID: orgID, UID: userID}) { - expectedNumMembers-- - } - assert.NoError(t, RemoveOrgUser(orgID, userID)) - unittest.AssertNotExistsBean(t, &OrgUser{OrgID: orgID, UID: userID}) - org = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: orgID}).(*user_model.User) - assert.EqualValues(t, expectedNumMembers, org.NumMembers) - } - testSuccess(3, 4) - testSuccess(3, 4) - - err := RemoveOrgUser(7, 5) - assert.Error(t, err) - assert.True(t, IsErrLastOrgOwner(err)) - unittest.AssertExistsAndLoadBean(t, &OrgUser{OrgID: 7, UID: 5}) - unittest.CheckConsistencyFor(t, &user_model.User{}, &Team{}) -} - -func TestUser_GetUserTeamIDs(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - testSuccess := func(userID int64, expected []int64) { - teamIDs, err := org.GetUserTeamIDs(userID) - assert.NoError(t, err) - assert.Equal(t, expected, teamIDs) - } - testSuccess(2, []int64{1, 2}) - testSuccess(4, []int64{2}) - testSuccess(unittest.NonexistentID, []int64{}) -} - -func TestAccessibleReposEnv_CountRepos(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - testSuccess := func(userID, expectedCount int64) { - env, err := org.AccessibleReposEnv(userID) - assert.NoError(t, err) - count, err := env.CountRepos() - assert.NoError(t, err) - assert.EqualValues(t, expectedCount, count) - } - testSuccess(2, 3) - testSuccess(4, 2) -} - -func TestAccessibleReposEnv_RepoIDs(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - testSuccess := func(userID, _, pageSize int64, expectedRepoIDs []int64) { - env, err := org.AccessibleReposEnv(userID) - assert.NoError(t, err) - repoIDs, err := env.RepoIDs(1, 100) - assert.NoError(t, err) - assert.Equal(t, expectedRepoIDs, repoIDs) - } - testSuccess(2, 1, 100, []int64{3, 5, 32}) - testSuccess(4, 0, 100, []int64{3, 32}) -} - -func TestAccessibleReposEnv_Repos(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - testSuccess := func(userID int64, expectedRepoIDs []int64) { - env, err := org.AccessibleReposEnv(userID) - assert.NoError(t, err) - repos, err := env.Repos(1, 100) - assert.NoError(t, err) - expectedRepos := make([]*repo_model.Repository, len(expectedRepoIDs)) - for i, repoID := range expectedRepoIDs { - expectedRepos[i] = unittest.AssertExistsAndLoadBean(t, - &repo_model.Repository{ID: repoID}).(*repo_model.Repository) - } - assert.Equal(t, 
expectedRepos, repos) - } - testSuccess(2, []int64{3, 5, 32}) - testSuccess(4, []int64{3, 32}) -} - -func TestAccessibleReposEnv_MirrorRepos(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) - testSuccess := func(userID int64, expectedRepoIDs []int64) { - env, err := org.AccessibleReposEnv(userID) - assert.NoError(t, err) - repos, err := env.MirrorRepos() - assert.NoError(t, err) - expectedRepos := make([]*repo_model.Repository, len(expectedRepoIDs)) - for i, repoID := range expectedRepoIDs { - expectedRepos[i] = unittest.AssertExistsAndLoadBean(t, - &repo_model.Repository{ID: repoID}).(*repo_model.Repository) - } - assert.Equal(t, expectedRepos, repos) - } - testSuccess(2, []int64{5}) - testSuccess(4, []int64{}) -} - -func TestHasOrgVisibleTypePublic(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - user3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) - - const newOrgName = "test-org-public" - org := &Organization{ - Name: newOrgName, - Visibility: structs.VisibleTypePublic, - } - - unittest.AssertNotExistsBean(t, &user_model.User{Name: org.Name, Type: user_model.UserTypeOrganization}) - assert.NoError(t, CreateOrganization(org, owner)) - org = unittest.AssertExistsAndLoadBean(t, - &Organization{Name: org.Name, Type: user_model.UserTypeOrganization}).(*Organization) - test1 := HasOrgOrUserVisible(org.AsUser(), owner) - test2 := HasOrgOrUserVisible(org.AsUser(), user3) - test3 := HasOrgOrUserVisible(org.AsUser(), nil) - assert.True(t, test1) // owner of org - assert.True(t, test2) // user not a part of org - assert.True(t, test3) // logged out user -} - -func TestHasOrgVisibleTypeLimited(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - user3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) - - const newOrgName = "test-org-limited" - org := &Organization{ - Name: newOrgName, - Visibility: structs.VisibleTypeLimited, - } - - unittest.AssertNotExistsBean(t, &user_model.User{Name: org.Name, Type: user_model.UserTypeOrganization}) - assert.NoError(t, CreateOrganization(org, owner)) - org = unittest.AssertExistsAndLoadBean(t, - &Organization{Name: org.Name, Type: user_model.UserTypeOrganization}).(*Organization) - test1 := HasOrgOrUserVisible(org.AsUser(), owner) - test2 := HasOrgOrUserVisible(org.AsUser(), user3) - test3 := HasOrgOrUserVisible(org.AsUser(), nil) - assert.True(t, test1) // owner of org - assert.True(t, test2) // user not a part of org - assert.False(t, test3) // logged out user -} - -func TestHasOrgVisibleTypePrivate(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - user3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) - - const newOrgName = "test-org-private" - org := &Organization{ - Name: newOrgName, - Visibility: structs.VisibleTypePrivate, - } - - unittest.AssertNotExistsBean(t, &user_model.User{Name: org.Name, Type: user_model.UserTypeOrganization}) - assert.NoError(t, CreateOrganization(org, owner)) - org = unittest.AssertExistsAndLoadBean(t, - &Organization{Name: org.Name, Type: user_model.UserTypeOrganization}).(*Organization) - test1 := 
HasOrgOrUserVisible(org.AsUser(), owner) - test2 := HasOrgOrUserVisible(org.AsUser(), user3) - test3 := HasOrgOrUserVisible(org.AsUser(), nil) - assert.True(t, test1) // owner of org - assert.False(t, test2) // user not a part of org - assert.False(t, test3) // logged out user -} - -func TestGetUsersWhoCanCreateOrgRepo(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - users, err := GetUsersWhoCanCreateOrgRepo(3) - assert.NoError(t, err) - assert.Len(t, users, 2) - var ids []int64 - for i := range users { - ids = append(ids, users[i].ID) - } - assert.ElementsMatch(t, ids, []int64{2, 28}) - - users, err = GetUsersWhoCanCreateOrgRepo(7) - assert.NoError(t, err) - assert.Len(t, users, 1) - assert.EqualValues(t, 5, users[0].ID) + unittest.CheckConsistencyFor(t, &user_model.User{}, &organization.Team{}) } diff --git a/models/organization/main_test.go b/models/organization/main_test.go new file mode 100644 index 0000000000..711b86b9bd --- /dev/null +++ b/models/organization/main_test.go @@ -0,0 +1,27 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package organization + +import ( + "path/filepath" + "testing" + + "code.gitea.io/gitea/models/unittest" +) + +func TestMain(m *testing.M) { + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{ + "user.yml", + "org_user.yml", + "team.yml", + "team_repo.yml", + "team_unit.yml", + "team_user.yml", + "repository.yml", + }, + }) +} diff --git a/models/organization/org.go b/models/organization/org.go new file mode 100644 index 0000000000..3761335922 --- /dev/null +++ b/models/organization/org.go @@ -0,0 +1,859 @@ +// Copyright 2014 The Gogs Authors. All rights reserved. +// Copyright 2019 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package organization + +import ( + "context" + "fmt" + "strings" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/perm" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unit" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" + + "xorm.io/builder" +) + +// ________ .__ __ .__ +// \_____ \_______ _________ ____ |__|____________ _/ |_|__| ____ ____ +// / | \_ __ \/ ___\__ \ / \| \___ /\__ \\ __\ |/ _ \ / \ +// / | \ | \/ /_/ > __ \| | \ |/ / / __ \| | | ( <_> ) | \ +// \_______ /__| \___ (____ /___| /__/_____ \(____ /__| |__|\____/|___| / +// \/ /_____/ \/ \/ \/ \/ \/ + +// ErrOrgNotExist represents a "OrgNotExist" kind of error. +type ErrOrgNotExist struct { + ID int64 + Name string +} + +// IsErrOrgNotExist checks if an error is a ErrOrgNotExist. +func IsErrOrgNotExist(err error) bool { + _, ok := err.(ErrOrgNotExist) + return ok +} + +func (err ErrOrgNotExist) Error() string { + return fmt.Sprintf("org does not exist [id: %d, name: %s]", err.ID, err.Name) +} + +// ErrLastOrgOwner represents a "LastOrgOwner" kind of error. +type ErrLastOrgOwner struct { + UID int64 +} + +// IsErrLastOrgOwner checks if an error is a ErrLastOrgOwner. 
+func IsErrLastOrgOwner(err error) bool { + _, ok := err.(ErrLastOrgOwner) + return ok +} + +func (err ErrLastOrgOwner) Error() string { + return fmt.Sprintf("user is the last member of owner team [uid: %d]", err.UID) +} + +// ErrUserNotAllowedCreateOrg represents a "UserNotAllowedCreateOrg" kind of error. +type ErrUserNotAllowedCreateOrg struct{} + +// IsErrUserNotAllowedCreateOrg checks if an error is an ErrUserNotAllowedCreateOrg. +func IsErrUserNotAllowedCreateOrg(err error) bool { + _, ok := err.(ErrUserNotAllowedCreateOrg) + return ok +} + +func (err ErrUserNotAllowedCreateOrg) Error() string { + return "user is not allowed to create organizations" +} + +// Organization represents an organization +type Organization user_model.User + +// OrgFromUser converts user to organization +func OrgFromUser(user *user_model.User) *Organization { + return (*Organization)(user) +} + +// TableName represents the real table name of Organization +func (Organization) TableName() string { + return "user" +} + +// IsOwnedBy returns true if given user is in the owner team. +func (org *Organization) IsOwnedBy(uid int64) (bool, error) { + return IsOrganizationOwner(db.DefaultContext, org.ID, uid) +} + +// IsOrgMember returns true if given user is member of organization. +func (org *Organization) IsOrgMember(uid int64) (bool, error) { + return IsOrganizationMember(db.DefaultContext, org.ID, uid) +} + +// CanCreateOrgRepo returns true if given user can create repo in organization +func (org *Organization) CanCreateOrgRepo(uid int64) (bool, error) { + return CanCreateOrgRepo(org.ID, uid) +} + +func (org *Organization) getTeam(ctx context.Context, name string) (*Team, error) { + return getTeam(ctx, org.ID, name) +} + +// GetTeam returns named team of organization. +func (org *Organization) GetTeam(name string) (*Team, error) { + return org.getTeam(db.DefaultContext, name) +} + +func (org *Organization) getOwnerTeam(ctx context.Context) (*Team, error) { + return org.getTeam(ctx, OwnerTeamName) +} + +// GetOwnerTeam returns owner team of organization. +func (org *Organization) GetOwnerTeam() (*Team, error) { + return org.getOwnerTeam(db.DefaultContext) +} + +// FindOrgTeams returns all teams of a given organization +func FindOrgTeams(ctx context.Context, orgID int64) ([]*Team, error) { + var teams []*Team + return teams, db.GetEngine(ctx). + Where("org_id=?", orgID). + OrderBy("CASE WHEN name LIKE '" + OwnerTeamName + "' THEN '' ELSE name END"). + Find(&teams) +} + +// LoadTeams load teams if not loaded. +func (org *Organization) LoadTeams() ([]*Team, error) { + return FindOrgTeams(db.DefaultContext, org.ID) +} + +// GetMembers returns all members of organization. +func (org *Organization) GetMembers() (user_model.UserList, map[int64]bool, error) { + return FindOrgMembers(&FindOrgMembersOpts{ + OrgID: org.ID, + }) +} + +// HasMemberWithUserID returns true if user with userID is part of the u organisation. +func (org *Organization) HasMemberWithUserID(userID int64) bool { + return org.hasMemberWithUserID(db.DefaultContext, userID) +} + +func (org *Organization) hasMemberWithUserID(ctx context.Context, userID int64) bool { + isMember, err := IsOrganizationMember(ctx, org.ID, userID) + if err != nil { + log.Error("IsOrganizationMember: %v", err) + return false + } + return isMember +} + +// AvatarLink returns the full avatar link with http host +func (org *Organization) AvatarLink() string { + return org.AsUser().AvatarLink() +} + +// HTMLURL returns the organization's full link. 
+func (org *Organization) HTMLURL() string { + return org.AsUser().HTMLURL() +} + +// OrganisationLink returns the organization sub page link. +func (org *Organization) OrganisationLink() string { + return org.AsUser().OrganisationLink() +} + +// ShortName ellipses username to length +func (org *Organization) ShortName(length int) string { + return org.AsUser().ShortName(length) +} + +// HomeLink returns the user or organization home page link. +func (org *Organization) HomeLink() string { + return org.AsUser().HomeLink() +} + +// CanCreateRepo returns if user login can create a repository +// NOTE: functions calling this assume a failure due to repository count limit; if new checks are added, those functions should be revised +func (org *Organization) CanCreateRepo() bool { + return org.AsUser().CanCreateRepo() +} + +// FindOrgMembersOpts represensts find org members conditions +type FindOrgMembersOpts struct { + db.ListOptions + OrgID int64 + PublicOnly bool +} + +// CountOrgMembers counts the organization's members +func CountOrgMembers(opts *FindOrgMembersOpts) (int64, error) { + sess := db.GetEngine(db.DefaultContext).Where("org_id=?", opts.OrgID) + if opts.PublicOnly { + sess.And("is_public = ?", true) + } + return sess.Count(new(OrgUser)) +} + +// FindOrgMembers loads organization members according conditions +func FindOrgMembers(opts *FindOrgMembersOpts) (user_model.UserList, map[int64]bool, error) { + ous, err := GetOrgUsersByOrgID(opts) + if err != nil { + return nil, nil, err + } + + ids := make([]int64, len(ous)) + idsIsPublic := make(map[int64]bool, len(ous)) + for i, ou := range ous { + ids[i] = ou.UID + idsIsPublic[ou.UID] = ou.IsPublic + } + + users, err := user_model.GetUsersByIDs(ids) + if err != nil { + return nil, nil, err + } + return users, idsIsPublic, nil +} + +// AsUser returns the org as user object +func (org *Organization) AsUser() *user_model.User { + return (*user_model.User)(org) +} + +// DisplayName returns full name if it's not empty, +// returns username otherwise. +func (org *Organization) DisplayName() string { + return org.AsUser().DisplayName() +} + +// CustomAvatarRelativePath returns user custom avatar relative path. +func (org *Organization) CustomAvatarRelativePath() string { + return org.Avatar +} + +// CreateOrganization creates record of a new organization. 
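+// It runs in a single transaction: the owner becomes the first member and a default
+// Owners team with every repository unit type is created alongside the organization.
+//
+// A minimal usage sketch (the doer user is assumed to be loaded elsewhere):
+//
+//    org := &Organization{Name: "my-org", Visibility: structs.VisibleTypePublic}
+//    if err := CreateOrganization(org, doer); err != nil {
+//        // may be ErrUserNotAllowedCreateOrg, user_model.ErrUserAlreadyExist, ...
+//    }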
+func CreateOrganization(org *Organization, owner *user_model.User) (err error) { + if !owner.CanCreateOrganization() { + return ErrUserNotAllowedCreateOrg{} + } + + if err = user_model.IsUsableUsername(org.Name); err != nil { + return err + } + + isExist, err := user_model.IsUserExist(0, org.Name) + if err != nil { + return err + } else if isExist { + return user_model.ErrUserAlreadyExist{Name: org.Name} + } + + org.LowerName = strings.ToLower(org.Name) + if org.Rands, err = user_model.GetUserSalt(); err != nil { + return err + } + if org.Salt, err = user_model.GetUserSalt(); err != nil { + return err + } + org.UseCustomAvatar = true + org.MaxRepoCreation = -1 + org.NumTeams = 1 + org.NumMembers = 1 + org.Type = user_model.UserTypeOrganization + + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + if err = user_model.DeleteUserRedirect(ctx, org.Name); err != nil { + return err + } + + if err = db.Insert(ctx, org); err != nil { + return fmt.Errorf("insert organization: %v", err) + } + if err = user_model.GenerateRandomAvatarCtx(ctx, org.AsUser()); err != nil { + return fmt.Errorf("generate random avatar: %v", err) + } + + // Add initial creator to organization and owner team. + if err = db.Insert(ctx, &OrgUser{ + UID: owner.ID, + OrgID: org.ID, + }); err != nil { + return fmt.Errorf("insert org-user relation: %v", err) + } + + // Create default owner team. + t := &Team{ + OrgID: org.ID, + LowerName: strings.ToLower(OwnerTeamName), + Name: OwnerTeamName, + AccessMode: perm.AccessModeOwner, + NumMembers: 1, + IncludesAllRepositories: true, + CanCreateOrgRepo: true, + } + if err = db.Insert(ctx, t); err != nil { + return fmt.Errorf("insert owner team: %v", err) + } + + // insert units for team + units := make([]TeamUnit, 0, len(unit.AllRepoUnitTypes)) + for _, tp := range unit.AllRepoUnitTypes { + units = append(units, TeamUnit{ + OrgID: org.ID, + TeamID: t.ID, + Type: tp, + }) + } + + if err = db.Insert(ctx, &units); err != nil { + return err + } + + if err = db.Insert(ctx, &TeamUser{ + UID: owner.ID, + OrgID: org.ID, + TeamID: t.ID, + }); err != nil { + return fmt.Errorf("insert team-user relation: %v", err) + } + + return committer.Commit() +} + +// GetOrgByName returns organization by given name. +func GetOrgByName(name string) (*Organization, error) { + if len(name) == 0 { + return nil, ErrOrgNotExist{0, name} + } + u := &Organization{ + LowerName: strings.ToLower(name), + Type: user_model.UserTypeOrganization, + } + has, err := db.GetEngine(db.DefaultContext).Get(u) + if err != nil { + return nil, err + } else if !has { + return nil, ErrOrgNotExist{0, name} + } + return u, nil +} + +// CountOrganizations returns number of organizations. +func CountOrganizations() int64 { + count, _ := db.GetEngine(db.DefaultContext). + Where("type=1"). + Count(new(Organization)) + return count +} + +// DeleteOrganization deletes models associated to an organization. 
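+// Teams, org-user and team-user relations and team units are deleted before the user
+// record itself; the organization's repositories are not touched here. The passed
+// context is expected to carry the caller's transaction, e.g. (error handling elided):
+//
+//    ctx, committer, _ := db.TxContext()
+//    defer committer.Close()
+//    _ = DeleteOrganization(ctx, org)
+//    _ = committer.Commit()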
+func DeleteOrganization(ctx context.Context, org *Organization) error { + if org.Type != user_model.UserTypeOrganization { + return fmt.Errorf("%s is a user not an organization", org.Name) + } + + if err := db.DeleteBeans(ctx, + &Team{OrgID: org.ID}, + &OrgUser{OrgID: org.ID}, + &TeamUser{OrgID: org.ID}, + &TeamUnit{OrgID: org.ID}, + ); err != nil { + return fmt.Errorf("deleteBeans: %v", err) + } + + if _, err := db.GetEngine(ctx).ID(org.ID).Delete(new(user_model.User)); err != nil { + return fmt.Errorf("Delete: %v", err) + } + + return nil +} + +// GetOrgUserMaxAuthorizeLevel returns highest authorize level of user in an organization +func (org *Organization) GetOrgUserMaxAuthorizeLevel(uid int64) (perm.AccessMode, error) { + var authorize perm.AccessMode + _, err := db.GetEngine(db.DefaultContext). + Select("max(team.authorize)"). + Table("team"). + Join("INNER", "team_user", "team_user.team_id = team.id"). + Where("team_user.uid = ?", uid). + And("team_user.org_id = ?", org.ID). + Get(&authorize) + return authorize, err +} + +// GetUsersWhoCanCreateOrgRepo returns users which are able to create repo in organization +func GetUsersWhoCanCreateOrgRepo(ctx context.Context, orgID int64) ([]*user_model.User, error) { + users := make([]*user_model.User, 0, 10) + return users, db.GetEngine(ctx). + Join("INNER", "`team_user`", "`team_user`.uid=`user`.id"). + Join("INNER", "`team`", "`team`.id=`team_user`.team_id"). + Where(builder.Eq{"team.can_create_org_repo": true}.Or(builder.Eq{"team.authorize": perm.AccessModeOwner})). + And("team_user.org_id = ?", orgID).Asc("`user`.name").Find(&users) +} + +// SearchOrganizationsOptions options to filter organizations +type SearchOrganizationsOptions struct { + db.ListOptions + All bool +} + +// FindOrgOptions finds orgs options +type FindOrgOptions struct { + db.ListOptions + UserID int64 + IncludePrivate bool +} + +func queryUserOrgIDs(userID int64, includePrivate bool) *builder.Builder { + cond := builder.Eq{"uid": userID} + if !includePrivate { + cond["is_public"] = true + } + return builder.Select("org_id").From("org_user").Where(cond) +} + +func (opts FindOrgOptions) toConds() builder.Cond { + cond := builder.NewCond() + if opts.UserID > 0 { + cond = cond.And(builder.In("`user`.`id`", queryUserOrgIDs(opts.UserID, opts.IncludePrivate))) + } + if !opts.IncludePrivate { + cond = cond.And(builder.Eq{"`user`.visibility": structs.VisibleTypePublic}) + } + return cond +} + +// FindOrgs returns a list of organizations according given conditions +func FindOrgs(opts FindOrgOptions) ([]*Organization, error) { + orgs := make([]*Organization, 0, 10) + sess := db.GetEngine(db.DefaultContext). + Where(opts.toConds()). + Asc("`user`.name") + if opts.Page > 0 && opts.PageSize > 0 { + sess.Limit(opts.PageSize, opts.PageSize*(opts.Page-1)) + } + return orgs, sess.Find(&orgs) +} + +// CountOrgs returns total count organizations according options +func CountOrgs(opts FindOrgOptions) (int64, error) { + return db.GetEngine(db.DefaultContext). + Where(opts.toConds()). + Count(new(user_model.User)) +} + +func getOwnedOrgsByUserID(sess db.Engine, userID int64) ([]*Organization, error) { + orgs := make([]*Organization, 0, 10) + return orgs, sess. + Join("INNER", "`team_user`", "`team_user`.org_id=`user`.id"). + Join("INNER", "`team`", "`team`.id=`team_user`.team_id"). + Where("`team_user`.uid=?", userID). + And("`team`.authorize=?", perm.AccessModeOwner). + Asc("`user`.name"). 
+ Find(&orgs) +} + +// HasOrgOrUserVisible tells if the given user can see the given org or user +func HasOrgOrUserVisible(ctx context.Context, orgOrUser, user *user_model.User) bool { + // Not SignedUser + if user == nil { + return orgOrUser.Visibility == structs.VisibleTypePublic + } + + if user.IsAdmin || orgOrUser.ID == user.ID { + return true + } + + if (orgOrUser.Visibility == structs.VisibleTypePrivate || user.IsRestricted) && !OrgFromUser(orgOrUser).hasMemberWithUserID(ctx, user.ID) { + return false + } + return true +} + +// HasOrgsVisible tells if the given user can see at least one of the orgs provided +func HasOrgsVisible(orgs []*Organization, user *user_model.User) bool { + if len(orgs) == 0 { + return false + } + + for _, org := range orgs { + if HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), user) { + return true + } + } + return false +} + +// GetOwnedOrgsByUserID returns a list of organizations are owned by given user ID. +func GetOwnedOrgsByUserID(userID int64) ([]*Organization, error) { + return getOwnedOrgsByUserID(db.GetEngine(db.DefaultContext), userID) +} + +// GetOwnedOrgsByUserIDDesc returns a list of organizations are owned by +// given user ID, ordered descending by the given condition. +func GetOwnedOrgsByUserIDDesc(userID int64, desc string) ([]*Organization, error) { + return getOwnedOrgsByUserID(db.GetEngine(db.DefaultContext).Desc(desc), userID) +} + +// GetOrgsCanCreateRepoByUserID returns a list of organizations where given user ID +// are allowed to create repos. +func GetOrgsCanCreateRepoByUserID(userID int64) ([]*Organization, error) { + orgs := make([]*Organization, 0, 10) + + return orgs, db.GetEngine(db.DefaultContext).Where(builder.In("id", builder.Select("`user`.id").From("`user`"). + Join("INNER", "`team_user`", "`team_user`.org_id = `user`.id"). + Join("INNER", "`team`", "`team`.id = `team_user`.team_id"). + Where(builder.Eq{"`team_user`.uid": userID}). + And(builder.Eq{"`team`.authorize": perm.AccessModeOwner}.Or(builder.Eq{"`team`.can_create_org_repo": true})))). + Asc("`user`.name"). + Find(&orgs) +} + +// GetOrgUsersByUserID returns all organization-user relations by user ID. +func GetOrgUsersByUserID(uid int64, opts *SearchOrganizationsOptions) ([]*OrgUser, error) { + ous := make([]*OrgUser, 0, 10) + sess := db.GetEngine(db.DefaultContext). + Join("LEFT", "`user`", "`org_user`.org_id=`user`.id"). + Where("`org_user`.uid=?", uid) + if !opts.All { + // Only show public organizations + sess.And("is_public=?", true) + } + + if opts.PageSize != 0 { + sess = db.SetSessionPagination(sess, opts) + } + + err := sess. + Asc("`user`.name"). + Find(&ous) + return ous, err +} + +// GetOrgUsersByOrgID returns all organization-user relations by organization ID. +func GetOrgUsersByOrgID(opts *FindOrgMembersOpts) ([]*OrgUser, error) { + return getOrgUsersByOrgID(db.GetEngine(db.DefaultContext), opts) +} + +func getOrgUsersByOrgID(e db.Engine, opts *FindOrgMembersOpts) ([]*OrgUser, error) { + sess := e.Where("org_id=?", opts.OrgID) + if opts.PublicOnly { + sess.And("is_public = ?", true) + } + if opts.ListOptions.PageSize > 0 { + sess = db.SetSessionPagination(sess, opts) + + ous := make([]*OrgUser, 0, opts.PageSize) + return ous, sess.Find(&ous) + } + + var ous []*OrgUser + return ous, sess.Find(&ous) +} + +// ChangeOrgUserStatus changes public or private membership status. +func ChangeOrgUserStatus(orgID, uid int64, public bool) error { + ou := new(OrgUser) + has, err := db.GetEngine(db.DefaultContext). + Where("uid=?", uid). + And("org_id=?", orgID). 
+ Get(ou) + if err != nil { + return err + } else if !has { + return nil + } + + ou.IsPublic = public + _, err = db.GetEngine(db.DefaultContext).ID(ou.ID).Cols("is_public").Update(ou) + return err +} + +// AddOrgUser adds new user to given organization. +func AddOrgUser(orgID, uid int64) error { + isAlreadyMember, err := IsOrganizationMember(db.DefaultContext, orgID, uid) + if err != nil || isAlreadyMember { + return err + } + + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + ou := &OrgUser{ + UID: uid, + OrgID: orgID, + IsPublic: setting.Service.DefaultOrgMemberVisible, + } + + if err := db.Insert(ctx, ou); err != nil { + return err + } else if _, err = db.Exec(ctx, "UPDATE `user` SET num_members = num_members + 1 WHERE id = ?", orgID); err != nil { + return err + } + + return committer.Commit() +} + +// GetOrgByIDCtx returns the user object by given ID if exists. +func GetOrgByIDCtx(ctx context.Context, id int64) (*Organization, error) { + u := new(Organization) + has, err := db.GetEngine(ctx).ID(id).Get(u) + if err != nil { + return nil, err + } else if !has { + return nil, user_model.ErrUserNotExist{ + UID: id, + Name: "", + KeyID: 0, + } + } + return u, nil +} + +// GetOrgByID returns the user object by given ID if exists. +func GetOrgByID(id int64) (*Organization, error) { + return GetOrgByIDCtx(db.DefaultContext, id) +} + +// RemoveOrgRepo removes all team-repository relations of organization. +func RemoveOrgRepo(ctx context.Context, orgID, repoID int64) error { + teamRepos := make([]*TeamRepo, 0, 10) + e := db.GetEngine(ctx) + if err := e.Find(&teamRepos, &TeamRepo{OrgID: orgID, RepoID: repoID}); err != nil { + return err + } + + if len(teamRepos) == 0 { + return nil + } + + if _, err := e.Delete(&TeamRepo{ + OrgID: orgID, + RepoID: repoID, + }); err != nil { + return err + } + + teamIDs := make([]int64, len(teamRepos)) + for i, teamRepo := range teamRepos { + teamIDs[i] = teamRepo.TeamID + } + + _, err := e.Decr("num_repos").In("id", teamIDs).Update(new(Team)) + return err +} + +func (org *Organization) getUserTeams(e db.Engine, userID int64, cols ...string) ([]*Team, error) { + teams := make([]*Team, 0, org.NumTeams) + return teams, e. + Where("`team_user`.org_id = ?", org.ID). + Join("INNER", "team_user", "`team_user`.team_id = team.id"). + Join("INNER", "`user`", "`user`.id=team_user.uid"). + And("`team_user`.uid = ?", userID). + Asc("`user`.name"). + Cols(cols...). + Find(&teams) +} + +func (org *Organization) getUserTeamIDs(ctx context.Context, userID int64) ([]int64, error) { + teamIDs := make([]int64, 0, org.NumTeams) + return teamIDs, db.GetEngine(ctx). + Table("team"). + Cols("team.id"). + Where("`team_user`.org_id = ?", org.ID). + Join("INNER", "team_user", "`team_user`.team_id = team.id"). + And("`team_user`.uid = ?", userID). + Find(&teamIDs) +} + +// TeamsWithAccessToRepo returns all teams that have given access level to the repository. +func (org *Organization) TeamsWithAccessToRepo(repoID int64, mode perm.AccessMode) ([]*Team, error) { + return GetTeamsWithAccessToRepo(db.DefaultContext, org.ID, repoID, mode) +} + +// GetUserTeamIDs returns of all team IDs of the organization that user is member of. +func (org *Organization) GetUserTeamIDs(userID int64) ([]int64, error) { + return org.getUserTeamIDs(db.DefaultContext, userID) +} + +// GetUserTeams returns all teams that belong to user, +// and that the user has joined. 
+func (org *Organization) GetUserTeams(userID int64) ([]*Team, error) { + return org.getUserTeams(db.GetEngine(db.DefaultContext), userID) +} + +// AccessibleReposEnvironment operations involving the repositories that are +// accessible to a particular user +type AccessibleReposEnvironment interface { + CountRepos() (int64, error) + RepoIDs(page, pageSize int) ([]int64, error) + Repos(page, pageSize int) ([]*repo_model.Repository, error) + MirrorRepos() ([]*repo_model.Repository, error) + AddKeyword(keyword string) + SetSort(db.SearchOrderBy) +} + +type accessibleReposEnv struct { + org *Organization + user *user_model.User + team *Team + teamIDs []int64 + e db.Engine + keyword string + orderBy db.SearchOrderBy +} + +// AccessibleReposEnv builds an AccessibleReposEnvironment for the repositories in `org` +// that are accessible to the specified user. +func AccessibleReposEnv(ctx context.Context, org *Organization, userID int64) (AccessibleReposEnvironment, error) { + var user *user_model.User + + if userID > 0 { + u, err := user_model.GetUserByIDCtx(ctx, userID) + if err != nil { + return nil, err + } + user = u + } + + teamIDs, err := org.getUserTeamIDs(ctx, userID) + if err != nil { + return nil, err + } + return &accessibleReposEnv{ + org: org, + user: user, + teamIDs: teamIDs, + e: db.GetEngine(ctx), + orderBy: db.SearchOrderByRecentUpdated, + }, nil +} + +// AccessibleTeamReposEnv an AccessibleReposEnvironment for the repositories in `org` +// that are accessible to the specified team. +func (org *Organization) AccessibleTeamReposEnv(team *Team) AccessibleReposEnvironment { + return &accessibleReposEnv{ + org: org, + team: team, + e: db.GetEngine(db.DefaultContext), + orderBy: db.SearchOrderByRecentUpdated, + } +} + +func (env *accessibleReposEnv) cond() builder.Cond { + cond := builder.NewCond() + if env.team != nil { + cond = cond.And(builder.Eq{"team_repo.team_id": env.team.ID}) + } else { + if env.user == nil || !env.user.IsRestricted { + cond = cond.Or(builder.Eq{ + "`repository`.owner_id": env.org.ID, + "`repository`.is_private": false, + }) + } + if len(env.teamIDs) > 0 { + cond = cond.Or(builder.In("team_repo.team_id", env.teamIDs)) + } + } + if env.keyword != "" { + cond = cond.And(builder.Like{"`repository`.lower_name", strings.ToLower(env.keyword)}) + } + return cond +} + +func (env *accessibleReposEnv) CountRepos() (int64, error) { + repoCount, err := env.e. + Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id"). + Where(env.cond()). + Distinct("`repository`.id"). + Count(&repo_model.Repository{}) + if err != nil { + return 0, fmt.Errorf("count user repositories in organization: %v", err) + } + return repoCount, nil +} + +func (env *accessibleReposEnv) RepoIDs(page, pageSize int) ([]int64, error) { + if page <= 0 { + page = 1 + } + + repoIDs := make([]int64, 0, pageSize) + return repoIDs, env.e. + Table("repository"). + Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id"). + Where(env.cond()). + GroupBy("`repository`.id,`repository`."+strings.Fields(string(env.orderBy))[0]). + OrderBy(string(env.orderBy)). + Limit(pageSize, (page-1)*pageSize). + Cols("`repository`.id"). + Find(&repoIDs) +} + +func (env *accessibleReposEnv) Repos(page, pageSize int) ([]*repo_model.Repository, error) { + repoIDs, err := env.RepoIDs(page, pageSize) + if err != nil { + return nil, fmt.Errorf("GetUserRepositoryIDs: %v", err) + } + + repos := make([]*repo_model.Repository, 0, len(repoIDs)) + if len(repoIDs) == 0 { + return repos, nil + } + + return repos, env.e. 
+ In("`repository`.id", repoIDs). + OrderBy(string(env.orderBy)). + Find(&repos) +} + +func (env *accessibleReposEnv) MirrorRepoIDs() ([]int64, error) { + repoIDs := make([]int64, 0, 10) + return repoIDs, env.e. + Table("repository"). + Join("INNER", "team_repo", "`team_repo`.repo_id=`repository`.id AND `repository`.is_mirror=?", true). + Where(env.cond()). + GroupBy("`repository`.id, `repository`.updated_unix"). + OrderBy(string(env.orderBy)). + Cols("`repository`.id"). + Find(&repoIDs) +} + +func (env *accessibleReposEnv) MirrorRepos() ([]*repo_model.Repository, error) { + repoIDs, err := env.MirrorRepoIDs() + if err != nil { + return nil, fmt.Errorf("MirrorRepoIDs: %v", err) + } + + repos := make([]*repo_model.Repository, 0, len(repoIDs)) + if len(repoIDs) == 0 { + return repos, nil + } + + return repos, env.e. + In("`repository`.id", repoIDs). + Find(&repos) +} + +func (env *accessibleReposEnv) AddKeyword(keyword string) { + env.keyword = keyword +} + +func (env *accessibleReposEnv) SetSort(orderBy db.SearchOrderBy) { + env.orderBy = orderBy +} diff --git a/models/organization/org_test.go b/models/organization/org_test.go new file mode 100644 index 0000000000..71cdbd869f --- /dev/null +++ b/models/organization/org_test.go @@ -0,0 +1,478 @@ +// Copyright 2017 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package organization + +import ( + "testing" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/structs" + + "github.com/stretchr/testify/assert" +) + +func TestUser_IsOwnedBy(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + for _, testCase := range []struct { + OrgID int64 + UserID int64 + ExpectedOwner bool + }{ + {3, 2, true}, + {3, 1, false}, + {3, 3, false}, + {3, 4, false}, + {2, 2, false}, // user2 is not an organization + {2, 3, false}, + } { + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: testCase.OrgID}).(*Organization) + isOwner, err := org.IsOwnedBy(testCase.UserID) + assert.NoError(t, err) + assert.Equal(t, testCase.ExpectedOwner, isOwner) + } +} + +func TestUser_IsOrgMember(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + for _, testCase := range []struct { + OrgID int64 + UserID int64 + ExpectedMember bool + }{ + {3, 2, true}, + {3, 4, true}, + {3, 1, false}, + {3, 3, false}, + {2, 2, false}, // user2 is not an organization + {2, 3, false}, + } { + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: testCase.OrgID}).(*Organization) + isMember, err := org.IsOrgMember(testCase.UserID) + assert.NoError(t, err) + assert.Equal(t, testCase.ExpectedMember, isMember) + } +} + +func TestUser_GetTeam(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + team, err := org.GetTeam("team1") + assert.NoError(t, err) + assert.Equal(t, org.ID, team.OrgID) + assert.Equal(t, "team1", team.LowerName) + + _, err = org.GetTeam("does not exist") + assert.True(t, IsErrTeamNotExist(err)) + + nonOrg := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 2}).(*Organization) + _, err = nonOrg.GetTeam("team") + assert.True(t, IsErrTeamNotExist(err)) +} + +func TestUser_GetOwnerTeam(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := 
unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + team, err := org.GetOwnerTeam() + assert.NoError(t, err) + assert.Equal(t, org.ID, team.OrgID) + + nonOrg := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 2}).(*Organization) + _, err = nonOrg.GetOwnerTeam() + assert.True(t, IsErrTeamNotExist(err)) +} + +func TestUser_GetTeams(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + teams, err := org.LoadTeams() + assert.NoError(t, err) + if assert.Len(t, teams, 4) { + assert.Equal(t, int64(1), teams[0].ID) + assert.Equal(t, int64(2), teams[1].ID) + assert.Equal(t, int64(12), teams[2].ID) + assert.Equal(t, int64(7), teams[3].ID) + } +} + +func TestUser_GetMembers(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + members, _, err := org.GetMembers() + assert.NoError(t, err) + if assert.Len(t, members, 3) { + assert.Equal(t, int64(2), members[0].ID) + assert.Equal(t, int64(28), members[1].ID) + assert.Equal(t, int64(4), members[2].ID) + } +} + +func TestGetOrgByName(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + org, err := GetOrgByName("user3") + assert.NoError(t, err) + assert.EqualValues(t, 3, org.ID) + assert.Equal(t, "user3", org.Name) + + _, err = GetOrgByName("user2") // user2 is an individual + assert.True(t, IsErrOrgNotExist(err)) + + _, err = GetOrgByName("") // corner case + assert.True(t, IsErrOrgNotExist(err)) +} + +func TestCountOrganizations(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + expected, err := db.GetEngine(db.DefaultContext).Where("type=?", user_model.UserTypeOrganization).Count(&user_model.User{}) + assert.NoError(t, err) + assert.Equal(t, expected, CountOrganizations()) +} + +func TestIsOrganizationOwner(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + test := func(orgID, userID int64, expected bool) { + isOwner, err := IsOrganizationOwner(db.DefaultContext, orgID, userID) + assert.NoError(t, err) + assert.EqualValues(t, expected, isOwner) + } + test(3, 2, true) + test(3, 3, false) + test(6, 5, true) + test(6, 4, false) + test(unittest.NonexistentID, unittest.NonexistentID, false) +} + +func TestIsOrganizationMember(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + test := func(orgID, userID int64, expected bool) { + isMember, err := IsOrganizationMember(db.DefaultContext, orgID, userID) + assert.NoError(t, err) + assert.EqualValues(t, expected, isMember) + } + test(3, 2, true) + test(3, 3, false) + test(3, 4, true) + test(6, 5, true) + test(6, 4, false) + test(unittest.NonexistentID, unittest.NonexistentID, false) +} + +func TestIsPublicMembership(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + test := func(orgID, userID int64, expected bool) { + isMember, err := IsPublicMembership(orgID, userID) + assert.NoError(t, err) + assert.EqualValues(t, expected, isMember) + } + test(3, 2, true) + test(3, 3, false) + test(3, 4, false) + test(6, 5, true) + test(6, 4, false) + test(unittest.NonexistentID, unittest.NonexistentID, false) +} + +func TestFindOrgs(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + orgs, err := FindOrgs(FindOrgOptions{ + UserID: 4, + IncludePrivate: true, + }) + assert.NoError(t, err) + if assert.Len(t, orgs, 1) { + assert.EqualValues(t, 3, orgs[0].ID) + } + + orgs, err = 
FindOrgs(FindOrgOptions{ + UserID: 4, + IncludePrivate: false, + }) + assert.NoError(t, err) + assert.Len(t, orgs, 0) + + total, err := CountOrgs(FindOrgOptions{ + UserID: 4, + IncludePrivate: true, + }) + assert.NoError(t, err) + assert.EqualValues(t, 1, total) +} + +func TestGetOwnedOrgsByUserID(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + orgs, err := GetOwnedOrgsByUserID(2) + assert.NoError(t, err) + if assert.Len(t, orgs, 1) { + assert.EqualValues(t, 3, orgs[0].ID) + } + + orgs, err = GetOwnedOrgsByUserID(4) + assert.NoError(t, err) + assert.Len(t, orgs, 0) +} + +func TestGetOwnedOrgsByUserIDDesc(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + orgs, err := GetOwnedOrgsByUserIDDesc(5, "id") + assert.NoError(t, err) + if assert.Len(t, orgs, 2) { + assert.EqualValues(t, 7, orgs[0].ID) + assert.EqualValues(t, 6, orgs[1].ID) + } + + orgs, err = GetOwnedOrgsByUserIDDesc(4, "id") + assert.NoError(t, err) + assert.Len(t, orgs, 0) +} + +func TestGetOrgUsersByUserID(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + orgUsers, err := GetOrgUsersByUserID(5, &SearchOrganizationsOptions{All: true}) + assert.NoError(t, err) + if assert.Len(t, orgUsers, 2) { + assert.Equal(t, OrgUser{ + ID: orgUsers[0].ID, + OrgID: 6, + UID: 5, + IsPublic: true, + }, *orgUsers[0]) + assert.Equal(t, OrgUser{ + ID: orgUsers[1].ID, + OrgID: 7, + UID: 5, + IsPublic: false, + }, *orgUsers[1]) + } + + publicOrgUsers, err := GetOrgUsersByUserID(5, &SearchOrganizationsOptions{All: false}) + assert.NoError(t, err) + assert.Len(t, publicOrgUsers, 1) + assert.Equal(t, *orgUsers[0], *publicOrgUsers[0]) + + orgUsers, err = GetOrgUsersByUserID(1, &SearchOrganizationsOptions{All: true}) + assert.NoError(t, err) + assert.Len(t, orgUsers, 0) +} + +func TestGetOrgUsersByOrgID(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + orgUsers, err := GetOrgUsersByOrgID(&FindOrgMembersOpts{ + ListOptions: db.ListOptions{}, + OrgID: 3, + PublicOnly: false, + }) + assert.NoError(t, err) + if assert.Len(t, orgUsers, 3) { + assert.Equal(t, OrgUser{ + ID: orgUsers[0].ID, + OrgID: 3, + UID: 2, + IsPublic: true, + }, *orgUsers[0]) + assert.Equal(t, OrgUser{ + ID: orgUsers[1].ID, + OrgID: 3, + UID: 4, + IsPublic: false, + }, *orgUsers[1]) + } + + orgUsers, err = GetOrgUsersByOrgID(&FindOrgMembersOpts{ + ListOptions: db.ListOptions{}, + OrgID: unittest.NonexistentID, + PublicOnly: false, + }) + assert.NoError(t, err) + assert.Len(t, orgUsers, 0) +} + +func TestChangeOrgUserStatus(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + testSuccess := func(orgID, userID int64, public bool) { + assert.NoError(t, ChangeOrgUserStatus(orgID, userID, public)) + orgUser := unittest.AssertExistsAndLoadBean(t, &OrgUser{OrgID: orgID, UID: userID}).(*OrgUser) + assert.Equal(t, public, orgUser.IsPublic) + } + + testSuccess(3, 2, false) + testSuccess(3, 2, false) + testSuccess(3, 4, true) + assert.NoError(t, ChangeOrgUserStatus(unittest.NonexistentID, unittest.NonexistentID, true)) +} + +func TestUser_GetUserTeamIDs(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + testSuccess := func(userID int64, expected []int64) { + teamIDs, err := org.GetUserTeamIDs(userID) + assert.NoError(t, err) + assert.Equal(t, expected, teamIDs) + } + testSuccess(2, []int64{1, 2}) + testSuccess(4, []int64{2}) + testSuccess(unittest.NonexistentID, []int64{}) +} + +func 
TestAccessibleReposEnv_CountRepos(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + testSuccess := func(userID, expectedCount int64) { + env, err := AccessibleReposEnv(db.DefaultContext, org, userID) + assert.NoError(t, err) + count, err := env.CountRepos() + assert.NoError(t, err) + assert.EqualValues(t, expectedCount, count) + } + testSuccess(2, 3) + testSuccess(4, 2) +} + +func TestAccessibleReposEnv_RepoIDs(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + testSuccess := func(userID, _, pageSize int64, expectedRepoIDs []int64) { + env, err := AccessibleReposEnv(db.DefaultContext, org, userID) + assert.NoError(t, err) + repoIDs, err := env.RepoIDs(1, 100) + assert.NoError(t, err) + assert.Equal(t, expectedRepoIDs, repoIDs) + } + testSuccess(2, 1, 100, []int64{3, 5, 32}) + testSuccess(4, 0, 100, []int64{3, 32}) +} + +func TestAccessibleReposEnv_Repos(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + testSuccess := func(userID int64, expectedRepoIDs []int64) { + env, err := AccessibleReposEnv(db.DefaultContext, org, userID) + assert.NoError(t, err) + repos, err := env.Repos(1, 100) + assert.NoError(t, err) + expectedRepos := make([]*repo_model.Repository, len(expectedRepoIDs)) + for i, repoID := range expectedRepoIDs { + expectedRepos[i] = unittest.AssertExistsAndLoadBean(t, + &repo_model.Repository{ID: repoID}).(*repo_model.Repository) + } + assert.Equal(t, expectedRepos, repos) + } + testSuccess(2, []int64{3, 5, 32}) + testSuccess(4, []int64{3, 32}) +} + +func TestAccessibleReposEnv_MirrorRepos(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + org := unittest.AssertExistsAndLoadBean(t, &Organization{ID: 3}).(*Organization) + testSuccess := func(userID int64, expectedRepoIDs []int64) { + env, err := AccessibleReposEnv(db.DefaultContext, org, userID) + assert.NoError(t, err) + repos, err := env.MirrorRepos() + assert.NoError(t, err) + expectedRepos := make([]*repo_model.Repository, len(expectedRepoIDs)) + for i, repoID := range expectedRepoIDs { + expectedRepos[i] = unittest.AssertExistsAndLoadBean(t, + &repo_model.Repository{ID: repoID}).(*repo_model.Repository) + } + assert.Equal(t, expectedRepos, repos) + } + testSuccess(2, []int64{5}) + testSuccess(4, []int64{}) +} + +func TestHasOrgVisibleTypePublic(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + user3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) + + const newOrgName = "test-org-public" + org := &Organization{ + Name: newOrgName, + Visibility: structs.VisibleTypePublic, + } + + unittest.AssertNotExistsBean(t, &user_model.User{Name: org.Name, Type: user_model.UserTypeOrganization}) + assert.NoError(t, CreateOrganization(org, owner)) + org = unittest.AssertExistsAndLoadBean(t, + &Organization{Name: org.Name, Type: user_model.UserTypeOrganization}).(*Organization) + test1 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), owner) + test2 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), user3) + test3 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), nil) + assert.True(t, test1) // owner of org + assert.True(t, test2) // user not a part of org + 
assert.True(t, test3) // logged out user +} + +func TestHasOrgVisibleTypeLimited(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + user3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) + + const newOrgName = "test-org-limited" + org := &Organization{ + Name: newOrgName, + Visibility: structs.VisibleTypeLimited, + } + + unittest.AssertNotExistsBean(t, &user_model.User{Name: org.Name, Type: user_model.UserTypeOrganization}) + assert.NoError(t, CreateOrganization(org, owner)) + org = unittest.AssertExistsAndLoadBean(t, + &Organization{Name: org.Name, Type: user_model.UserTypeOrganization}).(*Organization) + test1 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), owner) + test2 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), user3) + test3 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), nil) + assert.True(t, test1) // owner of org + assert.True(t, test2) // user not a part of org + assert.False(t, test3) // logged out user +} + +func TestHasOrgVisibleTypePrivate(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + user3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3}).(*user_model.User) + + const newOrgName = "test-org-private" + org := &Organization{ + Name: newOrgName, + Visibility: structs.VisibleTypePrivate, + } + + unittest.AssertNotExistsBean(t, &user_model.User{Name: org.Name, Type: user_model.UserTypeOrganization}) + assert.NoError(t, CreateOrganization(org, owner)) + org = unittest.AssertExistsAndLoadBean(t, + &Organization{Name: org.Name, Type: user_model.UserTypeOrganization}).(*Organization) + test1 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), owner) + test2 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), user3) + test3 := HasOrgOrUserVisible(db.DefaultContext, org.AsUser(), nil) + assert.True(t, test1) // owner of org + assert.False(t, test2) // user not a part of org + assert.False(t, test3) // logged out user +} + +func TestGetUsersWhoCanCreateOrgRepo(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + users, err := GetUsersWhoCanCreateOrgRepo(db.DefaultContext, 3) + assert.NoError(t, err) + assert.Len(t, users, 2) + var ids []int64 + for i := range users { + ids = append(ids, users[i].ID) + } + assert.ElementsMatch(t, ids, []int64{2, 28}) + + users, err = GetUsersWhoCanCreateOrgRepo(db.DefaultContext, 7) + assert.NoError(t, err) + assert.Len(t, users, 1) + assert.EqualValues(t, 5, users[0].ID) +} diff --git a/models/organization/org_user.go b/models/organization/org_user.go new file mode 100644 index 0000000000..b679246d0b --- /dev/null +++ b/models/organization/org_user.go @@ -0,0 +1,83 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package organization + +import ( + "context" + + "code.gitea.io/gitea/models/db" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" + + "xorm.io/builder" +) + +// ________ ____ ___ +// \_____ \_______ ____ | | \______ ___________ +// / | \_ __ \/ ___\| | / ___// __ \_ __ \ +// / | \ | \/ /_/ > | /\___ \\ ___/| | \/ +// \_______ /__| \___ /|______//____ >\___ >__| +// \/ /_____/ \/ \/ + +// OrgUser represents an organization-user relation. 
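+// One row exists per membership; IsPublic controls whether the membership is shown
+// to people outside the organization (see IsPublicMembership and FindOrgMembersOpts.PublicOnly).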
+type OrgUser struct { + ID int64 `xorm:"pk autoincr"` + UID int64 `xorm:"INDEX UNIQUE(s)"` + OrgID int64 `xorm:"INDEX UNIQUE(s)"` + IsPublic bool `xorm:"INDEX"` +} + +func init() { + db.RegisterModel(new(OrgUser)) +} + +// GetOrganizationCount returns count of membership of organization of the user. +func GetOrganizationCount(ctx context.Context, u *user_model.User) (int64, error) { + return db.GetEngine(ctx). + Where("uid=?", u.ID). + Count(new(OrgUser)) +} + +// IsOrganizationOwner returns true if given user is in the owner team. +func IsOrganizationOwner(ctx context.Context, orgID, uid int64) (bool, error) { + ownerTeam, err := GetOwnerTeam(ctx, orgID) + if err != nil { + if IsErrTeamNotExist(err) { + log.Error("Organization does not have owner team: %d", orgID) + return false, nil + } + return false, err + } + return IsTeamMember(ctx, orgID, ownerTeam.ID, uid) +} + +// IsOrganizationMember returns true if given user is member of organization. +func IsOrganizationMember(ctx context.Context, orgID, uid int64) (bool, error) { + return db.GetEngine(ctx). + Where("uid=?", uid). + And("org_id=?", orgID). + Table("org_user"). + Exist() +} + +// IsPublicMembership returns true if the given user's membership of given org is public. +func IsPublicMembership(orgID, uid int64) (bool, error) { + return db.GetEngine(db.DefaultContext). + Where("uid=?", uid). + And("org_id=?", orgID). + And("is_public=?", true). + Table("org_user"). + Exist() +} + +// CanCreateOrgRepo returns true if user can create repo in organization +func CanCreateOrgRepo(orgID, uid int64) (bool, error) { + return db.GetEngine(db.DefaultContext). + Where(builder.Eq{"team.can_create_org_repo": true}). + Join("INNER", "team_user", "team_user.team_id = team.id"). + And("team_user.uid = ?", uid). + And("team_user.org_id = ?", orgID). + Exist(new(Team)) +} diff --git a/models/organization/org_user_test.go b/models/organization/org_user_test.go new file mode 100644 index 0000000000..b323002934 --- /dev/null +++ b/models/organization/org_user_test.go @@ -0,0 +1,72 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package organization + +import ( + "fmt" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + + "github.com/stretchr/testify/assert" +) + +func TestUserIsPublicMember(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + tt := []struct { + uid int64 + orgid int64 + expected bool + }{ + {2, 3, true}, + {4, 3, false}, + {5, 6, true}, + {5, 7, false}, + } + for _, v := range tt { + t.Run(fmt.Sprintf("UserId%dIsPublicMemberOf%d", v.uid, v.orgid), func(t *testing.T) { + testUserIsPublicMember(t, v.uid, v.orgid, v.expected) + }) + } +} + +func testUserIsPublicMember(t *testing.T, uid, orgID int64, expected bool) { + user, err := user_model.GetUserByID(uid) + assert.NoError(t, err) + is, err := IsPublicMembership(orgID, user.ID) + assert.NoError(t, err) + assert.Equal(t, expected, is) +} + +func TestIsUserOrgOwner(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + tt := []struct { + uid int64 + orgid int64 + expected bool + }{ + {2, 3, true}, + {4, 3, false}, + {5, 6, true}, + {5, 7, true}, + } + for _, v := range tt { + t.Run(fmt.Sprintf("UserId%dIsOrgOwnerOf%d", v.uid, v.orgid), func(t *testing.T) { + testIsUserOrgOwner(t, v.uid, v.orgid, v.expected) + }) + } +} + +func testIsUserOrgOwner(t *testing.T, uid, orgID int64, expected bool) { + user, err := user_model.GetUserByID(uid) + assert.NoError(t, err) + is, err := IsOrganizationOwner(db.DefaultContext, orgID, user.ID) + assert.NoError(t, err) + assert.Equal(t, expected, is) +} diff --git a/models/organization/team.go b/models/organization/team.go new file mode 100644 index 0000000000..077fba6a60 --- /dev/null +++ b/models/organization/team.go @@ -0,0 +1,361 @@ +// Copyright 2018 The Gitea Authors. All rights reserved. +// Copyright 2016 The Gogs Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package organization + +import ( + "context" + "fmt" + "strings" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/perm" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unit" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" + + "xorm.io/builder" +) + +// ___________ +// \__ ___/___ _____ _____ +// | |_/ __ \\__ \ / \ +// | |\ ___/ / __ \| Y Y \ +// |____| \___ >____ /__|_| / +// \/ \/ \/ + +// ErrTeamAlreadyExist represents a "TeamAlreadyExist" kind of error. +type ErrTeamAlreadyExist struct { + OrgID int64 + Name string +} + +// IsErrTeamAlreadyExist checks if an error is a ErrTeamAlreadyExist. +func IsErrTeamAlreadyExist(err error) bool { + _, ok := err.(ErrTeamAlreadyExist) + return ok +} + +func (err ErrTeamAlreadyExist) Error() string { + return fmt.Sprintf("team already exists [org_id: %d, name: %s]", err.OrgID, err.Name) +} + +// ErrTeamNotExist represents a "TeamNotExist" error +type ErrTeamNotExist struct { + OrgID int64 + TeamID int64 + Name string +} + +// IsErrTeamNotExist checks if an error is a ErrTeamNotExist. +func IsErrTeamNotExist(err error) bool { + _, ok := err.(ErrTeamNotExist) + return ok +} + +func (err ErrTeamNotExist) Error() string { + return fmt.Sprintf("team does not exist [org_id %d, team_id %d, name: %s]", err.OrgID, err.TeamID, err.Name) +} + +// OwnerTeamName return the owner team name +const OwnerTeamName = "Owners" + +// Team represents a organization team. 
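+// AccessMode is persisted in the authorize column; Repos, Members and Units are
+// lazily loaded caches that are not stored in the database (xorm:"-").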
+type Team struct { + ID int64 `xorm:"pk autoincr"` + OrgID int64 `xorm:"INDEX"` + LowerName string + Name string + Description string + AccessMode perm.AccessMode `xorm:"'authorize'"` + Repos []*repo_model.Repository `xorm:"-"` + Members []*user_model.User `xorm:"-"` + NumRepos int + NumMembers int + Units []*TeamUnit `xorm:"-"` + IncludesAllRepositories bool `xorm:"NOT NULL DEFAULT false"` + CanCreateOrgRepo bool `xorm:"NOT NULL DEFAULT false"` +} + +func init() { + db.RegisterModel(new(Team)) + db.RegisterModel(new(TeamUser)) + db.RegisterModel(new(TeamRepo)) + db.RegisterModel(new(TeamUnit)) +} + +// SearchTeamOptions holds the search options +type SearchTeamOptions struct { + db.ListOptions + UserID int64 + Keyword string + OrgID int64 + IncludeDesc bool +} + +// SearchTeam search for teams. Caller is responsible to check permissions. +func SearchTeam(opts *SearchTeamOptions) ([]*Team, int64, error) { + if opts.Page <= 0 { + opts.Page = 1 + } + if opts.PageSize == 0 { + // Default limit + opts.PageSize = 10 + } + + cond := builder.NewCond() + + if len(opts.Keyword) > 0 { + lowerKeyword := strings.ToLower(opts.Keyword) + var keywordCond builder.Cond = builder.Like{"lower_name", lowerKeyword} + if opts.IncludeDesc { + keywordCond = keywordCond.Or(builder.Like{"LOWER(description)", lowerKeyword}) + } + cond = cond.And(keywordCond) + } + + cond = cond.And(builder.Eq{"org_id": opts.OrgID}) + + sess := db.GetEngine(db.DefaultContext) + + count, err := sess. + Where(cond). + Count(new(Team)) + if err != nil { + return nil, 0, err + } + + sess = sess.Where(cond) + if opts.PageSize == -1 { + opts.PageSize = int(count) + } else { + sess = sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) + } + + teams := make([]*Team, 0, opts.PageSize) + if err = sess. + OrderBy("lower_name"). + Find(&teams); err != nil { + return nil, 0, err + } + + return teams, count, nil +} + +// ColorFormat provides a basic color format for a Team +func (t *Team) ColorFormat(s fmt.State) { + if t == nil { + log.ColorFprintf(s, "%d:%s (OrgID: %d) %-v", + log.NewColoredIDValue(0), + "", + log.NewColoredIDValue(0), + 0) + return + } + log.ColorFprintf(s, "%d:%s (OrgID: %d) %-v", + log.NewColoredIDValue(t.ID), + t.Name, + log.NewColoredIDValue(t.OrgID), + t.AccessMode) +} + +// GetUnits return a list of available units for a team +func (t *Team) GetUnits() error { + return t.getUnits(db.DefaultContext) +} + +func (t *Team) getUnits(ctx context.Context) (err error) { + if t.Units != nil { + return nil + } + + t.Units, err = getUnitsByTeamID(ctx, t.ID) + return err +} + +// GetUnitNames returns the team units names +func (t *Team) GetUnitNames() (res []string) { + if t.AccessMode >= perm.AccessModeAdmin { + return unit.AllUnitKeyNames() + } + + for _, u := range t.Units { + res = append(res, unit.Units[u.Type].NameKey) + } + return +} + +// GetUnitsMap returns the team units permissions +func (t *Team) GetUnitsMap() map[string]string { + m := make(map[string]string) + if t.AccessMode >= perm.AccessModeAdmin { + for _, u := range unit.Units { + m[u.NameKey] = t.AccessMode.String() + } + } else { + for _, u := range t.Units { + m[u.Unit().NameKey] = u.AccessMode.String() + } + } + return m +} + +// IsOwnerTeam returns true if team is owner team. +func (t *Team) IsOwnerTeam() bool { + return t.Name == OwnerTeamName +} + +// IsMember returns true if given user is a member of team. 
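+// Lookup errors are logged and reported as a non-membership (false).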
+func (t *Team) IsMember(userID int64) bool { + isMember, err := IsTeamMember(db.DefaultContext, t.OrgID, t.ID, userID) + if err != nil { + log.Error("IsMember: %v", err) + return false + } + return isMember +} + +// GetRepositoriesCtx returns paginated repositories in team of organization. +func (t *Team) GetRepositoriesCtx(ctx context.Context) (err error) { + if t.Repos != nil { + return nil + } + t.Repos, err = GetTeamRepositories(ctx, &SearchTeamRepoOptions{ + TeamID: t.ID, + }) + return +} + +// GetMembersCtx returns paginated members in team of organization. +func (t *Team) GetMembersCtx(ctx context.Context) (err error) { + t.Members, err = GetTeamMembers(ctx, &SearchMembersOptions{ + TeamID: t.ID, + }) + return err +} + +// UnitEnabled returns if the team has the given unit type enabled +func (t *Team) UnitEnabled(tp unit.Type) bool { + return t.UnitAccessMode(tp) > perm.AccessModeNone +} + +// UnitAccessMode returns if the team has the given unit type enabled +// it is called in templates, should not be replaced by `UnitAccessModeCtx(ctx ...)` +func (t *Team) UnitAccessMode(tp unit.Type) perm.AccessMode { + return t.UnitAccessModeCtx(db.DefaultContext, tp) +} + +// UnitAccessModeCtx returns if the team has the given unit type enabled +func (t *Team) UnitAccessModeCtx(ctx context.Context, tp unit.Type) perm.AccessMode { + if err := t.getUnits(ctx); err != nil { + log.Warn("Error loading team (ID: %d) units: %s", t.ID, err.Error()) + } + + for _, unit := range t.Units { + if unit.Type == tp { + return unit.AccessMode + } + } + return perm.AccessModeNone +} + +// IsUsableTeamName tests if a name could be as team name +func IsUsableTeamName(name string) error { + switch name { + case "new": + return db.ErrNameReserved{Name: name} + default: + return nil + } +} + +func getTeam(ctx context.Context, orgID int64, name string) (*Team, error) { + t := &Team{ + OrgID: orgID, + LowerName: strings.ToLower(name), + } + has, err := db.GetByBean(ctx, t) + if err != nil { + return nil, err + } else if !has { + return nil, ErrTeamNotExist{orgID, 0, name} + } + return t, nil +} + +// GetTeam returns team by given team name and organization. +func GetTeam(orgID int64, name string) (*Team, error) { + return getTeam(db.DefaultContext, orgID, name) +} + +// GetTeamIDsByNames returns a slice of team ids corresponds to names. +func GetTeamIDsByNames(orgID int64, names []string, ignoreNonExistent bool) ([]int64, error) { + ids := make([]int64, 0, len(names)) + for _, name := range names { + u, err := GetTeam(orgID, name) + if err != nil { + if ignoreNonExistent { + continue + } else { + return nil, err + } + } + ids = append(ids, u.ID) + } + return ids, nil +} + +// GetOwnerTeam returns team by given team name and organization. +func GetOwnerTeam(ctx context.Context, orgID int64) (*Team, error) { + return getTeam(ctx, orgID, OwnerTeamName) +} + +// GetTeamByIDCtx returns team by given ID. +func GetTeamByIDCtx(ctx context.Context, teamID int64) (*Team, error) { + t := new(Team) + has, err := db.GetEngine(ctx).ID(teamID).Get(t) + if err != nil { + return nil, err + } else if !has { + return nil, ErrTeamNotExist{0, teamID, ""} + } + return t, nil +} + +// GetTeamByID returns team by given ID. +func GetTeamByID(teamID int64) (*Team, error) { + return GetTeamByIDCtx(db.DefaultContext, teamID) +} + +// GetTeamNamesByID returns team's lower name from a list of team ids. 
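+// An empty teamIDs slice short-circuits to an empty result without querying the database.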
+func GetTeamNamesByID(teamIDs []int64) ([]string, error) { + if len(teamIDs) == 0 { + return []string{}, nil + } + + var teamNames []string + err := db.GetEngine(db.DefaultContext).Table("team"). + Select("lower_name"). + In("id", teamIDs). + Asc("name"). + Find(&teamNames) + + return teamNames, err +} + +func getRepoTeams(e db.Engine, repo *repo_model.Repository) (teams []*Team, err error) { + return teams, e. + Join("INNER", "team_repo", "team_repo.team_id = team.id"). + Where("team.org_id = ?", repo.OwnerID). + And("team_repo.repo_id=?", repo.ID). + OrderBy("CASE WHEN name LIKE '" + OwnerTeamName + "' THEN '' ELSE name END"). + Find(&teams) +} + +// GetRepoTeams gets the list of teams that has access to the repository +func GetRepoTeams(repo *repo_model.Repository) ([]*Team, error) { + return getRepoTeams(db.GetEngine(db.DefaultContext), repo) +} diff --git a/models/organization/team_repo.go b/models/organization/team_repo.go new file mode 100644 index 0000000000..717d754c40 --- /dev/null +++ b/models/organization/team_repo.go @@ -0,0 +1,85 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package organization + +import ( + "context" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/perm" + repo_model "code.gitea.io/gitea/models/repo" + + "xorm.io/builder" +) + +// TeamRepo represents an team-repository relation. +type TeamRepo struct { + ID int64 `xorm:"pk autoincr"` + OrgID int64 `xorm:"INDEX"` + TeamID int64 `xorm:"UNIQUE(s)"` + RepoID int64 `xorm:"UNIQUE(s)"` +} + +// HasTeamRepo returns true if given repository belongs to team. +func HasTeamRepo(ctx context.Context, orgID, teamID, repoID int64) bool { + has, _ := db.GetEngine(ctx). + Where("org_id=?", orgID). + And("team_id=?", teamID). + And("repo_id=?", repoID). + Get(new(TeamRepo)) + return has +} + +type SearchTeamRepoOptions struct { + db.ListOptions + TeamID int64 +} + +// GetRepositories returns paginated repositories in team of organization. +func GetTeamRepositories(ctx context.Context, opts *SearchTeamRepoOptions) ([]*repo_model.Repository, error) { + sess := db.GetEngine(ctx) + if opts.TeamID > 0 { + sess = sess.In("id", + builder.Select("repo_id"). + From("team_repo"). + Where(builder.Eq{"team_id": opts.TeamID}), + ) + } + if opts.PageSize > 0 { + sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize) + } + var repos []*repo_model.Repository + return repos, sess.OrderBy("repository.name"). + Find(&repos) +} + +// AddTeamRepo addes a repo for an organization's team +func AddTeamRepo(ctx context.Context, orgID, teamID, repoID int64) error { + _, err := db.GetEngine(ctx).Insert(&TeamRepo{ + OrgID: orgID, + TeamID: teamID, + RepoID: repoID, + }) + return err +} + +// RemoveTeamRepo remove repository from team +func RemoveTeamRepo(ctx context.Context, teamID, repoID int64) error { + _, err := db.DeleteByBean(ctx, &TeamRepo{ + TeamID: teamID, + RepoID: repoID, + }) + return err +} + +// GetTeamsWithAccessToRepo returns all teams in an organization that have given access level to the repository. +func GetTeamsWithAccessToRepo(ctx context.Context, orgID, repoID int64, mode perm.AccessMode) ([]*Team, error) { + teams := make([]*Team, 0, 5) + return teams, db.GetEngine(ctx).Where("team.authorize >= ?", mode). + Join("INNER", "team_repo", "team_repo.team_id = team.id"). + And("team_repo.org_id = ?", orgID). + And("team_repo.repo_id = ?", repoID). 
+ Find(&teams) +} diff --git a/models/organization/team_test.go b/models/organization/team_test.go new file mode 100644 index 0000000000..bbf9f789f6 --- /dev/null +++ b/models/organization/team_test.go @@ -0,0 +1,199 @@ +// Copyright 2017 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package organization + +import ( + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/unittest" + + "github.com/stretchr/testify/assert" +) + +func TestTeam_IsOwnerTeam(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 1}).(*Team) + assert.True(t, team.IsOwnerTeam()) + + team = unittest.AssertExistsAndLoadBean(t, &Team{ID: 2}).(*Team) + assert.False(t, team.IsOwnerTeam()) +} + +func TestTeam_IsMember(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 1}).(*Team) + assert.True(t, team.IsMember(2)) + assert.False(t, team.IsMember(4)) + assert.False(t, team.IsMember(unittest.NonexistentID)) + + team = unittest.AssertExistsAndLoadBean(t, &Team{ID: 2}).(*Team) + assert.True(t, team.IsMember(2)) + assert.True(t, team.IsMember(4)) + assert.False(t, team.IsMember(unittest.NonexistentID)) +} + +func TestTeam_GetRepositories(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + test := func(teamID int64) { + team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) + assert.NoError(t, team.GetRepositoriesCtx(db.DefaultContext)) + assert.Len(t, team.Repos, team.NumRepos) + for _, repo := range team.Repos { + unittest.AssertExistsAndLoadBean(t, &TeamRepo{TeamID: teamID, RepoID: repo.ID}) + } + } + test(1) + test(3) +} + +func TestTeam_GetMembers(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + test := func(teamID int64) { + team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) + assert.NoError(t, team.GetMembersCtx(db.DefaultContext)) + assert.Len(t, team.Members, team.NumMembers) + for _, member := range team.Members { + unittest.AssertExistsAndLoadBean(t, &TeamUser{UID: member.ID, TeamID: teamID}) + } + } + test(1) + test(3) +} + +func TestGetTeam(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + testSuccess := func(orgID int64, name string) { + team, err := GetTeam(orgID, name) + assert.NoError(t, err) + assert.EqualValues(t, orgID, team.OrgID) + assert.Equal(t, name, team.Name) + } + testSuccess(3, "Owners") + testSuccess(3, "team1") + + _, err := GetTeam(3, "nonexistent") + assert.Error(t, err) + _, err = GetTeam(unittest.NonexistentID, "Owners") + assert.Error(t, err) +} + +func TestGetTeamByID(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + testSuccess := func(teamID int64) { + team, err := GetTeamByID(teamID) + assert.NoError(t, err) + assert.EqualValues(t, teamID, team.ID) + } + testSuccess(1) + testSuccess(2) + testSuccess(3) + testSuccess(4) + + _, err := GetTeamByID(unittest.NonexistentID) + assert.Error(t, err) +} + +func TestIsTeamMember(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + test := func(orgID, teamID, userID int64, expected bool) { + isMember, err := IsTeamMember(db.DefaultContext, orgID, teamID, userID) + assert.NoError(t, err) + assert.Equal(t, expected, isMember) + } + + test(3, 1, 2, true) + test(3, 1, 4, false) + test(3, 1, unittest.NonexistentID, false) + + test(3, 2, 2, true) + 
test(3, 2, 4, true) + + test(3, unittest.NonexistentID, unittest.NonexistentID, false) + test(unittest.NonexistentID, unittest.NonexistentID, unittest.NonexistentID, false) +} + +func TestGetTeamMembers(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + test := func(teamID int64) { + team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) + members, err := GetTeamMembers(db.DefaultContext, &SearchMembersOptions{ + TeamID: teamID, + }) + assert.NoError(t, err) + assert.Len(t, members, team.NumMembers) + for _, member := range members { + unittest.AssertExistsAndLoadBean(t, &TeamUser{UID: member.ID, TeamID: teamID}) + } + } + test(1) + test(3) +} + +func TestGetUserTeams(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + test := func(userID int64) { + teams, _, err := SearchTeam(&SearchTeamOptions{UserID: userID}) + assert.NoError(t, err) + for _, team := range teams { + unittest.AssertExistsAndLoadBean(t, &TeamUser{TeamID: team.ID, UID: userID}) + } + } + test(2) + test(5) + test(unittest.NonexistentID) +} + +func TestGetUserOrgTeams(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + test := func(orgID, userID int64) { + teams, err := GetUserOrgTeams(db.DefaultContext, orgID, userID) + assert.NoError(t, err) + for _, team := range teams { + assert.EqualValues(t, orgID, team.OrgID) + unittest.AssertExistsAndLoadBean(t, &TeamUser{TeamID: team.ID, UID: userID}) + } + } + test(3, 2) + test(3, 4) + test(3, unittest.NonexistentID) +} + +func TestHasTeamRepo(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + test := func(teamID, repoID int64, expected bool) { + team := unittest.AssertExistsAndLoadBean(t, &Team{ID: teamID}).(*Team) + assert.Equal(t, expected, HasTeamRepo(db.DefaultContext, team.OrgID, teamID, repoID)) + } + test(1, 1, false) + test(1, 3, true) + test(1, 5, true) + test(1, unittest.NonexistentID, false) + + test(2, 3, true) + test(2, 5, false) +} + +func TestUsersInTeamsCount(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + test := func(teamIDs, userIDs []int64, expected int64) { + count, err := UsersInTeamsCount(teamIDs, userIDs) + assert.NoError(t, err) + assert.Equal(t, expected, count) + } + + test([]int64{2}, []int64{1, 2, 3, 4}, 1) // only userid 2 + test([]int64{1, 2, 3, 4, 5}, []int64{2, 5}, 2) // userid 2,4 + test([]int64{1, 2, 3, 4, 5}, []int64{2, 3, 5}, 3) // userid 2,4,5 +} diff --git a/models/organization/team_unit.go b/models/organization/team_unit.go new file mode 100644 index 0000000000..a712ddb2eb --- /dev/null +++ b/models/organization/team_unit.go @@ -0,0 +1,52 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package organization + +import ( + "context" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/perm" + "code.gitea.io/gitea/models/unit" +) + +// TeamUnit describes all units of a repository +type TeamUnit struct { + ID int64 `xorm:"pk autoincr"` + OrgID int64 `xorm:"INDEX"` + TeamID int64 `xorm:"UNIQUE(s)"` + Type unit.Type `xorm:"UNIQUE(s)"` + AccessMode perm.AccessMode +} + +// Unit returns Unit +func (t *TeamUnit) Unit() unit.Unit { + return unit.Units[t.Type] +} + +func getUnitsByTeamID(ctx context.Context, teamID int64) (units []*TeamUnit, err error) { + return units, db.GetEngine(ctx).Where("team_id = ?", teamID).Find(&units) +} + +// UpdateTeamUnits updates a teams's units +func UpdateTeamUnits(team *Team, units []TeamUnit) (err error) { + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + if _, err = db.GetEngine(ctx).Where("team_id = ?", team.ID).Delete(new(TeamUnit)); err != nil { + return err + } + + if len(units) > 0 { + if err = db.Insert(ctx, units); err != nil { + return err + } + } + + return committer.Commit() +} diff --git a/models/organization/team_user.go b/models/organization/team_user.go new file mode 100644 index 0000000000..80f4d00e3d --- /dev/null +++ b/models/organization/team_user.go @@ -0,0 +1,110 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package organization + +import ( + "context" + + "code.gitea.io/gitea/models/db" + user_model "code.gitea.io/gitea/models/user" + + "xorm.io/builder" +) + +// TeamUser represents an team-user relation. +type TeamUser struct { + ID int64 `xorm:"pk autoincr"` + OrgID int64 `xorm:"INDEX"` + TeamID int64 `xorm:"UNIQUE(s)"` + UID int64 `xorm:"UNIQUE(s)"` +} + +// IsTeamMember returns true if given user is a member of team. +func IsTeamMember(ctx context.Context, orgID, teamID, userID int64) (bool, error) { + return db.GetEngine(ctx). + Where("org_id=?", orgID). + And("team_id=?", teamID). + And("uid=?", userID). + Table("team_user"). + Exist() +} + +// GetTeamUsersByTeamID returns team users for a team +func GetTeamUsersByTeamID(ctx context.Context, teamID int64) ([]*TeamUser, error) { + teamUsers := make([]*TeamUser, 0, 10) + return teamUsers, db.GetEngine(ctx). + Where("team_id=?", teamID). + Find(&teamUsers) +} + +// SearchMembersOptions holds the search options +type SearchMembersOptions struct { + db.ListOptions + TeamID int64 +} + +func (opts SearchMembersOptions) ToConds() builder.Cond { + cond := builder.NewCond() + if opts.TeamID > 0 { + cond = cond.And(builder.Eq{"": opts.TeamID}) + } + return cond +} + +// GetTeamMembers returns all members in given team of organization. +func GetTeamMembers(ctx context.Context, opts *SearchMembersOptions) ([]*user_model.User, error) { + var members []*user_model.User + sess := db.GetEngine(ctx) + if opts.TeamID > 0 { + sess = sess.In("id", + builder.Select("uid"). + From("team_user"). + Where(builder.Eq{"team_id": opts.TeamID}), + ) + } + if opts.PageSize > 0 && opts.Page > -1 { + sess = sess.Limit(opts.PageSize, opts.Page*opts.PageSize) + } + if err := sess.OrderBy("full_name, name").Find(&members); err != nil { + return nil, err + } + return members, nil +} + +// GetUserOrgTeams returns all teams that user belongs to in given organization. +func GetUserOrgTeams(ctx context.Context, orgID, userID int64) (teams []*Team, err error) { + return teams, db.GetEngine(ctx). 
+ Join("INNER", "team_user", "team_user.team_id = team.id"). + Where("team.org_id = ?", orgID). + And("team_user.uid=?", userID). + Find(&teams) +} + +// GetUserRepoTeams returns user repo's teams +func GetUserRepoTeams(ctx context.Context, orgID, userID, repoID int64) (teams []*Team, err error) { + return teams, db.GetEngine(ctx). + Join("INNER", "team_user", "team_user.team_id = team.id"). + Join("INNER", "team_repo", "team_repo.team_id = team.id"). + Where("team.org_id = ?", orgID). + And("team_user.uid=?", userID). + And("team_repo.repo_id=?", repoID). + Find(&teams) +} + +// IsUserInTeams returns if a user in some teams +func IsUserInTeams(ctx context.Context, userID int64, teamIDs []int64) (bool, error) { + return db.GetEngine(ctx).Where("uid=?", userID).In("team_id", teamIDs).Exist(new(TeamUser)) +} + +// UsersInTeamsCount counts the number of users which are in userIDs and teamIDs +func UsersInTeamsCount(userIDs, teamIDs []int64) (int64, error) { + var ids []int64 + if err := db.GetEngine(db.DefaultContext).In("uid", userIDs).In("team_id", teamIDs). + Table("team_user"). + Cols("uid").GroupBy("uid").Find(&ids); err != nil { + return 0, err + } + return int64(len(ids)), nil +} diff --git a/models/packages/conan/references.go b/models/packages/conan/references.go new file mode 100644 index 0000000000..e47e689af7 --- /dev/null +++ b/models/packages/conan/references.go @@ -0,0 +1,171 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package conan + +import ( + "context" + "errors" + "strconv" + "strings" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + conan_module "code.gitea.io/gitea/modules/packages/conan" + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/builder" +) + +var ( + ErrRecipeReferenceNotExist = errors.New("Recipe reference does not exist") + ErrPackageReferenceNotExist = errors.New("Package reference does not exist") +) + +// RecipeExists checks if a recipe exists +func RecipeExists(ctx context.Context, ownerID int64, ref *conan_module.RecipeReference) (bool, error) { + revisions, err := GetRecipeRevisions(ctx, ownerID, ref) + if err != nil { + return false, err + } + + return len(revisions) != 0, nil +} + +type PropertyValue struct { + Value string + CreatedUnix timeutil.TimeStamp +} + +func findPropertyValues(ctx context.Context, propertyName string, ownerID int64, name, version string, propertyFilter map[string]string) ([]*PropertyValue, error) { + var propsCond builder.Cond = builder.Eq{ + "package_property.ref_type": packages.PropertyTypeFile, + } + propsCond = propsCond.And(builder.Expr("package_property.ref_id = package_file.id")) + + propsCondBlock := builder.NewCond() + for name, value := range propertyFilter { + propsCondBlock = propsCondBlock.Or(builder.Eq{ + "package_property.name": name, + "package_property.value": value, + }) + } + propsCond = propsCond.And(propsCondBlock) + + var cond builder.Cond = builder.Eq{ + "package.type": packages.TypeConan, + "package.owner_id": ownerID, + "package.lower_name": strings.ToLower(name), + "package_version.lower_version": strings.ToLower(version), + "package_version.is_internal": false, + strconv.Itoa(len(propertyFilter)): builder.Select("COUNT(*)").Where(propsCond).From("package_property"), + } + + in2 := builder. + Select("package_file.id"). + From("package_file"). + InnerJoin("package_version", "package_version.id = package_file.version_id"). 
+ InnerJoin("package", "package.id = package_version.package_id"). + Where(cond) + + query := builder. + Select("package_property.value, MAX(package_file.created_unix) AS created_unix"). + From("package_property"). + InnerJoin("package_file", "package_file.id = package_property.ref_id"). + Where(builder.Eq{"package_property.name": propertyName}.And(builder.In("package_property.ref_id", in2))). + GroupBy("package_property.value"). + OrderBy("created_unix DESC") + + var values []*PropertyValue + return values, db.GetEngine(ctx).SQL(query).Find(&values) +} + +// GetRecipeRevisions gets all revisions of a recipe +func GetRecipeRevisions(ctx context.Context, ownerID int64, ref *conan_module.RecipeReference) ([]*PropertyValue, error) { + values, err := findPropertyValues( + ctx, + conan_module.PropertyRecipeRevision, + ownerID, + ref.Name, + ref.Version, + map[string]string{ + conan_module.PropertyRecipeUser: ref.User, + conan_module.PropertyRecipeChannel: ref.Channel, + }, + ) + if err != nil { + return nil, err + } + + return values, nil +} + +// GetLastRecipeRevision gets the latest recipe revision +func GetLastRecipeRevision(ctx context.Context, ownerID int64, ref *conan_module.RecipeReference) (*PropertyValue, error) { + revisions, err := GetRecipeRevisions(ctx, ownerID, ref) + if err != nil { + return nil, err + } + + if len(revisions) == 0 { + return nil, ErrRecipeReferenceNotExist + } + return revisions[0], nil +} + +// GetPackageReferences gets all package references of a recipe +func GetPackageReferences(ctx context.Context, ownerID int64, ref *conan_module.RecipeReference) ([]*PropertyValue, error) { + values, err := findPropertyValues( + ctx, + conan_module.PropertyPackageReference, + ownerID, + ref.Name, + ref.Version, + map[string]string{ + conan_module.PropertyRecipeUser: ref.User, + conan_module.PropertyRecipeChannel: ref.Channel, + conan_module.PropertyRecipeRevision: ref.Revision, + }, + ) + if err != nil { + return nil, err + } + + return values, nil +} + +// GetPackageRevisions gets all revision of a package +func GetPackageRevisions(ctx context.Context, ownerID int64, ref *conan_module.PackageReference) ([]*PropertyValue, error) { + values, err := findPropertyValues( + ctx, + conan_module.PropertyPackageRevision, + ownerID, + ref.Recipe.Name, + ref.Recipe.Version, + map[string]string{ + conan_module.PropertyRecipeUser: ref.Recipe.User, + conan_module.PropertyRecipeChannel: ref.Recipe.Channel, + conan_module.PropertyRecipeRevision: ref.Recipe.Revision, + conan_module.PropertyPackageReference: ref.Reference, + }, + ) + if err != nil { + return nil, err + } + + return values, nil +} + +// GetLastPackageRevision gets the latest package revision +func GetLastPackageRevision(ctx context.Context, ownerID int64, ref *conan_module.PackageReference) (*PropertyValue, error) { + revisions, err := GetPackageRevisions(ctx, ownerID, ref) + if err != nil { + return nil, err + } + + if len(revisions) == 0 { + return nil, ErrPackageReferenceNotExist + } + return revisions[0], nil +} diff --git a/models/packages/conan/search.go b/models/packages/conan/search.go new file mode 100644 index 0000000000..6a2cfa38f5 --- /dev/null +++ b/models/packages/conan/search.go @@ -0,0 +1,149 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package conan + +import ( + "context" + "fmt" + "strconv" + "strings" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + conan_module "code.gitea.io/gitea/modules/packages/conan" + + "xorm.io/builder" +) + +// buildCondition creates a Like condition if a wildcard is present. Otherwise Eq is used. +func buildCondition(name, value string) builder.Cond { + if strings.Contains(value, "*") { + return builder.Like{name, strings.ReplaceAll(strings.ReplaceAll(value, "_", "\\_"), "*", "%")} + } + return builder.Eq{name: value} +} + +type RecipeSearchOptions struct { + OwnerID int64 + Name string + Version string + User string + Channel string +} + +// SearchRecipes gets all recipes matching the search options +func SearchRecipes(ctx context.Context, opts *RecipeSearchOptions) ([]string, error) { + var cond builder.Cond = builder.Eq{ + "package_file.is_lead": true, + "package.type": packages.TypeConan, + "package.owner_id": opts.OwnerID, + "package_version.is_internal": false, + } + + if opts.Name != "" { + cond = cond.And(buildCondition("package.lower_name", strings.ToLower(opts.Name))) + } + if opts.Version != "" { + cond = cond.And(buildCondition("package_version.lower_version", strings.ToLower(opts.Version))) + } + if opts.User != "" || opts.Channel != "" { + var propsCond builder.Cond = builder.Eq{ + "package_property.ref_type": packages.PropertyTypeFile, + } + propsCond = propsCond.And(builder.Expr("package_property.ref_id = package_file.id")) + + count := 0 + propsCondBlock := builder.NewCond() + if opts.User != "" { + count++ + propsCondBlock = propsCondBlock.Or(builder.Eq{"package_property.name": conan_module.PropertyRecipeUser}.And(buildCondition("package_property.value", opts.User))) + } + if opts.Channel != "" { + count++ + propsCondBlock = propsCondBlock.Or(builder.Eq{"package_property.name": conan_module.PropertyRecipeChannel}.And(buildCondition("package_property.value", opts.Channel))) + } + propsCond = propsCond.And(propsCondBlock) + + cond = cond.And(builder.Eq{ + strconv.Itoa(count): builder.Select("COUNT(*)").Where(propsCond).From("package_property"), + }) + } + + query := builder. + Select("package.name, package_version.version, package_file.id"). + From("package_file"). + InnerJoin("package_version", "package_version.id = package_file.version_id"). + InnerJoin("package", "package.id = package_version.package_id"). 
+ Where(cond) + + results := make([]struct { + Name string + Version string + ID int64 + }, 0, 5) + err := db.GetEngine(ctx).SQL(query).Find(&results) + if err != nil { + return nil, err + } + + unique := make(map[string]bool) + for _, info := range results { + recipe := fmt.Sprintf("%s/%s", info.Name, info.Version) + + props, _ := packages.GetProperties(ctx, packages.PropertyTypeFile, info.ID) + if len(props) > 0 { + var ( + user = "" + channel = "" + ) + for _, prop := range props { + if prop.Name == conan_module.PropertyRecipeUser { + user = prop.Value + } + if prop.Name == conan_module.PropertyRecipeChannel { + channel = prop.Value + } + } + if user != "" && channel != "" { + recipe = fmt.Sprintf("%s@%s/%s", recipe, user, channel) + } + } + + unique[recipe] = true + } + + recipes := make([]string, 0, len(unique)) + for recipe := range unique { + recipes = append(recipes, recipe) + } + return recipes, nil +} + +// GetPackageInfo gets the Conaninfo for a package +func GetPackageInfo(ctx context.Context, ownerID int64, ref *conan_module.PackageReference) (string, error) { + values, err := findPropertyValues( + ctx, + conan_module.PropertyPackageInfo, + ownerID, + ref.Recipe.Name, + ref.Recipe.Version, + map[string]string{ + conan_module.PropertyRecipeUser: ref.Recipe.User, + conan_module.PropertyRecipeChannel: ref.Recipe.Channel, + conan_module.PropertyRecipeRevision: ref.Recipe.Revision, + conan_module.PropertyPackageReference: ref.Reference, + conan_module.PropertyPackageRevision: ref.Revision, + }, + ) + if err != nil { + return "", err + } + + if len(values) == 0 { + return "", ErrPackageReferenceNotExist + } + + return values[0].Value, nil +} diff --git a/models/packages/container/const.go b/models/packages/container/const.go new file mode 100644 index 0000000000..9d3ed64a6e --- /dev/null +++ b/models/packages/container/const.go @@ -0,0 +1,10 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package container + +const ( + ManifestFilename = "manifest.json" + UploadVersion = "_upload" +) diff --git a/models/packages/container/search.go b/models/packages/container/search.go new file mode 100644 index 0000000000..972cac9528 --- /dev/null +++ b/models/packages/container/search.go @@ -0,0 +1,227 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
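+// This file implements blob, manifest and tag search helpers for the container registry.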
+ +package container + +import ( + "context" + "errors" + "strings" + "time" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + container_module "code.gitea.io/gitea/modules/packages/container" + + "xorm.io/builder" +) + +var ErrContainerBlobNotExist = errors.New("Container blob does not exist") + +type BlobSearchOptions struct { + OwnerID int64 + Image string + Digest string + Tag string + IsManifest bool +} + +func (opts *BlobSearchOptions) toConds() builder.Cond { + var cond builder.Cond = builder.Eq{ + "package.type": packages.TypeContainer, + } + + if opts.OwnerID != 0 { + cond = cond.And(builder.Eq{"package.owner_id": opts.OwnerID}) + } + if opts.Image != "" { + cond = cond.And(builder.Eq{"package.lower_name": strings.ToLower(opts.Image)}) + } + if opts.Tag != "" { + cond = cond.And(builder.Eq{"package_version.lower_version": strings.ToLower(opts.Tag)}) + } + if opts.IsManifest { + cond = cond.And(builder.Eq{"package_file.lower_name": ManifestFilename}) + } + if opts.Digest != "" { + var propsCond builder.Cond = builder.Eq{ + "package_property.ref_type": packages.PropertyTypeFile, + "package_property.name": container_module.PropertyDigest, + "package_property.value": opts.Digest, + } + + cond = cond.And(builder.In("package_file.id", builder.Select("package_property.ref_id").Where(propsCond).From("package_property"))) + } + + return cond +} + +// GetContainerBlob gets the container blob matching the blob search options +// If multiple matching blobs are found (manifests with the same digest) the first (according to the database) is selected. +func GetContainerBlob(ctx context.Context, opts *BlobSearchOptions) (*packages.PackageFileDescriptor, error) { + pfds, err := getContainerBlobsLimit(ctx, opts, 1) + if err != nil { + return nil, err + } + if len(pfds) != 1 { + return nil, ErrContainerBlobNotExist + } + + return pfds[0], nil +} + +// GetContainerBlobs gets the container blobs matching the blob search options +func GetContainerBlobs(ctx context.Context, opts *BlobSearchOptions) ([]*packages.PackageFileDescriptor, error) { + return getContainerBlobsLimit(ctx, opts, 0) +} + +func getContainerBlobsLimit(ctx context.Context, opts *BlobSearchOptions, limit int) ([]*packages.PackageFileDescriptor, error) { + pfs := make([]*packages.PackageFile, 0, limit) + sess := db.GetEngine(ctx). + Join("INNER", "package_version", "package_version.id = package_file.version_id"). + Join("INNER", "package", "package.id = package_version.package_id"). + Where(opts.toConds()) + + if limit > 0 { + sess = sess.Limit(limit) + } + + if err := sess.Find(&pfs); err != nil { + return nil, err + } + + pfds := make([]*packages.PackageFileDescriptor, 0, len(pfs)) + for _, pf := range pfs { + pfd, err := packages.GetPackageFileDescriptor(ctx, pf) + if err != nil { + return nil, err + } + pfds = append(pfds, pfd) + } + + return pfds, nil +} + +// GetManifestVersions gets all package versions representing the matching manifest +func GetManifestVersions(ctx context.Context, opts *BlobSearchOptions) ([]*packages.PackageVersion, error) { + cond := opts.toConds().And(builder.Eq{"package_version.is_internal": false}) + + pvs := make([]*packages.PackageVersion, 0, 10) + return pvs, db.GetEngine(ctx). + Join("INNER", "package", "package.id = package_version.package_id"). + Join("INNER", "package_file", "package_file.version_id = package_version.id"). + Where(cond). + Find(&pvs) +} + +// GetImageTags gets a sorted list of the tags of an image +// The result is suitable for the api call. 
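+// If n == 0 an empty list is returned; if n > 0 at most n tags are returned. A non-empty last restricts the result to tags sorting after it.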
+func GetImageTags(ctx context.Context, ownerID int64, image string, n int, last string) ([]string, error) { + // Short circuit: n == 0 should return an empty list + if n == 0 { + return []string{}, nil + } + + var cond builder.Cond = builder.Eq{ + "package.type": packages.TypeContainer, + "package.owner_id": ownerID, + "package.lower_name": strings.ToLower(image), + "package_version.is_internal": false, + } + + var propsCond builder.Cond = builder.Eq{ + "package_property.ref_type": packages.PropertyTypeVersion, + "package_property.name": container_module.PropertyManifestTagged, + } + + cond = cond.And(builder.In("package_version.id", builder.Select("package_property.ref_id").Where(propsCond).From("package_property"))) + + if last != "" { + cond = cond.And(builder.Gt{"package_version.lower_version": strings.ToLower(last)}) + } + + sess := db.GetEngine(ctx). + Table("package_version"). + Select("package_version.lower_version"). + Join("INNER", "package", "package.id = package_version.package_id"). + Where(cond). + Asc("package_version.lower_version") + + var tags []string + if n > 0 { + sess = sess.Limit(n) + + tags = make([]string, 0, n) + } else { + tags = make([]string, 0, 10) + } + + return tags, sess.Find(&tags) +} + +type ImageTagsSearchOptions struct { + PackageID int64 + Query string + IsTagged bool + db.Paginator +} + +func (opts *ImageTagsSearchOptions) toConds() builder.Cond { + var cond builder.Cond = builder.Eq{ + "package.type": packages.TypeContainer, + "package.id": opts.PackageID, + "package_version.is_internal": false, + } + + if opts.Query != "" { + cond = cond.And(builder.Like{"package_version.lower_version", strings.ToLower(opts.Query)}) + } + + var propsCond builder.Cond = builder.Eq{ + "package_property.ref_type": packages.PropertyTypeVersion, + "package_property.name": container_module.PropertyManifestTagged, + } + + in := builder.In("package_version.id", builder.Select("package_property.ref_id").Where(propsCond).From("package_property")) + + if opts.IsTagged { + cond = cond.And(in) + } else { + cond = cond.And(builder.Not{in}) + } + + return cond +} + +// SearchImageTags gets a sorted list of the tags of an image +func SearchImageTags(ctx context.Context, opts *ImageTagsSearchOptions) ([]*packages.PackageVersion, int64, error) { + sess := db.GetEngine(ctx). + Join("INNER", "package", "package.id = package_version.package_id"). + Where(opts.toConds()). + Desc("package_version.created_unix") + + if opts.Paginator != nil { + sess = db.SetSessionPagination(sess, opts) + } + + pvs := make([]*packages.PackageVersion, 0, 10) + count, err := sess.FindAndCount(&pvs) + return pvs, count, err +} + +func SearchExpiredUploadedBlobs(ctx context.Context, olderThan time.Duration) ([]*packages.PackageFile, error) { + var cond builder.Cond = builder.Eq{ + "package_version.is_internal": true, + "package_version.lower_version": UploadVersion, + "package.type": packages.TypeContainer, + } + cond = cond.And(builder.Lt{"package_file.created_unix": time.Now().Add(-olderThan).Unix()}) + + var pfs []*packages.PackageFile + return pfs, db.GetEngine(ctx). + Join("INNER", "package_version", "package_version.id = package_file.version_id"). + Join("INNER", "package", "package.id = package_version.package_id"). + Where(cond). + Find(&pfs) +} diff --git a/models/packages/descriptor.go b/models/packages/descriptor.go new file mode 100644 index 0000000000..fbdc40f37f --- /dev/null +++ b/models/packages/descriptor.go @@ -0,0 +1,195 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. 
+// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "context" + "fmt" + "net/url" + + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/packages/composer" + "code.gitea.io/gitea/modules/packages/conan" + "code.gitea.io/gitea/modules/packages/container" + "code.gitea.io/gitea/modules/packages/helm" + "code.gitea.io/gitea/modules/packages/maven" + "code.gitea.io/gitea/modules/packages/npm" + "code.gitea.io/gitea/modules/packages/nuget" + "code.gitea.io/gitea/modules/packages/pypi" + "code.gitea.io/gitea/modules/packages/rubygems" + + "github.com/hashicorp/go-version" +) + +// PackagePropertyList is a list of package properties +type PackagePropertyList []*PackageProperty + +// GetByName gets the first property value with the specific name +func (l PackagePropertyList) GetByName(name string) string { + for _, pp := range l { + if pp.Name == name { + return pp.Value + } + } + return "" +} + +// PackageDescriptor describes a package +type PackageDescriptor struct { + Package *Package + Owner *user_model.User + Repository *repo_model.Repository + Version *PackageVersion + SemVer *version.Version + Creator *user_model.User + Properties PackagePropertyList + Metadata interface{} + Files []*PackageFileDescriptor +} + +// PackageFileDescriptor describes a package file +type PackageFileDescriptor struct { + File *PackageFile + Blob *PackageBlob + Properties PackagePropertyList +} + +// PackageWebLink returns the package web link +func (pd *PackageDescriptor) PackageWebLink() string { + return fmt.Sprintf("%s/-/packages/%s/%s", pd.Owner.HTMLURL(), string(pd.Package.Type), url.PathEscape(pd.Package.LowerName)) +} + +// FullWebLink returns the package version web link +func (pd *PackageDescriptor) FullWebLink() string { + return fmt.Sprintf("%s/%s", pd.PackageWebLink(), url.PathEscape(pd.Version.LowerVersion)) +} + +// CalculateBlobSize returns the total blobs size in bytes +func (pd *PackageDescriptor) CalculateBlobSize() int64 { + size := int64(0) + for _, f := range pd.Files { + size += f.Blob.Size + } + return size +} + +// GetPackageDescriptor gets the package description for a version +func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDescriptor, error) { + p, err := GetPackageByID(ctx, pv.PackageID) + if err != nil { + return nil, err + } + o, err := user_model.GetUserByIDCtx(ctx, p.OwnerID) + if err != nil { + return nil, err + } + repository, err := repo_model.GetRepositoryByIDCtx(ctx, p.RepoID) + if err != nil && !repo_model.IsErrRepoNotExist(err) { + return nil, err + } + creator, err := user_model.GetUserByIDCtx(ctx, pv.CreatorID) + if err != nil { + return nil, err + } + var semVer *version.Version + if p.SemverCompatible { + semVer, err = version.NewVersion(pv.Version) + if err != nil { + return nil, err + } + } + pvps, err := GetProperties(ctx, PropertyTypeVersion, pv.ID) + if err != nil { + return nil, err + } + pfs, err := GetFilesByVersionID(ctx, pv.ID) + if err != nil { + return nil, err + } + + pfds := make([]*PackageFileDescriptor, 0, len(pfs)) + for _, pf := range pfs { + pfd, err := GetPackageFileDescriptor(ctx, pf) + if err != nil { + return nil, err + } + pfds = append(pfds, pfd) + } + + var metadata interface{} + switch p.Type { + case TypeComposer: + metadata = &composer.Metadata{} + case TypeConan: + metadata = &conan.Metadata{} + case TypeContainer: + 
metadata = &container.Metadata{} + case TypeGeneric: + // generic packages have no metadata + case TypeHelm: + metadata = &helm.Metadata{} + case TypeNuGet: + metadata = &nuget.Metadata{} + case TypeNpm: + metadata = &npm.Metadata{} + case TypeMaven: + metadata = &maven.Metadata{} + case TypePyPI: + metadata = &pypi.Metadata{} + case TypeRubyGems: + metadata = &rubygems.Metadata{} + default: + panic(fmt.Sprintf("unknown package type: %s", string(p.Type))) + } + if metadata != nil { + if err := json.Unmarshal([]byte(pv.MetadataJSON), &metadata); err != nil { + return nil, err + } + } + + return &PackageDescriptor{ + Package: p, + Owner: o, + Repository: repository, + Version: pv, + SemVer: semVer, + Creator: creator, + Properties: PackagePropertyList(pvps), + Metadata: metadata, + Files: pfds, + }, nil +} + +// GetPackageFileDescriptor gets a package file descriptor for a package file +func GetPackageFileDescriptor(ctx context.Context, pf *PackageFile) (*PackageFileDescriptor, error) { + pb, err := GetBlobByID(ctx, pf.BlobID) + if err != nil { + return nil, err + } + pfps, err := GetProperties(ctx, PropertyTypeFile, pf.ID) + if err != nil { + return nil, err + } + return &PackageFileDescriptor{ + pf, + pb, + PackagePropertyList(pfps), + }, nil +} + +// GetPackageDescriptors gets the package descriptions for the versions +func GetPackageDescriptors(ctx context.Context, pvs []*PackageVersion) ([]*PackageDescriptor, error) { + pds := make([]*PackageDescriptor, 0, len(pvs)) + for _, pv := range pvs { + pd, err := GetPackageDescriptor(ctx, pv) + if err != nil { + return nil, err + } + pds = append(pds, pd) + } + return pds, nil +} diff --git a/models/packages/package.go b/models/packages/package.go new file mode 100644 index 0000000000..bdb535492b --- /dev/null +++ b/models/packages/package.go @@ -0,0 +1,220 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
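+// This file defines the Package model and helpers to insert, look up and delete packages.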
+ +package packages + +import ( + "context" + "errors" + "fmt" + "strings" + + "code.gitea.io/gitea/models/db" + + "xorm.io/builder" +) + +func init() { + db.RegisterModel(new(Package)) +} + +var ( + // ErrDuplicatePackage indicates a duplicated package error + ErrDuplicatePackage = errors.New("Package does exist already") + // ErrPackageNotExist indicates a package not exist error + ErrPackageNotExist = errors.New("Package does not exist") +) + +// Type of a package +type Type string + +// List of supported packages +const ( + TypeComposer Type = "composer" + TypeConan Type = "conan" + TypeContainer Type = "container" + TypeGeneric Type = "generic" + TypeHelm Type = "helm" + TypeMaven Type = "maven" + TypeNpm Type = "npm" + TypeNuGet Type = "nuget" + TypePyPI Type = "pypi" + TypeRubyGems Type = "rubygems" +) + +// Name gets the name of the package type +func (pt Type) Name() string { + switch pt { + case TypeComposer: + return "Composer" + case TypeConan: + return "Conan" + case TypeContainer: + return "Container" + case TypeGeneric: + return "Generic" + case TypeHelm: + return "Helm" + case TypeMaven: + return "Maven" + case TypeNpm: + return "npm" + case TypeNuGet: + return "NuGet" + case TypePyPI: + return "PyPI" + case TypeRubyGems: + return "RubyGems" + } + panic(fmt.Sprintf("unknown package type: %s", string(pt))) +} + +// SVGName gets the name of the package type svg image +func (pt Type) SVGName() string { + switch pt { + case TypeComposer: + return "gitea-composer" + case TypeConan: + return "gitea-conan" + case TypeContainer: + return "octicon-container" + case TypeGeneric: + return "octicon-package" + case TypeHelm: + return "gitea-helm" + case TypeMaven: + return "gitea-maven" + case TypeNpm: + return "gitea-npm" + case TypeNuGet: + return "gitea-nuget" + case TypePyPI: + return "gitea-python" + case TypeRubyGems: + return "gitea-rubygems" + } + panic(fmt.Sprintf("unknown package type: %s", string(pt))) +} + +// Package represents a package +type Package struct { + ID int64 `xorm:"pk autoincr"` + OwnerID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"` + RepoID int64 `xorm:"INDEX"` + Type Type `xorm:"UNIQUE(s) INDEX NOT NULL"` + Name string `xorm:"NOT NULL"` + LowerName string `xorm:"UNIQUE(s) INDEX NOT NULL"` + SemverCompatible bool `xorm:"NOT NULL DEFAULT false"` +} + +// TryInsertPackage inserts a package. 
If a package exists already, ErrDuplicatePackage is returned +func TryInsertPackage(ctx context.Context, p *Package) (*Package, error) { + e := db.GetEngine(ctx) + + key := &Package{ + OwnerID: p.OwnerID, + Type: p.Type, + LowerName: p.LowerName, + } + + has, err := e.Get(key) + if err != nil { + return nil, err + } + if has { + return key, ErrDuplicatePackage + } + if _, err = e.Insert(p); err != nil { + return nil, err + } + return p, nil +} + +// SetRepositoryLink sets the linked repository +func SetRepositoryLink(ctx context.Context, packageID, repoID int64) error { + _, err := db.GetEngine(ctx).ID(packageID).Cols("repo_id").Update(&Package{RepoID: repoID}) + return err +} + +// UnlinkRepositoryFromAllPackages unlinks every package from the repository +func UnlinkRepositoryFromAllPackages(ctx context.Context, repoID int64) error { + _, err := db.GetEngine(ctx).Where("repo_id = ?", repoID).Cols("repo_id").Update(&Package{}) + return err +} + +// GetPackageByID gets a package by id +func GetPackageByID(ctx context.Context, packageID int64) (*Package, error) { + p := &Package{} + + has, err := db.GetEngine(ctx).ID(packageID).Get(p) + if err != nil { + return nil, err + } + if !has { + return nil, ErrPackageNotExist + } + return p, nil +} + +// GetPackageByName gets a package by name +func GetPackageByName(ctx context.Context, ownerID int64, packageType Type, name string) (*Package, error) { + var cond builder.Cond = builder.Eq{ + "package.owner_id": ownerID, + "package.type": packageType, + "package.lower_name": strings.ToLower(name), + } + + p := &Package{} + + has, err := db.GetEngine(ctx). + Where(cond). + Get(p) + if err != nil { + return nil, err + } + if !has { + return nil, ErrPackageNotExist + } + return p, nil +} + +// GetPackagesByType gets all packages of a specific type +func GetPackagesByType(ctx context.Context, ownerID int64, packageType Type) ([]*Package, error) { + var cond builder.Cond = builder.Eq{ + "package.owner_id": ownerID, + "package.type": packageType, + } + + ps := make([]*Package, 0, 10) + return ps, db.GetEngine(ctx). + Where(cond). + Find(&ps) +} + +// DeletePackagesIfUnreferenced deletes a package if there are no associated versions +func DeletePackagesIfUnreferenced(ctx context.Context) error { + in := builder. + Select("package.id"). + From("package"). + LeftJoin("package_version", "package_version.package_id = package.id"). + Where(builder.Expr("package_version.id IS NULL")) + + _, err := db.GetEngine(ctx). + // double select workaround for MySQL + // https://stackoverflow.com/questions/4471277/mysql-delete-from-with-subquery-as-condition + Where(builder.In("package.id", builder.Select("id").From(in, "temp"))). + Delete(&Package{}) + + return err +} + +// HasOwnerPackages tests if a user/org has packages +func HasOwnerPackages(ctx context.Context, ownerID int64) (bool, error) { + return db.GetEngine(ctx).Where("owner_id = ?", ownerID).Exist(&Package{}) +} + +// HasRepositoryPackages tests if a repository has packages +func HasRepositoryPackages(ctx context.Context, repositoryID int64) (bool, error) { + return db.GetEngine(ctx).Where("repo_id = ?", repositoryID).Exist(&Package{}) +} diff --git a/models/packages/package_blob.go b/models/packages/package_blob.go new file mode 100644 index 0000000000..8c701d4285 --- /dev/null +++ b/models/packages/package_blob.go @@ -0,0 +1,85 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package packages + +import ( + "context" + "errors" + "time" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/timeutil" +) + +// ErrPackageBlobNotExist indicates a package blob not exist error +var ErrPackageBlobNotExist = errors.New("Package blob does not exist") + +func init() { + db.RegisterModel(new(PackageBlob)) +} + +// PackageBlob represents a package blob +type PackageBlob struct { + ID int64 `xorm:"pk autoincr"` + Size int64 `xorm:"NOT NULL DEFAULT 0"` + HashMD5 string `xorm:"hash_md5 char(32) UNIQUE(md5) INDEX NOT NULL"` + HashSHA1 string `xorm:"hash_sha1 char(40) UNIQUE(sha1) INDEX NOT NULL"` + HashSHA256 string `xorm:"hash_sha256 char(64) UNIQUE(sha256) INDEX NOT NULL"` + HashSHA512 string `xorm:"hash_sha512 char(128) UNIQUE(sha512) INDEX NOT NULL"` + CreatedUnix timeutil.TimeStamp `xorm:"created INDEX NOT NULL"` +} + +// GetOrInsertBlob inserts a blob. If the blob exists already the existing blob is returned +func GetOrInsertBlob(ctx context.Context, pb *PackageBlob) (*PackageBlob, bool, error) { + e := db.GetEngine(ctx) + + has, err := e.Get(pb) + if err != nil { + return nil, false, err + } + if has { + return pb, true, nil + } + if _, err = e.Insert(pb); err != nil { + return nil, false, err + } + return pb, false, nil +} + +// GetBlobByID gets a blob by id +func GetBlobByID(ctx context.Context, blobID int64) (*PackageBlob, error) { + pb := &PackageBlob{} + + has, err := db.GetEngine(ctx).ID(blobID).Get(pb) + if err != nil { + return nil, err + } + if !has { + return nil, ErrPackageBlobNotExist + } + return pb, nil +} + +// FindExpiredUnreferencedBlobs gets all blobs without associated files older than the specific duration +func FindExpiredUnreferencedBlobs(ctx context.Context, olderThan time.Duration) ([]*PackageBlob, error) { + pbs := make([]*PackageBlob, 0, 10) + return pbs, db.GetEngine(ctx). + Table("package_blob"). + Join("LEFT", "package_file", "package_file.blob_id = package_blob.id"). + Where("package_file.id IS NULL AND package_blob.created_unix < ?", time.Now().Add(-olderThan).Unix()). + Find(&pbs) +} + +// DeleteBlobByID deletes a blob by id +func DeleteBlobByID(ctx context.Context, blobID int64) error { + _, err := db.GetEngine(ctx).ID(blobID).Delete(&PackageBlob{}) + return err +} + +// GetTotalBlobSize returns the total blobs size in bytes +func GetTotalBlobSize() (int64, error) { + return db.GetEngine(db.DefaultContext). + SumInt(&PackageBlob{}, "size") +} diff --git a/models/packages/package_blob_upload.go b/models/packages/package_blob_upload.go new file mode 100644 index 0000000000..635068f1d8 --- /dev/null +++ b/models/packages/package_blob_upload.go @@ -0,0 +1,81 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package packages + +import ( + "context" + "errors" + "strings" + "time" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" +) + +// ErrPackageBlobUploadNotExist indicates a package blob upload not exist error +var ErrPackageBlobUploadNotExist = errors.New("Package blob upload does not exist") + +func init() { + db.RegisterModel(new(PackageBlobUpload)) +} + +// PackageBlobUpload represents a package blob upload +type PackageBlobUpload struct { + ID string `xorm:"pk"` + BytesReceived int64 `xorm:"NOT NULL DEFAULT 0"` + HashStateBytes []byte `xorm:"BLOB"` + CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"` + UpdatedUnix timeutil.TimeStamp `xorm:"updated INDEX NOT NULL"` +} + +// CreateBlobUpload inserts a blob upload +func CreateBlobUpload(ctx context.Context) (*PackageBlobUpload, error) { + id, err := util.CryptoRandomString(25) + if err != nil { + return nil, err + } + + pbu := &PackageBlobUpload{ + ID: strings.ToLower(id), + } + + _, err = db.GetEngine(ctx).Insert(pbu) + return pbu, err +} + +// GetBlobUploadByID gets a blob upload by id +func GetBlobUploadByID(ctx context.Context, id string) (*PackageBlobUpload, error) { + pbu := &PackageBlobUpload{} + + has, err := db.GetEngine(ctx).ID(id).Get(pbu) + if err != nil { + return nil, err + } + if !has { + return nil, ErrPackageBlobUploadNotExist + } + return pbu, nil +} + +// UpdateBlobUpload updates the blob upload +func UpdateBlobUpload(ctx context.Context, pbu *PackageBlobUpload) error { + _, err := db.GetEngine(ctx).ID(pbu.ID).Update(pbu) + return err +} + +// DeleteBlobUploadByID deletes the blob upload +func DeleteBlobUploadByID(ctx context.Context, id string) error { + _, err := db.GetEngine(ctx).ID(id).Delete(&PackageBlobUpload{}) + return err +} + +// FindExpiredBlobUploads gets all expired blob uploads +func FindExpiredBlobUploads(ctx context.Context, olderThan time.Duration) ([]*PackageBlobUpload, error) { + pbus := make([]*PackageBlobUpload, 0, 10) + return pbus, db.GetEngine(ctx). + Where("updated_unix < ?", time.Now().Add(-olderThan).Unix()). + Find(&pbus) +} diff --git a/models/packages/package_file.go b/models/packages/package_file.go new file mode 100644 index 0000000000..8f304ce8ac --- /dev/null +++ b/models/packages/package_file.go @@ -0,0 +1,201 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
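+// This file defines the PackageFile model and helpers to insert, look up and search package files.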
+ +package packages + +import ( + "context" + "errors" + "strconv" + "strings" + "time" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/builder" +) + +func init() { + db.RegisterModel(new(PackageFile)) +} + +var ( + // ErrDuplicatePackageFile indicates a duplicated package file error + ErrDuplicatePackageFile = errors.New("Package file does exist already") + // ErrPackageFileNotExist indicates a package file not exist error + ErrPackageFileNotExist = errors.New("Package file does not exist") +) + +// EmptyFileKey is a named constant for an empty file key +const EmptyFileKey = "" + +// PackageFile represents a package file +type PackageFile struct { + ID int64 `xorm:"pk autoincr"` + VersionID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"` + BlobID int64 `xorm:"INDEX NOT NULL"` + Name string `xorm:"NOT NULL"` + LowerName string `xorm:"UNIQUE(s) INDEX NOT NULL"` + CompositeKey string `xorm:"UNIQUE(s) INDEX"` + IsLead bool `xorm:"NOT NULL DEFAULT false"` + CreatedUnix timeutil.TimeStamp `xorm:"created INDEX NOT NULL"` +} + +// TryInsertFile inserts a file. If the file exists already ErrDuplicatePackageFile is returned +func TryInsertFile(ctx context.Context, pf *PackageFile) (*PackageFile, error) { + e := db.GetEngine(ctx) + + key := &PackageFile{ + VersionID: pf.VersionID, + LowerName: pf.LowerName, + CompositeKey: pf.CompositeKey, + } + + has, err := e.Get(key) + if err != nil { + return nil, err + } + if has { + return pf, ErrDuplicatePackageFile + } + if _, err = e.Insert(pf); err != nil { + return nil, err + } + return pf, nil +} + +// GetFilesByVersionID gets all files of a version +func GetFilesByVersionID(ctx context.Context, versionID int64) ([]*PackageFile, error) { + pfs := make([]*PackageFile, 0, 10) + return pfs, db.GetEngine(ctx).Where("version_id = ?", versionID).Find(&pfs) +} + +// GetFileForVersionByID gets a file of a version by id +func GetFileForVersionByID(ctx context.Context, versionID, fileID int64) (*PackageFile, error) { + pf := &PackageFile{ + VersionID: versionID, + } + + has, err := db.GetEngine(ctx).ID(fileID).Get(pf) + if err != nil { + return nil, err + } + if !has { + return nil, ErrPackageFileNotExist + } + return pf, nil +} + +// GetFileForVersionByName gets a file of a version by name +func GetFileForVersionByName(ctx context.Context, versionID int64, name, key string) (*PackageFile, error) { + if name == "" { + return nil, ErrPackageFileNotExist + } + + pf := &PackageFile{ + VersionID: versionID, + LowerName: strings.ToLower(name), + CompositeKey: key, + } + + has, err := db.GetEngine(ctx).Get(pf) + if err != nil { + return nil, err + } + if !has { + return nil, ErrPackageFileNotExist + } + return pf, nil +} + +// DeleteFileByID deletes a file +func DeleteFileByID(ctx context.Context, fileID int64) error { + _, err := db.GetEngine(ctx).ID(fileID).Delete(&PackageFile{}) + return err +} + +// PackageFileSearchOptions are options for SearchXXX methods +type PackageFileSearchOptions struct { + OwnerID int64 + PackageType string + VersionID int64 + Query string + CompositeKey string + Properties map[string]string + OlderThan time.Duration + db.Paginator +} + +func (opts *PackageFileSearchOptions) toConds() builder.Cond { + cond := builder.NewCond() + + if opts.VersionID != 0 { + cond = cond.And(builder.Eq{"package_file.version_id": opts.VersionID}) + } else if opts.OwnerID != 0 || (opts.PackageType != "" && opts.PackageType != "all") { + var versionCond builder.Cond = builder.Eq{ + "package_version.is_internal": false, + } + if 
opts.OwnerID != 0 { + versionCond = versionCond.And(builder.Eq{"package.owner_id": opts.OwnerID}) + } + if opts.PackageType != "" && opts.PackageType != "all" { + versionCond = versionCond.And(builder.Eq{"package.type": opts.PackageType}) + } + + in := builder. + Select("package_version.id"). + From("package_version"). + InnerJoin("package", "package.id = package_version.package_id"). + Where(versionCond) + + cond = cond.And(builder.In("package_file.version_id", in)) + } + if opts.CompositeKey != "" { + cond = cond.And(builder.Eq{"package_file.composite_key": opts.CompositeKey}) + } + if opts.Query != "" { + cond = cond.And(builder.Like{"package_file.lower_name", strings.ToLower(opts.Query)}) + } + + if len(opts.Properties) != 0 { + var propsCond builder.Cond = builder.Eq{ + "package_property.ref_type": PropertyTypeFile, + } + propsCond = propsCond.And(builder.Expr("package_property.ref_id = package_file.id")) + + propsCondBlock := builder.NewCond() + for name, value := range opts.Properties { + propsCondBlock = propsCondBlock.Or(builder.Eq{ + "package_property.name": name, + "package_property.value": value, + }) + } + propsCond = propsCond.And(propsCondBlock) + + cond = cond.And(builder.Eq{ + strconv.Itoa(len(opts.Properties)): builder.Select("COUNT(*)").Where(propsCond).From("package_property"), + }) + } + + if opts.OlderThan != 0 { + cond = cond.And(builder.Lt{"package_file.created_unix": time.Now().Add(-opts.OlderThan).Unix()}) + } + + return cond +} + +// SearchFiles gets all files of packages matching the search options +func SearchFiles(ctx context.Context, opts *PackageFileSearchOptions) ([]*PackageFile, int64, error) { + sess := db.GetEngine(ctx). + Where(opts.toConds()) + + if opts.Paginator != nil { + sess = db.SetSessionPagination(sess, opts) + } + + pfs := make([]*PackageFile, 0, 10) + count, err := sess.FindAndCount(&pfs) + return pfs, count, err +} diff --git a/models/packages/package_property.go b/models/packages/package_property.go new file mode 100644 index 0000000000..bf7dc346c6 --- /dev/null +++ b/models/packages/package_property.go @@ -0,0 +1,70 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "context" + + "code.gitea.io/gitea/models/db" +) + +func init() { + db.RegisterModel(new(PackageProperty)) +} + +type PropertyType int64 + +const ( + // PropertyTypeVersion means the reference is a package version + PropertyTypeVersion PropertyType = iota // 0 + // PropertyTypeFile means the reference is a package file + PropertyTypeFile // 1 +) + +// PackageProperty represents a property of a package version or file +type PackageProperty struct { + ID int64 `xorm:"pk autoincr"` + RefType PropertyType `xorm:"INDEX NOT NULL"` + RefID int64 `xorm:"INDEX NOT NULL"` + Name string `xorm:"INDEX NOT NULL"` + Value string `xorm:"TEXT NOT NULL"` +} + +// InsertProperty creates a property +func InsertProperty(ctx context.Context, refType PropertyType, refID int64, name, value string) (*PackageProperty, error) { + pp := &PackageProperty{ + RefType: refType, + RefID: refID, + Name: name, + Value: value, + } + + _, err := db.GetEngine(ctx).Insert(pp) + return pp, err +} + +// GetProperties gets all properties +func GetProperties(ctx context.Context, refType PropertyType, refID int64) ([]*PackageProperty, error) { + pps := make([]*PackageProperty, 0, 10) + return pps, db.GetEngine(ctx).Where("ref_type = ? 
AND ref_id = ?", refType, refID).Find(&pps) +} + +// GetPropertiesByName gets all properties with a specific name +func GetPropertiesByName(ctx context.Context, refType PropertyType, refID int64, name string) ([]*PackageProperty, error) { + pps := make([]*PackageProperty, 0, 10) + return pps, db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? AND name = ?", refType, refID, name).Find(&pps) +} + +// DeleteAllProperties deletes all properties of a ref +func DeleteAllProperties(ctx context.Context, refType PropertyType, refID int64) error { + _, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).Delete(&PackageProperty{}) + return err +} + +// DeletePropertyByID deletes a property +func DeletePropertyByID(ctx context.Context, propertyID int64) error { + _, err := db.GetEngine(ctx).ID(propertyID).Delete(&PackageProperty{}) + return err +} diff --git a/models/packages/package_version.go b/models/packages/package_version.go new file mode 100644 index 0000000000..78e76c5054 --- /dev/null +++ b/models/packages/package_version.go @@ -0,0 +1,305 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "context" + "errors" + "strconv" + "strings" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" + + "xorm.io/builder" +) + +// ErrDuplicatePackageVersion indicates a duplicated package version error +var ErrDuplicatePackageVersion = errors.New("Package version already exists") + +func init() { + db.RegisterModel(new(PackageVersion)) +} + +// PackageVersion represents a package version +type PackageVersion struct { + ID int64 `xorm:"pk autoincr"` + PackageID int64 `xorm:"UNIQUE(s) INDEX NOT NULL"` + CreatorID int64 `xorm:"NOT NULL DEFAULT 0"` + Version string `xorm:"NOT NULL"` + LowerVersion string `xorm:"UNIQUE(s) INDEX NOT NULL"` + CreatedUnix timeutil.TimeStamp `xorm:"created INDEX NOT NULL"` + IsInternal bool `xorm:"INDEX NOT NULL DEFAULT false"` + MetadataJSON string `xorm:"metadata_json TEXT"` + DownloadCount int64 `xorm:"NOT NULL DEFAULT 0"` +} + +// GetOrInsertVersion inserts a version. 
If the same version exist already ErrDuplicatePackageVersion is returned +func GetOrInsertVersion(ctx context.Context, pv *PackageVersion) (*PackageVersion, error) { + e := db.GetEngine(ctx) + + key := &PackageVersion{ + PackageID: pv.PackageID, + LowerVersion: pv.LowerVersion, + } + + has, err := e.Get(key) + if err != nil { + return nil, err + } + if has { + return key, ErrDuplicatePackageVersion + } + if _, err = e.Insert(pv); err != nil { + return nil, err + } + return pv, nil +} + +// UpdateVersion updates a version +func UpdateVersion(ctx context.Context, pv *PackageVersion) error { + _, err := db.GetEngine(ctx).ID(pv.ID).Update(pv) + return err +} + +// IncrementDownloadCounter increments the download counter of a version +func IncrementDownloadCounter(ctx context.Context, versionID int64) error { + _, err := db.GetEngine(ctx).Exec("UPDATE `package_version` SET `download_count` = `download_count` + 1 WHERE `id` = ?", versionID) + return err +} + +// GetVersionByID gets a version by id +func GetVersionByID(ctx context.Context, versionID int64) (*PackageVersion, error) { + pv := &PackageVersion{} + + has, err := db.GetEngine(ctx).ID(versionID).Get(pv) + if err != nil { + return nil, err + } + if !has { + return nil, ErrPackageNotExist + } + return pv, nil +} + +// GetVersionByNameAndVersion gets a version by name and version number +func GetVersionByNameAndVersion(ctx context.Context, ownerID int64, packageType Type, name, version string) (*PackageVersion, error) { + return getVersionByNameAndVersion(ctx, ownerID, packageType, name, version, false) +} + +// GetInternalVersionByNameAndVersion gets a version by name and version number +func GetInternalVersionByNameAndVersion(ctx context.Context, ownerID int64, packageType Type, name, version string) (*PackageVersion, error) { + return getVersionByNameAndVersion(ctx, ownerID, packageType, name, version, true) +} + +func getVersionByNameAndVersion(ctx context.Context, ownerID int64, packageType Type, name, version string, isInternal bool) (*PackageVersion, error) { + pvs, _, err := SearchVersions(ctx, &PackageSearchOptions{ + OwnerID: ownerID, + Type: packageType, + Name: SearchValue{ + ExactMatch: true, + Value: name, + }, + Version: SearchValue{ + ExactMatch: true, + Value: version, + }, + IsInternal: isInternal, + Paginator: db.NewAbsoluteListOptions(0, 1), + }) + if err != nil { + return nil, err + } + if len(pvs) == 0 { + return nil, ErrPackageNotExist + } + return pvs[0], nil +} + +// GetVersionsByPackageType gets all versions of a specific type +func GetVersionsByPackageType(ctx context.Context, ownerID int64, packageType Type) ([]*PackageVersion, error) { + pvs, _, err := SearchVersions(ctx, &PackageSearchOptions{ + OwnerID: ownerID, + Type: packageType, + }) + return pvs, err +} + +// GetVersionsByPackageName gets all versions of a specific package +func GetVersionsByPackageName(ctx context.Context, ownerID int64, packageType Type, name string) ([]*PackageVersion, error) { + pvs, _, err := SearchVersions(ctx, &PackageSearchOptions{ + OwnerID: ownerID, + Type: packageType, + Name: SearchValue{ + ExactMatch: true, + Value: name, + }, + }) + return pvs, err +} + +// DeleteVersionByID deletes a version by id +func DeleteVersionByID(ctx context.Context, versionID int64) error { + _, err := db.GetEngine(ctx).ID(versionID).Delete(&PackageVersion{}) + return err +} + +// HasVersionFileReferences checks if there are associated files +func HasVersionFileReferences(ctx context.Context, versionID int64) (bool, error) { + return 
db.GetEngine(ctx).Get(&PackageFile{ + VersionID: versionID, + }) +} + +// SearchValue describes a value to search +// If ExactMatch is true, the field must match the value otherwise a LIKE search is performed. +type SearchValue struct { + Value string + ExactMatch bool +} + +// PackageSearchOptions are options for SearchXXX methods +// Besides IsInternal are all fields optional and are not used if they have their default value (nil, "", 0) +type PackageSearchOptions struct { + OwnerID int64 + RepoID int64 + Type Type + PackageID int64 + Name SearchValue // only results with the specific name are found + Version SearchValue // only results with the specific version are found + Properties map[string]string // only results are found which contain all listed version properties with the specific value + IsInternal bool + HasFileWithName string // only results are found which are associated with a file with the specific name + HasFiles util.OptionalBool // only results are found which have associated files + Sort string + db.Paginator +} + +func (opts *PackageSearchOptions) toConds() builder.Cond { + var cond builder.Cond = builder.Eq{"package_version.is_internal": opts.IsInternal} + + if opts.OwnerID != 0 { + cond = cond.And(builder.Eq{"package.owner_id": opts.OwnerID}) + } + if opts.RepoID != 0 { + cond = cond.And(builder.Eq{"package.repo_id": opts.RepoID}) + } + if opts.Type != "" && opts.Type != "all" { + cond = cond.And(builder.Eq{"package.type": opts.Type}) + } + if opts.PackageID != 0 { + cond = cond.And(builder.Eq{"package.id": opts.PackageID}) + } + if opts.Name.Value != "" { + if opts.Name.ExactMatch { + cond = cond.And(builder.Eq{"package.lower_name": strings.ToLower(opts.Name.Value)}) + } else { + cond = cond.And(builder.Like{"package.lower_name", strings.ToLower(opts.Name.Value)}) + } + } + if opts.Version.Value != "" { + if opts.Version.ExactMatch { + cond = cond.And(builder.Eq{"package_version.lower_version": strings.ToLower(opts.Version.Value)}) + } else { + cond = cond.And(builder.Like{"package_version.lower_version", strings.ToLower(opts.Version.Value)}) + } + } + + if len(opts.Properties) != 0 { + var propsCond builder.Cond = builder.Eq{ + "package_property.ref_type": PropertyTypeVersion, + } + propsCond = propsCond.And(builder.Expr("package_property.ref_id = package_version.id")) + + propsCondBlock := builder.NewCond() + for name, value := range opts.Properties { + propsCondBlock = propsCondBlock.Or(builder.Eq{ + "package_property.name": name, + "package_property.value": value, + }) + } + propsCond = propsCond.And(propsCondBlock) + + cond = cond.And(builder.Eq{ + strconv.Itoa(len(opts.Properties)): builder.Select("COUNT(*)").Where(propsCond).From("package_property"), + }) + } + + if opts.HasFileWithName != "" { + fileCond := builder.Expr("package_file.version_id = package_version.id").And(builder.Eq{"package_file.lower_name": strings.ToLower(opts.HasFileWithName)}) + + cond = cond.And(builder.Exists(builder.Select("package_file.id").From("package_file").Where(fileCond))) + } + + if !opts.HasFiles.IsNone() { + var filesCond builder.Cond = builder.Exists(builder.Select("package_file.id").From("package_file").Where(builder.Expr("package_file.version_id = package_version.id"))) + + if opts.HasFiles.IsFalse() { + filesCond = builder.Not{filesCond} + } + + cond = cond.And(filesCond) + } + + return cond +} + +func (opts *PackageSearchOptions) configureOrderBy(e db.Engine) { + switch opts.Sort { + case "alphabetically": + e.Asc("package.name") + case "reversealphabetically": + 
e.Desc("package.name") + case "highestversion": + e.Desc("package_version.version") + case "lowestversion": + e.Asc("package_version.version") + case "oldest": + e.Asc("package_version.created_unix") + default: + e.Desc("package_version.created_unix") + } +} + +// SearchVersions gets all versions of packages matching the search options +func SearchVersions(ctx context.Context, opts *PackageSearchOptions) ([]*PackageVersion, int64, error) { + sess := db.GetEngine(ctx). + Where(opts.toConds()). + Table("package_version"). + Join("INNER", "package", "package.id = package_version.package_id") + + opts.configureOrderBy(sess) + + if opts.Paginator != nil { + sess = db.SetSessionPagination(sess, opts) + } + + pvs := make([]*PackageVersion, 0, 10) + count, err := sess.FindAndCount(&pvs) + return pvs, count, err +} + +// SearchLatestVersions gets the latest version of every package matching the search options +func SearchLatestVersions(ctx context.Context, opts *PackageSearchOptions) ([]*PackageVersion, int64, error) { + cond := opts.toConds(). + And(builder.Expr("pv2.id IS NULL")) + + sess := db.GetEngine(ctx). + Table("package_version"). + Join("LEFT", "package_version pv2", "package_version.package_id = pv2.package_id AND (package_version.created_unix < pv2.created_unix OR (package_version.created_unix = pv2.created_unix AND package_version.id < pv2.id))"). + Join("INNER", "package", "package.id = package_version.package_id"). + Where(cond) + + opts.configureOrderBy(sess) + + if opts.Paginator != nil { + sess = db.SetSessionPagination(sess, opts) + } + + pvs := make([]*PackageVersion, 0, 10) + count, err := sess.FindAndCount(&pvs) + return pvs, count, err +} diff --git a/models/project/board.go b/models/project/board.go new file mode 100644 index 0000000000..f770a18f59 --- /dev/null +++ b/models/project/board.go @@ -0,0 +1,289 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package project + +import ( + "context" + "fmt" + "regexp" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/timeutil" + + "xorm.io/builder" +) + +type ( + // BoardType is used to represent a project board type + BoardType uint8 + + // BoardList is a list of all project boards in a repository + BoardList []*Board +) + +const ( + // BoardTypeNone is a project board type that has no predefined columns + BoardTypeNone BoardType = iota + + // BoardTypeBasicKanban is a project board type that has basic predefined columns + BoardTypeBasicKanban + + // BoardTypeBugTriage is a project board type that has predefined columns suited to hunting down bugs + BoardTypeBugTriage +) + +// BoardColorPattern is a regexp witch can validate BoardColor +var BoardColorPattern = regexp.MustCompile("^#[0-9a-fA-F]{6}$") + +// Board is used to represent boards on a project +type Board struct { + ID int64 `xorm:"pk autoincr"` + Title string + Default bool `xorm:"NOT NULL DEFAULT false"` // issues not assigned to a specific board will be assigned to this board + Sorting int8 `xorm:"NOT NULL DEFAULT 0"` + Color string `xorm:"VARCHAR(7)"` + + ProjectID int64 `xorm:"INDEX NOT NULL"` + CreatorID int64 `xorm:"NOT NULL"` + + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` + UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` +} + +// TableName return the real table name +func (Board) TableName() string { + return "project_board" +} + +// NumIssues return counter of all issues assigned to the board +func (b *Board) NumIssues() int { + c, err := db.GetEngine(db.DefaultContext).Table("project_issue"). + Where("project_id=?", b.ProjectID). + And("project_board_id=?", b.ID). + GroupBy("issue_id"). + Cols("issue_id"). + Count() + if err != nil { + return 0 + } + return int(c) +} + +func init() { + db.RegisterModel(new(Board)) +} + +// IsBoardTypeValid checks if the project board type is valid +func IsBoardTypeValid(p BoardType) bool { + switch p { + case BoardTypeNone, BoardTypeBasicKanban, BoardTypeBugTriage: + return true + default: + return false + } +} + +func createBoardsForProjectsType(ctx context.Context, project *Project) error { + var items []string + + switch project.BoardType { + + case BoardTypeBugTriage: + items = setting.Project.ProjectBoardBugTriageType + + case BoardTypeBasicKanban: + items = setting.Project.ProjectBoardBasicKanbanType + + case BoardTypeNone: + fallthrough + default: + return nil + } + + if len(items) == 0 { + return nil + } + + boards := make([]Board, 0, len(items)) + + for _, v := range items { + boards = append(boards, Board{ + CreatedUnix: timeutil.TimeStampNow(), + CreatorID: project.CreatorID, + Title: v, + ProjectID: project.ID, + }) + } + + return db.Insert(ctx, boards) +} + +// NewBoard adds a new project board to a given project +func NewBoard(board *Board) error { + if len(board.Color) != 0 && !BoardColorPattern.MatchString(board.Color) { + return fmt.Errorf("bad color code: %s", board.Color) + } + + _, err := db.GetEngine(db.DefaultContext).Insert(board) + return err +} + +// DeleteBoardByID removes all issues references to the project board. 
+func DeleteBoardByID(boardID int64) error { + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + if err := deleteBoardByID(ctx, boardID); err != nil { + return err + } + + return committer.Commit() +} + +func deleteBoardByID(ctx context.Context, boardID int64) error { + e := db.GetEngine(ctx) + board, err := getBoard(e, boardID) + if err != nil { + if IsErrProjectBoardNotExist(err) { + return nil + } + + return err + } + + if err = board.removeIssues(e); err != nil { + return err + } + + if _, err := e.ID(board.ID).Delete(board); err != nil { + return err + } + return nil +} + +func deleteBoardByProjectID(e db.Engine, projectID int64) error { + _, err := e.Where("project_id=?", projectID).Delete(&Board{}) + return err +} + +// GetBoard fetches the current board of a project +func GetBoard(boardID int64) (*Board, error) { + return getBoard(db.GetEngine(db.DefaultContext), boardID) +} + +func getBoard(e db.Engine, boardID int64) (*Board, error) { + board := new(Board) + + has, err := e.ID(boardID).Get(board) + if err != nil { + return nil, err + } else if !has { + return nil, ErrProjectBoardNotExist{BoardID: boardID} + } + + return board, nil +} + +// UpdateBoard updates a project board +func UpdateBoard(board *Board) error { + return updateBoard(db.GetEngine(db.DefaultContext), board) +} + +func updateBoard(e db.Engine, board *Board) error { + var fieldToUpdate []string + + if board.Sorting != 0 { + fieldToUpdate = append(fieldToUpdate, "sorting") + } + + if board.Title != "" { + fieldToUpdate = append(fieldToUpdate, "title") + } + + if len(board.Color) != 0 && !BoardColorPattern.MatchString(board.Color) { + return fmt.Errorf("bad color code: %s", board.Color) + } + fieldToUpdate = append(fieldToUpdate, "color") + + _, err := e.ID(board.ID).Cols(fieldToUpdate...).Update(board) + + return err +} + +// GetBoards fetches all boards related to a project +// if no default board set, first board is a temporary "Uncategorized" board +func GetBoards(projectID int64) (BoardList, error) { + return getBoards(db.GetEngine(db.DefaultContext), projectID) +} + +func getBoards(e db.Engine, projectID int64) ([]*Board, error) { + boards := make([]*Board, 0, 5) + + if err := e.Where("project_id=? AND `default`=?", projectID, false).OrderBy("Sorting").Find(&boards); err != nil { + return nil, err + } + + defaultB, err := getDefaultBoard(e, projectID) + if err != nil { + return nil, err + } + + return append([]*Board{defaultB}, boards...), nil +} + +// getDefaultBoard return default board and create a dummy if none exist +func getDefaultBoard(e db.Engine, projectID int64) (*Board, error) { + var board Board + exist, err := e.Where("project_id=? AND `default`=?", projectID, true).Get(&board) + if err != nil { + return nil, err + } + if exist { + return &board, nil + } + + // represents a board for issues not assigned to one + return &Board{ + ProjectID: projectID, + Title: "Uncategorized", + Default: true, + }, nil +} + +// SetDefaultBoard represents a board for issues not assigned to one +// if boardID is 0 unset default +func SetDefaultBoard(projectID, boardID int64) error { + _, err := db.GetEngine(db.DefaultContext).Where(builder.Eq{ + "project_id": projectID, + "`default`": true, + }).Cols("`default`").Update(&Board{Default: false}) + if err != nil { + return err + } + + if boardID > 0 { + _, err = db.GetEngine(db.DefaultContext).ID(boardID).Where(builder.Eq{"project_id": projectID}). 
+ Cols("`default`").Update(&Board{Default: true}) + } + + return err +} + +// UpdateBoardSorting update project board sorting +func UpdateBoardSorting(bs BoardList) error { + for i := range bs { + _, err := db.GetEngine(db.DefaultContext).ID(bs[i].ID).Cols( + "sorting", + ).Update(bs[i]) + if err != nil { + return err + } + } + return nil +} diff --git a/models/project/issue.go b/models/project/issue.go new file mode 100644 index 0000000000..6bde91668f --- /dev/null +++ b/models/project/issue.go @@ -0,0 +1,103 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package project + +import ( + "context" + "fmt" + + "code.gitea.io/gitea/models/db" +) + +// ProjectIssue saves relation from issue to a project +type ProjectIssue struct { //revive:disable-line:exported + ID int64 `xorm:"pk autoincr"` + IssueID int64 `xorm:"INDEX"` + ProjectID int64 `xorm:"INDEX"` + + // If 0, then it has not been added to a specific board in the project + ProjectBoardID int64 `xorm:"INDEX"` + + // the sorting order on the board + Sorting int64 `xorm:"NOT NULL DEFAULT 0"` +} + +func init() { + db.RegisterModel(new(ProjectIssue)) +} + +func deleteProjectIssuesByProjectID(e db.Engine, projectID int64) error { + _, err := e.Where("project_id=?", projectID).Delete(&ProjectIssue{}) + return err +} + +// NumIssues return counter of all issues assigned to a project +func (p *Project) NumIssues() int { + c, err := db.GetEngine(db.DefaultContext).Table("project_issue"). + Where("project_id=?", p.ID). + GroupBy("issue_id"). + Cols("issue_id"). + Count() + if err != nil { + return 0 + } + return int(c) +} + +// NumClosedIssues return counter of closed issues assigned to a project +func (p *Project) NumClosedIssues() int { + c, err := db.GetEngine(db.DefaultContext).Table("project_issue"). + Join("INNER", "issue", "project_issue.issue_id=issue.id"). + Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, true). + Cols("issue_id"). + Count() + if err != nil { + return 0 + } + return int(c) +} + +// NumOpenIssues return counter of open issues assigned to a project +func (p *Project) NumOpenIssues() int { + c, err := db.GetEngine(db.DefaultContext).Table("project_issue"). + Join("INNER", "issue", "project_issue.issue_id=issue.id"). + Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, false).Count("issue.id") + if err != nil { + return 0 + } + return int(c) +} + +// MoveIssuesOnProjectBoard moves or keeps issues in a column and sorts them inside that column +func MoveIssuesOnProjectBoard(board *Board, sortedIssueIDs map[int64]int64) error { + return db.WithTx(func(ctx context.Context) error { + sess := db.GetEngine(ctx) + + issueIDs := make([]int64, 0, len(sortedIssueIDs)) + for _, issueID := range sortedIssueIDs { + issueIDs = append(issueIDs, issueID) + } + count, err := sess.Table(new(ProjectIssue)).Where("project_id=?", board.ProjectID).In("issue_id", issueIDs).Count() + if err != nil { + return err + } + if int(count) != len(sortedIssueIDs) { + return fmt.Errorf("all issues have to be added to a project first") + } + + for sorting, issueID := range sortedIssueIDs { + _, err = sess.Exec("UPDATE `project_issue` SET project_board_id=?, sorting=? 
WHERE issue_id=?", board.ID, sorting, issueID) + if err != nil { + return err + } + } + return nil + }) +} + +func (pb *Board) removeIssues(e db.Engine) error { + _, err := e.Exec("UPDATE `project_issue` SET project_board_id = 0 WHERE project_board_id = ? ", pb.ID) + return err +} diff --git a/models/project/main_test.go b/models/project/main_test.go new file mode 100644 index 0000000000..2e97b3a5ef --- /dev/null +++ b/models/project/main_test.go @@ -0,0 +1,26 @@ +// Copyright 2020 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package project + +import ( + "path/filepath" + "testing" + + "code.gitea.io/gitea/models/unittest" + + _ "code.gitea.io/gitea/models/repo" +) + +func TestMain(m *testing.M) { + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{ + "project.yml", + "project_board.yml", + "project_issue.yml", + "repository.yml", + }, + }) +} diff --git a/models/project.go b/models/project/project.go similarity index 67% rename from models/project.go rename to models/project/project.go index e6a650674b..a639879e78 100644 --- a/models/project.go +++ b/models/project/project.go @@ -2,9 +2,10 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package models +package project import ( + "context" "errors" "fmt" @@ -19,25 +20,56 @@ import ( type ( // ProjectsConfig is used to identify the type of board that is being created ProjectsConfig struct { - BoardType ProjectBoardType + BoardType BoardType Translation string } - // ProjectType is used to identify the type of project in question and ownership - ProjectType uint8 + // Type is used to identify the type of project in question and ownership + Type uint8 ) const ( - // ProjectTypeIndividual is a type of project board that is owned by an individual - ProjectTypeIndividual ProjectType = iota + 1 + // TypeIndividual is a type of project board that is owned by an individual + TypeIndividual Type = iota + 1 - // ProjectTypeRepository is a project that is tied to a repository - ProjectTypeRepository + // TypeRepository is a project that is tied to a repository + TypeRepository - // ProjectTypeOrganization is a project that is tied to an organisation - ProjectTypeOrganization + // TypeOrganization is a project that is tied to an organisation + TypeOrganization ) +// ErrProjectNotExist represents a "ProjectNotExist" kind of error. +type ErrProjectNotExist struct { + ID int64 + RepoID int64 +} + +// IsErrProjectNotExist checks if an error is a ErrProjectNotExist +func IsErrProjectNotExist(err error) bool { + _, ok := err.(ErrProjectNotExist) + return ok +} + +func (err ErrProjectNotExist) Error() string { + return fmt.Sprintf("projects does not exist [id: %d]", err.ID) +} + +// ErrProjectBoardNotExist represents a "ProjectBoardNotExist" kind of error. 
+type ErrProjectBoardNotExist struct { + BoardID int64 +} + +// IsErrProjectBoardNotExist checks if an error is a ErrProjectBoardNotExist +func IsErrProjectBoardNotExist(err error) bool { + _, ok := err.(ErrProjectBoardNotExist) + return ok +} + +func (err ErrProjectBoardNotExist) Error() string { + return fmt.Sprintf("project board does not exist [id: %d]", err.BoardID) +} + // Project represents a project board type Project struct { ID int64 `xorm:"pk autoincr"` @@ -46,8 +78,8 @@ type Project struct { RepoID int64 `xorm:"INDEX"` CreatorID int64 `xorm:"NOT NULL"` IsClosed bool `xorm:"INDEX"` - BoardType ProjectBoardType - Type ProjectType + BoardType BoardType + Type Type RenderedContent string `xorm:"-"` @@ -63,37 +95,39 @@ func init() { // GetProjectsConfig retrieves the types of configurations projects could have func GetProjectsConfig() []ProjectsConfig { return []ProjectsConfig{ - {ProjectBoardTypeNone, "repo.projects.type.none"}, - {ProjectBoardTypeBasicKanban, "repo.projects.type.basic_kanban"}, - {ProjectBoardTypeBugTriage, "repo.projects.type.bug_triage"}, + {BoardTypeNone, "repo.projects.type.none"}, + {BoardTypeBasicKanban, "repo.projects.type.basic_kanban"}, + {BoardTypeBugTriage, "repo.projects.type.bug_triage"}, } } -// IsProjectTypeValid checks if a project type is valid -func IsProjectTypeValid(p ProjectType) bool { +// IsTypeValid checks if a project type is valid +func IsTypeValid(p Type) bool { switch p { - case ProjectTypeRepository: + case TypeRepository: return true default: return false } } -// ProjectSearchOptions are options for GetProjects -type ProjectSearchOptions struct { +// SearchOptions are options for GetProjects +type SearchOptions struct { RepoID int64 Page int IsClosed util.OptionalBool SortType string - Type ProjectType + Type Type } // GetProjects returns a list of all projects that have been created in the repository -func GetProjects(opts ProjectSearchOptions) ([]*Project, int64, error) { - return getProjects(db.GetEngine(db.DefaultContext), opts) +func GetProjects(opts SearchOptions) ([]*Project, int64, error) { + return GetProjectsCtx(db.DefaultContext, opts) } -func getProjects(e db.Engine, opts ProjectSearchOptions) ([]*Project, int64, error) { +// GetProjectsCtx returns a list of all projects that have been created in the repository +func GetProjectsCtx(ctx context.Context, opts SearchOptions) ([]*Project, int64, error) { + e := db.GetEngine(ctx) projects := make([]*Project, 0, setting.UI.IssuePagingNum) var cond builder.Cond = builder.Eq{"repo_id": opts.RepoID} @@ -135,11 +169,11 @@ func getProjects(e db.Engine, opts ProjectSearchOptions) ([]*Project, int64, err // NewProject creates a new Project func NewProject(p *Project) error { - if !IsProjectBoardTypeValid(p.BoardType) { - p.BoardType = ProjectBoardTypeNone + if !IsBoardTypeValid(p.BoardType) { + p.BoardType = BoardTypeNone } - if !IsProjectTypeValid(p.Type) { + if !IsTypeValid(p.Type) { return errors.New("project type is not valid") } @@ -157,7 +191,7 @@ func NewProject(p *Project) error { return err } - if err := createBoardsForProjectsType(db.GetEngine(ctx), p); err != nil { + if err := createBoardsForProjectsType(ctx, p); err != nil { return err } @@ -200,7 +234,7 @@ func updateRepositoryProjectCount(e db.Engine, repoID int64) error { builder.Eq{ "`num_projects`": builder.Select("count(*)").From("`project`"). Where(builder.Eq{"`project`.`repo_id`": repoID}. 
- And(builder.Eq{"`project`.`type`": ProjectTypeRepository})), + And(builder.Eq{"`project`.`type`": TypeRepository})), }).From("`repository`").Where(builder.Eq{"id": repoID})); err != nil { return err } @@ -209,7 +243,7 @@ func updateRepositoryProjectCount(e db.Engine, repoID int64) error { builder.Eq{ "`num_closed_projects`": builder.Select("count(*)").From("`project`"). Where(builder.Eq{"`project`.`repo_id`": repoID}. - And(builder.Eq{"`project`.`type`": ProjectTypeRepository}). + And(builder.Eq{"`project`.`type`": TypeRepository}). And(builder.Eq{"`project`.`is_closed`": true})), }).From("`repository`").Where(builder.Eq{"id": repoID})); err != nil { return err @@ -224,18 +258,17 @@ func ChangeProjectStatusByRepoIDAndID(repoID, projectID int64, isClosed bool) er return err } defer committer.Close() - sess := db.GetEngine(ctx) p := new(Project) - has, err := sess.ID(projectID).Where("repo_id = ?", repoID).Get(p) + has, err := db.GetEngine(ctx).ID(projectID).Where("repo_id = ?", repoID).Get(p) if err != nil { return err } else if !has { return ErrProjectNotExist{ID: projectID, RepoID: repoID} } - if err := changeProjectStatus(sess, p, isClosed); err != nil { + if err := changeProjectStatus(ctx, p, isClosed); err != nil { return err } @@ -250,16 +283,17 @@ func ChangeProjectStatus(p *Project, isClosed bool) error { } defer committer.Close() - if err := changeProjectStatus(db.GetEngine(ctx), p, isClosed); err != nil { + if err := changeProjectStatus(ctx, p, isClosed); err != nil { return err } return committer.Commit() } -func changeProjectStatus(e db.Engine, p *Project, isClosed bool) error { +func changeProjectStatus(ctx context.Context, p *Project, isClosed bool) error { p.IsClosed = isClosed p.ClosedDateUnix = timeutil.TimeStampNow() + e := db.GetEngine(ctx) count, err := e.ID(p.ID).Where("repo_id = ? AND is_closed = ?", p.RepoID, !isClosed).Cols("is_closed", "closed_date_unix").Update(p) if err != nil { return err @@ -279,14 +313,16 @@ func DeleteProjectByID(id int64) error { } defer committer.Close() - if err := deleteProjectByID(db.GetEngine(ctx), id); err != nil { + if err := DeleteProjectByIDCtx(ctx, id); err != nil { return err } return committer.Commit() } -func deleteProjectByID(e db.Engine, id int64) error { +// DeleteProjectByIDCtx deletes a project from a repository. +func DeleteProjectByIDCtx(ctx context.Context, id int64) error { + e := db.GetEngine(ctx) p, err := getProjectByID(e, id) if err != nil { if IsErrProjectNotExist(err) { @@ -299,7 +335,7 @@ func deleteProjectByID(e db.Engine, id int64) error { return err } - if err := deleteProjectBoardByProjectID(e, id); err != nil { + if err := deleteBoardByProjectID(e, id); err != nil { return err } diff --git a/models/project_test.go b/models/project/project_test.go similarity index 77% rename from models/project_test.go rename to models/project/project_test.go index 70dabb7674..211a890874 100644 --- a/models/project_test.go +++ b/models/project/project_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. 
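With the move from `models.Project*` into the dedicated `project` package, call sites switch to the shorter names (`Type`, `BoardType`, `SearchOptions`). A sketch of the renamed API as used by the code in this hunk; the project title and IDs are invented for illustration.

```go
package example

import (
	"code.gitea.io/gitea/models/db"
	project_model "code.gitea.io/gitea/models/project"
	"code.gitea.io/gitea/modules/timeutil"
)

// createAndCloseProject sketches the renamed API: Type/BoardType instead of
// ProjectType/ProjectBoardType, SearchOptions instead of ProjectSearchOptions.
func createAndCloseProject(repoID, doerID int64) error {
	p := &project_model.Project{
		Title:       "Release 1.17",
		RepoID:      repoID,
		CreatorID:   doerID,
		Type:        project_model.TypeRepository,
		BoardType:   project_model.BoardTypeBasicKanban,
		CreatedUnix: timeutil.TimeStampNow(),
	}
	if err := project_model.NewProject(p); err != nil {
		return err
	}

	// List repository projects through the context-aware variant.
	projects, _, err := project_model.GetProjectsCtx(db.DefaultContext, project_model.SearchOptions{
		RepoID: repoID,
		Type:   project_model.TypeRepository,
	})
	if err != nil {
		return err
	}
	_ = projects

	// Close the project we just created.
	return project_model.ChangeProjectStatusByRepoIDAndID(repoID, p.ID, true)
}
```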
-package models +package project import ( "testing" @@ -14,33 +14,33 @@ import ( ) func TestIsProjectTypeValid(t *testing.T) { - const UnknownType ProjectType = 15 + const UnknownType Type = 15 cases := []struct { - typ ProjectType + typ Type valid bool }{ - {ProjectTypeIndividual, false}, - {ProjectTypeRepository, true}, - {ProjectTypeOrganization, false}, + {TypeIndividual, false}, + {TypeRepository, true}, + {TypeOrganization, false}, {UnknownType, false}, } for _, v := range cases { - assert.Equal(t, v.valid, IsProjectTypeValid(v.typ)) + assert.Equal(t, v.valid, IsTypeValid(v.typ)) } } func TestGetProjects(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - projects, _, err := GetProjects(ProjectSearchOptions{RepoID: 1}) + projects, _, err := GetProjects(SearchOptions{RepoID: 1}) assert.NoError(t, err) // 1 value for this repo exists in the fixtures assert.Len(t, projects, 1) - projects, _, err = GetProjects(ProjectSearchOptions{RepoID: 3}) + projects, _, err = GetProjects(SearchOptions{RepoID: 3}) assert.NoError(t, err) // 1 value for this repo exists in the fixtures @@ -51,8 +51,8 @@ func TestProject(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) project := &Project{ - Type: ProjectTypeRepository, - BoardType: ProjectBoardTypeBasicKanban, + Type: TypeRepository, + BoardType: BoardTypeBasicKanban, Title: "New Project", RepoID: 1, CreatedUnix: timeutil.TimeStampNow(), diff --git a/models/project_board.go b/models/project_board.go deleted file mode 100644 index d40cfd06f0..0000000000 --- a/models/project_board.go +++ /dev/null @@ -1,321 +0,0 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -package models - -import ( - "fmt" - "regexp" - - "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/timeutil" - - "xorm.io/builder" -) - -type ( - // ProjectBoardType is used to represent a project board type - ProjectBoardType uint8 - - // ProjectBoardList is a list of all project boards in a repository - ProjectBoardList []*ProjectBoard -) - -const ( - // ProjectBoardTypeNone is a project board type that has no predefined columns - ProjectBoardTypeNone ProjectBoardType = iota - - // ProjectBoardTypeBasicKanban is a project board type that has basic predefined columns - ProjectBoardTypeBasicKanban - - // ProjectBoardTypeBugTriage is a project board type that has predefined columns suited to hunting down bugs - ProjectBoardTypeBugTriage -) - -// BoardColorPattern is a regexp witch can validate BoardColor -var BoardColorPattern = regexp.MustCompile("^#[0-9a-fA-F]{6}$") - -// ProjectBoard is used to represent boards on a project -type ProjectBoard struct { - ID int64 `xorm:"pk autoincr"` - Title string - Default bool `xorm:"NOT NULL DEFAULT false"` // issues not assigned to a specific board will be assigned to this board - Sorting int8 `xorm:"NOT NULL DEFAULT 0"` - Color string `xorm:"VARCHAR(7)"` - - ProjectID int64 `xorm:"INDEX NOT NULL"` - CreatorID int64 `xorm:"NOT NULL"` - - CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` - UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` - - Issues []*Issue `xorm:"-"` -} - -func init() { - db.RegisterModel(new(ProjectBoard)) -} - -// IsProjectBoardTypeValid checks if the project board type is valid -func IsProjectBoardTypeValid(p ProjectBoardType) bool { - switch p { - case ProjectBoardTypeNone, ProjectBoardTypeBasicKanban, 
ProjectBoardTypeBugTriage: - return true - default: - return false - } -} - -func createBoardsForProjectsType(sess db.Engine, project *Project) error { - var items []string - - switch project.BoardType { - - case ProjectBoardTypeBugTriage: - items = setting.Project.ProjectBoardBugTriageType - - case ProjectBoardTypeBasicKanban: - items = setting.Project.ProjectBoardBasicKanbanType - - case ProjectBoardTypeNone: - fallthrough - default: - return nil - } - - if len(items) == 0 { - return nil - } - - boards := make([]ProjectBoard, 0, len(items)) - - for _, v := range items { - boards = append(boards, ProjectBoard{ - CreatedUnix: timeutil.TimeStampNow(), - CreatorID: project.CreatorID, - Title: v, - ProjectID: project.ID, - }) - } - - _, err := sess.Insert(boards) - return err -} - -// NewProjectBoard adds a new project board to a given project -func NewProjectBoard(board *ProjectBoard) error { - if len(board.Color) != 0 && !BoardColorPattern.MatchString(board.Color) { - return fmt.Errorf("bad color code: %s", board.Color) - } - - _, err := db.GetEngine(db.DefaultContext).Insert(board) - return err -} - -// DeleteProjectBoardByID removes all issues references to the project board. -func DeleteProjectBoardByID(boardID int64) error { - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() - - if err := deleteProjectBoardByID(db.GetEngine(ctx), boardID); err != nil { - return err - } - - return committer.Commit() -} - -func deleteProjectBoardByID(e db.Engine, boardID int64) error { - board, err := getProjectBoard(e, boardID) - if err != nil { - if IsErrProjectBoardNotExist(err) { - return nil - } - - return err - } - - if err = board.removeIssues(e); err != nil { - return err - } - - if _, err := e.ID(board.ID).Delete(board); err != nil { - return err - } - return nil -} - -func deleteProjectBoardByProjectID(e db.Engine, projectID int64) error { - _, err := e.Where("project_id=?", projectID).Delete(&ProjectBoard{}) - return err -} - -// GetProjectBoard fetches the current board of a project -func GetProjectBoard(boardID int64) (*ProjectBoard, error) { - return getProjectBoard(db.GetEngine(db.DefaultContext), boardID) -} - -func getProjectBoard(e db.Engine, boardID int64) (*ProjectBoard, error) { - board := new(ProjectBoard) - - has, err := e.ID(boardID).Get(board) - if err != nil { - return nil, err - } else if !has { - return nil, ErrProjectBoardNotExist{BoardID: boardID} - } - - return board, nil -} - -// UpdateProjectBoard updates a project board -func UpdateProjectBoard(board *ProjectBoard) error { - return updateProjectBoard(db.GetEngine(db.DefaultContext), board) -} - -func updateProjectBoard(e db.Engine, board *ProjectBoard) error { - var fieldToUpdate []string - - if board.Sorting != 0 { - fieldToUpdate = append(fieldToUpdate, "sorting") - } - - if board.Title != "" { - fieldToUpdate = append(fieldToUpdate, "title") - } - - if len(board.Color) != 0 && !BoardColorPattern.MatchString(board.Color) { - return fmt.Errorf("bad color code: %s", board.Color) - } - fieldToUpdate = append(fieldToUpdate, "color") - - _, err := e.ID(board.ID).Cols(fieldToUpdate...).Update(board) - - return err -} - -// GetProjectBoards fetches all boards related to a project -// if no default board set, first board is a temporary "Uncategorized" board -func GetProjectBoards(projectID int64) (ProjectBoardList, error) { - return getProjectBoards(db.GetEngine(db.DefaultContext), projectID) -} - -func getProjectBoards(e db.Engine, projectID int64) ([]*ProjectBoard, error) { - 
boards := make([]*ProjectBoard, 0, 5) - - if err := e.Where("project_id=? AND `default`=?", projectID, false).OrderBy("Sorting").Find(&boards); err != nil { - return nil, err - } - - defaultB, err := getDefaultBoard(e, projectID) - if err != nil { - return nil, err - } - - return append([]*ProjectBoard{defaultB}, boards...), nil -} - -// getDefaultBoard return default board and create a dummy if none exist -func getDefaultBoard(e db.Engine, projectID int64) (*ProjectBoard, error) { - var board ProjectBoard - exist, err := e.Where("project_id=? AND `default`=?", projectID, true).Get(&board) - if err != nil { - return nil, err - } - if exist { - return &board, nil - } - - // represents a board for issues not assigned to one - return &ProjectBoard{ - ProjectID: projectID, - Title: "Uncategorized", - Default: true, - }, nil -} - -// SetDefaultBoard represents a board for issues not assigned to one -// if boardID is 0 unset default -func SetDefaultBoard(projectID, boardID int64) error { - _, err := db.GetEngine(db.DefaultContext).Where(builder.Eq{ - "project_id": projectID, - "`default`": true, - }).Cols("`default`").Update(&ProjectBoard{Default: false}) - if err != nil { - return err - } - - if boardID > 0 { - _, err = db.GetEngine(db.DefaultContext).ID(boardID).Where(builder.Eq{"project_id": projectID}). - Cols("`default`").Update(&ProjectBoard{Default: true}) - } - - return err -} - -// LoadIssues load issues assigned to this board -func (b *ProjectBoard) LoadIssues() (IssueList, error) { - issueList := make([]*Issue, 0, 10) - - if b.ID != 0 { - issues, err := Issues(&IssuesOptions{ - ProjectBoardID: b.ID, - ProjectID: b.ProjectID, - SortType: "project-column-sorting", - }) - if err != nil { - return nil, err - } - issueList = issues - } - - if b.Default { - issues, err := Issues(&IssuesOptions{ - ProjectBoardID: -1, // Issues without ProjectBoardID - ProjectID: b.ProjectID, - SortType: "project-column-sorting", - }) - if err != nil { - return nil, err - } - issueList = append(issueList, issues...) - } - - if err := IssueList(issueList).LoadComments(); err != nil { - return nil, err - } - - b.Issues = issueList - return issueList, nil -} - -// LoadIssues load issues assigned to the boards -func (bs ProjectBoardList) LoadIssues() (IssueList, error) { - issues := make(IssueList, 0, len(bs)*10) - for i := range bs { - il, err := bs[i].LoadIssues() - if err != nil { - return nil, err - } - bs[i].Issues = il - issues = append(issues, il...) - } - return issues, nil -} - -// UpdateProjectBoardSorting update project board sorting -func UpdateProjectBoardSorting(bs ProjectBoardList) error { - for i := range bs { - _, err := db.GetEngine(db.DefaultContext).ID(bs[i].ID).Cols( - "sorting", - ).Update(bs[i]) - if err != nil { - return err - } - } - return nil -} diff --git a/models/project_issue.go b/models/project_issue.go deleted file mode 100644 index c7735addcc..0000000000 --- a/models/project_issue.go +++ /dev/null @@ -1,218 +0,0 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. 
- -package models - -import ( - "context" - "fmt" - - "code.gitea.io/gitea/models/db" - user_model "code.gitea.io/gitea/models/user" -) - -// ProjectIssue saves relation from issue to a project -type ProjectIssue struct { - ID int64 `xorm:"pk autoincr"` - IssueID int64 `xorm:"INDEX"` - ProjectID int64 `xorm:"INDEX"` - - // If 0, then it has not been added to a specific board in the project - ProjectBoardID int64 `xorm:"INDEX"` - Sorting int64 `xorm:"NOT NULL DEFAULT 0"` -} - -func init() { - db.RegisterModel(new(ProjectIssue)) -} - -func deleteProjectIssuesByProjectID(e db.Engine, projectID int64) error { - _, err := e.Where("project_id=?", projectID).Delete(&ProjectIssue{}) - return err -} - -// ___ -// |_ _|___ ___ _ _ ___ -// | |/ __/ __| | | |/ _ \ -// | |\__ \__ \ |_| | __/ -// |___|___/___/\__,_|\___| - -// LoadProject load the project the issue was assigned to -func (i *Issue) LoadProject() (err error) { - return i.loadProject(db.GetEngine(db.DefaultContext)) -} - -func (i *Issue) loadProject(e db.Engine) (err error) { - if i.Project == nil { - var p Project - if _, err = e.Table("project"). - Join("INNER", "project_issue", "project.id=project_issue.project_id"). - Where("project_issue.issue_id = ?", i.ID). - Get(&p); err != nil { - return err - } - i.Project = &p - } - return -} - -// ProjectID return project id if issue was assigned to one -func (i *Issue) ProjectID() int64 { - return i.projectID(db.GetEngine(db.DefaultContext)) -} - -func (i *Issue) projectID(e db.Engine) int64 { - var ip ProjectIssue - has, err := e.Where("issue_id=?", i.ID).Get(&ip) - if err != nil || !has { - return 0 - } - return ip.ProjectID -} - -// ProjectBoardID return project board id if issue was assigned to one -func (i *Issue) ProjectBoardID() int64 { - return i.projectBoardID(db.GetEngine(db.DefaultContext)) -} - -func (i *Issue) projectBoardID(e db.Engine) int64 { - var ip ProjectIssue - has, err := e.Where("issue_id=?", i.ID).Get(&ip) - if err != nil || !has { - return 0 - } - return ip.ProjectBoardID -} - -// ____ _ _ -// | _ \ _ __ ___ (_) ___ ___| |_ -// | |_) | '__/ _ \| |/ _ \/ __| __| -// | __/| | | (_) | | __/ (__| |_ -// |_| |_| \___// |\___|\___|\__| -// |__/ - -// NumIssues return counter of all issues assigned to a project -func (p *Project) NumIssues() int { - c, err := db.GetEngine(db.DefaultContext).Table("project_issue"). - Where("project_id=?", p.ID). - GroupBy("issue_id"). - Cols("issue_id"). - Count() - if err != nil { - return 0 - } - return int(c) -} - -// NumClosedIssues return counter of closed issues assigned to a project -func (p *Project) NumClosedIssues() int { - c, err := db.GetEngine(db.DefaultContext).Table("project_issue"). - Join("INNER", "issue", "project_issue.issue_id=issue.id"). - Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, true). - Cols("issue_id"). - Count() - if err != nil { - return 0 - } - return int(c) -} - -// NumOpenIssues return counter of open issues assigned to a project -func (p *Project) NumOpenIssues() int { - c, err := db.GetEngine(db.DefaultContext).Table("project_issue"). - Join("INNER", "issue", "project_issue.issue_id=issue.id"). - Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, false). - Cols("issue_id"). 
- Count() - if err != nil { - return 0 - } - return int(c) -} - -// ChangeProjectAssign changes the project associated with an issue -func ChangeProjectAssign(issue *Issue, doer *user_model.User, newProjectID int64) error { - ctx, committer, err := db.TxContext() - if err != nil { - return err - } - defer committer.Close() - - if err := addUpdateIssueProject(ctx, issue, doer, newProjectID); err != nil { - return err - } - - return committer.Commit() -} - -func addUpdateIssueProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectID int64) error { - e := db.GetEngine(ctx) - oldProjectID := issue.projectID(e) - - if _, err := e.Where("project_issue.issue_id=?", issue.ID).Delete(&ProjectIssue{}); err != nil { - return err - } - - if err := issue.loadRepo(ctx); err != nil { - return err - } - - if oldProjectID > 0 || newProjectID > 0 { - if _, err := createComment(ctx, &CreateCommentOptions{ - Type: CommentTypeProject, - Doer: doer, - Repo: issue.Repo, - Issue: issue, - OldProjectID: oldProjectID, - ProjectID: newProjectID, - }); err != nil { - return err - } - } - - _, err := e.Insert(&ProjectIssue{ - IssueID: issue.ID, - ProjectID: newProjectID, - }) - return err -} - -// ____ _ _ ____ _ -// | _ \ _ __ ___ (_) ___ ___| |_| __ ) ___ __ _ _ __ __| | -// | |_) | '__/ _ \| |/ _ \/ __| __| _ \ / _ \ / _` | '__/ _` | -// | __/| | | (_) | | __/ (__| |_| |_) | (_) | (_| | | | (_| | -// |_| |_| \___// |\___|\___|\__|____/ \___/ \__,_|_| \__,_| -// |__/ - -// MoveIssuesOnProjectBoard moves or keeps issues in a column and sorts them inside that column -func MoveIssuesOnProjectBoard(board *ProjectBoard, sortedIssueIDs map[int64]int64) error { - return db.WithTx(func(ctx context.Context) error { - sess := db.GetEngine(ctx) - - issueIDs := make([]int64, 0, len(sortedIssueIDs)) - for _, issueID := range sortedIssueIDs { - issueIDs = append(issueIDs, issueID) - } - count, err := sess.Table(new(ProjectIssue)).Where("project_id=?", board.ProjectID).In("issue_id", issueIDs).Count() - if err != nil { - return err - } - if int(count) != len(sortedIssueIDs) { - return fmt.Errorf("all issues have to be added to a project first") - } - - for sorting, issueID := range sortedIssueIDs { - _, err = sess.Exec("UPDATE `project_issue` SET project_board_id=?, sorting=? WHERE issue_id=?", board.ID, sorting, issueID) - if err != nil { - return err - } - } - return nil - }) -} - -func (pb *ProjectBoard) removeIssues(e db.Engine) error { - _, err := e.Exec("UPDATE `project_issue` SET project_board_id = 0, sorting = 0 WHERE project_board_id = ? 
", pb.ID) - return err -} diff --git a/models/protected_tag.go b/models/protected_tag.go index c9cc0fa1ba..db6ff50462 100644 --- a/models/protected_tag.go +++ b/models/protected_tag.go @@ -9,6 +9,7 @@ import ( "strings" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/timeutil" @@ -83,7 +84,7 @@ func IsUserAllowedModifyTag(pt *ProtectedTag, userID int64) (bool, error) { return false, nil } - in, err := IsUserInTeams(userID, pt.AllowlistTeamIDs) + in, err := organization.IsUserInTeams(db.DefaultContext, userID, pt.AllowlistTeamIDs) if err != nil { return false, err } diff --git a/models/pull.go b/models/pull.go index d6bfbbf09f..8eab7569cd 100644 --- a/models/pull.go +++ b/models/pull.go @@ -12,14 +12,16 @@ import ( "strings" "code.gitea.io/gitea/models/db" + pull_model "code.gitea.io/gitea/models/pull" repo_model "code.gitea.io/gitea/models/repo" - "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" + + "xorm.io/builder" ) // PullRequestType defines pull request type @@ -69,15 +71,16 @@ type PullRequest struct { Issue *Issue `xorm:"-"` Index int64 - HeadRepoID int64 `xorm:"INDEX"` - HeadRepo *repo_model.Repository `xorm:"-"` - BaseRepoID int64 `xorm:"INDEX"` - BaseRepo *repo_model.Repository `xorm:"-"` - HeadBranch string - HeadCommitID string `xorm:"-"` - BaseBranch string - ProtectedBranch *ProtectedBranch `xorm:"-"` - MergeBase string `xorm:"VARCHAR(40)"` + HeadRepoID int64 `xorm:"INDEX"` + HeadRepo *repo_model.Repository `xorm:"-"` + BaseRepoID int64 `xorm:"INDEX"` + BaseRepo *repo_model.Repository `xorm:"-"` + HeadBranch string + HeadCommitID string `xorm:"-"` + BaseBranch string + ProtectedBranch *ProtectedBranch `xorm:"-"` + MergeBase string `xorm:"VARCHAR(40)"` + AllowMaintainerEdit bool `xorm:"NOT NULL DEFAULT false"` HasMerged bool `xorm:"INDEX"` MergedCommitID string `xorm:"VARCHAR(40)"` @@ -94,6 +97,25 @@ func init() { db.RegisterModel(new(PullRequest)) } +func deletePullsByBaseRepoID(sess db.Engine, repoID int64) error { + deleteCond := builder.Select("id").From("pull_request").Where(builder.Eq{"pull_request.base_repo_id": repoID}) + + // Delete scheduled auto merges + if _, err := sess.In("pull_id", deleteCond). + Delete(&pull_model.AutoMerge{}); err != nil { + return err + } + + // Delete review states + if _, err := sess.In("pull_id", deleteCond). 
+ Delete(&pull_model.ReviewState{}); err != nil { + return err + } + + _, err := sess.Delete(&PullRequest{BaseRepoID: repoID}) + return err +} + // MustHeadUserName returns the HeadRepo's username if failed return blank func (pr *PullRequest) MustHeadUserName() string { if err := pr.LoadHeadRepo(); err != nil { @@ -130,7 +152,8 @@ func (pr *PullRequest) LoadAttributes() error { return pr.loadAttributes(db.GetEngine(db.DefaultContext)) } -func (pr *PullRequest) loadHeadRepo(ctx context.Context) (err error) { +// LoadHeadRepoCtx loads the head repository +func (pr *PullRequest) LoadHeadRepoCtx(ctx context.Context) (err error) { if !pr.isHeadRepoLoaded && pr.HeadRepo == nil && pr.HeadRepoID > 0 { if pr.HeadRepoID == pr.BaseRepoID { if pr.BaseRepo != nil { @@ -153,15 +176,16 @@ func (pr *PullRequest) loadHeadRepo(ctx context.Context) (err error) { // LoadHeadRepo loads the head repository func (pr *PullRequest) LoadHeadRepo() error { - return pr.loadHeadRepo(db.DefaultContext) + return pr.LoadHeadRepoCtx(db.DefaultContext) } // LoadBaseRepo loads the target repository func (pr *PullRequest) LoadBaseRepo() error { - return pr.loadBaseRepo(db.DefaultContext) + return pr.LoadBaseRepoCtx(db.DefaultContext) } -func (pr *PullRequest) loadBaseRepo(ctx context.Context) (err error) { +// LoadBaseRepoCtx loads the target repository +func (pr *PullRequest) LoadBaseRepoCtx(ctx context.Context) (err error) { if pr.BaseRepo != nil { return nil } @@ -185,15 +209,16 @@ func (pr *PullRequest) loadBaseRepo(ctx context.Context) (err error) { // LoadIssue loads issue information from database func (pr *PullRequest) LoadIssue() (err error) { - return pr.loadIssue(db.GetEngine(db.DefaultContext)) + return pr.LoadIssueCtx(db.DefaultContext) } -func (pr *PullRequest) loadIssue(e db.Engine) (err error) { +// LoadIssueCtx loads issue information from database +func (pr *PullRequest) LoadIssueCtx(ctx context.Context) (err error) { if pr.Issue != nil { return nil } - pr.Issue, err = getIssueByID(e, pr.IssueID) + pr.Issue, err = getIssueByID(db.GetEngine(ctx), pr.IssueID) if err == nil { pr.Issue.PullRequest = pr } @@ -202,10 +227,11 @@ func (pr *PullRequest) loadIssue(e db.Engine) (err error) { // LoadProtectedBranch loads the protected branch of the base branch func (pr *PullRequest) LoadProtectedBranch() (err error) { - return pr.loadProtectedBranch(db.DefaultContext) + return pr.LoadProtectedBranchCtx(db.DefaultContext) } -func (pr *PullRequest) loadProtectedBranch(ctx context.Context) (err error) { +// LoadProtectedBranchCtx loads the protected branch of the base branch +func (pr *PullRequest) LoadProtectedBranchCtx(ctx context.Context) (err error) { if pr.ProtectedBranch == nil { if pr.BaseRepo == nil { if pr.BaseRepoID == 0 { @@ -221,37 +247,6 @@ func (pr *PullRequest) loadProtectedBranch(ctx context.Context) (err error) { return } -// GetDefaultMergeMessage returns default message used when merging pull request -func (pr *PullRequest) GetDefaultMergeMessage() string { - if pr.HeadRepo == nil { - var err error - pr.HeadRepo, err = repo_model.GetRepositoryByID(pr.HeadRepoID) - if err != nil { - log.Error("GetRepositoryById[%d]: %v", pr.HeadRepoID, err) - return "" - } - } - if err := pr.LoadIssue(); err != nil { - log.Error("Cannot load issue %d for PR id %d: Error: %v", pr.IssueID, pr.ID, err) - return "" - } - if err := pr.LoadBaseRepo(); err != nil { - log.Error("LoadBaseRepo: %v", err) - return "" - } - - issueReference := "#" - if pr.BaseRepo.UnitEnabled(unit.TypeExternalTracker) { - issueReference = "!" 
- } - - if pr.BaseRepoID == pr.HeadRepoID { - return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch) - } - - return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s:%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch) -} - // ReviewCount represents a count of Reviews type ReviewCount struct { IssueID int64 @@ -334,22 +329,6 @@ func (pr *PullRequest) getReviewedByLines(writer io.Writer) error { return committer.Commit() } -// GetDefaultSquashMessage returns default message used when squash and merging pull request -func (pr *PullRequest) GetDefaultSquashMessage() string { - if err := pr.LoadIssue(); err != nil { - log.Error("LoadIssue: %v", err) - return "" - } - if err := pr.LoadBaseRepo(); err != nil { - log.Error("LoadBaseRepo: %v", err) - return "" - } - if pr.BaseRepo.UnitEnabled(unit.TypeExternalTracker) { - return fmt.Sprintf("%s (!%d)", pr.Issue.Title, pr.Issue.Index) - } - return fmt.Sprintf("%s (#%d)", pr.Issue.Title, pr.Issue.Index) -} - // GetGitRefName returns git ref for hidden pull request branch func (pr *PullRequest) GetGitRefName() string { return fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index) @@ -371,7 +350,7 @@ func (pr *PullRequest) IsEmpty() bool { } // SetMerged sets a pull request to merged and closes the corresponding issue -func (pr *PullRequest) SetMerged() (bool, error) { +func (pr *PullRequest) SetMerged(ctx context.Context) (bool, error) { if pr.HasMerged { return false, fmt.Errorf("PullRequest[%d] already merged", pr.Index) } @@ -380,12 +359,6 @@ func (pr *PullRequest) SetMerged() (bool, error) { } pr.HasMerged = true - - ctx, committer, err := db.TxContext() - if err != nil { - return false, err - } - defer committer.Close() sess := db.GetEngine(ctx) if _, err := sess.Exec("UPDATE `issue` SET `repo_id` = `repo_id` WHERE `id` = ?", pr.IssueID); err != nil { @@ -397,7 +370,7 @@ func (pr *PullRequest) SetMerged() (bool, error) { } pr.Issue = nil - if err := pr.loadIssue(sess); err != nil { + if err := pr.LoadIssueCtx(ctx); err != nil { return false, err } @@ -412,7 +385,7 @@ func (pr *PullRequest) SetMerged() (bool, error) { return false, fmt.Errorf("PullRequest[%d] already closed", pr.Index) } - if err := pr.Issue.loadRepo(ctx); err != nil { + if err := pr.Issue.LoadRepo(ctx); err != nil { return false, err } @@ -420,18 +393,18 @@ func (pr *PullRequest) SetMerged() (bool, error) { return false, err } - if _, err := pr.Issue.changeStatus(ctx, pr.Merger, true, true); err != nil { + if _, err := changeIssueStatus(ctx, pr.Issue, pr.Merger, true, true); err != nil { return false, fmt.Errorf("Issue.changeStatus: %v", err) } + // reset the conflicted files as there cannot be any if we're merged + pr.ConflictedFiles = []string{} + // We need to save all of the data used to compute this merge as it may have already been changed by TestPatch. FIXME: need to set some state to prevent TestPatch from running whilst we are merging. 
- if _, err := sess.Where("id = ?", pr.ID).Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix").Update(pr); err != nil { + if _, err := sess.Where("id = ?", pr.ID).Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix, conflicted_files").Update(pr); err != nil { return false, fmt.Errorf("Failed to update pr[%d]: %v", pr.ID, err) } - if err := committer.Commit(); err != nil { - return false, fmt.Errorf("Commit: %v", err) - } return true, nil } @@ -512,6 +485,11 @@ func GetLatestPullRequestByHeadInfo(repoID int64, branch string) (*PullRequest, // GetPullRequestByIndex returns a pull request by the given index func GetPullRequestByIndex(repoID, index int64) (*PullRequest, error) { + return GetPullRequestByIndexCtx(db.DefaultContext, repoID, index) +} + +// GetPullRequestByIndexCtx returns a pull request by the given index +func GetPullRequestByIndexCtx(ctx context.Context, repoID, index int64) (*PullRequest, error) { if index < 1 { return nil, ErrPullRequestNotExist{} } @@ -520,17 +498,17 @@ func GetPullRequestByIndex(repoID, index int64) (*PullRequest, error) { Index: index, } - has, err := db.GetEngine(db.DefaultContext).Get(pr) + has, err := db.GetEngine(ctx).Get(pr) if err != nil { return nil, err } else if !has { return nil, ErrPullRequestNotExist{0, 0, 0, repoID, "", ""} } - if err = pr.LoadAttributes(); err != nil { + if err = pr.loadAttributes(db.GetEngine(ctx)); err != nil { return nil, err } - if err = pr.LoadIssue(); err != nil { + if err = pr.LoadIssueCtx(ctx); err != nil { return nil, err } @@ -549,8 +527,8 @@ func getPullRequestByID(e db.Engine, id int64) (*PullRequest, error) { } // GetPullRequestByID returns a pull request by given ID. -func GetPullRequestByID(id int64) (*PullRequest, error) { - return getPullRequestByID(db.GetEngine(db.DefaultContext), id) +func GetPullRequestByID(ctx context.Context, id int64) (*PullRequest, error) { + return getPullRequestByID(db.GetEngine(ctx), id) } // GetPullRequestByIssueIDWithNoAttributes returns pull request with no attributes loaded by given issue ID. @@ -676,6 +654,18 @@ func (pr *PullRequest) IsSameRepo() bool { return pr.BaseRepoID == pr.HeadRepoID } +// GetPullRequestsByHeadBranch returns all prs by head branch +// Since there could be multiple prs with the same head branch, this function returns a slice of prs +func GetPullRequestsByHeadBranch(ctx context.Context, headBranch string, headRepoID int64) ([]*PullRequest, error) { + log.Trace("GetPullRequestsByHeadBranch: headBranch: '%s', headRepoID: '%d'", headBranch, headRepoID) + prs := make([]*PullRequest, 0, 2) + if err := db.GetEngine(ctx).Where(builder.Eq{"head_branch": headBranch, "head_repo_id": headRepoID}). + Find(&prs); err != nil { + return nil, err + } + return prs, nil +} + // GetBaseBranchHTMLURL returns the HTML URL of the base branch func (pr *PullRequest) GetBaseBranchHTMLURL() string { if err := pr.LoadBaseRepo(); err != nil { @@ -703,3 +693,22 @@ func (pr *PullRequest) GetHeadBranchHTMLURL() string { } return pr.HeadRepo.HTMLURL() + "/src/branch/" + util.PathEscapeSegments(pr.HeadBranch) } + +// UpdateAllowEdits update if PR can be edited from maintainers +func UpdateAllowEdits(ctx context.Context, pr *PullRequest) error { + if _, err := db.GetEngine(ctx).ID(pr.ID).Cols("allow_maintainer_edit").Update(pr); err != nil { + return err + } + return nil +} + +// Mergeable returns if the pullrequest is mergeable. 
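Earlier in this hunk `SetMerged` stops opening its own transaction: the caller now supplies the context, typically from `db.WithTx`, so the merge flag, the issue close, and any related writes commit or roll back together. A minimal sketch of the new calling convention; as in the existing checks at the top of `SetMerged`, the merge fields are expected to be filled in beforehand.

```go
package example

import (
	"context"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/models/db"
)

// markMerged sketches the new SetMerged contract: the caller owns the
// transaction and passes its ctx down instead of SetMerged opening one.
// pr.Merger, pr.MergedCommitID and pr.MergedUnix should already be set.
func markMerged(pr *models.PullRequest) error {
	return db.WithTx(func(ctx context.Context) error {
		if _, err := pr.SetMerged(ctx); err != nil {
			return err
		}
		// other writes that must be atomic with the merge go here
		return nil
	})
}
```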
+func (pr *PullRequest) Mergeable() bool { + // If a pull request isn't mergable if it's: + // - Being conflict checked. + // - Has a conflict. + // - Received a error while being conflict checked. + // - Is a work-in-progress pull request. + return pr.Status != PullRequestStatusChecking && pr.Status != PullRequestStatusConflict && + pr.Status != PullRequestStatusError && !pr.IsWorkInProgress() +} diff --git a/models/pull/automerge.go b/models/pull/automerge.go new file mode 100644 index 0000000000..d0aca2e85f --- /dev/null +++ b/models/pull/automerge.go @@ -0,0 +1,98 @@ +// Copyright 2022 Gitea. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package pull + +import ( + "context" + "fmt" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/timeutil" +) + +// AutoMerge represents a pull request scheduled for merging when checks succeed +type AutoMerge struct { + ID int64 `xorm:"pk autoincr"` + PullID int64 `xorm:"UNIQUE"` + DoerID int64 `xorm:"NOT NULL"` + Doer *user_model.User `xorm:"-"` + MergeStyle repo_model.MergeStyle `xorm:"varchar(30)"` + Message string `xorm:"LONGTEXT"` + CreatedUnix timeutil.TimeStamp `xorm:"created"` +} + +// TableName return database table name for xorm +func (AutoMerge) TableName() string { + return "pull_auto_merge" +} + +func init() { + db.RegisterModel(new(AutoMerge)) +} + +// ErrAlreadyScheduledToAutoMerge represents a "PullRequestHasMerged"-error +type ErrAlreadyScheduledToAutoMerge struct { + PullID int64 +} + +func (err ErrAlreadyScheduledToAutoMerge) Error() string { + return fmt.Sprintf("pull request is already scheduled to auto merge when checks succeed [pull_id: %d]", err.PullID) +} + +// IsErrAlreadyScheduledToAutoMerge checks if an error is a ErrAlreadyScheduledToAutoMerge. 
+func IsErrAlreadyScheduledToAutoMerge(err error) bool { + _, ok := err.(ErrAlreadyScheduledToAutoMerge) + return ok +} + +// ScheduleAutoMerge schedules a pull request to be merged when all checks succeed +func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pullID int64, style repo_model.MergeStyle, message string) error { + // Check if we already have a merge scheduled for that pull request + if exists, _, err := GetScheduledMergeByPullID(ctx, pullID); err != nil { + return err + } else if exists { + return ErrAlreadyScheduledToAutoMerge{PullID: pullID} + } + + _, err := db.GetEngine(ctx).Insert(&AutoMerge{ + DoerID: doer.ID, + PullID: pullID, + MergeStyle: style, + Message: message, + }) + return err +} + +// GetScheduledMergeByPullID gets a scheduled pull request merge by pull request id +func GetScheduledMergeByPullID(ctx context.Context, pullID int64) (bool, *AutoMerge, error) { + scheduledPRM := &AutoMerge{} + exists, err := db.GetEngine(ctx).Where("pull_id = ?", pullID).Get(scheduledPRM) + if err != nil || !exists { + return false, nil, err + } + + doer, err := user_model.GetUserByIDCtx(ctx, scheduledPRM.DoerID) + if err != nil { + return false, nil, err + } + + scheduledPRM.Doer = doer + return true, scheduledPRM, nil +} + +// DeleteScheduledAutoMerge delete a scheduled pull request +func DeleteScheduledAutoMerge(ctx context.Context, pullID int64) error { + exist, scheduledPRM, err := GetScheduledMergeByPullID(ctx, pullID) + if err != nil { + return err + } else if !exist { + return db.ErrNotExist{ID: pullID} + } + + _, err = db.GetEngine(ctx).ID(scheduledPRM.ID).Delete(&AutoMerge{}) + return err +} diff --git a/models/pull/review_state.go b/models/pull/review_state.go new file mode 100644 index 0000000000..1c465bf766 --- /dev/null +++ b/models/pull/review_state.go @@ -0,0 +1,139 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. +package pull + +import ( + "context" + "fmt" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/timeutil" +) + +// ViewedState stores for a file in which state it is currently viewed +type ViewedState uint8 + +const ( + Unviewed ViewedState = iota + HasChanged // cannot be set from the UI/ API, only internally + Viewed +) + +func (viewedState ViewedState) String() string { + switch viewedState { + case Unviewed: + return "unviewed" + case HasChanged: + return "has-changed" + case Viewed: + return "viewed" + default: + return fmt.Sprintf("unknown(value=%d)", viewedState) + } +} + +// ReviewState stores for a user-PR-commit combination which files the user has already viewed +type ReviewState struct { + ID int64 `xorm:"pk autoincr"` + UserID int64 `xorm:"NOT NULL UNIQUE(pull_commit_user)"` + PullID int64 `xorm:"NOT NULL INDEX UNIQUE(pull_commit_user) DEFAULT 0"` // Which PR was the review on? + CommitSHA string `xorm:"NOT NULL VARCHAR(40) UNIQUE(pull_commit_user)"` // Which commit was the head commit for the review? 
+ UpdatedFiles map[string]ViewedState `xorm:"NOT NULL LONGTEXT JSON"` // Stores for each of the changed files of a PR whether they have been viewed, changed since last viewed, or not viewed + UpdatedUnix timeutil.TimeStamp `xorm:"updated"` // Is an accurate indicator of the order of commits as we do not expect it to be possible to make reviews on previous commits +} + +func init() { + db.RegisterModel(new(ReviewState)) +} + +// GetReviewState returns the ReviewState with all given values prefilled, whether or not it exists in the database. +// If the review didn't exist before in the database, it won't afterwards either. +// The returned boolean shows whether the review exists in the database +func GetReviewState(ctx context.Context, userID, pullID int64, commitSHA string) (*ReviewState, bool, error) { + review := &ReviewState{UserID: userID, PullID: pullID, CommitSHA: commitSHA} + has, err := db.GetEngine(ctx).Get(review) + return review, has, err +} + +// UpdateReviewState updates the given review inside the database, regardless of whether it existed before or not +// The given map of files with their viewed state will be merged with the previous review, if present +func UpdateReviewState(ctx context.Context, userID, pullID int64, commitSHA string, updatedFiles map[string]ViewedState) error { + log.Trace("Updating review for user %d, repo %d, commit %s with the updated files %v.", userID, pullID, commitSHA, updatedFiles) + + review, exists, err := GetReviewState(ctx, userID, pullID, commitSHA) + if err != nil { + return err + } + + if exists { + review.UpdatedFiles = mergeFiles(review.UpdatedFiles, updatedFiles) + } else if previousReview, err := getNewestReviewStateApartFrom(ctx, userID, pullID, commitSHA); err != nil { + return err + + // Overwrite the viewed files of the previous review if present + } else if previousReview != nil { + review.UpdatedFiles = mergeFiles(previousReview.UpdatedFiles, updatedFiles) + } else { + review.UpdatedFiles = updatedFiles + } + + // Insert or Update review + engine := db.GetEngine(ctx) + if !exists { + log.Trace("Inserting new review for user %d, repo %d, commit %s with the updated files %v.", userID, pullID, commitSHA, review.UpdatedFiles) + _, err := engine.Insert(review) + return err + } + log.Trace("Updating already existing review with ID %d (user %d, repo %d, commit %s) with the updated files %v.", review.ID, userID, pullID, commitSHA, review.UpdatedFiles) + _, err = engine.ID(review.ID).Update(&ReviewState{UpdatedFiles: review.UpdatedFiles}) + return err +} + +// mergeFiles merges the given maps of files with their viewing state into one map. +// Values from oldFiles will be overridden with values from newFiles +func mergeFiles(oldFiles, newFiles map[string]ViewedState) map[string]ViewedState { + if oldFiles == nil { + return newFiles + } else if newFiles == nil { + return oldFiles + } + + for file, viewed := range newFiles { + oldFiles[file] = viewed + } + return oldFiles +} + +// GetNewestReviewState gets the newest review of the current user in the current PR. +// The returned PR Review will be nil if the user has not yet reviewed this PR. 
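`ReviewState` keeps a per-user, per-commit map of viewed files, and `UpdateReviewState` merges new flags into the previous review so state carries over between pushes. A sketch of recording flags and reading them back, using only the functions in this hunk; the file paths are invented for illustration.

```go
package example

import (
	"context"
	"fmt"

	pull_model "code.gitea.io/gitea/models/pull"
)

// markFilesViewed sketches the review-state helpers above: store viewed flags
// for the current head commit, then read back the newest state for the user.
func markFilesViewed(ctx context.Context, userID, pullID int64, headCommitSHA string) error {
	updated := map[string]pull_model.ViewedState{
		"models/pull/review_state.go": pull_model.Viewed,
		"models/pull/automerge.go":    pull_model.Unviewed,
	}
	if err := pull_model.UpdateReviewState(ctx, userID, pullID, headCommitSHA, updated); err != nil {
		return err
	}

	state, err := pull_model.GetNewestReviewState(ctx, userID, pullID)
	if err != nil || state == nil {
		return err
	}
	fmt.Printf("%d files tracked for commit %s\n", len(state.UpdatedFiles), state.CommitSHA)
	return nil
}
```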
+func GetNewestReviewState(ctx context.Context, userID, pullID int64) (*ReviewState, error) { + var review ReviewState + has, err := db.GetEngine(ctx).Where("user_id = ?", userID).And("pull_id = ?", pullID).OrderBy("updated_unix DESC").Get(&review) + if err != nil || !has { + return nil, err + } + return &review, err +} + +// getNewestReviewStateApartFrom is like GetNewestReview, except that the second newest review will be returned if the newest review points at the given commit. +// The returned PR Review will be nil if the user has not yet reviewed this PR. +func getNewestReviewStateApartFrom(ctx context.Context, userID, pullID int64, commitSHA string) (*ReviewState, error) { + var reviews []ReviewState + err := db.GetEngine(ctx).Where("user_id = ?", userID).And("pull_id = ?", pullID).OrderBy("updated_unix DESC").Limit(2).Find(&reviews) + // It would also be possible to use ".And("commit_sha != ?", commitSHA)" instead of the error handling below + // However, benchmarks show drastically improved performance by not doing that + + // Error cases in which no review should be returned + if err != nil || len(reviews) == 0 || (len(reviews) == 1 && reviews[0].CommitSHA == commitSHA) { + return nil, err + + // The first review points at the commit to exclude, hence skip to the second review + } else if len(reviews) >= 2 && reviews[0].CommitSHA == commitSHA { + return &reviews[1], nil + } + + // As we have no error cases left, the result must be the first element in the list + return &reviews[0], nil +} diff --git a/models/pull_list.go b/models/pull_list.go index 9d4d428928..ca09e28a93 100644 --- a/models/pull_list.go +++ b/models/pull_list.go @@ -5,6 +5,7 @@ package models import ( + "context" "fmt" "code.gitea.io/gitea/models/db" @@ -61,8 +62,8 @@ func GetUnmergedPullRequestsByHeadInfo(repoID int64, branch string) ([]*PullRequ // HasUnmergedPullRequestsByHeadInfo checks if there are open and not merged pull request // by given head information (repo and branch) -func HasUnmergedPullRequestsByHeadInfo(repoID int64, branch string) (bool, error) { - return db.GetEngine(db.DefaultContext). +func HasUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch string) (bool, error) { + return db.GetEngine(ctx). Where("head_repo_id = ? AND head_branch = ? AND has_merged = ? AND issue.is_closed = ? AND flow = ?", repoID, branch, false, false, PullRequestFlowGithub). Join("INNER", "issue", "issue.id = pull_request.issue_id"). @@ -158,13 +159,14 @@ func (prs PullRequestList) LoadAttributes() error { return prs.loadAttributes(db.GetEngine(db.DefaultContext)) } -func (prs PullRequestList) invalidateCodeComments(e db.Engine, doer *user_model.User, repo *git.Repository, branch string) error { +// InvalidateCodeComments will lookup the prs for code comments which got invalidated by change +func (prs PullRequestList) InvalidateCodeComments(ctx context.Context, doer *user_model.User, repo *git.Repository, branch string) error { if len(prs) == 0 { return nil } issueIDs := prs.getIssueIDs() var codeComments []*Comment - if err := e. + if err := db.GetEngine(ctx). Where("type = ? and invalidated = ?", CommentTypeCode, false). In("issue_id", issueIDs). Find(&codeComments); err != nil { @@ -177,8 +179,3 @@ func (prs PullRequestList) invalidateCodeComments(e db.Engine, doer *user_model. 
} return nil } - -// InvalidateCodeComments will lookup the prs for code comments which got invalidated by change -func (prs PullRequestList) InvalidateCodeComments(doer *user_model.User, repo *git.Repository, branch string) error { - return prs.invalidateCodeComments(db.GetEngine(db.DefaultContext), doer, repo, branch) -} diff --git a/models/pull_test.go b/models/pull_test.go index 2567984cc1..6119bca692 100644 --- a/models/pull_test.go +++ b/models/pull_test.go @@ -8,10 +8,7 @@ import ( "testing" "code.gitea.io/gitea/models/db" - repo_model "code.gitea.io/gitea/models/repo" - "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/models/unittest" - user_model "code.gitea.io/gitea/models/user" "github.com/stretchr/testify/assert" ) @@ -110,11 +107,11 @@ func TestGetUnmergedPullRequest(t *testing.T) { func TestHasUnmergedPullRequestsByHeadInfo(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - exist, err := HasUnmergedPullRequestsByHeadInfo(1, "branch2") + exist, err := HasUnmergedPullRequestsByHeadInfo(db.DefaultContext, 1, "branch2") assert.NoError(t, err) assert.Equal(t, true, exist) - exist, err = HasUnmergedPullRequestsByHeadInfo(1, "not_exist_branch") + exist, err = HasUnmergedPullRequestsByHeadInfo(db.DefaultContext, 1, "not_exist_branch") assert.NoError(t, err) assert.Equal(t, false, exist) } @@ -159,12 +156,12 @@ func TestGetPullRequestByIndex(t *testing.T) { func TestGetPullRequestByID(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - pr, err := GetPullRequestByID(1) + pr, err := GetPullRequestByID(db.DefaultContext, 1) assert.NoError(t, err) assert.Equal(t, int64(1), pr.ID) assert.Equal(t, int64(2), pr.IssueID) - _, err = GetPullRequestByID(9223372036854775807) + _, err = GetPullRequestByID(db.DefaultContext, 9223372036854775807) assert.Error(t, err) assert.True(t, IsErrPullRequestNotExist(err)) } @@ -256,36 +253,3 @@ func TestPullRequest_GetWorkInProgressPrefixWorkInProgress(t *testing.T) { pr.Issue.Title = "[wip] " + original assert.Equal(t, "[wip]", pr.GetWorkInProgressPrefix()) } - -func TestPullRequest_GetDefaultMergeMessage_InternalTracker(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - pr := unittest.AssertExistsAndLoadBean(t, &PullRequest{ID: 2}).(*PullRequest) - - assert.Equal(t, "Merge pull request 'issue3' (#3) from branch2 into master", pr.GetDefaultMergeMessage()) - - pr.BaseRepoID = 1 - pr.HeadRepoID = 2 - assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo1:branch2 into master", pr.GetDefaultMergeMessage()) -} - -func TestPullRequest_GetDefaultMergeMessage_ExternalTracker(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - externalTracker := repo_model.RepoUnit{ - Type: unit.TypeExternalTracker, - Config: &repo_model.ExternalTrackerConfig{ - ExternalTrackerFormat: "https://someurl.com/{user}/{repo}/{issue}", - }, - } - baseRepo := &repo_model.Repository{Name: "testRepo", ID: 1} - baseRepo.Owner = &user_model.User{Name: "testOwner"} - baseRepo.Units = []*repo_model.RepoUnit{&externalTracker} - - pr := unittest.AssertExistsAndLoadBean(t, &PullRequest{ID: 2, BaseRepo: baseRepo}).(*PullRequest) - - assert.Equal(t, "Merge pull request 'issue3' (!3) from branch2 into master", pr.GetDefaultMergeMessage()) - - pr.BaseRepoID = 1 - pr.HeadRepoID = 2 - assert.Equal(t, "Merge pull request 'issue3' (!3) from user2/repo1:branch2 into master", pr.GetDefaultMergeMessage()) -} diff --git a/models/repo.go b/models/repo.go index 53199bcca3..fb7bbba1e1 100644 --- a/models/repo.go +++ 
b/models/repo.go @@ -10,7 +10,6 @@ import ( "fmt" "os" "path" - "sort" "strconv" "strings" "unicode/utf8" @@ -20,14 +19,16 @@ import ( admin_model "code.gitea.io/gitea/models/admin" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" + project_model "code.gitea.io/gitea/models/project" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/models/webhook" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/options" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" api "code.gitea.io/gitea/modules/structs" @@ -36,94 +37,12 @@ import ( "xorm.io/builder" ) -var ( - // Gitignores contains the gitiginore files - Gitignores []string - - // Licenses contains the license files - Licenses []string - - // Readmes contains the readme files - Readmes []string - - // LabelTemplates contains the label template files and the list of labels for each file - LabelTemplates map[string]string - - // ItemsPerPage maximum items per page in forks, watchers and stars of a repo - ItemsPerPage = 40 -) - -// loadRepoConfig loads the repository config -func loadRepoConfig() { - // Load .gitignore and license files and readme templates. - types := []string{"gitignore", "license", "readme", "label"} - typeFiles := make([][]string, 4) - for i, t := range types { - files, err := options.Dir(t) - if err != nil { - log.Fatal("Failed to get %s files: %v", t, err) - } - customPath := path.Join(setting.CustomPath, "options", t) - isDir, err := util.IsDir(customPath) - if err != nil { - log.Fatal("Failed to get custom %s files: %v", t, err) - } - if isDir { - customFiles, err := util.StatDir(customPath) - if err != nil { - log.Fatal("Failed to get custom %s files: %v", t, err) - } - - for _, f := range customFiles { - if !util.IsStringInSlice(f, files, true) { - files = append(files, f) - } - } - } - typeFiles[i] = files - } - - Gitignores = typeFiles[0] - Licenses = typeFiles[1] - Readmes = typeFiles[2] - LabelTemplatesFiles := typeFiles[3] - sort.Strings(Gitignores) - sort.Strings(Licenses) - sort.Strings(Readmes) - sort.Strings(LabelTemplatesFiles) - - // Load label templates - LabelTemplates = make(map[string]string) - for _, templateFile := range LabelTemplatesFiles { - labels, err := LoadLabelsFormatted(templateFile) - if err != nil { - log.Error("Failed to load labels: %v", err) - } - LabelTemplates[templateFile] = labels - } - - // Filter out invalid names and promote preferred licenses. 
- sortedLicenses := make([]string, 0, len(Licenses)) - for _, name := range setting.Repository.PreferredLicenses { - if util.IsStringInSlice(name, Licenses, true) { - sortedLicenses = append(sortedLicenses, name) - } - } - for _, name := range Licenses { - if !util.IsStringInSlice(name, setting.Repository.PreferredLicenses, true) { - sortedLicenses = append(sortedLicenses, name) - } - } - Licenses = sortedLicenses -} +// ItemsPerPage maximum items per page in forks, watchers and stars of a repo +var ItemsPerPage = 40 // NewRepoContext creates a new repository context func NewRepoContext() { - loadRepoConfig() unit.LoadUnitConfig() - - admin_model.RemoveAllWithNotice(db.DefaultContext, "Clean up temporary repository uploads", setting.Repository.Upload.TempPath) - admin_model.RemoveAllWithNotice(db.DefaultContext, "Clean up temporary repositories", LocalCopyPath()) } // CheckRepoUnitUser check whether user could visit the unit of this repository @@ -132,12 +51,12 @@ func CheckRepoUnitUser(repo *repo_model.Repository, user *user_model.User, unitT } func checkRepoUnitUser(ctx context.Context, repo *repo_model.Repository, user *user_model.User, unitType unit.Type) bool { - if user.IsAdmin { + if user != nil && user.IsAdmin { return true } - perm, err := getUserRepoPermission(ctx, repo, user) + perm, err := GetUserRepoPermission(ctx, repo, user) if err != nil { - log.Error("getUserRepoPermission(): %v", err) + log.Error("GetUserRepoPermission(): %v", err) return false } @@ -218,50 +137,44 @@ func getReviewers(ctx context.Context, repo *repo_model.Repository, doerID, post return nil, err } - var users []*user_model.User - e := db.GetEngine(ctx) + cond := builder.And(builder.Neq{"`user`.id": posterID}) if repo.IsPrivate || repo.Owner.Visibility == api.VisibleTypePrivate { // This a private repository: // Anyone who can read the repository is a requestable reviewer - if err := e. - SQL("SELECT * FROM `user` WHERE id in ("+ - "SELECT user_id FROM `access` WHERE repo_id = ? AND mode >= ? AND user_id != ?"+ // private org repos - ") ORDER BY name", - repo.ID, perm.AccessModeRead, - posterID). - Find(&users); err != nil { - return nil, err - } + + cond = cond.And(builder.In("`user`.id", + builder.Select("user_id").From("access").Where( + builder.Eq{"repo_id": repo.ID}. + And(builder.Gte{"mode": perm.AccessModeRead}), + ), + )) if repo.Owner.Type == user_model.UserTypeIndividual && repo.Owner.ID != posterID { // as private *user* repos don't generate an entry in the `access` table, // the owner of a private repo needs to be explicitly added. - users = append(users, repo.Owner) + cond = cond.Or(builder.Eq{"`user`.id": repo.Owner.ID}) } - return users, nil + } else { + // This is a "public" repository: + // Any user that has read access, is a watcher or organization member can be requested to review + cond = cond.And(builder.And(builder.In("`user`.id", + builder.Select("user_id").From("access"). + Where(builder.Eq{"repo_id": repo.ID}. + And(builder.Gte{"mode": perm.AccessModeRead})), + ).Or(builder.In("`user`.id", + builder.Select("user_id").From("watch"). + Where(builder.Eq{"repo_id": repo.ID}. + And(builder.In("mode", repo_model.WatchModeNormal, repo_model.WatchModeAuto))), + ).Or(builder.In("`user`.id", + builder.Select("uid").From("org_user"). + Where(builder.Eq{"org_id": repo.OwnerID}), + ))))) } - // This is a "public" repository: - // Any user that has read access, is a watcher or organization member can be requested to review - if err := e. 
- SQL("SELECT * FROM `user` WHERE id IN ( "+ - "SELECT user_id FROM `access` WHERE repo_id = ? AND mode >= ? "+ - "UNION "+ - "SELECT user_id FROM `watch` WHERE repo_id = ? AND mode IN (?, ?) "+ - "UNION "+ - "SELECT uid AS user_id FROM `org_user` WHERE org_id = ? "+ - ") AND id != ? ORDER BY name", - repo.ID, perm.AccessModeRead, - repo.ID, repo_model.WatchModeNormal, repo_model.WatchModeAuto, - repo.OwnerID, - posterID). - Find(&users); err != nil { - return nil, err - } - - return users, nil + users := make([]*user_model.User, 0, 8) + return users, db.GetEngine(ctx).Where(cond).OrderBy("name").Find(&users) } // GetReviewers get all users can be requested to review: @@ -274,7 +187,7 @@ func GetReviewers(repo *repo_model.Repository, doerID, posterID int64) ([]*user_ } // GetReviewerTeams get all teams can be requested to review -func GetReviewerTeams(repo *repo_model.Repository) ([]*Team, error) { +func GetReviewerTeams(repo *repo_model.Repository) ([]*organization.Team, error) { if err := repo.GetOwner(db.DefaultContext); err != nil { return nil, err } @@ -282,7 +195,7 @@ func GetReviewerTeams(repo *repo_model.Repository) ([]*Team, error) { return nil, nil } - teams, err := GetTeamsWithAccessToRepo(repo.OwnerID, repo.ID, perm.AccessModeRead) + teams, err := organization.GetTeamsWithAccessToRepo(db.DefaultContext, repo.OwnerID, repo.ID, perm.AccessModeRead) if err != nil { return nil, err } @@ -319,7 +232,7 @@ func CanUserForkRepo(user *user_model.User, repo *repo_model.Repository) (bool, if repo.OwnerID != user.ID && !repo_model.HasForkedRepo(user.ID, repo.ID) { return true, nil } - ownedOrgs, err := GetOrgsCanCreateRepoByUserID(user.ID) + ownedOrgs, err := organization.GetOrgsCanCreateRepoByUserID(user.ID) if err != nil { return false, err } @@ -332,8 +245,8 @@ func CanUserForkRepo(user *user_model.User, repo *repo_model.Repository) (bool, } // FindUserOrgForks returns the forked repositories for one user from a repository -func FindUserOrgForks(repoID, userID int64) ([]*repo_model.Repository, error) { - var cond builder.Cond = builder.And( +func FindUserOrgForks(ctx context.Context, repoID, userID int64) ([]*repo_model.Repository, error) { + cond := builder.And( builder.Eq{"fork_id": repoID}, builder.In("owner_id", builder.Select("org_id"). 
@@ -343,23 +256,23 @@ func FindUserOrgForks(repoID, userID int64) ([]*repo_model.Repository, error) { ) var repos []*repo_model.Repository - return repos, db.GetEngine(db.DefaultContext).Table("repository").Where(cond).Find(&repos) + return repos, db.GetEngine(ctx).Table("repository").Where(cond).Find(&repos) } // GetForksByUserAndOrgs return forked repos of the user and owned orgs -func GetForksByUserAndOrgs(user *user_model.User, repo *repo_model.Repository) ([]*repo_model.Repository, error) { +func GetForksByUserAndOrgs(ctx context.Context, user *user_model.User, repo *repo_model.Repository) ([]*repo_model.Repository, error) { var repoList []*repo_model.Repository if user == nil { return repoList, nil } - forkedRepo, err := repo_model.GetUserFork(repo.ID, user.ID) + forkedRepo, err := repo_model.GetUserFork(ctx, repo.ID, user.ID) if err != nil { return repoList, err } if forkedRepo != nil { repoList = append(repoList, forkedRepo) } - orgForks, err := FindUserOrgForks(repo.ID, user.ID) + orgForks, err := FindUserOrgForks(ctx, repo.ID, user.ID) if err != nil { return nil, err } @@ -378,7 +291,7 @@ func CanUserDelete(repo *repo_model.Repository, user *user_model.User) (bool, er } if repo.Owner.IsOrganization() { - isOwner, err := OrgFromUser(repo.Owner).IsOwnedBy(user.ID) + isOwner, err := organization.OrgFromUser(repo.Owner).IsOwnedBy(user.ID) if err != nil { return false, err } else if isOwner { @@ -446,35 +359,6 @@ type CreateRepoOptions struct { MirrorInterval string } -// GetRepoInitFile returns repository init files -func GetRepoInitFile(tp, name string) ([]byte, error) { - cleanedName := strings.TrimLeft(path.Clean("/"+name), "/") - relPath := path.Join("options", tp, cleanedName) - - // Use custom file when available. - customPath := path.Join(setting.CustomPath, relPath) - isFile, err := util.IsFile(customPath) - if err != nil { - log.Error("Unable to check if %s is a file. Error: %v", customPath, err) - } - if isFile { - return os.ReadFile(customPath) - } - - switch tp { - case "readme": - return options.Readme(cleanedName) - case "gitignore": - return options.Gitignore(cleanedName) - case "license": - return options.License(cleanedName) - case "label": - return options.Labels(cleanedName) - default: - return []byte{}, fmt.Errorf("Invalid init file type") - } -} - // CreateRepository creates a repository for the user/organization. func CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository, overwriteOrAdopt bool) (err error) { if err = repo_model.IsUsableRepoName(repo.Name); err != nil { @@ -556,20 +440,20 @@ func CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_ // Give access to all members in teams with access to all repositories. 
if u.IsOrganization() { - teams, err := OrgFromUser(u).loadTeams(db.GetEngine(ctx)) + teams, err := organization.FindOrgTeams(ctx, u.ID) if err != nil { return fmt.Errorf("loadTeams: %v", err) } for _, t := range teams { if t.IncludesAllRepositories { - if err := t.addRepository(ctx, repo); err != nil { + if err := addRepository(ctx, t, repo); err != nil { return fmt.Errorf("addRepository: %v", err) } } } - if isAdmin, err := isUserRepoAdmin(db.GetEngine(ctx), repo, doer); err != nil { - return fmt.Errorf("isUserRepoAdmin: %v", err) + if isAdmin, err := IsUserRepoAdminCtx(ctx, repo, doer); err != nil { + return fmt.Errorf("IsUserRepoAdminCtx: %v", err) } else if !isAdmin { // Make creator repo admin if it wasn't assigned automatically if err = addCollaborator(ctx, repo, doer); err != nil { @@ -640,7 +524,8 @@ func DecrementRepoForkNum(ctx context.Context, repoID int64) error { return err } -func updateRepository(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) (err error) { +// UpdateRepositoryCtx updates a repository with db context +func UpdateRepositoryCtx(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) (err error) { repo.LowerName = strings.ToLower(repo.Name) if utf8.RuneCountInString(repo.Description) > 255 { @@ -692,7 +577,7 @@ func updateRepository(ctx context.Context, repo *repo_model.Repository, visibili } for i := range forkRepos { forkRepos[i].IsPrivate = repo.IsPrivate || repo.Owner.Visibility == api.VisibleTypePrivate - if err = updateRepository(ctx, forkRepos[i], true); err != nil { + if err = UpdateRepositoryCtx(ctx, forkRepos[i], true); err != nil { return fmt.Errorf("updateRepository[%d]: %v", forkRepos[i].ID, err) } } @@ -701,11 +586,6 @@ func updateRepository(ctx context.Context, repo *repo_model.Repository, visibili return nil } -// UpdateRepositoryCtx updates a repository with db context -func UpdateRepositoryCtx(ctx context.Context, repo *repo_model.Repository, visibilityChanged bool) error { - return updateRepository(ctx, repo, visibilityChanged) -} - // UpdateRepository updates a repository func UpdateRepository(repo *repo_model.Repository, visibilityChanged bool) (err error) { ctx, committer, err := db.TxContext() @@ -714,7 +594,7 @@ func UpdateRepository(repo *repo_model.Repository, visibilityChanged bool) (err } defer committer.Close() - if err = updateRepository(ctx, repo, visibilityChanged); err != nil { + if err = UpdateRepositoryCtx(ctx, repo, visibilityChanged); err != nil { return fmt.Errorf("updateRepository: %v", err) } @@ -774,14 +654,14 @@ func DeleteRepository(doer *user_model.User, uid, repoID int64) error { } if org.IsOrganization() { - teams, err := OrgFromUser(org).loadTeams(sess) + teams, err := organization.FindOrgTeams(ctx, org.ID) if err != nil { return err } for _, t := range teams { - if !t.hasRepository(sess, repoID) { + if !hasRepository(ctx, t, repoID) { continue - } else if err = t.removeRepository(ctx, repo, false); err != nil { + } else if err = removeRepository(ctx, t, repo, false); err != nil { return err } } @@ -812,12 +692,11 @@ func DeleteRepository(doer *user_model.User, uid, repoID int64) error { &webhook.HookTask{RepoID: repoID}, &LFSLock{RepoID: repoID}, &repo_model.LanguageStat{RepoID: repoID}, - &Milestone{RepoID: repoID}, + &issues_model.Milestone{RepoID: repoID}, &repo_model.Mirror{RepoID: repoID}, &Notification{RepoID: repoID}, &ProtectedBranch{RepoID: repoID}, &ProtectedTag{RepoID: repoID}, - &PullRequest{BaseRepoID: repoID}, &repo_model.PushMirror{RepoID: repoID}, 
&Release{RepoID: repoID}, &repo_model.RepoIndexerStatus{RepoID: repoID}, @@ -836,6 +715,11 @@ func DeleteRepository(doer *user_model.User, uid, repoID int64) error { return err } + // Delete Pulls and related objects + if err := deletePullsByBaseRepoID(sess, repoID); err != nil { + return err + } + // Delete Issues and related objects var attachmentPaths []string if attachmentPaths, err = deleteIssuesByRepoID(sess, repoID); err != nil { @@ -863,14 +747,14 @@ func DeleteRepository(doer *user_model.User, uid, repoID int64) error { } } - projects, _, err := getProjects(sess, ProjectSearchOptions{ + projects, _, err := project_model.GetProjectsCtx(ctx, project_model.SearchOptions{ RepoID: repoID, }) if err != nil { return fmt.Errorf("get projects: %v", err) } for i := range projects { - if err := deleteProjectByID(sess, projects[i].ID); err != nil { + if err := project_model.DeleteProjectByIDCtx(ctx, projects[i].ID); err != nil { return fmt.Errorf("delete project [%d]: %v", projects[i].ID, err) } } @@ -1059,10 +943,6 @@ func labelStatsCorrectNumClosedIssuesRepo(ctx context.Context, id int64) error { var milestoneStatsQueryNumIssues = "SELECT `milestone`.id FROM `milestone` WHERE `milestone`.num_closed_issues!=(SELECT COUNT(*) FROM `issue` WHERE `issue`.milestone_id=`milestone`.id AND `issue`.is_closed=?) OR `milestone`.num_issues!=(SELECT COUNT(*) FROM `issue` WHERE `issue`.milestone_id=`milestone`.id)" -func milestoneStatsCorrectNumIssues(ctx context.Context, id int64) error { - return updateMilestoneCounters(ctx, id) -} - func milestoneStatsCorrectNumIssuesRepo(ctx context.Context, id int64) error { e := db.GetEngine(ctx) results, err := e.Query(milestoneStatsQueryNumIssues+" AND `milestone`.repo_id = ?", true, id) @@ -1071,7 +951,7 @@ func milestoneStatsCorrectNumIssuesRepo(ctx context.Context, id int64) error { } for _, result := range results { id, _ := strconv.ParseInt(string(result["id"]), 10, 64) - err = milestoneStatsCorrectNumIssues(ctx, id) + err = issues_model.UpdateMilestoneCounters(ctx, id) if err != nil { return err } @@ -1163,7 +1043,7 @@ func CheckRepoStats(ctx context.Context) error { // Milestone.Num{,Closed}Issues { statsQuery(milestoneStatsQueryNumIssues, true), - milestoneStatsCorrectNumIssues, + issues_model.UpdateMilestoneCounters, "milestone count 'num_closed_issues' and 'num_issues'", }, // User.NumRepos @@ -1332,7 +1212,7 @@ func DeleteDeployKey(ctx context.Context, doer *user_model.User, id int64) error if err != nil { return fmt.Errorf("GetRepositoryByID: %v", err) } - has, err := isUserRepoAdmin(sess, repo, doer) + has, err := IsUserRepoAdminCtx(ctx, repo, doer) if err != nil { return fmt.Errorf("GetUserRepoPermission: %v", err) } else if !has { diff --git a/models/repo/fork.go b/models/repo/fork.go index 570a5b68ab..ae7882a02e 100644 --- a/models/repo/fork.go +++ b/models/repo/fork.go @@ -44,9 +44,9 @@ func HasForkedRepo(ownerID, repoID int64) bool { } // GetUserFork return user forked repository from this repository, if not forked return nil -func GetUserFork(repoID, userID int64) (*Repository, error) { +func GetUserFork(ctx context.Context, repoID, userID int64) (*Repository, error) { var forkedRepo Repository - has, err := db.GetEngine(db.DefaultContext).Where("fork_id = ?", repoID).And("owner_id = ?", userID).Get(&forkedRepo) + has, err := db.GetEngine(ctx).Where("fork_id = ?", repoID).And("owner_id = ?", userID).Get(&forkedRepo) if err != nil { return nil, err } diff --git a/models/repo/fork_test.go b/models/repo/fork_test.go index bf6b90b388..263aec4e3a 
100644 --- a/models/repo/fork_test.go +++ b/models/repo/fork_test.go @@ -7,6 +7,7 @@ package repo import ( "testing" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unittest" "github.com/stretchr/testify/assert" @@ -19,14 +20,14 @@ func TestGetUserFork(t *testing.T) { repo, err := GetRepositoryByID(10) assert.NoError(t, err) assert.NotNil(t, repo) - repo, err = GetUserFork(repo.ID, 13) + repo, err = GetUserFork(db.DefaultContext, repo.ID, 13) assert.NoError(t, err) assert.NotNil(t, repo) repo, err = GetRepositoryByID(9) assert.NoError(t, err) assert.NotNil(t, repo) - repo, err = GetUserFork(repo.ID, 13) + repo, err = GetUserFork(db.DefaultContext, repo.ID, 13) assert.NoError(t, err) assert.Nil(t, repo) } diff --git a/models/repo/main_test.go b/models/repo/main_test.go index fdd6c3f4d3..e375e8a9f3 100644 --- a/models/repo/main_test.go +++ b/models/repo/main_test.go @@ -12,17 +12,20 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", ".."), - "attachment.yml", - "repo_archiver.yml", - "repository.yml", - "repo_unit.yml", - "repo_indexer_status.yml", - "repo_redirect.yml", - "watch.yml", - "star.yml", - "topic.yml", - "repo_topic.yml", - "user.yml", - ) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{ + "attachment.yml", + "repo_archiver.yml", + "repository.yml", + "repo_unit.yml", + "repo_indexer_status.yml", + "repo_redirect.yml", + "watch.yml", + "star.yml", + "topic.yml", + "repo_topic.yml", + "user.yml", + }, + }) } diff --git a/models/repo/mirror.go b/models/repo/mirror.go index 8494331ff7..df4e320752 100644 --- a/models/repo/mirror.go +++ b/models/repo/mirror.go @@ -6,6 +6,7 @@ package repo import ( + "context" "errors" "fmt" "time" @@ -113,6 +114,13 @@ func UpdateMirror(m *Mirror) error { return updateMirror(db.GetEngine(db.DefaultContext), m) } +// TouchMirror updates the mirror updatedUnix +func TouchMirror(ctx context.Context, m *Mirror) error { + m.UpdatedUnix = timeutil.TimeStampNow() + _, err := db.GetEngine(ctx).ID(m.ID).Cols("updated_unix").Update(m) + return err +} + // DeleteMirrorByRepoID deletes a mirror by repoID func DeleteMirrorByRepoID(repoID int64) error { _, err := db.GetEngine(db.DefaultContext).Delete(&Mirror{RepoID: repoID}) diff --git a/models/repo/repo.go b/models/repo/repo.go index f5b93d6da5..8af6357bf3 100644 --- a/models/repo/repo.go +++ b/models/repo/repo.go @@ -26,7 +26,7 @@ import ( ) var ( - reservedRepoNames = []string{".", ".."} + reservedRepoNames = []string{".", "..", "-"} reservedRepoPatterns = []string{"*.git", "*.wiki", "*.rss", "*.atom"} ) @@ -302,8 +302,19 @@ func (repo *Repository) LoadUnits(ctx context.Context) (err error) { } // UnitEnabled if this repository has the given unit enabled -func (repo *Repository) UnitEnabled(tp unit.Type) bool { - if err := repo.LoadUnits(db.DefaultContext); err != nil { +func (repo *Repository) UnitEnabled(tp unit.Type) (result bool) { + if err := db.WithContext(func(ctx *db.Context) error { + result = repo.UnitEnabledCtx(ctx, tp) + return nil + }); err != nil { + log.Error("repo.UnitEnabled: %v", err) + } + return +} + +// UnitEnabled if this repository has the given unit enabled +func (repo *Repository) UnitEnabledCtx(ctx context.Context, tp unit.Type) bool { + if err := repo.LoadUnits(ctx); err != nil { log.Warn("Error loading repository (ID: %d) units: %s", repo.ID, err.Error()) } for _, unit := range repo.Units { @@ -533,7 +544,6 @@ func (repo *Repository) DescriptionHTML(ctx context.Context) 
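For illustration, a minimal sketch (not part of the diff) of the two unit-check variants added above: callers that already carry a context can use UnitEnabledCtx directly, while UnitEnabled wraps it in db.WithContext for legacy call sites; the function name is an assumption.

func exampleHasIssuesUnit(ctx context.Context, repo *Repository) bool {
	// Preferred when a context is already available; avoids the db.WithContext wrapper.
	return repo.UnitEnabledCtx(ctx, unit.TypeIssues)
}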
template.HTML { type CloneLink struct { SSH string HTTPS string - Git string } // ComposeHTTPSCloneURL returns HTTPS clone URL based on given owner and repository name. @@ -752,37 +762,3 @@ func countRepositories(userID int64, private bool) int64 { func CountRepositories(private bool) int64 { return countRepositories(-1, private) } - -// CountUserRepositories returns number of repositories user owns. -// Argument private only takes effect when it is false, -// set it true to count all repositories. -func CountUserRepositories(userID int64, private bool) int64 { - return countRepositories(userID, private) -} - -func getRepositoryCount(e db.Engine, ownerID int64) (int64, error) { - return e.Count(&Repository{OwnerID: ownerID}) -} - -func getPublicRepositoryCount(e db.Engine, u *user_model.User) (int64, error) { - return e.Where("is_private = ?", false).Count(&Repository{OwnerID: u.ID}) -} - -func getPrivateRepositoryCount(e db.Engine, u *user_model.User) (int64, error) { - return e.Where("is_private = ?", true).Count(&Repository{OwnerID: u.ID}) -} - -// GetRepositoryCount returns the total number of repositories of user. -func GetRepositoryCount(ctx context.Context, ownerID int64) (int64, error) { - return getRepositoryCount(db.GetEngine(ctx), ownerID) -} - -// GetPublicRepositoryCount returns the total number of public repositories of user. -func GetPublicRepositoryCount(u *user_model.User) (int64, error) { - return getPublicRepositoryCount(db.GetEngine(db.DefaultContext), u) -} - -// GetPrivateRepositoryCount returns the total number of private repositories of user. -func GetPrivateRepositoryCount(u *user_model.User) (int64, error) { - return getPrivateRepositoryCount(db.GetEngine(db.DefaultContext), u) -} diff --git a/models/repo/repo_unit.go b/models/repo/repo_unit.go index 37f1c70545..0f6b41933d 100644 --- a/models/repo/repo_unit.go +++ b/models/repo/repo_unit.go @@ -173,8 +173,6 @@ func (r *RepoUnit) BeforeSet(colName string, val xorm.Cell) { switch colName { case "type": switch unit.Type(db.Cell2Int64(val)) { - case unit.TypeCode, unit.TypeReleases, unit.TypeWiki, unit.TypeProjects: - r.Config = new(UnitConfig) case unit.TypeExternalWiki: r.Config = new(ExternalWikiConfig) case unit.TypeExternalTracker: @@ -183,8 +181,10 @@ func (r *RepoUnit) BeforeSet(colName string, val xorm.Cell) { r.Config = new(PullRequestsConfig) case unit.TypeIssues: r.Config = new(IssuesConfig) + case unit.TypeCode, unit.TypeReleases, unit.TypeWiki, unit.TypeProjects, unit.TypePackages: + fallthrough default: - panic(fmt.Sprintf("unrecognized repo unit type: %v", *val)) + r.Config = new(UnitConfig) } } } diff --git a/models/repo/user_repo.go b/models/repo/user_repo.go new file mode 100644 index 0000000000..18a04f7267 --- /dev/null +++ b/models/repo/user_repo.go @@ -0,0 +1,87 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package repo + +import ( + "context" + + "code.gitea.io/gitea/models/db" + user_model "code.gitea.io/gitea/models/user" +) + +// GetStarredRepos returns the repos starred by a particular user +func GetStarredRepos(userID int64, private bool, listOptions db.ListOptions) ([]*Repository, error) { + sess := db.GetEngine(db.DefaultContext).Where("star.uid=?", userID). 
+ Join("LEFT", "star", "`repository`.id=`star`.repo_id") + if !private { + sess = sess.And("is_private=?", false) + } + + if listOptions.Page != 0 { + sess = db.SetSessionPagination(sess, &listOptions) + + repos := make([]*Repository, 0, listOptions.PageSize) + return repos, sess.Find(&repos) + } + + repos := make([]*Repository, 0, 10) + return repos, sess.Find(&repos) +} + +// GetWatchedRepos returns the repos watched by a particular user +func GetWatchedRepos(userID int64, private bool, listOptions db.ListOptions) ([]*Repository, int64, error) { + sess := db.GetEngine(db.DefaultContext).Where("watch.user_id=?", userID). + And("`watch`.mode<>?", WatchModeDont). + Join("LEFT", "watch", "`repository`.id=`watch`.repo_id") + if !private { + sess = sess.And("is_private=?", false) + } + + if listOptions.Page != 0 { + sess = db.SetSessionPagination(sess, &listOptions) + + repos := make([]*Repository, 0, listOptions.PageSize) + total, err := sess.FindAndCount(&repos) + return repos, total, err + } + + repos := make([]*Repository, 0, 10) + total, err := sess.FindAndCount(&repos) + return repos, total, err +} + +// CountUserRepositories returns number of repositories user owns. +// Argument private only takes effect when it is false, +// set it true to count all repositories. +func CountUserRepositories(userID int64, private bool) int64 { + return countRepositories(userID, private) +} + +func getRepositoryCount(e db.Engine, ownerID int64) (int64, error) { + return e.Count(&Repository{OwnerID: ownerID}) +} + +func getPublicRepositoryCount(e db.Engine, u *user_model.User) (int64, error) { + return e.Where("is_private = ?", false).Count(&Repository{OwnerID: u.ID}) +} + +func getPrivateRepositoryCount(e db.Engine, u *user_model.User) (int64, error) { + return e.Where("is_private = ?", true).Count(&Repository{OwnerID: u.ID}) +} + +// GetRepositoryCount returns the total number of repositories of user. +func GetRepositoryCount(ctx context.Context, ownerID int64) (int64, error) { + return getRepositoryCount(db.GetEngine(ctx), ownerID) +} + +// GetPublicRepositoryCount returns the total number of public repositories of user. +func GetPublicRepositoryCount(u *user_model.User) (int64, error) { + return getPublicRepositoryCount(db.GetEngine(db.DefaultContext), u) +} + +// GetPrivateRepositoryCount returns the total number of private repositories of user. 
+func GetPrivateRepositoryCount(u *user_model.User) (int64, error) { + return getPrivateRepositoryCount(db.GetEngine(db.DefaultContext), u) +} diff --git a/models/repo_activity.go b/models/repo_activity.go index 7475be2b11..06710ff1ac 100644 --- a/models/repo_activity.go +++ b/models/repo_activity.go @@ -127,7 +127,7 @@ func GetActivityStatsTopAuthors(ctx context.Context, repo *repo_model.Repository user.Commits += v.Commits } } - v := make([]*ActivityAuthorData, 0) + v := make([]*ActivityAuthorData, 0, len(users)) for _, u := range users { v = append(v, u) } diff --git a/models/repo_collaboration.go b/models/repo_collaboration.go index 3aca1023e6..d94b61b449 100644 --- a/models/repo_collaboration.go +++ b/models/repo_collaboration.go @@ -10,6 +10,7 @@ import ( "fmt" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -259,20 +260,6 @@ func reconsiderWatches(ctx context.Context, repo *repo_model.Repository, uid int return removeIssueWatchersByRepoID(db.GetEngine(ctx), uid, repo.ID) } -func getRepoTeams(e db.Engine, repo *repo_model.Repository) (teams []*Team, err error) { - return teams, e. - Join("INNER", "team_repo", "team_repo.team_id = team.id"). - Where("team.org_id = ?", repo.OwnerID). - And("team_repo.repo_id=?", repo.ID). - OrderBy("CASE WHEN name LIKE '" + ownerTeamName + "' THEN '' ELSE name END"). - Find(&teams) -} - -// GetRepoTeams gets the list of teams that has access to the repository -func GetRepoTeams(repo *repo_model.Repository) ([]*Team, error) { - return getRepoTeams(db.GetEngine(db.DefaultContext), repo) -} - // IsOwnerMemberCollaborator checks if a provided user is the owner, a collaborator or a member of a team in a repository func IsOwnerMemberCollaborator(repo *repo_model.Repository, userID int64) (bool, error) { if repo.OwnerID == userID { @@ -282,7 +269,7 @@ func IsOwnerMemberCollaborator(repo *repo_model.Repository, userID int64) (bool, Join("INNER", "team_unit", "team_unit.team_id = team_user.team_id"). Where("team_repo.repo_id = ?", repo.ID). And("team_unit.`type` = ?", unit.TypeCode). 
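For illustration, a minimal sketch (not part of the diff) of paging through a user's starred and watched repositories with the helpers moved into models/repo above; the function name and the page size are assumptions.

func exampleListUserRepos(userID int64) error {
	opts := db.ListOptions{Page: 1, PageSize: 20}
	// true also includes private repositories the user has starred.
	starred, err := GetStarredRepos(userID, true, opts)
	if err != nil {
		return err
	}
	// false restricts the result to public repositories; total is the overall count.
	watched, total, err := GetWatchedRepos(userID, false, opts)
	if err != nil {
		return err
	}
	log.Trace("page 1: %d starred, %d of %d watched", len(starred), len(watched), total)
	return nil
}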
- And("team_user.uid = ?", userID).Table("team_user").Exist(&TeamUser{}) + And("team_user.uid = ?", userID).Table("team_user").Exist(&organization.TeamUser{}) if err != nil { return false, err } diff --git a/models/repo_generate.go b/models/repo_generate.go index fc749f1120..7d6d262aab 100644 --- a/models/repo_generate.go +++ b/models/repo_generate.go @@ -20,15 +20,16 @@ import ( // GenerateRepoOptions contains the template units to generate type GenerateRepoOptions struct { - Name string - Description string - Private bool - GitContent bool - Topics bool - GitHooks bool - Webhooks bool - Avatar bool - IssueLabels bool + Name string + DefaultBranch string + Description string + Private bool + GitContent bool + Topics bool + GitHooks bool + Webhooks bool + Avatar bool + IssueLabels bool } // IsValid checks whether at least one option is chosen for generation @@ -109,7 +110,7 @@ func GenerateIssueLabels(ctx context.Context, templateRepo, generateRepo *repo_m Description: templateLabel.Description, Color: templateLabel.Color, } - if err := newLabel(db.GetEngine(ctx), generateLabel); err != nil { + if err := db.Insert(ctx, generateLabel); err != nil { return err } } diff --git a/models/repo_list.go b/models/repo_list.go index 36f57abcc5..35b2ab5bf8 100644 --- a/models/repo_list.go +++ b/models/repo_list.go @@ -13,6 +13,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/container" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" @@ -41,6 +42,15 @@ func (repos RepositoryList) Swap(i, j int) { repos[i], repos[j] = repos[j], repos[i] } +// FIXME: Remove in favor of maps.values when MIN_GO_VERSION >= 1.18 +func valuesRepository(m map[int64]*repo_model.Repository) []*repo_model.Repository { + values := make([]*repo_model.Repository, 0, len(m)) + for _, v := range m { + values = append(values, v) + } + return values +} + // RepositoryListOfMap make list from values of map func RepositoryListOfMap(repoMap map[int64]*repo_model.Repository) RepositoryList { return RepositoryList(valuesRepository(repoMap)) @@ -62,7 +72,7 @@ func (repos RepositoryList) loadAttributes(e db.Engine) error { users := make(map[int64]*user_model.User, len(set)) if err := e. Where("id > 0"). - In("id", keysInt64(set)). + In("id", container.KeysInt64(set)). 
Find(&users); err != nil { return fmt.Errorf("find users: %v", err) } diff --git a/models/repo_permission.go b/models/repo_permission.go index 4e5cbfd558..8ba6b86145 100644 --- a/models/repo_permission.go +++ b/models/repo_permission.go @@ -9,6 +9,7 @@ import ( "fmt" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" perm_model "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -102,6 +103,39 @@ func (p *Permission) CanWriteIssuesOrPulls(isPull bool) bool { return p.CanWrite(unit.TypeIssues) } +// CanWriteToBranch checks if the branch is writable by the user +func (p *Permission) CanWriteToBranch(user *user_model.User, branch string) bool { + if p.CanWrite(unit.TypeCode) { + return true + } + + if len(p.Units) < 1 { + return false + } + + prs, err := GetUnmergedPullRequestsByHeadInfo(p.Units[0].RepoID, branch) + if err != nil { + return false + } + + for _, pr := range prs { + if pr.AllowMaintainerEdit { + err = pr.LoadBaseRepo() + if err != nil { + continue + } + prPerm, err := GetUserRepoPermission(db.DefaultContext, pr.BaseRepo, user) + if err != nil { + continue + } + if prPerm.CanWrite(unit.TypeCode) { + return true + } + } + } + return false +} + // ColorFormat writes a colored string for these Permissions func (p *Permission) ColorFormat(s fmt.State) { noColor := log.ColorBytes(log.Reset) @@ -144,11 +178,7 @@ func (p *Permission) ColorFormat(s fmt.State) { } // GetUserRepoPermission returns the user permissions to the repository -func GetUserRepoPermission(repo *repo_model.Repository, user *user_model.User) (Permission, error) { - return getUserRepoPermission(db.DefaultContext, repo, user) -} - -func getUserRepoPermission(ctx context.Context, repo *repo_model.Repository, user *user_model.User) (perm Permission, err error) { +func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, user *user_model.User) (perm Permission, err error) { if log.IsTrace() { defer func() { if user == nil { @@ -163,6 +193,7 @@ func getUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use perm) }() } + // anonymous user visit private repo. // TODO: anonymous user visit public unit of private repo??? 
if user == nil && repo.IsPrivate { @@ -186,7 +217,7 @@ func getUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use // Prevent strangers from checking out public repo of private organization/users // Allow user if they are collaborator of a repo within a private user or a private organization but not a member of the organization itself - if !hasOrgOrUserVisible(e, repo.Owner, user) && !is { + if !organization.HasOrgOrUserVisible(ctx, repo.Owner, user) && !is { perm.AccessMode = perm_model.AccessModeNone return } @@ -232,7 +263,7 @@ func getUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use } // get units mode from teams - teams, err := getUserRepoTeams(e, repo.OwnerID, user.ID, repo.ID) + teams, err := organization.GetUserRepoTeams(ctx, repo.OwnerID, user.ID, repo.ID) if err != nil { return } @@ -249,7 +280,7 @@ func getUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use for _, u := range repo.Units { var found bool for _, team := range teams { - teamMode := team.unitAccessMode(e, u.Type) + teamMode := team.UnitAccessModeCtx(ctx, u.Type) if teamMode > perm_model.AccessModeNone { m := perm.UnitsMode[u.Type] if m < teamMode { @@ -300,10 +331,11 @@ func IsUserRealRepoAdmin(repo *repo_model.Repository, user *user_model.User) (bo // IsUserRepoAdmin return true if user has admin right of a repo func IsUserRepoAdmin(repo *repo_model.Repository, user *user_model.User) (bool, error) { - return isUserRepoAdmin(db.GetEngine(db.DefaultContext), repo, user) + return IsUserRepoAdminCtx(db.DefaultContext, repo, user) } -func isUserRepoAdmin(e db.Engine, repo *repo_model.Repository, user *user_model.User) (bool, error) { +// IsUserRepoAdminCtx return true if user has admin right of a repo +func IsUserRepoAdminCtx(ctx context.Context, repo *repo_model.Repository, user *user_model.User) (bool, error) { if user == nil || repo == nil { return false, nil } @@ -311,6 +343,7 @@ func isUserRepoAdmin(e db.Engine, repo *repo_model.Repository, user *user_model. return true, nil } + e := db.GetEngine(ctx) mode, err := accessLevel(e, user, repo) if err != nil { return false, err @@ -319,7 +352,7 @@ func isUserRepoAdmin(e db.Engine, repo *repo_model.Repository, user *user_model. return true, nil } - teams, err := getUserRepoTeams(e, repo.OwnerID, user.ID, repo.ID) + teams, err := organization.GetUserRepoTeams(ctx, repo.OwnerID, user.ID, repo.ID) if err != nil { return false, err } @@ -345,7 +378,7 @@ func AccessLevelUnit(user *user_model.User, repo *repo_model.Repository, unitTyp } func accessLevelUnit(ctx context.Context, user *user_model.User, repo *repo_model.Repository, unitType unit.Type) (perm_model.AccessMode, error) { - perm, err := getUserRepoPermission(ctx, repo, user) + perm, err := GetUserRepoPermission(ctx, repo, user) if err != nil { return perm_model.AccessModeNone, err } @@ -373,7 +406,7 @@ func canBeAssigned(ctx context.Context, user *user_model.User, repo *repo_model. 
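For illustration, a minimal sketch (not part of the diff) of how the now-exported GetUserRepoPermission and the new CanWriteToBranch helper might be combined by a caller in the models package; the function name and arguments are placeholders.

func exampleCanPushToBranch(ctx context.Context, repo *repo_model.Repository, user *user_model.User, branch string) (bool, error) {
	perm, err := GetUserRepoPermission(ctx, repo, user)
	if err != nil {
		return false, err
	}
	// True for code writers, and also for maintainers who may edit an open pull
	// request whose head is this branch (AllowMaintainerEdit).
	return perm.CanWriteToBranch(user, branch), nil
}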
if user.IsOrganization() { return false, fmt.Errorf("Organization can't be added as assignee [user_id: %d, repo_id: %d]", user.ID, repo.ID) } - perm, err := getUserRepoPermission(ctx, repo, user) + perm, err := GetUserRepoPermission(ctx, repo, user) if err != nil { return false, err } @@ -389,7 +422,7 @@ func hasAccess(ctx context.Context, userID int64, repo *repo_model.Repository) ( return false, err } } - perm, err := getUserRepoPermission(ctx, repo, user) + perm, err := GetUserRepoPermission(ctx, repo, user) if err != nil { return false, err } @@ -412,9 +445,9 @@ func GetRepoWriters(repo *repo_model.Repository) (_ []*user_model.User, err erro } // IsRepoReader returns true if user has explicit read access or higher to the repository. -func IsRepoReader(repo *repo_model.Repository, userID int64) (bool, error) { +func IsRepoReader(ctx context.Context, repo *repo_model.Repository, userID int64) (bool, error) { if repo.OwnerID == userID { return true, nil } - return db.GetEngine(db.DefaultContext).Where("repo_id = ? AND user_id = ? AND mode >= ?", repo.ID, userID, perm_model.AccessModeRead).Get(&Access{}) + return db.GetEngine(ctx).Where("repo_id = ? AND user_id = ? AND mode >= ?", repo.ID, userID, perm_model.AccessModeRead).Get(&Access{}) } diff --git a/models/repo_permission_test.go b/models/repo_permission_test.go index f2664d8101..7e22437f99 100644 --- a/models/repo_permission_test.go +++ b/models/repo_permission_test.go @@ -8,6 +8,7 @@ import ( "testing" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" perm_model "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -26,7 +27,7 @@ func TestRepoPermissionPublicNonOrgRepo(t *testing.T) { // plain user user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - perm, err := GetUserRepoPermission(repo, user) + perm, err := GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -35,7 +36,7 @@ func TestRepoPermissionPublicNonOrgRepo(t *testing.T) { // change to collaborator assert.NoError(t, AddCollaborator(repo, user)) - perm, err = GetUserRepoPermission(repo, user) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -44,7 +45,7 @@ func TestRepoPermissionPublicNonOrgRepo(t *testing.T) { // collaborator collaborator := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, collaborator) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, collaborator) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -53,7 +54,7 @@ func TestRepoPermissionPublicNonOrgRepo(t *testing.T) { // owner owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, owner) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, owner) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -62,7 +63,7 @@ func TestRepoPermissionPublicNonOrgRepo(t *testing.T) { // admin admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, admin) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, admin) assert.NoError(t, err) for _, 
unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -79,7 +80,7 @@ func TestRepoPermissionPrivateNonOrgRepo(t *testing.T) { // plain user user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4}).(*user_model.User) - perm, err := GetUserRepoPermission(repo, user) + perm, err := GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.False(t, perm.CanRead(unit.Type)) @@ -88,7 +89,7 @@ func TestRepoPermissionPrivateNonOrgRepo(t *testing.T) { // change to collaborator to default write access assert.NoError(t, AddCollaborator(repo, user)) - perm, err = GetUserRepoPermission(repo, user) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -96,7 +97,7 @@ func TestRepoPermissionPrivateNonOrgRepo(t *testing.T) { } assert.NoError(t, ChangeCollaborationAccessMode(repo, user.ID, perm_model.AccessModeRead)) - perm, err = GetUserRepoPermission(repo, user) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -105,7 +106,7 @@ func TestRepoPermissionPrivateNonOrgRepo(t *testing.T) { // owner owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, owner) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, owner) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -114,7 +115,7 @@ func TestRepoPermissionPrivateNonOrgRepo(t *testing.T) { // admin admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, admin) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, admin) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -131,7 +132,7 @@ func TestRepoPermissionPublicOrgRepo(t *testing.T) { // plain user user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}).(*user_model.User) - perm, err := GetUserRepoPermission(repo, user) + perm, err := GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -140,7 +141,7 @@ func TestRepoPermissionPublicOrgRepo(t *testing.T) { // change to collaborator to default write access assert.NoError(t, AddCollaborator(repo, user)) - perm, err = GetUserRepoPermission(repo, user) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -148,7 +149,7 @@ func TestRepoPermissionPublicOrgRepo(t *testing.T) { } assert.NoError(t, ChangeCollaborationAccessMode(repo, user.ID, perm_model.AccessModeRead)) - perm, err = GetUserRepoPermission(repo, user) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -157,7 +158,7 @@ func TestRepoPermissionPublicOrgRepo(t *testing.T) { // org member team owner owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, owner) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, owner) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ 
-166,7 +167,7 @@ func TestRepoPermissionPublicOrgRepo(t *testing.T) { // org member team tester member := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, member) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, member) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -176,7 +177,7 @@ func TestRepoPermissionPublicOrgRepo(t *testing.T) { // admin admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, admin) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, admin) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -193,7 +194,7 @@ func TestRepoPermissionPrivateOrgRepo(t *testing.T) { // plain user user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5}).(*user_model.User) - perm, err := GetUserRepoPermission(repo, user) + perm, err := GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.False(t, perm.CanRead(unit.Type)) @@ -202,7 +203,7 @@ func TestRepoPermissionPrivateOrgRepo(t *testing.T) { // change to collaborator to default write access assert.NoError(t, AddCollaborator(repo, user)) - perm, err = GetUserRepoPermission(repo, user) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -210,7 +211,7 @@ func TestRepoPermissionPrivateOrgRepo(t *testing.T) { } assert.NoError(t, ChangeCollaborationAccessMode(repo, user.ID, perm_model.AccessModeRead)) - perm, err = GetUserRepoPermission(repo, user) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, user) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -219,7 +220,7 @@ func TestRepoPermissionPrivateOrgRepo(t *testing.T) { // org member team owner owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 15}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, owner) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, owner) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -227,10 +228,10 @@ func TestRepoPermissionPrivateOrgRepo(t *testing.T) { } // update team information and then check permission - team := unittest.AssertExistsAndLoadBean(t, &Team{ID: 5}).(*Team) - err = UpdateTeamUnits(team, nil) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: 5}).(*organization.Team) + err = organization.UpdateTeamUnits(team, nil) assert.NoError(t, err) - perm, err = GetUserRepoPermission(repo, owner) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, owner) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) @@ -239,7 +240,7 @@ func TestRepoPermissionPrivateOrgRepo(t *testing.T) { // org member team tester tester := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, tester) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, tester) assert.NoError(t, err) assert.True(t, perm.CanWrite(unit.TypeIssues)) assert.False(t, perm.CanWrite(unit.TypeCode)) @@ -247,7 +248,7 @@ func TestRepoPermissionPrivateOrgRepo(t *testing.T) { // org member team reviewer reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 
20}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, reviewer) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, reviewer) assert.NoError(t, err) assert.False(t, perm.CanRead(unit.TypeIssues)) assert.False(t, perm.CanWrite(unit.TypeCode)) @@ -255,7 +256,7 @@ func TestRepoPermissionPrivateOrgRepo(t *testing.T) { // admin admin := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1}).(*user_model.User) - perm, err = GetUserRepoPermission(repo, admin) + perm, err = GetUserRepoPermission(db.DefaultContext, repo, admin) assert.NoError(t, err) for _, unit := range repo.Units { assert.True(t, perm.CanRead(unit.Type)) diff --git a/models/repo_transfer.go b/models/repo_transfer.go index f7d5e20990..f9a758a20b 100644 --- a/models/repo_transfer.go +++ b/models/repo_transfer.go @@ -10,6 +10,7 @@ import ( "os" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" @@ -26,7 +27,7 @@ type RepoTransfer struct { Recipient *user_model.User `xorm:"-"` RepoID int64 TeamIDs []int64 - Teams []*Team `xorm:"-"` + Teams []*organization.Team `xorm:"-"` CreatedUnix timeutil.TimeStamp `xorm:"INDEX NOT NULL created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX NOT NULL updated"` @@ -49,7 +50,7 @@ func (r *RepoTransfer) LoadAttributes() error { if r.Recipient.IsOrganization() && len(r.TeamIDs) != len(r.Teams) { for _, v := range r.TeamIDs { - team, err := GetTeamByID(v) + team, err := organization.GetTeamByID(v) if err != nil { return err } @@ -87,7 +88,7 @@ func (r *RepoTransfer) CanUserAcceptTransfer(u *user_model.User) bool { return r.RecipientID == u.ID } - allowed, err := CanCreateOrgRepo(r.RecipientID, u.ID) + allowed, err := organization.CanCreateOrgRepo(r.RecipientID, u.ID) if err != nil { log.Error("CanCreateOrgRepo: %v", err) return false @@ -152,7 +153,7 @@ func TestRepositoryReadyForTransfer(status repo_model.RepositoryStatus) error { // CreatePendingRepositoryTransfer transfer a repo from one owner to a new one. // it marks the repository transfer as "pending" -func CreatePendingRepositoryTransfer(doer, newOwner *user_model.User, repoID int64, teams []*Team) error { +func CreatePendingRepositoryTransfer(doer, newOwner *user_model.User, repoID int64, teams []*organization.Team) error { ctx, committer, err := db.TxContext() if err != nil { return err @@ -296,7 +297,7 @@ func TransferOwnership(doer *user_model.User, newOwnerName string, repo *repo_mo } if c.ID != newOwner.ID { - isMember, err := isOrganizationMember(sess, newOwner.ID, c.ID) + isMember, err := organization.IsOrganizationMember(ctx, newOwner.ID, c.ID) if err != nil { return fmt.Errorf("IsOrgMember: %v", err) } else if !isMember { @@ -312,19 +313,19 @@ func TransferOwnership(doer *user_model.User, newOwnerName string, repo *repo_mo // Remove old team-repository relations. 
if oldOwner.IsOrganization() { - if err := OrgFromUser(oldOwner).removeOrgRepo(sess, repo.ID); err != nil { + if err := organization.RemoveOrgRepo(ctx, oldOwner.ID, repo.ID); err != nil { return fmt.Errorf("removeOrgRepo: %v", err) } } if newOwner.IsOrganization() { - teams, err := OrgFromUser(newOwner).loadTeams(sess) + teams, err := organization.FindOrgTeams(ctx, newOwner.ID) if err != nil { return fmt.Errorf("LoadTeams: %v", err) } for _, t := range teams { if t.IncludesAllRepositories { - if err := t.addRepository(ctx, repo); err != nil { + if err := addRepository(ctx, t, repo); err != nil { return fmt.Errorf("addRepository: %v", err) } } diff --git a/models/review.go b/models/review.go index 22c47486a1..a9e29a10e0 100644 --- a/models/review.go +++ b/models/review.go @@ -10,6 +10,7 @@ import ( "strings" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -58,10 +59,10 @@ func (rt ReviewType) Icon() string { type Review struct { ID int64 `xorm:"pk autoincr"` Type ReviewType - Reviewer *user_model.User `xorm:"-"` - ReviewerID int64 `xorm:"index"` - ReviewerTeamID int64 `xorm:"NOT NULL DEFAULT 0"` - ReviewerTeam *Team `xorm:"-"` + Reviewer *user_model.User `xorm:"-"` + ReviewerID int64 `xorm:"index"` + ReviewerTeamID int64 `xorm:"NOT NULL DEFAULT 0"` + ReviewerTeam *organization.Team `xorm:"-"` OriginalAuthor string OriginalAuthorID int64 Issue *Issue `xorm:"-"` @@ -114,12 +115,12 @@ func (r *Review) loadReviewer(e db.Engine) (err error) { return } -func (r *Review) loadReviewerTeam(e db.Engine) (err error) { +func (r *Review) loadReviewerTeam(ctx context.Context) (err error) { if r.ReviewerTeamID == 0 || r.ReviewerTeam != nil { return } - r.ReviewerTeam, err = getTeamByID(e, r.ReviewerTeamID) + r.ReviewerTeam, err = organization.GetTeamByIDCtx(ctx, r.ReviewerTeamID) return } @@ -130,7 +131,7 @@ func (r *Review) LoadReviewer() error { // LoadReviewerTeam loads reviewer team func (r *Review) LoadReviewerTeam() error { - return r.loadReviewerTeam(db.GetEngine(db.DefaultContext)) + return r.loadReviewerTeam(db.DefaultContext) } // LoadAttributes loads all attributes except CodeComments @@ -145,7 +146,7 @@ func (r *Review) LoadAttributes(ctx context.Context) (err error) { if err = r.loadReviewer(e); err != nil { return } - if err = r.loadReviewerTeam(e); err != nil { + if err = r.loadReviewerTeam(ctx); err != nil { return } return @@ -221,7 +222,7 @@ type CreateReviewOptions struct { Type ReviewType Issue *Issue Reviewer *user_model.User - ReviewerTeam *Team + ReviewerTeam *organization.Team Official bool CommitID string Stale bool @@ -237,7 +238,7 @@ func isOfficialReviewer(ctx context.Context, issue *Issue, reviewers ...*user_mo if err != nil { return false, err } - if err = pr.loadProtectedBranch(ctx); err != nil { + if err = pr.LoadProtectedBranchCtx(ctx); err != nil { return false, err } if pr.ProtectedBranch == nil { @@ -255,16 +256,16 @@ func isOfficialReviewer(ctx context.Context, issue *Issue, reviewers ...*user_mo } // IsOfficialReviewerTeam check if reviewer in this team can make official reviews in issue (counts towards required approvals) -func IsOfficialReviewerTeam(issue *Issue, team *Team) (bool, error) { +func IsOfficialReviewerTeam(issue *Issue, team *organization.Team) (bool, error) { return isOfficialReviewerTeam(db.DefaultContext, issue, team) } -func isOfficialReviewerTeam(ctx context.Context, issue *Issue, team *Team) (bool, error) 
{ +func isOfficialReviewerTeam(ctx context.Context, issue *Issue, team *organization.Team) (bool, error) { pr, err := getPullRequestByIssueID(db.GetEngine(ctx), issue.ID) if err != nil { return false, err } - if err = pr.loadProtectedBranch(ctx); err != nil { + if err = pr.LoadProtectedBranchCtx(ctx); err != nil { return false, err } if pr.ProtectedBranch == nil { @@ -272,7 +273,7 @@ func isOfficialReviewerTeam(ctx context.Context, issue *Issue, team *Team) (bool } if !pr.ProtectedBranch.EnableApprovalsWhitelist { - return team.UnitAccessMode(unit.TypeCode) >= perm.AccessModeWrite, nil + return team.UnitAccessModeCtx(ctx, unit.TypeCode) >= perm.AccessModeWrite, nil } return base.Int64sContains(pr.ProtectedBranch.ApprovalsWhitelistTeamIDs, team.ID), nil @@ -426,7 +427,7 @@ func SubmitReview(doer *user_model.User, issue *Issue, reviewType ReviewType, co } } - comm, err := createComment(ctx, &CreateCommentOptions{ + comm, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Type: CommentTypeReview, Doer: doer, Content: review.Content, @@ -447,7 +448,7 @@ func SubmitReview(doer *user_model.User, issue *Issue, reviewType ReviewType, co } for _, teamReviewRequest := range teamReviewRequests { - ok, err := isTeamMember(sess, issue.Repo.OwnerID, teamReviewRequest.ReviewerTeamID, doer.ID) + ok, err := organization.IsTeamMember(ctx, issue.Repo.OwnerID, teamReviewRequest.ReviewerTeamID, doer.ID) if err != nil { return nil, nil, err } else if !ok { @@ -661,7 +662,7 @@ func AddReviewRequest(issue *Issue, reviewer, doer *user_model.User) (*Comment, return nil, err } - comment, err := createComment(ctx, &CreateCommentOptions{ + comment, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Type: CommentTypeReviewRequest, Doer: doer, Repo: issue.Repo, @@ -716,7 +717,7 @@ func RemoveReviewRequest(issue *Issue, reviewer, doer *user_model.User) (*Commen } } - comment, err := createComment(ctx, &CreateCommentOptions{ + comment, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Type: CommentTypeReviewRequest, Doer: doer, Repo: issue.Repo, @@ -732,7 +733,7 @@ func RemoveReviewRequest(issue *Issue, reviewer, doer *user_model.User) (*Commen } // AddTeamReviewRequest add a review request from one team -func AddTeamReviewRequest(issue *Issue, reviewer *Team, doer *user_model.User) (*Comment, error) { +func AddTeamReviewRequest(issue *Issue, reviewer *organization.Team, doer *user_model.User) (*Comment, error) { ctx, committer, err := db.TxContext() if err != nil { return nil, err @@ -775,7 +776,7 @@ func AddTeamReviewRequest(issue *Issue, reviewer *Team, doer *user_model.User) ( } } - comment, err := createComment(ctx, &CreateCommentOptions{ + comment, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Type: CommentTypeReviewRequest, Doer: doer, Repo: issue.Repo, @@ -785,14 +786,14 @@ func AddTeamReviewRequest(issue *Issue, reviewer *Team, doer *user_model.User) ( ReviewID: review.ID, }) if err != nil { - return nil, fmt.Errorf("createComment(): %v", err) + return nil, fmt.Errorf("CreateCommentCtx(): %v", err) } return comment, committer.Commit() } // RemoveTeamReviewRequest remove a review request from one team -func RemoveTeamReviewRequest(issue *Issue, reviewer *Team, doer *user_model.User) (*Comment, error) { +func RemoveTeamReviewRequest(issue *Issue, reviewer *organization.Team, doer *user_model.User) (*Comment, error) { ctx, committer, err := db.TxContext() if err != nil { return nil, err @@ -836,7 +837,7 @@ func RemoveTeamReviewRequest(issue *Issue, reviewer *Team, doer *user_model.User return nil, 
committer.Commit() } - comment, err := createComment(ctx, &CreateCommentOptions{ + comment, err := CreateCommentCtx(ctx, &CreateCommentOptions{ Type: CommentTypeReviewRequest, Doer: doer, Repo: issue.Repo, @@ -845,7 +846,7 @@ func RemoveTeamReviewRequest(issue *Issue, reviewer *Team, doer *user_model.User AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID }) if err != nil { - return nil, fmt.Errorf("createComment(): %v", err) + return nil, fmt.Errorf("CreateCommentCtx(): %v", err) } return comment, committer.Commit() @@ -886,11 +887,11 @@ func CanMarkConversation(issue *Issue, doer *user_model.User) (permResult bool, } if doer.ID != issue.PosterID { - if err = issue.LoadRepo(); err != nil { + if err = issue.LoadRepo(db.DefaultContext); err != nil { return false, err } - p, err := GetUserRepoPermission(issue.Repo, doer) + p, err := GetUserRepoPermission(db.DefaultContext, issue.Repo, doer) if err != nil { return false, err } diff --git a/models/statistic.go b/models/statistic.go index dfe543d063..d858102be8 100644 --- a/models/statistic.go +++ b/models/statistic.go @@ -8,6 +8,9 @@ import ( asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/auth" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" + project_model "code.gitea.io/gitea/models/project" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/models/webhook" @@ -46,8 +49,8 @@ type IssueByRepositoryCount struct { // GetStatistic returns the database statistics func GetStatistic() (stats Statistic) { e := db.GetEngine(db.DefaultContext) - stats.Counter.User = user_model.CountUsers() - stats.Counter.Org = CountOrganizations() + stats.Counter.User = user_model.CountUsers(nil) + stats.Counter.Org = organization.CountOrganizations() stats.Counter.PublicKey, _ = e.Count(new(asymkey_model.PublicKey)) stats.Counter.Repo = repo_model.CountRepositories(true) stats.Counter.Watch, _ = e.Count(new(repo_model.Watch)) @@ -100,12 +103,12 @@ func GetStatistic() (stats Statistic) { stats.Counter.Release, _ = e.Count(new(Release)) stats.Counter.AuthSource = auth.CountSources() stats.Counter.Webhook, _ = e.Count(new(webhook.Webhook)) - stats.Counter.Milestone, _ = e.Count(new(Milestone)) + stats.Counter.Milestone, _ = e.Count(new(issues_model.Milestone)) stats.Counter.Label, _ = e.Count(new(Label)) stats.Counter.HookTask, _ = e.Count(new(webhook.HookTask)) - stats.Counter.Team, _ = e.Count(new(Team)) + stats.Counter.Team, _ = e.Count(new(organization.Team)) stats.Counter.Attachment, _ = e.Count(new(repo_model.Attachment)) - stats.Counter.Project, _ = e.Count(new(Project)) - stats.Counter.ProjectBoard, _ = e.Count(new(ProjectBoard)) + stats.Counter.Project, _ = e.Count(new(project_model.Project)) + stats.Counter.ProjectBoard, _ = e.Count(new(project_model.Board)) return } diff --git a/models/task.go b/models/task.go index 64c858921c..5528573ca5 100644 --- a/models/task.go +++ b/models/task.go @@ -181,6 +181,14 @@ func GetMigratingTask(repoID int64) (*Task, error) { return &task, nil } +// HasFinishedMigratingTask returns if a finished migration task exists for the repo. +func HasFinishedMigratingTask(repoID int64) (bool, error) { + return db.GetEngine(db.DefaultContext). + Where("repo_id=? AND type=? AND status=?", repoID, structs.TaskTypeMigrateRepo, structs.TaskStatusFinished). + Table("task"). 
+ Exist() +} + // GetMigratingTaskByID returns the migrating task by repo's id func GetMigratingTaskByID(id, doerID int64) (*Task, *migration.MigrateOptions, error) { task := Task{ @@ -245,7 +253,7 @@ func FinishMigrateTask(task *Task) error { } conf.AuthPassword = "" conf.AuthToken = "" - conf.CloneAddr = util.NewStringURLSanitizer(conf.CloneAddr, true).Replace(conf.CloneAddr) + conf.CloneAddr = util.SanitizeCredentialURLs(conf.CloneAddr) conf.AuthPasswordEncrypted = "" conf.AuthTokenEncrypted = "" conf.CloneAddrEncrypted = "" diff --git a/models/unit/unit.go b/models/unit/unit.go index a6a47eb1f3..9d3cae5f25 100644 --- a/models/unit/unit.go +++ b/models/unit/unit.go @@ -27,6 +27,7 @@ const ( TypeExternalWiki // 6 ExternalWiki TypeExternalTracker // 7 ExternalTracker TypeProjects // 8 Kanban board + TypePackages // 9 Packages ) // Value returns integer value for unit type @@ -52,6 +53,8 @@ func (u Type) String() string { return "TypeExternalTracker" case TypeProjects: return "TypeProjects" + case TypePackages: + return "TypePackages" } return fmt.Sprintf("Unknown Type %d", u) } @@ -74,6 +77,7 @@ var ( TypeExternalWiki, TypeExternalTracker, TypeProjects, + TypePackages, } // DefaultRepoUnits contains the default unit types @@ -84,6 +88,7 @@ var ( TypeReleases, TypeWiki, TypeProjects, + TypePackages, } // NotAllowedDefaultRepoUnits contains units that can't be default @@ -275,6 +280,15 @@ var ( perm.AccessModeOwner, } + UnitPackages = Unit{ + TypePackages, + "repo.packages", + "/packages", + "repo.packages.desc", + 6, + perm.AccessModeRead, + } + // Units contains all the units Units = map[Type]Unit{ TypeCode: UnitCode, @@ -285,6 +299,7 @@ var ( TypeWiki: UnitWiki, TypeExternalWiki: UnitExternalWiki, TypeProjects: UnitProjects, + TypePackages: UnitPackages, } ) diff --git a/models/unittest/consistency.go b/models/unittest/consistency.go index 2645084d3e..af05348868 100644 --- a/models/unittest/consistency.go +++ b/models/unittest/consistency.go @@ -175,8 +175,10 @@ func init() { checkForActionConsistency := func(t assert.TestingT, bean interface{}) { action := reflectionWrap(bean) - repoRow := AssertExistsAndLoadMap(t, "repository", builder.Eq{"id": action.int("RepoID")}) - assert.Equal(t, parseBool(repoRow["is_private"]), action.bool("IsPrivate"), "action: %+v", action) + if action.int("RepoID") != 1700 { // dangling intentional + repoRow := AssertExistsAndLoadMap(t, "repository", builder.Eq{"id": action.int("RepoID")}) + assert.Equal(t, parseBool(repoRow["is_private"]), action.bool("IsPrivate"), "action: %+v", action) + } } consistencyCheckMap["user"] = checkForUserConsistency diff --git a/models/unittest/fscopy.go b/models/unittest/fscopy.go new file mode 100644 index 0000000000..ff815e729d --- /dev/null +++ b/models/unittest/fscopy.go @@ -0,0 +1,103 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package unittest + +import ( + "errors" + "io" + "os" + "path" + "strings" + + "code.gitea.io/gitea/modules/util" +) + +// Copy copies file from source to target path. +func Copy(src, dest string) error { + // Gather file information to set back later. + si, err := os.Lstat(src) + if err != nil { + return err + } + + // Handle symbolic link. + if si.Mode()&os.ModeSymlink != 0 { + target, err := os.Readlink(src) + if err != nil { + return err + } + // NOTE: os.Chmod and os.Chtimes don't recognize symbolic link, + // which will lead "no such file or directory" error. 
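For context on the models/unit/unit.go hunk above: TypePackages takes the next iota value (9) and is registered in AllRepoUnitTypes, DefaultRepoUnits, and the Units map, so it resolves exactly like the existing unit types. A minimal sketch of that lookup, assuming only the declarations visible in this patch:

package main

import (
	"fmt"

	"code.gitea.io/gitea/models/unit"
)

func main() {
	// The new Packages unit behaves like any other repo unit type.
	fmt.Println(unit.TypePackages.Value())  // 9
	fmt.Println(unit.TypePackages.String()) // "TypePackages"

	// It is also wired into the Units registry (and the default unit lists).
	_, registered := unit.Units[unit.TypePackages]
	fmt.Println(registered) // true
}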
+ return os.Symlink(target, dest) + } + + sr, err := os.Open(src) + if err != nil { + return err + } + defer sr.Close() + + dw, err := os.Create(dest) + if err != nil { + return err + } + defer dw.Close() + + if _, err = io.Copy(dw, sr); err != nil { + return err + } + + // Set back file information. + if err = os.Chtimes(dest, si.ModTime(), si.ModTime()); err != nil { + return err + } + return os.Chmod(dest, si.Mode()) +} + +// CopyDir copy files recursively from source to target directory. +// +// The filter accepts a function that process the path info. +// and should return true for need to filter. +// +// It returns error when error occurs in underlying functions. +func CopyDir(srcPath, destPath string, filters ...func(filePath string) bool) error { + // Check if target directory exists. + if _, err := os.Stat(destPath); !errors.Is(err, os.ErrNotExist) { + return errors.New("file or directory already exists: " + destPath) + } + + err := os.MkdirAll(destPath, os.ModePerm) + if err != nil { + return err + } + + // Gather directory info. + infos, err := util.StatDir(srcPath, true) + if err != nil { + return err + } + + var filter func(filePath string) bool + if len(filters) > 0 { + filter = filters[0] + } + + for _, info := range infos { + if filter != nil && filter(info) { + continue + } + + curPath := path.Join(destPath, info) + if strings.HasSuffix(info, "/") { + err = os.MkdirAll(curPath, os.ModePerm) + } else { + err = Copy(path.Join(srcPath, info), curPath) + } + if err != nil { + return err + } + } + return nil +} diff --git a/models/unittest/testdb.go b/models/unittest/testdb.go index 80dcb428df..117614a7a4 100644 --- a/models/unittest/testdb.go +++ b/models/unittest/testdb.go @@ -39,19 +39,27 @@ func fatalTestError(fmtStr string, args ...interface{}) { os.Exit(1) } +// TestOptions represents test options +type TestOptions struct { + GiteaRootPath string + FixtureFiles []string + SetUp func() error // SetUp will be executed before all tests in this package + TearDown func() error // TearDown will be executed after all tests in this package +} + // MainTest a reusable TestMain(..) function for unit tests that need to use a // test database. Creates the test database, and sets necessary settings. 
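The TestOptions struct above replaces the old variadic fixture-file arguments to MainTest. A usage sketch, mirroring the main_test.go updates later in this patch (the package name is a placeholder); an empty FixtureFiles list loads the entire fixtures directory, and SetUp/TearDown run once around the package's tests:

package mypackage // placeholder package name, for illustration only

import (
	"path/filepath"
	"testing"

	"code.gitea.io/gitea/models/unittest"
)

func TestMain(m *testing.M) {
	unittest.MainTest(m, &unittest.TestOptions{
		GiteaRootPath: filepath.Join("..", ".."),
		FixtureFiles:  []string{"user.yml", "webhook.yml"}, // leave empty to load the whole fixtures dir
		SetUp:         func() error { return nil },         // runs once before the package's tests
		TearDown:      func() error { return nil },         // runs once after them
	})
}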
-func MainTest(m *testing.M, pathToGiteaRoot string, fixtureFiles ...string) { +func MainTest(m *testing.M, testOpts *TestOptions) { var err error - giteaRoot = pathToGiteaRoot - fixturesDir = filepath.Join(pathToGiteaRoot, "models", "fixtures") + giteaRoot = testOpts.GiteaRootPath + fixturesDir = filepath.Join(testOpts.GiteaRootPath, "models", "fixtures") var opts FixturesOptions - if len(fixtureFiles) == 0 { + if len(testOpts.FixtureFiles) == 0 { opts.Dir = fixturesDir } else { - for _, f := range fixtureFiles { + for _, f := range testOpts.FixtureFiles { if len(f) != 0 { opts.Files = append(opts.Files, filepath.Join(fixturesDir, f)) } @@ -69,6 +77,7 @@ func MainTest(m *testing.M, pathToGiteaRoot string, fixtureFiles ...string) { setting.SSH.Port = 3000 setting.SSH.Domain = "try.gitea.io" setting.Database.UseSQLite3 = true + setting.Repository.DefaultBranch = "master" // many test code still assume that default branch is called "master" repoRootPath, err := os.MkdirTemp(os.TempDir(), "repos") if err != nil { fatalTestError("TempDir: %v\n", err) @@ -79,8 +88,8 @@ func MainTest(m *testing.M, pathToGiteaRoot string, fixtureFiles ...string) { fatalTestError("TempDir: %v\n", err) } setting.AppDataPath = appDataPath - setting.AppWorkPath = pathToGiteaRoot - setting.StaticRootPath = pathToGiteaRoot + setting.AppWorkPath = testOpts.GiteaRootPath + setting.StaticRootPath = testOpts.GiteaRootPath setting.GravatarSourceURL, err = url.Parse("https://secure.gravatar.com/avatar/") if err != nil { fatalTestError("url.Parse: %v\n", err) @@ -95,6 +104,8 @@ func MainTest(m *testing.M, pathToGiteaRoot string, fixtureFiles ...string) { setting.RepoArchive.Storage.Path = filepath.Join(setting.AppDataPath, "repo-archive") + setting.Packages.Storage.Path = filepath.Join(setting.AppDataPath, "packages") + if err = storage.Init(); err != nil { fatalTestError("storage.Init: %v\n", err) } @@ -102,7 +113,7 @@ func MainTest(m *testing.M, pathToGiteaRoot string, fixtureFiles ...string) { if err = util.RemoveAll(repoRootPath); err != nil { fatalTestError("util.RemoveAll: %v\n", err) } - if err = util.CopyDir(filepath.Join(pathToGiteaRoot, "integrations", "gitea-repositories-meta"), setting.RepoRootPath); err != nil { + if err = CopyDir(filepath.Join(testOpts.GiteaRootPath, "integrations", "gitea-repositories-meta"), setting.RepoRootPath); err != nil { fatalTestError("util.CopyDir: %v\n", err) } @@ -126,7 +137,20 @@ func MainTest(m *testing.M, pathToGiteaRoot string, fixtureFiles ...string) { } } + if testOpts.SetUp != nil { + if err := testOpts.SetUp(); err != nil { + fatalTestError("set up failed: %v\n", err) + } + } + exitStatus := m.Run() + + if testOpts.TearDown != nil { + if err := testOpts.TearDown(); err != nil { + fatalTestError("tear down failed: %v\n", err) + } + } + if err = util.RemoveAll(repoRootPath); err != nil { fatalTestError("util.RemoveAll: %v\n", err) } @@ -173,7 +197,7 @@ func PrepareTestEnv(t testing.TB) { assert.NoError(t, PrepareTestDatabase()) assert.NoError(t, util.RemoveAll(setting.RepoRootPath)) metaPath := filepath.Join(giteaRoot, "integrations", "gitea-repositories-meta") - assert.NoError(t, util.CopyDir(metaPath, setting.RepoRootPath)) + assert.NoError(t, CopyDir(metaPath, setting.RepoRootPath)) ownerDirs, err := os.ReadDir(setting.RepoRootPath) assert.NoError(t, err) diff --git a/models/user.go b/models/user.go index 443e0c0c8b..11234a881d 100644 --- a/models/user.go +++ b/models/user.go @@ -14,21 +14,15 @@ import ( asymkey_model "code.gitea.io/gitea/models/asymkey" 
"code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" + pull_model "code.gitea.io/gitea/models/pull" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" - "code.gitea.io/gitea/modules/structs" - - "xorm.io/builder" + "code.gitea.io/gitea/modules/util" ) -// GetOrganizationCount returns count of membership of organization of the user. -func GetOrganizationCount(ctx context.Context, u *user_model.User) (int64, error) { - return db.GetEngine(ctx). - Where("uid=?", u.ID). - Count(new(OrgUser)) -} - // DeleteUser deletes models associated to an user. func DeleteUser(ctx context.Context, u *user_model.User) (err error) { e := db.GetEngine(ctx) @@ -84,11 +78,13 @@ func DeleteUser(ctx context.Context, u *user_model.User) (err error) { &IssueUser{UID: u.ID}, &user_model.EmailAddress{UID: u.ID}, &user_model.UserOpenID{UID: u.ID}, - &Reaction{UserID: u.ID}, - &TeamUser{UID: u.ID}, + &issues.Reaction{UserID: u.ID}, + &organization.TeamUser{UID: u.ID}, &Collaboration{UserID: u.ID}, &Stopwatch{UserID: u.ID}, &user_model.Setting{UserID: u.ID}, + &pull_model.AutoMerge{DoerID: u.ID}, + &pull_model.ReviewState{UserID: u.ID}, ); err != nil { return fmt.Errorf("deleteBeans: %v", err) } @@ -108,18 +104,62 @@ func DeleteUser(ctx context.Context, u *user_model.User) (err error) { } for _, comment := range comments { - if err = deleteComment(e, comment); err != nil { + if err = deleteComment(ctx, comment); err != nil { return err } } } // Delete Reactions - if err = deleteReaction(e, &ReactionOptions{Doer: u}); err != nil { + if err = issues.DeleteReaction(ctx, &issues.ReactionOptions{DoerID: u.ID}); err != nil { return err } } + // ***** START: Branch Protections ***** + { + const batchSize = 50 + for start := 0; ; start += batchSize { + protections := make([]*ProtectedBranch, 0, batchSize) + // @perf: We can't filter on DB side by u.ID, as those IDs are serialized as JSON strings. + // We could filter down with `WHERE repo_id IN (reposWithPushPermission(u))`, + // though that query will be quite complex and tricky to maintain (compare `getRepoAssignees()`). + // Also, as we didn't update branch protections when removing entries from `access` table, + // it's safer to iterate all protected branches. 
+ if err = e.Limit(batchSize, start).Find(&protections); err != nil { + return fmt.Errorf("findProtectedBranches: %v", err) + } + if len(protections) == 0 { + break + } + for _, p := range protections { + var matched1, matched2, matched3 bool + if len(p.WhitelistUserIDs) != 0 { + p.WhitelistUserIDs, matched1 = util.RemoveIDFromList( + p.WhitelistUserIDs, u.ID) + } + if len(p.ApprovalsWhitelistUserIDs) != 0 { + p.ApprovalsWhitelistUserIDs, matched2 = util.RemoveIDFromList( + p.ApprovalsWhitelistUserIDs, u.ID) + } + if len(p.MergeWhitelistUserIDs) != 0 { + p.MergeWhitelistUserIDs, matched3 = util.RemoveIDFromList( + p.MergeWhitelistUserIDs, u.ID) + } + if matched1 || matched2 || matched3 { + if _, err = e.ID(p.ID).Cols( + "whitelist_user_i_ds", + "merge_whitelist_user_i_ds", + "approvals_whitelist_user_i_ds", + ).Update(p); err != nil { + return fmt.Errorf("updateProtectedBranches: %v", err) + } + } + } + } + } + // ***** END: Branch Protections ***** + // ***** START: PublicKey ***** if _, err = e.Delete(&asymkey_model.PublicKey{OwnerID: u.ID}); err != nil { return fmt.Errorf("deletePublicKeys: %v", err) @@ -159,100 +199,3 @@ func DeleteUser(ctx context.Context, u *user_model.User) (err error) { return nil } - -// GetStarredRepos returns the repos starred by a particular user -func GetStarredRepos(userID int64, private bool, listOptions db.ListOptions) ([]*repo_model.Repository, error) { - sess := db.GetEngine(db.DefaultContext).Where("star.uid=?", userID). - Join("LEFT", "star", "`repository`.id=`star`.repo_id") - if !private { - sess = sess.And("is_private=?", false) - } - - if listOptions.Page != 0 { - sess = db.SetSessionPagination(sess, &listOptions) - - repos := make([]*repo_model.Repository, 0, listOptions.PageSize) - return repos, sess.Find(&repos) - } - - repos := make([]*repo_model.Repository, 0, 10) - return repos, sess.Find(&repos) -} - -// GetWatchedRepos returns the repos watched by a particular user -func GetWatchedRepos(userID int64, private bool, listOptions db.ListOptions) ([]*repo_model.Repository, int64, error) { - sess := db.GetEngine(db.DefaultContext).Where("watch.user_id=?", userID). - And("`watch`.mode<>?", repo_model.WatchModeDont). - Join("LEFT", "watch", "`repository`.id=`watch`.repo_id") - if !private { - sess = sess.And("is_private=?", false) - } - - if listOptions.Page != 0 { - sess = db.SetSessionPagination(sess, &listOptions) - - repos := make([]*repo_model.Repository, 0, listOptions.PageSize) - total, err := sess.FindAndCount(&repos) - return repos, total, err - } - - repos := make([]*repo_model.Repository, 0, 10) - total, err := sess.FindAndCount(&repos) - return repos, total, err -} - -// IsUserVisibleToViewer check if viewer is able to see user profile -func IsUserVisibleToViewer(u, viewer *user_model.User) bool { - return isUserVisibleToViewer(db.GetEngine(db.DefaultContext), u, viewer) -} - -func isUserVisibleToViewer(e db.Engine, u, viewer *user_model.User) bool { - if viewer != nil && viewer.IsAdmin { - return true - } - - switch u.Visibility { - case structs.VisibleTypePublic: - return true - case structs.VisibleTypeLimited: - if viewer == nil || viewer.IsRestricted { - return false - } - return true - case structs.VisibleTypePrivate: - if viewer == nil || viewer.IsRestricted { - return false - } - - // If they follow - they see each over - follower := user_model.IsFollowing(u.ID, viewer.ID) - if follower { - return true - } - - // Now we need to check if they in some organization together - count, err := e.Table("team_user"). 
- Where( - builder.And( - builder.Eq{"uid": viewer.ID}, - builder.Or( - builder.Eq{"org_id": u.ID}, - builder.In("org_id", - builder.Select("org_id"). - From("team_user", "t2"). - Where(builder.Eq{"uid": u.ID}))))). - Count(new(TeamUser)) - if err != nil { - return false - } - - if count < 0 { - // No common organization - return false - } - - // they are in an organization together - return true - } - return false -} diff --git a/models/user/email_address.go b/models/user/email_address.go index 726af7b3b4..564d018dac 100644 --- a/models/user/email_address.go +++ b/models/user/email_address.go @@ -10,6 +10,7 @@ import ( "errors" "fmt" "net/mail" + "regexp" "strings" "code.gitea.io/gitea/models/db" @@ -22,7 +23,22 @@ import ( ) // ErrEmailNotActivated e-mail address has not been activated error -var ErrEmailNotActivated = errors.New("E-mail address has not been activated") +var ErrEmailNotActivated = errors.New("e-mail address has not been activated") + +// ErrEmailCharIsNotSupported e-mail address contains unsupported character +type ErrEmailCharIsNotSupported struct { + Email string +} + +// IsErrEmailCharIsNotSupported checks if an error is an ErrEmailCharIsNotSupported +func IsErrEmailCharIsNotSupported(err error) bool { + _, ok := err.(ErrEmailCharIsNotSupported) + return ok +} + +func (err ErrEmailCharIsNotSupported) Error() string { + return fmt.Sprintf("e-mail address contains unsupported character [email: %s]", err.Email) +} // ErrEmailInvalid represents an error where the email address does not comply with RFC 5322 type ErrEmailInvalid struct { @@ -106,12 +122,24 @@ func (email *EmailAddress) BeforeInsert() { } } +var emailRegexp = regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+-/=?^_`{|}~]*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$") + // ValidateEmail check if email is a allowed address func ValidateEmail(email string) error { if len(email) == 0 { return nil } + if !emailRegexp.MatchString(email) { + return ErrEmailCharIsNotSupported{email} + } + + if !(email[0] >= 'a' && email[0] <= 'z') && + !(email[0] >= 'A' && email[0] <= 'Z') && + !(email[0] >= '0' && email[0] <= '9') { + return ErrEmailInvalid{email} + } + if _, err := mail.ParseAddress(email); err != nil { return ErrEmailInvalid{email} } diff --git a/models/user/email_address_test.go b/models/user/email_address_test.go index 4a539e150a..7eeb469b26 100644 --- a/models/user/email_address_test.go +++ b/models/user/email_address_test.go @@ -252,3 +252,58 @@ func TestListEmails(t *testing.T) { assert.Len(t, emails, 5) assert.Greater(t, count, int64(len(emails))) } + +func TestEmailAddressValidate(t *testing.T) { + kases := map[string]error{ + "abc@gmail.com": nil, + "132@hotmail.com": nil, + "1-3-2@test.org": nil, + "1.3.2@test.org": nil, + "a_123@test.org.cn": nil, + `first.last@iana.org`: nil, + `first!last@iana.org`: nil, + `first#last@iana.org`: nil, + `first$last@iana.org`: nil, + `first%last@iana.org`: nil, + `first&last@iana.org`: nil, + `first'last@iana.org`: nil, + `first*last@iana.org`: nil, + `first+last@iana.org`: nil, + `first/last@iana.org`: nil, + `first=last@iana.org`: nil, + `first?last@iana.org`: nil, + `first^last@iana.org`: nil, + "first`last@iana.org": nil, + `first{last@iana.org`: nil, + `first|last@iana.org`: nil, + `first}last@iana.org`: nil, + `first~last@iana.org`: nil, + `first;last@iana.org`: ErrEmailCharIsNotSupported{`first;last@iana.org`}, + ".233@qq.com": ErrEmailInvalid{".233@qq.com"}, + "!233@qq.com": ErrEmailInvalid{"!233@qq.com"}, + 
"#233@qq.com": ErrEmailInvalid{"#233@qq.com"}, + "$233@qq.com": ErrEmailInvalid{"$233@qq.com"}, + "%233@qq.com": ErrEmailInvalid{"%233@qq.com"}, + "&233@qq.com": ErrEmailInvalid{"&233@qq.com"}, + "'233@qq.com": ErrEmailInvalid{"'233@qq.com"}, + "*233@qq.com": ErrEmailInvalid{"*233@qq.com"}, + "+233@qq.com": ErrEmailInvalid{"+233@qq.com"}, + "/233@qq.com": ErrEmailInvalid{"/233@qq.com"}, + "=233@qq.com": ErrEmailInvalid{"=233@qq.com"}, + "?233@qq.com": ErrEmailInvalid{"?233@qq.com"}, + "^233@qq.com": ErrEmailInvalid{"^233@qq.com"}, + "`233@qq.com": ErrEmailInvalid{"`233@qq.com"}, + "{233@qq.com": ErrEmailInvalid{"{233@qq.com"}, + "|233@qq.com": ErrEmailInvalid{"|233@qq.com"}, + "}233@qq.com": ErrEmailInvalid{"}233@qq.com"}, + "~233@qq.com": ErrEmailInvalid{"~233@qq.com"}, + ";233@qq.com": ErrEmailCharIsNotSupported{";233@qq.com"}, + "Foo ": ErrEmailCharIsNotSupported{"Foo "}, + string([]byte{0xE2, 0x84, 0xAA}): ErrEmailCharIsNotSupported{string([]byte{0xE2, 0x84, 0xAA})}, + } + for kase, err := range kases { + t.Run(kase, func(t *testing.T) { + assert.EqualValues(t, err, ValidateEmail(kase)) + }) + } +} diff --git a/models/user/list.go b/models/user/list.go index 06ec511375..5cdc92ba4a 100644 --- a/models/user/list.go +++ b/models/user/list.go @@ -17,7 +17,7 @@ type UserList []*User //revive:disable-line:exported // GetUserIDs returns a slice of user's id func (users UserList) GetUserIDs() []int64 { - userIDs := make([]int64, len(users)) + userIDs := make([]int64, 0, len(users)) for _, user := range users { userIDs = append(userIDs, user.ID) // Considering that user id are unique in the list } diff --git a/models/user/main_test.go b/models/user/main_test.go index 55e5f9341c..09af550a95 100644 --- a/models/user/main_test.go +++ b/models/user/main_test.go @@ -12,13 +12,16 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", ".."), - "email_address.yml", - "user_redirect.yml", - "follow.yml", - "user_open_id.yml", - "two_factor.yml", - "oauth2_application.yml", - "user.yml", - ) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{ + "email_address.yml", + "user_redirect.yml", + "follow.yml", + "user_open_id.yml", + "two_factor.yml", + "oauth2_application.yml", + "user.yml", + }, + }) } diff --git a/models/user/user.go b/models/user/user.go index 3eabf4808c..6aa63a0a56 100644 --- a/models/user/user.go +++ b/models/user/user.go @@ -576,6 +576,7 @@ var ( "api", "assets", "attachments", + "avatar", "avatars", "captcha", "commits", @@ -584,10 +585,7 @@ var ( "explore", "favicon.ico", "ghost", - "help", - "install", "issues", - "less", "login", "manifest.json", "metrics", @@ -595,16 +593,17 @@ var ( "new", "notifications", "org", - "plugins", "pulls", "raw", "repo", + "repo-avatars", "robots.txt", "search", "serviceworker.js", - "stars", - "template", + "ssh_info", + "swagger.v1.json", "user", + "v2", } reservedUserPatterns = []string{"*.keys", "*.gpg", "*.rss", "*.atom"} @@ -622,7 +621,14 @@ func IsUsableUsername(name string) error { // CreateUserOverwriteOptions are an optional options who overwrite system defaults on user creation type CreateUserOverwriteOptions struct { - Visibility structs.VisibleType + KeepEmailPrivate util.OptionalBool + Visibility *structs.VisibleType + AllowCreateOrganization util.OptionalBool + EmailNotificationsPreference *string + MaxRepoCreation *int + Theme *string + IsRestricted util.OptionalBool + IsActive util.OptionalBool } // CreateUser creates record of a new user. 
@@ -638,10 +644,45 @@ func CreateUser(u *User, overwriteDefault ...*CreateUserOverwriteOptions) (err e u.EmailNotificationsPreference = setting.Admin.DefaultEmailNotification u.MaxRepoCreation = -1 u.Theme = setting.UI.DefaultTheme + u.IsRestricted = setting.Service.DefaultUserIsRestricted + u.IsActive = !(setting.Service.RegisterEmailConfirm || setting.Service.RegisterManualConfirm) // overwrite defaults if set if len(overwriteDefault) != 0 && overwriteDefault[0] != nil { - u.Visibility = overwriteDefault[0].Visibility + overwrite := overwriteDefault[0] + if !overwrite.KeepEmailPrivate.IsNone() { + u.KeepEmailPrivate = overwrite.KeepEmailPrivate.IsTrue() + } + if overwrite.Visibility != nil { + u.Visibility = *overwrite.Visibility + } + if !overwrite.AllowCreateOrganization.IsNone() { + u.AllowCreateOrganization = overwrite.AllowCreateOrganization.IsTrue() + } + if overwrite.EmailNotificationsPreference != nil { + u.EmailNotificationsPreference = *overwrite.EmailNotificationsPreference + } + if overwrite.MaxRepoCreation != nil { + u.MaxRepoCreation = *overwrite.MaxRepoCreation + } + if overwrite.Theme != nil { + u.Theme = *overwrite.Theme + } + if !overwrite.IsRestricted.IsNone() { + u.IsRestricted = overwrite.IsRestricted.IsTrue() + } + if !overwrite.IsActive.IsNone() { + u.IsActive = overwrite.IsActive.IsTrue() + } + } + + // validate data + if err := validateUser(u); err != nil { + return err + } + + if err := ValidateEmail(u.Email); err != nil { + return err } ctx, committer, err := db.TxContext() @@ -652,11 +693,6 @@ func CreateUser(u *User, overwriteDefault ...*CreateUserOverwriteOptions) (err e sess := db.GetEngine(ctx) - // validate data - if err := validateUser(u); err != nil { - return err - } - isExist, err := isUserExist(sess, 0, u.Name) if err != nil { return err @@ -708,16 +744,25 @@ func CreateUser(u *User, overwriteDefault ...*CreateUserOverwriteOptions) (err e return committer.Commit() } -func countUsers(e db.Engine) int64 { - count, _ := e. - Where("type=0"). - Count(new(User)) - return count +// CountUserFilter represent optional filters for CountUsers +type CountUserFilter struct { + LastLoginSince *int64 } // CountUsers returns number of users. 
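CountUsers now takes an optional filter instead of counting unconditionally. A short sketch of both forms, assuming only the CountUserFilter shown above; per the implementation below, LastLoginSince is compared against last_login_unix, i.e. a Unix timestamp in seconds:

// Assumed imports: fmt, time, code.gitea.io/gitea/models/user (as user_model).
total := user_model.CountUsers(nil) // no filter: all users, as GetStatistic does earlier in this patch

since := time.Now().AddDate(0, -1, 0).Unix() // users who logged in during the last month
active := user_model.CountUsers(&user_model.CountUserFilter{LastLoginSince: &since})

fmt.Printf("%d users total, %d logged in during the last month\n", total, active)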
-func CountUsers() int64 { - return countUsers(db.GetEngine(db.DefaultContext)) +func CountUsers(opts *CountUserFilter) int64 { + return countUsers(db.DefaultContext, opts) +} + +func countUsers(ctx context.Context, opts *CountUserFilter) int64 { + sess := db.GetEngine(ctx).Where(builder.Eq{"type": "0"}) + + if opts != nil && opts.LastLoginSince != nil { + sess = sess.Where(builder.Gte{"last_login_unix": *opts.LastLoginSince}) + } + + count, _ := sess.Count(new(User)) + return count } // GetVerifyUser get user by verify code @@ -1207,3 +1252,59 @@ func GetAdminUser() (*User, error) { return &admin, nil } + +// IsUserVisibleToViewer check if viewer is able to see user profile +func IsUserVisibleToViewer(u, viewer *User) bool { + return isUserVisibleToViewer(db.GetEngine(db.DefaultContext), u, viewer) +} + +func isUserVisibleToViewer(e db.Engine, u, viewer *User) bool { + if viewer != nil && viewer.IsAdmin { + return true + } + + switch u.Visibility { + case structs.VisibleTypePublic: + return true + case structs.VisibleTypeLimited: + if viewer == nil || viewer.IsRestricted { + return false + } + return true + case structs.VisibleTypePrivate: + if viewer == nil || viewer.IsRestricted { + return false + } + + // If they follow - they see each over + follower := IsFollowing(u.ID, viewer.ID) + if follower { + return true + } + + // Now we need to check if they in some organization together + count, err := e.Table("team_user"). + Where( + builder.And( + builder.Eq{"uid": viewer.ID}, + builder.Or( + builder.Eq{"org_id": u.ID}, + builder.In("org_id", + builder.Select("org_id"). + From("team_user", "t2"). + Where(builder.Eq{"uid": u.ID}))))). + Count() + if err != nil { + return false + } + + if count < 0 { + // No common organization + return false + } + + // they are in an organization together + return true + } + return false +} diff --git a/models/user/user_test.go b/models/user/user_test.go index a5f47172ee..335537aa13 100644 --- a/models/user/user_test.go +++ b/models/user/user_test.go @@ -232,7 +232,7 @@ func TestCreateUserInvalidEmail(t *testing.T) { err := CreateUser(user) assert.Error(t, err) - assert.True(t, IsErrEmailInvalid(err)) + assert.True(t, IsErrEmailCharIsNotSupported(err)) } func TestCreateUserEmailAlreadyUsed(t *testing.T) { diff --git a/models/user_heatmap.go b/models/user_heatmap.go index f331a0c16b..e908837ae8 100644 --- a/models/user_heatmap.go +++ b/models/user_heatmap.go @@ -6,6 +6,7 @@ package models import ( "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -23,11 +24,11 @@ func GetUserHeatmapDataByUser(user, doer *user_model.User) ([]*UserHeatmapData, } // GetUserHeatmapDataByUserTeam returns an array of UserHeatmapData -func GetUserHeatmapDataByUserTeam(user *user_model.User, team *Team, doer *user_model.User) ([]*UserHeatmapData, error) { +func GetUserHeatmapDataByUserTeam(user *user_model.User, team *organization.Team, doer *user_model.User) ([]*UserHeatmapData, error) { return getUserHeatmapData(user, team, doer) } -func getUserHeatmapData(user *user_model.User, team *Team, doer *user_model.User) ([]*UserHeatmapData, error) { +func getUserHeatmapData(user *user_model.User, team *organization.Team, doer *user_model.User) ([]*UserHeatmapData, error) { hdata := make([]*UserHeatmapData, 0) if !activityReadable(user, doer) { diff --git a/models/user_heatmap_test.go b/models/user_heatmap_test.go index 
7d2997648d..9361cb3452 100644 --- a/models/user_heatmap_test.go +++ b/models/user_heatmap_test.go @@ -9,6 +9,7 @@ import ( "testing" "time" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/json" @@ -19,25 +20,40 @@ import ( func TestGetUserHeatmapDataByUser(t *testing.T) { testCases := []struct { + desc string userID int64 doerID int64 CountResult int JSONResult string }{ - // self looks at action in private repo - {2, 2, 1, `[{"timestamp":1603227600,"contributions":1}]`}, - // admin looks at action in private repo - {2, 1, 1, `[{"timestamp":1603227600,"contributions":1}]`}, - // other user looks at action in private repo - {2, 3, 0, `[]`}, - // nobody looks at action in private repo - {2, 0, 0, `[]`}, - // collaborator looks at action in private repo - {16, 15, 1, `[{"timestamp":1603267200,"contributions":1}]`}, - // no action action not performed by target user - {3, 3, 0, `[]`}, - // multiple actions performed with two grouped together - {10, 10, 3, `[{"timestamp":1603009800,"contributions":1},{"timestamp":1603010700,"contributions":2}]`}, + { + "self looks at action in private repo", + 2, 2, 1, `[{"timestamp":1603227600,"contributions":1}]`, + }, + { + "admin looks at action in private repo", + 2, 1, 1, `[{"timestamp":1603227600,"contributions":1}]`, + }, + { + "other user looks at action in private repo", + 2, 3, 0, `[]`, + }, + { + "nobody looks at action in private repo", + 2, 0, 0, `[]`, + }, + { + "collaborator looks at action in private repo", + 16, 15, 1, `[{"timestamp":1603267200,"contributions":1}]`, + }, + { + "no action action not performed by target user", + 3, 3, 0, `[]`, + }, + { + "multiple actions performed with two grouped together", + 10, 10, 3, `[{"timestamp":1603009800,"contributions":1},{"timestamp":1603010700,"contributions":2}]`, + }, } // Prepare assert.NoError(t, unittest.PrepareTestDatabase()) @@ -46,7 +62,7 @@ func TestGetUserHeatmapDataByUser(t *testing.T) { timeutil.Set(time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)) defer timeutil.Unset() - for i, tc := range testCases { + for _, tc := range testCases { user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: tc.userID}).(*user_model.User) doer := &user_model.User{ID: tc.doerID} @@ -57,7 +73,7 @@ func TestGetUserHeatmapDataByUser(t *testing.T) { } // get the action for comparison - actions, err := GetFeeds(GetFeedsOptions{ + actions, err := GetFeeds(db.DefaultContext, GetFeedsOptions{ RequestedUser: user, Actor: doer, IncludePrivate: true, @@ -74,7 +90,7 @@ func TestGetUserHeatmapDataByUser(t *testing.T) { } assert.NoError(t, err) assert.Len(t, actions, contributions, "invalid action count: did the test data became too old?") - assert.Equal(t, tc.CountResult, contributions, fmt.Sprintf("testcase %d", i)) + assert.Equal(t, tc.CountResult, contributions, fmt.Sprintf("testcase '%s'", tc.desc)) // Test JSON rendering jsonData, err := json.Marshal(heatmap) diff --git a/models/user_test.go b/models/user_test.go index 83201ff4cb..bf6f71895e 100644 --- a/models/user_test.go +++ b/models/user_test.go @@ -5,7 +5,6 @@ package models import ( - "fmt" "testing" "code.gitea.io/gitea/models/unittest" @@ -42,59 +41,3 @@ func TestUnfollowUser(t *testing.T) { unittest.CheckConsistencyFor(t, &user_model.User{}) } - -func TestUserIsPublicMember(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - tt := []struct { - uid int64 - orgid int64 - expected bool - }{ - {2, 3, true}, - {4, 3, false}, - {5, 6, true}, 
- {5, 7, false}, - } - for _, v := range tt { - t.Run(fmt.Sprintf("UserId%dIsPublicMemberOf%d", v.uid, v.orgid), func(t *testing.T) { - testUserIsPublicMember(t, v.uid, v.orgid, v.expected) - }) - } -} - -func testUserIsPublicMember(t *testing.T, uid, orgID int64, expected bool) { - user, err := user_model.GetUserByID(uid) - assert.NoError(t, err) - is, err := IsPublicMembership(orgID, user.ID) - assert.NoError(t, err) - assert.Equal(t, expected, is) -} - -func TestIsUserOrgOwner(t *testing.T) { - assert.NoError(t, unittest.PrepareTestDatabase()) - - tt := []struct { - uid int64 - orgid int64 - expected bool - }{ - {2, 3, true}, - {4, 3, false}, - {5, 6, true}, - {5, 7, true}, - } - for _, v := range tt { - t.Run(fmt.Sprintf("UserId%dIsOrgOwnerOf%d", v.uid, v.orgid), func(t *testing.T) { - testIsUserOrgOwner(t, v.uid, v.orgid, v.expected) - }) - } -} - -func testIsUserOrgOwner(t *testing.T, uid, orgID int64, expected bool) { - user, err := user_model.GetUserByID(uid) - assert.NoError(t, err) - is, err := IsOrganizationOwner(orgID, user.ID) - assert.NoError(t, err) - assert.Equal(t, expected, is) -} diff --git a/models/userlist.go b/models/userlist.go index 102c587dfe..fbe1995b40 100644 --- a/models/userlist.go +++ b/models/userlist.go @@ -5,9 +5,11 @@ package models import ( + "context" "fmt" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" ) @@ -18,7 +20,7 @@ func IsUserOrgOwner(users user_model.UserList, orgID int64) map[int64]bool { for _, user := range users { results[user.ID] = false // Set default to false } - ownerMaps, err := loadOrganizationOwners(db.GetEngine(db.DefaultContext), users, orgID) + ownerMaps, err := loadOrganizationOwners(db.DefaultContext, users, orgID) if err == nil { for _, owner := range ownerMaps { results[owner.UID] = true @@ -27,13 +29,13 @@ func IsUserOrgOwner(users user_model.UserList, orgID int64) map[int64]bool { return results } -func loadOrganizationOwners(e db.Engine, users user_model.UserList, orgID int64) (map[int64]*TeamUser, error) { +func loadOrganizationOwners(ctx context.Context, users user_model.UserList, orgID int64) (map[int64]*organization.TeamUser, error) { if len(users) == 0 { return nil, nil } - ownerTeam, err := getOwnerTeam(e, orgID) + ownerTeam, err := organization.GetOwnerTeam(ctx, orgID) if err != nil { - if IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { log.Error("Organization does not have owner team: %d", orgID) return nil, nil } @@ -41,8 +43,8 @@ func loadOrganizationOwners(e db.Engine, users user_model.UserList, orgID int64) } userIDs := users.GetUserIDs() - ownerMaps := make(map[int64]*TeamUser) - err = e.In("uid", userIDs). + ownerMaps := make(map[int64]*organization.TeamUser) + err = db.GetEngine(ctx).In("uid", userIDs). And("org_id=?", orgID). And("team_id=?", ownerTeam.ID). 
Find(&ownerMaps) diff --git a/models/userlist_test.go b/models/userlist_test.go index 6776850b60..9b3c796e64 100644 --- a/models/userlist_test.go +++ b/models/userlist_test.go @@ -8,6 +8,7 @@ import ( "fmt" "testing" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/unittest" "github.com/stretchr/testify/assert" @@ -33,7 +34,7 @@ func TestUserListIsPublicMember(t *testing.T) { } func testUserListIsPublicMember(t *testing.T, orgID int64, expected map[int64]bool) { - org, err := GetOrgByID(orgID) + org, err := organization.GetOrgByID(orgID) assert.NoError(t, err) _, membersIsPublic, err := org.GetMembers() assert.NoError(t, err) @@ -60,7 +61,7 @@ func TestUserListIsUserOrgOwner(t *testing.T) { } func testUserListIsUserOrgOwner(t *testing.T, orgID int64, expected map[int64]bool) { - org, err := GetOrgByID(orgID) + org, err := organization.GetOrgByID(orgID) assert.NoError(t, err) members, _, err := org.GetMembers() assert.NoError(t, err) diff --git a/models/webhook/hooktask.go b/models/webhook/hooktask.go index 1d19ebd24e..c71b18f662 100644 --- a/models/webhook/hooktask.go +++ b/models/webhook/hooktask.go @@ -49,6 +49,7 @@ const ( HookEventPullRequestSync HookEventType = "pull_request_sync" HookEventRepository HookEventType = "repository" HookEventRelease HookEventType = "release" + HookEventPackage HookEventType = "package" ) // Event returns the HookEventType as an event string diff --git a/models/webhook/main_test.go b/models/webhook/main_test.go index 89c947b061..39ef2da6a2 100644 --- a/models/webhook/main_test.go +++ b/models/webhook/main_test.go @@ -12,5 +12,11 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", ".."), "webhook.yml", "hook_task.yml") + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{ + "webhook.yml", + "hook_task.yml", + }, + }) } diff --git a/models/webhook/webhook.go b/models/webhook/webhook.go index ffc9b72b64..941a3f15c7 100644 --- a/models/webhook/webhook.go +++ b/models/webhook/webhook.go @@ -134,6 +134,7 @@ type HookEvents struct { PullRequestSync bool `json:"pull_request_sync"` Repository bool `json:"repository"` Release bool `json:"release"` + Package bool `json:"package"` } // HookEvent represents events that will delivery hook. @@ -339,6 +340,12 @@ func (w *Webhook) HasRepositoryEvent() bool { (w.ChooseEvents && w.HookEvents.Repository) } +// HasPackageEvent returns if hook enabled package event. +func (w *Webhook) HasPackageEvent() bool { + return w.SendEverything || + (w.ChooseEvents && w.HookEvents.Package) +} + // EventCheckers returns event checkers func (w *Webhook) EventCheckers() []struct { Has func() bool @@ -368,6 +375,7 @@ func (w *Webhook) EventCheckers() []struct { {w.HasPullRequestSyncEvent, HookEventPullRequestSync}, {w.HasRepositoryEvent, HookEventRepository}, {w.HasReleaseEvent, HookEventRelease}, + {w.HasPackageEvent, HookEventPackage}, } } @@ -498,14 +506,19 @@ func GetSystemOrDefaultWebhook(id int64) (*Webhook, error) { } // GetSystemWebhooks returns all admin system webhooks. 
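GetSystemWebhooks gains an is_active filter below, expressed as a util.OptionalBool so that "no preference" remains possible. A usage sketch, assuming the usual OptionalBool constants (OptionalBoolTrue / OptionalBoolNone) from modules/util:

// Assumed imports: code.gitea.io/gitea/models/webhook,
// code.gitea.io/gitea/modules/log, code.gitea.io/gitea/modules/util.
func listSystemHooks() error {
	active, err := webhook.GetSystemWebhooks(util.OptionalBoolTrue) // only hooks with is_active = true
	if err != nil {
		return err
	}
	all, err := webhook.GetSystemWebhooks(util.OptionalBoolNone) // no is_active filter at all
	if err != nil {
		return err
	}
	log.Info("%d of %d system webhooks are active", len(active), len(all))
	return nil
}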
-func GetSystemWebhooks() ([]*Webhook, error) { - return getSystemWebhooks(db.GetEngine(db.DefaultContext)) +func GetSystemWebhooks(isActive util.OptionalBool) ([]*Webhook, error) { + return getSystemWebhooks(db.GetEngine(db.DefaultContext), isActive) } -func getSystemWebhooks(e db.Engine) ([]*Webhook, error) { +func getSystemWebhooks(e db.Engine, isActive util.OptionalBool) ([]*Webhook, error) { webhooks := make([]*Webhook, 0, 5) + if isActive.IsNone() { + return webhooks, e. + Where("repo_id=? AND org_id=? AND is_system_webhook=?", 0, 0, true). + Find(&webhooks) + } return webhooks, e. - Where("repo_id=? AND org_id=? AND is_system_webhook=?", 0, 0, true). + Where("repo_id=? AND org_id=? AND is_system_webhook=? AND is_active = ?", 0, 0, true, isActive.IsTrue()). Find(&webhooks) } diff --git a/models/webhook/webhook_test.go b/models/webhook/webhook_test.go index d1a76795fd..5ce564b775 100644 --- a/models/webhook/webhook_test.go +++ b/models/webhook/webhook_test.go @@ -72,6 +72,7 @@ func TestWebhook_EventsArray(t *testing.T) { "pull_request", "pull_request_assign", "pull_request_label", "pull_request_milestone", "pull_request_comment", "pull_request_review_approved", "pull_request_review_rejected", "pull_request_review_comment", "pull_request_sync", "repository", "release", + "package", }, (&Webhook{ HookEvent: &HookEvent{SendEverything: true}, diff --git a/modules/activitypub/client.go b/modules/activitypub/client.go index a11284dbce..0e4998234a 100644 --- a/modules/activitypub/client.go +++ b/modules/activitypub/client.go @@ -24,7 +24,7 @@ import ( const ( // ActivityStreamsContentType const ActivityStreamsContentType = `application/ld+json; profile="https://www.w3.org/ns/activitystreams"` - httpsigExpirationTime = 60 + httpsigExpirationTime = 60 ) func containsRequiredHTTPHeaders(method string, headers []string) error { diff --git a/modules/analyze/vendor.go b/modules/analyze/vendor.go index 12ae8dbd80..976a6ddc7b 100644 --- a/modules/analyze/vendor.go +++ b/modules/analyze/vendor.go @@ -5,66 +5,10 @@ package analyze import ( - "regexp" - "sort" - "strings" - - "github.com/go-enry/go-enry/v2/data" + "github.com/go-enry/go-enry/v2" ) -var isVendorRegExp *regexp.Regexp - -func init() { - matchers := data.VendorMatchers - - caretStrings := make([]string, 0, 10) - caretShareStrings := make([]string, 0, 10) - - matcherStrings := make([]string, 0, len(matchers)) - for _, matcher := range matchers { - str := matcher.String() - if str[0] == '^' { - caretStrings = append(caretStrings, str[1:]) - } else if str[0:5] == "(^|/)" { - caretShareStrings = append(caretShareStrings, str[5:]) - } else { - matcherStrings = append(matcherStrings, str) - } - } - - sort.Strings(caretShareStrings) - sort.Strings(caretStrings) - sort.Strings(matcherStrings) - - sb := &strings.Builder{} - sb.WriteString("(?:^(?:") - sb.WriteString(caretStrings[0]) - for _, matcher := range caretStrings[1:] { - sb.WriteString(")|(?:") - sb.WriteString(matcher) - } - sb.WriteString("))") - sb.WriteString("|") - sb.WriteString("(?:(?:^|/)(?:") - sb.WriteString(caretShareStrings[0]) - for _, matcher := range caretShareStrings[1:] { - sb.WriteString(")|(?:") - sb.WriteString(matcher) - } - sb.WriteString("))") - sb.WriteString("|") - sb.WriteString("(?:") - sb.WriteString(matcherStrings[0]) - for _, matcher := range matcherStrings[1:] { - sb.WriteString(")|(?:") - sb.WriteString(matcher) - } - sb.WriteString(")") - combined := sb.String() - isVendorRegExp = regexp.MustCompile(combined) -} - // IsVendor returns whether or not path is a 
vendor path. func IsVendor(path string) bool { - return isVendorRegExp.MatchString(path) + return enry.IsVendor(path) } diff --git a/modules/appstate/appstate_test.go b/modules/appstate/appstate_test.go index 15fbc829ba..e4a0d72850 100644 --- a/modules/appstate/appstate_test.go +++ b/modules/appstate/appstate_test.go @@ -14,7 +14,10 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", ".."), "") + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + FixtureFiles: []string{""}, // load nothing + }) } type testItem1 struct { diff --git a/modules/auth/pam/pam.go b/modules/auth/pam/pam.go index 0a3d8e9f91..39e93d7162 100644 --- a/modules/auth/pam/pam.go +++ b/modules/auth/pam/pam.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build pam -// +build pam package pam @@ -34,10 +33,10 @@ func Auth(serviceName, userName, passwd string) (string, error) { if err = t.Authenticate(0); err != nil { return "", err } - + if err = t.AcctMgmt(0); err != nil { - return "", err - } + return "", err + } // PAM login names might suffer transformations in the PAM stack. // We should take whatever the PAM stack returns for it. diff --git a/modules/auth/pam/pam_stub.go b/modules/auth/pam/pam_stub.go index 815ccf2b0e..414d7631b5 100644 --- a/modules/auth/pam/pam_stub.go +++ b/modules/auth/pam/pam_stub.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !pam -// +build !pam package pam diff --git a/modules/auth/pam/pam_test.go b/modules/auth/pam/pam_test.go index d6d78a748b..08565d2f31 100644 --- a/modules/auth/pam/pam_test.go +++ b/modules/auth/pam/pam_test.go @@ -1,5 +1,4 @@ //go:build pam -// +build pam // Copyright 2021 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style diff --git a/modules/avatar/identicon/identicon_test.go b/modules/avatar/identicon/identicon_test.go index ee44c95139..44635fbb3b 100644 --- a/modules/avatar/identicon/identicon_test.go +++ b/modules/avatar/identicon/identicon_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build test_avatar_identicon -// +build test_avatar_identicon package identicon diff --git a/modules/base/tool.go b/modules/base/tool.go index bf53a8ea8a..47ce125853 100644 --- a/modules/base/tool.go +++ b/modules/base/tool.go @@ -279,7 +279,7 @@ func EntryIcon(entry *git.TreeEntry) string { } return "file-symlink-file" case entry.IsDir(): - return "file-directory" + return "file-directory-fill" case entry.IsSubModule(): return "file-submodule" } diff --git a/modules/cache/cache.go b/modules/cache/cache.go index 0198f8da73..fd32aa153b 100644 --- a/modules/cache/cache.go +++ b/modules/cache/cache.go @@ -5,6 +5,7 @@ package cache import ( + "errors" "fmt" "strconv" @@ -34,25 +35,37 @@ func NewContext() error { if conn, err = newCache(setting.CacheService.Cache); err != nil { return err } - const testKey = "__gitea_cache_test" - const testVal = "test-value" - if err = conn.Put(testKey, testVal, 10); err != nil { + if err = Ping(); err != nil { return err } - val := conn.Get(testKey) - if valStr, ok := val.(string); !ok || valStr != testVal { - // If the cache is full, the Get may not read the expected value stored by Put. - // Since we have checked that Put can success, so we just show a warning here, do not return an error to panic. 
- log.Warn("cache (adapter:%s, config:%s) doesn't seem to work correctly, set test value '%v' but get '%v'", - setting.CacheService.Cache.Adapter, setting.CacheService.Cache.Conn, - testVal, val, - ) - } } return err } +// Ping checks if the cache service works or not, it not, it returns an error +func Ping() error { + if conn == nil { + return errors.New("cache not available") + } + var err error + const testKey = "__gitea_cache_test" + const testVal = "test-value" + if err = conn.Put(testKey, testVal, 10); err != nil { + return err + } + val := conn.Get(testKey) + if valStr, ok := val.(string); !ok || valStr != testVal { + // If the cache is full, the Get may not read the expected value stored by Put. + // Since we have checked that Put can success, so we just show a warning here, do not return an error to panic. + log.Warn("cache (adapter:%s, config:%s) doesn't seem to work correctly, set test value '%v' but get '%v'", + setting.CacheService.Cache.Adapter, setting.CacheService.Cache.Conn, + testVal, val, + ) + } + return nil +} + // GetCache returns the currently configured cache func GetCache() mc.Cache { return conn diff --git a/modules/cache/cache_redis.go b/modules/cache/cache_redis.go index 148725ae66..ff6c8d424c 100644 --- a/modules/cache/cache_redis.go +++ b/modules/cache/cache_redis.go @@ -6,6 +6,7 @@ package cache import ( "fmt" + "strconv" "time" "code.gitea.io/gitea/modules/graceful" @@ -13,7 +14,6 @@ import ( "gitea.com/go-chi/cache" "github.com/go-redis/redis/v8" - "github.com/unknwon/com" ) // RedisCacher represents a redis cache adapter implementation. @@ -24,20 +24,37 @@ type RedisCacher struct { occupyMode bool } -// Put puts value into cache with key and expire time. +// toStr convert string/int/int64 interface to string. it's only used by the RedisCacher.Put internally +func toStr(v interface{}) string { + if v == nil { + return "" + } + switch v := v.(type) { + case string: + return v + case []byte: + return string(v) + case int: + return strconv.FormatInt(int64(v), 10) + case int64: + return strconv.FormatInt(v, 10) + default: + return fmt.Sprint(v) // as what the old com.ToStr does in most cases + } +} + +// Put puts value (string type) into cache with key and expire time. // If expired is 0, it lives forever. func (c *RedisCacher) Put(key string, val interface{}, expire int64) error { + // this function is not well-designed, it only puts string values into cache key = c.prefix + key if expire == 0 { - if err := c.c.Set(graceful.GetManager().HammerContext(), key, com.ToStr(val), 0).Err(); err != nil { + if err := c.c.Set(graceful.GetManager().HammerContext(), key, toStr(val), 0).Err(); err != nil { return err } } else { - dur, err := time.ParseDuration(com.ToStr(expire) + "s") - if err != nil { - return err - } - if err = c.c.Set(graceful.GetManager().HammerContext(), key, com.ToStr(val), dur).Err(); err != nil { + dur := time.Duration(expire) * time.Second + if err := c.c.Set(graceful.GetManager().HammerContext(), key, toStr(val), dur).Err(); err != nil { return err } } diff --git a/modules/container/map.go b/modules/container/map.go new file mode 100644 index 0000000000..3519de0951 --- /dev/null +++ b/modules/container/map.go @@ -0,0 +1,14 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package container + +// KeysInt64 returns keys slice for a map with int64 key +func KeysInt64(m map[int64]struct{}) []int64 { + keys := make([]int64, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + return keys +} diff --git a/modules/context/api.go b/modules/context/api.go index c825e48753..33534dbf6b 100644 --- a/modules/context/api.go +++ b/modules/context/api.go @@ -8,20 +8,18 @@ package context import ( "context" "fmt" - "html" "net/http" "net/url" "strings" "code.gitea.io/gitea/models/auth" repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web/middleware" auth_service "code.gitea.io/gitea/services/auth" - - "gitea.com/go-chi/session" ) // APIContext is a specific context for API service @@ -100,7 +98,7 @@ func (ctx *APIContext) Error(status int, title string, obj interface{}) { if status == http.StatusInternalServerError { log.ErrorWithSkip(1, "%s: %s", title, message) - if setting.IsProd && !(ctx.User != nil && ctx.User.IsAdmin) { + if setting.IsProd && !(ctx.Doer != nil && ctx.Doer.IsAdmin) { message = "" } } @@ -117,7 +115,7 @@ func (ctx *APIContext) InternalServerError(err error) { log.ErrorWithSkip(1, "InternalServerError: %v", err) var message string - if !setting.IsProd || (ctx.User != nil && ctx.User.IsAdmin) { + if !setting.IsProd || (ctx.Doer != nil && ctx.Doer.IsAdmin) { message = err.Error() } @@ -191,33 +189,6 @@ func (ctx *APIContext) SetLinkHeader(total, pageSize int) { } } -// SetTotalCountHeader set "X-Total-Count" header -func (ctx *APIContext) SetTotalCountHeader(total int64) { - ctx.RespHeader().Set("X-Total-Count", fmt.Sprint(total)) - ctx.AppendAccessControlExposeHeaders("X-Total-Count") -} - -// AppendAccessControlExposeHeaders append headers by name to "Access-Control-Expose-Headers" header -func (ctx *APIContext) AppendAccessControlExposeHeaders(names ...string) { - val := ctx.RespHeader().Get("Access-Control-Expose-Headers") - if len(val) != 0 { - ctx.RespHeader().Set("Access-Control-Expose-Headers", fmt.Sprintf("%s, %s", val, strings.Join(names, ", "))) - } else { - ctx.RespHeader().Set("Access-Control-Expose-Headers", strings.Join(names, ", ")) - } -} - -// RequireCSRF requires a validated a CSRF token -func (ctx *APIContext) RequireCSRF() { - headerToken := ctx.Req.Header.Get(ctx.csrf.GetHeaderName()) - formValueToken := ctx.Req.FormValue(ctx.csrf.GetFormName()) - if len(headerToken) > 0 || len(formValueToken) > 0 { - Validate(ctx.Context, ctx.csrf) - } else { - ctx.Context.Error(401, "Missing CSRF token.") - } -} - // CheckForOTP validates OTP func (ctx *APIContext) CheckForOTP() { if skip, ok := ctx.Data["SkipLocalTwoFA"]; ok && skip.(bool) { @@ -225,7 +196,7 @@ func (ctx *APIContext) CheckForOTP() { } otpHeader := ctx.Req.Header.Get("X-Gitea-OTP") - twofa, err := auth.GetTwoFactorByUID(ctx.Context.User.ID) + twofa, err := auth.GetTwoFactorByUID(ctx.Context.Doer.ID) if err != nil { if auth.IsErrTwoFactorNotEnrolled(err) { return // No 2FA enrollment for this user @@ -239,7 +210,7 @@ func (ctx *APIContext) CheckForOTP() { return } if !ok { - ctx.Context.Error(401) + ctx.Context.Error(http.StatusUnauthorized) return } } @@ -248,18 +219,18 @@ func (ctx *APIContext) CheckForOTP() { func APIAuth(authMethod auth_service.Method) func(*APIContext) { return func(ctx *APIContext) { // Get user from session if logged in. 
- ctx.User = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) - if ctx.User != nil { - if ctx.Locale.Language() != ctx.User.Language { + ctx.Doer = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) + if ctx.Doer != nil { + if ctx.Locale.Language() != ctx.Doer.Language { ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req) } ctx.IsBasicAuth = ctx.Data["AuthedMethod"].(string) == auth_service.BasicMethodName ctx.IsSigned = true ctx.Data["IsSigned"] = ctx.IsSigned - ctx.Data["SignedUser"] = ctx.User - ctx.Data["SignedUserID"] = ctx.User.ID - ctx.Data["SignedUserName"] = ctx.User.Name - ctx.Data["IsAdmin"] = ctx.User.IsAdmin + ctx.Data["SignedUser"] = ctx.Doer + ctx.Data["SignedUserID"] = ctx.Doer.ID + ctx.Data["SignedUserName"] = ctx.Doer.Name + ctx.Data["IsAdmin"] = ctx.Doer.IsAdmin } else { ctx.Data["SignedUserID"] = int64(0) ctx.Data["SignedUserName"] = "" @@ -269,17 +240,15 @@ func APIAuth(authMethod auth_service.Method) func(*APIContext) { // APIContexter returns apicontext as middleware func APIContexter() func(http.Handler) http.Handler { - csrfOpts := getCsrfOpts() - return func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { locale := middleware.Locale(w, req) ctx := APIContext{ Context: &Context{ - Resp: NewResponse(w), - Data: map[string]interface{}{}, - Locale: locale, - Session: session.GetSession(req), + Resp: NewResponse(w), + Data: map[string]interface{}{}, + Locale: locale, + Cache: cache.GetCache(), Repo: &Repository{ PullRequest: &PullRequest{}, }, @@ -287,9 +256,9 @@ func APIContexter() func(http.Handler) http.Handler { }, Org: &APIOrganization{}, } + defer ctx.Close() ctx.Req = WithAPIContext(WithContext(req, ctx.Context), &ctx) - ctx.csrf = Csrfer(csrfOpts, ctx.Context) // If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid. if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") { @@ -301,7 +270,6 @@ func APIContexter() func(http.Handler) http.Handler { ctx.Resp.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions) - ctx.Data["CsrfToken"] = html.EscapeString(ctx.csrf.GetToken()) ctx.Data["Context"] = &ctx next.ServeHTTP(ctx.Resp, ctx.Req) @@ -320,36 +288,6 @@ func APIContexter() func(http.Handler) http.Handler { } } -// ReferencesGitRepo injects the GitRepo into the Context -func ReferencesGitRepo(allowEmpty bool) func(ctx *APIContext) (cancel context.CancelFunc) { - return func(ctx *APIContext) (cancel context.CancelFunc) { - // Empty repository does not have reference information. - if !allowEmpty && ctx.Repo.Repository.IsEmpty { - return - } - - // For API calls. - if ctx.Repo.GitRepo == nil { - repoPath := repo_model.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name) - gitRepo, err := git.OpenRepositoryCtx(ctx, repoPath) - if err != nil { - ctx.Error(http.StatusInternalServerError, "RepoRef Invalid repo "+repoPath, err) - return - } - ctx.Repo.GitRepo = gitRepo - // We opened it, we should close it - return func() { - // If it's been set to nil then assume someone else has closed it. 
- if ctx.Repo.GitRepo != nil { - ctx.Repo.GitRepo.Close() - } - } - } - - return - } -} - // NotFound handles 404s for APIContext // String will replace message, errors will be added to a slice func (ctx *APIContext) NotFound(objs ...interface{}) { @@ -375,33 +313,63 @@ func (ctx *APIContext) NotFound(objs ...interface{}) { }) } -// RepoRefForAPI handles repository reference names when the ref name is not explicitly given -func RepoRefForAPI(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { - ctx := GetAPIContext(req) +// ReferencesGitRepo injects the GitRepo into the Context +// you can optional skip the IsEmpty check +func ReferencesGitRepo(allowEmpty ...bool) func(ctx *APIContext) (cancel context.CancelFunc) { + return func(ctx *APIContext) (cancel context.CancelFunc) { // Empty repository does not have reference information. - if ctx.Repo.Repository.IsEmpty { + if ctx.Repo.Repository.IsEmpty && !(len(allowEmpty) != 0 && allowEmpty[0]) { return } - var err error - + // For API calls. if ctx.Repo.GitRepo == nil { repoPath := repo_model.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name) - ctx.Repo.GitRepo, err = git.OpenRepositoryCtx(ctx, repoPath) + gitRepo, err := git.OpenRepository(ctx, repoPath) if err != nil { - ctx.InternalServerError(err) + ctx.Error(http.StatusInternalServerError, "RepoRef Invalid repo "+repoPath, err) return } + ctx.Repo.GitRepo = gitRepo // We opened it, we should close it - defer func() { + return func() { // If it's been set to nil then assume someone else has closed it. if ctx.Repo.GitRepo != nil { ctx.Repo.GitRepo.Close() } - }() + } } + return + } +} + +// RepoRefForAPI handles repository reference names when the ref name is not explicitly given +func RepoRefForAPI(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { + ctx := GetAPIContext(req) + + if ctx.Repo.GitRepo == nil { + ctx.InternalServerError(fmt.Errorf("no open git repo")) + return + } + + if ref := ctx.FormTrim("ref"); len(ref) > 0 { + commit, err := ctx.Repo.GitRepo.GetCommit(ref) + if err != nil { + if git.IsErrNotExist(err) { + ctx.NotFound() + } else { + ctx.Error(http.StatusInternalServerError, "GetBlobByPath", err) + } + return + } + ctx.Repo.Commit = commit + ctx.Repo.TreePath = ctx.Params("*") + return + } + + var err error refName := getRefName(ctx.Context, RepoRefAny) if ctx.Repo.GitRepo.IsBranchExist(refName) { diff --git a/modules/context/api_org.go b/modules/context/api_org.go index 6d86fa6ed2..2231677d42 100644 --- a/modules/context/api_org.go +++ b/modules/context/api_org.go @@ -4,12 +4,10 @@ package context -import ( - "code.gitea.io/gitea/models" -) +import "code.gitea.io/gitea/models/organization" // APIOrganization contains organization and team type APIOrganization struct { - Organization *models.Organization - Team *models.Team + Organization *organization.Organization + Team *organization.Team } diff --git a/modules/context/auth.go b/modules/context/auth.go index 7e7e8ab961..09c2295455 100644 --- a/modules/context/auth.go +++ b/modules/context/auth.go @@ -27,19 +27,19 @@ func Toggle(options *ToggleOptions) func(ctx *Context) { return func(ctx *Context) { // Check prohibit login users. 
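The rewritten ReferencesGitRepo above takes a variadic `allowEmpty ...bool`, so existing call sites stay unchanged while new ones can opt in to empty repositories. A small sketch of that optional-flag pattern, with hypothetical names:

```go
package main

import "fmt"

// describeRepo mimics the optional-flag pattern used by the new
// ReferencesGitRepo(allowEmpty ...bool): omitting the argument means false.
func describeRepo(isEmpty bool, allowEmpty ...bool) string {
	if isEmpty && !(len(allowEmpty) != 0 && allowEmpty[0]) {
		return "skipped: empty repository"
	}
	return "git repo would be opened"
}

func main() {
	fmt.Println(describeRepo(true))       // skipped: empty repository
	fmt.Println(describeRepo(true, true)) // git repo would be opened
	fmt.Println(describeRepo(false))      // git repo would be opened
}
```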
if ctx.IsSigned { - if !ctx.User.IsActive && setting.Service.RegisterEmailConfirm { + if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm { ctx.Data["Title"] = ctx.Tr("auth.active_your_account") ctx.HTML(http.StatusOK, "user/auth/activate") return } - if !ctx.User.IsActive || ctx.User.ProhibitLogin { - log.Info("Failed authentication attempt for %s from %s", ctx.User.Name, ctx.RemoteAddr()) + if !ctx.Doer.IsActive || ctx.Doer.ProhibitLogin { + log.Info("Failed authentication attempt for %s from %s", ctx.Doer.Name, ctx.RemoteAddr()) ctx.Data["Title"] = ctx.Tr("auth.prohibit_login") ctx.HTML(http.StatusOK, "user/auth/prohibit_login") return } - if ctx.User.MustChangePassword { + if ctx.Doer.MustChangePassword { if ctx.Req.URL.Path != "/user/settings/change_password" { ctx.Data["Title"] = ctx.Tr("auth.must_change_password") ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/change_password" @@ -63,7 +63,7 @@ func Toggle(options *ToggleOptions) func(ctx *Context) { } if !options.SignOutRequired && !options.DisableCSRF && ctx.Req.Method == "POST" { - Validate(ctx, ctx.csrf) + ctx.csrf.Validate(ctx) if ctx.Written() { return } @@ -76,7 +76,7 @@ func Toggle(options *ToggleOptions) func(ctx *Context) { } ctx.Redirect(setting.AppSubURL + "/user/login") return - } else if !ctx.User.IsActive && setting.Service.RegisterEmailConfirm { + } else if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm { ctx.Data["Title"] = ctx.Tr("auth.active_your_account") ctx.HTML(http.StatusOK, "user/auth/activate") return @@ -94,7 +94,7 @@ func Toggle(options *ToggleOptions) func(ctx *Context) { } if options.AdminRequired { - if !ctx.User.IsAdmin { + if !ctx.Doer.IsAdmin { ctx.Error(http.StatusForbidden) return } @@ -108,15 +108,15 @@ func ToggleAPI(options *ToggleOptions) func(ctx *APIContext) { return func(ctx *APIContext) { // Check prohibit login users. if ctx.IsSigned { - if !ctx.User.IsActive && setting.Service.RegisterEmailConfirm { + if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm { ctx.Data["Title"] = ctx.Tr("auth.active_your_account") ctx.JSON(http.StatusForbidden, map[string]string{ "message": "This account is not activated.", }) return } - if !ctx.User.IsActive || ctx.User.ProhibitLogin { - log.Info("Failed authentication attempt for %s from %s", ctx.User.Name, ctx.RemoteAddr()) + if !ctx.Doer.IsActive || ctx.Doer.ProhibitLogin { + log.Info("Failed authentication attempt for %s from %s", ctx.Doer.Name, ctx.RemoteAddr()) ctx.Data["Title"] = ctx.Tr("auth.prohibit_login") ctx.JSON(http.StatusForbidden, map[string]string{ "message": "This account is prohibited from signing in, please contact your site administrator.", @@ -124,7 +124,7 @@ func ToggleAPI(options *ToggleOptions) func(ctx *APIContext) { return } - if ctx.User.MustChangePassword { + if ctx.Doer.MustChangePassword { ctx.JSON(http.StatusForbidden, map[string]string{ "message": "You must change your password. 
Change it at: " + setting.AppURL + "/user/change_password", }) @@ -145,7 +145,7 @@ func ToggleAPI(options *ToggleOptions) func(ctx *APIContext) { "message": "Only signed in user is allowed to call APIs.", }) return - } else if !ctx.User.IsActive && setting.Service.RegisterEmailConfirm { + } else if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm { ctx.Data["Title"] = ctx.Tr("auth.active_your_account") ctx.HTML(http.StatusOK, "user/auth/activate") return @@ -154,7 +154,7 @@ func ToggleAPI(options *ToggleOptions) func(ctx *APIContext) { if skip, ok := ctx.Data["SkipLocalTwoFA"]; ok && skip.(bool) { return // Skip 2FA } - twofa, err := auth.GetTwoFactorByUID(ctx.User.ID) + twofa, err := auth.GetTwoFactorByUID(ctx.Doer.ID) if err != nil { if auth.IsErrTwoFactorNotEnrolled(err) { return // No 2FA enrollment for this user @@ -178,7 +178,7 @@ func ToggleAPI(options *ToggleOptions) func(ctx *APIContext) { } if options.AdminRequired { - if !ctx.User.IsAdmin { + if !ctx.Doer.IsAdmin { ctx.JSON(http.StatusForbidden, map[string]string{ "message": "You have no permission to request for this.", }) diff --git a/modules/context/context.go b/modules/context/context.go index 6aeeb9e694..dcc43973ca 100644 --- a/modules/context/context.go +++ b/modules/context/context.go @@ -10,6 +10,7 @@ import ( "crypto/sha256" "encoding/hex" "errors" + "fmt" "html" "html/template" "io" @@ -21,6 +22,7 @@ import ( "strings" "time" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" @@ -31,13 +33,13 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/services/auth" "gitea.com/go-chi/cache" "gitea.com/go-chi/session" chi "github.com/go-chi/chi/v5" - "github.com/unknwon/com" "github.com/unrolled/render" "golang.org/x/crypto/pbkdf2" ) @@ -57,18 +59,30 @@ type Context struct { Render Render translation.Locale Cache cache.Cache - csrf CSRF + csrf CSRFProtector Flash *middleware.Flash Session session.Store Link string // current request URL EscapedLink string - User *user_model.User + Doer *user_model.User IsSigned bool IsBasicAuth bool - Repo *Repository - Org *Organization + ContextUser *user_model.User + Repo *Repository + Org *Organization + Package *Package +} + +// Close frees all resources hold by Context +func (ctx *Context) Close() error { + var err error + if ctx.Req != nil && ctx.Req.MultipartForm != nil { + err = ctx.Req.MultipartForm.RemoveAll() // remove the temp files buffered to tmp directory + } + // TODO: close opened repo, and more + return err } // TrHTMLEscapeArgs runs Tr but pre-escapes all arguments with html.EscapeString. @@ -88,7 +102,7 @@ func (ctx *Context) GetData() map[string]interface{} { // IsUserSiteAdmin returns true if current user is a site admin func (ctx *Context) IsUserSiteAdmin() bool { - return ctx.IsSigned && ctx.User.IsAdmin + return ctx.IsSigned && ctx.Doer.IsAdmin } // IsUserRepoOwner returns true if current user owns current repo @@ -139,7 +153,7 @@ func RedirectToUser(ctx *Context, userName string, redirectUserID int64) { if ctx.Req.URL.RawQuery != "" { redirectPath += "?" 
+ ctx.Req.URL.RawQuery } - ctx.Redirect(path.Join(setting.AppSubURL, redirectPath)) + ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusTemporaryRedirect) } // HasAPIError returns true if error occurs in form validation. @@ -181,6 +195,12 @@ func (ctx *Context) RedirectToFirst(location ...string) { continue } + // Unfortunately browsers consider a redirect Location with preceding "//" and "/\" as meaning redirect to "http(s)://REST_OF_PATH" + // Therefore we should ignore these redirect locations to prevent open redirects + if len(loc) > 1 && loc[0] == '/' && (loc[1] == '/' || loc[1] == '\\') { + continue + } + u, err := url.Parse(loc) if err != nil || ((u.Scheme != "" || u.Host != "") && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) { continue @@ -215,7 +235,7 @@ func (ctx *Context) HTML(status int, name base.TplName) { // RenderToString renders the template content to a string func (ctx *Context) RenderToString(name base.TplName, data map[string]interface{}) (string, error) { var buf strings.Builder - err := ctx.Render.HTML(&buf, 200, string(name), data) + err := ctx.Render.HTML(&buf, http.StatusOK, string(name), data) return buf.String(), err } @@ -269,7 +289,7 @@ func (ctx *Context) ServerError(logMsg string, logErr error) { func (ctx *Context) serverErrorInternal(logMsg string, logErr error) { if logErr != nil { log.ErrorWithSkip(2, "%s: %v", logMsg, logErr) - if errors.Is(logErr, &net.OpError{}) { + if _, ok := logErr.(*net.OpError); ok || errors.Is(logErr, &net.OpError{}) { // This is an error within the underlying connection // and further rendering will not work so just return return @@ -324,6 +344,18 @@ func (ctx *Context) RespHeader() http.Header { return ctx.Resp.Header() } +// SetServeHeaders sets necessary content serve headers +func (ctx *Context) SetServeHeaders(filename string) { + ctx.Resp.Header().Set("Content-Description", "File Transfer") + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+filename) + ctx.Resp.Header().Set("Content-Transfer-Encoding", "binary") + ctx.Resp.Header().Set("Expires", "0") + ctx.Resp.Header().Set("Cache-Control", "must-revalidate") + ctx.Resp.Header().Set("Pragma", "public") + ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition") +} + // ServeContent serves content to http request func (ctx *Context) ServeContent(name string, r io.ReadSeeker, params ...interface{}) { modTime := time.Now() @@ -333,14 +365,7 @@ func (ctx *Context) ServeContent(name string, r io.ReadSeeker, params ...interfa modTime = v } } - ctx.Resp.Header().Set("Content-Description", "File Transfer") - ctx.Resp.Header().Set("Content-Type", "application/octet-stream") - ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+name) - ctx.Resp.Header().Set("Content-Transfer-Encoding", "binary") - ctx.Resp.Header().Set("Expires", "0") - ctx.Resp.Header().Set("Cache-Control", "must-revalidate") - ctx.Resp.Header().Set("Pragma", "public") - ctx.Resp.Header().Set("Access-Control-Expose-Headers", "Content-Disposition") + ctx.SetServeHeaders(name) http.ServeContent(ctx.Resp, ctx.Req, name, modTime, r) } @@ -352,31 +377,41 @@ func (ctx *Context) ServeFile(file string, names ...string) { } else { name = path.Base(file) } - ctx.Resp.Header().Set("Content-Description", "File Transfer") - ctx.Resp.Header().Set("Content-Type", "application/octet-stream") - ctx.Resp.Header().Set("Content-Disposition", "attachment; 
filename="+name) - ctx.Resp.Header().Set("Content-Transfer-Encoding", "binary") - ctx.Resp.Header().Set("Expires", "0") - ctx.Resp.Header().Set("Cache-Control", "must-revalidate") - ctx.Resp.Header().Set("Pragma", "public") + ctx.SetServeHeaders(name) http.ServeFile(ctx.Resp, ctx.Req, file) } // ServeStream serves file via io stream func (ctx *Context) ServeStream(rd io.Reader, name string) { - ctx.Resp.Header().Set("Content-Description", "File Transfer") - ctx.Resp.Header().Set("Content-Type", "application/octet-stream") - ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+name) - ctx.Resp.Header().Set("Content-Transfer-Encoding", "binary") - ctx.Resp.Header().Set("Expires", "0") - ctx.Resp.Header().Set("Cache-Control", "must-revalidate") - ctx.Resp.Header().Set("Pragma", "public") + ctx.SetServeHeaders(name) _, err := io.Copy(ctx.Resp, rd) if err != nil { ctx.ServerError("Download file failed", err) } } +// UploadStream returns the request body or the first form file +// Only form files need to get closed. +func (ctx *Context) UploadStream() (rd io.ReadCloser, needToClose bool, err error) { + contentType := strings.ToLower(ctx.Req.Header.Get("Content-Type")) + if strings.HasPrefix(contentType, "application/x-www-form-urlencoded") || strings.HasPrefix(contentType, "multipart/form-data") { + if err := ctx.Req.ParseMultipartForm(32 << 20); err != nil { + return nil, false, err + } + if ctx.Req.MultipartForm.File == nil { + return nil, false, http.ErrMissingFile + } + for _, files := range ctx.Req.MultipartForm.File { + if len(files) > 0 { + r, err := files[0].Open() + return r, true, err + } + } + return nil, false, http.ErrMissingFile + } + return ctx.Req.Body, false, nil +} + // Error returned an error to web browser func (ctx *Context) Error(status int, contents ...string) { v := http.StatusText(status) @@ -397,7 +432,7 @@ func (ctx *Context) JSON(status int, content interface{}) { // Redirect redirects the request func (ctx *Context) Redirect(location string, status ...int) { - code := http.StatusFound + code := http.StatusSeeOther if len(status) == 1 { code = status[0] } @@ -452,7 +487,7 @@ func (ctx *Context) CookieDecrypt(secret, val string) (string, bool) { } key := pbkdf2.Key([]byte(secret), []byte(secret), 1000, 16, sha256.New) - text, err = com.AESGCMDecrypt(key, text) + text, err = util.AESGCMDecrypt(key, text) return string(text), err == nil } @@ -466,7 +501,7 @@ func (ctx *Context) SetSuperSecureCookie(secret, name, value string, expiry int) // CookieEncrypt encrypts a given value using the provided secret func (ctx *Context) CookieEncrypt(secret, value string) string { key := pbkdf2.Key([]byte(secret), []byte(secret), 1000, 16, sha256.New) - text, err := com.AESGCMEncrypt(key, []byte(value)) + text, err := util.AESGCMEncrypt(key, []byte(value)) if err != nil { panic("error encrypting cookie: " + err.Error()) } @@ -554,6 +589,22 @@ func (ctx *Context) Value(key interface{}) interface{} { return ctx.Req.Context().Value(key) } +// SetTotalCountHeader set "X-Total-Count" header +func (ctx *Context) SetTotalCountHeader(total int64) { + ctx.RespHeader().Set("X-Total-Count", fmt.Sprint(total)) + ctx.AppendAccessControlExposeHeaders("X-Total-Count") +} + +// AppendAccessControlExposeHeaders append headers by name to "Access-Control-Expose-Headers" header +func (ctx *Context) AppendAccessControlExposeHeaders(names ...string) { + val := ctx.RespHeader().Get("Access-Control-Expose-Headers") + if len(val) != 0 { + ctx.RespHeader().Set("Access-Control-Expose-Headers", 
fmt.Sprintf("%s, %s", val, strings.Join(names, ", "))) + } else { + ctx.RespHeader().Set("Access-Control-Expose-Headers", strings.Join(names, ", ")) + } +} + // Handler represents a custom handler type Handler func(*Context) @@ -574,10 +625,10 @@ func GetContext(req *http.Request) *Context { // GetContextUser returns context user func GetContextUser(req *http.Request) *user_model.User { if apiContext, ok := req.Context().Value(apiContextKey).(*APIContext); ok { - return apiContext.User + return apiContext.Doer } if ctx, ok := req.Context().Value(contextKey).(*Context); ok { - return ctx.User + return ctx.Doer } return nil } @@ -599,18 +650,18 @@ func getCsrfOpts() CsrfOptions { // Auth converts auth.Auth as a middleware func Auth(authMethod auth.Method) func(*Context) { return func(ctx *Context) { - ctx.User = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) - if ctx.User != nil { - if ctx.Locale.Language() != ctx.User.Language { + ctx.Doer = authMethod.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) + if ctx.Doer != nil { + if ctx.Locale.Language() != ctx.Doer.Language { ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req) } ctx.IsBasicAuth = ctx.Data["AuthedMethod"].(string) == auth.BasicMethodName ctx.IsSigned = true ctx.Data["IsSigned"] = ctx.IsSigned - ctx.Data["SignedUser"] = ctx.User - ctx.Data["SignedUserID"] = ctx.User.ID - ctx.Data["SignedUserName"] = ctx.User.Name - ctx.Data["IsAdmin"] = ctx.User.IsAdmin + ctx.Data["SignedUser"] = ctx.Doer + ctx.Data["SignedUserID"] = ctx.Doer.ID + ctx.Data["SignedUserName"] = ctx.Doer.Name + ctx.Data["IsAdmin"] = ctx.Doer.IsAdmin } else { ctx.Data["SignedUserID"] = int64(0) ctx.Data["SignedUserName"] = "" @@ -625,7 +676,9 @@ func Auth(authMethod auth.Method) func(*Context) { func Contexter() func(next http.Handler) http.Handler { rnd := templates.HTMLRenderer() csrfOpts := getCsrfOpts() - + if !setting.IsProd { + CsrfTokenRegenerationInterval = 5 * time.Second // in dev, re-generate the tokens more aggressively for debug purpose + } return func(next http.Handler) http.Handler { return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { locale := middleware.Locale(resp, req) @@ -650,13 +703,15 @@ func Contexter() func(next http.Handler) http.Handler { "RunModeIsProd": setting.IsProd, }, } + defer ctx.Close() + // PageData is passed by reference, and it will be rendered to `window.config.pageData` in `head.tmpl` for JavaScript modules ctx.PageData = map[string]interface{}{} ctx.Data["PageData"] = ctx.PageData ctx.Data["Context"] = &ctx ctx.Req = WithContext(req, &ctx) - ctx.csrf = Csrfer(csrfOpts, &ctx) + ctx.csrf = PrepareCSRFProtector(csrfOpts, &ctx) // Get flash. flashCookie := ctx.GetCookie("macaron_flash") @@ -714,7 +769,7 @@ func Contexter() func(next http.Handler) http.Handler { ctx.Resp.Header().Set(`X-Frame-Options`, setting.CORSConfig.XFrameOptions) - ctx.Data["CsrfToken"] = html.EscapeString(ctx.csrf.GetToken()) + ctx.Data["CsrfToken"] = ctx.csrf.GetToken() ctx.Data["CsrfTokenHtml"] = template.HTML(``) // FIXME: do we really always need these setting? 
There should be someway to have to avoid having to always set these @@ -757,3 +812,21 @@ func Contexter() func(next http.Handler) http.Handler { }) } } + +// SearchOrderByMap represents all possible search order +var SearchOrderByMap = map[string]map[string]db.SearchOrderBy{ + "asc": { + "alpha": db.SearchOrderByAlphabetically, + "created": db.SearchOrderByOldest, + "updated": db.SearchOrderByLeastUpdated, + "size": db.SearchOrderBySize, + "id": db.SearchOrderByID, + }, + "desc": { + "alpha": db.SearchOrderByAlphabeticallyReverse, + "created": db.SearchOrderByNewest, + "updated": db.SearchOrderByRecentUpdated, + "size": db.SearchOrderBySizeReverse, + "id": db.SearchOrderByIDReverse, + }, +} diff --git a/modules/context/csrf.go b/modules/context/csrf.go index 99c223c884..df775048cb 100644 --- a/modules/context/csrf.go +++ b/modules/context/csrf.go @@ -19,38 +19,31 @@ package context import ( + "encoding/base32" + "fmt" "net/http" + "strconv" "time" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/middleware" - - "github.com/unknwon/com" ) -// CSRF represents a CSRF service and is used to get the current token and validate a suspect token. -type CSRF interface { - // Return HTTP header to search for token. +// CSRFProtector represents a CSRF protector and is used to get the current token and validate the token. +type CSRFProtector interface { + // GetHeaderName returns HTTP header to search for token. GetHeaderName() string - // Return form value to search for token. + // GetFormName returns form value to search for token. GetFormName() string - // Return cookie name to search for token. - GetCookieName() string - // Return cookie path - GetCookiePath() string - // Return the flag value used for the csrf token. - GetCookieHTTPOnly() bool - // Return cookie domain - GetCookieDomain() string - // Return the token. + // GetToken returns the token. GetToken() string - // Validate by token. - ValidToken(t string) bool - // Error replies to the request with a custom function when ValidToken fails. - Error(w http.ResponseWriter) + // Validate validates the token in http context. + Validate(ctx *Context) } -type csrf struct { +type csrfProtector struct { // Header name value for setting and getting csrf token. Header string // Form name value for setting and getting csrf token. @@ -69,56 +62,24 @@ type csrf struct { ID string // Secret used along with the unique id above to generate the Token. Secret string - // ErrorFunc is the custom function that replies to the request when ValidToken fails. - ErrorFunc func(w http.ResponseWriter) } // GetHeaderName returns the name of the HTTP header for csrf token. -func (c *csrf) GetHeaderName() string { +func (c *csrfProtector) GetHeaderName() string { return c.Header } // GetFormName returns the name of the form value for csrf token. -func (c *csrf) GetFormName() string { +func (c *csrfProtector) GetFormName() string { return c.Form } -// GetCookieName returns the name of the cookie for csrf token. -func (c *csrf) GetCookieName() string { - return c.Cookie -} - -// GetCookiePath returns the path of the cookie for csrf token. -func (c *csrf) GetCookiePath() string { - return c.CookiePath -} - -// GetCookieHTTPOnly returns the flag value used for the csrf token. -func (c *csrf) GetCookieHTTPOnly() bool { - return c.CookieHTTPOnly -} - -// GetCookieDomain returns the flag value used for the csrf token. 
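Back in the context.go hunk a little further up, RedirectToFirst now refuses redirect locations that begin with "//" or "/\", which browsers interpret as scheme-relative URLs and would turn into an open redirect. A dependency-free sketch of that guard follows; the helper name is hypothetical.

```go
package main

import "fmt"

// isUnsafeRedirect reproduces the check added to RedirectToFirst: a location
// beginning with "//" or "/\" would be treated by browsers as
// "http(s)://REST_OF_PATH", i.e. an open redirect.
func isUnsafeRedirect(loc string) bool {
	return len(loc) > 1 && loc[0] == '/' && (loc[1] == '/' || loc[1] == '\\')
}

func main() {
	for _, loc := range []string{"/user/repo", "//evil.example", `/\evil.example`} {
		fmt.Printf("%-18q unsafe=%v\n", loc, isUnsafeRedirect(loc))
	}
}
```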
-func (c *csrf) GetCookieDomain() string { - return c.CookieDomain -} - // GetToken returns the current token. This is typically used // to populate a hidden form in an HTML template. -func (c *csrf) GetToken() string { +func (c *csrfProtector) GetToken() string { return c.Token } -// ValidToken validates the passed token against the existing Secret and ID. -func (c *csrf) ValidToken(t string) bool { - return ValidToken(t, c.Secret, c.ID, "POST") -} - -// Error replies to the request when ValidToken fails. -func (c *csrf) Error(w http.ResponseWriter) { - c.ErrorFunc(w) -} - // CsrfOptions maintains options to manage behavior of Generate. type CsrfOptions struct { // The global secret value used to generate Tokens. @@ -140,7 +101,7 @@ type CsrfOptions struct { SessionKey string // oldSessionKey saves old value corresponding to SessionKey. oldSessionKey string - // If true, send token via X-CSRFToken header. + // If true, send token via X-Csrf-Token header. SetHeader bool // If true, send token via _csrf cookie. SetCookie bool @@ -148,52 +109,43 @@ type CsrfOptions struct { Secure bool // Disallow Origin appear in request header. Origin bool - // The function called when Validate fails. - ErrorFunc func(w http.ResponseWriter) - // Cookie life time. Default is 0 + // Cookie lifetime. Default is 0 CookieLifeTime int } -func prepareOptions(options []CsrfOptions) CsrfOptions { - var opt CsrfOptions - if len(options) > 0 { - opt = options[0] +func prepareDefaultCsrfOptions(opt CsrfOptions) CsrfOptions { + if opt.Secret == "" { + randBytes, err := util.CryptoRandomBytes(8) + if err != nil { + // this panic can be handled by the recover() in http handlers + panic(fmt.Errorf("failed to generate random bytes: %w", err)) + } + opt.Secret = base32.StdEncoding.EncodeToString(randBytes) } - - // Defaults. - if len(opt.Secret) == 0 { - opt.Secret = string(com.RandomCreateBytes(10)) + if opt.Header == "" { + opt.Header = "X-Csrf-Token" } - if len(opt.Header) == 0 { - opt.Header = "X-CSRFToken" - } - if len(opt.Form) == 0 { + if opt.Form == "" { opt.Form = "_csrf" } - if len(opt.Cookie) == 0 { + if opt.Cookie == "" { opt.Cookie = "_csrf" } - if len(opt.CookiePath) == 0 { + if opt.CookiePath == "" { opt.CookiePath = "/" } - if len(opt.SessionKey) == 0 { + if opt.SessionKey == "" { opt.SessionKey = "uid" } opt.oldSessionKey = "_old_" + opt.SessionKey - if opt.ErrorFunc == nil { - opt.ErrorFunc = func(w http.ResponseWriter) { - http.Error(w, "Invalid csrf token.", http.StatusBadRequest) - } - } - return opt } -// Csrfer maps CSRF to each request. If this request is a Get request, it will generate a new token. +// PrepareCSRFProtector returns a CSRFProtector to be used for every request. // Additionally, depending on options set, generated tokens will be sent via Header and/or Cookie. 
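prepareDefaultCsrfOptions above falls back to a randomly generated secret (util.CryptoRandomBytes plus base32) when none is configured. The sketch below shows the same idea using only the standard library, so it is an approximation rather than Gitea's exact helper.

```go
package main

import (
	"crypto/rand"
	"encoding/base32"
	"fmt"
)

// randomSecret sketches what prepareDefaultCsrfOptions does when no secret is
// configured: read n crypto-random bytes and base32-encode them. Gitea calls
// its util.CryptoRandomBytes helper; crypto/rand keeps this self-contained.
func randomSecret(n int) (string, error) {
	buf := make([]byte, n)
	if _, err := rand.Read(buf); err != nil {
		return "", err
	}
	return base32.StdEncoding.EncodeToString(buf), nil
}

func main() {
	secret, err := randomSecret(8)
	if err != nil {
		panic(err)
	}
	fmt.Println("generated CSRF secret:", secret)
}
```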
-func Csrfer(opt CsrfOptions, ctx *Context) CSRF { - opt = prepareOptions([]CsrfOptions{opt}) - x := &csrf{ +func PrepareCSRFProtector(opt CsrfOptions, ctx *Context) CSRFProtector { + opt = prepareDefaultCsrfOptions(opt) + x := &csrfProtector{ Secret: opt.Secret, Header: opt.Header, Form: opt.Form, @@ -201,7 +153,6 @@ func Csrfer(opt CsrfOptions, ctx *Context) CSRF { CookieDomain: opt.CookieDomain, CookiePath: opt.CookiePath, CookieHTTPOnly: opt.CookieHTTPOnly, - ErrorFunc: opt.ErrorFunc, } if opt.Origin && len(ctx.Req.Header.Get("Origin")) > 0 { @@ -209,33 +160,43 @@ func Csrfer(opt CsrfOptions, ctx *Context) CSRF { } x.ID = "0" - uid := ctx.Session.Get(opt.SessionKey) - if uid != nil { - x.ID = com.ToStr(uid) + uidAny := ctx.Session.Get(opt.SessionKey) + if uidAny != nil { + switch uidVal := uidAny.(type) { + case string: + x.ID = uidVal + case int64: + x.ID = strconv.FormatInt(uidVal, 10) + default: + log.Error("invalid uid type in session: %T", uidAny) + } } - needsNew := false oldUID := ctx.Session.Get(opt.oldSessionKey) - if oldUID == nil || oldUID.(string) != x.ID { - needsNew = true + uidChanged := oldUID == nil || oldUID.(string) != x.ID + cookieToken := ctx.GetCookie(opt.Cookie) + + needsNew := true + if uidChanged { _ = ctx.Session.Set(opt.oldSessionKey, x.ID) - } else { - // If cookie present, map existing token, else generate a new one. - if val := ctx.GetCookie(opt.Cookie); len(val) > 0 { - // FIXME: test coverage. - x.Token = val - } else { - needsNew = true + } else if cookieToken != "" { + // If cookie token presents, re-use existing unexpired token, else generate a new one. + if issueTime, ok := ParseCsrfToken(cookieToken); ok { + dur := time.Since(issueTime) // issueTime is not a monotonic-clock, the server time may change a lot to an early time. + if dur >= -CsrfTokenRegenerationInterval && dur <= CsrfTokenRegenerationInterval { + x.Token = cookieToken + needsNew = false + } } } if needsNew { // FIXME: actionId. - x.Token = GenerateToken(x.Secret, x.ID, "POST") + x.Token = GenerateCsrfToken(x.Secret, x.ID, "POST", time.Now()) if opt.SetCookie { var expires interface{} if opt.CookieLifeTime == 0 { - expires = time.Now().AddDate(0, 0, 1) + expires = time.Now().Add(CsrfTokenTimeout) } middleware.SetCookie(ctx.Resp, opt.Cookie, x.Token, opt.CookieLifeTime, @@ -255,47 +216,31 @@ func Csrfer(opt CsrfOptions, ctx *Context) CSRF { return x } -// Validate should be used as a per route middleware. It attempts to get a token from a "X-CSRFToken" -// HTTP header and then a "_csrf" form value. If one of these is found, the token will be validated -// using ValidToken. If this validation fails, custom Error is sent in the reply. -// If neither a header or form value is found, http.StatusBadRequest is sent. -func Validate(ctx *Context, x CSRF) { - if token := ctx.Req.Header.Get(x.GetHeaderName()); len(token) > 0 { - if !x.ValidToken(token) { - // Delete the cookie - middleware.SetCookie(ctx.Resp, x.GetCookieName(), "", - -1, - x.GetCookiePath(), - x.GetCookieDomain()) // FIXME: Do we need to set the Secure, httpOnly and SameSite values too? - if middleware.IsAPIPath(ctx.Req) { - x.Error(ctx.Resp) - return - } +func (c *csrfProtector) validateToken(ctx *Context, token string) { + if !ValidCsrfToken(token, c.Secret, c.ID, "POST", time.Now()) { + middleware.DeleteCSRFCookie(ctx.Resp) + if middleware.IsAPIPath(ctx.Req) { + // currently, there should be no access to the APIPath with CSRF token. because templates shouldn't use the `/api/` endpoints. 
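One subtle point in PrepareCSRFProtector above: a token taken from the cookie is only re-used while its issue time lies within CsrfTokenRegenerationInterval of the current time, in either direction, because the issue time is a wall-clock value and the server clock may have moved. A small sketch of that decision, with illustrative names:

```go
package main

import (
	"fmt"
	"time"
)

// shouldReuseToken mirrors the reuse check in PrepareCSRFProtector: keep the
// cookie token only while its issue time is within the regeneration interval
// of "now", in either direction, to tolerate clock changes on the server.
func shouldReuseToken(issueTime, now time.Time, interval time.Duration) bool {
	dur := now.Sub(issueTime)
	return dur >= -interval && dur <= interval
}

func main() {
	now := time.Now()
	interval := 10 * time.Minute // the default CsrfTokenRegenerationInterval
	fmt.Println(shouldReuseToken(now.Add(-5*time.Minute), now, interval))  // true: recent enough
	fmt.Println(shouldReuseToken(now.Add(-15*time.Minute), now, interval)) // false: regenerate
	fmt.Println(shouldReuseToken(now.Add(5*time.Minute), now, interval))   // true: clock skew tolerated
}
```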
+ http.Error(ctx.Resp, "Invalid CSRF token.", http.StatusBadRequest) + } else { ctx.Flash.Error(ctx.Tr("error.invalid_csrf")) ctx.Redirect(setting.AppSubURL + "/") } - return } - if token := ctx.Req.FormValue(x.GetFormName()); len(token) > 0 { - if !x.ValidToken(token) { - // Delete the cookie - middleware.SetCookie(ctx.Resp, x.GetCookieName(), "", - -1, - x.GetCookiePath(), - x.GetCookieDomain()) // FIXME: Do we need to set the Secure, httpOnly and SameSite values too? - if middleware.IsAPIPath(ctx.Req) { - x.Error(ctx.Resp) - return - } - ctx.Flash.Error(ctx.Tr("error.invalid_csrf")) - ctx.Redirect(setting.AppSubURL + "/") - } - return - } - if middleware.IsAPIPath(ctx.Req) { - http.Error(ctx.Resp, "Bad Request: no CSRF token present", http.StatusBadRequest) - return - } - ctx.Flash.Error(ctx.Tr("error.missing_csrf")) - ctx.Redirect(setting.AppSubURL + "/") +} + +// Validate should be used as a per route middleware. It attempts to get a token from an "X-Csrf-Token" +// HTTP header and then a "_csrf" form value. If one of these is found, the token will be validated. +// If this validation fails, custom Error is sent in the reply. +// If neither a header nor form value is found, http.StatusBadRequest is sent. +func (c *csrfProtector) Validate(ctx *Context) { + if token := ctx.Req.Header.Get(c.GetHeaderName()); token != "" { + c.validateToken(ctx, token) + return + } + if token := ctx.Req.FormValue(c.GetFormName()); token != "" { + c.validateToken(ctx, token) + return + } + c.validateToken(ctx, "") // no csrf token, use an empty token to respond error } diff --git a/modules/context/org.go b/modules/context/org.go index 824f22b2f3..9f4ce485e5 100644 --- a/modules/context/org.go +++ b/modules/context/org.go @@ -8,9 +8,10 @@ package context import ( "strings" - "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/setting" ) // Organization contains organization context @@ -19,12 +20,12 @@ type Organization struct { IsMember bool IsTeamMember bool // Is member of team. IsTeamAdmin bool // In owner team or team that has admin permission level. - Organization *models.Organization + Organization *organization.Organization OrgLink string CanCreateOrgRepo bool - Team *models.Team - Teams []*models.Team + Team *organization.Team + Teams []*organization.Team } // HandleOrgAssignment handles organization assignment @@ -51,9 +52,9 @@ func HandleOrgAssignment(ctx *Context, args ...bool) { orgName := ctx.Params(":org") var err error - ctx.Org.Organization, err = models.GetOrgByName(orgName) + ctx.Org.Organization, err = organization.GetOrgByName(orgName) if err != nil { - if user_model.IsErrUserNotExist(err) { + if organization.IsErrOrgNotExist(err) { redirectUserID, err := user_model.LookupUserRedirect(orgName) if err == nil { RedirectToUser(ctx, orgName, redirectUserID) @@ -68,23 +69,18 @@ func HandleOrgAssignment(ctx *Context, args ...bool) { return } org := ctx.Org.Organization + ctx.ContextUser = org.AsUser() ctx.Data["Org"] = org - teams, err := org.LoadTeams() - if err != nil { - ctx.ServerError("LoadTeams", err) - } - ctx.Data["OrgTeams"] = teams - // Admin has super access. 
- if ctx.IsSigned && ctx.User.IsAdmin { + if ctx.IsSigned && ctx.Doer.IsAdmin { ctx.Org.IsOwner = true ctx.Org.IsMember = true ctx.Org.IsTeamMember = true ctx.Org.IsTeamAdmin = true ctx.Org.CanCreateOrgRepo = true } else if ctx.IsSigned { - ctx.Org.IsOwner, err = org.IsOwnedBy(ctx.User.ID) + ctx.Org.IsOwner, err = org.IsOwnedBy(ctx.Doer.ID) if err != nil { ctx.ServerError("IsOwnedBy", err) return @@ -96,12 +92,12 @@ func HandleOrgAssignment(ctx *Context, args ...bool) { ctx.Org.IsTeamAdmin = true ctx.Org.CanCreateOrgRepo = true } else { - ctx.Org.IsMember, err = org.IsOrgMember(ctx.User.ID) + ctx.Org.IsMember, err = org.IsOrgMember(ctx.Doer.ID) if err != nil { ctx.ServerError("IsOrgMember", err) return } - ctx.Org.CanCreateOrgRepo, err = org.CanCreateOrgRepo(ctx.User.ID) + ctx.Org.CanCreateOrgRepo, err = org.CanCreateOrgRepo(ctx.Doer.ID) if err != nil { ctx.ServerError("CanCreateOrgRepo", err) return @@ -118,8 +114,9 @@ func HandleOrgAssignment(ctx *Context, args ...bool) { } ctx.Data["IsOrganizationOwner"] = ctx.Org.IsOwner ctx.Data["IsOrganizationMember"] = ctx.Org.IsMember + ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled ctx.Data["IsPublicMember"] = func(uid int64) bool { - is, _ := models.IsPublicMembership(ctx.Org.Organization.ID, uid) + is, _ := organization.IsPublicMembership(ctx.Org.Organization.ID, uid) return is } ctx.Data["CanCreateOrgRepo"] = ctx.Org.CanCreateOrgRepo @@ -133,7 +130,7 @@ func HandleOrgAssignment(ctx *Context, args ...bool) { if ctx.Org.IsOwner { shouldSeeAllTeams = true } else { - teams, err := org.GetUserTeams(ctx.User.ID) + teams, err := org.GetUserTeams(ctx.Doer.ID) if err != nil { ctx.ServerError("GetUserTeams", err) return @@ -152,7 +149,7 @@ func HandleOrgAssignment(ctx *Context, args ...bool) { return } } else { - ctx.Org.Teams, err = org.GetUserTeams(ctx.User.ID) + ctx.Org.Teams, err = org.GetUserTeams(ctx.Doer.ID) if err != nil { ctx.ServerError("GetUserTeams", err) return diff --git a/modules/context/package.go b/modules/context/package.go new file mode 100644 index 0000000000..cb352fb18a --- /dev/null +++ b/modules/context/package.go @@ -0,0 +1,110 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package context + +import ( + "fmt" + "net/http" + + "code.gitea.io/gitea/models/organization" + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/models/perm" + user_model "code.gitea.io/gitea/models/user" +) + +// Package contains owner, access mode and optional the package descriptor +type Package struct { + Owner *user_model.User + AccessMode perm.AccessMode + Descriptor *packages_model.PackageDescriptor +} + +// PackageAssignment returns a middleware to handle Context.Package assignment +func PackageAssignment() func(ctx *Context) { + return func(ctx *Context) { + packageAssignment(ctx, func(status int, title string, obj interface{}) { + err, ok := obj.(error) + if !ok { + err = fmt.Errorf("%s", obj) + } + if status == http.StatusNotFound { + ctx.NotFound(title, err) + } else { + ctx.ServerError(title, err) + } + }) + } +} + +// PackageAssignmentAPI returns a middleware to handle Context.Package assignment +func PackageAssignmentAPI() func(ctx *APIContext) { + return func(ctx *APIContext) { + packageAssignment(ctx.Context, ctx.Error) + } +} + +func packageAssignment(ctx *Context, errCb func(int, string, interface{})) { + ctx.Package = &Package{ + Owner: ctx.ContextUser, + } + + if ctx.Doer != nil && ctx.Doer.ID == ctx.ContextUser.ID { + ctx.Package.AccessMode = perm.AccessModeOwner + } else { + if ctx.Package.Owner.IsOrganization() { + if organization.HasOrgOrUserVisible(ctx, ctx.Package.Owner, ctx.Doer) { + ctx.Package.AccessMode = perm.AccessModeRead + if ctx.Doer != nil { + var err error + ctx.Package.AccessMode, err = organization.OrgFromUser(ctx.Package.Owner).GetOrgUserMaxAuthorizeLevel(ctx.Doer.ID) + if err != nil { + errCb(http.StatusInternalServerError, "GetOrgUserMaxAuthorizeLevel", err) + return + } + } + } + } else { + ctx.Package.AccessMode = perm.AccessModeRead + } + } + + packageType := ctx.Params("type") + name := ctx.Params("name") + version := ctx.Params("version") + if packageType != "" && name != "" && version != "" { + pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.Type(packageType), name, version) + if err != nil { + if err == packages_model.ErrPackageNotExist { + errCb(http.StatusNotFound, "GetVersionByNameAndVersion", err) + } else { + errCb(http.StatusInternalServerError, "GetVersionByNameAndVersion", err) + } + return + } + + ctx.Package.Descriptor, err = packages_model.GetPackageDescriptor(ctx, pv) + if err != nil { + errCb(http.StatusInternalServerError, "GetPackageDescriptor", err) + return + } + } +} + +// PackageContexter initializes a package context for a request. 
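packageAssignment above resolves the access mode in a fixed order: the owner gets owner access, organization-owned packages require the org to be visible (a signed-in user then gets their maximum authorize level in the org), and packages owned by individual users default to read. Below is a simplified, dependency-free sketch of that ladder; the types and helper are stand-ins, and error handling is omitted.

```go
package main

import "fmt"

// AccessMode is a stand-in for Gitea's perm.AccessMode; only the values the
// sketch needs are defined here.
type AccessMode int

const (
	AccessModeNone AccessMode = iota
	AccessModeRead
	AccessModeOwner
)

// resolvePackageAccess is a simplified model of packageAssignment's ladder.
func resolvePackageAccess(signedIn, isOwner, ownerIsOrg, orgVisible bool, orgLevel AccessMode) AccessMode {
	if signedIn && isOwner {
		return AccessModeOwner
	}
	if ownerIsOrg {
		if !orgVisible {
			return AccessModeNone
		}
		if signedIn {
			return orgLevel // GetOrgUserMaxAuthorizeLevel in the real code
		}
		return AccessModeRead
	}
	return AccessModeRead
}

func main() {
	fmt.Println(resolvePackageAccess(true, true, false, false, AccessModeNone))  // 2: the owner
	fmt.Println(resolvePackageAccess(false, false, true, true, AccessModeNone))  // 1: anonymous, visible org
	fmt.Println(resolvePackageAccess(false, false, true, false, AccessModeNone)) // 0: hidden org
}
```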
+func PackageContexter() func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + ctx := Context{ + Resp: NewResponse(resp), + Data: map[string]interface{}{}, + } + defer ctx.Close() + + ctx.Req = WithContext(req, &ctx) + + next.ServeHTTP(ctx.Resp, ctx.Req) + }) + } +} diff --git a/modules/context/pagination.go b/modules/context/pagination.go index 107cbf6186..c0079c2950 100644 --- a/modules/context/pagination.go +++ b/modules/context/pagination.go @@ -10,19 +10,19 @@ import ( "net/url" "strings" - "github.com/unknwon/paginater" + "code.gitea.io/gitea/modules/paginator" ) -// Pagination provides a pagination via Paginater and additional configurations for the link params used in rendering +// Pagination provides a pagination via paginator.Paginator and additional configurations for the link params used in rendering type Pagination struct { - Paginater *paginater.Paginater + Paginater *paginator.Paginator urlParams []string } // NewPagination creates a new instance of the Pagination struct func NewPagination(total, page, issueNum, numPages int) *Pagination { p := &Pagination{} - p.Paginater = paginater.New(total, page, issueNum, numPages) + p.Paginater = paginator.New(total, page, issueNum, numPages) return p } @@ -53,5 +53,6 @@ func (p *Pagination) SetDefaultParams(ctx *Context) { p.AddParam(ctx, "sort", "SortType") p.AddParam(ctx, "q", "Keyword") p.AddParam(ctx, "tab", "TabName") + // do not add any more uncommon params here! p.AddParam(ctx, "t", "queryType") } diff --git a/modules/context/permission.go b/modules/context/permission.go index 2b87aa4591..8dc3b3cd46 100644 --- a/modules/context/permission.go +++ b/modules/context/permission.go @@ -29,6 +29,16 @@ func RequireRepoWriter(unitType unit.Type) func(ctx *Context) { } } +// CanEnableEditor checks if the user is allowed to write to the branch of the repo +func CanEnableEditor() func(ctx *Context) { + return func(ctx *Context) { + if !ctx.Repo.Permission.CanWriteToBranch(ctx.Doer, ctx.Repo.BranchName) { + ctx.NotFound("CanWriteToBranch denies permission", nil) + return + } + } +} + // RequireRepoWriterOr returns a middleware for requiring repository write to one of the unit permission func RequireRepoWriterOr(unitTypes ...unit.Type) func(ctx *Context) { return func(ctx *Context) { @@ -49,7 +59,7 @@ func RequireRepoReader(unitType unit.Type) func(ctx *Context) { if ctx.IsSigned { log.Trace("Permission Denied: User %-v cannot read %-v in Repo %-v\n"+ "User in Repo has Permissions: %-+v", - ctx.User, + ctx.Doer, unitType, ctx.Repo.Repository, ctx.Repo.Permission) @@ -80,7 +90,7 @@ func RequireRepoReaderOr(unitTypes ...unit.Type) func(ctx *Context) { var args []interface{} if ctx.IsSigned { format = "Permission Denied: User %-v cannot read [" - args = append(args, ctx.User) + args = append(args, ctx.Doer) } else { format = "Permission Denied: Anonymous user cannot read [" } diff --git a/modules/context/private.go b/modules/context/private.go index 6e5ef1bd12..fdc7751227 100644 --- a/modules/context/private.go +++ b/modules/context/private.go @@ -66,6 +66,8 @@ func PrivateContexter() func(http.Handler) http.Handler { Data: map[string]interface{}{}, }, } + defer ctx.Close() + ctx.Req = WithPrivateContext(req, ctx) ctx.Data["Context"] = ctx next.ServeHTTP(ctx.Resp, ctx.Req) @@ -79,6 +81,6 @@ func PrivateContexter() func(http.Handler) http.Handler { // the underlying request has timed out from the ssh/http push func 
OverrideContext(ctx *PrivateContext) (cancel context.CancelFunc) { // We now need to override the request context as the base for our work because even if the request is cancelled we have to continue this work - ctx.Override, _, cancel = process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("PrivateContext: %s", ctx.Req.RequestURI)) + ctx.Override, _, cancel = process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), fmt.Sprintf("PrivateContext: %s", ctx.Req.RequestURI), process.RequestProcessType, true) return } diff --git a/modules/context/repo.go b/modules/context/repo.go index 355c40af8a..3dc8e51392 100644 --- a/modules/context/repo.go +++ b/modules/context/repo.go @@ -8,6 +8,7 @@ package context import ( "context" "fmt" + "html" "io" "net/http" "net/url" @@ -15,7 +16,6 @@ import ( "strings" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" unit_model "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -30,7 +30,6 @@ import ( asymkey_service "code.gitea.io/gitea/services/asymkey" "github.com/editorconfig/editorconfig-core-go/v2" - "github.com/unknwon/com" ) // IssueTemplateDirCandidates issue templates directory @@ -79,8 +78,8 @@ type Repository struct { } // CanEnableEditor returns true if repository is editable and user has proper access level. -func (r *Repository) CanEnableEditor() bool { - return r.Permission.CanWrite(unit_model.TypeCode) && r.Repository.CanEnableEditor() && r.IsViewBranch && !r.Repository.IsArchived +func (r *Repository) CanEnableEditor(user *user_model.User) bool { + return r.IsViewBranch && r.Permission.CanWriteToBranch(user, r.BranchName) && r.Repository.CanEnableEditor() && !r.Repository.IsArchived } // CanCreateBranch returns true if repository is editable and user has proper access level. @@ -124,7 +123,7 @@ func (r *Repository) CanCommitToBranch(ctx context.Context, doer *user_model.Use sign, keyID, _, err := asymkey_service.SignCRUDAction(ctx, r.Repository.RepoPath(), doer, r.Repository.RepoPath(), git.BranchPrefix+r.BranchName) - canCommit := r.CanEnableEditor() && userCanPush + canCommit := r.CanEnableEditor(doer) && userCanPush if requireSigned { canCommit = canCommit && sign } @@ -140,7 +139,7 @@ func (r *Repository) CanCommitToBranch(ctx context.Context, doer *user_model.Use return CanCommitToBranchResults{ CanCommitToBranch: canCommit, - EditorEnabled: r.CanEnableEditor(), + EditorEnabled: r.CanEnableEditor(doer), UserCanPush: userCanPush, RequireSigned: requireSigned, WillSign: sign, @@ -222,13 +221,21 @@ func (r *Repository) FileExists(path, branch string) (bool, error) { // GetEditorconfig returns the .editorconfig definition if found in the // HEAD of the default repo branch. 
-func (r *Repository) GetEditorconfig() (*editorconfig.Editorconfig, error) { +func (r *Repository) GetEditorconfig(optCommit ...*git.Commit) (*editorconfig.Editorconfig, error) { if r.GitRepo == nil { return nil, nil } - commit, err := r.GitRepo.GetBranchCommit(r.Repository.DefaultBranch) - if err != nil { - return nil, err + var ( + err error + commit *git.Commit + ) + if len(optCommit) != 0 { + commit = optCommit[0] + } else { + commit, err = r.GitRepo.GetBranchCommit(r.Repository.DefaultBranch) + if err != nil { + return nil, err + } } treeEntry, err := commit.GetTreeEntryByPath(".editorconfig") if err != nil { @@ -256,7 +263,7 @@ func RetrieveBaseRepo(ctx *Context, repo *repo_model.Repository) { } ctx.ServerError("GetBaseRepo", err) return - } else if err = repo.BaseRepo.GetOwner(db.DefaultContext); err != nil { + } else if err = repo.BaseRepo.GetOwner(ctx); err != nil { ctx.ServerError("BaseRepo.GetOwner", err) return } @@ -273,12 +280,12 @@ func RetrieveTemplateRepo(ctx *Context, repo *repo_model.Repository) { } ctx.ServerError("GetTemplateRepo", err) return - } else if err = templateRepo.GetOwner(db.DefaultContext); err != nil { + } else if err = templateRepo.GetOwner(ctx); err != nil { ctx.ServerError("TemplateRepo.GetOwner", err) return } - perm, err := models.GetUserRepoPermission(templateRepo, ctx.User) + perm, err := models.GetUserRepoPermission(ctx, templateRepo, ctx.Doer) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return @@ -309,11 +316,9 @@ func EarlyResponseForGoGetMeta(ctx *Context) { ctx.PlainText(http.StatusBadRequest, "invalid repository path") return } - ctx.PlainText(http.StatusOK, com.Expand(``, - map[string]string{ - "GoGetImport": ComposeGoGetImport(username, reponame), - "CloneLink": repo_model.ComposeHTTPSCloneURL(username, reponame), - })) + goImportContent := fmt.Sprintf("%s git %s", ComposeGoGetImport(username, reponame), repo_model.ComposeHTTPSCloneURL(username, reponame)) + htmlMeta := fmt.Sprintf(``, html.EscapeString(goImportContent)) + ctx.PlainText(http.StatusOK, htmlMeta) } // RedirectToRepo redirect to a differently-named repository @@ -336,17 +341,17 @@ func RedirectToRepo(ctx *Context, redirectRepoID int64) { if ctx.Req.URL.RawQuery != "" { redirectPath += "?" + ctx.Req.URL.RawQuery } - ctx.Redirect(path.Join(setting.AppSubURL, redirectPath)) + ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusTemporaryRedirect) } func repoAssignment(ctx *Context, repo *repo_model.Repository) { var err error - if err = repo.GetOwner(db.DefaultContext); err != nil { + if err = repo.GetOwner(ctx); err != nil { ctx.ServerError("GetOwner", err) return } - ctx.Repo.Permission, err = models.GetUserRepoPermission(repo, ctx.User) + ctx.Repo.Permission, err = models.GetUserRepoPermission(ctx, repo, ctx.Doer) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return @@ -365,15 +370,24 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) { ctx.Data["Permission"] = &ctx.Repo.Permission if repo.IsMirror { - var err error - ctx.Repo.Mirror, err = repo_model.GetMirrorByRepoID(repo.ID) + + // Check if the mirror has finsihed migrationg, only then we can + // lookup the mirror informtation the database. 
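EarlyResponseForGoGetMeta above now formats the go-import response by hand instead of going through com.Expand. The exact meta-tag template is not visible in this hunk, so the sketch below assumes the documented go-import form, "import-prefix vcs repo-root", wrapped in a meta tag with the content HTML-escaped.

```go
package main

import (
	"fmt"
	"html"
)

// goGetMeta sketches what EarlyResponseForGoGetMeta builds: the content of a
// go-import meta tag in the form "<import-prefix> git <clone-url>",
// HTML-escaped and wrapped in the tag. The tag template is an assumption
// based on the Go toolchain's documented format, not copied from the patch.
func goGetMeta(goGetImport, cloneURL string) string {
	content := fmt.Sprintf("%s git %s", goGetImport, cloneURL)
	return fmt.Sprintf(`<meta name="go-import" content="%s">`, html.EscapeString(content))
}

func main() {
	fmt.Println(goGetMeta("try.gitea.io/owner/repo", "https://try.gitea.io/owner/repo.git"))
}
```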
+ finishedMigrating, err := models.HasFinishedMigratingTask(repo.ID) if err != nil { - ctx.ServerError("GetMirrorByRepoID", err) + ctx.ServerError("HasFinishedMigratingTask", err) return } - ctx.Data["MirrorEnablePrune"] = ctx.Repo.Mirror.EnablePrune - ctx.Data["MirrorInterval"] = ctx.Repo.Mirror.Interval - ctx.Data["Mirror"] = ctx.Repo.Mirror + if finishedMigrating { + ctx.Repo.Mirror, err = repo_model.GetMirrorByRepoID(repo.ID) + if err != nil { + ctx.ServerError("GetMirrorByRepoID", err) + return + } + ctx.Data["MirrorEnablePrune"] = ctx.Repo.Mirror.EnablePrune + ctx.Data["MirrorInterval"] = ctx.Repo.Mirror.Interval + ctx.Data["Mirror"] = ctx.Repo.Mirror + } } pushMirrors, err := repo_model.GetPushMirrorsByRepoID(repo.ID) @@ -410,6 +424,12 @@ func RepoIDAssignment() func(ctx *Context) { // RepoAssignment returns a middleware to handle repository assignment func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { + if _, repoAssignmentOnce := ctx.Data["repoAssignmentExecuted"]; repoAssignmentOnce { + log.Trace("RepoAssignment was exec already, skipping second call ...") + return + } + ctx.Data["repoAssignmentExecuted"] = true + var ( owner *user_model.User err error @@ -418,10 +438,12 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { userName := ctx.Params(":username") repoName := ctx.Params(":reponame") repoName = strings.TrimSuffix(repoName, ".git") + repoName = strings.TrimSuffix(repoName, ".rss") + repoName = strings.TrimSuffix(repoName, ".atom") // Check if the user is the same as the repository owner - if ctx.IsSigned && ctx.User.LowerName == strings.ToLower(userName) { - owner = ctx.User + if ctx.IsSigned && ctx.Doer.LowerName == strings.ToLower(userName) { + owner = ctx.Doer } else { owner, err = user_model.GetUserByName(userName) if err != nil { @@ -438,8 +460,29 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { } } ctx.Repo.Owner = owner + ctx.ContextUser = owner ctx.Data["Username"] = ctx.Repo.Owner.Name + // redirect link to wiki + if strings.HasSuffix(repoName, ".wiki") { + // ctx.Req.URL.Path does not have the preceding appSubURL - any redirect must have this added + // Now we happen to know that all of our paths are: /:username/:reponame/whatever_else + originalRepoName := ctx.Params(":reponame") + redirectRepoName := strings.TrimSuffix(repoName, ".wiki") + redirectRepoName += originalRepoName[len(redirectRepoName)+5:] + redirectPath := strings.Replace( + ctx.Req.URL.EscapedPath(), + url.PathEscape(userName)+"/"+url.PathEscape(originalRepoName), + url.PathEscape(userName)+"/"+url.PathEscape(redirectRepoName)+"/wiki", + 1, + ) + if ctx.Req.URL.RawQuery != "" { + redirectPath += "?" + ctx.Req.URL.RawQuery + } + ctx.Redirect(path.Join(setting.AppSubURL, redirectPath)) + return + } + // Get repository. 
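RepoAssignment above guards against running twice by dropping a sentinel key into ctx.Data. The same once-per-request pattern in isolation, with hypothetical names:

```go
package main

import "fmt"

// runOnce sketches the guard added to RepoAssignment: a sentinel key in the
// per-request Data map ensures the (potentially expensive) assignment only
// executes once even if the middleware ends up wired into a route twice.
func runOnce(data map[string]interface{}, key string, fn func()) {
	if _, done := data[key]; done {
		fmt.Println("skipping second call ...")
		return
	}
	data[key] = true
	fn()
}

func main() {
	data := map[string]interface{}{} // stands in for ctx.Data
	assign := func() { fmt.Println("repo assignment executed") }
	runOnce(data, "repoAssignmentExecuted", assign)
	runOnce(data, "repoAssignmentExecuted", assign)
}
```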
repo, err := repo_model.GetRepositoryByName(owner.ID, repoName) if err != nil { @@ -500,14 +543,14 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { ctx.Data["CanWriteIssues"] = ctx.Repo.CanWrite(unit_model.TypeIssues) ctx.Data["CanWritePulls"] = ctx.Repo.CanWrite(unit_model.TypePullRequests) - canSignedUserFork, err := models.CanUserForkRepo(ctx.User, ctx.Repo.Repository) + canSignedUserFork, err := models.CanUserForkRepo(ctx.Doer, ctx.Repo.Repository) if err != nil { ctx.ServerError("CanUserForkRepo", err) return } ctx.Data["CanSignedUserFork"] = canSignedUserFork - userAndOrgForks, err := models.GetForksByUserAndOrgs(ctx.User, ctx.Repo.Repository) + userAndOrgForks, err := models.GetForksByUserAndOrgs(ctx, ctx.Doer, ctx.Repo.Repository) if err != nil { ctx.ServerError("GetForksByUserAndOrgs", err) return @@ -519,19 +562,26 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { // If multiple forks are available or if the user can fork to another account, but there is already a fork: open selection dialog ctx.Data["ShowForkModal"] = len(userAndOrgForks) > 1 || (canSignedUserFork && len(userAndOrgForks) > 0) - ctx.Data["DisableSSH"] = setting.SSH.Disabled - ctx.Data["ExposeAnonSSH"] = setting.SSH.ExposeAnonymous - ctx.Data["DisableHTTP"] = setting.Repository.DisableHTTPGit + ctx.Data["RepoCloneLink"] = repo.CloneLink() + + cloneButtonShowHTTPS := !setting.Repository.DisableHTTPGit + cloneButtonShowSSH := !setting.SSH.Disabled && (ctx.IsSigned || setting.SSH.ExposeAnonymous) + if !cloneButtonShowHTTPS && !cloneButtonShowSSH { + // We have to show at least one link, so we just show the HTTPS + cloneButtonShowHTTPS = true + } + ctx.Data["CloneButtonShowHTTPS"] = cloneButtonShowHTTPS + ctx.Data["CloneButtonShowSSH"] = cloneButtonShowSSH + ctx.Data["CloneButtonOriginLink"] = ctx.Data["RepoCloneLink"] // it may be rewritten to the WikiCloneLink by the router middleware + ctx.Data["RepoSearchEnabled"] = setting.Indexer.RepoIndexerEnabled if setting.Indexer.RepoIndexerEnabled { ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable() } - ctx.Data["CloneLink"] = repo.CloneLink() - ctx.Data["WikiCloneLink"] = repo.WikiCloneLink() if ctx.IsSigned { - ctx.Data["IsWatchingRepo"] = repo_model.IsWatching(ctx.User.ID, repo.ID) - ctx.Data["IsStaringRepo"] = repo_model.IsStaring(ctx.User.ID, repo.ID) + ctx.Data["IsWatchingRepo"] = repo_model.IsWatching(ctx.Doer.ID, repo.ID) + ctx.Data["IsStaringRepo"] = repo_model.IsStaring(ctx.Doer.ID, repo.ID) } if repo.IsFork { @@ -559,7 +609,7 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { return } - gitRepo, err := git.OpenRepositoryCtx(ctx, repo_model.RepoPath(userName, repoName)) + gitRepo, err := git.OpenRepository(ctx, repo_model.RepoPath(userName, repoName)) if err != nil { if strings.Contains(err.Error(), "repository does not exist") || strings.Contains(err.Error(), "no such file or directory") { log.Error("Repository %-v has a broken repository on the file system: %s Error: %v", ctx.Repo.Repository, ctx.Repo.Repository.RepoPath(), err) @@ -575,6 +625,9 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { ctx.ServerError("RepoAssignment Invalid repo "+repo_model.RepoPath(userName, repoName), err) return } + if ctx.Repo.GitRepo != nil { + ctx.Repo.GitRepo.Close() + } ctx.Repo.GitRepo = gitRepo // We opened it, we should close it @@ -631,7 +684,7 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { // People who have push access or have forked repository can propose a new pull 
request. canPush := ctx.Repo.CanWrite(unit_model.TypeCode) || - (ctx.IsSigned && repo_model.HasForkedRepo(ctx.User.ID, ctx.Repo.Repository.ID)) + (ctx.IsSigned && repo_model.HasForkedRepo(ctx.Doer.ID, ctx.Repo.Repository.ID)) canCompare := false // Pull request is allowed if this is a fork repository @@ -667,8 +720,8 @@ func RepoAssignment(ctx *Context) (cancel context.CancelFunc) { } ctx.Data["RepoTransfer"] = repoTransfer - if ctx.User != nil { - ctx.Data["CanUserAcceptTransfer"] = repoTransfer.CanUserAcceptTransfer(ctx.User) + if ctx.Doer != nil { + ctx.Data["CanUserAcceptTransfer"] = repoTransfer.CanUserAcceptTransfer(ctx.Doer) } } @@ -817,7 +870,7 @@ func RepoRefByType(refType RepoRefType, ignoreNotExistErr ...bool) func(*Context if ctx.Repo.GitRepo == nil { repoPath := repo_model.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name) - ctx.Repo.GitRepo, err = git.OpenRepositoryCtx(ctx, repoPath) + ctx.Repo.GitRepo, err = git.OpenRepository(ctx, repoPath) if err != nil { ctx.ServerError("RepoRef Invalid repo "+repoPath, err) return @@ -948,7 +1001,7 @@ func RepoRefByType(refType RepoRefType, ignoreNotExistErr ...bool) func(*Context // GitHookService checks if repository Git hooks service has been enabled. func GitHookService() func(ctx *Context) { return func(ctx *Context) { - if !ctx.User.CanEditGitHook() { + if !ctx.Doer.CanEditGitHook() { ctx.NotFound("GitHookService", nil) return } @@ -966,6 +1019,7 @@ func UnitTypes() func(ctx *Context) { ctx.Data["UnitTypeExternalWiki"] = unit_model.TypeExternalWiki ctx.Data["UnitTypeExternalTracker"] = unit_model.TypeExternalTracker ctx.Data["UnitTypeProjects"] = unit_model.TypeProjects + ctx.Data["UnitTypePackages"] = unit_model.TypePackages } } diff --git a/routers/api/v1/utils/utils.go b/modules/context/utils.go similarity index 66% rename from routers/api/v1/utils/utils.go rename to modules/context/utils.go index 7564857115..aea51cc5d6 100644 --- a/routers/api/v1/utils/utils.go +++ b/modules/context/utils.go @@ -2,20 +2,16 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package utils +package context import ( "net/url" "strings" "time" - - "code.gitea.io/gitea/models/db" - "code.gitea.io/gitea/modules/context" - "code.gitea.io/gitea/modules/convert" ) // GetQueryBeforeSince return parsed time (unix format) from URL query's before and since -func GetQueryBeforeSince(ctx *context.APIContext) (before, since int64, err error) { +func GetQueryBeforeSince(ctx *Context) (before, since int64, err error) { qCreatedBefore, err := prepareQueryArg(ctx, "before") if err != nil { return 0, 0, err @@ -53,16 +49,8 @@ func parseTime(value string) (int64, error) { } // prepareQueryArg unescape and trim a query arg -func prepareQueryArg(ctx *context.APIContext, name string) (value string, err error) { +func prepareQueryArg(ctx *Context, name string) (value string, err error) { value, err = url.PathUnescape(ctx.FormString(name)) value = strings.TrimSpace(value) return } - -// GetListOptions returns list options using the page and limit parameters -func GetListOptions(ctx *context.APIContext) db.ListOptions { - return db.ListOptions{ - Page: ctx.FormInt("page"), - PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), - } -} diff --git a/modules/context/xsrf.go b/modules/context/xsrf.go index 10e63a4180..e3ecc82f6d 100644 --- a/modules/context/xsrf.go +++ b/modules/context/xsrf.go @@ -28,69 +28,69 @@ import ( "time" ) -// Timeout represents the duration that XSRF tokens are valid. 
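The clone-button logic added to RepoAssignment further up decides which clone URLs to render: HTTPS unless HTTP git is disabled, SSH unless disabled (and only for signed-in users unless anonymous SSH is exposed), falling back to HTTPS if both would otherwise be hidden. A condensed sketch of that rule, with a stand-in function name:

```go
package main

import "fmt"

// cloneButtons mirrors the visibility rule added to RepoAssignment: if both
// protocols end up hidden, the HTTPS link is shown anyway so at least one
// clone URL is always available.
func cloneButtons(disableHTTPGit, sshDisabled, signedIn, exposeAnonSSH bool) (showHTTPS, showSSH bool) {
	showHTTPS = !disableHTTPGit
	showSSH = !sshDisabled && (signedIn || exposeAnonSSH)
	if !showHTTPS && !showSSH {
		showHTTPS = true // always show at least one link
	}
	return showHTTPS, showSSH
}

func main() {
	https, ssh := cloneButtons(true, true, false, false)
	fmt.Println(https, ssh) // true false: HTTPS is forced on as the fallback
}
```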
+// CsrfTokenTimeout represents the duration that XSRF tokens are valid. // It is exported so clients may set cookie timeouts that match generated tokens. -const Timeout = 24 * time.Hour +const CsrfTokenTimeout = 24 * time.Hour -// clean sanitizes a string for inclusion in a token by replacing all ":"s. -func clean(s string) string { - return strings.ReplaceAll(s, ":", "_") -} +// CsrfTokenRegenerationInterval is the interval between token generations, old tokens are still valid before CsrfTokenTimeout +var CsrfTokenRegenerationInterval = 10 * time.Minute -// GenerateToken returns a URL-safe secure XSRF token that expires in 24 hours. -// +var csrfTokenSep = []byte(":") + +// GenerateCsrfToken returns a URL-safe secure XSRF token that expires in CsrfTokenTimeout hours. // key is a secret key for your application. // userID is a unique identifier for the user. // actionID is the action the user is taking (e.g. POSTing to a particular path). -func GenerateToken(key, userID, actionID string) string { - return generateTokenAtTime(key, userID, actionID, time.Now()) -} - -// generateTokenAtTime is like Generate, but returns a token that expires 24 hours from now. -func generateTokenAtTime(key, userID, actionID string, now time.Time) string { +func GenerateCsrfToken(key, userID, actionID string, now time.Time) string { + nowUnixNano := now.UnixNano() + nowUnixNanoStr := strconv.FormatInt(nowUnixNano, 10) h := hmac.New(sha1.New, []byte(key)) - fmt.Fprintf(h, "%s:%s:%d", clean(userID), clean(actionID), now.UnixNano()) - tok := fmt.Sprintf("%s:%d", h.Sum(nil), now.UnixNano()) + h.Write([]byte(strings.ReplaceAll(userID, ":", "_"))) + h.Write(csrfTokenSep) + h.Write([]byte(strings.ReplaceAll(actionID, ":", "_"))) + h.Write(csrfTokenSep) + h.Write([]byte(nowUnixNanoStr)) + tok := fmt.Sprintf("%s:%s", h.Sum(nil), nowUnixNanoStr) return base64.RawURLEncoding.EncodeToString([]byte(tok)) } -// ValidToken returns true if token is a valid, unexpired token returned by Generate. -func ValidToken(token, key, userID, actionID string) bool { - return validTokenAtTime(token, key, userID, actionID, time.Now()) -} - -// validTokenAtTime is like Valid, but it uses now to check if the token is expired. -func validTokenAtTime(token, key, userID, actionID string, now time.Time) bool { - // Decode the token. +func ParseCsrfToken(token string) (issueTime time.Time, ok bool) { data, err := base64.RawURLEncoding.DecodeString(token) if err != nil { - return false + return time.Time{}, false } - // Extract the issue time of the token. - sep := bytes.LastIndex(data, []byte{':'}) - if sep < 0 { - return false + pos := bytes.LastIndex(data, csrfTokenSep) + if pos == -1 { + return time.Time{}, false } - nanos, err := strconv.ParseInt(string(data[sep+1:]), 10, 64) + nanos, err := strconv.ParseInt(string(data[pos+1:]), 10, 64) if err != nil { + return time.Time{}, false + } + return time.Unix(0, nanos), true +} + +// ValidCsrfToken returns true if token is a valid and unexpired token returned by Generate. +func ValidCsrfToken(token, key, userID, actionID string, now time.Time) bool { + issueTime, ok := ParseCsrfToken(token) + if !ok { return false } - issueTime := time.Unix(0, nanos) // Check that the token is not expired. - if now.Sub(issueTime) >= Timeout { + if now.Sub(issueTime) >= CsrfTokenTimeout { return false } // Check that the token is not from the future. 
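As a usage sketch of the renamed CSRF helpers (placeholder key, user ID and action ID; not code taken from the patch): the token embeds its own issue time, so validation recomputes the HMAC for that time and then applies the CsrfTokenTimeout and clock-skew checks shown here.

// Minimal sketch; "secret-key", "42" and the action ID are placeholder inputs.
package main

import (
	"fmt"
	"time"

	gitea_context "code.gitea.io/gitea/modules/context"
)

func main() {
	now := time.Now()
	tok := gitea_context.GenerateCsrfToken("secret-key", "42", "POST /user/settings", now)

	// The issue time is recoverable from the token itself.
	if issued, ok := gitea_context.ParseCsrfToken(tok); ok {
		fmt.Println("issued at:", issued)
	}

	// Valid while younger than CsrfTokenTimeout, with one minute of clock-skew tolerance.
	fmt.Println(gitea_context.ValidCsrfToken(tok, "secret-key", "42", "POST /user/settings", now.Add(time.Hour)))    // true
	fmt.Println(gitea_context.ValidCsrfToken(tok, "secret-key", "42", "POST /user/settings", now.Add(25*time.Hour))) // false
}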
- // Allow 1 minute grace period in case the token is being verified on a + // Allow 1-minute grace period in case the token is being verified on a // machine whose clock is behind the machine that issued the token. if issueTime.After(now.Add(1 * time.Minute)) { return false } - expected := generateTokenAtTime(key, userID, actionID, issueTime) + expected := GenerateCsrfToken(key, userID, actionID, issueTime) // Check that the token matches the expected value. // Use constant time comparison to avoid timing attacks. diff --git a/modules/context/xsrf_test.go b/modules/context/xsrf_test.go index c0c711bf07..ef42d61d5a 100644 --- a/modules/context/xsrf_test.go +++ b/modules/context/xsrf_test.go @@ -37,18 +37,18 @@ var ( func Test_ValidToken(t *testing.T) { t.Run("Validate token", func(t *testing.T) { - tok := generateTokenAtTime(key, userID, actionID, now) - assert.True(t, validTokenAtTime(tok, key, userID, actionID, oneMinuteFromNow)) - assert.True(t, validTokenAtTime(tok, key, userID, actionID, now.Add(Timeout-1*time.Nanosecond))) - assert.True(t, validTokenAtTime(tok, key, userID, actionID, now.Add(-1*time.Minute))) + tok := GenerateCsrfToken(key, userID, actionID, now) + assert.True(t, ValidCsrfToken(tok, key, userID, actionID, oneMinuteFromNow)) + assert.True(t, ValidCsrfToken(tok, key, userID, actionID, now.Add(CsrfTokenTimeout-1*time.Nanosecond))) + assert.True(t, ValidCsrfToken(tok, key, userID, actionID, now.Add(-1*time.Minute))) }) } // Test_SeparatorReplacement tests that separators are being correctly substituted func Test_SeparatorReplacement(t *testing.T) { t.Run("Test two separator replacements", func(t *testing.T) { - assert.NotEqual(t, generateTokenAtTime("foo:bar", "baz", "wah", now), - generateTokenAtTime("foo", "bar:baz", "wah", now)) + assert.NotEqual(t, GenerateCsrfToken("foo:bar", "baz", "wah", now), + GenerateCsrfToken("foo", "bar:baz", "wah", now)) }) } @@ -61,13 +61,13 @@ func Test_InvalidToken(t *testing.T) { {"Bad key", "foobar", userID, actionID, oneMinuteFromNow}, {"Bad userID", key, "foobar", actionID, oneMinuteFromNow}, {"Bad actionID", key, userID, "foobar", oneMinuteFromNow}, - {"Expired", key, userID, actionID, now.Add(Timeout)}, + {"Expired", key, userID, actionID, now.Add(CsrfTokenTimeout)}, {"More than 1 minute from the future", key, userID, actionID, now.Add(-1*time.Nanosecond - 1*time.Minute)}, } - tok := generateTokenAtTime(key, userID, actionID, now) + tok := GenerateCsrfToken(key, userID, actionID, now) for _, itt := range invalidTokenTests { - assert.False(t, validTokenAtTime(tok, itt.key, itt.userID, itt.actionID, itt.t)) + assert.False(t, ValidCsrfToken(tok, itt.key, itt.userID, itt.actionID, itt.t)) } }) } @@ -84,7 +84,7 @@ func Test_ValidateBadData(t *testing.T) { } for _, bdt := range badDataTests { - assert.False(t, validTokenAtTime(bdt.tok, key, userID, actionID, oneMinuteFromNow)) + assert.False(t, ValidCsrfToken(bdt.tok, key, userID, actionID, oneMinuteFromNow)) } }) } diff --git a/modules/convert/convert.go b/modules/convert/convert.go index 41a044c6d7..3a12ed8f1f 100644 --- a/modules/convert/convert.go +++ b/modules/convert/convert.go @@ -14,6 +14,8 @@ import ( "code.gitea.io/gitea/models" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/auth" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -39,12 +41,19 @@ func ToEmail(email *user_model.EmailAddress) *api.Email { 
func ToBranch(repo *repo_model.Repository, b *git.Branch, c *git.Commit, bp *models.ProtectedBranch, user *user_model.User, isRepoAdmin bool) (*api.Branch, error) { if bp == nil { var hasPerm bool + var canPush bool var err error if user != nil { hasPerm, err = models.HasAccessUnit(user, repo, unit.TypeCode, perm.AccessModeWrite) if err != nil { return nil, err } + + perms, err := models.GetUserRepoPermission(db.DefaultContext, repo, user) + if err != nil { + return nil, err + } + canPush = perms.CanWriteToBranch(user, b.Name) } return &api.Branch{ @@ -54,7 +63,7 @@ func ToBranch(repo *repo_model.Repository, b *git.Branch, c *git.Commit, bp *mod RequiredApprovals: 0, EnableStatusCheck: false, StatusCheckContexts: []string{}, - UserCanPush: hasPerm, + UserCanPush: canPush, UserCanMerge: hasPerm, }, nil } @@ -73,12 +82,12 @@ func ToBranch(repo *repo_model.Repository, b *git.Branch, c *git.Commit, bp *mod } if user != nil { - permission, err := models.GetUserRepoPermission(repo, user) + permission, err := models.GetUserRepoPermission(db.DefaultContext, repo, user) if err != nil { return nil, err } branch.UserCanPush = bp.CanUserPush(user.ID) - branch.UserCanMerge = models.IsUserMergeWhitelisted(bp, user.ID, permission) + branch.UserCanMerge = models.IsUserMergeWhitelisted(db.DefaultContext, bp, user.ID, permission) } return branch, nil @@ -98,15 +107,15 @@ func ToBranchProtection(bp *models.ProtectedBranch) *api.BranchProtection { if err != nil { log.Error("GetUserNamesByIDs (ApprovalsWhitelistUserIDs): %v", err) } - pushWhitelistTeams, err := models.GetTeamNamesByID(bp.WhitelistTeamIDs) + pushWhitelistTeams, err := organization.GetTeamNamesByID(bp.WhitelistTeamIDs) if err != nil { log.Error("GetTeamNamesByID (WhitelistTeamIDs): %v", err) } - mergeWhitelistTeams, err := models.GetTeamNamesByID(bp.MergeWhitelistTeamIDs) + mergeWhitelistTeams, err := organization.GetTeamNamesByID(bp.MergeWhitelistTeamIDs) if err != nil { log.Error("GetTeamNamesByID (MergeWhitelistTeamIDs): %v", err) } - approvalsWhitelistTeams, err := models.GetTeamNamesByID(bp.ApprovalsWhitelistTeamIDs) + approvalsWhitelistTeams, err := organization.GetTeamNamesByID(bp.ApprovalsWhitelistTeamIDs) if err != nil { log.Error("GetTeamNamesByID (ApprovalsWhitelistTeamIDs): %v", err) } @@ -280,7 +289,7 @@ func ToDeployKey(apiLink string, key *asymkey_model.DeployKey) *api.DeployKey { } // ToOrganization convert user_model.User to api.Organization -func ToOrganization(org *models.Organization) *api.Organization { +func ToOrganization(org *organization.Organization) *api.Organization { return &api.Organization{ ID: org.ID, AvatarURL: org.AsUser().AvatarLink(), @@ -294,8 +303,8 @@ func ToOrganization(org *models.Organization) *api.Organization { } } -// ToTeam convert models.Team to api.Team -func ToTeam(team *models.Team) *api.Team { +// ToTeam convert organization.Team to api.Team +func ToTeam(team *organization.Team) *api.Team { if team == nil { return nil } diff --git a/modules/convert/git_commit.go b/modules/convert/git_commit.go index a5c3112f13..dfd6cb080c 100644 --- a/modules/convert/git_commit.go +++ b/modules/convert/git_commit.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/modules/log" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/services/gitdiff" ) // ToCommitUser convert a git.Signature to an api.CommitUser @@ -146,6 +147,13 @@ func ToCommit(repo *repo_model.Repository, gitRepo *git.Repository, commit *git. 
} } + diff, err := gitdiff.GetDiff(gitRepo, &gitdiff.DiffOptions{ + AfterCommitID: commit.ID.String(), + }) + if err != nil { + return nil, err + } + return &api.Commit{ CommitMeta: &api.CommitMeta{ URL: repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String()), @@ -175,10 +183,16 @@ func ToCommit(repo *repo_model.Repository, gitRepo *git.Repository, commit *git. SHA: commit.ID.String(), Created: commit.Committer.When, }, + Verification: ToVerification(commit), }, Author: apiAuthor, Committer: apiCommitter, Parents: apiParents, Files: affectedFileList, + Stats: &api.CommitStats{ + Total: diff.TotalAddition + diff.TotalDeletion, + Additions: diff.TotalAddition, + Deletions: diff.TotalDeletion, + }, }, nil } diff --git a/modules/convert/issue.go b/modules/convert/issue.go index 4972b070a7..bf116e2283 100644 --- a/modules/convert/issue.go +++ b/modules/convert/issue.go @@ -11,6 +11,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" @@ -23,13 +24,13 @@ import ( // Required - Poster, Labels, // Optional - Milestone, Assignee, PullRequest func ToAPIIssue(issue *models.Issue) *api.Issue { - if err := issue.LoadLabels(); err != nil { + if err := issue.LoadLabels(db.DefaultContext); err != nil { return &api.Issue{} } if err := issue.LoadPoster(); err != nil { return &api.Issue{} } - if err := issue.LoadRepo(); err != nil { + if err := issue.LoadRepo(db.DefaultContext); err != nil { return &api.Issue{} } if err := issue.Repo.GetOwner(db.DefaultContext); err != nil { @@ -214,7 +215,7 @@ func ToLabelList(labels []*models.Label, repo *repo_model.Repository, org *user_ } // ToAPIMilestone converts Milestone into API Format -func ToAPIMilestone(m *models.Milestone) *api.Milestone { +func ToAPIMilestone(m *issues_model.Milestone) *api.Milestone { apiMilestone := &api.Milestone{ ID: m.ID, State: m.State(), diff --git a/modules/convert/issue_test.go b/modules/convert/issue_test.go index fb75413eab..b237c18f69 100644 --- a/modules/convert/issue_test.go +++ b/modules/convert/issue_test.go @@ -10,6 +10,7 @@ import ( "time" "code.gitea.io/gitea/models" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" "code.gitea.io/gitea/modules/setting" @@ -32,7 +33,7 @@ func TestLabel_ToLabel(t *testing.T) { } func TestMilestone_APIFormat(t *testing.T) { - milestone := &models.Milestone{ + milestone := &issues_model.Milestone{ ID: 3, RepoID: 4, Name: "milestoneName", diff --git a/modules/convert/main_test.go b/modules/convert/main_test.go index 307fd06197..84f16b9af3 100644 --- a/modules/convert/main_test.go +++ b/modules/convert/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } diff --git a/modules/convert/package.go b/modules/convert/package.go new file mode 100644 index 0000000000..a4ea41d522 --- /dev/null +++ b/modules/convert/package.go @@ -0,0 +1,53 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package convert + +import ( + "context" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/packages" + user_model "code.gitea.io/gitea/models/user" + api "code.gitea.io/gitea/modules/structs" +) + +// ToPackage convert a packages.PackageDescriptor to api.Package +func ToPackage(ctx context.Context, pd *packages.PackageDescriptor, doer *user_model.User) (*api.Package, error) { + var repo *api.Repository + if pd.Repository != nil { + permission, err := models.GetUserRepoPermission(ctx, pd.Repository, doer) + if err != nil { + return nil, err + } + + if permission.HasAccess() { + repo = ToRepo(pd.Repository, permission.AccessMode) + } + } + + return &api.Package{ + ID: pd.Version.ID, + Owner: ToUser(pd.Owner, doer), + Repository: repo, + Creator: ToUser(pd.Creator, doer), + Type: string(pd.Package.Type), + Name: pd.Package.Name, + Version: pd.Version.Version, + CreatedAt: pd.Version.CreatedUnix.AsTime(), + }, nil +} + +// ToPackageFile converts packages.PackageFileDescriptor to api.PackageFile +func ToPackageFile(pfd *packages.PackageFileDescriptor) *api.PackageFile { + return &api.PackageFile{ + ID: pfd.File.ID, + Size: pfd.Blob.Size, + Name: pfd.File.Name, + HashMD5: pfd.Blob.HashMD5, + HashSHA1: pfd.Blob.HashSHA1, + HashSHA256: pfd.Blob.HashSHA256, + HashSHA512: pfd.Blob.HashSHA512, + } +} diff --git a/modules/convert/pull.go b/modules/convert/pull.go index 1551645a51..a2f54270e4 100644 --- a/modules/convert/pull.go +++ b/modules/convert/pull.go @@ -27,23 +27,23 @@ func ToAPIPullRequest(ctx context.Context, pr *models.PullRequest, doer *user_mo err error ) - if err = pr.Issue.LoadRepo(); err != nil { + if err = pr.Issue.LoadRepo(ctx); err != nil { log.Error("pr.Issue.LoadRepo[%d]: %v", pr.ID, err) return nil } apiIssue := ToAPIIssue(pr.Issue) - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("GetRepositoryById[%d]: %v", pr.ID, err) return nil } - if err := pr.LoadHeadRepo(); err != nil { + if err := pr.LoadHeadRepoCtx(ctx); err != nil { log.Error("GetRepositoryById[%d]: %v", pr.ID, err) return nil } - p, err := models.GetUserRepoPermission(pr.BaseRepo, doer) + p, err := models.GetUserRepoPermission(ctx, pr.BaseRepo, doer) if err != nil { log.Error("GetUserRepoPermission[%d]: %v", pr.BaseRepoID, err) p.AccessMode = perm.AccessModeNone @@ -68,10 +68,13 @@ func ToAPIPullRequest(ctx context.Context, pr *models.PullRequest, doer *user_mo PatchURL: pr.Issue.PatchURL(), HasMerged: pr.HasMerged, MergeBase: pr.MergeBase, + Mergeable: pr.Mergeable(), Deadline: apiIssue.Deadline, Created: pr.Issue.CreatedUnix.AsTimePtr(), Updated: pr.Issue.UpdatedUnix.AsTimePtr(), + AllowMaintainerEdit: pr.AllowMaintainerEdit, + Base: &api.PRBranchInfo{ Name: pr.BaseBranch, Ref: pr.BaseBranch, @@ -85,7 +88,7 @@ func ToAPIPullRequest(ctx context.Context, pr *models.PullRequest, doer *user_mo }, } - gitRepo, err := git.OpenRepositoryCtx(ctx, pr.BaseRepo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) if err != nil { log.Error("OpenRepository[%s]: %v", pr.BaseRepo.RepoPath(), err) return nil @@ -111,7 +114,7 @@ func ToAPIPullRequest(ctx context.Context, pr *models.PullRequest, doer *user_mo } if pr.Flow == models.PullRequestFlowAGit { - gitRepo, err := git.OpenRepositoryCtx(ctx, pr.BaseRepo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) if err != nil { log.Error("OpenRepository[%s]: %v", pr.GetGitRefName(), err) return nil @@ -129,7 +132,7 @@ func ToAPIPullRequest(ctx context.Context, pr 
*models.PullRequest, doer *user_mo } if pr.HeadRepo != nil && pr.Flow == models.PullRequestFlowGithub { - p, err := models.GetUserRepoPermission(pr.HeadRepo, doer) + p, err := models.GetUserRepoPermission(ctx, pr.HeadRepo, doer) if err != nil { log.Error("GetUserRepoPermission[%d]: %v", pr.HeadRepoID, err) p.AccessMode = perm.AccessModeNone @@ -138,7 +141,7 @@ func ToAPIPullRequest(ctx context.Context, pr *models.PullRequest, doer *user_mo apiPullRequest.Head.RepoID = pr.HeadRepo.ID apiPullRequest.Head.Repository = ToRepo(pr.HeadRepo, p.AccessMode) - headGitRepo, err := git.OpenRepositoryCtx(ctx, pr.HeadRepo.RepoPath()) + headGitRepo, err := git.OpenRepository(ctx, pr.HeadRepo.RepoPath()) if err != nil { log.Error("OpenRepository[%s]: %v", pr.HeadRepo.RepoPath(), err) return nil @@ -174,7 +177,7 @@ func ToAPIPullRequest(ctx context.Context, pr *models.PullRequest, doer *user_mo } if len(apiPullRequest.Head.Sha) == 0 && len(apiPullRequest.Head.Ref) != 0 { - baseGitRepo, err := git.OpenRepositoryCtx(ctx, pr.BaseRepo.RepoPath()) + baseGitRepo, err := git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) if err != nil { log.Error("OpenRepository[%s]: %v", pr.BaseRepo.RepoPath(), err) return nil @@ -191,10 +194,6 @@ func ToAPIPullRequest(ctx context.Context, pr *models.PullRequest, doer *user_mo } } - if pr.Status != models.PullRequestStatusChecking { - mergeable := !(pr.Status == models.PullRequestStatusConflict || pr.Status == models.PullRequestStatusError) && !pr.IsWorkInProgress() - apiPullRequest.Mergeable = mergeable - } if pr.HasMerged { apiPullRequest.Merged = pr.MergedUnix.AsTimePtr() apiPullRequest.MergedCommitID = &pr.MergedCommitID diff --git a/modules/convert/user.go b/modules/convert/user.go index dc4a8c49c7..2b07d21838 100644 --- a/modules/convert/user.go +++ b/modules/convert/user.go @@ -95,3 +95,12 @@ func User2UserSettings(user *user_model.User) api.UserSettings { DiffViewStyle: user.DiffViewStyle, } } + +// ToUserAndPermission return User and its collaboration permission for a repository +func ToUserAndPermission(user, doer *user_model.User, accessMode perm.AccessMode) api.RepoCollaboratorPermission { + return api.RepoCollaboratorPermission{ + User: ToUser(user, doer), + Permission: accessMode.String(), + RoleName: accessMode.String(), + } +} diff --git a/modules/doctor/authorizedkeys.go b/modules/doctor/authorizedkeys.go index 3eb931e6f6..18e7a3cbf4 100644 --- a/modules/doctor/authorizedkeys.go +++ b/modules/doctor/authorizedkeys.go @@ -72,8 +72,8 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e "authorized_keys file %q is out of date.\nRegenerate it with:\n\t\"%s\"\nor\n\t\"%s\"", fPath, "gitea admin regenerate keys", - "gitea doctor --run authorized_keys --fix") - return fmt.Errorf(`authorized_keys is out of date and should be regenerated with "gitea admin regenerate keys" or "gitea doctor --run authorized_keys --fix"`) + "gitea doctor --run authorized-keys --fix") + return fmt.Errorf(`authorized_keys is out of date and should be regenerated with "gitea admin regenerate keys" or "gitea doctor --run authorized-keys --fix"`) } logger.Warn("authorized_keys is out of date. 
Attempting rewrite...") err = asymkey_model.RewriteAllPublicKeys() diff --git a/modules/doctor/checkOldArchives.go b/modules/doctor/checkOldArchives.go index 6353eaddd2..cefb3817bf 100644 --- a/modules/doctor/checkOldArchives.go +++ b/modules/doctor/checkOldArchives.go @@ -17,7 +17,7 @@ import ( func checkOldArchives(ctx context.Context, logger log.Logger, autofix bool) error { numRepos := 0 numReposUpdated := 0 - err := iterateRepositories(func(repo *repo_model.Repository) error { + err := iterateRepositories(ctx, func(repo *repo_model.Repository) error { if repo.IsEmpty { return nil } diff --git a/modules/doctor/fix16961.go b/modules/doctor/fix16961.go index e241838068..92c77ba80f 100644 --- a/modules/doctor/fix16961.go +++ b/modules/doctor/fix16961.go @@ -268,7 +268,7 @@ func fixBrokenRepoUnits16961(ctx context.Context, logger log.Logger, autofix boo count := 0 err := db.Iterate( - db.DefaultContext, + ctx, new(RepoUnit), builder.Gt{ "id": 0, diff --git a/modules/doctor/mergebase.go b/modules/doctor/mergebase.go index a655826e1c..8f5c61a5da 100644 --- a/modules/doctor/mergebase.go +++ b/modules/doctor/mergebase.go @@ -18,9 +18,9 @@ import ( "xorm.io/builder" ) -func iteratePRs(repo *repo_model.Repository, each func(*repo_model.Repository, *models.PullRequest) error) error { +func iteratePRs(ctx context.Context, repo *repo_model.Repository, each func(*repo_model.Repository, *models.PullRequest) error) error { return db.Iterate( - db.DefaultContext, + ctx, new(models.PullRequest), builder.Eq{"base_repo_id": repo.ID}, func(idx int, bean interface{}) error { @@ -33,9 +33,9 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro numRepos := 0 numPRs := 0 numPRsUpdated := 0 - err := iterateRepositories(func(repo *repo_model.Repository) error { + err := iterateRepositories(ctx, func(repo *repo_model.Repository) error { numRepos++ - return iteratePRs(repo, func(repo *repo_model.Repository, pr *models.PullRequest) error { + return iteratePRs(ctx, repo, func(repo *repo_model.Repository, pr *models.PullRequest) error { numPRs++ pr.BaseRepo = repo repoPath := repo.RepoPath() @@ -44,17 +44,17 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro if !pr.HasMerged { var err error - pr.MergeBase, err = git.NewCommand(ctx, "merge-base", "--", pr.BaseBranch, pr.GetGitRefName()).RunInDir(repoPath) + pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", pr.BaseBranch, pr.GetGitRefName()).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil { var err2 error - pr.MergeBase, err2 = git.NewCommand(ctx, "rev-parse", git.BranchPrefix+pr.BaseBranch).RunInDir(repoPath) + pr.MergeBase, _, err2 = git.NewCommand(ctx, "rev-parse", git.BranchPrefix+pr.BaseBranch).RunStdString(&git.RunOpts{Dir: repoPath}) if err2 != nil { logger.Warn("Unable to get merge base for PR ID %d, #%d onto %s in %s/%s. Error: %v & %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2) return nil } } } else { - parentsString, err := git.NewCommand(ctx, "rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunInDir(repoPath) + parentsString, _, err := git.NewCommand(ctx, "rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil { logger.Warn("Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s. 
Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err) return nil @@ -67,7 +67,7 @@ func checkPRMergeBase(ctx context.Context, logger log.Logger, autofix bool) erro args := append([]string{"merge-base", "--"}, parents[1:]...) args = append(args, pr.GetGitRefName()) - pr.MergeBase, err = git.NewCommand(ctx, args...).RunInDir(repoPath) + pr.MergeBase, _, err = git.NewCommand(ctx, args...).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil { logger.Warn("Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err) return nil diff --git a/modules/doctor/misc.go b/modules/doctor/misc.go index 2f6baa814d..60c190cf98 100644 --- a/modules/doctor/misc.go +++ b/modules/doctor/misc.go @@ -27,9 +27,9 @@ import ( "xorm.io/builder" ) -func iterateRepositories(each func(*repo_model.Repository) error) error { +func iterateRepositories(ctx context.Context, each func(*repo_model.Repository) error) error { err := db.Iterate( - db.DefaultContext, + ctx, new(repo_model.Repository), builder.Gt{"id": 0}, func(idx int, bean interface{}) error { @@ -50,7 +50,7 @@ func checkScriptType(ctx context.Context, logger log.Logger, autofix bool) error } func checkHooks(ctx context.Context, logger log.Logger, autofix bool) error { - if err := iterateRepositories(func(repo *repo_model.Repository) error { + if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error { results, err := repository.CheckDelegateHooks(repo.RepoPath()) if err != nil { logger.Critical("Unable to check delegate hooks for repo %-v. ERROR: %v", repo, err) @@ -86,20 +86,20 @@ func checkEnablePushOptions(ctx context.Context, logger log.Logger, autofix bool numRepos := 0 numNeedUpdate := 0 - if err := iterateRepositories(func(repo *repo_model.Repository) error { + if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error { numRepos++ - r, err := git.OpenRepositoryCtx(git.DefaultContext, repo.RepoPath()) + r, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { return err } defer r.Close() if autofix { - _, err := git.NewCommand(ctx, "config", "receive.advertisePushOptions", "true").RunInDir(r.Path) + _, _, err := git.NewCommand(ctx, "config", "receive.advertisePushOptions", "true").RunStdString(&git.RunOpts{Dir: r.Path}) return err } - value, err := git.NewCommand(ctx, "config", "receive.advertisePushOptions").RunInDir(r.Path) + value, _, err := git.NewCommand(ctx, "config", "receive.advertisePushOptions").RunStdString(&git.RunOpts{Dir: r.Path}) if err != nil { return err } @@ -132,13 +132,13 @@ func checkDaemonExport(ctx context.Context, logger log.Logger, autofix bool) err logger.Critical("Unable to create cache: %v", err) return err } - if err := iterateRepositories(func(repo *repo_model.Repository) error { + if err := iterateRepositories(ctx, func(repo *repo_model.Repository) error { numRepos++ if owner, has := cache.Get(repo.OwnerID); has { repo.Owner = owner.(*user_model.User) } else { - if err := repo.GetOwner(db.DefaultContext); err != nil { + if err := repo.GetOwner(ctx); err != nil { return err } cache.Add(repo.OwnerID, repo.Owner) diff --git a/modules/eventsource/manager_run.go b/modules/eventsource/manager_run.go index 60598ecb49..127979ad63 100644 --- a/modules/eventsource/manager_run.go +++ b/modules/eventsource/manager_run.go @@ -9,8 +9,11 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/modules/graceful" 
+ "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" ) @@ -25,6 +28,9 @@ func (m *Manager) Init() { // Run runs the manager within a provided context func (m *Manager) Run(ctx context.Context) { + ctx, _, finished := process.GetManager().AddTypedContext(ctx, "Service: EventSource", process.SystemProcessType, true) + defer finished() + then := timeutil.TimeStampNow().Add(-2) timer := time.NewTicker(setting.UI.Notification.EventSourceUpdateTime) loop: @@ -76,6 +82,31 @@ loop: }) } then = now + + if setting.Service.EnableTimetracking { + usersStopwatches, err := models.GetUIDsAndStopwatch() + if err != nil { + log.Error("Unable to get GetUIDsAndStopwatch: %v", err) + return + } + + for _, userStopwatches := range usersStopwatches { + apiSWs, err := convert.ToStopWatches(userStopwatches.StopWatches) + if err != nil { + log.Error("Unable to APIFormat stopwatches: %v", err) + continue + } + dataBs, err := json.Marshal(apiSWs) + if err != nil { + log.Error("Unable to marshal stopwatches: %v", err) + continue + } + m.SendMessage(userStopwatches.UserID, &Event{ + Name: "stopwatches", + Data: string(dataBs), + }) + } + } } } m.UnregisterAll() diff --git a/modules/git/batch_reader.go b/modules/git/batch_reader.go index 66ca118de5..902fa89718 100644 --- a/modules/git/batch_reader.go +++ b/modules/git/batch_reader.go @@ -34,10 +34,9 @@ func EnsureValidGitRepository(ctx context.Context, repoPath string) error { stderr := strings.Builder{} err := NewCommand(ctx, "rev-parse"). SetDescription(fmt.Sprintf("%s rev-parse [repo_path: %s]", GitExecutable, repoPath)). - RunWithContext(&RunContext{ - Timeout: -1, - Dir: repoPath, - Stderr: &stderr, + Run(&RunOpts{ + Dir: repoPath, + Stderr: &stderr, }) if err != nil { return ConcatenateError(err, (&stderr).String()) @@ -58,6 +57,12 @@ func CatFileBatchCheck(ctx context.Context, repoPath string) (WriteCloserError, <-closed } + // Ensure cancel is called as soon as the provided context is cancelled + go func() { + <-ctx.Done() + cancel() + }() + _, filename, line, _ := runtime.Caller(2) filename = strings.TrimPrefix(filename, callerPrefix) @@ -65,12 +70,11 @@ func CatFileBatchCheck(ctx context.Context, repoPath string) (WriteCloserError, stderr := strings.Builder{} err := NewCommand(ctx, "cat-file", "--batch-check"). SetDescription(fmt.Sprintf("%s cat-file --batch-check [repo_path: %s] (%s:%d)", GitExecutable, repoPath, filename, line)). - RunWithContext(&RunContext{ - Timeout: -1, - Dir: repoPath, - Stdin: batchStdinReader, - Stdout: batchStdoutWriter, - Stderr: &stderr, + Run(&RunOpts{ + Dir: repoPath, + Stdin: batchStdinReader, + Stdout: batchStdoutWriter, + Stderr: &stderr, }) if err != nil { _ = batchStdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String())) @@ -103,6 +107,12 @@ func CatFileBatch(ctx context.Context, repoPath string) (WriteCloserError, *bufi <-closed } + // Ensure cancel is called as soon as the provided context is cancelled + go func() { + <-ctx.Done() + cancel() + }() + _, filename, line, _ := runtime.Caller(2) filename = strings.TrimPrefix(filename, callerPrefix) @@ -110,12 +120,11 @@ func CatFileBatch(ctx context.Context, repoPath string) (WriteCloserError, *bufi stderr := strings.Builder{} err := NewCommand(ctx, "cat-file", "--batch"). SetDescription(fmt.Sprintf("%s cat-file --batch [repo_path: %s] (%s:%d)", GitExecutable, repoPath, filename, line)). 
- RunWithContext(&RunContext{ - Timeout: -1, - Dir: repoPath, - Stdin: batchStdinReader, - Stdout: batchStdoutWriter, - Stderr: &stderr, + Run(&RunOpts{ + Dir: repoPath, + Stdin: batchStdinReader, + Stdout: batchStdoutWriter, + Stderr: &stderr, }) if err != nil { _ = batchStdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String())) diff --git a/modules/git/blob_gogit.go b/modules/git/blob_gogit.go index ef7a90c3f4..2a2b51e422 100644 --- a/modules/git/blob_gogit.go +++ b/modules/git/blob_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/blob_nogogit.go b/modules/git/blob_nogogit.go index aabf1b34ad..211c188559 100644 --- a/modules/git/blob_nogogit.go +++ b/modules/git/blob_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/blob_test.go b/modules/git/blob_test.go index 34d8054d1e..39f3f11162 100644 --- a/modules/git/blob_test.go +++ b/modules/git/blob_test.go @@ -17,7 +17,7 @@ import ( func TestBlob_Data(t *testing.T) { output := "file2\n" bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - repo, err := OpenRepository(bareRepo1Path) + repo, err := openRepositoryWithDefaultContext(bareRepo1Path) if !assert.NoError(t, err) { t.Fatal() } @@ -39,7 +39,7 @@ func TestBlob_Data(t *testing.T) { func Benchmark_Blob_Data(b *testing.B) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - repo, err := OpenRepository(bareRepo1Path) + repo, err := openRepositoryWithDefaultContext(bareRepo1Path) if err != nil { b.Fatal(err) } diff --git a/modules/git/command.go b/modules/git/command.go index 649d9cf249..3dd12e421e 100644 --- a/modules/git/command.go +++ b/modules/git/command.go @@ -14,9 +14,11 @@ import ( "os/exec" "strings" "time" + "unsafe" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" + "code.gitea.io/gitea/modules/util" ) var ( @@ -32,10 +34,11 @@ const DefaultLocale = "C" // Command represents a command with its subcommands or arguments. type Command struct { - name string - args []string - parentContext context.Context - desc string + name string + args []string + parentContext context.Context + desc string + globalArgsLength int } func (c *Command) String() string { @@ -51,9 +54,10 @@ func NewCommand(ctx context.Context, args ...string) *Command { cargs := make([]string, len(globalCommandArgs)) copy(cargs, globalCommandArgs) return &Command{ - name: GitExecutable, - args: append(cargs, args...), - parentContext: ctx, + name: GitExecutable, + args: append(cargs, args...), + parentContext: ctx, + globalArgsLength: len(globalCommandArgs), } } @@ -90,34 +94,8 @@ func (c *Command) AddArguments(args ...string) *Command { return c } -// RunInDirTimeoutEnvPipeline executes the command in given directory with given timeout, -// it pipes stdout and stderr to given io.Writer. -func (c *Command) RunInDirTimeoutEnvPipeline(env []string, timeout time.Duration, dir string, stdout, stderr io.Writer) error { - return c.RunInDirTimeoutEnvFullPipeline(env, timeout, dir, stdout, stderr, nil) -} - -// RunInDirTimeoutEnvFullPipeline executes the command in given directory with given timeout, -// it pipes stdout and stderr to given io.Writer and passes in an io.Reader as stdin. 
-func (c *Command) RunInDirTimeoutEnvFullPipeline(env []string, timeout time.Duration, dir string, stdout, stderr io.Writer, stdin io.Reader) error { - return c.RunInDirTimeoutEnvFullPipelineFunc(env, timeout, dir, stdout, stderr, stdin, nil) -} - -// RunInDirTimeoutEnvFullPipelineFunc executes the command in given directory with given timeout, -// it pipes stdout and stderr to given io.Writer and passes in an io.Reader as stdin. Between cmd.Start and cmd.Wait the passed in function is run. -func (c *Command) RunInDirTimeoutEnvFullPipelineFunc(env []string, timeout time.Duration, dir string, stdout, stderr io.Writer, stdin io.Reader, fn func(context.Context, context.CancelFunc) error) error { - return c.RunWithContext(&RunContext{ - Env: env, - Timeout: timeout, - Dir: dir, - Stdout: stdout, - Stderr: stderr, - Stdin: stdin, - PipelineFunc: fn, - }) -} - -// RunContext represents parameters to run the command -type RunContext struct { +// RunOpts represents parameters to run the command +type RunOpts struct { Env []string Timeout time.Duration Dir string @@ -126,31 +104,48 @@ type RunContext struct { PipelineFunc func(context.Context, context.CancelFunc) error } -// RunWithContext run the command with context -func (c *Command) RunWithContext(rc *RunContext) error { - if rc.Timeout == -1 { - rc.Timeout = defaultCommandExecutionTimeout +// Run runs the command with the RunOpts +func (c *Command) Run(opts *RunOpts) error { + if opts == nil { + opts = &RunOpts{} + } + if opts.Timeout <= 0 { + opts.Timeout = defaultCommandExecutionTimeout } - if len(rc.Dir) == 0 { + if len(opts.Dir) == 0 { log.Debug("%s", c) } else { - log.Debug("%s: %v", rc.Dir, c) + log.Debug("%s: %v", opts.Dir, c) } desc := c.desc if desc == "" { - desc = fmt.Sprintf("%s %s [repo_path: %s]", c.name, strings.Join(c.args, " "), rc.Dir) + args := c.args[c.globalArgsLength:] + var argSensitiveURLIndexes []int + for i, arg := range c.args { + if strings.Contains(arg, "://") && strings.Contains(arg, "@") { + argSensitiveURLIndexes = append(argSensitiveURLIndexes, i) + } + } + if len(argSensitiveURLIndexes) > 0 { + args = make([]string, len(c.args)) + copy(args, c.args) + for _, urlArgIndex := range argSensitiveURLIndexes { + args[urlArgIndex] = util.SanitizeCredentialURLs(args[urlArgIndex]) + } + } + desc = fmt.Sprintf("%s %s [repo_path: %s]", c.name, strings.Join(args, " "), opts.Dir) } - ctx, cancel, finished := process.GetManager().AddContextTimeout(c.parentContext, rc.Timeout, desc) + ctx, cancel, finished := process.GetManager().AddContextTimeout(c.parentContext, opts.Timeout, desc) defer finished() cmd := exec.CommandContext(ctx, c.name, c.args...) 
- if rc.Env == nil { + if opts.Env == nil { cmd.Env = os.Environ() } else { - cmd.Env = rc.Env + cmd.Env = opts.Env } cmd.Env = append( @@ -162,16 +157,16 @@ func (c *Command) RunWithContext(rc *RunContext) error { "GIT_NO_REPLACE_OBJECTS=1", ) - cmd.Dir = rc.Dir - cmd.Stdout = rc.Stdout - cmd.Stderr = rc.Stderr - cmd.Stdin = rc.Stdin + cmd.Dir = opts.Dir + cmd.Stdout = opts.Stdout + cmd.Stderr = opts.Stderr + cmd.Stdin = opts.Stdin if err := cmd.Start(); err != nil { return err } - if rc.PipelineFunc != nil { - err := rc.PipelineFunc(ctx, cancel) + if opts.PipelineFunc != nil { + err := opts.PipelineFunc(ctx, cancel) if err != nil { cancel() _ = cmd.Wait() @@ -186,90 +181,69 @@ func (c *Command) RunWithContext(rc *RunContext) error { return ctx.Err() } -// RunInDirTimeoutPipeline executes the command in given directory with given timeout, -// it pipes stdout and stderr to given io.Writer. -func (c *Command) RunInDirTimeoutPipeline(timeout time.Duration, dir string, stdout, stderr io.Writer) error { - return c.RunInDirTimeoutEnvPipeline(nil, timeout, dir, stdout, stderr) +type RunStdError interface { + error + Stderr() string } -// RunInDirTimeoutFullPipeline executes the command in given directory with given timeout, -// it pipes stdout and stderr to given io.Writer, and stdin from the given io.Reader -func (c *Command) RunInDirTimeoutFullPipeline(timeout time.Duration, dir string, stdout, stderr io.Writer, stdin io.Reader) error { - return c.RunInDirTimeoutEnvFullPipeline(nil, timeout, dir, stdout, stderr, stdin) +type runStdError struct { + err error + stderr string + errMsg string } -// RunInDirTimeout executes the command in given directory with given timeout, -// and returns stdout in []byte and error (combined with stderr). -func (c *Command) RunInDirTimeout(timeout time.Duration, dir string) ([]byte, error) { - return c.RunInDirTimeoutEnv(nil, timeout, dir) -} - -// RunInDirTimeoutEnv executes the command in given directory with given timeout, -// and returns stdout in []byte and error (combined with stderr). -func (c *Command) RunInDirTimeoutEnv(env []string, timeout time.Duration, dir string) ([]byte, error) { - stdout := new(bytes.Buffer) - stderr := new(bytes.Buffer) - if err := c.RunInDirTimeoutEnvPipeline(env, timeout, dir, stdout, stderr); err != nil { - return nil, ConcatenateError(err, stderr.String()) +func (r *runStdError) Error() string { + // the stderr must be in the returned error text, some code only checks `strings.Contains(err.Error(), "git error")` + if r.errMsg == "" { + r.errMsg = ConcatenateError(r.err, r.stderr).Error() } - if stdout.Len() > 0 && log.IsTrace() { - tracelen := stdout.Len() - if tracelen > 1024 { - tracelen = 1024 - } - log.Trace("Stdout:\n %s", stdout.Bytes()[:tracelen]) - } - return stdout.Bytes(), nil + return r.errMsg } -// RunInDirPipeline executes the command in given directory, -// it pipes stdout and stderr to given io.Writer. -func (c *Command) RunInDirPipeline(dir string, stdout, stderr io.Writer) error { - return c.RunInDirFullPipeline(dir, stdout, stderr, nil) +func (r *runStdError) Unwrap() error { + return r.err } -// RunInDirFullPipeline executes the command in given directory, -// it pipes stdout and stderr to given io.Writer. 
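With the RunInDir* family removed, callers build a single RunOpts and use Run, RunStdString or RunStdBytes. The sketch below shows the new calling convention, assuming the git module is already initialised; repoPath and the helper name are placeholders. Note that stderr travels both inside the returned error text and via its Stderr() method.

// Hedged sketch of the RunOpts-based API; not code from the patch.
package main

import (
	"context"
	"fmt"
	"strings"

	"code.gitea.io/gitea/modules/git"
)

// headCommitID resolves HEAD in the repository at repoPath.
func headCommitID(repoPath string) (string, error) {
	// Leaving Timeout unset falls back to the default command execution timeout.
	stdout, _, err := git.NewCommand(context.Background(), "rev-parse", "HEAD").
		RunStdString(&git.RunOpts{Dir: repoPath})
	if err != nil {
		// The error text already contains stderr; it is also exposed separately.
		return "", fmt.Errorf("rev-parse failed: %v (stderr: %q)", err, err.Stderr())
	}
	return strings.TrimSpace(stdout), nil
}

func main() {
	id, err := headCommitID(".")
	fmt.Println(id, err)
}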
-func (c *Command) RunInDirFullPipeline(dir string, stdout, stderr io.Writer, stdin io.Reader) error { - return c.RunInDirTimeoutFullPipeline(-1, dir, stdout, stderr, stdin) +func (r *runStdError) Stderr() string { + return r.stderr } -// RunInDirBytes executes the command in given directory -// and returns stdout in []byte and error (combined with stderr). -func (c *Command) RunInDirBytes(dir string) ([]byte, error) { - return c.RunInDirTimeout(-1, dir) +func bytesToString(b []byte) string { + return *(*string)(unsafe.Pointer(&b)) // that's what Golang's strings.Builder.String() does (go/src/strings/builder.go) } -// RunInDir executes the command in given directory -// and returns stdout in string and error (combined with stderr). -func (c *Command) RunInDir(dir string) (string, error) { - return c.RunInDirWithEnv(dir, nil) -} - -// RunInDirWithEnv executes the command in given directory -// and returns stdout in string and error (combined with stderr). -func (c *Command) RunInDirWithEnv(dir string, env []string) (string, error) { - stdout, err := c.RunInDirTimeoutEnv(env, -1, dir) +// RunStdString runs the command with options and returns stdout/stderr as string. and store stderr to returned error (err combined with stderr). +func (c *Command) RunStdString(opts *RunOpts) (stdout, stderr string, runErr RunStdError) { + stdoutBytes, stderrBytes, err := c.RunStdBytes(opts) + stdout = bytesToString(stdoutBytes) + stderr = bytesToString(stderrBytes) if err != nil { - return "", err + return stdout, stderr, &runStdError{err: err, stderr: stderr} } - return string(stdout), nil + // even if there is no err, there could still be some stderr output, so we just return stdout/stderr as they are + return stdout, stderr, nil } -// RunTimeout executes the command in default working directory with given timeout, -// and returns stdout in string and error (combined with stderr). -func (c *Command) RunTimeout(timeout time.Duration) (string, error) { - stdout, err := c.RunInDirTimeout(timeout, "") +// RunStdBytes runs the command with options and returns stdout/stderr as bytes. and store stderr to returned error (err combined with stderr). +func (c *Command) RunStdBytes(opts *RunOpts) (stdout, stderr []byte, runErr RunStdError) { + if opts == nil { + opts = &RunOpts{} + } + if opts.Stdout != nil || opts.Stderr != nil { + // we must panic here, otherwise there would be bugs if developers set Stdin/Stderr by mistake, and it would be very difficult to debug + panic("stdout and stderr field must be nil when using RunStdBytes") + } + stdoutBuf := &bytes.Buffer{} + stderrBuf := &bytes.Buffer{} + opts.Stdout = stdoutBuf + opts.Stderr = stderrBuf + err := c.Run(opts) + stderr = stderrBuf.Bytes() if err != nil { - return "", err + return nil, stderr, &runStdError{err: err, stderr: bytesToString(stderr)} } - return string(stdout), nil -} - -// Run executes the command in default working directory -// and returns stdout in string and error (combined with stderr). -func (c *Command) Run() (string, error) { - return c.RunTimeout(-1) + // even if there is no err, there could still be some stderr output + return stdoutBuf.Bytes(), stderr, nil } // AllowLFSFiltersArgs return globalCommandArgs with lfs filter, it should only be used for tests diff --git a/modules/git/command_race_test.go b/modules/git/command_race_test.go new file mode 100644 index 0000000000..ae2acc3a5a --- /dev/null +++ b/modules/git/command_race_test.go @@ -0,0 +1,39 @@ +// Copyright 2017 The Gitea Authors. All rights reserved. 
+// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +//go:build race + +package git + +import ( + "context" + "testing" + "time" +) + +func TestRunWithContextNoTimeout(t *testing.T) { + maxLoops := 10 + + // 'git --version' does not block so it must be finished before the timeout triggered. + cmd := NewCommand(context.Background(), "--version") + for i := 0; i < maxLoops; i++ { + if err := cmd.Run(&RunOpts{}); err != nil { + t.Fatal(err) + } + } +} + +func TestRunWithContextTimeout(t *testing.T) { + maxLoops := 10 + + // 'git hash-object --stdin' blocks on stdin so we can have the timeout triggered. + cmd := NewCommand(context.Background(), "hash-object", "--stdin") + for i := 0; i < maxLoops; i++ { + if err := cmd.Run(&RunOpts{Timeout: 1 * time.Millisecond}); err != nil { + if err != context.DeadlineExceeded { + t.Fatalf("Testing %d/%d: %v", i, maxLoops, err) + } + } + } +} diff --git a/modules/git/command_test.go b/modules/git/command_test.go index f92f526d2d..67d4ca388e 100644 --- a/modules/git/command_test.go +++ b/modules/git/command_test.go @@ -1,40 +1,29 @@ -// Copyright 2017 The Gitea Authors. All rights reserved. +// Copyright 2022 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -//go:build race -// +build race - package git import ( "context" "testing" - "time" + + "github.com/stretchr/testify/assert" ) -func TestRunInDirTimeoutPipelineNoTimeout(t *testing.T) { - maxLoops := 1000 - - // 'git --version' does not block so it must be finished before the timeout triggered. +func TestRunWithContextStd(t *testing.T) { cmd := NewCommand(context.Background(), "--version") - for i := 0; i < maxLoops; i++ { - if err := cmd.RunInDirTimeoutPipeline(-1, "", nil, nil); err != nil { - t.Fatal(err) - } - } -} - -func TestRunInDirTimeoutPipelineAlwaysTimeout(t *testing.T) { - maxLoops := 1000 - - // 'git hash-object --stdin' blocks on stdin so we can have the timeout triggered. - cmd := NewCommand(context.Background(), "hash-object", "--stdin") - for i := 0; i < maxLoops; i++ { - if err := cmd.RunInDirTimeoutPipeline(1*time.Microsecond, "", nil, nil); err != nil { - if err != context.DeadlineExceeded { - t.Fatalf("Testing %d/%d: %v", i, maxLoops, err) - } - } + stdout, stderr, err := cmd.RunStdString(&RunOpts{}) + assert.NoError(t, err) + assert.Empty(t, stderr) + assert.Contains(t, stdout, "git version") + + cmd = NewCommand(context.Background(), "--no-such-arg") + stdout, stderr, err = cmd.RunStdString(&RunOpts{}) + if assert.Error(t, err) { + assert.Equal(t, stderr, err.Stderr()) + assert.Contains(t, err.Stderr(), "unknown option:") + assert.Contains(t, err.Error(), "exit status 129 - unknown option:") + assert.Empty(t, stdout) } } diff --git a/modules/git/commit.go b/modules/git/commit.go index 340a7e21dd..8c194ef502 100644 --- a/modules/git/commit.go +++ b/modules/git/commit.go @@ -17,6 +17,7 @@ import ( "strings" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" ) // Commit represents a git commit. @@ -94,7 +95,7 @@ func AddChangesWithArgs(repoPath string, globalArgs []string, all bool, files .. 
cmd.AddArguments("--all") } cmd.AddArguments("--") - _, err := cmd.AddArguments(files...).RunInDir(repoPath) + _, _, err := cmd.AddArguments(files...).RunStdString(&RunOpts{Dir: repoPath}) return err } @@ -130,7 +131,7 @@ func CommitChangesWithArgs(repoPath string, args []string, opts CommitChangesOpt } cmd.AddArguments("-m", opts.Message) - _, err := cmd.RunInDir(repoPath) + _, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) // No stderr but exit status 1 means nothing to commit. if err != nil && err.Error() == "exit status 1" { return nil @@ -151,7 +152,7 @@ func AllCommitsCount(ctx context.Context, repoPath string, hidePRRefs bool, file cmd.AddArguments(files...) } - stdout, err := cmd.RunInDir(repoPath) + stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) if err != nil { return 0, err } @@ -168,7 +169,7 @@ func CommitsCountFiles(ctx context.Context, repoPath string, revision, relpath [ cmd.AddArguments(relpath...) } - stdout, err := cmd.RunInDir(repoPath) + stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) if err != nil { return 0, err } @@ -206,7 +207,7 @@ func (c *Commit) HasPreviousCommit(commitHash SHA1) (bool, error) { } if err := CheckGitVersionAtLeast("1.8"); err == nil { - _, err := NewCommand(c.repo.Ctx, "merge-base", "--is-ancestor", that, this).RunInDir(c.repo.Path) + _, _, err := NewCommand(c.repo.Ctx, "merge-base", "--is-ancestor", that, this).RunStdString(&RunOpts{Dir: c.repo.Path}) if err == nil { return true, nil } @@ -219,7 +220,7 @@ func (c *Commit) HasPreviousCommit(commitHash SHA1) (bool, error) { return false, err } - result, err := NewCommand(c.repo.Ctx, "rev-list", "--ancestry-path", "-n1", that+".."+this, "--").RunInDir(c.repo.Path) + result, _, err := NewCommand(c.repo.Ctx, "rev-list", "--ancestry-path", "-n1", that+".."+this, "--").RunStdString(&RunOpts{Dir: c.repo.Path}) if err != nil { return false, err } @@ -306,6 +307,35 @@ func (c *Commit) HasFile(filename string) (bool, error) { return true, nil } +// GetFileContent reads a file content as a string or returns false if this was not possible +func (c *Commit) GetFileContent(filename string, limit int) (string, error) { + entry, err := c.GetTreeEntryByPath(filename) + if err != nil { + return "", err + } + + r, err := entry.Blob().DataAsync() + if err != nil { + return "", err + } + defer r.Close() + + if limit > 0 { + bs := make([]byte, limit) + n, err := util.ReadAtMost(r, bs) + if err != nil { + return "", err + } + return string(bs[:n]), nil + } + + bytes, err := io.ReadAll(r) + if err != nil { + return "", err + } + return string(bytes), nil +} + // GetSubModules get all the sub modules of current revision git tree func (c *Commit) GetSubModules() (*ObjectCache, error) { if c.submoduleCache != nil { @@ -381,7 +411,7 @@ func (c *Commit) GetBranchName() (string, error) { } args = append(args, "--name-only", "--no-undefined", c.ID.String()) - data, err := NewCommand(c.repo.Ctx, args...).RunInDir(c.repo.Path) + data, _, err := NewCommand(c.repo.Ctx, args...).RunStdString(&RunOpts{Dir: c.repo.Path}) if err != nil { // handle special case where git can not describe commit if strings.Contains(err.Error(), "cannot describe") { @@ -407,7 +437,7 @@ func (c *Commit) LoadBranchName() (err error) { // GetTagName gets the current tag name for given commit func (c *Commit) GetTagName() (string, error) { - data, err := NewCommand(c.repo.Ctx, "describe", "--exact-match", "--tags", "--always", c.ID.String()).RunInDir(c.repo.Path) + data, _, err := NewCommand(c.repo.Ctx, "describe", 
"--exact-match", "--tags", "--always", c.ID.String()).RunStdString(&RunOpts{Dir: c.repo.Path}) if err != nil { // handle special case where there is no tag for this commit if strings.Contains(err.Error(), "no tag exactly matches") { @@ -486,11 +516,10 @@ func GetCommitFileStatus(ctx context.Context, repoPath, commitID string) (*Commi stderr := new(bytes.Buffer) args := []string{"log", "--name-status", "-c", "--pretty=format:", "--parents", "--no-renames", "-z", "-1", commitID} - err := NewCommand(ctx, args...).RunWithContext(&RunContext{ - Timeout: -1, - Dir: repoPath, - Stdout: w, - Stderr: stderr, + err := NewCommand(ctx, args...).Run(&RunOpts{ + Dir: repoPath, + Stdout: w, + Stderr: stderr, }) w.Close() // Close writer to exit parsing goroutine if err != nil { @@ -503,7 +532,7 @@ func GetCommitFileStatus(ctx context.Context, repoPath, commitID string) (*Commi // GetFullCommitID returns full length (40) of commit ID by given short SHA in a repository. func GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, error) { - commitID, err := NewCommand(ctx, "rev-parse", shortID).RunInDir(repoPath) + commitID, _, err := NewCommand(ctx, "rev-parse", shortID).RunStdString(&RunOpts{Dir: repoPath}) if err != nil { if strings.Contains(err.Error(), "exit status 128") { return "", ErrNotExist{shortID, ""} diff --git a/modules/git/commit_convert_gogit.go b/modules/git/commit_convert_gogit.go index b328b3c0ed..bb9d3bf8ce 100644 --- a/modules/git/commit_convert_gogit.go +++ b/modules/git/commit_convert_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/commit_info_gogit.go b/modules/git/commit_info_gogit.go index ab6e738103..91a1804db5 100644 --- a/modules/git/commit_info_gogit.go +++ b/modules/git/commit_info_gogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/commit_info_nogogit.go b/modules/git/commit_info_nogogit.go index 347ad7d059..f430c672f8 100644 --- a/modules/git/commit_info_nogogit.go +++ b/modules/git/commit_info_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build !gogit -// +build !gogit package git diff --git a/modules/git/commit_info_test.go b/modules/git/commit_info_test.go index 42bed75a3d..49845522a9 100644 --- a/modules/git/commit_info_test.go +++ b/modules/git/commit_info_test.go @@ -112,7 +112,7 @@ func testGetCommitsInfo(t *testing.T, repo1 *Repository) { func TestEntries_GetCommitsInfo(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() @@ -123,7 +123,7 @@ func TestEntries_GetCommitsInfo(t *testing.T) { assert.NoError(t, err) } defer util.RemoveAll(clonedPath) - clonedRepo1, err := OpenRepository(clonedPath) + clonedRepo1, err := openRepositoryWithDefaultContext(clonedPath) if err != nil { assert.NoError(t, err) } @@ -156,7 +156,7 @@ func BenchmarkEntries_GetCommitsInfo(b *testing.B) { } defer util.RemoveAll(repoPath) - if repo, err = OpenRepository(repoPath); err != nil { + if repo, err = openRepositoryWithDefaultContext(repoPath); err != nil { b.Fatal(err) } defer repo.Close() diff --git a/modules/git/commit_test.go b/modules/git/commit_test.go index 6e9dd34ea7..fb8c22dfd3 100644 --- a/modules/git/commit_test.go +++ b/modules/git/commit_test.go @@ -64,9 +64,10 @@ gpgsig -----BEGIN PGP SIGNATURE----- empty commit` sha := SHA1{0xfe, 0xaf, 0x4b, 0xa6, 0xbc, 0x63, 0x5f, 0xec, 0x44, 0x2f, 0x46, 0xdd, 0xd4, 0x51, 0x24, 0x16, 0xec, 0x43, 0xc2, 0xc2} - gitRepo, err := OpenRepository(filepath.Join(testReposDir, "repo1_bare")) + gitRepo, err := openRepositoryWithDefaultContext(filepath.Join(testReposDir, "repo1_bare")) assert.NoError(t, err) assert.NotNil(t, gitRepo) + defer gitRepo.Close() commitFromReader, err := CommitFromReader(gitRepo, sha, strings.NewReader(commitString)) assert.NoError(t, err) @@ -109,8 +110,9 @@ empty commit`, commitFromReader.Signature.Payload) func TestHasPreviousCommit(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - repo, err := OpenRepository(bareRepo1Path) + repo, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) + defer repo.Close() commit, err := repo.GetCommit("8006ff9adbf0cb94da7dad9e537e53817f9fa5c0") assert.NoError(t, err) diff --git a/modules/git/diff.go b/modules/git/diff.go index 621878f620..c9d68bb130 100644 --- a/modules/git/diff.go +++ b/modules/git/diff.go @@ -28,36 +28,24 @@ const ( ) // GetRawDiff dumps diff results of repository in given commit ID to io.Writer. -func GetRawDiff(ctx context.Context, repoPath, commitID string, diffType RawDiffType, writer io.Writer) error { - return GetRawDiffForFile(ctx, repoPath, "", commitID, diffType, "", writer) +func GetRawDiff(repo *Repository, commitID string, diffType RawDiffType, writer io.Writer) error { + return GetRepoRawDiffForFile(repo, "", commitID, diffType, "", writer) } // GetReverseRawDiff dumps the reverse diff results of repository in given commit ID to io.Writer. 
func GetReverseRawDiff(ctx context.Context, repoPath, commitID string, writer io.Writer) error { stderr := new(bytes.Buffer) cmd := NewCommand(ctx, "show", "--pretty=format:revert %H%n", "-R", commitID) - if err := cmd.RunWithContext(&RunContext{ - Timeout: -1, - Dir: repoPath, - Stdout: writer, - Stderr: stderr, + if err := cmd.Run(&RunOpts{ + Dir: repoPath, + Stdout: writer, + Stderr: stderr, }); err != nil { return fmt.Errorf("Run: %v - %s", err, stderr) } return nil } -// GetRawDiffForFile dumps diff results of file in given commit ID to io.Writer. -func GetRawDiffForFile(ctx context.Context, repoPath, startCommit, endCommit string, diffType RawDiffType, file string, writer io.Writer) error { - repo, closer, err := RepositoryFromContextOrOpen(ctx, repoPath) - if err != nil { - return fmt.Errorf("OpenRepository: %v", err) - } - defer closer.Close() - - return GetRepoRawDiffForFile(repo, startCommit, endCommit, diffType, file, writer) -} - // GetRepoRawDiffForFile dumps diff results of file in given commit ID to io.Writer according given repository func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diffType RawDiffType, file string, writer io.Writer) error { commit, err := repo.GetCommit(endCommit) @@ -97,11 +85,10 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff stderr := new(bytes.Buffer) cmd := NewCommand(repo.Ctx, args...) - if err = cmd.RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: writer, - Stderr: stderr, + if err = cmd.Run(&RunOpts{ + Dir: repo.Path, + Stdout: writer, + Stderr: stderr, }); err != nil { return fmt.Errorf("Run: %v - %s", err, stderr) } @@ -301,11 +288,10 @@ func GetAffectedFiles(repo *Repository, oldCommitID, newCommitID string, env []s // Run `git diff --name-only` to get the names of the changed files err = NewCommand(repo.Ctx, "diff", "--name-only", oldCommitID, newCommitID). - RunWithContext(&RunContext{ - Env: env, - Timeout: -1, - Dir: repo.Path, - Stdout: stdoutWriter, + Run(&RunOpts{ + Env: env, + Dir: repo.Path, + Stdout: stdoutWriter, PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { // Close the writer end of the pipe to begin processing _ = stdoutWriter.Close() diff --git a/modules/git/foreachref/format.go b/modules/git/foreachref/format.go new file mode 100644 index 0000000000..c9aa5233e1 --- /dev/null +++ b/modules/git/foreachref/format.go @@ -0,0 +1,84 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package foreachref + +import ( + "encoding/hex" + "fmt" + "io" + "strings" +) + +var ( + nullChar = []byte("\x00") + dualNullChar = []byte("\x00\x00") +) + +// Format supports specifying and parsing an output format for 'git +// for-each-ref'. See See git-for-each-ref(1) for available fields. +type Format struct { + // fieldNames hold %(fieldname)s to be passed to the '--format' flag of + // for-each-ref. See git-for-each-ref(1) for available fields. + fieldNames []string + + // fieldDelim is the character sequence that is used to separate fields + // for each reference. fieldDelim and refDelim should be selected to not + // interfere with each other and to not be present in field values. + fieldDelim []byte + // fieldDelimStr is a string representation of fieldDelim. Used to save + // us from repetitive reallocation whenever we need the delimiter as a + // string. 
+ fieldDelimStr string + // refDelim is the character sequence used to separate reference from + // each other in the output. fieldDelim and refDelim should be selected + // to not interfere with each other and to not be present in field + // values. + refDelim []byte +} + +// NewFormat creates a forEachRefFormat using the specified fieldNames. See +// git-for-each-ref(1) for available fields. +func NewFormat(fieldNames ...string) Format { + return Format{ + fieldNames: fieldNames, + fieldDelim: nullChar, + fieldDelimStr: string(nullChar), + refDelim: dualNullChar, + } +} + +// Flag returns a for-each-ref --format flag value that captures the fieldNames. +func (f Format) Flag() string { + var formatFlag strings.Builder + for i, field := range f.fieldNames { + // field key and field value + formatFlag.WriteString(fmt.Sprintf("%s %%(%s)", field, field)) + + if i < len(f.fieldNames)-1 { + // note: escape delimiters to allow control characters as + // delimiters. For example, '%00' for null character or '%0a' + // for newline. + formatFlag.WriteString(f.hexEscaped(f.fieldDelim)) + } + } + formatFlag.WriteString(f.hexEscaped(f.refDelim)) + return formatFlag.String() +} + +// Parser returns a Parser capable of parsing 'git for-each-ref' output produced +// with this Format. +func (f Format) Parser(r io.Reader) *Parser { + return NewParser(r, f) +} + +// hexEscaped produces hex-escpaed characters from a string. For example, "\n\0" +// would turn into "%0a%00". +func (f Format) hexEscaped(delim []byte) string { + escaped := "" + for i := 0; i < len(delim); i++ { + escaped += "%" + hex.EncodeToString([]byte{delim[i]}) + } + return escaped +} diff --git a/modules/git/foreachref/format_test.go b/modules/git/foreachref/format_test.go new file mode 100644 index 0000000000..5aca10f752 --- /dev/null +++ b/modules/git/foreachref/format_test.go @@ -0,0 +1,67 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package foreachref_test + +import ( + "testing" + + "code.gitea.io/gitea/modules/git/foreachref" + + "github.com/stretchr/testify/require" +) + +func TestFormat_Flag(t *testing.T) { + tests := []struct { + name string + + givenFormat foreachref.Format + + wantFlag string + }{ + { + name: "references are delimited by dual null chars", + + // no reference fields requested + givenFormat: foreachref.NewFormat(), + + // only a reference delimiter field in --format + wantFlag: "%00%00", + }, + + { + name: "a field is a space-separated key-value pair", + + givenFormat: foreachref.NewFormat("refname:short"), + + // only a reference delimiter field + wantFlag: "refname:short %(refname:short)%00%00", + }, + + { + name: "fields are separated by a null char field-delimiter", + + givenFormat: foreachref.NewFormat("refname:short", "author"), + + wantFlag: "refname:short %(refname:short)%00author %(author)%00%00", + }, + + { + name: "multiple fields", + + givenFormat: foreachref.NewFormat("refname:short", "objecttype", "objectname"), + + wantFlag: "refname:short %(refname:short)%00objecttype %(objecttype)%00objectname %(objectname)%00%00", + }, + } + + for _, test := range tests { + tc := test // don't close over loop variable + t.Run(tc.name, func(t *testing.T) { + gotFlag := tc.givenFormat.Flag() + + require.Equal(t, tc.wantFlag, gotFlag, "unexpected for-each-ref --format string. 
wanted: '%s', got: '%s'", tc.wantFlag, gotFlag) + }) + } +} diff --git a/modules/git/foreachref/parser.go b/modules/git/foreachref/parser.go new file mode 100644 index 0000000000..eb8b77d903 --- /dev/null +++ b/modules/git/foreachref/parser.go @@ -0,0 +1,131 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package foreachref + +import ( + "bufio" + "bytes" + "fmt" + "io" + "strings" +) + +// Parser parses 'git for-each-ref' output according to a given output Format. +type Parser struct { + // tokenizes 'git for-each-ref' output into "reference paragraphs". + scanner *bufio.Scanner + + // format represents the '--format' string that describes the expected + // 'git for-each-ref' output structure. + format Format + + // err holds the last encountered error during parsing. + err error +} + +// NewParser creates a 'git for-each-ref' output parser that will parse all +// references in the provided Reader. The references in the output are assumed +// to follow the specified Format. +func NewParser(r io.Reader, format Format) *Parser { + scanner := bufio.NewScanner(r) + + // in addition to the reference delimiter we specified in the --format, + // `git for-each-ref` will always add a newline after every reference. + refDelim := make([]byte, 0, len(format.refDelim)+1) + refDelim = append(refDelim, format.refDelim...) + refDelim = append(refDelim, '\n') + + // Split input into delimiter-separated "reference blocks". + scanner.Split( + func(data []byte, atEOF bool) (advance int, token []byte, err error) { + // Scan until delimiter, marking end of reference. + delimIdx := bytes.Index(data, refDelim) + if delimIdx >= 0 { + token := data[:delimIdx] + advance := delimIdx + len(refDelim) + return advance, token, nil + } + // If we're at EOF, we have a final, non-terminated reference. Return it. + if atEOF { + return len(data), data, nil + } + // Not yet a full field. Request more data. + return 0, nil, nil + }) + + return &Parser{ + scanner: scanner, + format: format, + err: nil, + } +} + +// Next returns the next reference as a collection of key-value pairs. nil +// denotes EOF but is also returned on errors. The Err method should always be +// consulted after Next returning nil. +// +// It could, for example return something like: +// +// { "objecttype": "tag", "refname:short": "v1.16.4", "object": "f460b7543ed500e49c133c2cd85c8c55ee9dbe27" } +// +func (p *Parser) Next() map[string]string { + if !p.scanner.Scan() { + return nil + } + fields, err := p.parseRef(p.scanner.Text()) + if err != nil { + p.err = err + return nil + } + return fields +} + +// Err returns the latest encountered parsing error. 
+func (p *Parser) Err() error { + return p.err +} + +// parseRef parses out all key-value pairs from a single reference block, such as +// +// "objecttype tag\0refname:short v1.16.4\0object f460b7543ed500e49c133c2cd85c8c55ee9dbe27" +// +func (p *Parser) parseRef(refBlock string) (map[string]string, error) { + if refBlock == "" { + // must be at EOF + return nil, nil + } + + fieldValues := make(map[string]string) + + fields := strings.Split(refBlock, p.format.fieldDelimStr) + if len(fields) != len(p.format.fieldNames) { + return nil, fmt.Errorf("unexpected number of reference fields: wanted %d, was %d", + len(fields), len(p.format.fieldNames)) + } + for i, field := range fields { + field = strings.TrimSpace(field) + + var fieldKey string + var fieldVal string + firstSpace := strings.Index(field, " ") + if firstSpace > 0 { + fieldKey = field[:firstSpace] + fieldVal = field[firstSpace+1:] + } else { + // could be the case if the requested field had no value + fieldKey = field + } + + // enforce the format order of fields + if p.format.fieldNames[i] != fieldKey { + return nil, fmt.Errorf("unexpected field name at position %d: wanted: '%s', was: '%s'", + i, p.format.fieldNames[i], fieldKey) + } + + fieldValues[fieldKey] = fieldVal + } + + return fieldValues, nil +} diff --git a/modules/git/foreachref/parser_test.go b/modules/git/foreachref/parser_test.go new file mode 100644 index 0000000000..cb36428604 --- /dev/null +++ b/modules/git/foreachref/parser_test.go @@ -0,0 +1,228 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package foreachref_test + +import ( + "errors" + "fmt" + "io" + "strings" + "testing" + + "code.gitea.io/gitea/modules/git/foreachref" + "code.gitea.io/gitea/modules/json" + + "github.com/stretchr/testify/require" +) + +type refSlice = []map[string]string + +func TestParser(t *testing.T) { + tests := []struct { + name string + + givenFormat foreachref.Format + givenInput io.Reader + + wantRefs refSlice + wantErr bool + expectedErr error + }{ + // this would, for example, be the result when running `git + // for-each-ref refs/tags` on a repo without tags. 
+ { + name: "no references on empty input", + + givenFormat: foreachref.NewFormat("refname:short"), + givenInput: strings.NewReader(``), + + wantRefs: []map[string]string{}, + }, + + // note: `git for-each-ref` will add a newline between every + // reference (in addition to the ref-delimiter we've chosen) + { + name: "single field requested, single reference in output", + + givenFormat: foreachref.NewFormat("refname:short"), + givenInput: strings.NewReader("refname:short v0.0.1\x00\x00" + "\n"), + + wantRefs: []map[string]string{ + {"refname:short": "v0.0.1"}, + }, + }, + { + name: "single field requested, multiple references in output", + + givenFormat: foreachref.NewFormat("refname:short"), + givenInput: strings.NewReader( + "refname:short v0.0.1\x00\x00" + "\n" + + "refname:short v0.0.2\x00\x00" + "\n" + + "refname:short v0.0.3\x00\x00" + "\n"), + + wantRefs: []map[string]string{ + {"refname:short": "v0.0.1"}, + {"refname:short": "v0.0.2"}, + {"refname:short": "v0.0.3"}, + }, + }, + + { + name: "multiple fields requested for each reference", + + givenFormat: foreachref.NewFormat("refname:short", "objecttype", "objectname"), + givenInput: strings.NewReader( + + "refname:short v0.0.1\x00objecttype commit\x00objectname 7b2c5ac9fc04fc5efafb60700713d4fa609b777b\x00\x00" + "\n" + + "refname:short v0.0.2\x00objecttype commit\x00objectname a1f051bc3eba734da4772d60e2d677f47cf93ef4\x00\x00" + "\n" + + "refname:short v0.0.3\x00objecttype commit\x00objectname ef82de70bb3f60c65fb8eebacbb2d122ef517385\x00\x00" + "\n", + ), + + wantRefs: []map[string]string{ + { + "refname:short": "v0.0.1", + "objecttype": "commit", + "objectname": "7b2c5ac9fc04fc5efafb60700713d4fa609b777b", + }, + { + "refname:short": "v0.0.2", + "objecttype": "commit", + "objectname": "a1f051bc3eba734da4772d60e2d677f47cf93ef4", + }, + { + "refname:short": "v0.0.3", + "objecttype": "commit", + "objectname": "ef82de70bb3f60c65fb8eebacbb2d122ef517385", + }, + }, + }, + + { + name: "must handle multi-line fields such as 'content'", + + givenFormat: foreachref.NewFormat("refname:short", "contents", "author"), + givenInput: strings.NewReader( + "refname:short v0.0.1\x00contents Create new buffer if not present yet (#549)\n\nFixes a nil dereference when ProcessFoo is used\nwith multiple commands.\x00author Foo Bar 1507832733 +0200\x00\x00" + "\n" + + "refname:short v0.0.2\x00contents Update CI config (#651)\n\n\x00author John Doe 1521643174 +0000\x00\x00" + "\n" + + "refname:short v0.0.3\x00contents Fixed code sample for bash completion (#687)\n\n\x00author Foo Baz 1524836750 +0200\x00\x00" + "\n", + ), + + wantRefs: []map[string]string{ + { + "refname:short": "v0.0.1", + "contents": "Create new buffer if not present yet (#549)\n\nFixes a nil dereference when ProcessFoo is used\nwith multiple commands.", + "author": "Foo Bar 1507832733 +0200", + }, + { + "refname:short": "v0.0.2", + "contents": "Update CI config (#651)", + "author": "John Doe 1521643174 +0000", + }, + { + "refname:short": "v0.0.3", + "contents": "Fixed code sample for bash completion (#687)", + "author": "Foo Baz 1524836750 +0200", + }, + }, + }, + + { + name: "must handle fields without values", + + givenFormat: foreachref.NewFormat("refname:short", "object", "objecttype"), + givenInput: strings.NewReader( + "refname:short v0.0.1\x00object \x00objecttype commit\x00\x00" + "\n" + + "refname:short v0.0.2\x00object \x00objecttype commit\x00\x00" + "\n" + + "refname:short v0.0.3\x00object \x00objecttype commit\x00\x00" + "\n", + ), + + wantRefs: []map[string]string{ + { + 
"refname:short": "v0.0.1", + "object": "", + "objecttype": "commit", + }, + { + "refname:short": "v0.0.2", + "object": "", + "objecttype": "commit", + }, + { + "refname:short": "v0.0.3", + "object": "", + "objecttype": "commit", + }, + }, + }, + + { + name: "must fail when the number of fields in the input doesn't match expected format", + + givenFormat: foreachref.NewFormat("refname:short", "objecttype", "objectname"), + givenInput: strings.NewReader( + "refname:short v0.0.1\x00objecttype commit\x00\x00" + "\n" + + "refname:short v0.0.2\x00objecttype commit\x00\x00" + "\n" + + "refname:short v0.0.3\x00objecttype commit\x00\x00" + "\n", + ), + + wantErr: true, + expectedErr: errors.New("unexpected number of reference fields: wanted 2, was 3"), + }, + + { + name: "must fail input fields don't match expected format", + + givenFormat: foreachref.NewFormat("refname:short", "objectname"), + givenInput: strings.NewReader( + "refname:short v0.0.1\x00objecttype commit\x00\x00" + "\n" + + "refname:short v0.0.2\x00objecttype commit\x00\x00" + "\n" + + "refname:short v0.0.3\x00objecttype commit\x00\x00" + "\n", + ), + + wantErr: true, + expectedErr: errors.New("unexpected field name at position 1: wanted: 'objectname', was: 'objecttype'"), + }, + } + + for _, test := range tests { + tc := test // don't close over loop variable + t.Run(tc.name, func(t *testing.T) { + parser := tc.givenFormat.Parser(tc.givenInput) + + // + // parse references from input + // + gotRefs := make([]map[string]string, 0) + for { + ref := parser.Next() + if ref == nil { + break + } + gotRefs = append(gotRefs, ref) + } + err := parser.Err() + + // + // verify expectations + // + if tc.wantErr { + require.Error(t, err) + require.EqualError(t, err, tc.expectedErr.Error()) + } else { + require.NoError(t, err, "for-each-ref parser unexpectedly failed with: %v", err) + require.Equal(t, tc.wantRefs, gotRefs, "for-each-ref parser produced unexpected reference set. wanted: %v, got: %v", pretty(tc.wantRefs), pretty(gotRefs)) + } + }) + } +} + +func pretty(v interface{}) string { + data, err := json.MarshalIndent(v, "", " ") + if err != nil { + // shouldn't happen + panic(fmt.Sprintf("json-marshalling failed: %v", err)) + } + return string(data) +} diff --git a/modules/git/git.go b/modules/git/git.go index 14940d1f16..8fad070330 100644 --- a/modules/git/git.go +++ b/modules/git/git.go @@ -8,6 +8,7 @@ package git import ( "context" "fmt" + "os" "os/exec" "runtime" "strings" @@ -20,10 +21,11 @@ import ( ) var ( - // Prefix the log prefix - Prefix = "[git-module] " // GitVersionRequired is the minimum Git version required - GitVersionRequired = "1.7.2" + // At the moment, all code for git 1.x are not changed, if some users want to test with old git client + // or bypass the check, they still have a chance to edit this variable manually. + // If everything works fine, the code for git 1.x could be removed in a separate PR before 1.17 frozen. 
+ GitVersionRequired = "2.0.0" // GitExecutable is the command name of git // Could be updated to an absolute path while initialization @@ -54,9 +56,9 @@ func LoadGitVersion() error { return nil } - stdout, err := NewCommand(context.Background(), "version").Run() - if err != nil { - return err + stdout, _, runErr := NewCommand(context.Background(), "version").RunStdString(nil) + if runErr != nil { + return runErr } fields := strings.Fields(stdout) @@ -74,6 +76,7 @@ func LoadGitVersion() error { versionString = fields[2] } + var err error gitVersion, err = version.NewVersion(versionString) return err } @@ -86,13 +89,13 @@ func SetExecutablePath(path string) error { } absPath, err := exec.LookPath(GitExecutable) if err != nil { - return fmt.Errorf("Git not found: %v", err) + return fmt.Errorf("git not found: %w", err) } GitExecutable = absPath err = LoadGitVersion() if err != nil { - return fmt.Errorf("Git version missing: %v", err) + return fmt.Errorf("unable to load git version: %w", err) } versionRequired, err := version.NewVersion(GitVersionRequired) @@ -101,7 +104,15 @@ func SetExecutablePath(path string) error { } if gitVersion.LessThan(versionRequired) { - return fmt.Errorf("Git version not supported. Requires version > %v", GitVersionRequired) + moreHint := "get git: https://git-scm.com/download/" + if runtime.GOOS == "linux" { + // there are a lot of CentOS/RHEL users using old git, so we add a special hint for them + if _, err = os.Stat("/etc/redhat-release"); err == nil { + // ius.io is the recommended official(git-scm.com) method to install git + moreHint = "get git: https://git-scm.com/download/linux and https://ius.io" + } + } + return fmt.Errorf("installed git version %q is not supported, Gitea requires git version >= %q, %s", gitVersion.Original(), GitVersionRequired, moreHint) } return nil @@ -124,7 +135,9 @@ func VersionInfo() string { func Init(ctx context.Context) error { DefaultContext = ctx - defaultCommandExecutionTimeout = time.Duration(setting.Git.Timeout.Default) * time.Second + if setting.Git.Timeout.Default > 0 { + defaultCommandExecutionTimeout = time.Duration(setting.Git.Timeout.Default) * time.Second + } if err := SetExecutablePath(setting.Git.Path); err != nil { return err @@ -145,7 +158,7 @@ func Init(ctx context.Context) error { // By default partial clones are disabled, enable them from git v2.22 if !setting.Git.DisablePartialClone && CheckGitVersionAtLeast("2.22") == nil { - globalCommandArgs = append(globalCommandArgs, "-c", "uploadpack.allowfilter=true") + globalCommandArgs = append(globalCommandArgs, "-c", "uploadpack.allowfilter=true", "-c", "uploadpack.allowAnySHA1InWant=true") } // Save current git version on init to gitVersion otherwise it would require an RWMutex @@ -295,10 +308,5 @@ func checkAndRemoveConfig(key, value string) error { // Fsck verifies the connectivity and validity of the objects in the database func Fsck(ctx context.Context, repoPath string, timeout time.Duration, args ...string) error { - // Make sure timeout makes sense. 
- if timeout <= 0 { - timeout = -1 - } - _, err := NewCommand(ctx, "fsck").AddArguments(args...).RunInDirTimeout(timeout, repoPath) - return err + return NewCommand(ctx, "fsck").AddArguments(args...).Run(&RunOpts{Timeout: timeout, Dir: repoPath}) } diff --git a/modules/git/last_commit_cache_gogit.go b/modules/git/last_commit_cache_gogit.go index 06e85a6db2..8897000350 100644 --- a/modules/git/last_commit_cache_gogit.go +++ b/modules/git/last_commit_cache_gogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/last_commit_cache_nogogit.go b/modules/git/last_commit_cache_nogogit.go index 5315c0a152..030d5486b6 100644 --- a/modules/git/last_commit_cache_nogogit.go +++ b/modules/git/last_commit_cache_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/log_name_status.go b/modules/git/log_name_status.go index 0571a4dd20..ffd0a0991b 100644 --- a/modules/git/log_name_status.go +++ b/modules/git/log_name_status.go @@ -55,11 +55,10 @@ func LogNameStatusRepo(ctx context.Context, repository, head, treepath string, p go func() { stderr := strings.Builder{} - err := NewCommand(ctx, args...).RunWithContext(&RunContext{ - Timeout: -1, - Dir: repository, - Stdout: stdoutWriter, - Stderr: &stderr, + err := NewCommand(ctx, args...).Run(&RunOpts{ + Dir: repository, + Stdout: stdoutWriter, + Stderr: &stderr, }) if err != nil { _ = stdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String())) diff --git a/modules/git/notes_gogit.go b/modules/git/notes_gogit.go index b1e5e453e4..76bc828957 100644 --- a/modules/git/notes_gogit.go +++ b/modules/git/notes_gogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/notes_nogogit.go b/modules/git/notes_nogogit.go index bbc8ee1371..e3f0a3fee9 100644 --- a/modules/git/notes_nogogit.go +++ b/modules/git/notes_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/notes_test.go b/modules/git/notes_test.go index fec46e5960..34e7178bea 100644 --- a/modules/git/notes_test.go +++ b/modules/git/notes_test.go @@ -14,7 +14,7 @@ import ( func TestGetNotes(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() @@ -27,7 +27,7 @@ func TestGetNotes(t *testing.T) { func TestGetNestedNotes(t *testing.T) { repoPath := filepath.Join(testReposDir, "repo3_notes") - repo, err := OpenRepository(repoPath) + repo, err := openRepositoryWithDefaultContext(repoPath) assert.NoError(t, err) defer repo.Close() @@ -42,7 +42,7 @@ func TestGetNestedNotes(t *testing.T) { func TestGetNonExistentNotes(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() diff --git a/modules/git/parse_gogit.go b/modules/git/parse_gogit.go index c42e32929e..409432c5d6 100644 --- a/modules/git/parse_gogit.go +++ b/modules/git/parse_gogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build gogit -// +build gogit package git diff --git a/modules/git/parse_gogit_test.go b/modules/git/parse_gogit_test.go index c27f5172d5..075de6d25d 100644 --- a/modules/git/parse_gogit_test.go +++ b/modules/git/parse_gogit_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/parse_nogogit.go b/modules/git/parse_nogogit.go index dd5554b5dd..6dc4900992 100644 --- a/modules/git/parse_nogogit.go +++ b/modules/git/parse_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/parse_nogogit_test.go b/modules/git/parse_nogogit_test.go index d6d6f3868c..483f96e9a7 100644 --- a/modules/git/parse_nogogit_test.go +++ b/modules/git/parse_nogogit_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/pipeline/catfile.go b/modules/git/pipeline/catfile.go index 6948131e46..40dd2bca29 100644 --- a/modules/git/pipeline/catfile.go +++ b/modules/git/pipeline/catfile.go @@ -27,12 +27,11 @@ func CatFileBatchCheck(ctx context.Context, shasToCheckReader *io.PipeReader, ca stderr := new(bytes.Buffer) var errbuf strings.Builder cmd := git.NewCommand(ctx, "cat-file", "--batch-check") - if err := cmd.RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdin: shasToCheckReader, - Stdout: catFileCheckWriter, - Stderr: stderr, + if err := cmd.Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdin: shasToCheckReader, + Stdout: catFileCheckWriter, + Stderr: stderr, }); err != nil { _ = catFileCheckWriter.CloseWithError(fmt.Errorf("git cat-file --batch-check [%s]: %v - %s", tmpBasePath, err, errbuf.String())) } @@ -46,11 +45,10 @@ func CatFileBatchCheckAllObjects(ctx context.Context, catFileCheckWriter *io.Pip stderr := new(bytes.Buffer) var errbuf strings.Builder cmd := git.NewCommand(ctx, "cat-file", "--batch-check", "--batch-all-objects") - if err := cmd.RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: catFileCheckWriter, - Stderr: stderr, + if err := cmd.Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: catFileCheckWriter, + Stderr: stderr, }); err != nil { log.Error("git cat-file --batch-check --batch-all-object [%s]: %v - %s", tmpBasePath, err, errbuf.String()) err = fmt.Errorf("git cat-file --batch-check --batch-all-object [%s]: %v - %s", tmpBasePath, err, errbuf.String()) @@ -67,12 +65,11 @@ func CatFileBatch(ctx context.Context, shasToBatchReader *io.PipeReader, catFile stderr := new(bytes.Buffer) var errbuf strings.Builder - if err := git.NewCommand(ctx, "cat-file", "--batch").RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: catFileBatchWriter, - Stdin: shasToBatchReader, - Stderr: stderr, + if err := git.NewCommand(ctx, "cat-file", "--batch").Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: catFileBatchWriter, + Stdin: shasToBatchReader, + Stderr: stderr, }); err != nil { _ = shasToBatchReader.CloseWithError(fmt.Errorf("git rev-list [%s]: %v - %s", tmpBasePath, err, errbuf.String())) } diff --git a/modules/git/pipeline/lfs.go b/modules/git/pipeline/lfs.go index 1b64b672e4..18cce34289 100644 --- a/modules/git/pipeline/lfs.go +++ b/modules/git/pipeline/lfs.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build gogit -// +build gogit package pipeline diff --git a/modules/git/pipeline/lfs_nogogit.go b/modules/git/pipeline/lfs_nogogit.go index 1d43080a5a..a2b5dd0c96 100644 --- a/modules/git/pipeline/lfs_nogogit.go +++ b/modules/git/pipeline/lfs_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package pipeline @@ -53,11 +52,10 @@ func FindLFSFile(repo *git.Repository, hash git.SHA1) ([]*LFSResult, error) { go func() { stderr := strings.Builder{} - err := git.NewCommand(repo.Ctx, "rev-list", "--all").RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: revListWriter, - Stderr: &stderr, + err := git.NewCommand(repo.Ctx, "rev-list", "--all").Run(&git.RunOpts{ + Dir: repo.Path, + Stdout: revListWriter, + Stderr: &stderr, }) if err != nil { _ = revListWriter.CloseWithError(git.ConcatenateError(err, (&stderr).String())) diff --git a/modules/git/pipeline/namerev.go b/modules/git/pipeline/namerev.go index 357322070e..8356e70234 100644 --- a/modules/git/pipeline/namerev.go +++ b/modules/git/pipeline/namerev.go @@ -23,12 +23,11 @@ func NameRevStdin(ctx context.Context, shasToNameReader *io.PipeReader, nameRevS stderr := new(bytes.Buffer) var errbuf strings.Builder - if err := git.NewCommand(ctx, "name-rev", "--stdin", "--name-only", "--always").RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: nameRevStdinWriter, - Stdin: shasToNameReader, - Stderr: stderr, + if err := git.NewCommand(ctx, "name-rev", "--stdin", "--name-only", "--always").Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: nameRevStdinWriter, + Stdin: shasToNameReader, + Stderr: stderr, }); err != nil { _ = shasToNameReader.CloseWithError(fmt.Errorf("git name-rev [%s]: %v - %s", tmpBasePath, err, errbuf.String())) } diff --git a/modules/git/pipeline/revlist.go b/modules/git/pipeline/revlist.go index a1f8f079f9..02619cb583 100644 --- a/modules/git/pipeline/revlist.go +++ b/modules/git/pipeline/revlist.go @@ -25,11 +25,10 @@ func RevListAllObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sy stderr := new(bytes.Buffer) var errbuf strings.Builder cmd := git.NewCommand(ctx, "rev-list", "--objects", "--all") - if err := cmd.RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: basePath, - Stdout: revListWriter, - Stderr: stderr, + if err := cmd.Run(&git.RunOpts{ + Dir: basePath, + Stdout: revListWriter, + Stderr: stderr, }); err != nil { log.Error("git rev-list --objects --all [%s]: %v - %s", basePath, err, errbuf.String()) err = fmt.Errorf("git rev-list --objects --all [%s]: %v - %s", basePath, err, errbuf.String()) @@ -45,11 +44,10 @@ func RevListObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync. 
stderr := new(bytes.Buffer) var errbuf strings.Builder cmd := git.NewCommand(ctx, "rev-list", "--objects", headSHA, "--not", baseSHA) - if err := cmd.RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: revListWriter, - Stderr: stderr, + if err := cmd.Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: revListWriter, + Stderr: stderr, }); err != nil { log.Error("git rev-list [%s]: %v - %s", tmpBasePath, err, errbuf.String()) errChan <- fmt.Errorf("git rev-list [%s]: %v - %s", tmpBasePath, err, errbuf.String()) diff --git a/modules/git/remote.go b/modules/git/remote.go index dfd0686d8b..536b1681ce 100644 --- a/modules/git/remote.go +++ b/modules/git/remote.go @@ -22,7 +22,7 @@ func GetRemoteAddress(ctx context.Context, repoPath, remoteName string) (*url.UR cmd = NewCommand(ctx, "config", "--get", "remote."+remoteName+".url") } - result, err := cmd.RunInDir(repoPath) + result, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) if err != nil { return nil, err } diff --git a/modules/git/repo.go b/modules/git/repo.go index 8217521b06..3176e27695 100644 --- a/modules/git/repo.go +++ b/modules/git/repo.go @@ -19,6 +19,7 @@ import ( "time" "code.gitea.io/gitea/modules/proxy" + "code.gitea.io/gitea/modules/util" ) // GPGSettings represents the default GPG settings for this repository @@ -58,7 +59,7 @@ func (repo *Repository) parsePrettyFormatLogToList(logs []byte) ([]*Commit, erro // IsRepoURLAccessible checks if given repository URL is accessible. func IsRepoURLAccessible(ctx context.Context, url string) bool { - _, err := NewCommand(ctx, "ls-remote", "-q", "-h", url, "HEAD").Run() + _, _, err := NewCommand(ctx, "ls-remote", "-q", "-h", url, "HEAD").RunStdString(nil) return err == nil } @@ -73,7 +74,7 @@ func InitRepository(ctx context.Context, repoPath string, bare bool) error { if bare { cmd.AddArguments("--bare") } - _, err = cmd.RunInDir(repoPath) + _, _, err = cmd.RunStdString(&RunOpts{Dir: repoPath}) return err } @@ -81,11 +82,10 @@ func InitRepository(ctx context.Context, repoPath string, bare bool) error { func (repo *Repository) IsEmpty() (bool, error) { var errbuf, output strings.Builder if err := NewCommand(repo.Ctx, "show-ref", "--head", "^HEAD$"). - RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: &output, - Stderr: &errbuf, + Run(&RunOpts{ + Dir: repo.Path, + Stdout: &output, + Stderr: &errbuf, }); err != nil { if err.Error() == "exit status 1" && errbuf.String() == "" { return true, nil @@ -98,15 +98,16 @@ func (repo *Repository) IsEmpty() (bool, error) { // CloneRepoOptions options when clone a repository type CloneRepoOptions struct { - Timeout time.Duration - Mirror bool - Bare bool - Quiet bool - Branch string - Shared bool - NoCheckout bool - Depth int - Filter string + Timeout time.Duration + Mirror bool + Bare bool + Quiet bool + Branch string + Shared bool + NoCheckout bool + Depth int + Filter string + SkipTLSVerify bool } // Clone clones original repository to target path. 
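As a usage illustration (a minimal sketch of assumed caller code, not part of this patch): the new SkipTLSVerify field on CloneRepoOptions is translated by CloneWithArgs into "-c http.sslVerify=false", as the next hunk shows. The git.Clone wrapper, the URL and the target path below are illustrative assumptions.

    package main

    import (
        "context"

        "code.gitea.io/gitea/modules/git"
    )

    // cloneInsecure clones a repository whose TLS certificate cannot be verified,
    // using the SkipTLSVerify option introduced in this patch (sketch only).
    func cloneInsecure(ctx context.Context) error {
        opts := git.CloneRepoOptions{
            Bare:          true,
            Depth:         1,
            SkipTLSVerify: true, // becomes "-c http.sslVerify=false" on the git command line
        }
        // repository URL and destination path are placeholders
        return git.Clone(ctx, "https://self-signed.example.com/org/repo.git", "/tmp/repo.git", opts)
    }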
@@ -124,6 +125,9 @@ func CloneWithArgs(ctx context.Context, from, to string, args []string, opts Clo } cmd := NewCommandContextNoGlobals(ctx, args...).AddArguments("clone") + if opts.SkipTLSVerify { + cmd.AddArguments("-c", "http.sslVerify=false") + } if opts.Mirror { cmd.AddArguments("--mirror") } @@ -150,6 +154,12 @@ func CloneWithArgs(ctx context.Context, from, to string, args []string, opts Clo } cmd.AddArguments("--", from, to) + if strings.Contains(from, "://") && strings.Contains(from, "@") { + cmd.SetDescription(fmt.Sprintf("clone branch %s from %s to %s (shared: %t, mirror: %t, depth: %d)", opts.Branch, util.SanitizeCredentialURLs(from), to, opts.Shared, opts.Mirror, opts.Depth)) + } else { + cmd.SetDescription(fmt.Sprintf("clone branch %s from %s to %s (shared: %t, mirror: %t, depth: %d)", opts.Branch, from, to, opts.Shared, opts.Mirror, opts.Depth)) + } + if opts.Timeout <= 0 { opts.Timeout = -1 } @@ -163,7 +173,7 @@ func CloneWithArgs(ctx context.Context, from, to string, args []string, opts Clo } stderr := new(bytes.Buffer) - if err = cmd.RunWithContext(&RunContext{ + if err = cmd.Run(&RunOpts{ Timeout: opts.Timeout, Env: envs, Stdout: io.Discard, @@ -197,13 +207,18 @@ func Push(ctx context.Context, repoPath string, opts PushOptions) error { if len(opts.Branch) > 0 { cmd.AddArguments(opts.Branch) } + if strings.Contains(opts.Remote, "://") && strings.Contains(opts.Remote, "@") { + cmd.SetDescription(fmt.Sprintf("push branch %s to %s (force: %t, mirror: %t)", opts.Branch, util.SanitizeCredentialURLs(opts.Remote), opts.Force, opts.Mirror)) + } else { + cmd.SetDescription(fmt.Sprintf("push branch %s to %s (force: %t, mirror: %t)", opts.Branch, opts.Remote, opts.Force, opts.Mirror)) + } var outbuf, errbuf strings.Builder if opts.Timeout == 0 { opts.Timeout = -1 } - err := cmd.RunWithContext(&RunContext{ + err := cmd.Run(&RunOpts{ Env: opts.Env, Timeout: opts.Timeout, Dir: repoPath, @@ -245,7 +260,7 @@ func Push(ctx context.Context, repoPath string, opts PushOptions) error { // GetLatestCommitTime returns time for latest commit in repository (across all branches) func GetLatestCommitTime(ctx context.Context, repoPath string) (time.Time, error) { cmd := NewCommand(ctx, "for-each-ref", "--sort=-committerdate", BranchPrefix, "--count", "1", "--format=%(committerdate)") - stdout, err := cmd.RunInDir(repoPath) + stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) if err != nil { return time.Time{}, err } @@ -262,7 +277,7 @@ type DivergeObject struct { func checkDivergence(ctx context.Context, repoPath, baseBranch, targetBranch string) (int, error) { branches := fmt.Sprintf("%s..%s", baseBranch, targetBranch) cmd := NewCommand(ctx, "rev-list", "--count", branches) - stdout, err := cmd.RunInDir(repoPath) + stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) if err != nil { return -1, err } @@ -299,23 +314,23 @@ func (repo *Repository) CreateBundle(ctx context.Context, commit string, out io. 
defer os.RemoveAll(tmp) env := append(os.Environ(), "GIT_OBJECT_DIRECTORY="+filepath.Join(repo.Path, "objects")) - _, err = NewCommand(ctx, "init", "--bare").RunInDirWithEnv(tmp, env) + _, _, err = NewCommand(ctx, "init", "--bare").RunStdString(&RunOpts{Dir: tmp, Env: env}) if err != nil { return err } - _, err = NewCommand(ctx, "reset", "--soft", commit).RunInDirWithEnv(tmp, env) + _, _, err = NewCommand(ctx, "reset", "--soft", commit).RunStdString(&RunOpts{Dir: tmp, Env: env}) if err != nil { return err } - _, err = NewCommand(ctx, "branch", "-m", "bundle").RunInDirWithEnv(tmp, env) + _, _, err = NewCommand(ctx, "branch", "-m", "bundle").RunStdString(&RunOpts{Dir: tmp, Env: env}) if err != nil { return err } tmpFile := filepath.Join(tmp, "bundle") - _, err = NewCommand(ctx, "bundle", "create", tmpFile, "bundle", "HEAD").RunInDirWithEnv(tmp, env) + _, _, err = NewCommand(ctx, "bundle", "create", tmpFile, "bundle", "HEAD").RunStdString(&RunOpts{Dir: tmp, Env: env}) if err != nil { return err } diff --git a/modules/git/repo_archive.go b/modules/git/repo_archive.go index b7c339c271..4a97989949 100644 --- a/modules/git/repo_archive.go +++ b/modules/git/repo_archive.go @@ -57,11 +57,10 @@ func (repo *Repository) CreateArchive(ctx context.Context, format ArchiveType, t ) var stderr strings.Builder - err := NewCommand(ctx, args...).RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: target, - Stderr: &stderr, + err := NewCommand(ctx, args...).Run(&RunOpts{ + Dir: repo.Path, + Stdout: target, + Stderr: &stderr, }) if err != nil { return ConcatenateError(err, stderr.String()) diff --git a/modules/git/repo_attribute.go b/modules/git/repo_attribute.go index ce24b0a7a3..a18c80c3f1 100644 --- a/modules/git/repo_attribute.go +++ b/modules/git/repo_attribute.go @@ -76,12 +76,11 @@ func (repo *Repository) CheckAttribute(opts CheckAttributeOpts) (map[string]map[ cmd := NewCommand(repo.Ctx, cmdArgs...) - if err := cmd.RunWithContext(&RunContext{ - Env: env, - Timeout: -1, - Dir: repo.Path, - Stdout: stdOut, - Stderr: stdErr, + if err := cmd.Run(&RunOpts{ + Env: env, + Dir: repo.Path, + Stdout: stdOut, + Stderr: stdErr, }); err != nil { return nil, fmt.Errorf("failed to run check-attr: %v\n%s\n%s", err, stdOut.String(), stdErr.String()) } @@ -125,12 +124,10 @@ type CheckAttributeReader struct { env []string ctx context.Context cancel context.CancelFunc - running chan struct{} } // Init initializes the cmd func (c *CheckAttributeReader) Init(ctx context.Context) error { - c.running = make(chan struct{}) cmdArgs := []string{"check-attr", "--stdin", "-z"} if len(c.IndexFile) > 0 && CheckGitVersionAtLeast("1.7.8") == nil { @@ -189,21 +186,12 @@ func (c *CheckAttributeReader) Run() error { _ = c.stdOut.Close() }() stdErr := new(bytes.Buffer) - err := c.cmd.RunWithContext(&RunContext{ - Env: c.env, - Timeout: -1, - Dir: c.Repo.Path, - Stdin: c.stdinReader, - Stdout: c.stdOut, - Stderr: stdErr, - PipelineFunc: func(_ context.Context, _ context.CancelFunc) error { - select { - case <-c.running: - default: - close(c.running) - } - return nil - }, + err := c.cmd.Run(&RunOpts{ + Env: c.env, + Dir: c.Repo.Path, + Stdin: c.stdinReader, + Stdout: c.stdOut, + Stderr: stdErr, }) if err != nil && // If there is an error we need to return but: c.ctx.Err() != err && // 1. 
Ignore the context error if the context is cancelled or exceeds the deadline (RunWithContext could return c.ctx.Err() which is Canceled or DeadlineExceeded) @@ -224,7 +212,7 @@ func (c *CheckAttributeReader) CheckPath(path string) (rs map[string]string, err select { case <-c.ctx.Done(): return nil, c.ctx.Err() - case <-c.running: + default: } if _, err = c.stdinWriter.Write([]byte(path + "\x00")); err != nil { @@ -251,11 +239,6 @@ func (c *CheckAttributeReader) CheckPath(path string) (rs map[string]string, err func (c *CheckAttributeReader) Close() error { c.cancel() err := c.stdinWriter.Close() - select { - case <-c.running: - default: - close(c.running) - } return err } diff --git a/modules/git/repo_base.go b/modules/git/repo_base.go index 8c2f19f6d7..710f0de9f6 100644 --- a/modules/git/repo_base.go +++ b/modules/git/repo_base.go @@ -44,6 +44,6 @@ func RepositoryFromContextOrOpen(ctx context.Context, path string) (*Repository, return gitRepo, nopCloser(nil), nil } - gitRepo, err := OpenRepositoryCtx(ctx, path) + gitRepo, err := OpenRepository(ctx, path) return gitRepo, gitRepo, err } diff --git a/modules/git/repo_base_gogit.go b/modules/git/repo_base_gogit.go index 23009d80f7..cd2ca25dfb 100644 --- a/modules/git/repo_base_gogit.go +++ b/modules/git/repo_base_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git @@ -35,13 +34,13 @@ type Repository struct { Ctx context.Context } -// OpenRepository opens the repository at the given path. -func OpenRepository(repoPath string) (*Repository, error) { - return OpenRepositoryCtx(DefaultContext, repoPath) +// openRepositoryWithDefaultContext opens the repository at the given path with DefaultContext. +func openRepositoryWithDefaultContext(repoPath string) (*Repository, error) { + return OpenRepository(DefaultContext, repoPath) } -// OpenRepositoryCtx opens the repository at the given path within the context.Context -func OpenRepositoryCtx(ctx context.Context, repoPath string) (*Repository, error) { +// OpenRepository opens the repository at the given path within the context.Context +func OpenRepository(ctx context.Context, repoPath string) (*Repository, error) { repoPath, err := filepath.Abs(repoPath) if err != nil { return nil, err diff --git a/modules/git/repo_base_nogogit.go b/modules/git/repo_base_nogogit.go index ad4a00be1f..df24d952a8 100644 --- a/modules/git/repo_base_nogogit.go +++ b/modules/git/repo_base_nogogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git @@ -36,13 +35,13 @@ type Repository struct { Ctx context.Context } -// OpenRepository opens the repository at the given path. -func OpenRepository(repoPath string) (*Repository, error) { - return OpenRepositoryCtx(DefaultContext, repoPath) +// openRepositoryWithDefaultContext opens the repository at the given path with DefaultContext. +func openRepositoryWithDefaultContext(repoPath string) (*Repository, error) { + return OpenRepository(DefaultContext, repoPath) } -// OpenRepositoryCtx opens the repository at the given path with the provided context. -func OpenRepositoryCtx(ctx context.Context, repoPath string) (*Repository, error) { +// OpenRepository opens the repository at the given path with the provided context. 
+func OpenRepository(ctx context.Context, repoPath string) (*Repository, error) { repoPath, err := filepath.Abs(repoPath) if err != nil { return nil, err diff --git a/modules/git/repo_blame.go b/modules/git/repo_blame.go index a71122527f..6fe6d235ba 100644 --- a/modules/git/repo_blame.go +++ b/modules/git/repo_blame.go @@ -8,12 +8,13 @@ import "fmt" // FileBlame return the Blame object of file func (repo *Repository) FileBlame(revision, path, file string) ([]byte, error) { - return NewCommand(repo.Ctx, "blame", "--root", "--", file).RunInDirBytes(path) + stdout, _, err := NewCommand(repo.Ctx, "blame", "--root", "--", file).RunStdBytes(&RunOpts{Dir: path}) + return stdout, err } // LineBlame returns the latest commit at the given line func (repo *Repository) LineBlame(revision, path, file string, line uint) (*Commit, error) { - res, err := NewCommand(repo.Ctx, "blame", fmt.Sprintf("-L %d,%d", line, line), "-p", revision, "--", file).RunInDir(path) + res, _, err := NewCommand(repo.Ctx, "blame", fmt.Sprintf("-L %d,%d", line, line), "-p", revision, "--", file).RunStdString(&RunOpts{Dir: path}) if err != nil { return nil, err } diff --git a/modules/git/repo_blob_gogit.go b/modules/git/repo_blob_gogit.go index b11e9f58fe..5640011f4a 100644 --- a/modules/git/repo_blob_gogit.go +++ b/modules/git/repo_blob_gogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/repo_blob_nogogit.go b/modules/git/repo_blob_nogogit.go index 775b3835dd..44ba0a36b1 100644 --- a/modules/git/repo_blob_nogogit.go +++ b/modules/git/repo_blob_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/repo_blob_test.go b/modules/git/repo_blob_test.go index 132a3fa50c..9f0b865377 100644 --- a/modules/git/repo_blob_test.go +++ b/modules/git/repo_blob_test.go @@ -15,7 +15,7 @@ import ( func TestRepository_GetBlob_Found(t *testing.T) { repoPath := filepath.Join(testReposDir, "repo1_bare") - r, err := OpenRepository(repoPath) + r, err := openRepositoryWithDefaultContext(repoPath) assert.NoError(t, err) defer r.Close() @@ -43,7 +43,7 @@ func TestRepository_GetBlob_Found(t *testing.T) { func TestRepository_GetBlob_NotExist(t *testing.T) { repoPath := filepath.Join(testReposDir, "repo1_bare") - r, err := OpenRepository(repoPath) + r, err := openRepositoryWithDefaultContext(repoPath) assert.NoError(t, err) defer r.Close() @@ -57,7 +57,7 @@ func TestRepository_GetBlob_NotExist(t *testing.T) { func TestRepository_GetBlob_NoId(t *testing.T) { repoPath := filepath.Join(testReposDir, "repo1_bare") - r, err := OpenRepository(repoPath) + r, err := openRepositoryWithDefaultContext(repoPath) assert.NoError(t, err) defer r.Close() diff --git a/modules/git/repo_branch.go b/modules/git/repo_branch.go index d9a7a47771..8e455480e7 100644 --- a/modules/git/repo_branch.go +++ b/modules/git/repo_branch.go @@ -24,7 +24,7 @@ const PullRequestPrefix = "refs/for/" // IsReferenceExist returns true if given reference exists in the repository. 
func IsReferenceExist(ctx context.Context, repoPath, name string) bool { - _, err := NewCommand(ctx, "show-ref", "--verify", "--", name).RunInDir(repoPath) + _, _, err := NewCommand(ctx, "show-ref", "--verify", "--", name).RunStdString(&RunOpts{Dir: repoPath}) return err == nil } @@ -46,7 +46,7 @@ func (repo *Repository) GetHEADBranch() (*Branch, error) { if repo == nil { return nil, fmt.Errorf("nil repo") } - stdout, err := NewCommand(repo.Ctx, "symbolic-ref", "HEAD").RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "symbolic-ref", "HEAD").RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -65,13 +65,14 @@ func (repo *Repository) GetHEADBranch() (*Branch, error) { // SetDefaultBranch sets default branch of repository. func (repo *Repository) SetDefaultBranch(name string) error { - _, err := NewCommand(repo.Ctx, "symbolic-ref", "HEAD", BranchPrefix+name).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "symbolic-ref", "HEAD", BranchPrefix+name).RunStdString(&RunOpts{Dir: repo.Path}) return err } // GetDefaultBranch gets default branch of repository. func (repo *Repository) GetDefaultBranch() (string, error) { - return NewCommand(repo.Ctx, "symbolic-ref", "HEAD").RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "symbolic-ref", "HEAD").RunStdString(&RunOpts{Dir: repo.Path}) + return stdout, err } // GetBranch returns a branch by it's name @@ -89,7 +90,7 @@ func (repo *Repository) GetBranch(branch string) (*Branch, error) { // GetBranchesByPath returns a branch by it's path // if limit = 0 it will not limit func GetBranchesByPath(ctx context.Context, path string, skip, limit int) ([]*Branch, int, error) { - gitRepo, err := OpenRepositoryCtx(ctx, path) + gitRepo, err := OpenRepository(ctx, path) if err != nil { return nil, 0, err } @@ -133,7 +134,7 @@ func (repo *Repository) DeleteBranch(name string, opts DeleteBranchOptions) erro } cmd.AddArguments("--", name) - _, err := cmd.RunInDir(repo.Path) + _, _, err := cmd.RunStdString(&RunOpts{Dir: repo.Path}) return err } @@ -143,7 +144,7 @@ func (repo *Repository) CreateBranch(branch, oldbranchOrCommit string) error { cmd := NewCommand(repo.Ctx, "branch") cmd.AddArguments("--", branch, oldbranchOrCommit) - _, err := cmd.RunInDir(repo.Path) + _, _, err := cmd.RunStdString(&RunOpts{Dir: repo.Path}) return err } @@ -156,13 +157,13 @@ func (repo *Repository) AddRemote(name, url string, fetch bool) error { } cmd.AddArguments(name, url) - _, err := cmd.RunInDir(repo.Path) + _, _, err := cmd.RunStdString(&RunOpts{Dir: repo.Path}) return err } // RemoveRemote removes a remote from repository. func (repo *Repository) RemoveRemote(name string) error { - _, err := NewCommand(repo.Ctx, "remote", "rm", name).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "remote", "rm", name).RunStdString(&RunOpts{Dir: repo.Path}) return err } @@ -173,6 +174,6 @@ func (branch *Branch) GetCommit() (*Commit, error) { // RenameBranch rename a branch func (repo *Repository) RenameBranch(from, to string) error { - _, err := NewCommand(repo.Ctx, "branch", "-m", from, to).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "branch", "-m", from, to).RunStdString(&RunOpts{Dir: repo.Path}) return err } diff --git a/modules/git/repo_branch_gogit.go b/modules/git/repo_branch_gogit.go index 57952bcc64..dc29576562 100644 --- a/modules/git/repo_branch_gogit.go +++ b/modules/git/repo_branch_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. 
//go:build gogit -// +build gogit package git @@ -13,6 +12,7 @@ import ( "strings" "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/storer" ) // IsObjectExist returns true if given reference exists in the repository. @@ -82,11 +82,12 @@ func (repo *Repository) GetBranchNames(skip, limit int) ([]string, int, error) { } // WalkReferences walks all the references from the repository -func WalkReferences(ctx context.Context, repoPath string, walkfn func(string) error) (int, error) { +// refType should be empty, ObjectTag or ObjectBranch. All other values are equivalent to empty. +func WalkReferences(ctx context.Context, repoPath string, walkfn func(sha1, refname string) error) (int, error) { repo := RepositoryFromContext(ctx, repoPath) if repo == nil { var err error - repo, err = OpenRepositoryCtx(ctx, repoPath) + repo, err = OpenRepository(ctx, repoPath) if err != nil { return 0, err } @@ -101,9 +102,61 @@ func WalkReferences(ctx context.Context, repoPath string, walkfn func(string) er defer iter.Close() err = iter.ForEach(func(ref *plumbing.Reference) error { - err := walkfn(string(ref.Name())) + err := walkfn(ref.Hash().String(), string(ref.Name())) i++ return err }) return i, err } + +// WalkReferences walks all the references from the repository +func (repo *Repository) WalkReferences(arg ObjectType, skip, limit int, walkfn func(sha1, refname string) error) (int, error) { + i := 0 + var iter storer.ReferenceIter + var err error + switch arg { + case ObjectTag: + iter, err = repo.gogitRepo.Tags() + case ObjectBranch: + iter, err = repo.gogitRepo.Branches() + default: + iter, err = repo.gogitRepo.References() + } + if err != nil { + return i, err + } + defer iter.Close() + + err = iter.ForEach(func(ref *plumbing.Reference) error { + if i < skip { + i++ + return nil + } + err := walkfn(ref.Hash().String(), string(ref.Name())) + i++ + if err != nil { + return err + } + if limit != 0 && i >= skip+limit { + return storer.ErrStop + } + return nil + }) + return i, err +} + +// GetRefsBySha returns all references filtered with prefix that belong to a sha commit hash +func (repo *Repository) GetRefsBySha(sha, prefix string) ([]string, error) { + var revList []string + iter, err := repo.gogitRepo.References() + if err != nil { + return nil, err + } + err = iter.ForEach(func(ref *plumbing.Reference) error { + if ref.Hash().String() == sha && strings.HasPrefix(string(ref.Name()), prefix) { + revList = append(revList, string(ref.Name())) + } + return nil + }) + return revList, err +} diff --git a/modules/git/repo_branch_nogogit.go b/modules/git/repo_branch_nogogit.go index 66990add6f..bc58991085 100644 --- a/modules/git/repo_branch_nogogit.go +++ b/modules/git/repo_branch_nogogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git @@ -68,13 +67,29 @@ func (repo *Repository) GetBranchNames(skip, limit int) ([]string, int, error) { } // WalkReferences walks all the references from the repository -func WalkReferences(ctx context.Context, repoPath string, walkfn func(string) error) (int, error) { +func WalkReferences(ctx context.Context, repoPath string, walkfn func(sha1, refname string) error) (int, error) { return walkShowRef(ctx, repoPath, "", 0, 0, walkfn) } +// WalkReferences walks all the references from the repository +// refType should be empty, ObjectTag or ObjectBranch. All other values are equivalent to empty. 
+func (repo *Repository) WalkReferences(refType ObjectType, skip, limit int, walkfn func(sha1, refname string) error) (int, error) { + var arg string + switch refType { + case ObjectTag: + arg = "--tags" + case ObjectBranch: + arg = "--heads" + default: + arg = "" + } + + return walkShowRef(repo.Ctx, repo.Path, arg, skip, limit, walkfn) +} + // callShowRef return refs, if limit = 0 it will not limit func callShowRef(ctx context.Context, repoPath, prefix, arg string, skip, limit int) (branchNames []string, countAll int, err error) { - countAll, err = walkShowRef(ctx, repoPath, arg, skip, limit, func(branchName string) error { + countAll, err = walkShowRef(ctx, repoPath, arg, skip, limit, func(_, branchName string) error { branchName = strings.TrimPrefix(branchName, prefix) branchNames = append(branchNames, branchName) @@ -83,7 +98,7 @@ func callShowRef(ctx context.Context, repoPath, prefix, arg string, skip, limit return } -func walkShowRef(ctx context.Context, repoPath, arg string, skip, limit int, walkfn func(string) error) (countAll int, err error) { +func walkShowRef(ctx context.Context, repoPath, arg string, skip, limit int, walkfn func(sha1, refname string) error) (countAll int, err error) { stdoutReader, stdoutWriter := io.Pipe() defer func() { _ = stdoutReader.Close() @@ -96,11 +111,10 @@ func walkShowRef(ctx context.Context, repoPath, arg string, skip, limit int, wal if arg != "" { args = append(args, arg) } - err := NewCommand(ctx, args...).RunWithContext(&RunContext{ - Timeout: -1, - Dir: repoPath, - Stdout: stdoutWriter, - Stderr: stderrBuilder, + err := NewCommand(ctx, args...).Run(&RunOpts{ + Dir: repoPath, + Stdout: stdoutWriter, + Stderr: stderrBuilder, }) if err != nil { if stderrBuilder.Len() == 0 { @@ -130,11 +144,7 @@ func walkShowRef(ctx context.Context, repoPath, arg string, skip, limit int, wal for limit == 0 || i < skip+limit { // The output of show-ref is simply a list: // SP LF - _, err := bufReader.ReadSlice(' ') - for err == bufio.ErrBufferFull { - // This shouldn't happen but we'll tolerate it for the sake of peace - _, err = bufReader.ReadSlice(' ') - } + sha, err := bufReader.ReadString(' ') if err == io.EOF { return i, nil } @@ -154,7 +164,12 @@ func walkShowRef(ctx context.Context, repoPath, arg string, skip, limit int, wal if len(branchName) > 0 { branchName = branchName[:len(branchName)-1] } - err = walkfn(branchName) + + if len(sha) > 0 { + sha = sha[:len(sha)-1] + } + + err = walkfn(sha, branchName) if err != nil { return i, err } @@ -175,3 +190,15 @@ func walkShowRef(ctx context.Context, repoPath, arg string, skip, limit int, wal } return i, nil } + +// GetRefsBySha returns all references filtered with prefix that belong to a sha commit hash +func (repo *Repository) GetRefsBySha(sha, prefix string) ([]string, error) { + var revList []string + _, err := walkShowRef(repo.Ctx, repo.Path, "", 0, 0, func(walkSha, refname string) error { + if walkSha == sha && strings.HasPrefix(refname, prefix) { + revList = append(revList, refname) + } + return nil + }) + return revList, err +} diff --git a/modules/git/repo_branch_test.go b/modules/git/repo_branch_test.go index ac5f5deea9..56f7387097 100644 --- a/modules/git/repo_branch_test.go +++ b/modules/git/repo_branch_test.go @@ -13,7 +13,7 @@ import ( func TestRepository_GetBranches(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() 
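Another minimal sketch of assumed caller code (not part of this patch), showing the reworked WalkReferences: the callback now receives both the object SHA and the ref name, and the walk can be restricted to tags or branches via the ObjectType argument. GetRefsBySha, exercised by the tests below, builds on the same walk.

    package main

    import (
        "context"
        "fmt"

        "code.gitea.io/gitea/modules/git"
    )

    // printTags walks up to ten tag references of the repository at repoPath and
    // prints each tag's SHA and full ref name (sketch only).
    func printTags(ctx context.Context, repoPath string) error {
        repo, err := git.OpenRepository(ctx, repoPath) // context-aware open, per this patch
        if err != nil {
            return err
        }
        defer repo.Close()

        _, err = repo.WalkReferences(git.ObjectTag, 0, 10, func(sha, refname string) error {
            fmt.Println(sha, refname)
            return nil
        })
        return err
    }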
@@ -41,7 +41,7 @@ func TestRepository_GetBranches(t *testing.T) { func BenchmarkRepository_GetBranches(b *testing.B) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) if err != nil { b.Fatal(err) } @@ -54,3 +54,44 @@ func BenchmarkRepository_GetBranches(b *testing.B) { } } } + +func TestGetRefsBySha(t *testing.T) { + bareRepo5Path := filepath.Join(testReposDir, "repo5_pulls") + bareRepo5, err := OpenRepository(DefaultContext, bareRepo5Path) + if err != nil { + t.Fatal(err) + } + defer bareRepo5.Close() + + // do not exist + branches, err := bareRepo5.GetRefsBySha("8006ff9adbf0cb94da7dad9e537e53817f9fa5c0", "") + assert.NoError(t, err) + assert.Len(t, branches, 0) + + // refs/pull/1/head + branches, err = bareRepo5.GetRefsBySha("c83380d7056593c51a699d12b9c00627bd5743e9", PullPrefix) + assert.NoError(t, err) + assert.EqualValues(t, []string{"refs/pull/1/head"}, branches) + + branches, err = bareRepo5.GetRefsBySha("d8e0bbb45f200e67d9a784ce55bd90821af45ebd", BranchPrefix) + assert.NoError(t, err) + assert.EqualValues(t, []string{"refs/heads/master", "refs/heads/master-clone"}, branches) + + branches, err = bareRepo5.GetRefsBySha("58a4bcc53ac13e7ff76127e0fb518b5262bf09af", BranchPrefix) + assert.NoError(t, err) + assert.EqualValues(t, []string{"refs/heads/test-patch-1"}, branches) +} + +func BenchmarkGetRefsBySha(b *testing.B) { + bareRepo5Path := filepath.Join(testReposDir, "repo5_pulls") + bareRepo5, err := OpenRepository(DefaultContext, bareRepo5Path) + if err != nil { + b.Fatal(err) + } + defer bareRepo5.Close() + + _, _ = bareRepo5.GetRefsBySha("8006ff9adbf0cb94da7dad9e537e53817f9fa5c0", "") + _, _ = bareRepo5.GetRefsBySha("d8e0bbb45f200e67d9a784ce55bd90821af45ebd", "") + _, _ = bareRepo5.GetRefsBySha("c83380d7056593c51a699d12b9c00627bd5743e9", "") + _, _ = bareRepo5.GetRefsBySha("58a4bcc53ac13e7ff76127e0fb518b5262bf09af", "") +} diff --git a/modules/git/repo_commit.go b/modules/git/repo_commit.go index 8e059ce0ea..e6fec4d1a3 100644 --- a/modules/git/repo_commit.go +++ b/modules/git/repo_commit.go @@ -58,12 +58,12 @@ func (repo *Repository) getCommitByPathWithID(id SHA1, relpath string) (*Commit, relpath = `\` + relpath } - stdout, err := NewCommand(repo.Ctx, "log", "-1", prettyLogFormat, id.String(), "--", relpath).RunInDir(repo.Path) - if err != nil { - return nil, err + stdout, _, runErr := NewCommand(repo.Ctx, "log", "-1", prettyLogFormat, id.String(), "--", relpath).RunStdString(&RunOpts{Dir: repo.Path}) + if runErr != nil { + return nil, runErr } - id, err = NewIDFromString(stdout) + id, err := NewIDFromString(stdout) if err != nil { return nil, err } @@ -73,9 +73,9 @@ func (repo *Repository) getCommitByPathWithID(id SHA1, relpath string) (*Commit, // GetCommitByPath returns the last commit of relative path. 
func (repo *Repository) GetCommitByPath(relpath string) (*Commit, error) { - stdout, err := NewCommand(repo.Ctx, "log", "-1", prettyLogFormat, "--", relpath).RunInDirBytes(repo.Path) - if err != nil { - return nil, err + stdout, _, runErr := NewCommand(repo.Ctx, "log", "-1", prettyLogFormat, "--", relpath).RunStdBytes(&RunOpts{Dir: repo.Path}) + if runErr != nil { + return nil, runErr } commits, err := repo.parsePrettyFormatLogToList(stdout) @@ -86,8 +86,8 @@ func (repo *Repository) GetCommitByPath(relpath string) (*Commit, error) { } func (repo *Repository) commitsByRange(id SHA1, page, pageSize int) ([]*Commit, error) { - stdout, err := NewCommand(repo.Ctx, "log", id.String(), "--skip="+strconv.Itoa((page-1)*pageSize), - "--max-count="+strconv.Itoa(pageSize), prettyLogFormat).RunInDirBytes(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "log", id.String(), "--skip="+strconv.Itoa((page-1)*pageSize), + "--max-count="+strconv.Itoa(pageSize), prettyLogFormat).RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -139,7 +139,7 @@ func (repo *Repository) searchCommits(id SHA1, opts SearchCommitsOptions) ([]*Co // search for commits matching given constraints and keywords in commit msg cmd.AddArguments(args...) - stdout, err := cmd.RunInDirBytes(repo.Path) + stdout, _, err := cmd.RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -161,7 +161,7 @@ func (repo *Repository) searchCommits(id SHA1, opts SearchCommitsOptions) ([]*Co hashCmd.AddArguments(v) // search with given constraints for commit matching sha hash of v - hashMatching, err := hashCmd.RunInDirBytes(repo.Path) + hashMatching, _, err := hashCmd.RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil || bytes.Contains(stdout, hashMatching) { continue } @@ -175,7 +175,7 @@ func (repo *Repository) searchCommits(id SHA1, opts SearchCommitsOptions) ([]*Co } func (repo *Repository) getFilesChanged(id1, id2 string) ([]string, error) { - stdout, err := NewCommand(repo.Ctx, "diff", "--name-only", id1, id2).RunInDirBytes(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "diff", "--name-only", id1, id2).RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -185,7 +185,7 @@ func (repo *Repository) getFilesChanged(id1, id2 string) ([]string, error) { // FileChangedBetweenCommits Returns true if the file changed between commit IDs id1 and id2 // You must ensure that id1 and id2 are valid commit ids. func (repo *Repository) FileChangedBetweenCommits(filename, id1, id2 string) (bool, error) { - stdout, err := NewCommand(repo.Ctx, "diff", "--name-only", "-z", id1, id2, "--", filename).RunInDirBytes(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "diff", "--name-only", "-z", id1, id2, "--", filename).RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return false, err } @@ -211,11 +211,10 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) ( err := NewCommand(repo.Ctx, "log", revision, "--follow", "--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize*page), prettyLogFormat, "--", file). 
- RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: stdoutWriter, - Stderr: &stderr, + Run(&RunOpts{ + Dir: repo.Path, + Stdout: stdoutWriter, + Stderr: &stderr, }) if err != nil { _ = stdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String())) @@ -244,8 +243,8 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) ( // CommitsByFileAndRangeNoFollow return the commits according revision file and the page func (repo *Repository) CommitsByFileAndRangeNoFollow(revision, file string, page int) ([]*Commit, error) { - stdout, err := NewCommand(repo.Ctx, "log", revision, "--skip="+strconv.Itoa((page-1)*50), - "--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize), prettyLogFormat, "--", file).RunInDirBytes(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "log", revision, "--skip="+strconv.Itoa((page-1)*50), + "--max-count="+strconv.Itoa(setting.Git.CommitsRangeSize), prettyLogFormat, "--", file).RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -254,11 +253,11 @@ func (repo *Repository) CommitsByFileAndRangeNoFollow(revision, file string, pag // FilesCountBetween return the number of files changed between two commits func (repo *Repository) FilesCountBetween(startCommitID, endCommitID string) (int, error) { - stdout, err := NewCommand(repo.Ctx, "diff", "--name-only", startCommitID+"..."+endCommitID).RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "diff", "--name-only", startCommitID+"..."+endCommitID).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil && strings.Contains(err.Error(), "no merge base") { // git >= 2.28 now returns an error if startCommitID and endCommitID have become unrelated. // previously it would return the results of git diff --name-only startCommitID endCommitID so let's try that... - stdout, err = NewCommand(repo.Ctx, "diff", "--name-only", startCommitID, endCommitID).RunInDir(repo.Path) + stdout, _, err = NewCommand(repo.Ctx, "diff", "--name-only", startCommitID, endCommitID).RunStdString(&RunOpts{Dir: repo.Path}) } if err != nil { return 0, err @@ -272,13 +271,13 @@ func (repo *Repository) CommitsBetween(last, before *Commit) ([]*Commit, error) var stdout []byte var err error if before == nil { - stdout, err = NewCommand(repo.Ctx, "rev-list", last.ID.String()).RunInDirBytes(repo.Path) + stdout, _, err = NewCommand(repo.Ctx, "rev-list", last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) } else { - stdout, err = NewCommand(repo.Ctx, "rev-list", before.ID.String()+".."+last.ID.String()).RunInDirBytes(repo.Path) + stdout, _, err = NewCommand(repo.Ctx, "rev-list", before.ID.String()+".."+last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil && strings.Contains(err.Error(), "no merge base") { // future versions of git >= 2.28 are likely to return an error if before and last have become unrelated. // previously it would return the results of git rev-list before last so let's try that... 
- stdout, err = NewCommand(repo.Ctx, "rev-list", before.ID.String(), last.ID.String()).RunInDirBytes(repo.Path) + stdout, _, err = NewCommand(repo.Ctx, "rev-list", before.ID.String(), last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) } } if err != nil { @@ -292,13 +291,13 @@ func (repo *Repository) CommitsBetweenLimit(last, before *Commit, limit, skip in var stdout []byte var err error if before == nil { - stdout, err = NewCommand(repo.Ctx, "rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), last.ID.String()).RunInDirBytes(repo.Path) + stdout, _, err = NewCommand(repo.Ctx, "rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) } else { - stdout, err = NewCommand(repo.Ctx, "rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), before.ID.String()+".."+last.ID.String()).RunInDirBytes(repo.Path) + stdout, _, err = NewCommand(repo.Ctx, "rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), before.ID.String()+".."+last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil && strings.Contains(err.Error(), "no merge base") { // future versions of git >= 2.28 are likely to return an error if before and last have become unrelated. // previously it would return the results of git rev-list --max-count n before last so let's try that... - stdout, err = NewCommand(repo.Ctx, "rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), before.ID.String(), last.ID.String()).RunInDirBytes(repo.Path) + stdout, _, err = NewCommand(repo.Ctx, "rev-list", "--max-count", strconv.Itoa(limit), "--skip", strconv.Itoa(skip), before.ID.String(), last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) } } if err != nil { @@ -344,9 +343,9 @@ func (repo *Repository) commitsBefore(id SHA1, limit int) ([]*Commit, error) { cmd.AddArguments(prettyLogFormat, id.String()) } - stdout, err := cmd.RunInDirBytes(repo.Path) - if err != nil { - return nil, err + stdout, _, runErr := cmd.RunStdBytes(&RunOpts{Dir: repo.Path}) + if runErr != nil { + return nil, runErr } formattedLog, err := repo.parsePrettyFormatLogToList(bytes.TrimSpace(stdout)) @@ -381,7 +380,7 @@ func (repo *Repository) getCommitsBeforeLimit(id SHA1, num int) ([]*Commit, erro func (repo *Repository) getBranches(commit *Commit, limit int) ([]string, error) { if CheckGitVersionAtLeast("2.7.0") == nil { - stdout, err := NewCommand(repo.Ctx, "for-each-ref", "--count="+strconv.Itoa(limit), "--format=%(refname:strip=2)", "--contains", commit.ID.String(), BranchPrefix).RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "for-each-ref", "--count="+strconv.Itoa(limit), "--format=%(refname:strip=2)", "--contains", commit.ID.String(), BranchPrefix).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -390,7 +389,7 @@ func (repo *Repository) getBranches(commit *Commit, limit int) ([]string, error) return branches, nil } - stdout, err := NewCommand(repo.Ctx, "branch", "--contains", commit.ID.String()).RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "branch", "--contains", commit.ID.String()).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -429,7 +428,7 @@ func (repo *Repository) GetCommitsFromIDs(commitIDs []string) []*Commit { // IsCommitInBranch check if the commit is on the branch func (repo *Repository) IsCommitInBranch(commitID, branch string) (r bool, err error) { - stdout, err := NewCommand(repo.Ctx, "branch", 
"--contains", commitID, branch).RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "branch", "--contains", commitID, branch).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return false, err } diff --git a/modules/git/repo_commit_gogit.go b/modules/git/repo_commit_gogit.go index 3693f7883f..9333b0d7b7 100644 --- a/modules/git/repo_commit_gogit.go +++ b/modules/git/repo_commit_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git @@ -50,7 +49,7 @@ func (repo *Repository) ConvertToSHA1(commitID string) (SHA1, error) { } } - actualCommitID, err := NewCommand(repo.Ctx, "rev-parse", "--verify", commitID).RunInDir(repo.Path) + actualCommitID, _, err := NewCommand(repo.Ctx, "rev-parse", "--verify", commitID).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { if strings.Contains(err.Error(), "unknown revision or path") || strings.Contains(err.Error(), "fatal: Needed a single revision") { diff --git a/modules/git/repo_commit_nogogit.go b/modules/git/repo_commit_nogogit.go index b65565c98c..e528af0ffb 100644 --- a/modules/git/repo_commit_nogogit.go +++ b/modules/git/repo_commit_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git @@ -18,7 +17,7 @@ import ( // ResolveReference resolves a name to a reference func (repo *Repository) ResolveReference(name string) (string, error) { - stdout, err := NewCommand(repo.Ctx, "show-ref", "--hash", name).RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "show-ref", "--hash", name).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { if strings.Contains(err.Error(), "not a valid ref") { return "", ErrNotExist{name, ""} @@ -51,19 +50,19 @@ func (repo *Repository) GetRefCommitID(name string) (string, error) { // SetReference sets the commit ID string of given reference (e.g. branch or tag). func (repo *Repository) SetReference(name, commitID string) error { - _, err := NewCommand(repo.Ctx, "update-ref", name, commitID).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "update-ref", name, commitID).RunStdString(&RunOpts{Dir: repo.Path}) return err } // RemoveReference removes the given reference (e.g. branch or tag). func (repo *Repository) RemoveReference(name string) error { - _, err := NewCommand(repo.Ctx, "update-ref", "--no-deref", "-d", name).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "update-ref", "--no-deref", "-d", name).RunStdString(&RunOpts{Dir: repo.Path}) return err } // IsCommitExist returns true if given commit exists in current repository. 
func (repo *Repository) IsCommitExist(name string) bool { - _, err := NewCommand(repo.Ctx, "cat-file", "-e", name).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "cat-file", "-e", name).RunStdString(&RunOpts{Dir: repo.Path}) return err == nil } diff --git a/modules/git/repo_commit_test.go b/modules/git/repo_commit_test.go index 232d6a218c..7b3a5101f2 100644 --- a/modules/git/repo_commit_test.go +++ b/modules/git/repo_commit_test.go @@ -13,7 +13,7 @@ import ( func TestRepository_GetCommitBranches(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() @@ -40,7 +40,7 @@ func TestRepository_GetCommitBranches(t *testing.T) { func TestGetTagCommitWithSignature(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() @@ -54,7 +54,7 @@ func TestGetTagCommitWithSignature(t *testing.T) { func TestGetCommitWithBadCommitID(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() @@ -66,7 +66,7 @@ func TestGetCommitWithBadCommitID(t *testing.T) { func TestIsCommitInBranch(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() @@ -81,7 +81,7 @@ func TestIsCommitInBranch(t *testing.T) { func TestRepository_CommitsBetweenIDs(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo4_commitsbetween") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() diff --git a/modules/git/repo_commitgraph.go b/modules/git/repo_commitgraph.go new file mode 100644 index 0000000000..075b59ad06 --- /dev/null +++ b/modules/git/repo_commitgraph.go @@ -0,0 +1,21 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package git + +import ( + "context" + "fmt" +) + +// WriteCommitGraph write commit graph to speed up repo access +// this requires git v2.18 to be installed +func WriteCommitGraph(ctx context.Context, repoPath string) error { + if CheckGitVersionAtLeast("2.18") == nil { + if _, _, err := NewCommand(ctx, "commit-graph", "write").RunStdString(&RunOpts{Dir: repoPath}); err != nil { + return fmt.Errorf("unable to write commit-graph for '%s' : %w", repoPath, err) + } + } + return nil +} diff --git a/modules/git/repo_commitgraph_gogit.go b/modules/git/repo_commitgraph_gogit.go index 84a2edb664..6b00a4fdc4 100644 --- a/modules/git/repo_commitgraph_gogit.go +++ b/modules/git/repo_commitgraph_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. 
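For illustration only: the new WriteCommitGraph helper introduced above wraps `git commit-graph write` behind a version check and is a no-op on git older than 2.18. A hedged sketch of a possible call site from outside modules/git; ctx and repoPath are assumed to be in scope, and whether the error should be treated as fatal is not decided by this patch:

    // Hypothetical caller refreshing the commit-graph after a bulk update.
    if err := git.WriteCommitGraph(ctx, repoPath); err != nil {
        // Non-fatal here: the repository still works without a commit-graph,
        // it is only an access-speed optimization.
        log.Error("unable to write commit-graph for %s: %v", repoPath, err)
    }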
//go:build gogit -// +build gogit package git diff --git a/modules/git/repo_compare.go b/modules/git/repo_compare.go index aa8015af14..4b0cc8536b 100644 --- a/modules/git/repo_compare.go +++ b/modules/git/repo_compare.go @@ -40,13 +40,13 @@ func (repo *Repository) GetMergeBase(tmpRemote, base, head string) (string, stri if tmpRemote != "origin" { tmpBaseName := RemotePrefix + tmpRemote + "/tmp_" + base // Fetch commit into a temporary branch in order to be able to handle commits and tags - _, err := NewCommand(repo.Ctx, "fetch", tmpRemote, base+":"+tmpBaseName).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "fetch", tmpRemote, base+":"+tmpBaseName).RunStdString(&RunOpts{Dir: repo.Path}) if err == nil { base = tmpBaseName } } - stdout, err := NewCommand(repo.Ctx, "merge-base", "--", base, head).RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "merge-base", "--", base, head).RunStdString(&RunOpts{Dir: repo.Path}) return strings.TrimSpace(stdout), base, err } @@ -93,7 +93,8 @@ func (repo *Repository) GetCompareInfo(basePath, baseBranch, headBranch string, // We have a common base - therefore we know that ... should work if !fileOnly { - logs, err := NewCommand(repo.Ctx, "log", baseCommitID+separator+headBranch, prettyLogFormat).RunInDirBytes(repo.Path) + var logs []byte + logs, _, err = NewCommand(repo.Ctx, "log", baseCommitID+separator+headBranch, prettyLogFormat).RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -147,22 +148,20 @@ func (repo *Repository) GetDiffNumChangedFiles(base, head string, directComparis } if err := NewCommand(repo.Ctx, "diff", "-z", "--name-only", base+separator+head). - RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: w, - Stderr: stderr, + Run(&RunOpts{ + Dir: repo.Path, + Stdout: w, + Stderr: stderr, }); err != nil { if strings.Contains(stderr.String(), "no merge base") { // git >= 2.28 now returns an error if base and head have become unrelated. // previously it would return the results of git diff -z --name-only base head so let's try that... w = &lineCountWriter{} stderr.Reset() - if err = NewCommand(repo.Ctx, "diff", "-z", "--name-only", base, head).RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: w, - Stderr: stderr, + if err = NewCommand(repo.Ctx, "diff", "-z", "--name-only", base, head).Run(&RunOpts{ + Dir: repo.Path, + Stdout: w, + Stderr: stderr, }); err == nil { return w.numLines, nil } @@ -192,7 +191,7 @@ func GetDiffShortStat(ctx context.Context, repoPath string, args ...string) (num "--shortstat", }, args...) - stdout, err := NewCommand(ctx, args...).RunInDir(repoPath) + stdout, _, err := NewCommand(ctx, args...).RunStdString(&RunOpts{Dir: repoPath}) if err != nil { return 0, 0, 0, err } @@ -248,26 +247,23 @@ func (repo *Repository) GetDiffOrPatch(base, head string, w io.Writer, patch, bi // GetDiff generates and returns patch data between given revisions, optimized for human readability func (repo *Repository) GetDiff(base, head string, w io.Writer) error { - return NewCommand(repo.Ctx, "diff", "-p", base, head).RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: w, + return NewCommand(repo.Ctx, "diff", "-p", base, head).Run(&RunOpts{ + Dir: repo.Path, + Stdout: w, }) } // GetDiffBinary generates and returns patch data between given revisions, including binary diffs. 
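For illustration only: in the hunks above, RunWithContext/RunContext becomes Run/RunOpts and the explicit Timeout: -1 field is simply dropped, so a default timeout presumably applies. A sketch of the streaming form used by GetDiff and GetPatch, where w is any io.Writer supplied by the caller:

    // Stream patch output directly into the caller's writer; stderr is buffered
    // so a failure can be reported with context, mirroring the converted code.
    stderr := new(bytes.Buffer)
    if err := NewCommand(repo.Ctx, "diff", "-p", base, head).Run(&RunOpts{
        Dir:    repo.Path,
        Stdout: w,      // patch text is written as git produces it
        Stderr: stderr, // kept for error reporting
    }); err != nil {
        return ConcatenateError(err, stderr.String())
    }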
func (repo *Repository) GetDiffBinary(base, head string, w io.Writer) error { if CheckGitVersionAtLeast("1.7.7") == nil { - return NewCommand(repo.Ctx, "diff", "-p", "--binary", "--histogram", base, head).RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: w, + return NewCommand(repo.Ctx, "diff", "-p", "--binary", "--histogram", base, head).Run(&RunOpts{ + Dir: repo.Path, + Stdout: w, }) } - return NewCommand(repo.Ctx, "diff", "-p", "--binary", "--patience", base, head).RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: w, + return NewCommand(repo.Ctx, "diff", "-p", "--binary", "--patience", base, head).Run(&RunOpts{ + Dir: repo.Path, + Stdout: w, }) } @@ -275,32 +271,38 @@ func (repo *Repository) GetDiffBinary(base, head string, w io.Writer) error { func (repo *Repository) GetPatch(base, head string, w io.Writer) error { stderr := new(bytes.Buffer) err := NewCommand(repo.Ctx, "format-patch", "--binary", "--stdout", base+"..."+head). - RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: w, - Stderr: stderr, + Run(&RunOpts{ + Dir: repo.Path, + Stdout: w, + Stderr: stderr, }) if err != nil && bytes.Contains(stderr.Bytes(), []byte("no merge base")) { return NewCommand(repo.Ctx, "format-patch", "--binary", "--stdout", base, head). - RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: w, + Run(&RunOpts{ + Dir: repo.Path, + Stdout: w, }) } return err } +// GetFilesChangedBetween returns a list of all files that have been changed between the given commits +func (repo *Repository) GetFilesChangedBetween(base, head string) ([]string, error) { + stdout, _, err := NewCommand(repo.Ctx, "diff", "--name-only", base+".."+head).RunStdString(&RunOpts{Dir: repo.Path}) + if err != nil { + return nil, err + } + return strings.Split(stdout, "\n"), err +} + // GetDiffFromMergeBase generates and return patch data from merge base to head func (repo *Repository) GetDiffFromMergeBase(base, head string, w io.Writer) error { stderr := new(bytes.Buffer) err := NewCommand(repo.Ctx, "diff", "-p", "--binary", base+"..."+head). 
- RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: w, - Stderr: stderr, + Run(&RunOpts{ + Dir: repo.Path, + Stdout: w, + Stderr: stderr, }) if err != nil && bytes.Contains(stderr.Bytes(), []byte("no merge base")) { return repo.GetDiffBinary(base, head, w) diff --git a/modules/git/repo_compare_test.go b/modules/git/repo_compare_test.go index 82d3257c0f..e163a3090b 100644 --- a/modules/git/repo_compare_test.go +++ b/modules/git/repo_compare_test.go @@ -24,7 +24,7 @@ func TestGetFormatPatch(t *testing.T) { } defer util.RemoveAll(clonedPath) - repo, err := OpenRepository(clonedPath) + repo, err := openRepositoryWithDefaultContext(clonedPath) if err != nil { assert.NoError(t, err) return @@ -52,7 +52,7 @@ func TestGetFormatPatch(t *testing.T) { func TestReadPatch(t *testing.T) { // Ensure we can read the patch files bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - repo, err := OpenRepository(bareRepo1Path) + repo, err := openRepositoryWithDefaultContext(bareRepo1Path) if err != nil { assert.NoError(t, err) return @@ -91,7 +91,7 @@ func TestReadWritePullHead(t *testing.T) { } defer util.RemoveAll(clonedPath) - repo, err := OpenRepository(clonedPath) + repo, err := openRepositoryWithDefaultContext(clonedPath) if err != nil { assert.NoError(t, err) return diff --git a/modules/git/repo_gpg.go b/modules/git/repo_gpg.go index 14eb894be6..abbb349159 100644 --- a/modules/git/repo_gpg.go +++ b/modules/git/repo_gpg.go @@ -34,7 +34,7 @@ func (repo *Repository) GetDefaultPublicGPGKey(forceUpdate bool) (*GPGSettings, Sign: true, } - value, _ := NewCommand(repo.Ctx, "config", "--get", "commit.gpgsign").RunInDir(repo.Path) + value, _, _ := NewCommand(repo.Ctx, "config", "--get", "commit.gpgsign").RunStdString(&RunOpts{Dir: repo.Path}) sign, valid := ParseBool(strings.TrimSpace(value)) if !sign || !valid { gpgSettings.Sign = false @@ -42,13 +42,13 @@ func (repo *Repository) GetDefaultPublicGPGKey(forceUpdate bool) (*GPGSettings, return gpgSettings, nil } - signingKey, _ := NewCommand(repo.Ctx, "config", "--get", "user.signingkey").RunInDir(repo.Path) + signingKey, _, _ := NewCommand(repo.Ctx, "config", "--get", "user.signingkey").RunStdString(&RunOpts{Dir: repo.Path}) gpgSettings.KeyID = strings.TrimSpace(signingKey) - defaultEmail, _ := NewCommand(repo.Ctx, "config", "--get", "user.email").RunInDir(repo.Path) + defaultEmail, _, _ := NewCommand(repo.Ctx, "config", "--get", "user.email").RunStdString(&RunOpts{Dir: repo.Path}) gpgSettings.Email = strings.TrimSpace(defaultEmail) - defaultName, _ := NewCommand(repo.Ctx, "config", "--get", "user.name").RunInDir(repo.Path) + defaultName, _, _ := NewCommand(repo.Ctx, "config", "--get", "user.name").RunStdString(&RunOpts{Dir: repo.Path}) gpgSettings.Name = strings.TrimSpace(defaultName) if err := gpgSettings.LoadPublicKeyContent(); err != nil { diff --git a/modules/git/repo_index.go b/modules/git/repo_index.go index 53de0f1cb8..ae68dcaa87 100644 --- a/modules/git/repo_index.go +++ b/modules/git/repo_index.go @@ -18,7 +18,7 @@ import ( // ReadTreeToIndex reads a treeish to the index func (repo *Repository) ReadTreeToIndex(treeish string, indexFilename ...string) error { if len(treeish) != 40 { - res, err := NewCommand(repo.Ctx, "rev-parse", "--verify", treeish).RunInDir(repo.Path) + res, _, err := NewCommand(repo.Ctx, "rev-parse", "--verify", treeish).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return err } @@ -38,7 +38,7 @@ func (repo *Repository) readTreeToIndex(id SHA1, indexFilename ...string) error if len(indexFilename) 
> 0 { env = append(os.Environ(), "GIT_INDEX_FILE="+indexFilename[0]) } - _, err := NewCommand(repo.Ctx, "read-tree", id.String()).RunInDirWithEnv(repo.Path, env) + _, _, err := NewCommand(repo.Ctx, "read-tree", id.String()).RunStdString(&RunOpts{Dir: repo.Path, Env: env}) if err != nil { return err } @@ -69,7 +69,7 @@ func (repo *Repository) ReadTreeToTemporaryIndex(treeish string) (filename, tmpD // EmptyIndex empties the index func (repo *Repository) EmptyIndex() error { - _, err := NewCommand(repo.Ctx, "read-tree", "--empty").RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "read-tree", "--empty").RunStdString(&RunOpts{Dir: repo.Path}) return err } @@ -81,7 +81,7 @@ func (repo *Repository) LsFiles(filenames ...string) ([]string, error) { cmd.AddArguments(arg) } } - res, err := cmd.RunInDirBytes(repo.Path) + res, _, err := cmd.RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } @@ -106,29 +106,28 @@ func (repo *Repository) RemoveFilesFromIndex(filenames ...string) error { buffer.WriteByte('\000') } } - return cmd.RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdin: bytes.NewReader(buffer.Bytes()), - Stdout: stdout, - Stderr: stderr, + return cmd.Run(&RunOpts{ + Dir: repo.Path, + Stdin: bytes.NewReader(buffer.Bytes()), + Stdout: stdout, + Stderr: stderr, }) } // AddObjectToIndex adds the provided object hash to the index at the provided filename func (repo *Repository) AddObjectToIndex(mode string, object SHA1, filename string) error { cmd := NewCommand(repo.Ctx, "update-index", "--add", "--replace", "--cacheinfo", mode, object.String(), filename) - _, err := cmd.RunInDir(repo.Path) + _, _, err := cmd.RunStdString(&RunOpts{Dir: repo.Path}) return err } // WriteTree writes the current index as a tree to the object db and returns its hash func (repo *Repository) WriteTree() (*Tree, error) { - res, err := NewCommand(repo.Ctx, "write-tree").RunInDir(repo.Path) - if err != nil { - return nil, err + stdout, _, runErr := NewCommand(repo.Ctx, "write-tree").RunStdString(&RunOpts{Dir: repo.Path}) + if runErr != nil { + return nil, runErr } - id, err := NewIDFromString(strings.TrimSpace(res)) + id, err := NewIDFromString(strings.TrimSpace(stdout)) if err != nil { return nil, err } diff --git a/modules/git/repo_language_stats_gogit.go b/modules/git/repo_language_stats_gogit.go index 037ec41ec6..3c9f026b7a 100644 --- a/modules/git/repo_language_stats_gogit.go +++ b/modules/git/repo_language_stats_gogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/repo_language_stats_nogogit.go b/modules/git/repo_language_stats_nogogit.go index adb11dd8fa..41b176f816 100644 --- a/modules/git/repo_language_stats_nogogit.go +++ b/modules/git/repo_language_stats_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/repo_language_stats_test.go b/modules/git/repo_language_stats_test.go index a77266413a..0234c77c0b 100644 --- a/modules/git/repo_language_stats_test.go +++ b/modules/git/repo_language_stats_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build !gogit -// +build !gogit package git @@ -16,7 +15,7 @@ import ( func TestRepository_GetLanguageStats(t *testing.T) { repoPath := filepath.Join(testReposDir, "language_stats_repo") - gitRepo, err := OpenRepository(repoPath) + gitRepo, err := openRepositoryWithDefaultContext(repoPath) if !assert.NoError(t, err) { t.Fatal() } diff --git a/modules/git/repo_object.go b/modules/git/repo_object.go index 378e657ce4..af448b0110 100644 --- a/modules/git/repo_object.go +++ b/modules/git/repo_object.go @@ -45,12 +45,11 @@ func (repo *Repository) hashObject(reader io.Reader) (string, error) { cmd := NewCommand(repo.Ctx, "hash-object", "-w", "--stdin") stdout := new(bytes.Buffer) stderr := new(bytes.Buffer) - err := cmd.RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdin: reader, - Stdout: stdout, - Stderr: stderr, + err := cmd.Run(&RunOpts{ + Dir: repo.Path, + Stdin: reader, + Stdout: stdout, + Stderr: stderr, }) if err != nil { return "", err diff --git a/modules/git/repo_ref_gogit.go b/modules/git/repo_ref_gogit.go index 9f0e11366f..d11c58e005 100644 --- a/modules/git/repo_ref_gogit.go +++ b/modules/git/repo_ref_gogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/repo_ref_nogogit.go b/modules/git/repo_ref_nogogit.go index 42295e43ac..d766a8cac3 100644 --- a/modules/git/repo_ref_nogogit.go +++ b/modules/git/repo_ref_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git @@ -23,11 +22,10 @@ func (repo *Repository) GetRefsFiltered(pattern string) ([]*Reference, error) { go func() { stderrBuilder := &strings.Builder{} - err := NewCommand(repo.Ctx, "for-each-ref").RunWithContext(&RunContext{ - Timeout: -1, - Dir: repo.Path, - Stdout: stdoutWriter, - Stderr: stderrBuilder, + err := NewCommand(repo.Ctx, "for-each-ref").Run(&RunOpts{ + Dir: repo.Path, + Stdout: stdoutWriter, + Stderr: stderrBuilder, }) if err != nil { _ = stdoutWriter.CloseWithError(ConcatenateError(err, stderrBuilder.String())) diff --git a/modules/git/repo_ref_test.go b/modules/git/repo_ref_test.go index 303c496c1d..afd38ca251 100644 --- a/modules/git/repo_ref_test.go +++ b/modules/git/repo_ref_test.go @@ -13,7 +13,7 @@ import ( func TestRepository_GetRefs(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() @@ -37,7 +37,7 @@ func TestRepository_GetRefs(t *testing.T) { func TestRepository_GetRefsFiltered(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() diff --git a/modules/git/repo_stats.go b/modules/git/repo_stats.go index 598ec37a2c..c0c91c6fc6 100644 --- a/modules/git/repo_stats.go +++ b/modules/git/repo_stats.go @@ -39,12 +39,12 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) since := fromTime.Format(time.RFC3339) - stdout, err := NewCommand(repo.Ctx, "rev-list", "--count", "--no-merges", "--branches=*", "--date=iso", fmt.Sprintf("--since='%s'", since)).RunInDirBytes(repo.Path) - if err != nil { - return nil, err + stdout, _, runErr := NewCommand(repo.Ctx, "rev-list", "--count", "--no-merges", "--branches=*", "--date=iso", 
fmt.Sprintf("--since='%s'", since)).RunStdString(&RunOpts{Dir: repo.Path}) + if runErr != nil { + return nil, runErr } - c, err := strconv.ParseInt(strings.TrimSpace(string(stdout)), 10, 64) + c, err := strconv.ParseInt(strings.TrimSpace(stdout), 10, 64) if err != nil { return nil, err } @@ -67,12 +67,11 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) } stderr := new(strings.Builder) - err = NewCommand(repo.Ctx, args...).RunWithContext(&RunContext{ - Env: []string{}, - Timeout: -1, - Dir: repo.Path, - Stdout: stdoutWriter, - Stderr: stderr, + err = NewCommand(repo.Ctx, args...).Run(&RunOpts{ + Env: []string{}, + Dir: repo.Path, + Stdout: stdoutWriter, + Stderr: stderr, PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { _ = stdoutWriter.Close() scanner := bufio.NewScanner(stdoutReader) diff --git a/modules/git/repo_stats_test.go b/modules/git/repo_stats_test.go index c5dd66182b..494a161305 100644 --- a/modules/git/repo_stats_test.go +++ b/modules/git/repo_stats_test.go @@ -14,7 +14,7 @@ import ( func TestRepository_GetCodeActivityStats(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) assert.NoError(t, err) defer bareRepo1.Close() diff --git a/modules/git/repo_tag.go b/modules/git/repo_tag.go index afeb7f5df8..8444e8d035 100644 --- a/modules/git/repo_tag.go +++ b/modules/git/repo_tag.go @@ -8,9 +8,10 @@ package git import ( "context" "fmt" + "io" "strings" - "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/git/foreachref" "code.gitea.io/gitea/modules/util" ) @@ -24,86 +25,23 @@ func IsTagExist(ctx context.Context, repoPath, name string) bool { // CreateTag create one tag in the repository func (repo *Repository) CreateTag(name, revision string) error { - _, err := NewCommand(repo.Ctx, "tag", "--", name, revision).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "tag", "--", name, revision).RunStdString(&RunOpts{Dir: repo.Path}) return err } // CreateAnnotatedTag create one annotated tag in the repository func (repo *Repository) CreateAnnotatedTag(name, message, revision string) error { - _, err := NewCommand(repo.Ctx, "tag", "-a", "-m", message, "--", name, revision).RunInDir(repo.Path) + _, _, err := NewCommand(repo.Ctx, "tag", "-a", "-m", message, "--", name, revision).RunStdString(&RunOpts{Dir: repo.Path}) return err } -func (repo *Repository) getTag(tagID SHA1, name string) (*Tag, error) { - t, ok := repo.tagCache.Get(tagID.String()) - if ok { - log.Debug("Hit cache: %s", tagID) - tagClone := *t.(*Tag) - tagClone.Name = name // This is necessary because lightweight tags may have same id - return &tagClone, nil - } - - tp, err := repo.GetTagType(tagID) - if err != nil { - return nil, err - } - - // Get the commit ID and tag ID (may be different for annotated tag) for the returned tag object - commitIDStr, err := repo.GetTagCommitID(name) - if err != nil { - // every tag should have a commit ID so return all errors - return nil, err - } - commitID, err := NewIDFromString(commitIDStr) - if err != nil { - return nil, err - } - - // If type is "commit, the tag is a lightweight tag - if ObjectType(tp) == ObjectCommit { - commit, err := repo.GetCommit(commitIDStr) - if err != nil { - return nil, err - } - tag := &Tag{ - Name: name, - ID: tagID, - Object: commitID, - Type: tp, - Tagger: commit.Committer, - Message: commit.Message(), - } - - repo.tagCache.Set(tagID.String(), 
tag) - return tag, nil - } - - // The tag is an annotated tag with a message. - data, err := NewCommand(repo.Ctx, "cat-file", "-p", tagID.String()).RunInDirBytes(repo.Path) - if err != nil { - return nil, err - } - - tag, err := parseTagData(data) - if err != nil { - return nil, err - } - - tag.Name = name - tag.ID = tagID - tag.Type = tp - - repo.tagCache.Set(tagID.String(), tag) - return tag, nil -} - // GetTagNameBySHA returns the name of a tag from its tag object SHA or commit SHA func (repo *Repository) GetTagNameBySHA(sha string) (string, error) { if len(sha) < 5 { return "", fmt.Errorf("SHA is too short: %s", sha) } - stdout, err := NewCommand(repo.Ctx, "show-ref", "--tags", "-d").RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "show-ref", "--tags", "-d").RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return "", err } @@ -126,7 +64,7 @@ func (repo *Repository) GetTagNameBySHA(sha string) (string, error) { // GetTagID returns the object ID for a tag (annotated tags have both an object SHA AND a commit SHA) func (repo *Repository) GetTagID(name string) (string, error) { - stdout, err := NewCommand(repo.Ctx, "show-ref", "--tags", "--", name).RunInDir(repo.Path) + stdout, _, err := NewCommand(repo.Ctx, "show-ref", "--tags", "--", name).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return "", err } @@ -159,50 +97,112 @@ func (repo *Repository) GetTag(name string) (*Tag, error) { return tag, nil } +// GetTagWithID returns a Git tag by given name and ID +func (repo *Repository) GetTagWithID(idStr, name string) (*Tag, error) { + id, err := NewIDFromString(idStr) + if err != nil { + return nil, err + } + + tag, err := repo.getTag(id, name) + if err != nil { + return nil, err + } + return tag, nil +} + // GetTagInfos returns all tag infos of the repository. 
func (repo *Repository) GetTagInfos(page, pageSize int) ([]*Tag, int, error) { - // TODO this a slow implementation, makes one git command per tag - stdout, err := NewCommand(repo.Ctx, "tag").RunInDir(repo.Path) - if err != nil { - return nil, 0, err - } + forEachRefFmt := foreachref.NewFormat("objecttype", "refname:short", "object", "objectname", "creator", "contents", "contents:signature") - tagNames := strings.Split(strings.TrimRight(stdout, "\n"), "\n") - tagsTotal := len(tagNames) + stdoutReader, stdoutWriter := io.Pipe() + defer stdoutReader.Close() + defer stdoutWriter.Close() + stderr := strings.Builder{} + rc := &RunOpts{Dir: repo.Path, Stdout: stdoutWriter, Stderr: &stderr} - if page != 0 { - tagNames = util.PaginateSlice(tagNames, page, pageSize).([]string) - } - - tags := make([]*Tag, 0, len(tagNames)) - for _, tagName := range tagNames { - tagName = strings.TrimSpace(tagName) - if len(tagName) == 0 { - continue - } - - tag, err := repo.GetTag(tagName) + go func() { + err := NewCommand(repo.Ctx, "for-each-ref", "--format", forEachRefFmt.Flag(), "--sort", "-*creatordate", "refs/tags").Run(rc) if err != nil { - return nil, tagsTotal, err + _ = stdoutWriter.CloseWithError(ConcatenateError(err, stderr.String())) + } else { + _ = stdoutWriter.Close() + } + }() + + var tags []*Tag + parser := forEachRefFmt.Parser(stdoutReader) + for { + ref := parser.Next() + if ref == nil { + break + } + + tag, err := parseTagRef(ref) + if err != nil { + return nil, 0, fmt.Errorf("GetTagInfos: parse tag: %w", err) } - tag.Name = tagName tags = append(tags, tag) } + if err := parser.Err(); err != nil { + return nil, 0, fmt.Errorf("GetTagInfos: parse output: %w", err) + } + sortTagsByTime(tags) + tagsTotal := len(tags) + if page != 0 { + tags = util.PaginateSlice(tags, page, pageSize).([]*Tag) + } + return tags, tagsTotal, nil } -// GetTagType gets the type of the tag, either commit (simple) or tag (annotated) -func (repo *Repository) GetTagType(id SHA1) (string, error) { - // Get tag type - stdout, err := NewCommand(repo.Ctx, "cat-file", "-t", id.String()).RunInDir(repo.Path) +// parseTagRef parses a tag from a 'git for-each-ref'-produced reference. 
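For illustration only: the rewritten GetTagInfos above replaces the old one-git-call-per-tag loop with a single streamed `git for-each-ref`, parsed through the new foreachref package. A hedged sketch of that pattern reduced to listing tag tips, assuming Parser.Next yields a map keyed by the requested field names (as parseTagRef's signature suggests); listTagTips is a hypothetical helper inside package modules/git and additionally needs "fmt" and "io" imported:

    func listTagTips(repo *Repository) error {
        format := foreachref.NewFormat("refname:short", "objectname")
        stdoutReader, stdoutWriter := io.Pipe()
        defer stdoutReader.Close()

        go func() {
            err := NewCommand(repo.Ctx, "for-each-ref", "--format", format.Flag(), "refs/tags").
                Run(&RunOpts{Dir: repo.Path, Stdout: stdoutWriter})
            _ = stdoutWriter.CloseWithError(err) // a nil err closes the pipe normally
        }()

        parser := format.Parser(stdoutReader)
        for ref := parser.Next(); ref != nil; ref = parser.Next() {
            fmt.Printf("%s -> %s\n", ref["refname:short"], ref["objectname"])
        }
        return parser.Err()
    }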
+func parseTagRef(ref map[string]string) (tag *Tag, err error) { + tag = &Tag{ + Type: ref["objecttype"], + Name: ref["refname:short"], + } + + tag.ID, err = NewIDFromString(ref["objectname"]) if err != nil { - return "", err + return nil, fmt.Errorf("parse objectname '%s': %v", ref["objectname"], err) } - if len(stdout) == 0 { - return "", ErrNotExist{ID: id.String()} + + if tag.Type == "commit" { + // lightweight tag + tag.Object = tag.ID + } else { + // annotated tag + tag.Object, err = NewIDFromString(ref["object"]) + if err != nil { + return nil, fmt.Errorf("parse object '%s': %v", ref["object"], err) + } } - return strings.TrimSpace(stdout), nil + + tag.Tagger, err = newSignatureFromCommitline([]byte(ref["creator"])) + if err != nil { + return nil, fmt.Errorf("parse tagger: %w", err) + } + + tag.Message = ref["contents"] + // strip PGP signature if present in contents field + pgpStart := strings.Index(tag.Message, beginpgp) + if pgpStart >= 0 { + tag.Message = tag.Message[0:pgpStart] + } + + // annotated tag with GPG signature + if tag.Type == "tag" && ref["contents:signature"] != "" { + payload := fmt.Sprintf("object %s\ntype commit\ntag %s\ntagger %s\n\n%s\n", + tag.Object, tag.Name, ref["creator"], strings.TrimSpace(tag.Message)) + tag.Signature = &CommitGPGSignature{ + Signature: ref["contents:signature"], + Payload: payload, + } + } + + return tag, nil } // GetAnnotatedTag returns a Git tag by its SHA, must be an annotated tag diff --git a/modules/git/repo_tag_gogit.go b/modules/git/repo_tag_gogit.go index ff8a6d53ee..c6dec28987 100644 --- a/modules/git/repo_tag_gogit.go +++ b/modules/git/repo_tag_gogit.go @@ -4,13 +4,14 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git import ( "strings" + "code.gitea.io/gitea/modules/log" + "github.com/go-git/go-git/v5/plumbing" ) @@ -53,3 +54,83 @@ func (repo *Repository) GetTags(skip, limit int) ([]string, error) { return tagNames, nil } + +// GetTagType gets the type of the tag, either commit (simple) or tag (annotated) +func (repo *Repository) GetTagType(id SHA1) (string, error) { + // Get tag type + obj, err := repo.gogitRepo.Object(plumbing.AnyObject, id) + if err != nil { + if err == plumbing.ErrReferenceNotFound { + return "", &ErrNotExist{ID: id.String()} + } + return "", err + } + + return obj.Type().String(), nil +} + +func (repo *Repository) getTag(tagID SHA1, name string) (*Tag, error) { + t, ok := repo.tagCache.Get(tagID.String()) + if ok { + log.Debug("Hit cache: %s", tagID) + tagClone := *t.(*Tag) + tagClone.Name = name // This is necessary because lightweight tags may have same id + return &tagClone, nil + } + + tp, err := repo.GetTagType(tagID) + if err != nil { + return nil, err + } + + // Get the commit ID and tag ID (may be different for annotated tag) for the returned tag object + commitIDStr, err := repo.GetTagCommitID(name) + if err != nil { + // every tag should have a commit ID so return all errors + return nil, err + } + commitID, err := NewIDFromString(commitIDStr) + if err != nil { + return nil, err + } + + // If type is "commit, the tag is a lightweight tag + if ObjectType(tp) == ObjectCommit { + commit, err := repo.GetCommit(commitIDStr) + if err != nil { + return nil, err + } + tag := &Tag{ + Name: name, + ID: tagID, + Object: commitID, + Type: tp, + Tagger: commit.Committer, + Message: commit.Message(), + } + + repo.tagCache.Set(tagID.String(), tag) + return tag, nil + } + + gogitTag, err := repo.gogitRepo.TagObject(tagID) + if err != nil { + if err == 
plumbing.ErrReferenceNotFound { + return nil, &ErrNotExist{ID: tagID.String()} + } + + return nil, err + } + + tag := &Tag{ + Name: name, + ID: tagID, + Object: gogitTag.Target, + Type: tp, + Tagger: &gogitTag.Tagger, + Message: gogitTag.Message, + } + + repo.tagCache.Set(tagID.String(), tag) + return tag, nil +} diff --git a/modules/git/repo_tag_nogogit.go b/modules/git/repo_tag_nogogit.go index 1a23755aa6..8d44db0a2e 100644 --- a/modules/git/repo_tag_nogogit.go +++ b/modules/git/repo_tag_nogogit.go @@ -4,10 +4,16 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git +import ( + "errors" + "io" + + "code.gitea.io/gitea/modules/log" +) + // IsTagExist returns true if given tag exists in the repository. func (repo *Repository) IsTagExist(name string) bool { if name == "" { @@ -23,3 +29,104 @@ func (repo *Repository) GetTags(skip, limit int) (tags []string, err error) { tags, _, err = callShowRef(repo.Ctx, repo.Path, TagPrefix, "--tags", skip, limit) return } + +// GetTagType gets the type of the tag, either commit (simple) or tag (annotated) +func (repo *Repository) GetTagType(id SHA1) (string, error) { + wr, rd, cancel := repo.CatFileBatchCheck(repo.Ctx) + defer cancel() + _, err := wr.Write([]byte(id.String() + "\n")) + if err != nil { + return "", err + } + _, typ, _, err := ReadBatchLine(rd) + if IsErrNotExist(err) { + return "", ErrNotExist{ID: id.String()} + } + return typ, nil +} + +func (repo *Repository) getTag(tagID SHA1, name string) (*Tag, error) { + t, ok := repo.tagCache.Get(tagID.String()) + if ok { + log.Debug("Hit cache: %s", tagID) + tagClone := *t.(*Tag) + tagClone.Name = name // This is necessary because lightweight tags may have same id + return &tagClone, nil + } + + tp, err := repo.GetTagType(tagID) + if err != nil { + return nil, err + } + + // Get the commit ID and tag ID (may be different for annotated tag) for the returned tag object + commitIDStr, err := repo.GetTagCommitID(name) + if err != nil { + // every tag should have a commit ID so return all errors + return nil, err + } + commitID, err := NewIDFromString(commitIDStr) + if err != nil { + return nil, err + } + + // If type is "commit, the tag is a lightweight tag + if ObjectType(tp) == ObjectCommit { + commit, err := repo.GetCommit(commitIDStr) + if err != nil { + return nil, err + } + tag := &Tag{ + Name: name, + ID: tagID, + Object: commitID, + Type: tp, + Tagger: commit.Committer, + Message: commit.Message(), + } + + repo.tagCache.Set(tagID.String(), tag) + return tag, nil + } + + // The tag is an annotated tag with a message. 
+ wr, rd, cancel := repo.CatFileBatch(repo.Ctx) + defer cancel() + + if _, err := wr.Write([]byte(tagID.String() + "\n")); err != nil { + return nil, err + } + _, typ, size, err := ReadBatchLine(rd) + if err != nil { + if errors.Is(err, io.EOF) || IsErrNotExist(err) { + return nil, ErrNotExist{ID: tagID.String()} + } + return nil, err + } + if typ != "tag" { + return nil, ErrNotExist{ID: tagID.String()} + } + + // then we need to parse the tag + // and load the commit + data, err := io.ReadAll(io.LimitReader(rd, size)) + if err != nil { + return nil, err + } + _, err = rd.Discard(1) + if err != nil { + return nil, err + } + + tag, err := parseTagData(data) + if err != nil { + return nil, err + } + + tag.Name = name + tag.ID = tagID + tag.Type = tp + + repo.tagCache.Set(tagID.String(), tag) + return tag, nil +} diff --git a/modules/git/repo_tag_test.go b/modules/git/repo_tag_test.go index 25fb8fcd9b..9d84672862 100644 --- a/modules/git/repo_tag_test.go +++ b/modules/git/repo_tag_test.go @@ -11,11 +11,12 @@ import ( "code.gitea.io/gitea/modules/util" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestRepository_GetTags(t *testing.T) { bareRepo1Path := filepath.Join(testReposDir, "repo1_bare") - bareRepo1, err := OpenRepository(bareRepo1Path) + bareRepo1, err := openRepositoryWithDefaultContext(bareRepo1Path) if err != nil { assert.NoError(t, err) return @@ -44,7 +45,7 @@ func TestRepository_GetTag(t *testing.T) { } defer util.RemoveAll(clonedPath) - bareRepo1, err := OpenRepository(clonedPath) + bareRepo1, err := openRepositoryWithDefaultContext(clonedPath) if err != nil { assert.NoError(t, err) return @@ -149,7 +150,7 @@ func TestRepository_GetAnnotatedTag(t *testing.T) { } defer util.RemoveAll(clonedPath) - bareRepo1, err := OpenRepository(clonedPath) + bareRepo1, err := openRepositoryWithDefaultContext(clonedPath) if err != nil { assert.NoError(t, err) return @@ -195,3 +196,184 @@ func TestRepository_GetAnnotatedTag(t *testing.T) { assert.True(t, IsErrNotExist(err)) assert.Nil(t, tag4) } + +func TestRepository_parseTagRef(t *testing.T) { + tests := []struct { + name string + + givenRef map[string]string + + want *Tag + wantErr bool + expectedErr error + }{ + { + name: "lightweight tag", + + givenRef: map[string]string{ + "objecttype": "commit", + "refname:short": "v1.9.1", + // object will be empty for lightweight tags + "object": "", + "objectname": "ab23e4b7f4cd0caafe0174c0e7ef6d651ba72889", + "creator": "Foo Bar 1565789218 +0300", + "contents": `Add changelog of v1.9.1 (#7859) + +* add changelog of v1.9.1 +* Update CHANGELOG.md +`, + "contents:signature": "", + }, + + want: &Tag{ + Name: "v1.9.1", + ID: MustIDFromString("ab23e4b7f4cd0caafe0174c0e7ef6d651ba72889"), + Object: MustIDFromString("ab23e4b7f4cd0caafe0174c0e7ef6d651ba72889"), + Type: "commit", + Tagger: parseAuthorLine(t, "Foo Bar 1565789218 +0300"), + Message: "Add changelog of v1.9.1 (#7859)\n\n* add changelog of v1.9.1\n* Update CHANGELOG.md\n", + Signature: nil, + }, + }, + + { + name: "annotated tag", + + givenRef: map[string]string{ + "objecttype": "tag", + "refname:short": "v0.0.1", + // object will refer to commit hash for annotated tag + "object": "3325fd8a973321fd59455492976c042dde3fd1ca", + "objectname": "8c68a1f06fc59c655b7e3905b159d761e91c53c9", + "creator": "Foo Bar 1565789218 +0300", + "contents": `Add changelog of v1.9.1 (#7859) + +* add changelog of v1.9.1 +* Update CHANGELOG.md +`, + "contents:signature": "", + }, + + want: &Tag{ + Name: "v0.0.1", + ID: 
MustIDFromString("8c68a1f06fc59c655b7e3905b159d761e91c53c9"), + Object: MustIDFromString("3325fd8a973321fd59455492976c042dde3fd1ca"), + Type: "tag", + Tagger: parseAuthorLine(t, "Foo Bar 1565789218 +0300"), + Message: "Add changelog of v1.9.1 (#7859)\n\n* add changelog of v1.9.1\n* Update CHANGELOG.md\n", + Signature: nil, + }, + }, + + { + name: "annotated tag with signature", + + givenRef: map[string]string{ + "objecttype": "tag", + "refname:short": "v0.0.1", + "object": "3325fd8a973321fd59455492976c042dde3fd1ca", + "objectname": "8c68a1f06fc59c655b7e3905b159d761e91c53c9", + "creator": "Foo Bar 1565789218 +0300", + "contents": `Add changelog of v1.9.1 (#7859) + +* add changelog of v1.9.1 +* Update CHANGELOG.md +-----BEGIN PGP SIGNATURE----- + +aBCGzBAABCgAdFiEEyWRwv/q1Q6IjSv+D4IPOwzt33PoFAmI8jbIACgkQ4IPOwzt3 +3PoRuAv9FVSbPBXvzECubls9KQd7urwEvcfG20Uf79iBwifQJUv+egNQojrs6APT +T4CdIXeGRpwJZaGTUX9RWnoDO1SLXAWnc82CypWraNwrHq8Go2YeoVu0Iy3vb0EU +REdob/tXYZecMuP8AjhUR0XfdYaERYAvJ2dYsH/UkFrqDjM3V4kPXWG+R5DCaZiE +slB5U01i4Dwb/zm/ckzhUGEcOgcnpOKX8SnY5kYRVDY47dl/yJZ1u2XWir3mu60G +1geIitH7StBddHi/8rz+sJwTfcVaLjn2p59p/Dr9aGbk17GIaKq1j0pZA2lKT0Xt +f9jDqU+9vCxnKgjSDhrwN69LF2jT47ZFjEMGV/wFPOa1EBxVWpgQ/CfEolBlbUqx +yVpbxi/6AOK2lmG130e9jEZJcu+WeZUeq851WgKSEkf2d5f/JpwtSTEOlOedu6V6 +kl845zu5oE2nKM4zMQ7XrYQn538I31ps+VGQ0H8R07WrZP8WKUWugL2cU8KmXFwg +qbHDASXl +=2yGi +-----END PGP SIGNATURE----- + +`, + "contents:signature": `-----BEGIN PGP SIGNATURE----- + +aBCGzBAABCgAdFiEEyWRwv/q1Q6IjSv+D4IPOwzt33PoFAmI8jbIACgkQ4IPOwzt3 +3PoRuAv9FVSbPBXvzECubls9KQd7urwEvcfG20Uf79iBwifQJUv+egNQojrs6APT +T4CdIXeGRpwJZaGTUX9RWnoDO1SLXAWnc82CypWraNwrHq8Go2YeoVu0Iy3vb0EU +REdob/tXYZecMuP8AjhUR0XfdYaERYAvJ2dYsH/UkFrqDjM3V4kPXWG+R5DCaZiE +slB5U01i4Dwb/zm/ckzhUGEcOgcnpOKX8SnY5kYRVDY47dl/yJZ1u2XWir3mu60G +1geIitH7StBddHi/8rz+sJwTfcVaLjn2p59p/Dr9aGbk17GIaKq1j0pZA2lKT0Xt +f9jDqU+9vCxnKgjSDhrwN69LF2jT47ZFjEMGV/wFPOa1EBxVWpgQ/CfEolBlbUqx +yVpbxi/6AOK2lmG130e9jEZJcu+WeZUeq851WgKSEkf2d5f/JpwtSTEOlOedu6V6 +kl845zu5oE2nKM4zMQ7XrYQn538I31ps+VGQ0H8R07WrZP8WKUWugL2cU8KmXFwg +qbHDASXl +=2yGi +-----END PGP SIGNATURE----- + +`, + }, + + want: &Tag{ + Name: "v0.0.1", + ID: MustIDFromString("8c68a1f06fc59c655b7e3905b159d761e91c53c9"), + Object: MustIDFromString("3325fd8a973321fd59455492976c042dde3fd1ca"), + Type: "tag", + Tagger: parseAuthorLine(t, "Foo Bar 1565789218 +0300"), + Message: "Add changelog of v1.9.1 (#7859)\n\n* add changelog of v1.9.1\n* Update CHANGELOG.md", + Signature: &CommitGPGSignature{ + Signature: `-----BEGIN PGP SIGNATURE----- + +aBCGzBAABCgAdFiEEyWRwv/q1Q6IjSv+D4IPOwzt33PoFAmI8jbIACgkQ4IPOwzt3 +3PoRuAv9FVSbPBXvzECubls9KQd7urwEvcfG20Uf79iBwifQJUv+egNQojrs6APT +T4CdIXeGRpwJZaGTUX9RWnoDO1SLXAWnc82CypWraNwrHq8Go2YeoVu0Iy3vb0EU +REdob/tXYZecMuP8AjhUR0XfdYaERYAvJ2dYsH/UkFrqDjM3V4kPXWG+R5DCaZiE +slB5U01i4Dwb/zm/ckzhUGEcOgcnpOKX8SnY5kYRVDY47dl/yJZ1u2XWir3mu60G +1geIitH7StBddHi/8rz+sJwTfcVaLjn2p59p/Dr9aGbk17GIaKq1j0pZA2lKT0Xt +f9jDqU+9vCxnKgjSDhrwN69LF2jT47ZFjEMGV/wFPOa1EBxVWpgQ/CfEolBlbUqx +yVpbxi/6AOK2lmG130e9jEZJcu+WeZUeq851WgKSEkf2d5f/JpwtSTEOlOedu6V6 +kl845zu5oE2nKM4zMQ7XrYQn538I31ps+VGQ0H8R07WrZP8WKUWugL2cU8KmXFwg +qbHDASXl +=2yGi +-----END PGP SIGNATURE----- + +`, + Payload: `object 3325fd8a973321fd59455492976c042dde3fd1ca +type commit +tag v0.0.1 +tagger Foo Bar 1565789218 +0300 + +Add changelog of v1.9.1 (#7859) + +* add changelog of v1.9.1 +* Update CHANGELOG.md +`, + }, + }, + }, + } + + for _, test := range tests { + tc := test // don't close over loop variable + t.Run(tc.name, func(t *testing.T) { + got, err := 
parseTagRef(tc.givenRef) + + if tc.wantErr { + require.Error(t, err) + require.ErrorIs(t, err, tc.expectedErr) + } else { + require.NoError(t, err) + require.Equal(t, tc.want, got) + } + }) + } +} + +func parseAuthorLine(t *testing.T, committer string) *Signature { + t.Helper() + + sig, err := newSignatureFromCommitline([]byte(committer)) + if err != nil { + t.Fatalf("parse author line '%s': %v", committer, err) + } + + return sig +} diff --git a/modules/git/repo_test.go b/modules/git/repo_test.go index e143c1b39d..8ca170cd1f 100644 --- a/modules/git/repo_test.go +++ b/modules/git/repo_test.go @@ -23,7 +23,7 @@ func TestGetLatestCommitTime(t *testing.T) { func TestRepoIsEmpty(t *testing.T) { emptyRepo2Path := filepath.Join(testReposDir, "repo2_empty") - repo, err := OpenRepository(emptyRepo2Path) + repo, err := openRepositoryWithDefaultContext(emptyRepo2Path) assert.NoError(t, err) defer repo.Close() isEmpty, err := repo.IsEmpty() diff --git a/modules/git/repo_tree.go b/modules/git/repo_tree.go index 3219b569a5..3e7a9c2cfb 100644 --- a/modules/git/repo_tree.go +++ b/modules/git/repo_tree.go @@ -60,13 +60,12 @@ func (repo *Repository) CommitTree(author, committer *Signature, tree *Tree, opt stdout := new(bytes.Buffer) stderr := new(bytes.Buffer) - err = cmd.RunWithContext(&RunContext{ - Env: env, - Timeout: -1, - Dir: repo.Path, - Stdin: messageBytes, - Stdout: stdout, - Stderr: stderr, + err = cmd.Run(&RunOpts{ + Env: env, + Dir: repo.Path, + Stdin: messageBytes, + Stdout: stdout, + Stderr: stderr, }) if err != nil { diff --git a/modules/git/repo_tree_gogit.go b/modules/git/repo_tree_gogit.go index 0089d2c9a4..eef09cddd6 100644 --- a/modules/git/repo_tree_gogit.go +++ b/modules/git/repo_tree_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git @@ -22,7 +21,7 @@ func (repo *Repository) getTree(id SHA1) (*Tree, error) { // GetTree find the tree object in the repository. func (repo *Repository) GetTree(idStr string) (*Tree, error) { if len(idStr) != 40 { - res, err := NewCommand(repo.Ctx, "rev-parse", "--verify", idStr).RunInDir(repo.Path) + res, _, err := NewCommand(repo.Ctx, "rev-parse", "--verify", idStr).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } diff --git a/modules/git/repo_tree_nogogit.go b/modules/git/repo_tree_nogogit.go index 00009c997d..dc4a5becb9 100644 --- a/modules/git/repo_tree_nogogit.go +++ b/modules/git/repo_tree_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/sha1_gogit.go b/modules/git/sha1_gogit.go index 30290f14b7..16501efb43 100644 --- a/modules/git/sha1_gogit.go +++ b/modules/git/sha1_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/sha1_nogogit.go b/modules/git/sha1_nogogit.go index 53665fc921..1835c68f5a 100644 --- a/modules/git/sha1_nogogit.go +++ b/modules/git/sha1_nogogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/signature_gogit.go b/modules/git/signature_gogit.go index 903a48133f..fe81cd97df 100644 --- a/modules/git/signature_gogit.go +++ b/modules/git/signature_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. 
//go:build gogit -// +build gogit package git diff --git a/modules/git/signature_nogogit.go b/modules/git/signature_nogogit.go index c6fe8e6d1a..81da739a5b 100644 --- a/modules/git/signature_nogogit.go +++ b/modules/git/signature_nogogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/tests/repos/language_stats_repo/hooks/applypatch-msg.sample b/modules/git/tests/repos/language_stats_repo/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/modules/git/tests/repos/language_stats_repo/hooks/commit-msg.sample b/modules/git/tests/repos/language_stats_repo/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/modules/git/tests/repos/language_stats_repo/hooks/fsmonitor-watchman.sample b/modules/git/tests/repos/language_stats_repo/hooks/fsmonitor-watchman.sample deleted file mode 100755 index 14ed0aa42d..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,173 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 2) and last update token -# formatted as a string and outputs to stdout a new update token and -# all files that have been modified since the update token. Paths must -# be relative to the root of the working tree and separated by a single NUL. 
-# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $last_update_token) = @ARGV; - -# Uncomment for debugging -# print STDERR "$0 $version $last_update_token\n"; - -# Check the hook interface version -if ($version ne 2) { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree = get_working_dir(); - -my $retry = 1; - -my $json_pkg; -eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; -} or do { - require JSON::PP; - $json_pkg = "JSON::PP"; -}; - -launch_watchman(); - -sub launch_watchman { - my $o = watchman_query(); - if (is_work_tree_watched($o)) { - output_result($o->{clock}, @{$o->{files}}); - } -} - -sub output_result { - my ($clockid, @files) = @_; - - # Uncomment for debugging watchman output - # open (my $fh, ">", ".git/watchman-output.out"); - # binmode $fh, ":utf8"; - # print $fh "$clockid\n@files\n"; - # close $fh; - - binmode STDOUT, ":utf8"; - print $clockid; - print "\0"; - local $, = "\0"; - print @files; -} - -sub watchman_clock { - my $response = qx/watchman clock "$git_work_tree"/; - die "Failed to get clock id on '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - return $json_pkg->new->utf8->decode($response); -} - -sub watchman_query { - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $last_update_token but not from the .git folder. - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - if (substr($last_update_token, 0, 1) eq "c") { - $last_update_token = "\"$last_update_token\""; - } - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $last_update_token, - "fields": ["name"], - "expression": ["not", ["dirname", ".git"]] - }] - END - - # Uncomment for debugging the watchman query - # open (my $fh, ">", ".git/watchman-query.json"); - # print $fh $query; - # close $fh; - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - # Uncomment for debugging the watch response - # open ($fh, ">", ".git/watchman-response.json"); - # print $fh $response; - # close $fh; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - return $json_pkg->new->utf8->decode($response); -} - -sub is_work_tree_watched { - my ($output) = @_; - my $error = $output->{error}; - if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) { - $retry--; - my $response = qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - $output = $json_pkg->new->utf8->decode($response); - $error = $output->{error}; - die "Watchman: $error.\n" . 
- "Falling back to scanning...\n" if $error; - - # Uncomment for debugging watchman output - # open (my $fh, ">", ".git/watchman-output.out"); - # close $fh; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - my $o = watchman_clock(); - $error = $output->{error}; - - die "Watchman: $error.\n" . - "Falling back to scanning...\n" if $error; - - output_result($o->{clock}, ("/")); - $last_update_token = $o->{clock}; - - eval { launch_watchman() }; - return 0; - } - - die "Watchman: $error.\n" . - "Falling back to scanning...\n" if $error; - - return 1; -} - -sub get_working_dir { - my $working_dir; - if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $working_dir = Win32::GetCwd(); - $working_dir =~ tr/\\/\//; - } else { - require Cwd; - $working_dir = Cwd::cwd(); - } - - return $working_dir; -} diff --git a/modules/git/tests/repos/language_stats_repo/hooks/post-update.sample b/modules/git/tests/repos/language_stats_repo/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/modules/git/tests/repos/language_stats_repo/hooks/pre-applypatch.sample b/modules/git/tests/repos/language_stats_repo/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/modules/git/tests/repos/language_stats_repo/hooks/pre-commit.sample b/modules/git/tests/repos/language_stats_repo/hooks/pre-commit.sample deleted file mode 100755 index e144712c85..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --type=bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. 
-if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/modules/git/tests/repos/language_stats_repo/hooks/pre-merge-commit.sample b/modules/git/tests/repos/language_stats_repo/hooks/pre-merge-commit.sample deleted file mode 100755 index 399eab1924..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/pre-merge-commit.sample +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git merge" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message to -# stderr if it wants to stop the merge commit. -# -# To enable this hook, rename this file to "pre-merge-commit". - -. git-sh-setup -test -x "$GIT_DIR/hooks/pre-commit" && - exec "$GIT_DIR/hooks/pre-commit" -: diff --git a/modules/git/tests/repos/language_stats_repo/hooks/pre-push.sample b/modules/git/tests/repos/language_stats_repo/hooks/pre-push.sample deleted file mode 100755 index 4ce688d32b..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -zero=$(git hash-object --stdin &2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/modules/git/tests/repos/language_stats_repo/hooks/pre-rebase.sample b/modules/git/tests/repos/language_stats_repo/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). 
-# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". 
- -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/modules/git/tests/repos/language_stats_repo/hooks/pre-receive.sample b/modules/git/tests/repos/language_stats_repo/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/modules/git/tests/repos/language_stats_repo/hooks/prepare-commit-msg.sample b/modules/git/tests/repos/language_stats_repo/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/modules/git/tests/repos/language_stats_repo/hooks/push-to-checkout.sample b/modules/git/tests/repos/language_stats_repo/hooks/push-to-checkout.sample deleted file mode 100755 index af5a0c0018..0000000000 --- a/modules/git/tests/repos/language_stats_repo/hooks/push-to-checkout.sample +++ /dev/null @@ -1,78 +0,0 @@ -#!/bin/sh - -# An example hook script to update a checked-out tree on a git push. -# -# This hook is invoked by git-receive-pack(1) when it reacts to git -# push and updates reference(s) in its repository, and when the push -# tries to update the branch that is currently checked out and the -# receive.denyCurrentBranch configuration variable is set to -# updateInstead. -# -# By default, such a push is refused if the working tree and the index -# of the remote repository has any difference from the currently -# checked out commit; when both the working tree and the index match -# the current commit, they are updated to match the newly pushed tip -# of the branch. This hook is to be used to override the default -# behaviour; however the code below reimplements the default behaviour -# as a starting point for convenient modification. -# -# The hook receives the commit with which the tip of the current -# branch is going to be updated: -commit=$1 - -# It can exit with a non-zero status to refuse the push (when it does -# so, it must not modify the index or the working tree). -die () { - echo >&2 "$*" - exit 1 -} - -# Or it can make any necessary changes to the working tree and to the -# index to bring them to the desired state when the tip of the current -# branch is updated to the new commit, and exit with a zero status. -# -# For example, the hook can simply run git read-tree -u -m HEAD "$1" -# in order to emulate git fetch that is run in the reverse direction -# with git push, as the two-tree form of git read-tree -u -m is -# essentially the same as git switch or git checkout that switches -# branches while keeping the local changes in the working tree that do -# not interfere with the difference between the branches. - -# The below is a more-or-less exact translation to shell of the C code -# for the default behaviour for git's push-to-checkout hook defined in -# the push_to_deploy() function in builtin/receive-pack.c. -# -# Note that the hook will be executed from the repository directory, -# not from the working tree, so if you want to perform operations on -# the working tree, you will have to adapt your code accordingly, e.g. -# by adding "cd .." or using relative paths. - -if ! git update-index -q --ignore-submodules --refresh -then - die "Up-to-date check failed" -fi - -if ! git diff-files --quiet --ignore-submodules -- -then - die "Working directory has unstaged changes" -fi - -# This is a rough translation of: -# -# head_has_history() ? 
"HEAD" : EMPTY_TREE_SHA1_HEX -if git cat-file -e HEAD 2>/dev/null -then - head=HEAD -else - head=$(git hash-object -t tree --stdin &2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --type=bool hooks.allowunannotated) -allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch) -denycreatebranch=$(git config --type=bool hooks.denycreatebranch) -allowdeletetag=$(git config --type=bool hooks.allowdeletetag) -allowmodifytag=$(git config --type=bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero=$(git hash-object --stdin &2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/modules/git/tests/repos/repo1_bare/hooks/applypatch-msg.sample b/modules/git/tests/repos/repo1_bare/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/modules/git/tests/repos/repo1_bare/hooks/commit-msg.sample b/modules/git/tests/repos/repo1_bare/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. 
The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/modules/git/tests/repos/repo1_bare/hooks/post-update.sample b/modules/git/tests/repos/repo1_bare/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/modules/git/tests/repos/repo1_bare/hooks/pre-applypatch.sample b/modules/git/tests/repos/repo1_bare/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/modules/git/tests/repos/repo1_bare/hooks/pre-commit.sample b/modules/git/tests/repos/repo1_bare/hooks/pre-commit.sample deleted file mode 100755 index 68d62d5446..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. 
- -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/modules/git/tests/repos/repo1_bare/hooks/pre-push.sample b/modules/git/tests/repos/repo1_bare/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/modules/git/tests/repos/repo1_bare/hooks/pre-rebase.sample b/modules/git/tests/repos/repo1_bare/hooks/pre-rebase.sample deleted file mode 100755 index 33730ca647..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? 
-not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up-to-date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. 
- -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/modules/git/tests/repos/repo1_bare/hooks/pre-receive.sample b/modules/git/tests/repos/repo1_bare/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/modules/git/tests/repos/repo1_bare/hooks/prepare-commit-msg.sample b/modules/git/tests/repos/repo1_bare/hooks/prepare-commit-msg.sample deleted file mode 100755 index f093a02ec4..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first comments out the -# "Conflicts:" part of a merge commit. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -case "$2,$3" in - merge,) - /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; - -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$1" ;; - - *) ;; -esac - -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/modules/git/tests/repos/repo1_bare/hooks/update.sample b/modules/git/tests/repos/repo1_bare/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/modules/git/tests/repos/repo1_bare/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. 
-# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/modules/git/tests/repos/repo2_empty/hooks/applypatch-msg.sample b/modules/git/tests/repos/repo2_empty/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. 
git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/modules/git/tests/repos/repo2_empty/hooks/commit-msg.sample b/modules/git/tests/repos/repo2_empty/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. - exit 1 -} diff --git a/modules/git/tests/repos/repo2_empty/hooks/fsmonitor-watchman.sample b/modules/git/tests/repos/repo2_empty/hooks/fsmonitor-watchman.sample deleted file mode 100755 index e673bb3980..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. 
- - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; <CHLD_OUT>}; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. - print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/modules/git/tests/repos/repo2_empty/hooks/post-update.sample b/modules/git/tests/repos/repo2_empty/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/modules/git/tests/repos/repo2_empty/hooks/pre-applypatch.sample b/modules/git/tests/repos/repo2_empty/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/modules/git/tests/repos/repo2_empty/hooks/pre-commit.sample b/modules/git/tests/repos/repo2_empty/hooks/pre-commit.sample deleted file mode 100755 index 6a75641638..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". 
- -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. -exec git diff-index --check --cached $against -- diff --git a/modules/git/tests/repos/repo2_empty/hooks/pre-push.sample b/modules/git/tests/repos/repo2_empty/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/modules/git/tests/repos/repo2_empty/hooks/pre-rebase.sample b/modules/git/tests/repos/repo2_empty/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. 
-# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. -only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. 
More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". - -DOC_END diff --git a/modules/git/tests/repos/repo2_empty/hooks/pre-receive.sample b/modules/git/tests/repos/repo2_empty/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/modules/git/tests/repos/repo2_empty/hooks/prepare-commit-msg.sample b/modules/git/tests/repos/repo2_empty/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . 
`git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/modules/git/tests/repos/repo2_empty/hooks/update.sample b/modules/git/tests/repos/repo2_empty/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/modules/git/tests/repos/repo2_empty/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. -# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." 
>&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) - echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/modules/git/tests/repos/repo5_pulls/HEAD b/modules/git/tests/repos/repo5_pulls/HEAD new file mode 100644 index 0000000000..cb089cd89a --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/modules/git/tests/repos/repo5_pulls/config b/modules/git/tests/repos/repo5_pulls/config new file mode 100644 index 0000000000..0a0ad6d9fe --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/config @@ -0,0 +1,6 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = true +[receive] + advertisePushOptions = true diff --git a/modules/git/tests/repos/repo5_pulls/description b/modules/git/tests/repos/repo5_pulls/description new file mode 100644 index 0000000000..498b267a8c --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/modules/git/tests/repos/repo5_pulls/info/exclude b/modules/git/tests/repos/repo5_pulls/info/exclude new file mode 100644 index 0000000000..a5196d1be8 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. 
+# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/modules/git/tests/repos/repo5_pulls/objects/1a/2959532d2d18daa87bbd9f9d16051bef7b51df b/modules/git/tests/repos/repo5_pulls/objects/1a/2959532d2d18daa87bbd9f9d16051bef7b51df new file mode 100644 index 0000000000..90464be078 Binary files /dev/null and b/modules/git/tests/repos/repo5_pulls/objects/1a/2959532d2d18daa87bbd9f9d16051bef7b51df differ diff --git a/modules/git/tests/repos/repo5_pulls/objects/56/51a1c4a48c47484a7a00a967ba4b6dde070bbf b/modules/git/tests/repos/repo5_pulls/objects/56/51a1c4a48c47484a7a00a967ba4b6dde070bbf new file mode 100644 index 0000000000..cf9d59f7ae Binary files /dev/null and b/modules/git/tests/repos/repo5_pulls/objects/56/51a1c4a48c47484a7a00a967ba4b6dde070bbf differ diff --git a/modules/git/tests/repos/repo5_pulls/objects/58/a4bcc53ac13e7ff76127e0fb518b5262bf09af b/modules/git/tests/repos/repo5_pulls/objects/58/a4bcc53ac13e7ff76127e0fb518b5262bf09af new file mode 100644 index 0000000000..efc69b12e6 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/objects/58/a4bcc53ac13e7ff76127e0fb518b5262bf09af @@ -0,0 +1 @@ +x%n0 ;)0H1 P](F2Tk7|wu]{OқHp8$A1"\aRff4 #ZL:J\-#fO2sN6ӯN;v# 3p׺5py^yL)xۼs_n1]ާa_)@X \ No newline at end of file diff --git a/modules/git/tests/repos/repo5_pulls/objects/6d/0b4cca434953833618fcd3dd7acff42c800df1 b/modules/git/tests/repos/repo5_pulls/objects/6d/0b4cca434953833618fcd3dd7acff42c800df1 new file mode 100644 index 0000000000..74e848ffcc Binary files /dev/null and b/modules/git/tests/repos/repo5_pulls/objects/6d/0b4cca434953833618fcd3dd7acff42c800df1 differ diff --git a/modules/git/tests/repos/repo5_pulls/objects/a5/2ca5af1b0277638ce20797f80bb1a2997470ab b/modules/git/tests/repos/repo5_pulls/objects/a5/2ca5af1b0277638ce20797f80bb1a2997470ab new file mode 100644 index 0000000000..d6e616d902 Binary files /dev/null and b/modules/git/tests/repos/repo5_pulls/objects/a5/2ca5af1b0277638ce20797f80bb1a2997470ab differ diff --git a/modules/git/tests/repos/repo5_pulls/objects/bf/4dc0709be60f043821351ff4bb2b17e5cabbb2 b/modules/git/tests/repos/repo5_pulls/objects/bf/4dc0709be60f043821351ff4bb2b17e5cabbb2 new file mode 100644 index 0000000000..271cffb983 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/objects/bf/4dc0709be60f043821351ff4bb2b17e5cabbb2 @@ -0,0 +1,2 @@ +xMN0 Yl' i%4ܟ <=}~2MccM"h֬z)q(CRIOtk27Ƚ1=GrL&]YBFt'&o?^/uѾ*Lݛů6,\ǵO +5ؤ#xj吇CA9VyBciޤ^Rs8.klyCi \ No newline at end of file diff --git a/modules/git/tests/repos/repo5_pulls/objects/d8/e0bbb45f200e67d9a784ce55bd90821af45ebd b/modules/git/tests/repos/repo5_pulls/objects/d8/e0bbb45f200e67d9a784ce55bd90821af45ebd new file mode 100644 index 0000000000..0e2dc872fa --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/objects/d8/e0bbb45f200e67d9a784ce55bd90821af45ebd @@ -0,0 +1,2 @@ +xAJAE])"VwWt EčzNU5$T9&$'1+y|f6=^XSNpE̅"R1v>W(gDJ@%WPKZ +c2D2)rm`Yyfh:j\)۩=.">W~65w<|>>/| mp?X \ No newline at end of file diff --git a/modules/git/tests/repos/repo5_pulls/objects/ed/5119b3c1f45547b6785bc03eac7f87570fa17f b/modules/git/tests/repos/repo5_pulls/objects/ed/5119b3c1f45547b6785bc03eac7f87570fa17f new file mode 100644 index 0000000000..33d2a219e2 Binary files /dev/null and b/modules/git/tests/repos/repo5_pulls/objects/ed/5119b3c1f45547b6785bc03eac7f87570fa17f differ diff --git a/modules/git/tests/repos/repo5_pulls/objects/ed/8f4d2fa5b2420706580d191f5dd50c4e491f3f 
b/modules/git/tests/repos/repo5_pulls/objects/ed/8f4d2fa5b2420706580d191f5dd50c4e491f3f new file mode 100644 index 0000000000..d64847cf20 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/objects/ed/8f4d2fa5b2420706580d191f5dd50c4e491f3f @@ -0,0 +1,3 @@ +xAJAE])!VtM"YF=@uw5$D\yoh +n?lxbMd,TC7f%uĔP3Jr;i:ԎJ,`5P)a̔1ƞ +9ym9U.nIgYOlG,:=qs$DMwa_S6o9X \ No newline at end of file diff --git a/modules/git/tests/repos/repo5_pulls/objects/ee/469963e76ae1bb7ee83d7510df2864e6c8c640 b/modules/git/tests/repos/repo5_pulls/objects/ee/469963e76ae1bb7ee83d7510df2864e6c8c640 new file mode 100644 index 0000000000..9cd9d008e1 Binary files /dev/null and b/modules/git/tests/repos/repo5_pulls/objects/ee/469963e76ae1bb7ee83d7510df2864e6c8c640 differ diff --git a/modules/git/tests/repos/repo5_pulls/objects/info/packs b/modules/git/tests/repos/repo5_pulls/objects/info/packs new file mode 100644 index 0000000000..8bbc848724 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/objects/info/packs @@ -0,0 +1,2 @@ +P pack-81423f591973f5d9dab89cc45afa1c544448133e.pack + diff --git a/modules/git/tests/repos/repo5_pulls/objects/pack/pack-81423f591973f5d9dab89cc45afa1c544448133e.idx b/modules/git/tests/repos/repo5_pulls/objects/pack/pack-81423f591973f5d9dab89cc45afa1c544448133e.idx new file mode 100644 index 0000000000..b66df23164 Binary files /dev/null and b/modules/git/tests/repos/repo5_pulls/objects/pack/pack-81423f591973f5d9dab89cc45afa1c544448133e.idx differ diff --git a/modules/git/tests/repos/repo5_pulls/objects/pack/pack-81423f591973f5d9dab89cc45afa1c544448133e.pack b/modules/git/tests/repos/repo5_pulls/objects/pack/pack-81423f591973f5d9dab89cc45afa1c544448133e.pack new file mode 100644 index 0000000000..a5dfc5ebde Binary files /dev/null and b/modules/git/tests/repos/repo5_pulls/objects/pack/pack-81423f591973f5d9dab89cc45afa1c544448133e.pack differ diff --git a/modules/git/tests/repos/repo5_pulls/packed-refs b/modules/git/tests/repos/repo5_pulls/packed-refs new file mode 100644 index 0000000000..d0012b5441 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/packed-refs @@ -0,0 +1,5 @@ +# pack-refs with: peeled fully-peeled sorted +c83380d7056593c51a699d12b9c00627bd5743e9 refs/heads/test-patch-1 +c83380d7056593c51a699d12b9c00627bd5743e9 refs/pull/1/head +111cac04bd7d20301964e27a93698aabb5781b80 refs/pull/1/merge +72866af952e98d02a73003501836074b286a78f6 refs/tags/v0.9.99 diff --git a/modules/git/tests/repos/repo5_pulls/refs/heads/master b/modules/git/tests/repos/repo5_pulls/refs/heads/master new file mode 100644 index 0000000000..9a8e3b2a34 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/refs/heads/master @@ -0,0 +1 @@ +d8e0bbb45f200e67d9a784ce55bd90821af45ebd diff --git a/modules/git/tests/repos/repo5_pulls/refs/heads/master-clone b/modules/git/tests/repos/repo5_pulls/refs/heads/master-clone new file mode 100644 index 0000000000..9a8e3b2a34 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/refs/heads/master-clone @@ -0,0 +1 @@ +d8e0bbb45f200e67d9a784ce55bd90821af45ebd diff --git a/modules/git/tests/repos/repo5_pulls/refs/heads/test-patch-1 b/modules/git/tests/repos/repo5_pulls/refs/heads/test-patch-1 new file mode 100644 index 0000000000..d8b26cb037 --- /dev/null +++ b/modules/git/tests/repos/repo5_pulls/refs/heads/test-patch-1 @@ -0,0 +1 @@ +58a4bcc53ac13e7ff76127e0fb518b5262bf09af diff --git a/modules/git/tests/repos/repo5_pulls/refs/pull/4/head b/modules/git/tests/repos/repo5_pulls/refs/pull/4/head new file mode 100644 index 0000000000..d8b26cb037 --- /dev/null +++ 
b/modules/git/tests/repos/repo5_pulls/refs/pull/4/head @@ -0,0 +1 @@ +58a4bcc53ac13e7ff76127e0fb518b5262bf09af diff --git a/modules/git/tree.go b/modules/git/tree.go index f34e0554d7..a83336f3db 100644 --- a/modules/git/tree.go +++ b/modules/git/tree.go @@ -55,7 +55,7 @@ func (repo *Repository) LsTree(ref string, filenames ...string) ([]string, error cmd.AddArguments(arg) } } - res, err := cmd.RunInDirBytes(repo.Path) + res, _, err := cmd.RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } diff --git a/modules/git/tree_blob_gogit.go b/modules/git/tree_blob_gogit.go index be7cb33d35..bb010b5883 100644 --- a/modules/git/tree_blob_gogit.go +++ b/modules/git/tree_blob_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/tree_blob_nogogit.go b/modules/git/tree_blob_nogogit.go index df23ff01b4..3770004d6d 100644 --- a/modules/git/tree_blob_nogogit.go +++ b/modules/git/tree_blob_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/tree_entry_gogit.go b/modules/git/tree_entry_gogit.go index 20e767eea1..2b2992c32a 100644 --- a/modules/git/tree_entry_gogit.go +++ b/modules/git/tree_entry_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/tree_entry_nogogit.go b/modules/git/tree_entry_nogogit.go index 076062e1d6..aff67a3b22 100644 --- a/modules/git/tree_entry_nogogit.go +++ b/modules/git/tree_entry_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package git diff --git a/modules/git/tree_entry_test.go b/modules/git/tree_entry_test.go index 402c345887..c512c7348e 100644 --- a/modules/git/tree_entry_test.go +++ b/modules/git/tree_entry_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git @@ -57,7 +56,7 @@ func TestEntriesCustomSort(t *testing.T) { } func TestFollowLink(t *testing.T) { - r, err := OpenRepository("tests/repos/repo1_bare") + r, err := openRepositoryWithDefaultContext("tests/repos/repo1_bare") assert.NoError(t, err) defer r.Close() diff --git a/modules/git/tree_gogit.go b/modules/git/tree_gogit.go index bc02088366..54f8e140fb 100644 --- a/modules/git/tree_gogit.go +++ b/modules/git/tree_gogit.go @@ -4,7 +4,6 @@ // license that can be found in the LICENSE file. //go:build gogit -// +build gogit package git diff --git a/modules/git/tree_nogogit.go b/modules/git/tree_nogogit.go index d02fe8a006..7defb064a4 100644 --- a/modules/git/tree_nogogit.go +++ b/modules/git/tree_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build !gogit -// +build !gogit package git @@ -81,16 +80,17 @@ func (t *Tree) ListEntries() (Entries, error) { } } - stdout, err := NewCommand(t.repo.Ctx, "ls-tree", "-l", t.ID.String()).RunInDirBytes(t.repo.Path) - if err != nil { - if strings.Contains(err.Error(), "fatal: Not a valid object name") || strings.Contains(err.Error(), "fatal: not a tree object") { + stdout, _, runErr := NewCommand(t.repo.Ctx, "ls-tree", "-l", t.ID.String()).RunStdBytes(&RunOpts{Dir: t.repo.Path}) + if runErr != nil { + if strings.Contains(runErr.Error(), "fatal: Not a valid object name") || strings.Contains(runErr.Error(), "fatal: not a tree object") { return nil, ErrNotExist{ ID: t.ID.String(), } } - return nil, err + return nil, runErr } + var err error t.entries, err = parseTreeEntries(stdout, t) if err == nil { t.entriesParsed = true @@ -104,11 +104,13 @@ func (t *Tree) ListEntriesRecursive() (Entries, error) { if t.entriesRecursiveParsed { return t.entriesRecursive, nil } - stdout, err := NewCommand(t.repo.Ctx, "ls-tree", "-t", "-l", "-r", t.ID.String()).RunInDirBytes(t.repo.Path) - if err != nil { - return nil, err + + stdout, _, runErr := NewCommand(t.repo.Ctx, "ls-tree", "-t", "-l", "-r", t.ID.String()).RunStdBytes(&RunOpts{Dir: t.repo.Path}) + if runErr != nil { + return nil, runErr } + var err error t.entriesRecursive, err = parseTreeEntries(stdout, t) if err == nil { t.entriesRecursiveParsed = true diff --git a/modules/gitgraph/graph.go b/modules/gitgraph/graph.go index e15441b883..271382525a 100644 --- a/modules/gitgraph/graph.go +++ b/modules/gitgraph/graph.go @@ -64,11 +64,10 @@ func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bo scanner := bufio.NewScanner(stdoutReader) - if err := graphCmd.RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: r.Path, - Stdout: stdoutWriter, - Stderr: stderr, + if err := graphCmd.Run(&git.RunOpts{ + Dir: r.Path, + Stdout: stdoutWriter, + Stderr: stderr, PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { _ = stdoutWriter.Close() defer stdoutReader.Close() diff --git a/modules/gitgraph/graph_test.go b/modules/gitgraph/graph_test.go index e7173b521c..ea6553529a 100644 --- a/modules/gitgraph/graph_test.go +++ b/modules/gitgraph/graph_test.go @@ -14,7 +14,7 @@ import ( ) func BenchmarkGetCommitGraph(b *testing.B) { - currentRepo, err := git.OpenRepository(".") + currentRepo, err := git.OpenRepository(git.DefaultContext, ".") if err != nil || currentRepo == nil { b.Error("Could not open repository") } diff --git a/modules/graceful/manager.go b/modules/graceful/manager.go index f783573607..8766cfca0e 100644 --- a/modules/graceful/manager.go +++ b/modules/graceful/manager.go @@ -6,6 +6,7 @@ package graceful import ( "context" + "runtime/pprof" "sync" "time" @@ -62,7 +63,6 @@ type WithCallback func(callback func()) // Similarly the callback function provided to atTerminate must return once termination is complete. // Please note that use of the atShutdown and atTerminate callbacks will create go-routines that will wait till their respective signals // - users must therefore be careful to only call these as necessary. -// If run is not expected to run indefinitely RunWithShutdownChan is likely to be more appropriate. 
type RunnableWithShutdownFns func(atShutdown, atTerminate func(func())) // RunWithShutdownFns takes a function that has both atShutdown and atTerminate callbacks @@ -70,7 +70,6 @@ type RunnableWithShutdownFns func(atShutdown, atTerminate func(func())) // Similarly the callback function provided to atTerminate must return once termination is complete. // Please note that use of the atShutdown and atTerminate callbacks will create go-routines that will wait till their respective signals // - users must therefore be careful to only call these as necessary. -// If run is not expected to run indefinitely RunWithShutdownChan is likely to be more appropriate. func (g *Manager) RunWithShutdownFns(run RunnableWithShutdownFns) { g.runningServerWaitGroup.Add(1) defer g.runningServerWaitGroup.Done() @@ -98,32 +97,6 @@ func (g *Manager) RunWithShutdownFns(run RunnableWithShutdownFns) { }) } -// RunnableWithShutdownChan is a runnable with functions to run at shutdown and terminate. -// After the atShutdown channel is closed, the main function must return once shutdown is complete. -// (Optionally IsHammer may be waited for instead however, this should be avoided if possible.) -// The callback function provided to atTerminate must return once termination is complete. -// Please note that use of the atTerminate function will create a go-routine that will wait till terminate - users must therefore be careful to only call this as necessary. -type RunnableWithShutdownChan func(atShutdown <-chan struct{}, atTerminate WithCallback) - -// RunWithShutdownChan takes a function that has channel to watch for shutdown and atTerminate callbacks -// After the atShutdown channel is closed, the main function must return once shutdown is complete. -// (Optionally IsHammer may be waited for instead however, this should be avoided if possible.) -// The callback function provided to atTerminate must return once termination is complete. -// Please note that use of the atTerminate function will create a go-routine that will wait till terminate - users must therefore be careful to only call this as necessary. -func (g *Manager) RunWithShutdownChan(run RunnableWithShutdownChan) { - g.runningServerWaitGroup.Add(1) - defer g.runningServerWaitGroup.Done() - defer func() { - if err := recover(); err != nil { - log.Critical("PANIC during RunWithShutdownChan: %v\nStacktrace: %s", err, log.Stack(2)) - g.doShutdown() - } - }() - run(g.IsShutdown(), func(atTerminate func()) { - g.RunAtTerminate(atTerminate) - }) -} - // RunWithShutdownContext takes a function that has a context to watch for shutdown. // After the provided context is Done(), the main function must return once shutdown is complete. // (Optionally the HammerContext may be obtained and waited for however, this should be avoided if possible.) 
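As context for the graceful manager changes above: with RunnableWithShutdownChan/RunWithShutdownChan removed, RunWithShutdownFns remains the callback-based way to run a service under the manager. A minimal sketch of a caller, assuming only the Manager API shown in this diff (the ticker loop and callback bodies are illustrative, not Gitea code):

    package example

    import (
        "time"

        "code.gitea.io/gitea/modules/graceful"
    )

    // runExampleService is illustrative only: a long-running job that registers
    // shutdown and terminate callbacks with the graceful manager.
    func runExampleService() {
        graceful.GetManager().RunWithShutdownFns(func(atShutdown, atTerminate func(func())) {
            stop := make(chan struct{})
            // The atShutdown callback should return promptly once shutdown starts.
            atShutdown(func() { close(stop) })
            // The atTerminate callback runs when the manager finally terminates.
            atTerminate(func() { /* flush state, close handles */ })
            for {
                select {
                case <-stop:
                    return
                case <-time.After(time.Second):
                    // periodic work
                }
            }
        })
    }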
@@ -136,7 +109,9 @@ func (g *Manager) RunWithShutdownContext(run func(context.Context)) { g.doShutdown() } }() - run(g.ShutdownContext()) + ctx := g.ShutdownContext() + pprof.SetGoroutineLabels(ctx) // We don't have a label to restore back to but I think this is fine + run(ctx) } // RunAtTerminate adds to the terminate wait group and creates a go-routine to run the provided function at termination @@ -198,6 +173,8 @@ func (g *Manager) doShutdown() { } g.lock.Lock() g.shutdownCtxCancel() + atShutdownCtx := pprof.WithLabels(g.hammerCtx, pprof.Labels("graceful-lifecycle", "post-shutdown")) + pprof.SetGoroutineLabels(atShutdownCtx) for _, fn := range g.toRunAtShutdown { go fn() } @@ -214,7 +191,7 @@ func (g *Manager) doShutdown() { g.doTerminate() g.WaitForTerminate() g.lock.Lock() - g.doneCtxCancel() + g.managerCtxCancel() g.lock.Unlock() }() } @@ -227,6 +204,8 @@ func (g *Manager) doHammerTime(d time.Duration) { default: log.Warn("Setting Hammer condition") g.hammerCtxCancel() + atHammerCtx := pprof.WithLabels(g.terminateCtx, pprof.Labels("graceful-lifecycle", "post-hammer")) + pprof.SetGoroutineLabels(atHammerCtx) for _, fn := range g.toRunAtHammer { go fn() } @@ -244,6 +223,9 @@ func (g *Manager) doTerminate() { default: log.Warn("Terminating") g.terminateCtxCancel() + atTerminateCtx := pprof.WithLabels(g.managerCtx, pprof.Labels("graceful-lifecycle", "post-terminate")) + pprof.SetGoroutineLabels(atTerminateCtx) + for _, fn := range g.toRunAtTerminate { go fn() } @@ -331,20 +313,20 @@ func (g *Manager) InformCleanup() { // Done allows the manager to be viewed as a context.Context, it returns a channel that is closed when the server is finished terminating func (g *Manager) Done() <-chan struct{} { - return g.doneCtx.Done() + return g.managerCtx.Done() } // Err allows the manager to be viewed as a context.Context done at Terminate func (g *Manager) Err() error { - return g.doneCtx.Err() + return g.managerCtx.Err() } // Value allows the manager to be viewed as a context.Context done at Terminate func (g *Manager) Value(key interface{}) interface{} { - return g.doneCtx.Value(key) + return g.managerCtx.Value(key) } // Deadline returns nil as there is no fixed Deadline for the manager, it allows the manager to be viewed as a context.Context func (g *Manager) Deadline() (deadline time.Time, ok bool) { - return g.doneCtx.Deadline() + return g.managerCtx.Deadline() } diff --git a/modules/graceful/manager_unix.go b/modules/graceful/manager_unix.go index 99e84d73e8..9d3816e9c2 100644 --- a/modules/graceful/manager_unix.go +++ b/modules/graceful/manager_unix.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build !windows -// +build !windows package graceful @@ -12,11 +11,13 @@ import ( "errors" "os" "os/signal" + "runtime/pprof" "sync" "syscall" "time" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/setting" ) @@ -29,11 +30,11 @@ type Manager struct { shutdownCtx context.Context hammerCtx context.Context terminateCtx context.Context - doneCtx context.Context + managerCtx context.Context shutdownCtxCancel context.CancelFunc hammerCtxCancel context.CancelFunc terminateCtxCancel context.CancelFunc - doneCtxCancel context.CancelFunc + managerCtxCancel context.CancelFunc runningServerWaitGroup sync.WaitGroup createServerWaitGroup sync.WaitGroup terminateWaitGroup sync.WaitGroup @@ -58,11 +59,21 @@ func (g *Manager) start(ctx context.Context) { g.terminateCtx, g.terminateCtxCancel = context.WithCancel(ctx) g.shutdownCtx, g.shutdownCtxCancel = context.WithCancel(ctx) g.hammerCtx, g.hammerCtxCancel = context.WithCancel(ctx) - g.doneCtx, g.doneCtxCancel = context.WithCancel(ctx) + g.managerCtx, g.managerCtxCancel = context.WithCancel(ctx) + + // Next add pprof labels to these contexts + g.terminateCtx = pprof.WithLabels(g.terminateCtx, pprof.Labels("graceful-lifecycle", "with-terminate")) + g.shutdownCtx = pprof.WithLabels(g.shutdownCtx, pprof.Labels("graceful-lifecycle", "with-shutdown")) + g.hammerCtx = pprof.WithLabels(g.hammerCtx, pprof.Labels("graceful-lifecycle", "with-hammer")) + g.managerCtx = pprof.WithLabels(g.managerCtx, pprof.Labels("graceful-lifecycle", "with-manager")) + + // Now label this and all goroutines created by this goroutine with the graceful-lifecycle manager + pprof.SetGoroutineLabels(g.managerCtx) + defer pprof.SetGoroutineLabels(ctx) // Set the running state & handle signals g.setState(stateRunning) - go g.handleSignals(ctx) + go g.handleSignals(g.managerCtx) // Handle clean up of unused provided listeners and delayed start-up startupDone := make(chan struct{}) @@ -101,6 +112,9 @@ func (g *Manager) start(ctx context.Context) { } func (g *Manager) handleSignals(ctx context.Context) { + ctx, _, finished := process.GetManager().AddTypedContext(ctx, "Graceful: HandleSignals", process.SystemProcessType, true) + defer finished() + signalChannel := make(chan os.Signal, 1) signal.Notify( diff --git a/modules/graceful/manager_windows.go b/modules/graceful/manager_windows.go index e5f5541ed3..e7e619f53f 100644 --- a/modules/graceful/manager_windows.go +++ b/modules/graceful/manager_windows.go @@ -4,13 +4,13 @@ // This code is heavily inspired by the archived gofacebook/gracenet/net.go handler //go:build windows -// +build windows package graceful import ( "context" "os" + "runtime/pprof" "strconv" "sync" "time" @@ -40,11 +40,11 @@ type Manager struct { shutdownCtx context.Context hammerCtx context.Context terminateCtx context.Context - doneCtx context.Context + managerCtx context.Context shutdownCtxCancel context.CancelFunc hammerCtxCancel context.CancelFunc terminateCtxCancel context.CancelFunc - doneCtxCancel context.CancelFunc + managerCtxCancel context.CancelFunc runningServerWaitGroup sync.WaitGroup createServerWaitGroup sync.WaitGroup terminateWaitGroup sync.WaitGroup @@ -71,7 +71,17 @@ func (g *Manager) start() { g.terminateCtx, g.terminateCtxCancel = context.WithCancel(g.ctx) g.shutdownCtx, g.shutdownCtxCancel = context.WithCancel(g.ctx) g.hammerCtx, g.hammerCtxCancel = context.WithCancel(g.ctx) - g.doneCtx, g.doneCtxCancel = context.WithCancel(g.ctx) + g.managerCtx, g.managerCtxCancel = context.WithCancel(g.ctx) + 
+ // Next add pprof labels to these contexts + g.terminateCtx = pprof.WithLabels(g.terminateCtx, pprof.Labels("graceful-lifecycle", "with-terminate")) + g.shutdownCtx = pprof.WithLabels(g.shutdownCtx, pprof.Labels("graceful-lifecycle", "with-shutdown")) + g.hammerCtx = pprof.WithLabels(g.hammerCtx, pprof.Labels("graceful-lifecycle", "with-hammer")) + g.managerCtx = pprof.WithLabels(g.managerCtx, pprof.Labels("graceful-lifecycle", "with-manager")) + + // Now label this and all goroutines created by this goroutine with the graceful-lifecycle manager + pprof.SetGoroutineLabels(g.managerCtx) + defer pprof.SetGoroutineLabels(g.ctx) // Make channels g.shutdownRequested = make(chan struct{}) diff --git a/modules/graceful/net_unix.go b/modules/graceful/net_unix.go index 6ffa8150cc..680ff529af 100644 --- a/modules/graceful/net_unix.go +++ b/modules/graceful/net_unix.go @@ -4,7 +4,6 @@ // This code is heavily inspired by the archived gofacebook/gracenet/net.go handler //go:build !windows -// +build !windows package graceful diff --git a/modules/graceful/net_windows.go b/modules/graceful/net_windows.go index 35b7a9d1fe..07ae51b8dd 100644 --- a/modules/graceful/net_windows.go +++ b/modules/graceful/net_windows.go @@ -4,7 +4,6 @@ // This code is heavily inspired by the archived gofacebook/gracenet/net.go handler //go:build windows -// +build windows package graceful diff --git a/modules/graceful/restart_unix.go b/modules/graceful/restart_unix.go index 9969e007c3..2654ddfb94 100644 --- a/modules/graceful/restart_unix.go +++ b/modules/graceful/restart_unix.go @@ -4,7 +4,6 @@ // This code is heavily inspired by the archived gofacebook/gracenet/net.go handler //go:build !windows -// +build !windows package graceful diff --git a/modules/hostmatcher/hostmatcher.go b/modules/hostmatcher/hostmatcher.go index 9492a479f1..00bbc6cb0a 100644 --- a/modules/hostmatcher/hostmatcher.go +++ b/modules/hostmatcher/hostmatcher.go @@ -8,8 +8,6 @@ import ( "net" "path/filepath" "strings" - - "code.gitea.io/gitea/modules/util" ) // HostMatchList is used to check if a host or IP is in a list. 
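The pprof labelling pattern added to the graceful manager above (wrap a context with labels, then apply them to the current goroutine) can be reproduced in isolation with just the standard library; the label key and value below are illustrative, not the ones Gitea uses:

    package example

    import (
        "context"
        "fmt"
        "runtime/pprof"
    )

    func labelExample(ctx context.Context) {
        // Attach labels to a derived context, mirroring how the manager labels
        // its lifecycle contexts before handing them to goroutines.
        labeled := pprof.WithLabels(ctx, pprof.Labels("example-lifecycle", "running"))

        // Apply the labels to the current goroutine so they appear in goroutine
        // profiles, and restore the previous label set when done.
        pprof.SetGoroutineLabels(labeled)
        defer pprof.SetGoroutineLabels(ctx)

        if v, ok := pprof.Label(labeled, "example-lifecycle"); ok {
            fmt.Println("goroutine labelled:", v)
        }
    }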
@@ -104,11 +102,11 @@ func (hl *HostMatchList) checkIP(ip net.IP) bool { for _, builtin := range hl.builtins { switch builtin { case MatchBuiltinExternal: - if ip.IsGlobalUnicast() && !util.IsIPPrivate(ip) { + if ip.IsGlobalUnicast() && !ip.IsPrivate() { return true } case MatchBuiltinPrivate: - if util.IsIPPrivate(ip) { + if ip.IsPrivate() { return true } case MatchBuiltinLoopback: @@ -127,13 +125,18 @@ func (hl *HostMatchList) checkIP(ip net.IP) bool { // MatchHostName checks if the host matches an allow/deny(block) list func (hl *HostMatchList) MatchHostName(host string) bool { + hostname, _, err := net.SplitHostPort(host) + if err != nil { + hostname = host + } + if hl == nil { return false } - if hl.checkPattern(host) { + if hl.checkPattern(hostname) { return true } - if ip := net.ParseIP(host); ip != nil { + if ip := net.ParseIP(hostname); ip != nil { return hl.checkIP(ip) } return false diff --git a/modules/hostmatcher/hostmatcher_test.go b/modules/hostmatcher/hostmatcher_test.go index 66030a32f1..b93976df6a 100644 --- a/modules/hostmatcher/hostmatcher_test.go +++ b/modules/hostmatcher/hostmatcher_test.go @@ -38,6 +38,7 @@ func TestHostOrIPMatchesList(t *testing.T) { {"", net.ParseIP("10.0.1.1"), true}, {"10.0.1.1", nil, true}, + {"10.0.1.1:8080", nil, true}, {"", net.ParseIP("192.168.1.1"), true}, {"192.168.1.1", nil, true}, {"", net.ParseIP("fd00::1"), true}, @@ -48,6 +49,7 @@ func TestHostOrIPMatchesList(t *testing.T) { {"mydomain.com", net.IPv4zero, false}, {"sub.mydomain.com", net.IPv4zero, true}, + {"sub.mydomain.com:8080", net.IPv4zero, true}, {"", net.ParseIP("169.254.1.1"), true}, {"169.254.1.1", nil, true}, diff --git a/modules/httpcache/httpcache.go b/modules/httpcache/httpcache.go index 11b63148d9..5797e981cf 100644 --- a/modules/httpcache/httpcache.go +++ b/modules/httpcache/httpcache.go @@ -37,18 +37,23 @@ func generateETag(fi os.FileInfo) string { // HandleTimeCache handles time-based caching for a HTTP request func HandleTimeCache(req *http.Request, w http.ResponseWriter, fi os.FileInfo) (handled bool) { + return HandleGenericTimeCache(req, w, fi.ModTime()) +} + +// HandleGenericTimeCache handles time-based caching for a HTTP request +func HandleGenericTimeCache(req *http.Request, w http.ResponseWriter, lastModified time.Time) (handled bool) { AddCacheControlToHeader(w.Header(), setting.StaticCacheTime) ifModifiedSince := req.Header.Get("If-Modified-Since") if ifModifiedSince != "" { t, err := time.Parse(http.TimeFormat, ifModifiedSince) - if err == nil && fi.ModTime().Unix() <= t.Unix() { + if err == nil && lastModified.Unix() <= t.Unix() { w.WriteHeader(http.StatusNotModified) return true } } - w.Header().Set("Last-Modified", fi.ModTime().Format(http.TimeFormat)) + w.Header().Set("Last-Modified", lastModified.Format(http.TimeFormat)) return false } @@ -85,3 +90,33 @@ func checkIfNoneMatchIsValid(req *http.Request, etag string) bool { } return false } + +// HandleGenericETagTimeCache handles ETag-based caching with Last-Modified caching for a HTTP request. +// It returns true if the request was handled. 
+func HandleGenericETagTimeCache(req *http.Request, w http.ResponseWriter, etag string, lastModified time.Time) (handled bool) { + if len(etag) > 0 { + w.Header().Set("Etag", etag) + } + if !lastModified.IsZero() { + w.Header().Set("Last-Modified", lastModified.Format(http.TimeFormat)) + } + + if len(etag) > 0 { + if checkIfNoneMatchIsValid(req, etag) { + w.WriteHeader(http.StatusNotModified) + return true + } + } + if !lastModified.IsZero() { + ifModifiedSince := req.Header.Get("If-Modified-Since") + if ifModifiedSince != "" { + t, err := time.Parse(http.TimeFormat, ifModifiedSince) + if err == nil && lastModified.Unix() <= t.Unix() { + w.WriteHeader(http.StatusNotModified) + return true + } + } + } + AddCacheControlToHeader(w.Header(), setting.StaticCacheTime) + return false +} diff --git a/modules/indexer/code/bleve.go b/modules/indexer/code/bleve.go index 309b33bedf..1abb3c0219 100644 --- a/modules/indexer/code/bleve.go +++ b/modules/indexer/code/bleve.go @@ -191,9 +191,10 @@ func (b *BleveIndexer) addUpdate(ctx context.Context, batchWriter git.WriteClose size := update.Size + var err error if !update.Sized { - stdout, err := git.NewCommand(ctx, "cat-file", "-s", update.BlobSha). - RunInDir(repo.RepoPath()) + var stdout string + stdout, _, err = git.NewCommand(ctx, "cat-file", "-s", update.BlobSha).RunStdString(&git.RunOpts{Dir: repo.RepoPath()}) if err != nil { return err } @@ -210,7 +211,7 @@ func (b *BleveIndexer) addUpdate(ctx context.Context, batchWriter git.WriteClose return err } - _, _, size, err := git.ReadBatchLine(batchReader) + _, _, size, err = git.ReadBatchLine(batchReader) if err != nil { return err } diff --git a/modules/indexer/code/elastic_search.go b/modules/indexer/code/elastic_search.go index dd6ba19995..7263f27657 100644 --- a/modules/indexer/code/elastic_search.go +++ b/modules/indexer/code/elastic_search.go @@ -220,10 +220,10 @@ func (b *ElasticSearchIndexer) addUpdate(ctx context.Context, batchWriter git.Wr } size := update.Size - + var err error if !update.Sized { - stdout, err := git.NewCommand(ctx, "cat-file", "-s", update.BlobSha). - RunInDir(repo.RepoPath()) + var stdout string + stdout, _, err = git.NewCommand(ctx, "cat-file", "-s", update.BlobSha).RunStdString(&git.RunOpts{Dir: repo.RepoPath()}) if err != nil { return nil, err } @@ -240,7 +240,7 @@ func (b *ElasticSearchIndexer) addUpdate(ctx context.Context, batchWriter git.Wr return nil, err } - _, _, size, err := git.ReadBatchLine(batchReader) + _, _, size, err = git.ReadBatchLine(batchReader) if err != nil { return nil, err } diff --git a/modules/indexer/code/git.go b/modules/indexer/code/git.go index 62444f6251..60018af20c 100644 --- a/modules/indexer/code/git.go +++ b/modules/indexer/code/git.go @@ -29,7 +29,7 @@ type repoChanges struct { } func getDefaultBranchSha(ctx context.Context, repo *repo_model.Repository) (string, error) { - stdout, err := git.NewCommand(ctx, "show-ref", "-s", git.BranchPrefix+repo.DefaultBranch).RunInDir(repo.RepoPath()) + stdout, _, err := git.NewCommand(ctx, "show-ref", "-s", git.BranchPrefix+repo.DefaultBranch).RunStdString(&git.RunOpts{Dir: repo.RepoPath()}) if err != nil { return "", err } @@ -92,30 +92,32 @@ func parseGitLsTreeOutput(stdout []byte) ([]fileUpdate, error) { // genesisChanges get changes to add repo to the indexer for the first time func genesisChanges(ctx context.Context, repo *repo_model.Repository, revision string) (*repoChanges, error) { var changes repoChanges - stdout, err := git.NewCommand(ctx, "ls-tree", "--full-tree", "-l", "-r", revision). 
- RunInDirBytes(repo.RepoPath()) - if err != nil { - return nil, err + stdout, _, runErr := git.NewCommand(ctx, "ls-tree", "--full-tree", "-l", "-r", revision).RunStdBytes(&git.RunOpts{Dir: repo.RepoPath()}) + if runErr != nil { + return nil, runErr } + + var err error changes.Updates, err = parseGitLsTreeOutput(stdout) return &changes, err } // nonGenesisChanges get changes since the previous indexer update func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revision string) (*repoChanges, error) { - diffCmd := git.NewCommand(ctx, "diff", "--name-status", - repo.CodeIndexerStatus.CommitSha, revision) - stdout, err := diffCmd.RunInDir(repo.RepoPath()) - if err != nil { + diffCmd := git.NewCommand(ctx, "diff", "--name-status", repo.CodeIndexerStatus.CommitSha, revision) + stdout, _, runErr := diffCmd.RunStdString(&git.RunOpts{Dir: repo.RepoPath()}) + if runErr != nil { // previous commit sha may have been removed by a force push, so // try rebuilding from scratch - log.Warn("git diff: %v", err) - if err = indexer.Delete(repo.ID); err != nil { + log.Warn("git diff: %v", runErr) + if err := indexer.Delete(repo.ID); err != nil { return nil, err } return genesisChanges(ctx, repo, revision) } + var changes repoChanges + var err error updatedFilenames := make([]string, 0, 10) for _, line := range strings.Split(stdout, "\n") { line = strings.TrimSpace(line) @@ -169,7 +171,7 @@ func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revisio cmd := git.NewCommand(ctx, "ls-tree", "--full-tree", "-l", revision, "--") cmd.AddArguments(updatedFilenames...) - lsTreeStdout, err := cmd.RunInDirBytes(repo.RepoPath()) + lsTreeStdout, _, err := cmd.RunStdBytes(&git.RunOpts{Dir: repo.RepoPath()}) if err != nil { return nil, err } diff --git a/modules/indexer/code/indexer.go b/modules/indexer/code/indexer.go index d897fcccd5..f15b8d8651 100644 --- a/modules/indexer/code/indexer.go +++ b/modules/indexer/code/indexer.go @@ -7,6 +7,7 @@ package code import ( "context" "os" + "runtime/pprof" "strconv" "strings" "time" @@ -15,6 +16,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -116,7 +118,7 @@ func Init() { return } - ctx, cancel := context.WithCancel(context.Background()) + ctx, cancel, finished := process.GetManager().AddTypedContext(context.Background(), "Service: CodeIndexer", process.SystemProcessType, false) graceful.GetManager().RunAtTerminate(func() { select { @@ -128,9 +130,10 @@ func Init() { log.Debug("Closing repository indexer") indexer.Close() log.Info("PID: %d Repository Indexer closed", os.Getpid()) + finished() }) - waitChannel := make(chan time.Duration) + waitChannel := make(chan time.Duration, 1) // Create the Queue switch setting.Indexer.RepoType { @@ -172,6 +175,7 @@ func Init() { } go func() { + pprof.SetGoroutineLabels(ctx) start := time.Now() var ( rIndexer Indexer @@ -247,6 +251,7 @@ func Init() { if setting.Indexer.StartupTimeout > 0 { go func() { + pprof.SetGoroutineLabels(ctx) timeout := setting.Indexer.StartupTimeout if graceful.GetManager().IsChild() && setting.GracefulHammerTime > 0 { timeout += setting.GracefulHammerTime diff --git a/modules/indexer/code/indexer_test.go b/modules/indexer/code/indexer_test.go index d56c33653f..06923dfd25 100644 --- a/modules/indexer/code/indexer_test.go +++ 
b/modules/indexer/code/indexer_test.go @@ -18,7 +18,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } func testIndexer(name string, t *testing.T, indexer Indexer) { diff --git a/modules/indexer/issues/indexer.go b/modules/indexer/issues/indexer.go index 3aaa27eed2..d4df4f8a4f 100644 --- a/modules/indexer/issues/indexer.go +++ b/modules/indexer/issues/indexer.go @@ -8,6 +8,7 @@ import ( "context" "fmt" "os" + "runtime/pprof" "sync" "time" @@ -16,6 +17,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" @@ -100,7 +102,9 @@ var ( // InitIssueIndexer initialize issue indexer, syncReindex is true then reindex until // all issue index done. func InitIssueIndexer(syncReindex bool) { - waitChannel := make(chan time.Duration) + ctx, _, finished := process.GetManager().AddTypedContext(context.Background(), "Service: IssueIndexer", process.SystemProcessType, false) + + waitChannel := make(chan time.Duration, 1) // Create the Queue switch setting.Indexer.IssueType { @@ -165,6 +169,7 @@ func InitIssueIndexer(syncReindex bool) { // Create the Indexer go func() { + pprof.SetGoroutineLabels(ctx) start := time.Now() log.Info("PID %d: Initializing Issue Indexer: %s", os.Getpid(), setting.Indexer.IssueType) var populate bool @@ -193,11 +198,13 @@ func InitIssueIndexer(syncReindex bool) { if issueIndexer != nil { issueIndexer.Close() } + finished() log.Info("PID: %d Issue Indexer closed", os.Getpid()) }) log.Debug("Created Bleve Indexer") case "elasticsearch": graceful.GetManager().RunWithShutdownFns(func(_, atTerminate func(func())) { + pprof.SetGoroutineLabels(ctx) issueIndexer, err := NewElasticSearchIndexer(setting.Indexer.IssueConnStr, setting.Indexer.IssueIndexerName) if err != nil { log.Fatal("Unable to initialize Elastic Search Issue Indexer at connection: %s Error: %v", setting.Indexer.IssueConnStr, err) @@ -208,10 +215,12 @@ func InitIssueIndexer(syncReindex bool) { } populate = !exist holder.set(issueIndexer) + atTerminate(finished) }) case "db": issueIndexer := &DBIndexer{} holder.set(issueIndexer) + graceful.GetManager().RunAtTerminate(finished) default: holder.cancel() log.Fatal("Unknown issue indexer type: %s", setting.Indexer.IssueType) @@ -251,6 +260,7 @@ func InitIssueIndexer(syncReindex bool) { } } else if setting.Indexer.StartupTimeout > 0 { go func() { + pprof.SetGoroutineLabels(ctx) timeout := setting.Indexer.StartupTimeout if graceful.GetManager().IsChild() && setting.GracefulHammerTime > 0 { timeout += setting.GracefulHammerTime @@ -272,6 +282,8 @@ func InitIssueIndexer(syncReindex bool) { // populateIssueIndexer populate the issue indexer with issue data func populateIssueIndexer(ctx context.Context) { + ctx, _, finished := process.GetManager().AddTypedContext(ctx, "Service: PopulateIssueIndexer", process.SystemProcessType, true) + defer finished() for page := 1; ; page++ { select { case <-ctx.Done(): @@ -309,7 +321,7 @@ func populateIssueIndexer(ctx context.Context) { // UpdateRepoIndexer add/update all issues of the repositories func UpdateRepoIndexer(repo *repo_model.Repository) { is, err := models.Issues(&models.IssuesOptions{ - RepoIDs: []int64{repo.ID}, + RepoID: repo.ID, IsClosed: 
util.OptionalBoolNone, IsPull: util.OptionalBoolNone, }) diff --git a/modules/indexer/issues/indexer_test.go b/modules/indexer/issues/indexer_test.go index d516615b56..6bafcbdf24 100644 --- a/modules/indexer/issues/indexer_test.go +++ b/modules/indexer/issues/indexer_test.go @@ -23,7 +23,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } func TestBleveSearchIssues(t *testing.T) { diff --git a/modules/indexer/stats/db.go b/modules/indexer/stats/db.go index 513f2270da..bb3385ab63 100644 --- a/modules/indexer/stats/db.go +++ b/modules/indexer/stats/db.go @@ -35,7 +35,7 @@ func (db *DBIndexer) Index(id int64) error { return err } - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { if err.Error() == "no such file or directory" { return nil diff --git a/modules/indexer/stats/indexer_test.go b/modules/indexer/stats/indexer_test.go index 50c6cc38e9..c8bd8d1783 100644 --- a/modules/indexer/stats/indexer_test.go +++ b/modules/indexer/stats/indexer_test.go @@ -5,12 +5,15 @@ package stats import ( + "context" "path/filepath" "testing" "time" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/setting" _ "code.gitea.io/gitea/models" @@ -20,10 +23,16 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } func TestRepoStatsIndex(t *testing.T) { + if err := git.Init(context.Background()); !assert.NoError(t, err) { + return + } + assert.NoError(t, unittest.PrepareTestDatabase()) setting.Cfg = ini.Empty() @@ -32,10 +41,14 @@ func TestRepoStatsIndex(t *testing.T) { err := Init() assert.NoError(t, err) - time.Sleep(5 * time.Second) - repo, err := repo_model.GetRepositoryByID(1) assert.NoError(t, err) + + err = UpdateRepoIndexer(repo) + assert.NoError(t, err) + + queue.GetManager().FlushAll(context.Background(), 5*time.Second) + status, err := repo_model.GetIndexerStatus(repo, repo_model.RepoIndexerTypeStats) assert.NoError(t, err) assert.Equal(t, "65f1bf27bc3bf70f64657658635e66094edbcb4d", status.CommitSha) diff --git a/modules/json/json.go b/modules/json/json.go index 3afa86023c..4361262a2f 100644 --- a/modules/json/json.go +++ b/modules/json/json.go @@ -8,7 +8,7 @@ package json import ( "bytes" "encoding/binary" - "encoding/json" + "encoding/json" //nolint:depguard "io" jsoniter "github.com/json-iterator/go" diff --git a/modules/lfs/http_client_test.go b/modules/lfs/http_client_test.go index 0ffe663da5..8f6dcb1966 100644 --- a/modules/lfs/http_client_test.go +++ b/modules/lfs/http_client_test.go @@ -81,7 +81,7 @@ func lfsTestRoundtripHandler(req *http.Request) *http.Response { Objects: []*ObjectResponse{ { Error: &ObjectError{ - Code: 404, + Code: http.StatusNotFound, Message: "Object not found", }, }, diff --git a/modules/lfs/pointer_scanner_gogit.go b/modules/lfs/pointer_scanner_gogit.go index b4ba6fc133..ed27cb1f55 100644 --- a/modules/lfs/pointer_scanner_gogit.go +++ b/modules/lfs/pointer_scanner_gogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build gogit -// +build gogit package lfs diff --git a/modules/lfs/pointer_scanner_nogogit.go b/modules/lfs/pointer_scanner_nogogit.go index cdf88c51b0..d17f1f7b98 100644 --- a/modules/lfs/pointer_scanner_nogogit.go +++ b/modules/lfs/pointer_scanner_nogogit.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gogit -// +build !gogit package lfs diff --git a/modules/log/console_other.go b/modules/log/console_other.go new file mode 100644 index 0000000000..b5cac55b52 --- /dev/null +++ b/modules/log/console_other.go @@ -0,0 +1,21 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +//go:build !windows + +package log + +import ( + "os" + + "github.com/mattn/go-isatty" +) + +func init() { + // when running gitea as a systemd unit with logging set to console, the output can not be colorized, + // otherwise it spams the journal / syslog with escape sequences like "#033[0m#033[32mcmd/web.go:102:#033[32m" + // this file covers non-windows platforms. + CanColorStdout = isatty.IsTerminal(os.Stdout.Fd()) + CanColorStderr = isatty.IsTerminal(os.Stderr.Fd()) +} diff --git a/modules/log/event.go b/modules/log/event.go index b20dac17c7..f66ecd179b 100644 --- a/modules/log/event.go +++ b/modules/log/event.go @@ -5,9 +5,13 @@ package log import ( + "context" "fmt" + "runtime/pprof" "sync" "time" + + "code.gitea.io/gitea/modules/process" ) // Event represents a logging event @@ -34,6 +38,8 @@ type EventLogger interface { // ChannelledLog represents a cached channel to a LoggerProvider type ChannelledLog struct { + ctx context.Context + finished context.CancelFunc name string provider string queue chan *Event @@ -44,8 +50,9 @@ type ChannelledLog struct { } // NewChannelledLog a new logger instance with given logger provider and config. -func NewChannelledLog(name, provider, config string, bufferLength int64) (*ChannelledLog, error) { +func NewChannelledLog(parent context.Context, name, provider, config string, bufferLength int64) (*ChannelledLog, error) { if log, ok := providers[provider]; ok { + l := &ChannelledLog{ queue: make(chan *Event, bufferLength), flush: make(chan bool), @@ -58,6 +65,7 @@ func NewChannelledLog(name, provider, config string, bufferLength int64) (*Chann } l.name = name l.provider = provider + l.ctx, _, l.finished = process.GetManager().AddTypedContext(parent, fmt.Sprintf("Logger: %s(%s)", l.name, l.provider), process.SystemProcessType, false) go l.Start() return l, nil } @@ -66,6 +74,8 @@ func NewChannelledLog(name, provider, config string, bufferLength int64) (*Chann // Start processing the ChannelledLog func (l *ChannelledLog) Start() { + pprof.SetGoroutineLabels(l.ctx) + defer l.finished() for { select { case event, ok := <-l.queue: @@ -140,6 +150,8 @@ func (l *ChannelledLog) GetName() string { // MultiChannelledLog represents a cached channel to a LoggerProvider type MultiChannelledLog struct { + ctx context.Context + finished context.CancelFunc name string bufferLength int64 queue chan *Event @@ -156,7 +168,11 @@ type MultiChannelledLog struct { // NewMultiChannelledLog a new logger instance with given logger provider and config. 
func NewMultiChannelledLog(name string, bufferLength int64) *MultiChannelledLog { + ctx, _, finished := process.GetManager().AddTypedContext(context.Background(), fmt.Sprintf("Logger: %s", name), process.SystemProcessType, false) + m := &MultiChannelledLog{ + ctx: ctx, + finished: finished, name: name, queue: make(chan *Event, bufferLength), flush: make(chan bool), @@ -277,6 +293,9 @@ func (m *MultiChannelledLog) Start() { m.rwmutex.Unlock() return } + pprof.SetGoroutineLabels(m.ctx) + defer m.finished() + m.started = true m.rwmutex.Unlock() paused := false diff --git a/modules/log/groutinelabel.go b/modules/log/groutinelabel.go new file mode 100644 index 0000000000..0d3739fd98 --- /dev/null +++ b/modules/log/groutinelabel.go @@ -0,0 +1,20 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package log + +import "unsafe" + +//go:linkname runtime_getProfLabel runtime/pprof.runtime_getProfLabel +func runtime_getProfLabel() unsafe.Pointer // nolint + +type labelMap map[string]string + +func getGoroutineLabels() map[string]string { + l := (*labelMap)(runtime_getProfLabel()) + if l == nil { + return nil + } + return *l +} diff --git a/modules/log/groutinelabel_test.go b/modules/log/groutinelabel_test.go new file mode 100644 index 0000000000..8e23721b86 --- /dev/null +++ b/modules/log/groutinelabel_test.go @@ -0,0 +1,34 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package log + +import ( + "context" + "runtime/pprof" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_getGoroutineLabels(t *testing.T) { + pprof.Do(context.Background(), pprof.Labels(), func(ctx context.Context) { + currentLabels := getGoroutineLabels() + pprof.ForLabels(ctx, func(key, value string) bool { + assert.EqualValues(t, value, currentLabels[key]) + return true + }) + + pprof.Do(ctx, pprof.Labels("Test_getGoroutineLabels", "Test_getGoroutineLabels_child1"), func(ctx context.Context) { + currentLabels := getGoroutineLabels() + pprof.ForLabels(ctx, func(key, value string) bool { + assert.EqualValues(t, value, currentLabels[key]) + return true + }) + if assert.NotNil(t, currentLabels) { + assert.EqualValues(t, "Test_getGoroutineLabels_child1", currentLabels["Test_getGoroutineLabels"]) + } + }) + }) +} diff --git a/modules/log/multichannel.go b/modules/log/multichannel.go index c725df4f3e..273df81df1 100644 --- a/modules/log/multichannel.go +++ b/modules/log/multichannel.go @@ -31,7 +31,7 @@ func newLogger(name string, buffer int64) *MultiChannelledLogger { // SetLogger sets new logger instance with given logger provider and config. func (l *MultiChannelledLogger) SetLogger(name, provider, config string) error { - eventLogger, err := NewChannelledLog(name, provider, config, l.bufferLength) + eventLogger, err := NewChannelledLog(l.ctx, name, provider, config, l.bufferLength) if err != nil { return fmt.Errorf("Failed to create sublogger (%s): %v", name, err) } @@ -72,6 +72,13 @@ func (l *MultiChannelledLogger) Log(skip int, level Level, format string, v ...i if len(v) > 0 { msg = ColorSprintf(format, v...) 
} + labels := getGoroutineLabels() + if labels != nil { + pid, ok := labels["pid"] + if ok { + msg = "[" + ColorString(FgHiYellow) + pid + ColorString(Reset) + "] " + msg + } + } stack := "" if l.GetStacktraceLevel() <= level { stack = Stack(skip + 1) diff --git a/modules/markup/camo.go b/modules/markup/camo.go new file mode 100644 index 0000000000..f804447f2d --- /dev/null +++ b/modules/markup/camo.go @@ -0,0 +1,47 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package markup + +import ( + "crypto/hmac" + "crypto/sha1" + "encoding/base64" + "net/url" + "strings" + + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" +) + +// CamoEncode encodes a lnk to fit with the go-camo and camo proxy links. The purposes of camo-proxy are: +// 1. Allow accessing "http://" images on a HTTPS site by using the "https://" URLs provided by camo-proxy. +// 2. Hide the visitor's real IP (protect privacy) when accessing external images. +func CamoEncode(link string) string { + if strings.HasPrefix(link, setting.Camo.ServerURL) { + return link + } + + mac := hmac.New(sha1.New, []byte(setting.Camo.HMACKey)) + _, _ = mac.Write([]byte(link)) // hmac does not return errors + macSum := b64encode(mac.Sum(nil)) + encodedURL := b64encode([]byte(link)) + + return util.URLJoin(setting.Camo.ServerURL, macSum, encodedURL) +} + +func b64encode(data []byte) string { + return strings.TrimRight(base64.URLEncoding.EncodeToString(data), "=") +} + +func camoHandleLink(link string) string { + if setting.Camo.Enabled { + lnkURL, err := url.Parse(link) + if err == nil && lnkURL.IsAbs() && !strings.HasPrefix(link, setting.AppURL) && + (setting.Camo.Allways || lnkURL.Scheme != "https") { + return CamoEncode(link) + } + } + return link +} diff --git a/modules/markup/camo_test.go b/modules/markup/camo_test.go new file mode 100644 index 0000000000..cc917039d8 --- /dev/null +++ b/modules/markup/camo_test.go @@ -0,0 +1,45 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package markup + +import ( + "testing" + + "code.gitea.io/gitea/modules/setting" + + "github.com/stretchr/testify/assert" +) + +func TestCamoHandleLink(t *testing.T) { + setting.AppURL = "https://gitea.com" + // Test media proxy + setting.Camo.Enabled = true + setting.Camo.ServerURL = "https://image.proxy" + setting.Camo.HMACKey = "geheim" + + assert.Equal(t, + "https://gitea.com/img.jpg", + camoHandleLink("https://gitea.com/img.jpg")) + assert.Equal(t, + "https://testimages.org/img.jpg", + camoHandleLink("https://testimages.org/img.jpg")) + assert.Equal(t, + "https://image.proxy/eivin43gJwGVIjR9MiYYtFIk0mw/aHR0cDovL3Rlc3RpbWFnZXMub3JnL2ltZy5qcGc", + camoHandleLink("http://testimages.org/img.jpg")) + + setting.Camo.Allways = true + assert.Equal(t, + "https://gitea.com/img.jpg", + camoHandleLink("https://gitea.com/img.jpg")) + assert.Equal(t, + "https://image.proxy/tkdlvmqpbIr7SjONfHNgEU622y0/aHR0cHM6Ly90ZXN0aW1hZ2VzLm9yZy9pbWcuanBn", + camoHandleLink("https://testimages.org/img.jpg")) + assert.Equal(t, + "https://image.proxy/eivin43gJwGVIjR9MiYYtFIk0mw/aHR0cDovL3Rlc3RpbWFnZXMub3JnL2ltZy5qcGc", + camoHandleLink("http://testimages.org/img.jpg")) + + // Restore previous settings + setting.Camo.Enabled = false +} diff --git a/modules/markup/html.go b/modules/markup/html.go index 758746ef87..c5d36e701f 100644 --- a/modules/markup/html.go +++ b/modules/markup/html.go @@ -21,9 +21,9 @@ import ( "code.gitea.io/gitea/modules/markup/common" "code.gitea.io/gitea/modules/references" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/templates/vars" "code.gitea.io/gitea/modules/util" - "github.com/unknwon/com" "golang.org/x/net/html" "golang.org/x/net/html/atom" "mvdan.cc/xurls/v2" @@ -348,8 +348,7 @@ func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output // Render everything to buf. for _, node := range newNodes { - err = html.Render(output, node) - if err != nil { + if err := html.Render(output, node); err != nil { return &postProcessError{"error rendering processed HTML", err} } } @@ -387,6 +386,7 @@ func visitNode(ctx *RenderContext, procs, textProcs []processor, node *html.Node attr.Val = util.URLJoin(prefix, attr.Val) } + attr.Val = camoHandleLink(attr.Val) node.Attr[i] = attr } } else if node.Data == "a" { @@ -838,7 +838,14 @@ func issueIndexPatternProcessor(ctx *RenderContext, node *html.Node) { reftext := node.Data[ref.RefLocation.Start:ref.RefLocation.End] if exttrack && !ref.IsPull { ctx.Metas["index"] = ref.Issue - link = createLink(com.Expand(ctx.Metas["format"], ctx.Metas), reftext, "ref-issue ref-external-issue") + + res, err := vars.Expand(ctx.Metas["format"], ctx.Metas) + if err != nil { + // here we could just log the error and continue the rendering + log.Error("unable to expand template vars for ref %s, err: %v", ref.Issue, err) + } + + link = createLink(res, reftext, "ref-issue ref-external-issue") } else { // Path determines the type of link that will be rendered. It's unknown at this point whether // the linked item is actually a PR or an issue. 
Luckily it's of no real consequence because @@ -1079,7 +1086,7 @@ func sha1CurrentPatternProcessor(ctx *RenderContext, node *html.Node) { if !inCache { if ctx.GitRepo == nil { var err error - ctx.GitRepo, err = git.OpenRepositoryCtx(ctx.Ctx, ctx.Metas["repoPath"]) + ctx.GitRepo, err = git.OpenRepository(ctx.Ctx, ctx.Metas["repoPath"]) if err != nil { log.Error("unable to open repository: %s Error: %v", ctx.Metas["repoPath"], err) return diff --git a/modules/markup/markdown/markdown_test.go b/modules/markup/markdown/markdown_test.go index 54c2ea87d6..a069d402bb 100644 --- a/modules/markup/markdown/markdown_test.go +++ b/modules/markup/markdown/markdown_test.go @@ -199,6 +199,11 @@ func testAnswers(baseURLContent, baseURLImages string) []string { +`, `
+<ul>
+<li class="task-list-item"><input type="checkbox" disabled=""/> If you want to rebase/retry this PR, click this checkbox.</li>
+</ul>
+<hr/>
+<p>This PR has been generated by <a href="https://github.com/renovatebot/renovate" rel="nofollow">Renovate Bot</a>.</p>
`, } } @@ -271,6 +276,14 @@ Here is a simple footnote,[^1] and here is a longer one.[^bignote] Add as many paragraphs as you like. `, + ` +- [ ] If you want to rebase/retry this PR, click this checkbox. + +--- + +This PR has been generated by [Renovate Bot](https://github.com/renovatebot/renovate). + +`, } func TestTotal_RenderWiki(t *testing.T) { diff --git a/modules/markup/markdown/toc.go b/modules/markup/markdown/toc.go index 189821c341..9d11b771f7 100644 --- a/modules/markup/markdown/toc.go +++ b/modules/markup/markdown/toc.go @@ -8,7 +8,8 @@ import ( "fmt" "net/url" - "github.com/unknwon/i18n" + "code.gitea.io/gitea/modules/translation/i18n" + "github.com/yuin/goldmark/ast" ) diff --git a/modules/migration/comment.go b/modules/migration/comment.go index f364ffc93a..0447689b74 100644 --- a/modules/migration/comment.go +++ b/modules/migration/comment.go @@ -7,6 +7,13 @@ package migration import "time" +// Commentable can be commented upon +type Commentable interface { + GetLocalIndex() int64 + GetForeignIndex() int64 + GetContext() DownloaderContext +} + // Comment is a standard comment information type Comment struct { IssueIndex int64 `yaml:"issue_index"` diff --git a/modules/migration/downloader.go b/modules/migration/downloader.go index 90e149fb1a..7759c96056 100644 --- a/modules/migration/downloader.go +++ b/modules/migration/downloader.go @@ -11,13 +11,6 @@ import ( "code.gitea.io/gitea/modules/structs" ) -// GetCommentOptions represents an options for get comment -type GetCommentOptions struct { - Context IssueContext - Page int - PageSize int -} - // Downloader downloads the site repo information type Downloader interface { SetContext(context.Context) @@ -27,10 +20,11 @@ type Downloader interface { GetReleases() ([]*Release, error) GetLabels() ([]*Label, error) GetIssues(page, perPage int) ([]*Issue, bool, error) - GetComments(opts GetCommentOptions) ([]*Comment, bool, error) + GetComments(commentable Commentable) ([]*Comment, bool, error) + GetAllComments(page, perPage int) ([]*Comment, bool, error) SupportGetRepoComments() bool GetPullRequests(page, perPage int) ([]*PullRequest, bool, error) - GetReviews(pullRequestContext IssueContext) ([]*Review, error) + GetReviews(reviewable Reviewable) ([]*Review, error) FormatCloneURL(opts MigrateOptions, remoteAddr string) (string, error) } @@ -39,3 +33,6 @@ type DownloaderFactory interface { New(ctx context.Context, opts MigrateOptions) (Downloader, error) GitServiceType() structs.GitServiceType } + +// DownloaderContext has opaque information only relevant to a given downloader +type DownloaderContext interface{} diff --git a/modules/migration/issue.go b/modules/migration/issue.go index 984f07d8c9..78f648dd2d 100644 --- a/modules/migration/issue.go +++ b/modules/migration/issue.go @@ -7,44 +7,26 @@ package migration import "time" -// IssueContext is used to map between local and foreign issue/PR ids. -type IssueContext interface { - LocalID() int64 - ForeignID() int64 -} - -// BasicIssueContext is a 1:1 mapping between local and foreign ids. -type BasicIssueContext int64 - -// LocalID gets the local id. -func (c BasicIssueContext) LocalID() int64 { - return int64(c) -} - -// ForeignID gets the foreign id. 
-func (c BasicIssueContext) ForeignID() int64 { - return int64(c) -} - // Issue is a standard issue information type Issue struct { - Number int64 `json:"number"` - PosterID int64 `yaml:"poster_id" json:"poster_id"` - PosterName string `yaml:"poster_name" json:"poster_name"` - PosterEmail string `yaml:"poster_email" json:"poster_email"` - Title string `json:"title"` - Content string `json:"content"` - Ref string `json:"ref"` - Milestone string `json:"milestone"` - State string `json:"state"` // closed, open - IsLocked bool `yaml:"is_locked" json:"is_locked"` - Created time.Time `json:"created"` - Updated time.Time `json:"updated"` - Closed *time.Time `json:"closed"` - Labels []*Label `json:"labels"` - Reactions []*Reaction `json:"reactions"` - Assignees []string `json:"assignees"` - Context IssueContext `yaml:"-"` + Number int64 `json:"number"` + PosterID int64 `yaml:"poster_id" json:"poster_id"` + PosterName string `yaml:"poster_name" json:"poster_name"` + PosterEmail string `yaml:"poster_email" json:"poster_email"` + Title string `json:"title"` + Content string `json:"content"` + Ref string `json:"ref"` + Milestone string `json:"milestone"` + State string `json:"state"` // closed, open + IsLocked bool `yaml:"is_locked" json:"is_locked"` + Created time.Time `json:"created"` + Updated time.Time `json:"updated"` + Closed *time.Time `json:"closed"` + Labels []*Label `json:"labels"` + Reactions []*Reaction `json:"reactions"` + Assignees []string `json:"assignees"` + ForeignIndex int64 `json:"foreign_id"` + Context DownloaderContext `yaml:"-"` } // GetExternalName ExternalUserMigrated interface @@ -52,3 +34,7 @@ func (i *Issue) GetExternalName() string { return i.PosterName } // GetExternalID ExternalUserMigrated interface func (i *Issue) GetExternalID() int64 { return i.PosterID } + +func (i *Issue) GetLocalIndex() int64 { return i.Number } +func (i *Issue) GetForeignIndex() int64 { return i.ForeignIndex } +func (i *Issue) GetContext() DownloaderContext { return i.Context } diff --git a/modules/migration/null_downloader.go b/modules/migration/null_downloader.go index 6192870873..32da720f16 100644 --- a/modules/migration/null_downloader.go +++ b/modules/migration/null_downloader.go @@ -47,18 +47,23 @@ func (n NullDownloader) GetIssues(page, perPage int) ([]*Issue, bool, error) { return nil, false, &ErrNotSupported{Entity: "Issues"} } -// GetComments returns comments according the options -func (n NullDownloader) GetComments(GetCommentOptions) ([]*Comment, bool, error) { +// GetComments returns comments of an issue or PR +func (n NullDownloader) GetComments(commentable Commentable) ([]*Comment, bool, error) { return nil, false, &ErrNotSupported{Entity: "Comments"} } +// GetAllComments returns paginated comments +func (n NullDownloader) GetAllComments(page, perPage int) ([]*Comment, bool, error) { + return nil, false, &ErrNotSupported{Entity: "AllComments"} +} + // GetPullRequests returns pull requests according page and perPage func (n NullDownloader) GetPullRequests(page, perPage int) ([]*PullRequest, bool, error) { return nil, false, &ErrNotSupported{Entity: "PullRequests"} } // GetReviews returns pull requests review -func (n NullDownloader) GetReviews(pullRequestContext IssueContext) ([]*Review, error) { +func (n NullDownloader) GetReviews(reviewable Reviewable) ([]*Review, error) { return nil, &ErrNotSupported{Entity: "Reviews"} } diff --git a/modules/migration/pullrequest.go b/modules/migration/pullrequest.go index 7a681940a7..eaa0dd45e2 100644 --- a/modules/migration/pullrequest.go +++ 
b/modules/migration/pullrequest.go @@ -35,9 +35,14 @@ type PullRequest struct { Assignees []string IsLocked bool `yaml:"is_locked"` Reactions []*Reaction - Context IssueContext `yaml:"-"` + ForeignIndex int64 + Context DownloaderContext `yaml:"-"` } +func (p *PullRequest) GetLocalIndex() int64 { return p.Number } +func (p *PullRequest) GetForeignIndex() int64 { return p.ForeignIndex } +func (p *PullRequest) GetContext() DownloaderContext { return p.Context } + // IsForkPullRequest returns true if the pull request from a forked repository but not the same repository func (p *PullRequest) IsForkPullRequest() bool { return p.Head.RepoPath() != p.Base.RepoPath() diff --git a/modules/migration/retry_downloader.go b/modules/migration/retry_downloader.go index 1095a26891..2e40c102be 100644 --- a/modules/migration/retry_downloader.go +++ b/modules/migration/retry_downloader.go @@ -148,7 +148,7 @@ func (d *RetryDownloader) GetIssues(page, perPage int) ([]*Issue, bool, error) { } // GetComments returns a repository's comments with retry -func (d *RetryDownloader) GetComments(opts GetCommentOptions) ([]*Comment, bool, error) { +func (d *RetryDownloader) GetComments(commentable Commentable) ([]*Comment, bool, error) { var ( comments []*Comment isEnd bool @@ -156,7 +156,7 @@ func (d *RetryDownloader) GetComments(opts GetCommentOptions) ([]*Comment, bool, ) err = d.retry(func() error { - comments, isEnd, err = d.Downloader.GetComments(opts) + comments, isEnd, err = d.Downloader.GetComments(commentable) return err }) @@ -180,14 +180,14 @@ func (d *RetryDownloader) GetPullRequests(page, perPage int) ([]*PullRequest, bo } // GetReviews returns pull requests reviews -func (d *RetryDownloader) GetReviews(pullRequestContext IssueContext) ([]*Review, error) { +func (d *RetryDownloader) GetReviews(reviewable Reviewable) ([]*Review, error) { var ( reviews []*Review err error ) err = d.retry(func() error { - reviews, err = d.Downloader.GetReviews(pullRequestContext) + reviews, err = d.Downloader.GetReviews(reviewable) return err }) diff --git a/modules/migration/review.go b/modules/migration/review.go index 85795385e9..e4db33d98f 100644 --- a/modules/migration/review.go +++ b/modules/migration/review.go @@ -6,6 +6,12 @@ package migration import "time" +// Reviewable can be reviewed +type Reviewable interface { + GetLocalIndex() int64 + GetForeignIndex() int64 +} + // enumerate all review states const ( ReviewStatePending = "PENDING" diff --git a/modules/migration/schemas_bindata.go b/modules/migration/schemas_bindata.go index d0fef698b4..febe0f75c0 100644 --- a/modules/migration/schemas_bindata.go +++ b/modules/migration/schemas_bindata.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build bindata -// +build bindata package migration diff --git a/modules/migration/schemas_dynamic.go b/modules/migration/schemas_dynamic.go index c883fafe98..1b767b2e72 100644 --- a/modules/migration/schemas_dynamic.go +++ b/modules/migration/schemas_dynamic.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !bindata -// +build !bindata package migration diff --git a/modules/migration/schemas_static.go b/modules/migration/schemas_static.go index 10c83b313a..02957fc4ed 100644 --- a/modules/migration/schemas_static.go +++ b/modules/migration/schemas_static.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
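Taken together, the migration hunks above replace the old GetCommentOptions/IssueContext plumbing with the new Commentable and Reviewable interfaces, which Issue and PullRequest now satisfy via GetLocalIndex/GetForeignIndex/GetContext. A minimal caller sketch, using only the Downloader methods shown in this diff (the syncComments helper itself is hypothetical):

import "code.gitea.io/gitea/modules/migration"

// syncComments demonstrates that a *migration.Issue can be handed straight to
// GetComments, since it implements Commentable; no GetCommentOptions value is
// built any more.
func syncComments(d migration.Downloader) error {
	issues, _, err := d.GetIssues(1, 50)
	if err != nil {
		return err
	}
	for _, issue := range issues {
		if _, _, err := d.GetComments(issue); err != nil {
			return err
		}
	}
	return nil
}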
//go:build bindata -// +build bindata package migration diff --git a/modules/nosql/manager.go b/modules/nosql/manager.go index a89b5bb633..93338fdc3f 100644 --- a/modules/nosql/manager.go +++ b/modules/nosql/manager.go @@ -5,10 +5,13 @@ package nosql import ( + "context" "strconv" "sync" "time" + "code.gitea.io/gitea/modules/process" + "github.com/go-redis/redis/v8" "github.com/syndtr/goleveldb/leveldb" ) @@ -17,7 +20,9 @@ var manager *Manager // Manager is the nosql connection manager type Manager struct { - mutex sync.Mutex + ctx context.Context + finished context.CancelFunc + mutex sync.Mutex RedisConnections map[string]*redisClientHolder LevelDBConnections map[string]*levelDBHolder @@ -46,7 +51,10 @@ func init() { // GetManager returns a Manager and initializes one as singleton is there's none yet func GetManager() *Manager { if manager == nil { + ctx, _, finished := process.GetManager().AddTypedContext(context.Background(), "Service: NoSQL", process.SystemProcessType, false) manager = &Manager{ + ctx: ctx, + finished: finished, RedisConnections: make(map[string]*redisClientHolder), LevelDBConnections: make(map[string]*levelDBHolder), } diff --git a/modules/nosql/manager_leveldb.go b/modules/nosql/manager_leveldb.go index de4ef14d7d..d69ae88800 100644 --- a/modules/nosql/manager_leveldb.go +++ b/modules/nosql/manager_leveldb.go @@ -7,6 +7,7 @@ package nosql import ( "fmt" "path" + "runtime/pprof" "strconv" "strings" @@ -50,7 +51,31 @@ func (m *Manager) CloseLevelDB(connection string) error { } // GetLevelDB gets a levelDB for a particular connection -func (m *Manager) GetLevelDB(connection string) (*leveldb.DB, error) { +func (m *Manager) GetLevelDB(connection string) (db *leveldb.DB, err error) { + // Because we want associate any goroutines created by this call to the main nosqldb context we need to + // wrap this in a goroutine labelled with the nosqldb context + done := make(chan struct{}) + var recovered interface{} + go func() { + defer func() { + recovered = recover() + if recovered != nil { + log.Critical("PANIC during GetLevelDB: %v\nStacktrace: %s", recovered, log.Stack(2)) + } + close(done) + }() + pprof.SetGoroutineLabels(m.ctx) + + db, err = m.getLevelDB(connection) + }() + <-done + if recovered != nil { + panic(recovered) + } + return +} + +func (m *Manager) getLevelDB(connection string) (*leveldb.DB, error) { // Convert the provided connection description to the common format uri := ToLevelDBURI(connection) @@ -168,15 +193,18 @@ func (m *Manager) GetLevelDB(connection string) (*leveldb.DB, error) { if err != nil { if !errors.IsCorrupted(err) { if strings.Contains(err.Error(), "resource temporarily unavailable") { - return nil, fmt.Errorf("unable to lock level db at %s: %w", dataDir, err) + err = fmt.Errorf("unable to lock level db at %s: %w", dataDir, err) + return nil, err } - return nil, fmt.Errorf("unable to open level db at %s: %w", dataDir, err) - } - db.db, err = leveldb.RecoverFile(dataDir, opts) - if err != nil { + err = fmt.Errorf("unable to open level db at %s: %w", dataDir, err) return nil, err } + db.db, err = leveldb.RecoverFile(dataDir, opts) + } + + if err != nil { + return nil, err } for _, name := range db.name { diff --git a/modules/nosql/manager_redis.go b/modules/nosql/manager_redis.go index b4852cecc8..b82f899db0 100644 --- a/modules/nosql/manager_redis.go +++ b/modules/nosql/manager_redis.go @@ -6,10 +6,14 @@ package nosql import ( "crypto/tls" + "net/url" "path" + "runtime/pprof" "strconv" "strings" + "code.gitea.io/gitea/modules/log" + 
"github.com/go-redis/redis/v8" ) @@ -40,7 +44,31 @@ func (m *Manager) CloseRedisClient(connection string) error { } // GetRedisClient gets a redis client for a particular connection -func (m *Manager) GetRedisClient(connection string) redis.UniversalClient { +func (m *Manager) GetRedisClient(connection string) (client redis.UniversalClient) { + // Because we want associate any goroutines created by this call to the main nosqldb context we need to + // wrap this in a goroutine labelled with the nosqldb context + done := make(chan struct{}) + var recovered interface{} + go func() { + defer func() { + recovered = recover() + if recovered != nil { + log.Critical("PANIC during GetRedisClient: %v\nStacktrace: %s", recovered, log.Stack(2)) + } + close(done) + }() + pprof.SetGoroutineLabels(m.ctx) + + client = m.getRedisClient(connection) + }() + <-done + if recovered != nil { + panic(recovered) + } + return +} + +func (m *Manager) getRedisClient(connection string) redis.UniversalClient { m.mutex.Lock() defer m.mutex.Unlock() client, ok := m.RedisConnections[connection] @@ -59,8 +87,59 @@ func (m *Manager) GetRedisClient(connection string) redis.UniversalClient { name: []string{connection, uri.String()}, } + opts := getRedisOptions(uri) + tlsConfig := getRedisTLSOptions(uri) + + clientName := uri.Query().Get("clientname") + + if len(clientName) > 0 { + client.name = append(client.name, clientName) + } + + switch uri.Scheme { + case "redis+sentinels": + fallthrough + case "rediss+sentinel": + opts.TLSConfig = tlsConfig + fallthrough + case "redis+sentinel": + client.UniversalClient = redis.NewFailoverClient(opts.Failover()) + case "redis+clusters": + fallthrough + case "rediss+cluster": + opts.TLSConfig = tlsConfig + fallthrough + case "redis+cluster": + client.UniversalClient = redis.NewClusterClient(opts.Cluster()) + case "redis+socket": + simpleOpts := opts.Simple() + simpleOpts.Network = "unix" + simpleOpts.Addr = path.Join(uri.Host, uri.Path) + client.UniversalClient = redis.NewClient(simpleOpts) + case "rediss": + opts.TLSConfig = tlsConfig + fallthrough + case "redis": + client.UniversalClient = redis.NewClient(opts.Simple()) + default: + return nil + } + + for _, name := range client.name { + m.RedisConnections[name] = client + } + + client.count++ + + return client +} + +// getRedisOptions pulls various configuration options based on the RedisUri format and converts them to go-redis's +// UniversalOptions fields. This function explicitly excludes fields related to TLS configuration, which is +// conditionally attached to this options struct before being converted to the specific type for the redis scheme being +// used, and only in scenarios where TLS is applicable (e.g. rediss://, redis+clusters://). 
+func getRedisOptions(uri *url.URL) *redis.UniversalOptions { opts := &redis.UniversalOptions{} - tlsConfig := &tls.Config{} // Handle username/password if password, ok := uri.User.Password(); ok { @@ -131,75 +210,54 @@ func (m *Manager) GetRedisClient(connection string) redis.UniversalClient { fallthrough case "mastername": opts.MasterName = v[0] - case "skipverify": - fallthrough - case "insecureskipverify": - insecureSkipVerify, _ := strconv.ParseBool(v[0]) - tlsConfig.InsecureSkipVerify = insecureSkipVerify - case "clientname": - client.name = append(client.name, v[0]) + case "sentinelusername": + opts.SentinelUsername = v[0] + case "sentinelpassword": + opts.SentinelPassword = v[0] } } - switch uri.Scheme { - case "redis+sentinels": - fallthrough - case "rediss+sentinel": - opts.TLSConfig = tlsConfig - fallthrough - case "redis+sentinel": - if uri.Host != "" { - opts.Addrs = append(opts.Addrs, strings.Split(uri.Host, ",")...) - } - if uri.Path != "" { - if db, err := strconv.Atoi(uri.Path[1:]); err == nil { - opts.DB = db - } - } - - client.UniversalClient = redis.NewFailoverClient(opts.Failover()) - case "redis+clusters": - fallthrough - case "rediss+cluster": - opts.TLSConfig = tlsConfig - fallthrough - case "redis+cluster": - if uri.Host != "" { - opts.Addrs = append(opts.Addrs, strings.Split(uri.Host, ",")...) - } - if uri.Path != "" { - if db, err := strconv.Atoi(uri.Path[1:]); err == nil { - opts.DB = db - } - } - client.UniversalClient = redis.NewClusterClient(opts.Cluster()) - case "redis+socket": - simpleOpts := opts.Simple() - simpleOpts.Network = "unix" - simpleOpts.Addr = path.Join(uri.Host, uri.Path) - client.UniversalClient = redis.NewClient(simpleOpts) - case "rediss": - opts.TLSConfig = tlsConfig - fallthrough - case "redis": - if uri.Host != "" { - opts.Addrs = append(opts.Addrs, strings.Split(uri.Host, ",")...) - } - if uri.Path != "" { - if db, err := strconv.Atoi(uri.Path[1:]); err == nil { - opts.DB = db - } - } - client.UniversalClient = redis.NewClient(opts.Simple()) - default: - return nil + if uri.Host != "" { + opts.Addrs = append(opts.Addrs, strings.Split(uri.Host, ",")...) } - for _, name := range client.name { - m.RedisConnections[name] = client + // A redis connection string uses the path section of the URI in two different ways. In a TCP-based connection, the + // path will be a database index to automatically have the client SELECT. In a Unix socket connection, it will be the + // file path. We only want to try to coerce this to the database index when we're not expecting a file path so that + // the error log stays clean. + if uri.Path != "" && uri.Scheme != "redis+socket" { + if db, err := strconv.Atoi(uri.Path[1:]); err == nil { + opts.DB = db + } else { + log.Error("Provided database identifier '%s' is not a valid integer. Gitea will ignore this option.", uri.Path) + } } - client.count++ - - return client + return opts +} + +// getRedisTlsOptions parses RedisUri TLS configuration parameters and converts them to the go TLS configuration +// equivalent fields. 
+func getRedisTLSOptions(uri *url.URL) *tls.Config {
+	tlsConfig := &tls.Config{}
+
+	skipverify := uri.Query().Get("skipverify")
+
+	if len(skipverify) > 0 {
+		skipverify, err := strconv.ParseBool(skipverify)
+		if err == nil {
+			tlsConfig.InsecureSkipVerify = skipverify
+		}
+	}
+
+	insecureskipverify := uri.Query().Get("insecureskipverify")
+
+	if len(insecureskipverify) > 0 {
+		insecureskipverify, err := strconv.ParseBool(insecureskipverify)
+		if err == nil {
+			tlsConfig.InsecureSkipVerify = insecureskipverify
+		}
+	}
+
+	return tlsConfig
+}
diff --git a/modules/nosql/manager_redis_test.go b/modules/nosql/manager_redis_test.go
new file mode 100644
index 0000000000..3d94532135
--- /dev/null
+++ b/modules/nosql/manager_redis_test.go
@@ -0,0 +1,64 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
+package nosql
+
+import (
+	"net/url"
+	"testing"
+)
+
+func TestRedisUsernameOpt(t *testing.T) {
+	uri, _ := url.Parse("redis://redis:password@myredis/0")
+	opts := getRedisOptions(uri)
+
+	if opts.Username != "redis" {
+		t.Fail()
+	}
+}
+
+func TestRedisPasswordOpt(t *testing.T) {
+	uri, _ := url.Parse("redis://redis:password@myredis/0")
+	opts := getRedisOptions(uri)
+
+	if opts.Password != "password" {
+		t.Fail()
+	}
+}
+
+func TestRedisSentinelUsernameOpt(t *testing.T) {
+	uri, _ := url.Parse("redis+sentinel://redis:password@myredis/0?sentinelusername=suser&sentinelpassword=spass")
+	opts := getRedisOptions(uri).Failover()
+
+	if opts.SentinelUsername != "suser" {
+		t.Fail()
+	}
+}
+
+func TestRedisSentinelPasswordOpt(t *testing.T) {
+	uri, _ := url.Parse("redis+sentinel://redis:password@myredis/0?sentinelusername=suser&sentinelpassword=spass")
+	opts := getRedisOptions(uri).Failover()
+
+	if opts.SentinelPassword != "spass" {
+		t.Fail()
+	}
+}
+
+func TestRedisDatabaseIndexTcp(t *testing.T) {
+	uri, _ := url.Parse("redis://redis:password@myredis/12")
+	opts := getRedisOptions(uri)
+
+	if opts.DB != 12 {
+		t.Fail()
+	}
+}
+
+func TestRedisDatabaseIndexUnix(t *testing.T) {
+	uri, _ := url.Parse("redis+socket:///var/run/redis.sock?database=12")
+	opts := getRedisOptions(uri)
+
+	if opts.DB != 12 {
+		t.Fail()
+	}
+}
diff --git a/modules/notification/action/action.go b/modules/notification/action/action.go
index bab28db475..547498a9dc 100644
--- a/modules/notification/action/action.go
+++ b/modules/notification/action/action.go
@@ -10,6 +10,7 @@ import (
 	"strings"
 	"code.gitea.io/gitea/models"
+	"code.gitea.io/gitea/models/db"
 	repo_model "code.gitea.io/gitea/models/repo"
 	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/graceful"
@@ -37,7 +38,7 @@ func (a *actionNotifier) NotifyNewIssue(issue *models.Issue, mentions []*user_mo
 		log.Error("issue.LoadPoster: %v", err)
 		return
 	}
-	if err := issue.LoadRepo(); err != nil {
+	if err := issue.LoadRepo(db.DefaultContext); err != nil {
 		log.Error("issue.LoadRepo: %v", err)
 		return
 	}
@@ -130,7 +131,7 @@ func (a *actionNotifier) NotifyNewPullRequest(pull *models.PullRequest, mentions
 		log.Error("pull.LoadIssue: %v", err)
 		return
 	}
-	if err := pull.Issue.LoadRepo(); err != nil {
+	if err := pull.Issue.LoadRepo(db.DefaultContext); err != nil {
 		log.Error("pull.Issue.LoadRepo: %v", err)
 		return
 	}
diff --git a/modules/notification/action/action_test.go b/modules/notification/action/action_test.go
index 3664b82104..2898c8ec3d 100644
--- a/modules/notification/action/action_test.go
+++ b/modules/notification/action/action_test.go
@@ -18,7 +18,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } func TestRenameRepoAction(t *testing.T) { diff --git a/modules/notification/base/notifier.go b/modules/notification/base/notifier.go index 8174741169..2b8be18ad3 100644 --- a/modules/notification/base/notifier.go +++ b/modules/notification/base/notifier.go @@ -6,6 +6,7 @@ package base import ( "code.gitea.io/gitea/models" + packages_model "code.gitea.io/gitea/models/packages" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/repository" @@ -54,4 +55,6 @@ type Notifier interface { NotifySyncCreateRef(doer *user_model.User, repo *repo_model.Repository, refType, refFullName, refID string) NotifySyncDeleteRef(doer *user_model.User, repo *repo_model.Repository, refType, refFullName string) NotifyRepoPendingTransfer(doer, newOwner *user_model.User, repo *repo_model.Repository) + NotifyPackageCreate(doer *user_model.User, pd *packages_model.PackageDescriptor) + NotifyPackageDelete(doer *user_model.User, pd *packages_model.PackageDescriptor) } diff --git a/modules/notification/base/null.go b/modules/notification/base/null.go index 2bfcaafda9..29b5f0c97e 100644 --- a/modules/notification/base/null.go +++ b/modules/notification/base/null.go @@ -6,6 +6,7 @@ package base import ( "code.gitea.io/gitea/models" + packages_model "code.gitea.io/gitea/models/packages" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/repository" @@ -173,3 +174,11 @@ func (*NullNotifier) NotifySyncDeleteRef(doer *user_model.User, repo *repo_model // NotifyRepoPendingTransfer places a place holder function func (*NullNotifier) NotifyRepoPendingTransfer(doer, newOwner *user_model.User, repo *repo_model.Repository) { } + +// NotifyPackageCreate places a place holder function +func (*NullNotifier) NotifyPackageCreate(doer *user_model.User, pd *packages_model.PackageDescriptor) { +} + +// NotifyPackageDelete places a place holder function +func (*NullNotifier) NotifyPackageDelete(doer *user_model.User, pd *packages_model.PackageDescriptor) { +} diff --git a/modules/notification/mail/mail.go b/modules/notification/mail/mail.go index b74482fbf7..138e438751 100644 --- a/modules/notification/mail/mail.go +++ b/modules/notification/mail/mail.go @@ -161,7 +161,7 @@ func (m *mailNotifier) NotifyPullRequestPushCommits(doer *user_model.User, pr *m log.Error("comment.LoadIssue: %v", err) return } - if err = comment.Issue.LoadRepo(); err != nil { + if err = comment.Issue.LoadRepo(ctx); err != nil { log.Error("comment.Issue.LoadRepo: %v", err) return } @@ -169,7 +169,7 @@ func (m *mailNotifier) NotifyPullRequestPushCommits(doer *user_model.User, pr *m log.Error("comment.Issue.LoadPullRequest: %v", err) return } - if err = comment.Issue.PullRequest.LoadBaseRepo(); err != nil { + if err = comment.Issue.PullRequest.LoadBaseRepoCtx(ctx); err != nil { log.Error("comment.Issue.PullRequest.LoadBaseRepo: %v", err) return } diff --git a/modules/notification/notification.go b/modules/notification/notification.go index a31e3810e2..90ff87941f 100644 --- a/modules/notification/notification.go +++ b/modules/notification/notification.go @@ -6,6 +6,7 @@ package notification import ( "code.gitea.io/gitea/models" + packages_model "code.gitea.io/gitea/models/packages" repo_model 
"code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/notification/action" @@ -306,3 +307,17 @@ func NotifyRepoPendingTransfer(doer, newOwner *user_model.User, repo *repo_model notifier.NotifyRepoPendingTransfer(doer, newOwner, repo) } } + +// NotifyPackageCreate notifies creation of a package to notifiers +func NotifyPackageCreate(doer *user_model.User, pd *packages_model.PackageDescriptor) { + for _, notifier := range notifiers { + notifier.NotifyPackageCreate(doer, pd) + } +} + +// NotifyPackageDelete notifies deletion of a package to notifiers +func NotifyPackageDelete(doer *user_model.User, pd *packages_model.PackageDescriptor) { + for _, notifier := range notifiers { + notifier.NotifyPackageDelete(doer, pd) + } +} diff --git a/modules/notification/webhook/webhook.go b/modules/notification/webhook/webhook.go index d4d5eea6cb..c59e972ed6 100644 --- a/modules/notification/webhook/webhook.go +++ b/modules/notification/webhook/webhook.go @@ -8,6 +8,8 @@ import ( "fmt" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" @@ -45,7 +47,7 @@ func (m *webhookNotifier) NotifyIssueClearLabels(doer *user_model.User, issue *m return } - if err := issue.LoadRepo(); err != nil { + if err := issue.LoadRepo(ctx); err != nil { log.Error("LoadRepo: %v", err) return } @@ -281,7 +283,7 @@ func (m *webhookNotifier) NotifyIssueChangeStatus(doer *user_model.User, issue * } func (m *webhookNotifier) NotifyNewIssue(issue *models.Issue, mentions []*user_model.User) { - if err := issue.LoadRepo(); err != nil { + if err := issue.LoadRepo(db.DefaultContext); err != nil { log.Error("issue.LoadRepo: %v", err) return } @@ -310,7 +312,7 @@ func (m *webhookNotifier) NotifyNewPullRequest(pull *models.PullRequest, mention log.Error("pull.LoadIssue: %v", err) return } - if err := pull.Issue.LoadRepo(); err != nil { + if err := pull.Issue.LoadRepo(ctx); err != nil { log.Error("pull.Issue.LoadRepo: %v", err) return } @@ -506,7 +508,7 @@ func (m *webhookNotifier) NotifyIssueChangeLabels(doer *user_model.User, issue * var err error - if err = issue.LoadRepo(); err != nil { + if err = issue.LoadRepo(ctx); err != nil { log.Error("LoadRepo: %v", err) return } @@ -633,7 +635,7 @@ func (*webhookNotifier) NotifyMergePullRequest(pr *models.PullRequest, doer *use return } - if err := pr.Issue.LoadRepo(); err != nil { + if err := pr.Issue.LoadRepo(ctx); err != nil { log.Error("pr.Issue.LoadRepo: %v", err) return } @@ -855,3 +857,35 @@ func (m *webhookNotifier) NotifySyncCreateRef(pusher *user_model.User, repo *rep func (m *webhookNotifier) NotifySyncDeleteRef(pusher *user_model.User, repo *repo_model.Repository, refType, refFullName string) { m.NotifyDeleteRef(pusher, repo, refType, refFullName) } + +func (m *webhookNotifier) NotifyPackageCreate(doer *user_model.User, pd *packages_model.PackageDescriptor) { + notifyPackage(doer, pd, api.HookPackageCreated) +} + +func (m *webhookNotifier) NotifyPackageDelete(doer *user_model.User, pd *packages_model.PackageDescriptor) { + notifyPackage(doer, pd, api.HookPackageDeleted) +} + +func notifyPackage(sender *user_model.User, pd *packages_model.PackageDescriptor, action api.HookPackageAction) { + if pd.Repository == nil { + // TODO https://github.com/go-gitea/gitea/pull/17940 + return + } + + ctx, _, finished := 
process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("webhook.notifyPackage Package: %s[%d]", pd.Package.Name, pd.Package.ID)) + defer finished() + + apiPackage, err := convert.ToPackage(ctx, pd, sender) + if err != nil { + log.Error("Error converting package: %v", err) + return + } + + if err := webhook_services.PrepareWebhooks(pd.Repository, webhook.HookEventPackage, &api.PackagePayload{ + Action: action, + Package: apiPackage, + Sender: convert.ToUser(sender, nil), + }); err != nil { + log.Error("PrepareWebhooks: %v", err) + } +} diff --git a/modules/options/dynamic.go b/modules/options/dynamic.go index e1b9353c33..5fea337e42 100644 --- a/modules/options/dynamic.go +++ b/modules/options/dynamic.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !bindata -// +build !bindata package options diff --git a/modules/options/options_bindata.go b/modules/options/options_bindata.go index 921e15ab38..77b7a7ef41 100644 --- a/modules/options/options_bindata.go +++ b/modules/options/options_bindata.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build bindata -// +build bindata package options diff --git a/modules/options/static.go b/modules/options/static.go index 5b61e58f8f..6cad88cb61 100644 --- a/modules/options/static.go +++ b/modules/options/static.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build bindata -// +build bindata package options diff --git a/modules/packages/composer/metadata.go b/modules/packages/composer/metadata.go new file mode 100644 index 0000000000..797576b1e7 --- /dev/null +++ b/modules/packages/composer/metadata.go @@ -0,0 +1,147 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package composer + +import ( + "archive/zip" + "errors" + "io" + "regexp" + "strings" + + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/validation" + + "github.com/hashicorp/go-version" +) + +// TypeProperty is the name of the property for Composer package types +const TypeProperty = "composer.type" + +var ( + // ErrMissingComposerFile indicates a missing composer.json file + ErrMissingComposerFile = errors.New("composer.json file is missing") + // ErrInvalidName indicates an invalid package name + ErrInvalidName = errors.New("package name is invalid") + // ErrInvalidVersion indicates an invalid package version + ErrInvalidVersion = errors.New("package version is invalid") +) + +// Package represents a Composer package +type Package struct { + Name string + Version string + Type string + Metadata *Metadata +} + +// Metadata represents the metadata of a Composer package +type Metadata struct { + Description string `json:"description,omitempty"` + Keywords []string `json:"keywords,omitempty"` + Homepage string `json:"homepage,omitempty"` + License Licenses `json:"license,omitempty"` + Authors []Author `json:"authors,omitempty"` + Autoload map[string]interface{} `json:"autoload,omitempty"` + AutoloadDev map[string]interface{} `json:"autoload-dev,omitempty"` + Extra map[string]interface{} `json:"extra,omitempty"` + Require map[string]string `json:"require,omitempty"` + RequireDev map[string]string `json:"require-dev,omitempty"` + Suggest map[string]string `json:"suggest,omitempty"` + Provide map[string]string `json:"provide,omitempty"` +} + +// Licenses represents the licenses of a Composer package +type Licenses []string + +// UnmarshalJSON reads from a string or array +func (l *Licenses) UnmarshalJSON(data []byte) error { + switch data[0] { + case '"': + var value string + if err := json.Unmarshal(data, &value); err != nil { + return err + } + *l = Licenses{value} + case '[': + values := make([]string, 0, 5) + if err := json.Unmarshal(data, &values); err != nil { + return err + } + *l = Licenses(values) + } + return nil +} + +// Author represents an author +type Author struct { + Name string `json:"name,omitempty"` + Email string `json:"email,omitempty"` + Homepage string `json:"homepage,omitempty"` +} + +var nameMatch = regexp.MustCompile(`\A[a-z0-9]([_\.-]?[a-z0-9]+)*/[a-z0-9](([_\.]?|-{0,2})[a-z0-9]+)*\z`) + +// ParsePackage parses the metadata of a Composer package file +func ParsePackage(r io.ReaderAt, size int64) (*Package, error) { + archive, err := zip.NewReader(r, size) + if err != nil { + return nil, err + } + + for _, file := range archive.File { + if strings.Count(file.Name, "/") > 1 { + continue + } + if strings.HasSuffix(strings.ToLower(file.Name), "composer.json") { + f, err := archive.Open(file.Name) + if err != nil { + return nil, err + } + defer f.Close() + + return ParseComposerFile(f) + } + } + return nil, ErrMissingComposerFile +} + +// ParseComposerFile parses a composer.json file to retrieve the metadata of a Composer package +func ParseComposerFile(r io.Reader) (*Package, error) { + var cj struct { + Name string `json:"name"` + Version string `json:"version"` + Type string `json:"type"` + Metadata + } + if err := json.NewDecoder(r).Decode(&cj); err != nil { + return nil, err + } + + if !nameMatch.MatchString(cj.Name) { + return nil, ErrInvalidName + } + + if cj.Version != "" { + if _, err := version.NewSemver(cj.Version); err != nil { + return nil, ErrInvalidVersion + } + } + + if !validation.IsValidURL(cj.Homepage) { + cj.Homepage = "" + } + + 
if cj.Type == "" { + cj.Type = "library" + } + + return &Package{ + Name: cj.Name, + Version: cj.Version, + Type: cj.Type, + Metadata: &cj.Metadata, + }, nil +} diff --git a/modules/packages/composer/metadata_test.go b/modules/packages/composer/metadata_test.go new file mode 100644 index 0000000000..feadc18b6a --- /dev/null +++ b/modules/packages/composer/metadata_test.go @@ -0,0 +1,130 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package composer + +import ( + "archive/zip" + "bytes" + "strings" + "testing" + + "code.gitea.io/gitea/modules/json" + + "github.com/stretchr/testify/assert" +) + +const ( + name = "gitea/composer-package" + description = "Package Description" + packageType = "composer-plugin" + author = "Gitea Authors" + email = "no.reply@gitea.io" + homepage = "https://gitea.io" + license = "MIT" +) + +const composerContent = `{ + "name": "` + name + `", + "description": "` + description + `", + "type": "` + packageType + `", + "license": "` + license + `", + "authors": [ + { + "name": "` + author + `", + "email": "` + email + `" + } + ], + "homepage": "` + homepage + `", + "autoload": { + "psr-4": {"Gitea\\ComposerPackage\\": "src/"} + }, + "require": { + "php": ">=7.2 || ^8.0" + } +}` + +func TestLicenseUnmarshal(t *testing.T) { + var l Licenses + assert.NoError(t, json.NewDecoder(strings.NewReader(`["MIT"]`)).Decode(&l)) + assert.Len(t, l, 1) + assert.Equal(t, "MIT", l[0]) + assert.NoError(t, json.NewDecoder(strings.NewReader(`"MIT"`)).Decode(&l)) + assert.Len(t, l, 1) + assert.Equal(t, "MIT", l[0]) +} + +func TestParsePackage(t *testing.T) { + createArchive := func(name, content string) []byte { + var buf bytes.Buffer + archive := zip.NewWriter(&buf) + w, _ := archive.Create(name) + w.Write([]byte(content)) + archive.Close() + return buf.Bytes() + } + + t.Run("MissingComposerFile", func(t *testing.T) { + data := createArchive("dummy.txt", "") + + cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) + assert.Nil(t, cp) + assert.ErrorIs(t, err, ErrMissingComposerFile) + }) + + t.Run("MissingComposerFileInRoot", func(t *testing.T) { + data := createArchive("sub/sub/composer.json", "") + + cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) + assert.Nil(t, cp) + assert.ErrorIs(t, err, ErrMissingComposerFile) + }) + + t.Run("InvalidComposerFile", func(t *testing.T) { + data := createArchive("composer.json", "") + + cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) + assert.Nil(t, cp) + assert.Error(t, err) + }) + + t.Run("Valid", func(t *testing.T) { + data := createArchive("composer.json", composerContent) + + cp, err := ParsePackage(bytes.NewReader(data), int64(len(data))) + assert.NoError(t, err) + assert.NotNil(t, cp) + }) +} + +func TestParseComposerFile(t *testing.T) { + t.Run("InvalidPackageName", func(t *testing.T) { + cp, err := ParseComposerFile(strings.NewReader(`{}`)) + assert.Nil(t, cp) + assert.ErrorIs(t, err, ErrInvalidName) + }) + + t.Run("InvalidPackageVersion", func(t *testing.T) { + cp, err := ParseComposerFile(strings.NewReader(`{"name": "gitea/composer-package", "version": "1.a.3"}`)) + assert.Nil(t, cp) + assert.ErrorIs(t, err, ErrInvalidVersion) + }) + + t.Run("Valid", func(t *testing.T) { + cp, err := ParseComposerFile(strings.NewReader(composerContent)) + assert.NoError(t, err) + assert.NotNil(t, cp) + + assert.Equal(t, name, cp.Name) + assert.Empty(t, cp.Version) + assert.Equal(t, 
description, cp.Metadata.Description) + assert.Len(t, cp.Metadata.Authors, 1) + assert.Equal(t, author, cp.Metadata.Authors[0].Name) + assert.Equal(t, email, cp.Metadata.Authors[0].Email) + assert.Equal(t, homepage, cp.Metadata.Homepage) + assert.Equal(t, packageType, cp.Type) + assert.Len(t, cp.Metadata.License, 1) + assert.Equal(t, license, cp.Metadata.License[0]) + }) +} diff --git a/modules/packages/conan/conanfile_parser.go b/modules/packages/conan/conanfile_parser.go new file mode 100644 index 0000000000..960e813533 --- /dev/null +++ b/modules/packages/conan/conanfile_parser.go @@ -0,0 +1,68 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package conan + +import ( + "io" + "regexp" + "strings" +) + +var ( + patternAuthor = compilePattern("author") + patternHomepage = compilePattern("homepage") + patternURL = compilePattern("url") + patternLicense = compilePattern("license") + patternDescription = compilePattern("description") + patternTopics = regexp.MustCompile(`(?im)^\s*topics\s*=\s*\((.+)\)`) + patternTopicList = regexp.MustCompile(`\s*['"](.+?)['"]\s*,?`) +) + +func compilePattern(name string) *regexp.Regexp { + return regexp.MustCompile(`(?im)^\s*` + name + `\s*=\s*['"\(](.+)['"\)]`) +} + +func ParseConanfile(r io.Reader) (*Metadata, error) { + buf, err := io.ReadAll(io.LimitReader(r, 1<<20)) + if err != nil { + return nil, err + } + + metadata := &Metadata{} + + m := patternAuthor.FindSubmatch(buf) + if len(m) > 1 && len(m[1]) > 0 { + metadata.Author = string(m[1]) + } + m = patternHomepage.FindSubmatch(buf) + if len(m) > 1 && len(m[1]) > 0 { + metadata.ProjectURL = string(m[1]) + } + m = patternURL.FindSubmatch(buf) + if len(m) > 1 && len(m[1]) > 0 { + metadata.RepositoryURL = string(m[1]) + } + m = patternLicense.FindSubmatch(buf) + if len(m) > 1 && len(m[1]) > 0 { + metadata.License = strings.ReplaceAll(strings.ReplaceAll(string(m[1]), "'", ""), "\"", "") + } + m = patternDescription.FindSubmatch(buf) + if len(m) > 1 && len(m[1]) > 0 { + metadata.Description = string(m[1]) + } + m = patternTopics.FindSubmatch(buf) + if len(m) > 1 && len(m[1]) > 0 { + m2 := patternTopicList.FindAllSubmatch(m[1], -1) + if len(m2) > 0 { + metadata.Keywords = make([]string, 0, len(m2)) + for _, g := range m2 { + if len(g) > 1 { + metadata.Keywords = append(metadata.Keywords, string(g[1])) + } + } + } + } + return metadata, nil +} diff --git a/modules/packages/conan/conanfile_parser_test.go b/modules/packages/conan/conanfile_parser_test.go new file mode 100644 index 0000000000..0ac9c87b14 --- /dev/null +++ b/modules/packages/conan/conanfile_parser_test.go @@ -0,0 +1,51 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package conan + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + name = "ConanPackage" + version = "1.2" + license = "MIT" + author = "Gitea " + homepage = "https://gitea.io/" + url = "https://gitea.com/" + description = "Description of ConanPackage" + topic1 = "gitea" + topic2 = "conan" + contentConanfile = `from conans import ConanFile, CMake, tools + +class ConanPackageConan(ConanFile): + name = "` + name + `" + version = "` + version + `" + license = "` + license + `" + author = "` + author + `" + homepage = "` + homepage + `" + url = "` + url + `" + description = "` + description + `" + topics = ("` + topic1 + `", "` + topic2 + `") + settings = "os", "compiler", "build_type", "arch" + options = {"shared": [True, False], "fPIC": [True, False]} + default_options = {"shared": False, "fPIC": True} + generators = "cmake" +` +) + +func TestParseConanfile(t *testing.T) { + metadata, err := ParseConanfile(strings.NewReader(contentConanfile)) + assert.Nil(t, err) + assert.Equal(t, license, metadata.License) + assert.Equal(t, author, metadata.Author) + assert.Equal(t, homepage, metadata.ProjectURL) + assert.Equal(t, url, metadata.RepositoryURL) + assert.Equal(t, description, metadata.Description) + assert.Equal(t, []string{topic1, topic2}, metadata.Keywords) +} diff --git a/modules/packages/conan/conaninfo_parser.go b/modules/packages/conan/conaninfo_parser.go new file mode 100644 index 0000000000..bb228e0207 --- /dev/null +++ b/modules/packages/conan/conaninfo_parser.go @@ -0,0 +1,123 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package conan + +import ( + "bufio" + "errors" + "io" + "strings" +) + +// Conaninfo represents infos of a Conan package +type Conaninfo struct { + Settings map[string]string `json:"settings"` + FullSettings map[string]string `json:"full_settings"` + Requires []string `json:"requires"` + FullRequires []string `json:"full_requires"` + Options map[string]string `json:"options"` + FullOptions map[string]string `json:"full_options"` + RecipeHash string `json:"recipe_hash"` + Environment map[string][]string `json:"environment"` +} + +func ParseConaninfo(r io.Reader) (*Conaninfo, error) { + sections, err := readSections(io.LimitReader(r, 1<<20)) + if err != nil { + return nil, err + } + + info := &Conaninfo{} + for section, lines := range sections { + if len(lines) == 0 { + continue + } + switch section { + case "settings": + info.Settings = toMap(lines) + case "full_settings": + info.FullSettings = toMap(lines) + case "options": + info.Options = toMap(lines) + case "full_options": + info.FullOptions = toMap(lines) + case "requires": + info.Requires = lines + case "full_requires": + info.FullRequires = lines + case "recipe_hash": + info.RecipeHash = lines[0] + case "env": + info.Environment = toMapArray(lines) + } + } + return info, nil +} + +func readSections(r io.Reader) (map[string][]string, error) { + sections := make(map[string][]string) + + section := "" + lines := make([]string, 0, 5) + + scanner := bufio.NewScanner(r) + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") { + if section != "" { + sections[section] = lines + } + section = line[1 : len(line)-1] + lines = make([]string, 0, 5) + continue + } + if section != "" { + if line != "" { + lines = append(lines, line) + } + continue + } + if line != "" { + return nil, 
errors.New("Invalid conaninfo.txt") + } + } + if err := scanner.Err(); err != nil { + return nil, err + } + if section != "" { + sections[section] = lines + } + return sections, nil +} + +func toMap(lines []string) map[string]string { + result := make(map[string]string) + for _, line := range lines { + parts := strings.SplitN(line, "=", 2) + if len(parts) != 2 || len(parts[0]) == 0 || len(parts[1]) == 0 { + continue + } + result[parts[0]] = parts[1] + } + return result +} + +func toMapArray(lines []string) map[string][]string { + result := make(map[string][]string) + for _, line := range lines { + parts := strings.SplitN(line, "=", 2) + if len(parts) != 2 || len(parts[0]) == 0 || len(parts[1]) == 0 { + continue + } + var items []string + if strings.HasPrefix(parts[1], "[") && strings.HasSuffix(parts[1], "]") { + items = strings.Split(parts[1], ",") + } else { + items = []string{parts[1]} + } + result[parts[0]] = items + } + return result +} diff --git a/modules/packages/conan/conaninfo_parser_test.go b/modules/packages/conan/conaninfo_parser_test.go new file mode 100644 index 0000000000..3e28191b06 --- /dev/null +++ b/modules/packages/conan/conaninfo_parser_test.go @@ -0,0 +1,85 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package conan + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + settingsKey = "arch" + settingsValue = "x84_64" + optionsKey = "shared" + optionsValue = "False" + requires = "fmt/7.1.3" + hash = "74714915a51073acb548ca1ce29afbac" + envKey = "CC" + envValue = "gcc-10" + + contentConaninfo = `[settings] + ` + settingsKey + `=` + settingsValue + ` + +[requires] + ` + requires + ` + +[options] + ` + optionsKey + `=` + optionsValue + ` + +[full_settings] + ` + settingsKey + `=` + settingsValue + ` + +[full_requires] + ` + requires + ` + +[full_options] + ` + optionsKey + `=` + optionsValue + ` + +[recipe_hash] + ` + hash + ` + +[env] +` + envKey + `=` + envValue + ` + +` +) + +func TestParseConaninfo(t *testing.T) { + info, err := ParseConaninfo(strings.NewReader(contentConaninfo)) + assert.NotNil(t, info) + assert.Nil(t, err) + assert.Equal( + t, + map[string]string{ + settingsKey: settingsValue, + }, + info.Settings, + ) + assert.Equal(t, info.Settings, info.FullSettings) + assert.Equal( + t, + map[string]string{ + optionsKey: optionsValue, + }, + info.Options, + ) + assert.Equal(t, info.Options, info.FullOptions) + assert.Equal( + t, + []string{requires}, + info.Requires, + ) + assert.Equal(t, info.Requires, info.FullRequires) + assert.Equal(t, hash, info.RecipeHash) + assert.Equal( + t, + map[string][]string{ + envKey: {envValue}, + }, + info.Environment, + ) +} diff --git a/modules/packages/conan/metadata.go b/modules/packages/conan/metadata.go new file mode 100644 index 0000000000..a7d6a9df0b --- /dev/null +++ b/modules/packages/conan/metadata.go @@ -0,0 +1,24 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package conan + +const ( + PropertyRecipeUser = "conan.recipe.user" + PropertyRecipeChannel = "conan.recipe.channel" + PropertyRecipeRevision = "conan.recipe.revision" + PropertyPackageReference = "conan.package.reference" + PropertyPackageRevision = "conan.package.revision" + PropertyPackageInfo = "conan.package.info" +) + +// Metadata represents the metadata of a Conan package +type Metadata struct { + Author string `json:"author,omitempty"` + License string `json:"license,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + RepositoryURL string `json:"repository_url,omitempty"` + Description string `json:"description,omitempty"` + Keywords []string `json:"keywords,omitempty"` +} diff --git a/modules/packages/conan/reference.go b/modules/packages/conan/reference.go new file mode 100644 index 0000000000..c43446e6e5 --- /dev/null +++ b/modules/packages/conan/reference.go @@ -0,0 +1,155 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package conan + +import ( + "errors" + "fmt" + "regexp" + + "code.gitea.io/gitea/modules/log" + + goversion "github.com/hashicorp/go-version" +) + +const ( + // taken from https://github.com/conan-io/conan/blob/develop/conans/model/ref.py + minChars = 2 + maxChars = 51 + + // DefaultRevision if no revision is specified + DefaultRevision = "0" +) + +var ( + namePattern = regexp.MustCompile(fmt.Sprintf(`^[a-zA-Z0-9_][a-zA-Z0-9_\+\.-]{%d,%d}$`, minChars-1, maxChars-1)) + revisionPattern = regexp.MustCompile(fmt.Sprintf(`^[a-zA-Z0-9]{1,%d}$`, maxChars)) + + ErrValidation = errors.New("Could not validate one or more reference fields") +) + +// RecipeReference represents a recipe /@/# +type RecipeReference struct { + Name string + Version string + User string + Channel string + Revision string +} + +func NewRecipeReference(name, version, user, channel, revision string) (*RecipeReference, error) { + log.Trace("Conan Recipe: %s/%s(@%s/%s(#%s))", name, version, user, channel, revision) + + if user == "_" { + user = "" + } + if channel == "_" { + channel = "" + } + + if (user != "" && channel == "") || (user == "" && channel != "") { + return nil, ErrValidation + } + + if !namePattern.MatchString(name) { + return nil, ErrValidation + } + if _, err := goversion.NewSemver(version); err != nil { + return nil, ErrValidation + } + if user != "" && !namePattern.MatchString(user) { + return nil, ErrValidation + } + if channel != "" && !namePattern.MatchString(channel) { + return nil, ErrValidation + } + if revision != "" && !revisionPattern.MatchString(revision) { + return nil, ErrValidation + } + + return &RecipeReference{name, version, user, channel, revision}, nil +} + +func (r *RecipeReference) RevisionOrDefault() string { + if r.Revision == "" { + return DefaultRevision + } + return r.Revision +} + +func (r *RecipeReference) String() string { + rev := "" + if r.Revision != "" { + rev = "#" + r.Revision + } + if r.User == "" || r.Channel == "" { + return fmt.Sprintf("%s/%s%s", r.Name, r.Version, rev) + } + return fmt.Sprintf("%s/%s@%s/%s%s", r.Name, r.Version, r.User, r.Channel, rev) +} + +func (r *RecipeReference) LinkName() string { + user := r.User + if user == "" { + user = "_" + } + channel := r.Channel + if channel == "" { + channel = "_" + } + return fmt.Sprintf("%s/%s/%s/%s/%s", r.Name, r.Version, user, channel, r.RevisionOrDefault()) +} + +func (r *RecipeReference) WithRevision(revision string) *RecipeReference { + return 
&RecipeReference{r.Name, r.Version, r.User, r.Channel, revision} +} + +// AsKey builds the additional key for the package file +func (r *RecipeReference) AsKey() string { + return fmt.Sprintf("%s|%s|%s", r.User, r.Channel, r.RevisionOrDefault()) +} + +// PackageReference represents a package of a recipe /@/# # +type PackageReference struct { + Recipe *RecipeReference + Reference string + Revision string +} + +func NewPackageReference(recipe *RecipeReference, reference, revision string) (*PackageReference, error) { + log.Trace("Conan Package: %v %s(#%s)", recipe, reference, revision) + + if recipe == nil { + return nil, ErrValidation + } + if reference == "" || !revisionPattern.MatchString(reference) { + return nil, ErrValidation + } + if revision != "" && !revisionPattern.MatchString(revision) { + return nil, ErrValidation + } + + return &PackageReference{recipe, reference, revision}, nil +} + +func (r *PackageReference) RevisionOrDefault() string { + if r.Revision == "" { + return DefaultRevision + } + return r.Revision +} + +func (r *PackageReference) LinkName() string { + return fmt.Sprintf("%s/%s", r.Reference, r.RevisionOrDefault()) +} + +func (r *PackageReference) WithRevision(revision string) *PackageReference { + return &PackageReference{r.Recipe, r.Reference, revision} +} + +// AsKey builds the additional key for the package file +func (r *PackageReference) AsKey() string { + return fmt.Sprintf("%s|%s|%s|%s|%s", r.Recipe.User, r.Recipe.Channel, r.Recipe.RevisionOrDefault(), r.Reference, r.RevisionOrDefault()) +} diff --git a/modules/packages/conan/reference_test.go b/modules/packages/conan/reference_test.go new file mode 100644 index 0000000000..29ba3a543b --- /dev/null +++ b/modules/packages/conan/reference_test.go @@ -0,0 +1,147 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package conan + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewRecipeReference(t *testing.T) { + cases := []struct { + Name string + Version string + User string + Channel string + Revision string + IsValid bool + }{ + {"", "", "", "", "", false}, + {"name", "", "", "", "", false}, + {"", "1.0", "", "", "", false}, + {"", "", "user", "", "", false}, + {"", "", "", "channel", "", false}, + {"", "", "", "", "0", false}, + {"name", "1.0", "", "", "", true}, + {"name", "1.0", "user", "", "", false}, + {"name", "1.0", "", "channel", "", false}, + {"name", "1.0", "user", "channel", "", true}, + {"name", "1.0", "_", "", "", true}, + {"name", "1.0", "", "_", "", true}, + {"name", "1.0", "_", "_", "", true}, + {"name", "1.0", "_", "_", "0", true}, + {"name", "1.0", "", "", "0", true}, + {"name", "1.0", "", "", "000000000000000000000000000000000000000000000000000000000000", false}, + } + + for i, c := range cases { + rref, err := NewRecipeReference(c.Name, c.Version, c.User, c.Channel, c.Revision) + if c.IsValid { + assert.NoError(t, err, "case %d, should be invalid", i) + assert.NotNil(t, rref, "case %d, should not be nil", i) + } else { + assert.Error(t, err, "case %d, should be valid", i) + } + } +} + +func TestRecipeReferenceRevisionOrDefault(t *testing.T) { + rref, err := NewRecipeReference("name", "1.0", "", "", "") + assert.NoError(t, err) + assert.Equal(t, DefaultRevision, rref.RevisionOrDefault()) + + rref, err = NewRecipeReference("name", "1.0", "", "", DefaultRevision) + assert.NoError(t, err) + assert.Equal(t, DefaultRevision, rref.RevisionOrDefault()) + + rref, err = NewRecipeReference("name", "1.0", "", "", "Az09") + assert.NoError(t, err) + assert.Equal(t, "Az09", rref.RevisionOrDefault()) +} + +func TestRecipeReferenceString(t *testing.T) { + rref, err := NewRecipeReference("name", "1.0", "", "", "") + assert.NoError(t, err) + assert.Equal(t, "name/1.0", rref.String()) + + rref, err = NewRecipeReference("name", "1.0", "user", "channel", "") + assert.NoError(t, err) + assert.Equal(t, "name/1.0@user/channel", rref.String()) + + rref, err = NewRecipeReference("name", "1.0", "user", "channel", "Az09") + assert.NoError(t, err) + assert.Equal(t, "name/1.0@user/channel#Az09", rref.String()) +} + +func TestRecipeReferenceLinkName(t *testing.T) { + rref, err := NewRecipeReference("name", "1.0", "", "", "") + assert.NoError(t, err) + assert.Equal(t, "name/1.0/_/_/0", rref.LinkName()) + + rref, err = NewRecipeReference("name", "1.0", "user", "channel", "") + assert.NoError(t, err) + assert.Equal(t, "name/1.0/user/channel/0", rref.LinkName()) + + rref, err = NewRecipeReference("name", "1.0", "user", "channel", "Az09") + assert.NoError(t, err) + assert.Equal(t, "name/1.0/user/channel/Az09", rref.LinkName()) +} + +func TestNewPackageReference(t *testing.T) { + rref, _ := NewRecipeReference("name", "1.0", "", "", "") + + cases := []struct { + Recipe *RecipeReference + Reference string + Revision string + IsValid bool + }{ + {nil, "", "", false}, + {rref, "", "", false}, + {nil, "aZ09", "", false}, + {rref, "aZ09", "", true}, + {rref, "", "Az09", false}, + {rref, "aZ09", "Az09", true}, + } + + for i, c := range cases { + pref, err := NewPackageReference(c.Recipe, c.Reference, c.Revision) + if c.IsValid { + assert.NoError(t, err, "case %d, should be invalid", i) + assert.NotNil(t, pref, "case %d, should not be nil", i) + } else { + assert.Error(t, err, "case %d, should be valid", i) + } + } +} + +func TestPackageReferenceRevisionOrDefault(t *testing.T) { + rref, _ 
:= NewRecipeReference("name", "1.0", "", "", "") + + pref, err := NewPackageReference(rref, "ref", "") + assert.NoError(t, err) + assert.Equal(t, DefaultRevision, pref.RevisionOrDefault()) + + pref, err = NewPackageReference(rref, "ref", DefaultRevision) + assert.NoError(t, err) + assert.Equal(t, DefaultRevision, pref.RevisionOrDefault()) + + pref, err = NewPackageReference(rref, "ref", "Az09") + assert.NoError(t, err) + assert.Equal(t, "Az09", pref.RevisionOrDefault()) +} + +func TestPackageReferenceLinkName(t *testing.T) { + rref, _ := NewRecipeReference("name", "1.0", "", "", "") + + pref, err := NewPackageReference(rref, "ref", "") + assert.NoError(t, err) + assert.Equal(t, "ref/0", pref.LinkName()) + + pref, err = NewPackageReference(rref, "ref", "Az09") + assert.NoError(t, err) + assert.Equal(t, "ref/Az09", pref.LinkName()) +} diff --git a/modules/packages/container/helm/helm.go b/modules/packages/container/helm/helm.go new file mode 100644 index 0000000000..98d3824a85 --- /dev/null +++ b/modules/packages/container/helm/helm.go @@ -0,0 +1,56 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package helm + +// https://github.com/helm/helm/blob/main/pkg/chart/ + +const ConfigMediaType = "application/vnd.cncf.helm.config.v1+json" + +// Maintainer describes a Chart maintainer. +type Maintainer struct { + // Name is a user name or organization name + Name string `json:"name,omitempty"` + // Email is an optional email address to contact the named maintainer + Email string `json:"email,omitempty"` + // URL is an optional URL to an address for the named maintainer + URL string `json:"url,omitempty"` +} + +// Metadata for a Chart file. This models the structure of a Chart.yaml file. +type Metadata struct { + // The name of the chart. Required. + Name string `json:"name,omitempty"` + // The URL to a relevant project page, git repo, or contact person + Home string `json:"home,omitempty"` + // Source is the URL to the source code of this chart + Sources []string `json:"sources,omitempty"` + // A SemVer 2 conformant version string of the chart. Required. + Version string `json:"version,omitempty"` + // A one-sentence description of the chart + Description string `json:"description,omitempty"` + // A list of string keywords + Keywords []string `json:"keywords,omitempty"` + // A list of name and URL/email address combinations for the maintainer(s) + Maintainers []*Maintainer `json:"maintainers,omitempty"` + // The URL to an icon file. + Icon string `json:"icon,omitempty"` + // The API Version of this chart. Required. + APIVersion string `json:"apiVersion,omitempty"` + // The condition to check to enable chart + Condition string `json:"condition,omitempty"` + // The tags to check to enable chart + Tags string `json:"tags,omitempty"` + // The version of the application enclosed inside of this chart. + AppVersion string `json:"appVersion,omitempty"` + // Whether or not this chart is deprecated + Deprecated bool `json:"deprecated,omitempty"` + // Annotations are additional mappings uninterpreted by Helm, + // made available for inspection by other applications. + Annotations map[string]string `json:"annotations,omitempty"` + // KubeVersion is a SemVer constraint specifying the version of Kubernetes required. 
+ KubeVersion string `json:"kubeVersion,omitempty"` + // Specifies the chart type: application or library + Type string `json:"type,omitempty"` +} diff --git a/modules/packages/container/metadata.go b/modules/packages/container/metadata.go new file mode 100644 index 0000000000..087d38e5bd --- /dev/null +++ b/modules/packages/container/metadata.go @@ -0,0 +1,157 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package container + +import ( + "fmt" + "io" + "strings" + + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/packages/container/helm" + "code.gitea.io/gitea/modules/packages/container/oci" + "code.gitea.io/gitea/modules/validation" +) + +const ( + PropertyDigest = "container.digest" + PropertyMediaType = "container.mediatype" + PropertyManifestTagged = "container.manifest.tagged" + PropertyManifestReference = "container.manifest.reference" + + DefaultPlatform = "linux/amd64" + + labelLicenses = "org.opencontainers.image.licenses" + labelURL = "org.opencontainers.image.url" + labelSource = "org.opencontainers.image.source" + labelDocumentation = "org.opencontainers.image.documentation" + labelDescription = "org.opencontainers.image.description" + labelAuthors = "org.opencontainers.image.authors" +) + +type ImageType string + +const ( + TypeOCI ImageType = "oci" + TypeHelm ImageType = "helm" +) + +// Name gets the name of the image type +func (it ImageType) Name() string { + switch it { + case TypeHelm: + return "Helm Chart" + default: + return "OCI / Docker" + } +} + +// Metadata represents the metadata of a Container package +type Metadata struct { + Type ImageType `json:"type"` + IsTagged bool `json:"is_tagged"` + Platform string `json:"platform,omitempty"` + Description string `json:"description,omitempty"` + Authors []string `json:"authors,omitempty"` + Licenses string `json:"license,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + RepositoryURL string `json:"repository_url,omitempty"` + DocumentationURL string `json:"documentation_url,omitempty"` + Labels map[string]string `json:"labels,omitempty"` + ImageLayers []string `json:"layer_creation,omitempty"` + MultiArch map[string]string `json:"multiarch,omitempty"` +} + +// ParseImageConfig parses the metadata of an image config +func ParseImageConfig(mediaType oci.MediaType, r io.Reader) (*Metadata, error) { + if strings.EqualFold(string(mediaType), helm.ConfigMediaType) { + return parseHelmConfig(r) + } + + // fallback to OCI Image Config + return parseOCIImageConfig(r) +} + +func parseOCIImageConfig(r io.Reader) (*Metadata, error) { + var image oci.Image + if err := json.NewDecoder(r).Decode(&image); err != nil { + return nil, err + } + + platform := DefaultPlatform + if image.OS != "" && image.Architecture != "" { + platform = fmt.Sprintf("%s/%s", image.OS, image.Architecture) + if image.Variant != "" { + platform = fmt.Sprintf("%s/%s", platform, image.Variant) + } + } + + imageLayers := make([]string, 0, len(image.History)) + for _, history := range image.History { + cmd := history.CreatedBy + if i := strings.Index(cmd, "#(nop) "); i != -1 { + cmd = strings.TrimSpace(cmd[i+7:]) + } + imageLayers = append(imageLayers, cmd) + } + + metadata := &Metadata{ + Type: TypeOCI, + Platform: platform, + Licenses: image.Config.Labels[labelLicenses], + ProjectURL: image.Config.Labels[labelURL], + RepositoryURL: image.Config.Labels[labelSource], + DocumentationURL: 
image.Config.Labels[labelDocumentation], + Description: image.Config.Labels[labelDescription], + Labels: image.Config.Labels, + ImageLayers: imageLayers, + } + + if authors, ok := image.Config.Labels[labelAuthors]; ok { + metadata.Authors = []string{authors} + } + + if !validation.IsValidURL(metadata.ProjectURL) { + metadata.ProjectURL = "" + } + if !validation.IsValidURL(metadata.RepositoryURL) { + metadata.RepositoryURL = "" + } + if !validation.IsValidURL(metadata.DocumentationURL) { + metadata.DocumentationURL = "" + } + + return metadata, nil +} + +func parseHelmConfig(r io.Reader) (*Metadata, error) { + var config helm.Metadata + if err := json.NewDecoder(r).Decode(&config); err != nil { + return nil, err + } + + metadata := &Metadata{ + Type: TypeHelm, + Description: config.Description, + ProjectURL: config.Home, + } + + if len(config.Maintainers) > 0 { + authors := make([]string, 0, len(config.Maintainers)) + for _, maintainer := range config.Maintainers { + authors = append(authors, maintainer.Name) + } + metadata.Authors = authors + } + + if len(config.Sources) > 0 && validation.IsValidURL(config.Sources[0]) { + metadata.RepositoryURL = config.Sources[0] + } + if !validation.IsValidURL(metadata.ProjectURL) { + metadata.ProjectURL = "" + } + + return metadata, nil +} diff --git a/modules/packages/container/metadata_test.go b/modules/packages/container/metadata_test.go new file mode 100644 index 0000000000..9400cf6954 --- /dev/null +++ b/modules/packages/container/metadata_test.go @@ -0,0 +1,62 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package container + +import ( + "strings" + "testing" + + "code.gitea.io/gitea/modules/packages/container/helm" + "code.gitea.io/gitea/modules/packages/container/oci" + + "github.com/stretchr/testify/assert" +) + +func TestParseImageConfig(t *testing.T) { + description := "Image Description" + author := "Gitea" + license := "MIT" + projectURL := "https://gitea.io" + repositoryURL := "https://gitea.com/gitea" + documentationURL := "https://docs.gitea.io" + + configOCI := `{"config": {"labels": {"` + labelAuthors + `": "` + author + `", "` + labelLicenses + `": "` + license + `", "` + labelURL + `": "` + projectURL + `", "` + labelSource + `": "` + repositoryURL + `", "` + labelDocumentation + `": "` + documentationURL + `", "` + labelDescription + `": "` + description + `"}}, "history": [{"created_by": "do it 1"}, {"created_by": "dummy #(nop) do it 2"}]}` + + metadata, err := ParseImageConfig(oci.MediaType(oci.MediaTypeImageManifest), strings.NewReader(configOCI)) + assert.NoError(t, err) + + assert.Equal(t, TypeOCI, metadata.Type) + assert.Equal(t, description, metadata.Description) + assert.ElementsMatch(t, []string{author}, metadata.Authors) + assert.Equal(t, license, metadata.Licenses) + assert.Equal(t, projectURL, metadata.ProjectURL) + assert.Equal(t, repositoryURL, metadata.RepositoryURL) + assert.Equal(t, documentationURL, metadata.DocumentationURL) + assert.Equal(t, []string{"do it 1", "do it 2"}, metadata.ImageLayers) + assert.Equal( + t, + map[string]string{ + labelAuthors: author, + labelLicenses: license, + labelURL: projectURL, + labelSource: repositoryURL, + labelDocumentation: documentationURL, + labelDescription: description, + }, + metadata.Labels, + ) + assert.Empty(t, metadata.MultiArch) + + configHelm := `{"description":"` + description + `", "home": "` + projectURL + `", "sources": ["` + repositoryURL + 
`"], "maintainers":[{"name":"` + author + `"}]}` + + metadata, err = ParseImageConfig(oci.MediaType(helm.ConfigMediaType), strings.NewReader(configHelm)) + assert.NoError(t, err) + + assert.Equal(t, TypeHelm, metadata.Type) + assert.Equal(t, description, metadata.Description) + assert.ElementsMatch(t, []string{author}, metadata.Authors) + assert.Equal(t, projectURL, metadata.ProjectURL) + assert.Equal(t, repositoryURL, metadata.RepositoryURL) +} diff --git a/modules/packages/container/oci/digest.go b/modules/packages/container/oci/digest.go new file mode 100644 index 0000000000..5234814cfe --- /dev/null +++ b/modules/packages/container/oci/digest.go @@ -0,0 +1,27 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package oci + +import ( + "regexp" + "strings" +) + +var digestPattern = regexp.MustCompile(`\Asha256:[a-f0-9]{64}\z`) + +type Digest string + +// Validate checks if the digest has a valid SHA256 signature +func (d Digest) Validate() bool { + return digestPattern.MatchString(string(d)) +} + +func (d Digest) Hash() string { + p := strings.SplitN(string(d), ":", 2) + if len(p) != 2 { + return "" + } + return p[1] +} diff --git a/modules/packages/container/oci/mediatype.go b/modules/packages/container/oci/mediatype.go new file mode 100644 index 0000000000..2636fbe288 --- /dev/null +++ b/modules/packages/container/oci/mediatype.go @@ -0,0 +1,36 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package oci + +import ( + "strings" +) + +const ( + MediaTypeImageManifest = "application/vnd.oci.image.manifest.v1+json" + MediaTypeImageIndex = "application/vnd.oci.image.index.v1+json" + MediaTypeDockerManifest = "application/vnd.docker.distribution.manifest.v2+json" + MediaTypeDockerManifestList = "application/vnd.docker.distribution.manifest.list.v2+json" +) + +type MediaType string + +// IsValid tests if the media type is in the OCI or Docker namespace +func (m MediaType) IsValid() bool { + s := string(m) + return strings.HasPrefix(s, "application/vnd.docker.") || strings.HasPrefix(s, "application/vnd.oci.") +} + +// IsImageManifest tests if the media type is an image manifest +func (m MediaType) IsImageManifest() bool { + s := string(m) + return strings.EqualFold(s, MediaTypeDockerManifest) || strings.EqualFold(s, MediaTypeImageManifest) +} + +// IsImageIndex tests if the media type is an image index +func (m MediaType) IsImageIndex() bool { + s := string(m) + return strings.EqualFold(s, MediaTypeDockerManifestList) || strings.EqualFold(s, MediaTypeImageIndex) +} diff --git a/modules/packages/container/oci/oci.go b/modules/packages/container/oci/oci.go new file mode 100644 index 0000000000..01cca8fe69 --- /dev/null +++ b/modules/packages/container/oci/oci.go @@ -0,0 +1,191 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package oci + +import ( + "time" +) + +// https://github.com/opencontainers/image-spec/tree/main/specs-go/v1 + +// ImageConfig defines the execution parameters which should be used as a base when running a container using an image. +type ImageConfig struct { + // User defines the username or UID which the process in the container should run as. 
+ User string `json:"User,omitempty"` + + // ExposedPorts a set of ports to expose from a container running this image. + ExposedPorts map[string]struct{} `json:"ExposedPorts,omitempty"` + + // Env is a list of environment variables to be used in a container. + Env []string `json:"Env,omitempty"` + + // Entrypoint defines a list of arguments to use as the command to execute when the container starts. + Entrypoint []string `json:"Entrypoint,omitempty"` + + // Cmd defines the default arguments to the entrypoint of the container. + Cmd []string `json:"Cmd,omitempty"` + + // Volumes is a set of directories describing where the process is likely write data specific to a container instance. + Volumes map[string]struct{} `json:"Volumes,omitempty"` + + // WorkingDir sets the current working directory of the entrypoint process in the container. + WorkingDir string `json:"WorkingDir,omitempty"` + + // Labels contains arbitrary metadata for the container. + Labels map[string]string `json:"Labels,omitempty"` + + // StopSignal contains the system call signal that will be sent to the container to exit. + StopSignal string `json:"StopSignal,omitempty"` +} + +// RootFS describes a layer content addresses +type RootFS struct { + // Type is the type of the rootfs. + Type string `json:"type"` + + // DiffIDs is an array of layer content hashes, in order from bottom-most to top-most. + DiffIDs []string `json:"diff_ids"` +} + +// History describes the history of a layer. +type History struct { + // Created is the combined date and time at which the layer was created, formatted as defined by RFC 3339, section 5.6. + Created *time.Time `json:"created,omitempty"` + + // CreatedBy is the command which created the layer. + CreatedBy string `json:"created_by,omitempty"` + + // Author is the author of the build point. + Author string `json:"author,omitempty"` + + // Comment is a custom message set when creating the layer. + Comment string `json:"comment,omitempty"` + + // EmptyLayer is used to mark if the history item created a filesystem diff. + EmptyLayer bool `json:"empty_layer,omitempty"` +} + +// Image is the JSON structure which describes some basic information about the image. +// This provides the `application/vnd.oci.image.config.v1+json` mediatype when marshalled to JSON. +type Image struct { + // Created is the combined date and time at which the image was created, formatted as defined by RFC 3339, section 5.6. + Created *time.Time `json:"created,omitempty"` + + // Author defines the name and/or email address of the person or entity which created and is responsible for maintaining the image. + Author string `json:"author,omitempty"` + + // Architecture is the CPU architecture which the binaries in this image are built to run on. + Architecture string `json:"architecture"` + + // Variant is the variant of the specified CPU architecture which image binaries are intended to run on. + Variant string `json:"variant,omitempty"` + + // OS is the name of the operating system which the image is built to run on. + OS string `json:"os"` + + // OSVersion is an optional field specifying the operating system + // version, for example on Windows `10.0.14393.1066`. + OSVersion string `json:"os.version,omitempty"` + + // OSFeatures is an optional field specifying an array of strings, + // each listing a required OS feature (for example on Windows `win32k`). 
+ OSFeatures []string `json:"os.features,omitempty"` + + // Config defines the execution parameters which should be used as a base when running a container using the image. + Config ImageConfig `json:"config,omitempty"` + + // RootFS references the layer content addresses used by the image. + RootFS RootFS `json:"rootfs"` + + // History describes the history of each layer. + History []History `json:"history,omitempty"` +} + +// Descriptor describes the disposition of targeted content. +// This structure provides `application/vnd.oci.descriptor.v1+json` mediatype +// when marshalled to JSON. +type Descriptor struct { + // MediaType is the media type of the object this schema refers to. + MediaType MediaType `json:"mediaType,omitempty"` + + // Digest is the digest of the targeted content. + Digest Digest `json:"digest"` + + // Size specifies the size in bytes of the blob. + Size int64 `json:"size"` + + // URLs specifies a list of URLs from which this object MAY be downloaded + URLs []string `json:"urls,omitempty"` + + // Annotations contains arbitrary metadata relating to the targeted content. + Annotations map[string]string `json:"annotations,omitempty"` + + // Data is an embedding of the targeted content. This is encoded as a base64 + // string when marshalled to JSON (automatically, by encoding/json). If + // present, Data can be used directly to avoid fetching the targeted content. + Data []byte `json:"data,omitempty"` + + // Platform describes the platform which the image in the manifest runs on. + // + // This should only be used when referring to a manifest. + Platform *Platform `json:"platform,omitempty"` +} + +// Platform describes the platform which the image in the manifest runs on. +type Platform struct { + // Architecture field specifies the CPU architecture, for example + // `amd64` or `ppc64`. + Architecture string `json:"architecture"` + + // OS specifies the operating system, for example `linux` or `windows`. + OS string `json:"os"` + + // OSVersion is an optional field specifying the operating system + // version, for example on Windows `10.0.14393.1066`. + OSVersion string `json:"os.version,omitempty"` + + // OSFeatures is an optional field specifying an array of strings, + // each listing a required OS feature (for example on Windows `win32k`). + OSFeatures []string `json:"os.features,omitempty"` + + // Variant is an optional field specifying a variant of the CPU, for + // example `v7` to specify ARMv7 when architecture is `arm`. + Variant string `json:"variant,omitempty"` +} + +type SchemaMediaBase struct { + // SchemaVersion is the image manifest schema that this image follows + SchemaVersion int `json:"schemaVersion"` + + // MediaType specifies the type of this document data structure e.g. `application/vnd.oci.image.manifest.v1+json` + MediaType MediaType `json:"mediaType,omitempty"` +} + +// Manifest provides `application/vnd.oci.image.manifest.v1+json` mediatype structure when marshalled to JSON. +type Manifest struct { + SchemaMediaBase + + // Config references a configuration object for a container, by digest. + // The referenced configuration object is a JSON blob that the runtime uses to set up the container. + Config Descriptor `json:"config"` + + // Layers is an indexed list of layers referenced by the manifest. + Layers []Descriptor `json:"layers"` + + // Annotations contains arbitrary metadata for the image manifest. + Annotations map[string]string `json:"annotations,omitempty"` +} + +// Index references manifests for various platforms. 
+// This structure provides `application/vnd.oci.image.index.v1+json` mediatype when marshalled to JSON. +type Index struct { + SchemaMediaBase + + // Manifests references platform specific manifests. + Manifests []Descriptor `json:"manifests"` + + // Annotations contains arbitrary metadata for the image index. + Annotations map[string]string `json:"annotations,omitempty"` +} diff --git a/modules/packages/container/oci/reference.go b/modules/packages/container/oci/reference.go new file mode 100644 index 0000000000..120ff122d4 --- /dev/null +++ b/modules/packages/container/oci/reference.go @@ -0,0 +1,17 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package oci + +import ( + "regexp" +) + +var referencePattern = regexp.MustCompile(`\A[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}\z`) + +type Reference string + +func (r Reference) Validate() bool { + return referencePattern.MatchString(string(r)) +} diff --git a/modules/packages/content_store.go b/modules/packages/content_store.go new file mode 100644 index 0000000000..64c3eedc23 --- /dev/null +++ b/modules/packages/content_store.go @@ -0,0 +1,47 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "io" + "path" + + "code.gitea.io/gitea/modules/storage" +) + +// BlobHash256Key is the key to address a blob content +type BlobHash256Key string + +// ContentStore is a wrapper around ObjectStorage +type ContentStore struct { + store storage.ObjectStorage +} + +// NewContentStore creates the default package store +func NewContentStore() *ContentStore { + contentStore := &ContentStore{storage.Packages} + return contentStore +} + +// Get gets a package blob +func (s *ContentStore) Get(key BlobHash256Key) (storage.Object, error) { + return s.store.Open(keyToRelativePath(key)) +} + +// Save stores a package blob +func (s *ContentStore) Save(key BlobHash256Key, r io.Reader, size int64) error { + _, err := s.store.Save(keyToRelativePath(key), r, size) + return err +} + +// Delete deletes a package blob +func (s *ContentStore) Delete(key BlobHash256Key) error { + return s.store.Delete(keyToRelativePath(key)) +} + +// keyToRelativePath converts the sha256 key aabb000000... to aa/bb/aabb000000... +func keyToRelativePath(key BlobHash256Key) string { + return path.Join(string(key)[0:2], string(key)[2:4], string(key)) +} diff --git a/modules/packages/hashed_buffer.go b/modules/packages/hashed_buffer.go new file mode 100644 index 0000000000..3f8cafcfb5 --- /dev/null +++ b/modules/packages/hashed_buffer.go @@ -0,0 +1,70 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
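The ContentStore above is content-addressed: a blob is stored under a BlobHash256Key and fanned out into two levels of two-character directories by keyToRelativePath. A minimal sketch of the intended flow, assuming the key is the lowercase hex SHA-256 of the blob content (keyToRelativePath itself is unexported, so the path.Join call below merely mirrors its aa/bb/aabb... layout):

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"path"
)

func main() {
	blob := []byte("example blob")

	// Assumed key: lowercase hex SHA-256 of the blob, the same digest the HashedBuffer computes during upload.
	sum := sha256.Sum256(blob)
	key := hex.EncodeToString(sum[:])

	// Mirrors the unexported keyToRelativePath: "aabbcc..." -> "aa/bb/aabbcc..."
	rel := path.Join(key[0:2], key[2:4], key)
	fmt.Println(rel)
}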
+ +package packages + +import ( + "io" + + "code.gitea.io/gitea/modules/util/filebuffer" +) + +// HashedSizeReader provide methods to read, sum hashes and a Size method +type HashedSizeReader interface { + io.Reader + HashSummer + Size() int64 +} + +// HashedBuffer is buffer which calculates multiple checksums +type HashedBuffer struct { + *filebuffer.FileBackedBuffer + + hash *MultiHasher + + combinedWriter io.Writer +} + +// NewHashedBuffer creates a hashed buffer with a specific maximum memory size +func NewHashedBuffer(maxMemorySize int) (*HashedBuffer, error) { + b, err := filebuffer.New(maxMemorySize) + if err != nil { + return nil, err + } + + hash := NewMultiHasher() + + combinedWriter := io.MultiWriter(b, hash) + + return &HashedBuffer{ + b, + hash, + combinedWriter, + }, nil +} + +// CreateHashedBufferFromReader creates a hashed buffer and copies the provided reader data into it. +func CreateHashedBufferFromReader(r io.Reader, maxMemorySize int) (*HashedBuffer, error) { + b, err := NewHashedBuffer(maxMemorySize) + if err != nil { + return nil, err + } + + _, err = io.Copy(b, r) + if err != nil { + return nil, err + } + + return b, nil +} + +// Write implements io.Writer +func (b *HashedBuffer) Write(p []byte) (int, error) { + return b.combinedWriter.Write(p) +} + +// Sums gets the MD5, SHA1, SHA256 and SHA512 checksums of the data +func (b *HashedBuffer) Sums() (hashMD5, hashSHA1, hashSHA256, hashSHA512 []byte) { + return b.hash.Sums() +} diff --git a/modules/packages/helm/metadata.go b/modules/packages/helm/metadata.go new file mode 100644 index 0000000000..9517448ca6 --- /dev/null +++ b/modules/packages/helm/metadata.go @@ -0,0 +1,131 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package helm + +import ( + "archive/tar" + "compress/gzip" + "errors" + "io" + "strings" + + "code.gitea.io/gitea/modules/validation" + + "github.com/hashicorp/go-version" + "gopkg.in/yaml.v2" +) + +var ( + // ErrMissingChartFile indicates a missing Chart.yaml file + ErrMissingChartFile = errors.New("Chart.yaml file is missing") + // ErrInvalidName indicates an invalid package name + ErrInvalidName = errors.New("package name is invalid") + // ErrInvalidVersion indicates an invalid package version + ErrInvalidVersion = errors.New("package version is invalid") + // ErrInvalidChart indicates an invalid chart + ErrInvalidChart = errors.New("chart is invalid") +) + +// Metadata for a Chart file. This models the structure of a Chart.yaml file. 
+type Metadata struct { + APIVersion string `json:"api_version" yaml:"apiVersion"` + Type string `json:"type,omitempty" yaml:"type,omitempty"` + Name string `json:"name" yaml:"name"` + Version string `json:"version" yaml:"version"` + AppVersion string `json:"app_version,omitempty" yaml:"appVersion,omitempty"` + Home string `json:"home,omitempty" yaml:"home,omitempty"` + Sources []string `json:"sources,omitempty" yaml:"sources,omitempty"` + Description string `json:"description,omitempty" yaml:"description,omitempty"` + Keywords []string `json:"keywords,omitempty" yaml:"keywords,omitempty"` + Maintainers []*Maintainer `json:"maintainers,omitempty" yaml:"maintainers,omitempty"` + Icon string `json:"icon,omitempty" yaml:"icon,omitempty"` + Condition string `json:"condition,omitempty" yaml:"condition,omitempty"` + Tags string `json:"tags,omitempty" yaml:"tags,omitempty"` + Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"` + Annotations map[string]string `json:"annotations,omitempty" yaml:"annotations,omitempty"` + KubeVersion string `json:"kube_version,omitempty" yaml:"kubeVersion,omitempty"` + Dependencies []*Dependency `json:"dependencies,omitempty" yaml:"dependencies,omitempty"` +} + +type Maintainer struct { + Name string `json:"name,omitempty" yaml:"name,omitempty"` + Email string `json:"email,omitempty" yaml:"email,omitempty"` + URL string `json:"url,omitempty" yaml:"url,omitempty"` +} + +type Dependency struct { + Name string `json:"name" yaml:"name"` + Version string `json:"version,omitempty" yaml:"version,omitempty"` + Repository string `json:"repository" yaml:"repository"` + Condition string `json:"condition,omitempty" yaml:"condition,omitempty"` + Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"` + Enabled bool `json:"enabled,omitempty" yaml:"enabled,omitempty"` + ImportValues []interface{} `json:"import_values,omitempty" yaml:"import-values,omitempty"` + Alias string `json:"alias,omitempty" yaml:"alias,omitempty"` +} + +// ParseChartArchive parses the metadata of a Helm archive +func ParseChartArchive(r io.Reader) (*Metadata, error) { + gzr, err := gzip.NewReader(r) + if err != nil { + return nil, err + } + defer gzr.Close() + + tr := tar.NewReader(gzr) + for { + hd, err := tr.Next() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + if hd.Typeflag != tar.TypeReg { + continue + } + + if hd.FileInfo().Name() == "Chart.yaml" { + if strings.Count(hd.Name, "/") != 1 { + continue + } + + return ParseChartFile(tr) + } + } + + return nil, ErrMissingChartFile +} + +// ParseChartFile parses a Chart.yaml file to retrieve the metadata of a Helm chart +func ParseChartFile(r io.Reader) (*Metadata, error) { + var metadata *Metadata + if err := yaml.NewDecoder(r).Decode(&metadata); err != nil { + return nil, err + } + + if metadata.APIVersion == "" { + return nil, ErrInvalidChart + } + + if metadata.Type != "" && metadata.Type != "application" && metadata.Type != "library" { + return nil, ErrInvalidChart + } + + if metadata.Name == "" { + return nil, ErrInvalidName + } + + if _, err := version.NewSemver(metadata.Version); err != nil { + return nil, ErrInvalidVersion + } + + if !validation.IsValidURL(metadata.Home) { + metadata.Home = "" + } + + return metadata, nil +} diff --git a/modules/packages/maven/metadata.go b/modules/packages/maven/metadata.go new file mode 100644 index 0000000000..6ee9d69687 --- /dev/null +++ b/modules/packages/maven/metadata.go @@ -0,0 +1,89 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. 
+// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package maven + +import ( + "encoding/xml" + "io" + + "code.gitea.io/gitea/modules/validation" +) + +// Metadata represents the metadata of a Maven package +type Metadata struct { + GroupID string `json:"group_id,omitempty"` + ArtifactID string `json:"artifact_id,omitempty"` + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + Licenses []string `json:"licenses,omitempty"` + Dependencies []*Dependency `json:"dependencies,omitempty"` +} + +// Dependency represents a dependency of a Maven package +type Dependency struct { + GroupID string `json:"group_id,omitempty"` + ArtifactID string `json:"artifact_id,omitempty"` + Version string `json:"version,omitempty"` +} + +type pomStruct struct { + XMLName xml.Name `xml:"project"` + GroupID string `xml:"groupId"` + ArtifactID string `xml:"artifactId"` + Version string `xml:"version"` + Name string `xml:"name"` + Description string `xml:"description"` + URL string `xml:"url"` + Licenses []struct { + Name string `xml:"name"` + URL string `xml:"url"` + Distribution string `xml:"distribution"` + } `xml:"licenses>license"` + Dependencies []struct { + GroupID string `xml:"groupId"` + ArtifactID string `xml:"artifactId"` + Version string `xml:"version"` + Scope string `xml:"scope"` + } `xml:"dependencies>dependency"` +} + +// ParsePackageMetaData parses the metadata of a pom file +func ParsePackageMetaData(r io.Reader) (*Metadata, error) { + var pom pomStruct + if err := xml.NewDecoder(r).Decode(&pom); err != nil { + return nil, err + } + + if !validation.IsValidURL(pom.URL) { + pom.URL = "" + } + + licenses := make([]string, 0, len(pom.Licenses)) + for _, l := range pom.Licenses { + if l.Name != "" { + licenses = append(licenses, l.Name) + } + } + + dependencies := make([]*Dependency, 0, len(pom.Dependencies)) + for _, d := range pom.Dependencies { + dependencies = append(dependencies, &Dependency{ + GroupID: d.GroupID, + ArtifactID: d.ArtifactID, + Version: d.Version, + }) + } + + return &Metadata{ + GroupID: pom.GroupID, + ArtifactID: pom.ArtifactID, + Name: pom.Name, + Description: pom.Description, + ProjectURL: pom.URL, + Licenses: licenses, + Dependencies: dependencies, + }, nil +} diff --git a/modules/packages/maven/metadata_test.go b/modules/packages/maven/metadata_test.go new file mode 100644 index 0000000000..a17d456560 --- /dev/null +++ b/modules/packages/maven/metadata_test.go @@ -0,0 +1,73 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
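The pomStruct above leans on encoding/xml's element-path tags: a tag like `xml:"licenses>license"` descends into <licenses> and collects every <license> child directly into the slice, which is why no wrapper structs are needed for licenses or dependencies. A self-contained sketch of that mechanism (the POM snippet and struct here are illustrative only, not part of this change):

package main

import (
	"encoding/xml"
	"fmt"
)

type project struct {
	// "licenses>license" steps into <licenses> and gathers each repeated <license> element.
	Licenses []struct {
		Name string `xml:"name"`
	} `xml:"licenses>license"`
}

func main() {
	const pom = `<project>
  <licenses>
    <license><name>MIT</name></license>
    <license><name>Apache-2.0</name></license>
  </licenses>
</project>`

	var p project
	if err := xml.Unmarshal([]byte(pom), &p); err != nil {
		panic(err)
	}
	fmt.Println(len(p.Licenses), p.Licenses[0].Name) // 2 MIT
}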
+ +package maven + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + groupID = "org.gitea" + artifactID = "my-project" + version = "1.0.1" + name = "My Gitea Project" + description = "Package Description" + projectURL = "https://gitea.io" + license = "MIT" + dependencyGroupID = "org.gitea.core" + dependencyArtifactID = "git" + dependencyVersion = "5.0.0" +) + +const pomContent = ` + + ` + groupID + ` + ` + artifactID + ` + ` + version + ` + ` + name + ` + ` + description + ` + ` + projectURL + ` + + + ` + license + ` + + + + + ` + dependencyGroupID + ` + ` + dependencyArtifactID + ` + ` + dependencyVersion + ` + + +` + +func TestParsePackageMetaData(t *testing.T) { + t.Run("InvalidFile", func(t *testing.T) { + m, err := ParsePackageMetaData(strings.NewReader("")) + assert.Nil(t, m) + assert.Error(t, err) + }) + + t.Run("Valid", func(t *testing.T) { + m, err := ParsePackageMetaData(strings.NewReader(pomContent)) + assert.NoError(t, err) + assert.NotNil(t, m) + + assert.Equal(t, groupID, m.GroupID) + assert.Equal(t, artifactID, m.ArtifactID) + assert.Equal(t, name, m.Name) + assert.Equal(t, description, m.Description) + assert.Equal(t, projectURL, m.ProjectURL) + assert.Len(t, m.Licenses, 1) + assert.Equal(t, license, m.Licenses[0]) + assert.Len(t, m.Dependencies, 1) + assert.Equal(t, dependencyGroupID, m.Dependencies[0].GroupID) + assert.Equal(t, dependencyArtifactID, m.Dependencies[0].ArtifactID) + assert.Equal(t, dependencyVersion, m.Dependencies[0].Version) + }) +} diff --git a/modules/packages/multi_hasher.go b/modules/packages/multi_hasher.go new file mode 100644 index 0000000000..0659a18d2a --- /dev/null +++ b/modules/packages/multi_hasher.go @@ -0,0 +1,123 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "crypto/md5" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding" + "errors" + "hash" + "io" +) + +const ( + marshaledSizeMD5 = 92 + marshaledSizeSHA1 = 96 + marshaledSizeSHA256 = 108 + marshaledSizeSHA512 = 204 + + marshaledSize = marshaledSizeMD5 + marshaledSizeSHA1 + marshaledSizeSHA256 + marshaledSizeSHA512 +) + +// HashSummer provide a Sums method +type HashSummer interface { + Sums() (hashMD5, hashSHA1, hashSHA256, hashSHA512 []byte) +} + +// MultiHasher calculates multiple checksums +type MultiHasher struct { + md5 hash.Hash + sha1 hash.Hash + sha256 hash.Hash + sha512 hash.Hash + + combinedWriter io.Writer +} + +// NewMultiHasher creates a multi hasher +func NewMultiHasher() *MultiHasher { + md5 := md5.New() + sha1 := sha1.New() + sha256 := sha256.New() + sha512 := sha512.New() + + combinedWriter := io.MultiWriter(md5, sha1, sha256, sha512) + + return &MultiHasher{ + md5, + sha1, + sha256, + sha512, + combinedWriter, + } +} + +// MarshalBinary implements encoding.BinaryMarshaler +func (h *MultiHasher) MarshalBinary() ([]byte, error) { + md5Bytes, err := h.md5.(encoding.BinaryMarshaler).MarshalBinary() + if err != nil { + return nil, err + } + sha1Bytes, err := h.sha1.(encoding.BinaryMarshaler).MarshalBinary() + if err != nil { + return nil, err + } + sha256Bytes, err := h.sha256.(encoding.BinaryMarshaler).MarshalBinary() + if err != nil { + return nil, err + } + sha512Bytes, err := h.sha512.(encoding.BinaryMarshaler).MarshalBinary() + if err != nil { + return nil, err + } + + b := make([]byte, 0, marshaledSize) + b = append(b, md5Bytes...) + b = append(b, sha1Bytes...) 
+ b = append(b, sha256Bytes...) + b = append(b, sha512Bytes...) + return b, nil +} + +// UnmarshalBinary implements encoding.BinaryUnmarshaler +func (h *MultiHasher) UnmarshalBinary(b []byte) error { + if len(b) != marshaledSize { + return errors.New("invalid hash state size") + } + + if err := h.md5.(encoding.BinaryUnmarshaler).UnmarshalBinary(b[:marshaledSizeMD5]); err != nil { + return err + } + + b = b[marshaledSizeMD5:] + if err := h.sha1.(encoding.BinaryUnmarshaler).UnmarshalBinary(b[:marshaledSizeSHA1]); err != nil { + return err + } + + b = b[marshaledSizeSHA1:] + if err := h.sha256.(encoding.BinaryUnmarshaler).UnmarshalBinary(b[:marshaledSizeSHA256]); err != nil { + return err + } + + b = b[marshaledSizeSHA256:] + return h.sha512.(encoding.BinaryUnmarshaler).UnmarshalBinary(b[:marshaledSizeSHA512]) +} + +// Write implements io.Writer +func (h *MultiHasher) Write(p []byte) (int, error) { + return h.combinedWriter.Write(p) +} + +// Sums gets the MD5, SHA1, SHA256 and SHA512 checksums of the data +func (h *MultiHasher) Sums() (hashMD5, hashSHA1, hashSHA256, hashSHA512 []byte) { + hashMD5 = h.md5.Sum(nil) + hashSHA1 = h.sha1.Sum(nil) + hashSHA256 = h.sha256.Sum(nil) + hashSHA512 = h.sha512.Sum(nil) + return +} diff --git a/modules/packages/multi_hasher_test.go b/modules/packages/multi_hasher_test.go new file mode 100644 index 0000000000..6c895ce120 --- /dev/null +++ b/modules/packages/multi_hasher_test.go @@ -0,0 +1,54 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + expectedMD5 = "e3bef03c5f3b7f6b3ab3e3053ed71e9c" + expectedSHA1 = "060b3b99f88e96085b4a68e095bc9e3d1d91e1bc" + expectedSHA256 = "6ccce4863b70f258d691f59609d31b4502e1ba5199942d3bc5d35d17a4ce771d" + expectedSHA512 = "7f70e439ba8c52025c1f06cdf6ae443c4b8ed2e90059cdb9bbbf8adf80846f185a24acca9245b128b226d61753b0d7ed46580a69c8999eeff3bc13a4d0bd816c" +) + +func TestMultiHasherSums(t *testing.T) { + t.Run("Sums", func(t *testing.T) { + h := NewMultiHasher() + h.Write([]byte("gitea")) + + hashMD5, hashSHA1, hashSHA256, hashSHA512 := h.Sums() + + assert.Equal(t, expectedMD5, fmt.Sprintf("%x", hashMD5)) + assert.Equal(t, expectedSHA1, fmt.Sprintf("%x", hashSHA1)) + assert.Equal(t, expectedSHA256, fmt.Sprintf("%x", hashSHA256)) + assert.Equal(t, expectedSHA512, fmt.Sprintf("%x", hashSHA512)) + }) + + t.Run("State", func(t *testing.T) { + h := NewMultiHasher() + h.Write([]byte("git")) + + state, err := h.MarshalBinary() + assert.NoError(t, err) + + h2 := NewMultiHasher() + err = h2.UnmarshalBinary(state) + assert.NoError(t, err) + + h2.Write([]byte("ea")) + + hashMD5, hashSHA1, hashSHA256, hashSHA512 := h2.Sums() + + assert.Equal(t, expectedMD5, fmt.Sprintf("%x", hashMD5)) + assert.Equal(t, expectedSHA1, fmt.Sprintf("%x", hashSHA1)) + assert.Equal(t, expectedSHA256, fmt.Sprintf("%x", hashSHA256)) + assert.Equal(t, expectedSHA512, fmt.Sprintf("%x", hashSHA512)) + }) +} diff --git a/modules/packages/npm/creator.go b/modules/packages/npm/creator.go new file mode 100644 index 0000000000..88ce55ecdb --- /dev/null +++ b/modules/packages/npm/creator.go @@ -0,0 +1,256 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
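The marshaledSize* constants in MultiHasher above (92, 96, 108 and 204 bytes) are the lengths of the states returned by the standard library hashes' MarshalBinary, roughly a short magic string, the internal digest words, one block of buffered input and an 8-byte byte count. If those numbers ever look surprising, they can be checked empirically with a quick sketch like this:

package main

import (
	"crypto/md5"
	"crypto/sha1"
	"crypto/sha256"
	"crypto/sha512"
	"encoding"
	"fmt"
	"hash"
)

func main() {
	hashes := map[string]hash.Hash{
		"md5":    md5.New(),
		"sha1":   sha1.New(),
		"sha256": sha256.New(),
		"sha512": sha512.New(),
	}
	for name, h := range hashes {
		// The stdlib hashes implement encoding.BinaryMarshaler, as MultiHasher relies on.
		state, err := h.(encoding.BinaryMarshaler).MarshalBinary()
		if err != nil {
			panic(err)
		}
		// Expected lengths: md5 92, sha1 96, sha256 108, sha512 204.
		fmt.Println(name, len(state))
	}
}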
+ +package npm + +import ( + "bytes" + "crypto/sha1" + "crypto/sha512" + "encoding/base64" + "errors" + "fmt" + "io" + "regexp" + "strings" + "time" + + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/validation" + + "github.com/hashicorp/go-version" +) + +var ( + // ErrInvalidPackage indicates an invalid package + ErrInvalidPackage = errors.New("The package is invalid") + // ErrInvalidPackageName indicates an invalid name + ErrInvalidPackageName = errors.New("The package name is invalid") + // ErrInvalidPackageVersion indicates an invalid version + ErrInvalidPackageVersion = errors.New("The package version is invalid") + // ErrInvalidAttachment indicates a invalid attachment + ErrInvalidAttachment = errors.New("The package attachment is invalid") + // ErrInvalidIntegrity indicates an integrity validation error + ErrInvalidIntegrity = errors.New("Failed to validate integrity") +) + +var nameMatch = regexp.MustCompile(`\A((@[^\s\/~'!\(\)\*]+?)[\/])?([^_.][^\s\/~'!\(\)\*]+)\z`) + +// Package represents a npm package +type Package struct { + Name string + Version string + DistTags []string + Metadata Metadata + Filename string + Data []byte +} + +// PackageMetadata https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#package +type PackageMetadata struct { + ID string `json:"_id"` + Name string `json:"name"` + Description string `json:"description"` + DistTags map[string]string `json:"dist-tags,omitempty"` + Versions map[string]*PackageMetadataVersion `json:"versions"` + Readme string `json:"readme,omitempty"` + Maintainers []User `json:"maintainers,omitempty"` + Time map[string]time.Time `json:"time,omitempty"` + Homepage string `json:"homepage,omitempty"` + Keywords []string `json:"keywords,omitempty"` + Repository Repository `json:"repository,omitempty"` + Author User `json:"author"` + ReadmeFilename string `json:"readmeFilename,omitempty"` + Users map[string]bool `json:"users,omitempty"` + License string `json:"license,omitempty"` +} + +// PackageMetadataVersion https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#version +type PackageMetadataVersion struct { + ID string `json:"_id"` + Name string `json:"name"` + Version string `json:"version"` + Description string `json:"description"` + Author User `json:"author"` + Homepage string `json:"homepage,omitempty"` + License string `json:"license,omitempty"` + Repository Repository `json:"repository,omitempty"` + Keywords []string `json:"keywords,omitempty"` + Dependencies map[string]string `json:"dependencies,omitempty"` + DevDependencies map[string]string `json:"devDependencies,omitempty"` + PeerDependencies map[string]string `json:"peerDependencies,omitempty"` + OptionalDependencies map[string]string `json:"optionalDependencies,omitempty"` + Readme string `json:"readme,omitempty"` + Dist PackageDistribution `json:"dist"` + Maintainers []User `json:"maintainers,omitempty"` +} + +// PackageDistribution https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#version +type PackageDistribution struct { + Integrity string `json:"integrity"` + Shasum string `json:"shasum"` + Tarball string `json:"tarball"` + FileCount int `json:"fileCount,omitempty"` + UnpackedSize int `json:"unpackedSize,omitempty"` + NpmSignature string `json:"npm-signature,omitempty"` +} + +// User https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#package +type User struct { + Username string `json:"username,omitempty"` + Name string `json:"name"` + Email string `json:"email,omitempty"` + URL string 
`json:"url,omitempty"` +} + +// UnmarshalJSON is needed because User objects can be strings or objects +func (u *User) UnmarshalJSON(data []byte) error { + switch data[0] { + case '"': + if err := json.Unmarshal(data, &u.Name); err != nil { + return err + } + case '{': + var tmp struct { + Username string `json:"username"` + Name string `json:"name"` + Email string `json:"email"` + URL string `json:"url"` + } + if err := json.Unmarshal(data, &tmp); err != nil { + return err + } + u.Username = tmp.Username + u.Name = tmp.Name + u.Email = tmp.Email + u.URL = tmp.URL + } + return nil +} + +// Repository https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#version +type Repository struct { + Type string `json:"type"` + URL string `json:"url"` +} + +// PackageAttachment https://github.com/npm/registry/blob/master/docs/REGISTRY-API.md#package +type PackageAttachment struct { + ContentType string `json:"content_type"` + Data string `json:"data"` + Length int `json:"length"` +} + +type packageUpload struct { + PackageMetadata + Attachments map[string]*PackageAttachment `json:"_attachments"` +} + +// ParsePackage parses the content into a npm package +func ParsePackage(r io.Reader) (*Package, error) { + var upload packageUpload + if err := json.NewDecoder(r).Decode(&upload); err != nil { + return nil, err + } + + for _, meta := range upload.Versions { + if !validateName(meta.Name) { + return nil, ErrInvalidPackageName + } + + v, err := version.NewSemver(meta.Version) + if err != nil { + return nil, ErrInvalidPackageVersion + } + + scope := "" + name := meta.Name + nameParts := strings.SplitN(meta.Name, "/", 2) + if len(nameParts) == 2 { + scope = nameParts[0] + name = nameParts[1] + } + + if !validation.IsValidURL(meta.Homepage) { + meta.Homepage = "" + } + + p := &Package{ + Name: meta.Name, + Version: v.String(), + DistTags: make([]string, 0, 1), + Metadata: Metadata{ + Scope: scope, + Name: name, + Description: meta.Description, + Author: meta.Author.Name, + License: meta.License, + ProjectURL: meta.Homepage, + Keywords: meta.Keywords, + Dependencies: meta.Dependencies, + DevelopmentDependencies: meta.DevDependencies, + PeerDependencies: meta.PeerDependencies, + OptionalDependencies: meta.OptionalDependencies, + Readme: meta.Readme, + }, + } + + for tag := range upload.DistTags { + p.DistTags = append(p.DistTags, tag) + } + + p.Filename = strings.ToLower(fmt.Sprintf("%s-%s.tgz", name, p.Version)) + + attachment := func() *PackageAttachment { + for _, a := range upload.Attachments { + return a + } + return nil + }() + if attachment == nil || len(attachment.Data) == 0 { + return nil, ErrInvalidAttachment + } + + data, err := base64.StdEncoding.DecodeString(attachment.Data) + if err != nil { + return nil, ErrInvalidAttachment + } + p.Data = data + + integrity := strings.SplitN(meta.Dist.Integrity, "-", 2) + if len(integrity) != 2 { + return nil, ErrInvalidIntegrity + } + integrityHash, err := base64.StdEncoding.DecodeString(integrity[1]) + if err != nil { + return nil, ErrInvalidIntegrity + } + var hash []byte + switch integrity[0] { + case "sha1": + tmp := sha1.Sum(data) + hash = tmp[:] + case "sha512": + tmp := sha512.Sum512(data) + hash = tmp[:] + } + if !bytes.Equal(integrityHash, hash) { + return nil, ErrInvalidIntegrity + } + + return p, nil + } + + return nil, ErrInvalidPackage +} + +func validateName(name string) bool { + if strings.TrimSpace(name) != name { + return false + } + if len(name) == 0 || len(name) > 214 { + return false + } + return nameMatch.MatchString(name) +} 
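ParsePackage above verifies the upload against dist.integrity, which uses the Subresource Integrity format "<algorithm>-<base64 digest>": the base64 part is decoded and compared against the hash of the decoded attachment. A small sketch of producing such a value for a tarball, to make clear what that comparison checks (the helper name is illustrative):

package main

import (
	"crypto/sha512"
	"encoding/base64"
	"fmt"
)

// sriSHA512 returns an npm-style integrity string for the given tarball bytes,
// e.g. "sha512-yA4FJsVhety...==".
func sriSHA512(data []byte) string {
	sum := sha512.Sum512(data)
	return "sha512-" + base64.StdEncoding.EncodeToString(sum[:])
}

func main() {
	fmt.Println(sriSHA512([]byte("fake tarball contents")))
}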
diff --git a/modules/packages/npm/creator_test.go b/modules/packages/npm/creator_test.go new file mode 100644 index 0000000000..64ae6238f3 --- /dev/null +++ b/modules/packages/npm/creator_test.go @@ -0,0 +1,272 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package npm + +import ( + "bytes" + "encoding/base64" + "fmt" + "strings" + "testing" + + "code.gitea.io/gitea/modules/json" + + "github.com/stretchr/testify/assert" +) + +func TestParsePackage(t *testing.T) { + packageScope := "@scope" + packageName := "test-package" + packageFullName := packageScope + "/" + packageName + packageVersion := "1.0.1-pre" + packageTag := "latest" + packageAuthor := "KN4CK3R" + packageDescription := "Test Description" + data := "H4sIAAAAAAAA/ytITM5OTE/VL4DQelnF+XkMVAYGBgZmJiYK2MRBwNDcSIHB2NTMwNDQzMwAqA7IMDUxA9LUdgg2UFpcklgEdAql5kD8ogCnhwio5lJQUMpLzE1VslJQcihOzi9I1S9JLS7RhSYIJR2QgrLUouLM/DyQGkM9Az1D3YIiqExKanFyUWZBCVQ2BKhVwQVJDKwosbQkI78IJO/tZ+LsbRykxFXLNdA+HwWjYBSMgpENACgAbtAACAAA" + integrity := "sha512-yA4FJsVhetynGfOC1jFf79BuS+jrHbm0fhh+aHzCQkOaOBXKf9oBnC4a6DnLLnEsHQDRLYd00cwj8sCXpC+wIg==" + + t.Run("InvalidUpload", func(t *testing.T) { + p, err := ParsePackage(bytes.NewReader([]byte{0})) + assert.Nil(t, p) + assert.Error(t, err) + }) + + t.Run("InvalidUploadNoData", func(t *testing.T) { + b, _ := json.Marshal(packageUpload{}) + p, err := ParsePackage(bytes.NewReader(b)) + assert.Nil(t, p) + assert.ErrorIs(t, err, ErrInvalidPackage) + }) + + t.Run("InvalidPackageName", func(t *testing.T) { + test := func(t *testing.T, name string) { + b, _ := json.Marshal(packageUpload{ + PackageMetadata: PackageMetadata{ + ID: name, + Name: name, + Versions: map[string]*PackageMetadataVersion{ + packageVersion: { + Name: name, + }, + }, + }, + }) + + p, err := ParsePackage(bytes.NewReader(b)) + assert.Nil(t, p) + assert.ErrorIs(t, err, ErrInvalidPackageName) + } + + test(t, " test ") + test(t, " test") + test(t, "test ") + test(t, "te st") + test(t, "invalid/scope") + test(t, "@invalid/_name") + test(t, "@invalid/.name") + }) + + t.Run("ValidPackageName", func(t *testing.T) { + test := func(t *testing.T, name string) { + b, _ := json.Marshal(packageUpload{ + PackageMetadata: PackageMetadata{ + ID: name, + Name: name, + Versions: map[string]*PackageMetadataVersion{ + packageVersion: { + Name: name, + }, + }, + }, + }) + + p, err := ParsePackage(bytes.NewReader(b)) + assert.Nil(t, p) + assert.ErrorIs(t, err, ErrInvalidPackageVersion) + } + + test(t, "test") + test(t, "@scope/name") + test(t, packageFullName) + }) + + t.Run("InvalidPackageVersion", func(t *testing.T) { + version := "first-version" + b, _ := json.Marshal(packageUpload{ + PackageMetadata: PackageMetadata{ + ID: packageFullName, + Name: packageFullName, + Versions: map[string]*PackageMetadataVersion{ + version: { + Name: packageFullName, + Version: version, + }, + }, + }, + }) + + p, err := ParsePackage(bytes.NewReader(b)) + assert.Nil(t, p) + assert.ErrorIs(t, err, ErrInvalidPackageVersion) + }) + + t.Run("InvalidAttachment", func(t *testing.T) { + b, _ := json.Marshal(packageUpload{ + PackageMetadata: PackageMetadata{ + ID: packageFullName, + Name: packageFullName, + Versions: map[string]*PackageMetadataVersion{ + packageVersion: { + Name: packageFullName, + Version: packageVersion, + }, + }, + }, + Attachments: map[string]*PackageAttachment{ + "dummy.tgz": {}, + }, + }) + + p, err := ParsePackage(bytes.NewReader(b)) + assert.Nil(t, p) 
+ assert.ErrorIs(t, err, ErrInvalidAttachment) + }) + + t.Run("InvalidData", func(t *testing.T) { + filename := fmt.Sprintf("%s-%s.tgz", packageFullName, packageVersion) + b, _ := json.Marshal(packageUpload{ + PackageMetadata: PackageMetadata{ + ID: packageFullName, + Name: packageFullName, + Versions: map[string]*PackageMetadataVersion{ + packageVersion: { + Name: packageFullName, + Version: packageVersion, + }, + }, + }, + Attachments: map[string]*PackageAttachment{ + filename: { + Data: "/", + }, + }, + }) + + p, err := ParsePackage(bytes.NewReader(b)) + assert.Nil(t, p) + assert.ErrorIs(t, err, ErrInvalidAttachment) + }) + + t.Run("InvalidIntegrity", func(t *testing.T) { + filename := fmt.Sprintf("%s-%s.tgz", packageFullName, packageVersion) + b, _ := json.Marshal(packageUpload{ + PackageMetadata: PackageMetadata{ + ID: packageFullName, + Name: packageFullName, + Versions: map[string]*PackageMetadataVersion{ + packageVersion: { + Name: packageFullName, + Version: packageVersion, + Dist: PackageDistribution{ + Integrity: "sha512-test==", + }, + }, + }, + }, + Attachments: map[string]*PackageAttachment{ + filename: { + Data: data, + }, + }, + }) + + p, err := ParsePackage(bytes.NewReader(b)) + assert.Nil(t, p) + assert.ErrorIs(t, err, ErrInvalidIntegrity) + }) + + t.Run("InvalidIntegrity2", func(t *testing.T) { + filename := fmt.Sprintf("%s-%s.tgz", packageFullName, packageVersion) + b, _ := json.Marshal(packageUpload{ + PackageMetadata: PackageMetadata{ + ID: packageFullName, + Name: packageFullName, + Versions: map[string]*PackageMetadataVersion{ + packageVersion: { + Name: packageFullName, + Version: packageVersion, + Dist: PackageDistribution{ + Integrity: integrity, + }, + }, + }, + }, + Attachments: map[string]*PackageAttachment{ + filename: { + Data: base64.StdEncoding.EncodeToString([]byte("data")), + }, + }, + }) + + p, err := ParsePackage(bytes.NewReader(b)) + assert.Nil(t, p) + assert.ErrorIs(t, err, ErrInvalidIntegrity) + }) + + t.Run("Valid", func(t *testing.T) { + filename := fmt.Sprintf("%s-%s.tgz", packageFullName, packageVersion) + b, _ := json.Marshal(packageUpload{ + PackageMetadata: PackageMetadata{ + ID: packageFullName, + Name: packageFullName, + DistTags: map[string]string{ + packageTag: packageVersion, + }, + Versions: map[string]*PackageMetadataVersion{ + packageVersion: { + Name: packageFullName, + Version: packageVersion, + Description: packageDescription, + Author: User{Name: packageAuthor}, + License: "MIT", + Homepage: "https://gitea.io/", + Readme: packageDescription, + Dependencies: map[string]string{ + "package": "1.2.0", + }, + Dist: PackageDistribution{ + Integrity: integrity, + }, + }, + }, + }, + Attachments: map[string]*PackageAttachment{ + filename: { + Data: data, + }, + }, + }) + + p, err := ParsePackage(bytes.NewReader(b)) + assert.NotNil(t, p) + assert.NoError(t, err) + + assert.Equal(t, packageFullName, p.Name) + assert.Equal(t, packageVersion, p.Version) + assert.Equal(t, []string{packageTag}, p.DistTags) + assert.Equal(t, fmt.Sprintf("%s-%s.tgz", strings.Split(packageFullName, "/")[1], packageVersion), p.Filename) + b, _ = base64.StdEncoding.DecodeString(data) + assert.Equal(t, b, p.Data) + assert.Equal(t, packageName, p.Metadata.Name) + assert.Equal(t, packageScope, p.Metadata.Scope) + assert.Equal(t, packageDescription, p.Metadata.Description) + assert.Equal(t, packageDescription, p.Metadata.Readme) + assert.Equal(t, packageAuthor, p.Metadata.Author) + assert.Equal(t, "MIT", p.Metadata.License) + assert.Equal(t, "https://gitea.io/", 
p.Metadata.ProjectURL) + assert.Contains(t, p.Metadata.Dependencies, "package") + assert.Equal(t, "1.2.0", p.Metadata.Dependencies["package"]) + }) +} diff --git a/modules/packages/npm/metadata.go b/modules/packages/npm/metadata.go new file mode 100644 index 0000000000..643a4d344b --- /dev/null +++ b/modules/packages/npm/metadata.go @@ -0,0 +1,24 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package npm + +// TagProperty is the name of the property for tag management +const TagProperty = "npm.tag" + +// Metadata represents the metadata of a npm package +type Metadata struct { + Scope string `json:"scope,omitempty"` + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + Author string `json:"author,omitempty"` + License string `json:"license,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + Keywords []string `json:"keywords,omitempty"` + Dependencies map[string]string `json:"dependencies,omitempty"` + DevelopmentDependencies map[string]string `json:"development_dependencies,omitempty"` + PeerDependencies map[string]string `json:"peer_dependencies,omitempty"` + OptionalDependencies map[string]string `json:"optional_dependencies,omitempty"` + Readme string `json:"readme,omitempty"` +} diff --git a/modules/packages/nuget/metadata.go b/modules/packages/nuget/metadata.go new file mode 100644 index 0000000000..797bff45ac --- /dev/null +++ b/modules/packages/nuget/metadata.go @@ -0,0 +1,187 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package nuget + +import ( + "archive/zip" + "encoding/xml" + "errors" + "io" + "path/filepath" + "regexp" + "strings" + + "code.gitea.io/gitea/modules/validation" + + "github.com/hashicorp/go-version" +) + +var ( + // ErrMissingNuspecFile indicates a missing Nuspec file + ErrMissingNuspecFile = errors.New("Nuspec file is missing") + // ErrNuspecFileTooLarge indicates a Nuspec file which is too large + ErrNuspecFileTooLarge = errors.New("Nuspec file is too large") + // ErrNuspecInvalidID indicates an invalid id in the Nuspec file + ErrNuspecInvalidID = errors.New("Nuspec file contains an invalid id") + // ErrNuspecInvalidVersion indicates an invalid version in the Nuspec file + ErrNuspecInvalidVersion = errors.New("Nuspec file contains an invalid version") +) + +// PackageType specifies the package type the metadata describes +type PackageType int + +const ( + // DependencyPackage represents a package (*.nupkg) + DependencyPackage PackageType = iota + 1 + // SymbolsPackage represents a symbol package (*.snupkg) + SymbolsPackage + + PropertySymbolID = "nuget.symbol.id" +) + +var idmatch = regexp.MustCompile(`\A\w+(?:[.-]\w+)*\z`) + +const maxNuspecFileSize = 3 * 1024 * 1024 + +// Package represents a Nuget package +type Package struct { + PackageType PackageType + ID string + Version string + Metadata *Metadata +} + +// Metadata represents the metadata of a Nuget package +type Metadata struct { + Description string `json:"description,omitempty"` + ReleaseNotes string `json:"release_notes,omitempty"` + Authors string `json:"authors,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + RepositoryURL string `json:"repository_url,omitempty"` + Dependencies map[string][]Dependency `json:"dependencies,omitempty"` +} + +// Dependency represents a dependency of a Nuget 
package +type Dependency struct { + ID string `json:"id"` + Version string `json:"version"` +} + +type nuspecPackage struct { + Metadata struct { + ID string `xml:"id"` + Version string `xml:"version"` + Authors string `xml:"authors"` + RequireLicenseAcceptance bool `xml:"requireLicenseAcceptance"` + ProjectURL string `xml:"projectUrl"` + Description string `xml:"description"` + ReleaseNotes string `xml:"releaseNotes"` + PackageTypes struct { + PackageType []struct { + Name string `xml:"name,attr"` + } `xml:"packageType"` + } `xml:"packageTypes"` + Repository struct { + URL string `xml:"url,attr"` + } `xml:"repository"` + Dependencies struct { + Group []struct { + TargetFramework string `xml:"targetFramework,attr"` + Dependency []struct { + ID string `xml:"id,attr"` + Version string `xml:"version,attr"` + Exclude string `xml:"exclude,attr"` + } `xml:"dependency"` + } `xml:"group"` + } `xml:"dependencies"` + } `xml:"metadata"` +} + +// ParsePackageMetaData parses the metadata of a Nuget package file +func ParsePackageMetaData(r io.ReaderAt, size int64) (*Package, error) { + archive, err := zip.NewReader(r, size) + if err != nil { + return nil, err + } + + for _, file := range archive.File { + if filepath.Dir(file.Name) != "." { + continue + } + if strings.HasSuffix(strings.ToLower(file.Name), ".nuspec") { + if file.UncompressedSize64 > maxNuspecFileSize { + return nil, ErrNuspecFileTooLarge + } + f, err := archive.Open(file.Name) + if err != nil { + return nil, err + } + defer f.Close() + + return ParseNuspecMetaData(f) + } + } + return nil, ErrMissingNuspecFile +} + +// ParseNuspecMetaData parses a Nuspec file to retrieve the metadata of a Nuget package +func ParseNuspecMetaData(r io.Reader) (*Package, error) { + var p nuspecPackage + if err := xml.NewDecoder(r).Decode(&p); err != nil { + return nil, err + } + + if !idmatch.MatchString(p.Metadata.ID) { + return nil, ErrNuspecInvalidID + } + + v, err := version.NewSemver(p.Metadata.Version) + if err != nil { + return nil, ErrNuspecInvalidVersion + } + + if !validation.IsValidURL(p.Metadata.ProjectURL) { + p.Metadata.ProjectURL = "" + } + + packageType := DependencyPackage + for _, pt := range p.Metadata.PackageTypes.PackageType { + if pt.Name == "SymbolsPackage" { + packageType = SymbolsPackage + break + } + } + + m := &Metadata{ + Description: p.Metadata.Description, + ReleaseNotes: p.Metadata.ReleaseNotes, + Authors: p.Metadata.Authors, + ProjectURL: p.Metadata.ProjectURL, + RepositoryURL: p.Metadata.Repository.URL, + Dependencies: make(map[string][]Dependency), + } + + for _, group := range p.Metadata.Dependencies.Group { + deps := make([]Dependency, 0, len(group.Dependency)) + for _, dep := range group.Dependency { + if dep.ID == "" || dep.Version == "" { + continue + } + deps = append(deps, Dependency{ + ID: dep.ID, + Version: dep.Version, + }) + } + if len(deps) > 0 { + m.Dependencies[group.TargetFramework] = deps + } + } + return &Package{ + PackageType: packageType, + ID: p.Metadata.ID, + Version: v.String(), + Metadata: m, + }, nil +} diff --git a/modules/packages/nuget/metadata_test.go b/modules/packages/nuget/metadata_test.go new file mode 100644 index 0000000000..e8c7773e97 --- /dev/null +++ b/modules/packages/nuget/metadata_test.go @@ -0,0 +1,163 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
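ParsePackageMetaData above takes an io.ReaderAt plus the archive size because archive/zip needs random access. When the .nupkg comes from disk rather than an in-memory buffer, a caller might wire it up as below (the file name is a placeholder; the import path follows the module layout used elsewhere in this change):

package main

import (
	"fmt"
	"os"

	"code.gitea.io/gitea/modules/packages/nuget"
)

func main() {
	f, err := os.Open("MyPackage.1.0.0.nupkg") // hypothetical package file
	if err != nil {
		panic(err)
	}
	defer f.Close()

	fi, err := f.Stat()
	if err != nil {
		panic(err)
	}

	// *os.File implements io.ReaderAt, so it can be passed straight to ParsePackageMetaData.
	p, err := nuget.ParsePackageMetaData(f, fi.Size())
	if err != nil {
		panic(err)
	}
	fmt.Println(p.ID, p.Version)
}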
+ +package nuget + +import ( + "archive/zip" + "bytes" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + id = "System.Gitea" + semver = "1.0.1" + authors = "Gitea Authors" + projectURL = "https://gitea.io" + description = "Package Description" + releaseNotes = "Package Release Notes" + repositoryURL = "https://gitea.io/gitea/gitea" + targetFramework = ".NETStandard2.1" + dependencyID = "System.Text.Json" + dependencyVersion = "5.0.0" +) + +const nuspecContent = ` + + + ` + id + ` + ` + semver + ` + ` + authors + ` + true + ` + projectURL + ` + ` + description + ` + ` + releaseNotes + ` + + + + + + + +` + +const symbolsNuspecContent = ` + + + ` + id + ` + ` + semver + ` + ` + description + ` + + + + + + + +` + +func TestParsePackageMetaData(t *testing.T) { + createArchive := func(name, content string) []byte { + var buf bytes.Buffer + archive := zip.NewWriter(&buf) + w, _ := archive.Create(name) + w.Write([]byte(content)) + archive.Close() + return buf.Bytes() + } + + t.Run("MissingNuspecFile", func(t *testing.T) { + data := createArchive("dummy.txt", "") + + np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data))) + assert.Nil(t, np) + assert.ErrorIs(t, err, ErrMissingNuspecFile) + }) + + t.Run("MissingNuspecFileInRoot", func(t *testing.T) { + data := createArchive("sub/package.nuspec", "") + + np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data))) + assert.Nil(t, np) + assert.ErrorIs(t, err, ErrMissingNuspecFile) + }) + + t.Run("InvalidNuspecFile", func(t *testing.T) { + data := createArchive("package.nuspec", "") + + np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data))) + assert.Nil(t, np) + assert.Error(t, err) + }) + + t.Run("InvalidPackageId", func(t *testing.T) { + data := createArchive("package.nuspec", ` + + + `) + + np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data))) + assert.Nil(t, np) + assert.ErrorIs(t, err, ErrNuspecInvalidID) + }) + + t.Run("InvalidPackageVersion", func(t *testing.T) { + data := createArchive("package.nuspec", ` + + + `+id+` + + `) + + np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data))) + assert.Nil(t, np) + assert.ErrorIs(t, err, ErrNuspecInvalidVersion) + }) + + t.Run("Valid", func(t *testing.T) { + data := createArchive("package.nuspec", nuspecContent) + + np, err := ParsePackageMetaData(bytes.NewReader(data), int64(len(data))) + assert.NoError(t, err) + assert.NotNil(t, np) + }) +} + +func TestParseNuspecMetaData(t *testing.T) { + t.Run("Dependency Package", func(t *testing.T) { + np, err := ParseNuspecMetaData(strings.NewReader(nuspecContent)) + assert.NoError(t, err) + assert.NotNil(t, np) + assert.Equal(t, DependencyPackage, np.PackageType) + + assert.Equal(t, id, np.ID) + assert.Equal(t, semver, np.Version) + assert.Equal(t, authors, np.Metadata.Authors) + assert.Equal(t, projectURL, np.Metadata.ProjectURL) + assert.Equal(t, description, np.Metadata.Description) + assert.Equal(t, releaseNotes, np.Metadata.ReleaseNotes) + assert.Equal(t, repositoryURL, np.Metadata.RepositoryURL) + assert.Len(t, np.Metadata.Dependencies, 1) + assert.Contains(t, np.Metadata.Dependencies, targetFramework) + deps := np.Metadata.Dependencies[targetFramework] + assert.Len(t, deps, 1) + assert.Equal(t, dependencyID, deps[0].ID) + assert.Equal(t, dependencyVersion, deps[0].Version) + }) + + t.Run("Symbols Package", func(t *testing.T) { + np, err := ParseNuspecMetaData(strings.NewReader(symbolsNuspecContent)) + assert.NoError(t, err) + assert.NotNil(t, 
np) + assert.Equal(t, SymbolsPackage, np.PackageType) + + assert.Equal(t, id, np.ID) + assert.Equal(t, semver, np.Version) + assert.Equal(t, description, np.Metadata.Description) + assert.Empty(t, np.Metadata.Dependencies) + }) +} diff --git a/modules/packages/nuget/symbol_extractor.go b/modules/packages/nuget/symbol_extractor.go new file mode 100644 index 0000000000..13641ca6ef --- /dev/null +++ b/modules/packages/nuget/symbol_extractor.go @@ -0,0 +1,187 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package nuget + +import ( + "archive/zip" + "bytes" + "encoding/binary" + "errors" + "fmt" + "io" + "path" + "path/filepath" + "strings" + + "code.gitea.io/gitea/modules/packages" +) + +var ( + ErrMissingPdbFiles = errors.New("Package does not contain PDB files") + ErrInvalidFiles = errors.New("Package contains invalid files") + ErrInvalidPdbMagicNumber = errors.New("Invalid Portable PDB magic number") + ErrMissingPdbStream = errors.New("Missing PDB stream") +) + +type PortablePdb struct { + Name string + ID string + Content *packages.HashedBuffer +} + +type PortablePdbList []*PortablePdb + +func (l PortablePdbList) Close() { + for _, pdb := range l { + pdb.Content.Close() + } +} + +// ExtractPortablePdb extracts PDB files from a .snupkg file +func ExtractPortablePdb(r io.ReaderAt, size int64) (PortablePdbList, error) { + archive, err := zip.NewReader(r, size) + if err != nil { + return nil, err + } + + var pdbs PortablePdbList + + err = func() error { + for _, file := range archive.File { + if strings.HasSuffix(file.Name, "/") { + continue + } + ext := strings.ToLower(filepath.Ext(file.Name)) + + switch ext { + case ".nuspec", ".xml", ".psmdcp", ".rels", ".p7s": + continue + case ".pdb": + f, err := archive.Open(file.Name) + if err != nil { + return err + } + + buf, err := packages.CreateHashedBufferFromReader(f, 32*1024*1024) + + f.Close() + + if err != nil { + return err + } + + id, err := ParseDebugHeaderID(buf) + if err != nil { + buf.Close() + return fmt.Errorf("Invalid PDB file: %v", err) + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + buf.Close() + return err + } + + pdbs = append(pdbs, &PortablePdb{ + Name: path.Base(file.Name), + ID: id, + Content: buf, + }) + default: + return ErrInvalidFiles + } + } + return nil + }() + if err != nil { + pdbs.Close() + return nil, err + } + + if len(pdbs) == 0 { + return nil, ErrMissingPdbFiles + } + + return pdbs, nil +} + +// ParseDebugHeaderID TODO +func ParseDebugHeaderID(r io.ReadSeeker) (string, error) { + var magic uint32 + if err := binary.Read(r, binary.LittleEndian, &magic); err != nil { + return "", err + } + if magic != 0x424A5342 { + return "", ErrInvalidPdbMagicNumber + } + + if _, err := r.Seek(8, io.SeekCurrent); err != nil { + return "", err + } + + var versionStringSize int32 + if err := binary.Read(r, binary.LittleEndian, &versionStringSize); err != nil { + return "", err + } + if _, err := r.Seek(int64(versionStringSize), io.SeekCurrent); err != nil { + return "", err + } + if _, err := r.Seek(2, io.SeekCurrent); err != nil { + return "", err + } + + var streamCount int16 + if err := binary.Read(r, binary.LittleEndian, &streamCount); err != nil { + return "", err + } + + read4ByteAlignedString := func(r io.Reader) (string, error) { + b := make([]byte, 4) + var buf bytes.Buffer + for { + if _, err := r.Read(b); err != nil { + return "", err + } + if i := bytes.IndexByte(b, 0); i != -1 { + 
buf.Write(b[:i]) + return buf.String(), nil + } + buf.Write(b) + } + } + + for i := 0; i < int(streamCount); i++ { + var offset uint32 + if err := binary.Read(r, binary.LittleEndian, &offset); err != nil { + return "", err + } + if _, err := r.Seek(4, io.SeekCurrent); err != nil { + return "", err + } + name, err := read4ByteAlignedString(r) + if err != nil { + return "", err + } + + if name == "#Pdb" { + if _, err := r.Seek(int64(offset), io.SeekStart); err != nil { + return "", err + } + + b := make([]byte, 16) + if _, err := r.Read(b); err != nil { + return "", err + } + + data1 := binary.LittleEndian.Uint32(b[0:4]) + data2 := binary.LittleEndian.Uint16(b[4:6]) + data3 := binary.LittleEndian.Uint16(b[6:8]) + data4 := b[8:16] + + return fmt.Sprintf("%08x%04x%04x%04x%012x", data1, data2, data3, data4[:2], data4[2:]), nil + } + } + + return "", ErrMissingPdbStream +} diff --git a/modules/packages/nuget/symbol_extractor_test.go b/modules/packages/nuget/symbol_extractor_test.go new file mode 100644 index 0000000000..892d718caa --- /dev/null +++ b/modules/packages/nuget/symbol_extractor_test.go @@ -0,0 +1,82 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package nuget + +import ( + "archive/zip" + "bytes" + "encoding/base64" + "testing" + + "github.com/stretchr/testify/assert" +) + +const pdbContent = `QlNKQgEAAQAAAAAADAAAAFBEQiB2MS4wAAAAAAAABgB8AAAAWAAAACNQZGIAAAAA1AAAAAgBAAAj +fgAA3AEAAAQAAAAjU3RyaW5ncwAAAADgAQAABAAAACNVUwDkAQAAMAAAACNHVUlEAAAAFAIAACgB +AAAjQmxvYgAAAGm7ENm9SGxMtAFVvPUsPJTF6PbtAAAAAFcVogEJAAAAAQAAAA==` + +func TestExtractPortablePdb(t *testing.T) { + createArchive := func(name string, content []byte) []byte { + var buf bytes.Buffer + archive := zip.NewWriter(&buf) + w, _ := archive.Create(name) + w.Write(content) + archive.Close() + return buf.Bytes() + } + + t.Run("MissingPdbFiles", func(t *testing.T) { + var buf bytes.Buffer + zip.NewWriter(&buf).Close() + + pdbs, err := ExtractPortablePdb(bytes.NewReader(buf.Bytes()), int64(buf.Len())) + assert.ErrorIs(t, err, ErrMissingPdbFiles) + assert.Empty(t, pdbs) + }) + + t.Run("InvalidFiles", func(t *testing.T) { + data := createArchive("sub/test.bin", []byte{}) + + pdbs, err := ExtractPortablePdb(bytes.NewReader(data), int64(len(data))) + assert.ErrorIs(t, err, ErrInvalidFiles) + assert.Empty(t, pdbs) + }) + + t.Run("Valid", func(t *testing.T) { + b, _ := base64.StdEncoding.DecodeString(pdbContent) + data := createArchive("test.pdb", b) + + pdbs, err := ExtractPortablePdb(bytes.NewReader(data), int64(len(data))) + assert.NoError(t, err) + assert.Len(t, pdbs, 1) + assert.Equal(t, "test.pdb", pdbs[0].Name) + assert.Equal(t, "d910bb6948bd4c6cb40155bcf52c3c94", pdbs[0].ID) + pdbs.Close() + }) +} + +func TestParseDebugHeaderID(t *testing.T) { + t.Run("InvalidPdbMagicNumber", func(t *testing.T) { + id, err := ParseDebugHeaderID(bytes.NewReader([]byte{0, 0, 0, 0})) + assert.ErrorIs(t, err, ErrInvalidPdbMagicNumber) + assert.Empty(t, id) + }) + + t.Run("MissingPdbStream", func(t *testing.T) { + b, _ := base64.StdEncoding.DecodeString(`QlNKQgEAAQAAAAAADAAAAFBEQiB2MS4wAAAAAAAAAQB8AAAAWAAAACNVUwA=`) + + id, err := ParseDebugHeaderID(bytes.NewReader(b)) + assert.ErrorIs(t, err, ErrMissingPdbStream) + assert.Empty(t, id) + }) + + t.Run("Valid", func(t *testing.T) { + b, _ := base64.StdEncoding.DecodeString(pdbContent) + + id, err := ParseDebugHeaderID(bytes.NewReader(b)) + assert.NoError(t, err) + assert.Equal(t, 
"d910bb6948bd4c6cb40155bcf52c3c94", id) + }) +} diff --git a/modules/packages/pypi/metadata.go b/modules/packages/pypi/metadata.go new file mode 100644 index 0000000000..df367d10e2 --- /dev/null +++ b/modules/packages/pypi/metadata.go @@ -0,0 +1,16 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package pypi + +// Metadata represents the metadata of a PyPI package +type Metadata struct { + Author string `json:"author,omitempty"` + Description string `json:"description,omitempty"` + LongDescription string `json:"long_description,omitempty"` + Summary string `json:"summary,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + License string `json:"license,omitempty"` + RequiresPython string `json:"requires_python,omitempty"` +} diff --git a/modules/packages/rubygems/marshal.go b/modules/packages/rubygems/marshal.go new file mode 100644 index 0000000000..2c45042fa8 --- /dev/null +++ b/modules/packages/rubygems/marshal.go @@ -0,0 +1,311 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package rubygems + +import ( + "bufio" + "bytes" + "errors" + "io" + "reflect" +) + +const ( + majorVersion = 4 + minorVersion = 8 + + typeNil = '0' + typeTrue = 'T' + typeFalse = 'F' + typeFixnum = 'i' + typeString = '"' + typeSymbol = ':' + typeSymbolLink = ';' + typeArray = '[' + typeIVar = 'I' + typeUserMarshal = 'U' + typeUserDef = 'u' + typeObject = 'o' +) + +var ( + // ErrUnsupportedType indicates an unsupported type + ErrUnsupportedType = errors.New("Type is unsupported") + // ErrInvalidIntRange indicates an invalid number range + ErrInvalidIntRange = errors.New("Number is not in valid range") +) + +// RubyUserMarshal is a Ruby object that has a marshal_load function. +type RubyUserMarshal struct { + Name string + Value interface{} +} + +// RubyUserDef is a Ruby object that has a _load function. +type RubyUserDef struct { + Name string + Value interface{} +} + +// RubyObject is a default Ruby object. +type RubyObject struct { + Name string + Member map[string]interface{} +} + +// MarshalEncoder mimics Rubys Marshal class. +// Note: Only supports types used by the RubyGems package registry. 
+type MarshalEncoder struct { + w *bufio.Writer + symbols map[string]int +} + +// NewMarshalEncoder creates a new MarshalEncoder +func NewMarshalEncoder(w io.Writer) *MarshalEncoder { + return &MarshalEncoder{ + w: bufio.NewWriter(w), + symbols: map[string]int{}, + } +} + +// Encode encodes the given type +func (e *MarshalEncoder) Encode(v interface{}) error { + if _, err := e.w.Write([]byte{majorVersion, minorVersion}); err != nil { + return err + } + + if err := e.marshal(v); err != nil { + return err + } + + return e.w.Flush() +} + +func (e *MarshalEncoder) marshal(v interface{}) error { + if v == nil { + return e.marshalNil() + } + + val := reflect.ValueOf(v) + typ := reflect.TypeOf(v) + + if typ.Kind() == reflect.Ptr { + val = val.Elem() + typ = typ.Elem() + } + + switch typ.Kind() { + case reflect.Bool: + return e.marshalBool(val.Bool()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32: + return e.marshalInt(val.Int()) + case reflect.String: + return e.marshalString(val.String()) + case reflect.Slice, reflect.Array: + return e.marshalArray(val) + } + + switch typ.Name() { + case "RubyUserMarshal": + return e.marshalUserMarshal(val.Interface().(RubyUserMarshal)) + case "RubyUserDef": + return e.marshalUserDef(val.Interface().(RubyUserDef)) + case "RubyObject": + return e.marshalObject(val.Interface().(RubyObject)) + } + + return ErrUnsupportedType +} + +func (e *MarshalEncoder) marshalNil() error { + return e.w.WriteByte(typeNil) +} + +func (e *MarshalEncoder) marshalBool(b bool) error { + if b { + return e.w.WriteByte(typeTrue) + } + return e.w.WriteByte(typeFalse) +} + +func (e *MarshalEncoder) marshalInt(i int64) error { + if err := e.w.WriteByte(typeFixnum); err != nil { + return err + } + + return e.marshalIntInternal(i) +} + +func (e *MarshalEncoder) marshalIntInternal(i int64) error { + if i == 0 { + return e.w.WriteByte(0) + } else if 0 < i && i < 123 { + return e.w.WriteByte(byte(i + 5)) + } else if -124 < i && i <= -1 { + return e.w.WriteByte(byte(i - 5)) + } + + var len int + if 122 < i && i <= 0xff { + len = 1 + } else if 0xff < i && i <= 0xffff { + len = 2 + } else if 0xffff < i && i <= 0xffffff { + len = 3 + } else if 0xffffff < i && i <= 0x3fffffff { + len = 4 + } else if -0x100 <= i && i < -123 { + len = -1 + } else if -0x10000 <= i && i < -0x100 { + len = -2 + } else if -0x1000000 <= i && i < -0x100000 { + len = -3 + } else if -0x40000000 <= i && i < -0x1000000 { + len = -4 + } else { + return ErrInvalidIntRange + } + + if err := e.w.WriteByte(byte(len)); err != nil { + return err + } + if len < 0 { + len = -len + } + + for c := 0; c < len; c++ { + if err := e.w.WriteByte(byte(i >> uint(8*c) & 0xff)); err != nil { + return err + } + } + + return nil +} + +func (e *MarshalEncoder) marshalString(str string) error { + if err := e.w.WriteByte(typeIVar); err != nil { + return err + } + + if err := e.marshalRawString(str); err != nil { + return err + } + + if err := e.marshalIntInternal(1); err != nil { + return err + } + + if err := e.marshalSymbol("E"); err != nil { + return err + } + + return e.marshalBool(true) +} + +func (e *MarshalEncoder) marshalRawString(str string) error { + if err := e.w.WriteByte(typeString); err != nil { + return err + } + + if err := e.marshalIntInternal(int64(len(str))); err != nil { + return err + } + + _, err := e.w.WriteString(str) + return err +} + +func (e *MarshalEncoder) marshalSymbol(str string) error { + if index, ok := e.symbols[str]; ok { + if err := e.w.WriteByte(typeSymbolLink); err != nil { + return err + } + return 
e.marshalIntInternal(int64(index)) + } + + e.symbols[str] = len(e.symbols) + + if err := e.w.WriteByte(typeSymbol); err != nil { + return err + } + + if err := e.marshalIntInternal(int64(len(str))); err != nil { + return err + } + + _, err := e.w.WriteString(str) + return err +} + +func (e *MarshalEncoder) marshalArray(arr reflect.Value) error { + if err := e.w.WriteByte(typeArray); err != nil { + return err + } + + len := arr.Len() + + if err := e.marshalIntInternal(int64(len)); err != nil { + return err + } + + for i := 0; i < len; i++ { + if err := e.marshal(arr.Index(i).Interface()); err != nil { + return err + } + } + return nil +} + +func (e *MarshalEncoder) marshalUserMarshal(userMarshal RubyUserMarshal) error { + if err := e.w.WriteByte(typeUserMarshal); err != nil { + return err + } + + if err := e.marshalSymbol(userMarshal.Name); err != nil { + return err + } + + return e.marshal(userMarshal.Value) +} + +func (e *MarshalEncoder) marshalUserDef(userDef RubyUserDef) error { + var buf bytes.Buffer + if err := NewMarshalEncoder(&buf).Encode(userDef.Value); err != nil { + return err + } + + if err := e.w.WriteByte(typeUserDef); err != nil { + return err + } + if err := e.marshalSymbol(userDef.Name); err != nil { + return err + } + if err := e.marshalIntInternal(int64(buf.Len())); err != nil { + return err + } + _, err := e.w.Write(buf.Bytes()) + return err +} + +func (e *MarshalEncoder) marshalObject(obj RubyObject) error { + if err := e.w.WriteByte(typeObject); err != nil { + return err + } + if err := e.marshalSymbol(obj.Name); err != nil { + return err + } + if err := e.marshalIntInternal(int64(len(obj.Member))); err != nil { + return err + } + for k, v := range obj.Member { + if err := e.marshalSymbol(k); err != nil { + return err + } + if err := e.marshal(v); err != nil { + return err + } + } + return nil +} diff --git a/modules/packages/rubygems/marshal_test.go b/modules/packages/rubygems/marshal_test.go new file mode 100644 index 0000000000..e5963ebcd6 --- /dev/null +++ b/modules/packages/rubygems/marshal_test.go @@ -0,0 +1,99 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package rubygems + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestMinimalEncoder(t *testing.T) { + cases := []struct { + Value interface{} + Expected []byte + Error error + }{ + { + Value: nil, + Expected: []byte{4, 8, 0x30}, + }, + { + Value: true, + Expected: []byte{4, 8, 'T'}, + }, + { + Value: false, + Expected: []byte{4, 8, 'F'}, + }, + { + Value: 0, + Expected: []byte{4, 8, 'i', 0}, + }, + { + Value: 1, + Expected: []byte{4, 8, 'i', 6}, + }, + { + Value: -1, + Expected: []byte{4, 8, 'i', 0xfa}, + }, + { + Value: 0x1fffffff, + Expected: []byte{4, 8, 'i', 4, 0xff, 0xff, 0xff, 0x1f}, + }, + { + Value: 0x41000000, + Error: ErrInvalidIntRange, + }, + { + Value: "test", + Expected: []byte{4, 8, 'I', '"', 9, 't', 'e', 's', 't', 6, ':', 6, 'E', 'T'}, + }, + { + Value: []int{1, 2}, + Expected: []byte{4, 8, '[', 7, 'i', 6, 'i', 7}, + }, + { + Value: &RubyUserMarshal{ + Name: "Test", + Value: 4, + }, + Expected: []byte{4, 8, 'U', ':', 9, 'T', 'e', 's', 't', 'i', 9}, + }, + { + Value: &RubyUserDef{ + Name: "Test", + Value: 4, + }, + Expected: []byte{4, 8, 'u', ':', 9, 'T', 'e', 's', 't', 9, 4, 8, 'i', 9}, + }, + { + Value: &RubyObject{ + Name: "Test", + Member: map[string]interface{}{ + "test": 4, + }, + }, + Expected: []byte{4, 8, 'o', ':', 9, 'T', 'e', 's', 't', 6, ':', 9, 't', 'e', 's', 't', 'i', 9}, + }, + { + Value: &struct { + Name string + }{ + "test", + }, + Error: ErrUnsupportedType, + }, + } + + for i, c := range cases { + var b bytes.Buffer + err := NewMarshalEncoder(&b).Encode(c.Value) + assert.ErrorIs(t, err, c.Error) + assert.Equal(t, c.Expected, b.Bytes(), "case %d", i) + } +} diff --git a/modules/packages/rubygems/metadata.go b/modules/packages/rubygems/metadata.go new file mode 100644 index 0000000000..942f205fc3 --- /dev/null +++ b/modules/packages/rubygems/metadata.go @@ -0,0 +1,222 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
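// Usage sketch (illustrative): a .gem file is a plain tar archive whose metadata.gz
// entry holds the gzipped gemspec YAML, which is exactly what ParsePackageMetaData
// below walks through. The file name is hypothetical; assumes imports "os", "log", "fmt".
//
//	f, err := os.Open("gitea-1.0.5.gem") // hypothetical path
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer f.Close()
//	rp, err := rubygems.ParsePackageMetaData(f)
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(rp.Name, rp.Version, rp.Metadata.Licenses)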
+ +package rubygems + +import ( + "archive/tar" + "compress/gzip" + "errors" + "io" + "regexp" + "strings" + + "code.gitea.io/gitea/modules/validation" + + "gopkg.in/yaml.v2" +) + +var ( + // ErrMissingMetadataFile indicates a missing metadata.gz file + ErrMissingMetadataFile = errors.New("Metadata file is missing") + // ErrInvalidName indicates an invalid id in the metadata.gz file + ErrInvalidName = errors.New("Metadata file contains an invalid name") + // ErrInvalidVersion indicates an invalid version in the metadata.gz file + ErrInvalidVersion = errors.New("Metadata file contains an invalid version") +) + +var versionMatcher = regexp.MustCompile(`\A[0-9]+(?:\.[0-9a-zA-Z]+)*(?:-[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?\z`) + +// Package represents a RubyGems package +type Package struct { + Name string + Version string + Metadata *Metadata +} + +// Metadata represents the metadata of a RubyGems package +type Metadata struct { + Platform string `json:"platform,omitempty"` + Description string `json:"description,omitempty"` + Summary string `json:"summary,omitempty"` + Authors []string `json:"authors,omitempty"` + Licenses []string `json:"licenses,omitempty"` + RequiredRubyVersion []VersionRequirement `json:"required_ruby_version,omitempty"` + RequiredRubygemsVersion []VersionRequirement `json:"required_rubygems_version,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + RuntimeDependencies []Dependency `json:"runtime_dependencies,omitempty"` + DevelopmentDependencies []Dependency `json:"development_dependencies,omitempty"` +} + +// VersionRequirement represents a version restriction +type VersionRequirement struct { + Restriction string `json:"restriction"` + Version string `json:"version"` +} + +// Dependency represents a dependency of a RubyGems package +type Dependency struct { + Name string `json:"name"` + Version []VersionRequirement `json:"version"` +} + +type gemspec struct { + Name string `yaml:"name"` + Version struct { + Version string `yaml:"version"` + } `yaml:"version"` + Platform string `yaml:"platform"` + Authors []string `yaml:"authors"` + Autorequire interface{} `yaml:"autorequire"` + Bindir string `yaml:"bindir"` + CertChain []interface{} `yaml:"cert_chain"` + Date string `yaml:"date"` + Dependencies []struct { + Name string `yaml:"name"` + Requirement requirement `yaml:"requirement"` + Type string `yaml:"type"` + Prerelease bool `yaml:"prerelease"` + VersionRequirements requirement `yaml:"version_requirements"` + } `yaml:"dependencies"` + Description string `yaml:"description"` + Email string `yaml:"email"` + Executables []string `yaml:"executables"` + Extensions []interface{} `yaml:"extensions"` + ExtraRdocFiles []string `yaml:"extra_rdoc_files"` + Files []string `yaml:"files"` + Homepage string `yaml:"homepage"` + Licenses []string `yaml:"licenses"` + Metadata struct { + BugTrackerURI string `yaml:"bug_tracker_uri"` + ChangelogURI string `yaml:"changelog_uri"` + DocumentationURI string `yaml:"documentation_uri"` + SourceCodeURI string `yaml:"source_code_uri"` + } `yaml:"metadata"` + PostInstallMessage interface{} `yaml:"post_install_message"` + RdocOptions []interface{} `yaml:"rdoc_options"` + RequirePaths []string `yaml:"require_paths"` + RequiredRubyVersion requirement `yaml:"required_ruby_version"` + RequiredRubygemsVersion requirement `yaml:"required_rubygems_version"` + Requirements []interface{} `yaml:"requirements"` + RubygemsVersion string `yaml:"rubygems_version"` + SigningKey interface{} `yaml:"signing_key"` + SpecificationVersion int 
`yaml:"specification_version"` + Summary string `yaml:"summary"` + TestFiles []interface{} `yaml:"test_files"` +} + +type requirement struct { + Requirements [][]interface{} `yaml:"requirements"` +} + +// AsVersionRequirement converts into []VersionRequirement +func (r requirement) AsVersionRequirement() []VersionRequirement { + requirements := make([]VersionRequirement, 0, len(r.Requirements)) + for _, req := range r.Requirements { + if len(req) != 2 { + continue + } + restriction, ok := req[0].(string) + if !ok { + continue + } + vm, ok := req[1].(map[interface{}]interface{}) + if !ok { + continue + } + versionInt, ok := vm["version"] + if !ok { + continue + } + version, ok := versionInt.(string) + if !ok || version == "0" { + continue + } + + requirements = append(requirements, VersionRequirement{ + Restriction: restriction, + Version: version, + }) + } + return requirements +} + +// ParsePackageMetaData parses the metadata of a Gem package file +func ParsePackageMetaData(r io.Reader) (*Package, error) { + archive := tar.NewReader(r) + for { + hdr, err := archive.Next() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + if hdr.Name == "metadata.gz" { + return parseMetadataFile(archive) + } + } + + return nil, ErrMissingMetadataFile +} + +func parseMetadataFile(r io.Reader) (*Package, error) { + zr, err := gzip.NewReader(r) + if err != nil { + return nil, err + } + defer zr.Close() + + var spec gemspec + if err := yaml.NewDecoder(zr).Decode(&spec); err != nil { + return nil, err + } + + if len(spec.Name) == 0 || strings.Contains(spec.Name, "/") { + return nil, ErrInvalidName + } + + if !versionMatcher.MatchString(spec.Version.Version) { + return nil, ErrInvalidVersion + } + + if !validation.IsValidURL(spec.Homepage) { + spec.Homepage = "" + } + if !validation.IsValidURL(spec.Metadata.SourceCodeURI) { + spec.Metadata.SourceCodeURI = "" + } + + m := &Metadata{ + Platform: spec.Platform, + Description: spec.Description, + Summary: spec.Summary, + Authors: spec.Authors, + Licenses: spec.Licenses, + ProjectURL: spec.Homepage, + RequiredRubyVersion: spec.RequiredRubyVersion.AsVersionRequirement(), + RequiredRubygemsVersion: spec.RequiredRubygemsVersion.AsVersionRequirement(), + DevelopmentDependencies: make([]Dependency, 0, 5), + RuntimeDependencies: make([]Dependency, 0, 5), + } + + for _, gemdep := range spec.Dependencies { + dep := Dependency{ + Name: gemdep.Name, + Version: gemdep.Requirement.AsVersionRequirement(), + } + if gemdep.Type == ":runtime" { + m.RuntimeDependencies = append(m.RuntimeDependencies, dep) + } else { + m.DevelopmentDependencies = append(m.DevelopmentDependencies, dep) + } + } + + return &Package{ + Name: spec.Name, + Version: spec.Version.Version, + Metadata: m, + }, nil +} diff --git a/modules/packages/rubygems/metadata_test.go b/modules/packages/rubygems/metadata_test.go new file mode 100644 index 0000000000..dbefa9c236 --- /dev/null +++ b/modules/packages/rubygems/metadata_test.go @@ -0,0 +1,89 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package rubygems + +import ( + "archive/tar" + "bytes" + "encoding/base64" + "io" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParsePackageMetaData(t *testing.T) { + createArchive := func(filename string, content []byte) io.Reader { + var buf bytes.Buffer + tw := tar.NewWriter(&buf) + hdr := &tar.Header{ + Name: filename, + Mode: 0o600, + Size: int64(len(content)), + } + tw.WriteHeader(hdr) + tw.Write(content) + tw.Close() + return &buf + } + + t.Run("MissingMetadataFile", func(t *testing.T) { + data := createArchive("dummy.txt", []byte{0}) + + rp, err := ParsePackageMetaData(data) + assert.ErrorIs(t, err, ErrMissingMetadataFile) + assert.Nil(t, rp) + }) + + t.Run("Valid", func(t *testing.T) { + content, _ := base64.StdEncoding.DecodeString("H4sICHC/I2EEAG1ldGFkYXRhAAEeAOH/bmFtZTogZwp2ZXJzaW9uOgogIHZlcnNpb246IDEKWw35Tx4AAAA=") + data := createArchive("metadata.gz", content) + + rp, err := ParsePackageMetaData(data) + assert.NoError(t, err) + assert.NotNil(t, rp) + }) +} + +func TestParseMetadataFile(t *testing.T) { + content, _ := base64.StdEncoding.DecodeString(`H4sIAMe7I2ECA9VVTW/UMBC9+1eYXvaUbJpSQBZUHJAqDlwK4kCFIseZzZrGH9iTqisEv52Js9nd +0KqggiqRXWnX45n3ZuZ5nCzL+JPQ15ulq7+AQnEORoj3HpReaSVRO8usNCB4qxEku4YQySbuCPo4 +bjHOd07HeZGfMt9JXLlgBB9imOxx7UIULOPnCZMMLsDXXgeiYbW2jQ6C0y9TELBSa6kJ6/IzaySS +R1mUx1nxIitPeFGI9M2L6eGfWAMebANWaUgktzN9M3lsKNmxutBb1AYyCibbNhsDFu+q9GK/Tc4z +d2IcLBl9js5eHaXFsLyvXeNz0LQyL/YoLx8EsiCMBZlx46k6sS2PDD5AgA5kJPNKdhH2elWzOv7n +uv9Q9Aau/6ngP84elvNpXh5oRVlB5/yW7BH0+qu0G4gqaI/JdEHBFBS5l+pKtsARIjIwUnfj8Le0 ++TrdJLl2DG5A9SjrjgZ1mG+4QbAD+G4ZZBUap6qVnnzGf6Rwp+vliBRqtnYGPBEKvkb0USyXE8mS +dVoR6hj07u0HZgAl3SRS8G/fmXcRK20jyq6rDMSYQFgidamqkXbbuspLXE/0k7GphtKqe67GuRC/ +yjAbmt9LsOMp8xMamFkSQ38fP5EFjdz8LA4do2C69VvqWXAJgrPbKZb58/xZXrKoW6ttW13Bhvzi +4ftn7/yUxd4YGcglvTmmY8aGY3ZwRn4CqcWcidUGAAA=`) + rp, err := parseMetadataFile(bytes.NewReader(content)) + assert.NoError(t, err) + assert.NotNil(t, rp) + + assert.Equal(t, "gitea", rp.Name) + assert.Equal(t, "1.0.5", rp.Version) + assert.Equal(t, "ruby", rp.Metadata.Platform) + assert.Equal(t, "Gitea package", rp.Metadata.Summary) + assert.Equal(t, "RubyGems package test", rp.Metadata.Description) + assert.Equal(t, []string{"Gitea"}, rp.Metadata.Authors) + assert.Equal(t, "https://gitea.io/", rp.Metadata.ProjectURL) + assert.Equal(t, []string{"MIT"}, rp.Metadata.Licenses) + assert.Empty(t, rp.Metadata.RequiredRubygemsVersion) + assert.Len(t, rp.Metadata.RequiredRubyVersion, 1) + assert.Equal(t, ">=", rp.Metadata.RequiredRubyVersion[0].Restriction) + assert.Equal(t, "2.3.0", rp.Metadata.RequiredRubyVersion[0].Version) + assert.Len(t, rp.Metadata.RuntimeDependencies, 1) + assert.Equal(t, "runtime-dep", rp.Metadata.RuntimeDependencies[0].Name) + assert.Len(t, rp.Metadata.RuntimeDependencies[0].Version, 2) + assert.Equal(t, ">=", rp.Metadata.RuntimeDependencies[0].Version[0].Restriction) + assert.Equal(t, "1.2.0", rp.Metadata.RuntimeDependencies[0].Version[0].Version) + assert.Equal(t, "<", rp.Metadata.RuntimeDependencies[0].Version[1].Restriction) + assert.Equal(t, "2.0", rp.Metadata.RuntimeDependencies[0].Version[1].Version) + assert.Len(t, rp.Metadata.DevelopmentDependencies, 1) + assert.Equal(t, "dev-dep", rp.Metadata.DevelopmentDependencies[0].Name) + assert.Len(t, rp.Metadata.DevelopmentDependencies[0].Version, 1) + assert.Equal(t, "~>", rp.Metadata.DevelopmentDependencies[0].Version[0].Restriction) + assert.Equal(t, "5.2", rp.Metadata.DevelopmentDependencies[0].Version[0].Version) +} diff --git 
a/modules/paginator/paginator.go b/modules/paginator/paginator.go new file mode 100644 index 0000000000..873cfe49d4 --- /dev/null +++ b/modules/paginator/paginator.go @@ -0,0 +1,203 @@ +// Copyright 2022 The Gitea Authors. +// Copyright 2015 Unknwon. Licensed under the Apache License, Version 2.0 + +package paginator + +/* +In template: + +```html +{{if not .Page.IsFirst}}[First](1){{end}} +{{if .Page.HasPrevious}}[Previous]({{.Page.Previous}}){{end}} + +{{range .Page.Pages}} + {{if eq .Num -1}} + ... + {{else}} + {{.Num}}{{if .IsCurrent}}(current){{end}} + {{end}} +{{end}} + +{{if .Page.HasNext}}[Next]({{.Page.Next}}){{end}} +{{if not .Page.IsLast}}[Last]({{.Page.TotalPages}}){{end}} +``` + +Output: + +``` +[First](1) [Previous](2) ... 2 3(current) 4 ... [Next](4) [Last](5) +``` +*/ + +// Paginator represents a set of results of pagination calculations. +type Paginator struct { + total int // total rows count + pagingNum int // how many rows in one page + current int // current page number + numPages int // how many pages to show on the UI +} + +// New initialize a new pagination calculation and returns a Paginator as result. +func New(total, pagingNum, current, numPages int) *Paginator { + if pagingNum <= 0 { + pagingNum = 1 + } + if current <= 0 { + current = 1 + } + p := &Paginator{total, pagingNum, current, numPages} + if p.current > p.TotalPages() { + p.current = p.TotalPages() + } + return p +} + +// IsFirst returns true if current page is the first page. +func (p *Paginator) IsFirst() bool { + return p.current == 1 +} + +// HasPrevious returns true if there is a previous page relative to current page. +func (p *Paginator) HasPrevious() bool { + return p.current > 1 +} + +func (p *Paginator) Previous() int { + if !p.HasPrevious() { + return p.current + } + return p.current - 1 +} + +// HasNext returns true if there is a next page relative to current page. +func (p *Paginator) HasNext() bool { + return p.total > p.current*p.pagingNum +} + +func (p *Paginator) Next() int { + if !p.HasNext() { + return p.current + } + return p.current + 1 +} + +// IsLast returns true if current page is the last page. +func (p *Paginator) IsLast() bool { + if p.total == 0 { + return true + } + return p.total > (p.current-1)*p.pagingNum && !p.HasNext() +} + +// Total returns number of total rows. +func (p *Paginator) Total() int { + return p.total +} + +// TotalPages returns number of total pages. +func (p *Paginator) TotalPages() int { + if p.total == 0 { + return 1 + } + return (p.total + p.pagingNum - 1) / p.pagingNum +} + +// Current returns current page number. +func (p *Paginator) Current() int { + return p.current +} + +// PagingNum returns number of page size. +func (p *Paginator) PagingNum() int { + return p.pagingNum +} + +// Page presents a page in the paginator. +type Page struct { + num int + isCurrent bool +} + +func (p *Page) Num() int { + return p.num +} + +func (p *Page) IsCurrent() bool { + return p.isCurrent +} + +func getMiddleIdx(numPages int) int { + return (numPages + 1) / 2 +} + +// Pages returns a list of nearby page numbers relative to current page. +// If value is -1 means "..." that more pages are not showing. +func (p *Paginator) Pages() []*Page { + if p.numPages == 0 { + return []*Page{} + } else if p.numPages == 1 && p.TotalPages() == 1 { + // Only show current page. + return []*Page{{1, true}} + } + + // Total page number is less or equal. 
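// For example, total=25 with pagingNum=10 gives TotalPages()==3; with numPages>=3
// all three pages are returned here and only the current one is flagged, as the
// New(25, 10, 2, 3) test case further down verifies.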
+ if p.TotalPages() <= p.numPages { + pages := make([]*Page, p.TotalPages()) + for i := range pages { + pages[i] = &Page{i + 1, i+1 == p.current} + } + return pages + } + + numPages := p.numPages + offsetIdx := 0 + hasMoreNext := false + + // Check more previous and next pages. + previousNum := getMiddleIdx(p.numPages) - 1 + if previousNum > p.current-1 { + previousNum -= previousNum - (p.current - 1) + } + nextNum := p.numPages - previousNum - 1 + if p.current+nextNum > p.TotalPages() { + delta := nextNum - (p.TotalPages() - p.current) + nextNum -= delta + previousNum += delta + } + + offsetVal := p.current - previousNum + if offsetVal > 1 { + numPages++ + offsetIdx = 1 + } + + if p.current+nextNum < p.TotalPages() { + numPages++ + hasMoreNext = true + } + + pages := make([]*Page, numPages) + + // There are more previous pages. + if offsetIdx == 1 { + pages[0] = &Page{-1, false} + } + // There are more next pages. + if hasMoreNext { + pages[len(pages)-1] = &Page{-1, false} + } + + // Check previous pages. + for i := 0; i < previousNum; i++ { + pages[offsetIdx+i] = &Page{i + offsetVal, false} + } + + pages[offsetIdx+previousNum] = &Page{p.current, true} + + // Check next pages. + for i := 1; i <= nextNum; i++ { + pages[offsetIdx+previousNum+i] = &Page{p.current + i, false} + } + + return pages +} diff --git a/modules/paginator/paginator_test.go b/modules/paginator/paginator_test.go new file mode 100644 index 0000000000..ce7b7275e1 --- /dev/null +++ b/modules/paginator/paginator_test.go @@ -0,0 +1,311 @@ +// Copyright 2022 The Gitea Authors. +// Copyright 2015 Unknwon. Licensed under the Apache License, Version 2.0 + +package paginator + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPaginator(t *testing.T) { + t.Run("Basic logics", func(t *testing.T) { + p := New(0, -1, -1, 0) + assert.Equal(t, 1, p.PagingNum()) + assert.True(t, p.IsFirst()) + assert.False(t, p.HasPrevious()) + assert.Equal(t, 1, p.Previous()) + assert.False(t, p.HasNext()) + assert.Equal(t, 1, p.Next()) + assert.True(t, p.IsLast()) + assert.Equal(t, 0, p.Total()) + + p = New(1, 10, 2, 0) + assert.Equal(t, 10, p.PagingNum()) + assert.True(t, p.IsFirst()) + assert.False(t, p.HasPrevious()) + assert.False(t, p.HasNext()) + assert.True(t, p.IsLast()) + + p = New(10, 10, 1, 0) + assert.Equal(t, 10, p.PagingNum()) + assert.True(t, p.IsFirst()) + assert.False(t, p.HasPrevious()) + assert.False(t, p.HasNext()) + assert.True(t, p.IsLast()) + + p = New(11, 10, 1, 0) + assert.Equal(t, 10, p.PagingNum()) + assert.True(t, p.IsFirst()) + assert.False(t, p.HasPrevious()) + assert.True(t, p.HasNext()) + assert.Equal(t, 2, p.Next()) + assert.False(t, p.IsLast()) + + p = New(11, 10, 2, 0) + assert.Equal(t, 10, p.PagingNum()) + assert.False(t, p.IsFirst()) + assert.True(t, p.HasPrevious()) + assert.Equal(t, 1, p.Previous()) + assert.False(t, p.HasNext()) + assert.True(t, p.IsLast()) + + p = New(20, 10, 2, 0) + assert.Equal(t, 10, p.PagingNum()) + assert.False(t, p.IsFirst()) + assert.True(t, p.HasPrevious()) + assert.False(t, p.HasNext()) + assert.True(t, p.IsLast()) + + p = New(25, 10, 2, 0) + assert.Equal(t, 10, p.PagingNum()) + assert.False(t, p.IsFirst()) + assert.True(t, p.HasPrevious()) + assert.True(t, p.HasNext()) + assert.False(t, p.IsLast()) + }) + + t.Run("Generate pages", func(t *testing.T) { + p := New(0, 10, 1, 0) + pages := p.Pages() + assert.Equal(t, 0, len(pages)) + }) + + t.Run("Only current page", func(t *testing.T) { + p := New(0, 10, 1, 1) + pages := p.Pages() + assert.Equal(t, 1, 
len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.True(t, pages[0].IsCurrent()) + + p = New(1, 10, 1, 1) + pages = p.Pages() + assert.Equal(t, 1, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.True(t, pages[0].IsCurrent()) + }) + + t.Run("Total page number is less or equal", func(t *testing.T) { + p := New(1, 10, 1, 2) + pages := p.Pages() + assert.Equal(t, 1, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.True(t, pages[0].IsCurrent()) + + p = New(11, 10, 1, 2) + pages = p.Pages() + assert.Equal(t, 2, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.True(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.False(t, pages[1].IsCurrent()) + + p = New(11, 10, 2, 2) + pages = p.Pages() + assert.Equal(t, 2, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.True(t, pages[1].IsCurrent()) + + p = New(25, 10, 2, 3) + pages = p.Pages() + assert.Equal(t, 3, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.True(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + }) + + t.Run("Has more previous pages ", func(t *testing.T) { + // ... 2 + p := New(11, 10, 2, 1) + pages := p.Pages() + assert.Equal(t, 2, len(pages)) + assert.Equal(t, -1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.True(t, pages[1].IsCurrent()) + + // ... 2 3 + p = New(21, 10, 2, 2) + pages = p.Pages() + assert.Equal(t, 3, len(pages)) + assert.Equal(t, -1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.True(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + + // ... 2 3 4 + p = New(31, 10, 3, 3) + pages = p.Pages() + assert.Equal(t, 4, len(pages)) + assert.Equal(t, -1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.False(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.True(t, pages[2].IsCurrent()) + assert.Equal(t, 4, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + + // ... 3 4 5 + p = New(41, 10, 4, 3) + pages = p.Pages() + assert.Equal(t, 4, len(pages)) + assert.Equal(t, -1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 3, pages[1].Num()) + assert.False(t, pages[1].IsCurrent()) + assert.Equal(t, 4, pages[2].Num()) + assert.True(t, pages[2].IsCurrent()) + assert.Equal(t, 5, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + + // ... 4 5 6 7 8 9 10 + p = New(100, 10, 9, 7) + pages = p.Pages() + assert.Equal(t, 8, len(pages)) + assert.Equal(t, -1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 4, pages[1].Num()) + assert.False(t, pages[1].IsCurrent()) + assert.Equal(t, 5, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + assert.Equal(t, 6, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + assert.Equal(t, 7, pages[4].Num()) + assert.False(t, pages[4].IsCurrent()) + assert.Equal(t, 8, pages[5].Num()) + assert.False(t, pages[5].IsCurrent()) + assert.Equal(t, 9, pages[6].Num()) + assert.True(t, pages[6].IsCurrent()) + assert.Equal(t, 10, pages[7].Num()) + assert.False(t, pages[7].IsCurrent()) + }) + + t.Run("Has more next pages", func(t *testing.T) { + // 1 ... 
+ p := New(21, 10, 1, 1) + pages := p.Pages() + assert.Equal(t, 2, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.True(t, pages[0].IsCurrent()) + assert.Equal(t, -1, pages[1].Num()) + assert.False(t, pages[1].IsCurrent()) + + // 1 2 ... + p = New(21, 10, 1, 2) + pages = p.Pages() + assert.Equal(t, 3, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.True(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.False(t, pages[1].IsCurrent()) + assert.Equal(t, -1, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + + // 1 2 3 ... + p = New(31, 10, 2, 3) + pages = p.Pages() + assert.Equal(t, 4, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.True(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + assert.Equal(t, -1, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + + // 1 2 3 ... + p = New(41, 10, 2, 3) + pages = p.Pages() + assert.Equal(t, 4, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.True(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + assert.Equal(t, -1, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + + // 1 2 3 4 5 6 7 ... + p = New(100, 10, 1, 7) + pages = p.Pages() + assert.Equal(t, 8, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.True(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.False(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + assert.Equal(t, 4, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + assert.Equal(t, 5, pages[4].Num()) + assert.False(t, pages[4].IsCurrent()) + assert.Equal(t, 6, pages[5].Num()) + assert.False(t, pages[5].IsCurrent()) + assert.Equal(t, 7, pages[6].Num()) + assert.False(t, pages[6].IsCurrent()) + assert.Equal(t, -1, pages[7].Num()) + assert.False(t, pages[7].IsCurrent()) + + // 1 2 3 4 5 6 7 ... + p = New(100, 10, 2, 7) + pages = p.Pages() + assert.Equal(t, 8, len(pages)) + assert.Equal(t, 1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.True(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + assert.Equal(t, 4, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + assert.Equal(t, 5, pages[4].Num()) + assert.False(t, pages[4].IsCurrent()) + assert.Equal(t, 6, pages[5].Num()) + assert.False(t, pages[5].IsCurrent()) + assert.Equal(t, 7, pages[6].Num()) + assert.False(t, pages[6].IsCurrent()) + assert.Equal(t, -1, pages[7].Num()) + assert.False(t, pages[7].IsCurrent()) + }) + + t.Run("Has both more previous and next pages", func(t *testing.T) { + // ... 2 3 ... + p := New(35, 10, 2, 2) + pages := p.Pages() + assert.Equal(t, 4, len(pages)) + assert.Equal(t, -1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.True(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.False(t, pages[2].IsCurrent()) + assert.Equal(t, -1, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + + // ... 2 3 4 ... 
+ p = New(49, 10, 3, 3) + pages = p.Pages() + assert.Equal(t, 5, len(pages)) + assert.Equal(t, -1, pages[0].Num()) + assert.False(t, pages[0].IsCurrent()) + assert.Equal(t, 2, pages[1].Num()) + assert.False(t, pages[1].IsCurrent()) + assert.Equal(t, 3, pages[2].Num()) + assert.True(t, pages[2].IsCurrent()) + assert.Equal(t, 4, pages[3].Num()) + assert.False(t, pages[3].IsCurrent()) + assert.Equal(t, -1, pages[4].Num()) + assert.False(t, pages[4].IsCurrent()) + }) +} diff --git a/modules/private/hook.go b/modules/private/hook.go index fd864b1e6b..559019344e 100644 --- a/modules/private/hook.go +++ b/modules/private/hook.go @@ -56,7 +56,7 @@ type HookOptions struct { GitQuarantinePath string GitPushOptions GitPushOptions PullRequestID int64 - IsDeployKey bool + DeployKeyID int64 // if the pusher is a DeployKey, then UserID is the repo's org user. IsWiki bool } diff --git a/modules/private/manager.go b/modules/private/manager.go index 2543e141ea..8405bf2c83 100644 --- a/modules/private/manager.go +++ b/modules/private/manager.go @@ -7,6 +7,7 @@ package private import ( "context" "fmt" + "io" "net/http" "net/url" "time" @@ -189,3 +190,25 @@ func RemoveLogger(ctx context.Context, group, name string) (int, string) { return http.StatusOK, "Removed" } + +// Processes return the current processes from this gitea instance +func Processes(ctx context.Context, out io.Writer, flat, noSystem, stacktraces, json bool, cancel string) (int, string) { + reqURL := setting.LocalURL + fmt.Sprintf("api/internal/manager/processes?flat=%t&no-system=%t&stacktraces=%t&json=%t&cancel-pid=%s", flat, noSystem, stacktraces, json, url.QueryEscape(cancel)) + + req := newInternalRequest(ctx, reqURL, "GET") + resp, err := req.Response() + if err != nil { + return http.StatusInternalServerError, fmt.Sprintf("Unable to contact gitea: %v", err.Error()) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return resp.StatusCode, decodeJSONError(resp).Err + } + + _, err = io.Copy(out, resp.Body) + if err != nil { + return http.StatusInternalServerError, err.Error() + } + return http.StatusOK, "" +} diff --git a/modules/private/restore_repo.go b/modules/private/restore_repo.go index 347ed5e78a..b1561f392b 100644 --- a/modules/private/restore_repo.go +++ b/modules/private/restore_repo.go @@ -45,7 +45,7 @@ func RestoreRepo(ctx context.Context, repoDir, ownerName, repoName string, units } defer resp.Body.Close() - if resp.StatusCode != 200 { + if resp.StatusCode != http.StatusOK { ret := struct { Err string `json:"err"` }{} diff --git a/modules/private/serv.go b/modules/private/serv.go index e1204c23a7..2e1367e4c4 100644 --- a/modules/private/serv.go +++ b/modules/private/serv.go @@ -46,9 +46,9 @@ func ServNoCommand(ctx context.Context, keyID int64) (*asymkey_model.PublicKey, // ServCommandResults are the results of a call to the private route serv type ServCommandResults struct { IsWiki bool - IsDeployKey bool - KeyID int64 - KeyName string + DeployKeyID int64 + KeyID int64 // public key + KeyName string // this field is ambiguous, it can be the name of DeployKey, or the name of the PublicKey UserName string UserEmail string UserID int64 diff --git a/modules/process/error.go b/modules/process/error.go new file mode 100644 index 0000000000..7a72bda40e --- /dev/null +++ b/modules/process/error.go @@ -0,0 +1,26 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
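// Callers can recover the captured output of a failed execution with errors.As
// (sketch only; err here would come from one of the Manager Exec helpers):
//
//	var pErr *process.Error
//	if errors.As(err, &pErr) {
//		log.Printf("%s failed: %v\nstderr: %s", pErr.Description, pErr.Err, pErr.Stderr)
//	}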
+ +package process + +import "fmt" + +// Error is a wrapped error describing the error results of Process Execution +type Error struct { + PID IDType + Description string + Err error + CtxErr error + Stdout string + Stderr string +} + +func (err *Error) Error() string { + return fmt.Sprintf("exec(%s:%s) failed: %v(%v) stdout: %s stderr: %s", err.PID, err.Description, err.Err, err.CtxErr, err.Stdout, err.Stderr) +} + +// Unwrap implements the unwrappable implicit interface for go1.13 Unwrap() +func (err *Error) Unwrap() error { + return err.Err +} diff --git a/modules/process/manager.go b/modules/process/manager.go index d9d2f8c3e5..5d7aee760f 100644 --- a/modules/process/manager.go +++ b/modules/process/manager.go @@ -6,12 +6,8 @@ package process import ( - "bytes" "context" - "fmt" - "io" - "os/exec" - "sort" + "runtime/pprof" "strconv" "sync" "time" @@ -29,6 +25,18 @@ var ( DefaultContext = context.Background() ) +// DescriptionPProfLabel is a label set on goroutines that have a process attached +const DescriptionPProfLabel = "process-description" + +// PIDPProfLabel is a label set on goroutines that have a process attached +const PIDPProfLabel = "pid" + +// PPIDPProfLabel is a label set on goroutines that have a process attached +const PPIDPProfLabel = "ppid" + +// ProcessTypePProfLabel is a label set on goroutines that have a process attached +const ProcessTypePProfLabel = "process-type" + // IDType is a pid type type IDType string @@ -43,15 +51,15 @@ type Manager struct { next int64 lastTime int64 - processes map[IDType]*Process + processMap map[IDType]*process } // GetManager returns a Manager and initializes one as singleton if there's none yet func GetManager() *Manager { managerInit.Do(func() { manager = &Manager{ - processes: make(map[IDType]*Process), - next: 1, + processMap: make(map[IDType]*process), + next: 1, } }) return manager @@ -66,16 +74,27 @@ func GetManager() *Manager { // Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the // process table. func (pm *Manager) AddContext(parent context.Context, description string) (ctx context.Context, cancel context.CancelFunc, finished FinishedFunc) { - parentPID := GetParentPID(parent) - ctx, cancel = context.WithCancel(parent) - pid, finished := pm.Add(parentPID, description, cancel) + ctx, _, finished = pm.Add(ctx, description, cancel, NormalProcessType, true) - return &Context{ - Context: ctx, - pid: pid, - }, cancel, finished + return ctx, cancel, finished +} + +// AddTypedContext creates a new context and adds it as a process. Once the process is finished, finished must be called +// to remove the process from the process table. It should not be called until the process is finished but must always be called. +// +// cancel should be used to cancel the returned context, however it will not remove the process from the process table. +// finished will cancel the returned context and remove it from the process table. +// +// Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the +// process table. 
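// An illustrative call (sketch only; the description string is made up), registering
// a long-running background task so that Cancel skips it as a system process:
//
//	ctx, _, finished := process.GetManager().AddTypedContext(ctx, "service: cron", process.SystemProcessType, true)
//	defer finished()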
+func (pm *Manager) AddTypedContext(parent context.Context, description, processType string, currentlyRunning bool) (ctx context.Context, cancel context.CancelFunc, finished FinishedFunc) { + ctx, cancel = context.WithCancel(parent) + + ctx, _, finished = pm.Add(ctx, description, cancel, processType, currentlyRunning) + + return ctx, cancel, finished } // AddContextTimeout creates a new context and add it as a process. Once the process is finished, finished must be called @@ -87,48 +106,65 @@ func (pm *Manager) AddContext(parent context.Context, description string) (ctx c // Most processes will not need to use the cancel function but there will be cases whereby you want to cancel the process but not immediately remove it from the // process table. func (pm *Manager) AddContextTimeout(parent context.Context, timeout time.Duration, description string) (ctx context.Context, cancel context.CancelFunc, finshed FinishedFunc) { - parentPID := GetParentPID(parent) + if timeout <= 0 { + // it's meaningless to use timeout <= 0, and it must be a bug! so we must panic here to tell developers to make the timeout correct + panic("the timeout must be greater than zero, otherwise the context will be cancelled immediately") + } ctx, cancel = context.WithTimeout(parent, timeout) - pid, finshed := pm.Add(parentPID, description, cancel) + ctx, _, finshed = pm.Add(ctx, description, cancel, NormalProcessType, true) - return &Context{ - Context: ctx, - pid: pid, - }, cancel, finshed + return ctx, cancel, finshed } // Add create a new process -func (pm *Manager) Add(parentPID IDType, description string, cancel context.CancelFunc) (IDType, FinishedFunc) { +func (pm *Manager) Add(ctx context.Context, description string, cancel context.CancelFunc, processType string, currentlyRunning bool) (context.Context, IDType, FinishedFunc) { + parentPID := GetParentPID(ctx) + pm.mutex.Lock() start, pid := pm.nextPID() - parent := pm.processes[parentPID] + parent := pm.processMap[parentPID] if parent == nil { parentPID = "" } - process := &Process{ + process := &process{ PID: pid, ParentPID: parentPID, Description: description, Start: start, Cancel: cancel, + Type: processType, } - finished := func() { - cancel() - pm.remove(process) + var finished FinishedFunc + if currentlyRunning { + finished = func() { + cancel() + pm.remove(process) + pprof.SetGoroutineLabels(ctx) + } + } else { + finished = func() { + cancel() + pm.remove(process) + } } - if parent != nil { - parent.AddChild(process) - } - pm.processes[pid] = process + pm.processMap[pid] = process pm.mutex.Unlock() - return pid, finished + pprofCtx := pprof.WithLabels(ctx, pprof.Labels(DescriptionPProfLabel, description, PPIDPProfLabel, string(parentPID), PIDPProfLabel, string(pid), ProcessTypePProfLabel, processType)) + if currentlyRunning { + pprof.SetGoroutineLabels(pprofCtx) + } + + return &Context{ + Context: pprofCtx, + pid: pid, + }, pid, finished } // nextPID will return the next available PID. pm.mutex should already be locked. @@ -153,142 +189,24 @@ func (pm *Manager) nextPID() (start time.Time, pid IDType) { // Remove a process from the ProcessManager. 
func (pm *Manager) Remove(pid IDType) { pm.mutex.Lock() - delete(pm.processes, pid) + delete(pm.processMap, pid) pm.mutex.Unlock() } -func (pm *Manager) remove(process *Process) { +func (pm *Manager) remove(process *process) { pm.mutex.Lock() - if p := pm.processes[process.PID]; p == process { - delete(pm.processes, process.PID) + defer pm.mutex.Unlock() + if p := pm.processMap[process.PID]; p == process { + delete(pm.processMap, process.PID) } - parent := pm.processes[process.ParentPID] - pm.mutex.Unlock() - - if parent == nil { - return - } - - parent.RemoveChild(process) } // Cancel a process in the ProcessManager. func (pm *Manager) Cancel(pid IDType) { pm.mutex.Lock() - process, ok := pm.processes[pid] + process, ok := pm.processMap[pid] pm.mutex.Unlock() - if ok { + if ok && process.Type != SystemProcessType { process.Cancel() } } - -// Processes gets the processes in a thread safe manner -func (pm *Manager) Processes(onlyRoots bool) []*Process { - pm.mutex.Lock() - processes := make([]*Process, 0, len(pm.processes)) - if onlyRoots { - for _, process := range pm.processes { - if _, has := pm.processes[process.ParentPID]; !has { - processes = append(processes, process) - } - } - } else { - for _, process := range pm.processes { - processes = append(processes, process) - } - } - pm.mutex.Unlock() - - sort.Slice(processes, func(i, j int) bool { - left, right := processes[i], processes[j] - - return left.Start.Before(right.Start) - }) - - return processes -} - -// Exec a command and use the default timeout. -func (pm *Manager) Exec(desc, cmdName string, args ...string) (string, string, error) { - return pm.ExecDir(DefaultContext, -1, "", desc, cmdName, args...) -} - -// ExecTimeout a command and use a specific timeout duration. -func (pm *Manager) ExecTimeout(timeout time.Duration, desc, cmdName string, args ...string) (string, string, error) { - return pm.ExecDir(DefaultContext, timeout, "", desc, cmdName, args...) -} - -// ExecDir a command and use the default timeout. -func (pm *Manager) ExecDir(ctx context.Context, timeout time.Duration, dir, desc, cmdName string, args ...string) (string, string, error) { - return pm.ExecDirEnv(ctx, timeout, dir, desc, nil, cmdName, args...) -} - -// ExecDirEnv runs a command in given path and environment variables, and waits for its completion -// up to the given timeout (or DefaultTimeout if -1 is given). -// Returns its complete stdout and stderr -// outputs and an error, if any (including timeout) -func (pm *Manager) ExecDirEnv(ctx context.Context, timeout time.Duration, dir, desc string, env []string, cmdName string, args ...string) (string, string, error) { - return pm.ExecDirEnvStdIn(ctx, timeout, dir, desc, env, nil, cmdName, args...) -} - -// ExecDirEnvStdIn runs a command in given path and environment variables with provided stdIN, and waits for its completion -// up to the given timeout (or DefaultTimeout if -1 is given). -// Returns its complete stdout and stderr -// outputs and an error, if any (including timeout) -func (pm *Manager) ExecDirEnvStdIn(ctx context.Context, timeout time.Duration, dir, desc string, env []string, stdIn io.Reader, cmdName string, args ...string) (string, string, error) { - if timeout == -1 { - timeout = 60 * time.Second - } - - stdOut := new(bytes.Buffer) - stdErr := new(bytes.Buffer) - - ctx, _, finished := pm.AddContextTimeout(ctx, timeout, desc) - defer finished() - - cmd := exec.CommandContext(ctx, cmdName, args...) 
- cmd.Dir = dir - cmd.Env = env - cmd.Stdout = stdOut - cmd.Stderr = stdErr - if stdIn != nil { - cmd.Stdin = stdIn - } - - if err := cmd.Start(); err != nil { - return "", "", err - } - - err := cmd.Wait() - if err != nil { - err = &Error{ - PID: GetPID(ctx), - Description: desc, - Err: err, - CtxErr: ctx.Err(), - Stdout: stdOut.String(), - Stderr: stdErr.String(), - } - } - - return stdOut.String(), stdErr.String(), err -} - -// Error is a wrapped error describing the error results of Process Execution -type Error struct { - PID IDType - Description string - Err error - CtxErr error - Stdout string - Stderr string -} - -func (err *Error) Error() string { - return fmt.Sprintf("exec(%s:%s) failed: %v(%v) stdout: %s stderr: %s", err.PID, err.Description, err.Err, err.CtxErr, err.Stdout, err.Stderr) -} - -// Unwrap implements the unwrappable implicit interface for go1.13 Unwrap() -func (err *Error) Unwrap() error { - return err.Err -} diff --git a/modules/process/manager_exec.go b/modules/process/manager_exec.go new file mode 100644 index 0000000000..61ddae646f --- /dev/null +++ b/modules/process/manager_exec.go @@ -0,0 +1,79 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package process + +import ( + "bytes" + "context" + "io" + "os/exec" + "time" +) + +// Exec a command and use the default timeout. +func (pm *Manager) Exec(desc, cmdName string, args ...string) (string, string, error) { + return pm.ExecDir(DefaultContext, -1, "", desc, cmdName, args...) +} + +// ExecTimeout a command and use a specific timeout duration. +func (pm *Manager) ExecTimeout(timeout time.Duration, desc, cmdName string, args ...string) (string, string, error) { + return pm.ExecDir(DefaultContext, timeout, "", desc, cmdName, args...) +} + +// ExecDir a command and use the default timeout. +func (pm *Manager) ExecDir(ctx context.Context, timeout time.Duration, dir, desc, cmdName string, args ...string) (string, string, error) { + return pm.ExecDirEnv(ctx, timeout, dir, desc, nil, cmdName, args...) +} + +// ExecDirEnv runs a command in given path and environment variables, and waits for its completion +// up to the given timeout (or DefaultTimeout if -1 is given). +// Returns its complete stdout and stderr +// outputs and an error, if any (including timeout) +func (pm *Manager) ExecDirEnv(ctx context.Context, timeout time.Duration, dir, desc string, env []string, cmdName string, args ...string) (string, string, error) { + return pm.ExecDirEnvStdIn(ctx, timeout, dir, desc, env, nil, cmdName, args...) +} + +// ExecDirEnvStdIn runs a command in given path and environment variables with provided stdIN, and waits for its completion +// up to the given timeout (or DefaultTimeout if timeout <= 0 is given). +// Returns its complete stdout and stderr +// outputs and an error, if any (including timeout) +func (pm *Manager) ExecDirEnvStdIn(ctx context.Context, timeout time.Duration, dir, desc string, env []string, stdIn io.Reader, cmdName string, args ...string) (string, string, error) { + if timeout <= 0 { + timeout = 60 * time.Second + } + + stdOut := new(bytes.Buffer) + stdErr := new(bytes.Buffer) + + ctx, _, finished := pm.AddContextTimeout(ctx, timeout, desc) + defer finished() + + cmd := exec.CommandContext(ctx, cmdName, args...) 
+ cmd.Dir = dir + cmd.Env = env + cmd.Stdout = stdOut + cmd.Stderr = stdErr + if stdIn != nil { + cmd.Stdin = stdIn + } + + if err := cmd.Start(); err != nil { + return "", "", err + } + + err := cmd.Wait() + if err != nil { + err = &Error{ + PID: GetPID(ctx), + Description: desc, + Err: err, + CtxErr: ctx.Err(), + Stdout: stdOut.String(), + Stderr: stdErr.String(), + } + } + + return stdOut.String(), stdErr.String(), err +} diff --git a/modules/process/manager_stacktraces.go b/modules/process/manager_stacktraces.go new file mode 100644 index 0000000000..fbe3374b87 --- /dev/null +++ b/modules/process/manager_stacktraces.go @@ -0,0 +1,355 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package process + +import ( + "fmt" + "io" + "runtime/pprof" + "sort" + "time" + + "github.com/google/pprof/profile" +) + +// StackEntry is an entry on a stacktrace +type StackEntry struct { + Function string + File string + Line int +} + +// Label represents a pprof label assigned to goroutine stack +type Label struct { + Name string + Value string +} + +// Stack is a stacktrace relating to a goroutine. (Multiple goroutines may have the same stacktrace) +type Stack struct { + Count int64 // Number of goroutines with this stack trace + Description string + Labels []*Label `json:",omitempty"` + Entry []*StackEntry `json:",omitempty"` +} + +// A Process is a combined representation of a Process and a Stacktrace for the goroutines associated with it +type Process struct { + PID IDType + ParentPID IDType + Description string + Start time.Time + Type string + + Children []*Process `json:",omitempty"` + Stacks []*Stack `json:",omitempty"` +} + +// Processes gets the processes in a thread safe manner +func (pm *Manager) Processes(flat, noSystem bool) ([]*Process, int) { + pm.mutex.Lock() + processCount := len(pm.processMap) + processes := make([]*Process, 0, len(pm.processMap)) + if flat { + for _, process := range pm.processMap { + if noSystem && process.Type == SystemProcessType { + continue + } + processes = append(processes, process.toProcess()) + } + } else { + // We need our own processMap + processMap := map[IDType]*Process{} + for _, internalProcess := range pm.processMap { + process, ok := processMap[internalProcess.PID] + if !ok { + process = internalProcess.toProcess() + processMap[process.PID] = process + } + + // Check its parent + if process.ParentPID == "" { + processes = append(processes, process) + continue + } + + internalParentProcess, ok := pm.processMap[internalProcess.ParentPID] + if ok { + parentProcess, ok := processMap[process.ParentPID] + if !ok { + parentProcess = internalParentProcess.toProcess() + processMap[parentProcess.PID] = parentProcess + } + parentProcess.Children = append(parentProcess.Children, process) + continue + } + + processes = append(processes, process) + } + } + pm.mutex.Unlock() + + if !flat && noSystem { + for i := 0; i < len(processes); i++ { + process := processes[i] + if process.Type != SystemProcessType { + continue + } + processes[len(processes)-1], processes[i] = processes[i], processes[len(processes)-1] + processes = append(processes[:len(processes)-1], process.Children...) + i-- + } + } + + // Sort by process' start time. Oldest process appears first. 
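The Exec* helpers split out into modules/process/manager_exec.go all funnel into ExecDirEnvStdIn, which wraps the command in an AddContextTimeout process entry for its lifetime. A hedged usage sketch; the command, directory and description are made up for illustration:

```go
package main

import (
	"fmt"
	"time"

	"code.gitea.io/gitea/modules/process"
)

func main() {
	pm := process.GetManager()

	// Run "git version" with a 10 second timeout; the call registers a
	// process entry for the duration of the command and removes it afterwards.
	stdout, stderr, err := pm.ExecDirEnv(
		process.DefaultContext, // parent context used by the convenience wrappers
		10*time.Second,
		"",                     // dir: empty means the current working directory
		"example: git version", // description shown in the process list
		nil,                    // env: nil keeps the inherited environment
		"git", "version",
	)
	if err != nil {
		fmt.Println("command failed:", err, stderr)
		return
	}
	fmt.Print(stdout)
}
```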
+ sort.Slice(processes, func(i, j int) bool { + left, right := processes[i], processes[j] + + return left.Start.Before(right.Start) + }) + + return processes, processCount +} + +// ProcessStacktraces gets the processes and stacktraces in a thread safe manner +func (pm *Manager) ProcessStacktraces(flat, noSystem bool) ([]*Process, int, int64, error) { + var stacks *profile.Profile + var err error + + // We cannot use the pm.ProcessMap here because we will release the mutex ... + processMap := map[IDType]*Process{} + processCount := 0 + + // Lock the manager + pm.mutex.Lock() + processCount = len(pm.processMap) + + // Add a defer to unlock in case there is a panic + unlocked := false + defer func() { + if !unlocked { + pm.mutex.Unlock() + } + }() + + processes := make([]*Process, 0, len(pm.processMap)) + if flat { + for _, internalProcess := range pm.processMap { + process := internalProcess.toProcess() + processMap[process.PID] = process + if noSystem && internalProcess.Type == SystemProcessType { + continue + } + processes = append(processes, process) + } + } else { + for _, internalProcess := range pm.processMap { + process, ok := processMap[internalProcess.PID] + if !ok { + process = internalProcess.toProcess() + processMap[process.PID] = process + } + + // Check its parent + if process.ParentPID == "" { + processes = append(processes, process) + continue + } + + internalParentProcess, ok := pm.processMap[internalProcess.ParentPID] + if ok { + parentProcess, ok := processMap[process.ParentPID] + if !ok { + parentProcess = internalParentProcess.toProcess() + processMap[parentProcess.PID] = parentProcess + } + parentProcess.Children = append(parentProcess.Children, process) + continue + } + + processes = append(processes, process) + } + } + + // Now from within the lock we need to get the goroutines. + // Why? If we release the lock then between between filling the above map and getting + // the stacktraces another process could be created which would then look like a dead process below + reader, writer := io.Pipe() + defer reader.Close() + go func() { + err := pprof.Lookup("goroutine").WriteTo(writer, 0) + _ = writer.CloseWithError(err) + }() + stacks, err = profile.Parse(reader) + if err != nil { + return nil, 0, 0, err + } + + // Unlock the mutex + pm.mutex.Unlock() + unlocked = true + + goroutineCount := int64(0) + + // Now walk through the "Sample" slice in the goroutines stack + for _, sample := range stacks.Sample { + // In the "goroutine" pprof profile each sample represents one or more goroutines + // with the same labels and stacktraces. + + // We will represent each goroutine by a `Stack` + stack := &Stack{} + + // Add the non-process associated labels from the goroutine sample to the Stack + for name, value := range sample.Label { + if name == DescriptionPProfLabel || name == PIDPProfLabel || (!flat && name == PPIDPProfLabel) || name == ProcessTypePProfLabel { + continue + } + + // Labels from the "goroutine" pprof profile only have one value. + // This is because the underlying representation is a map[string]string + if len(value) != 1 { + // Unexpected... + return nil, 0, 0, fmt.Errorf("label: %s in goroutine stack with unexpected number of values: %v", name, value) + } + + stack.Labels = append(stack.Labels, &Label{Name: name, Value: value[0]}) + } + + // The number of goroutines that this sample represents is the `stack.Value[0]` + stack.Count = sample.Value[0] + goroutineCount += stack.Count + + // Now we want to associate this Stack with a Process. 
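ProcessStacktraces works by dumping the runtime's goroutine profile and matching the pprof labels that Add attached back to process entries. A standalone sketch of that label round trip, independent of the manager; the label key used here is made up, the real keys are the *PProfLabel constants referenced above:

```go
package main

import (
	"bytes"
	"context"
	"fmt"
	"runtime/pprof"

	"github.com/google/pprof/profile"
)

func main() {
	// Label the current goroutine, the same mechanism Add uses for its
	// description/PID/type labels.
	pprof.Do(context.Background(), pprof.Labels("example_label", "demo"), func(ctx context.Context) {
		var buf bytes.Buffer
		// debug=0 writes the profile in protobuf form, which profile.Parse understands.
		if err := pprof.Lookup("goroutine").WriteTo(&buf, 0); err != nil {
			panic(err)
		}
		prof, err := profile.Parse(&buf)
		if err != nil {
			panic(err)
		}
		for _, sample := range prof.Sample {
			// sample.Value[0] is the number of goroutines sharing this stack;
			// sample.Label carries the string labels set above.
			if vals := sample.Label["example_label"]; len(vals) == 1 {
				fmt.Printf("%d goroutine(s) labelled %q\n", sample.Value[0], vals[0])
			}
		}
	})
}
```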
+ var process *Process + + // Try to get the PID from the goroutine labels + if pidvalue, ok := sample.Label[PIDPProfLabel]; ok && len(pidvalue) == 1 { + pid := IDType(pidvalue[0]) + + // Now try to get the process from our map + process, ok = processMap[pid] + if !ok && pid != "" { + // This means that no process has been found in the process map - but there was a process PID + // Therefore this goroutine belongs to a dead process and it has escaped control of the process as it + // should have died with the process context cancellation. + + // We need to create a dead process holder for this process and label it appropriately + + // get the parent PID + ppid := IDType("") + if value, ok := sample.Label[PPIDPProfLabel]; ok && len(value) == 1 { + ppid = IDType(value[0]) + } + + // format the description + description := "(dead process)" + if value, ok := sample.Label[DescriptionPProfLabel]; ok && len(value) == 1 { + description = value[0] + " " + description + } + + // override the type of the process to "code" but add the old type as a label on the first stack + ptype := NoneProcessType + if value, ok := sample.Label[ProcessTypePProfLabel]; ok && len(value) == 1 { + stack.Labels = append(stack.Labels, &Label{Name: ProcessTypePProfLabel, Value: value[0]}) + } + process = &Process{ + PID: pid, + ParentPID: ppid, + Description: description, + Type: ptype, + } + + // Now add the dead process back to the map and tree so we don't go back through this again. + processMap[process.PID] = process + added := false + if process.ParentPID != "" && !flat { + if parent, ok := processMap[process.ParentPID]; ok { + parent.Children = append(parent.Children, process) + added = true + } + } + if !added { + processes = append(processes, process) + } + } + } + + if process == nil { + // This means that the sample we're looking has no PID label + var ok bool + process, ok = processMap[""] + if !ok { + // this is the first time we've come acrross an unassociated goroutine so create a "process" to hold them + process = &Process{ + Description: "(unassociated)", + Type: NoneProcessType, + } + processMap[process.PID] = process + processes = append(processes, process) + } + } + + // The sample.Location represents a stack trace for this goroutine, + // however each Location can represent multiple lines (mostly due to inlining) + // so we need to walk the lines too + for _, location := range sample.Location { + for _, line := range location.Line { + entry := &StackEntry{ + Function: line.Function.Name, + File: line.Function.Filename, + Line: int(line.Line), + } + stack.Entry = append(stack.Entry, entry) + } + } + + // Now we need a short-descriptive name to call the stack trace if when it is folded and + // assuming the stack trace has some lines we'll choose the bottom of the stack (i.e. the + // initial function that started the stack trace.) The top of the stack is unlikely to + // be very helpful as a lot of the time it will be runtime.select or some other call into + // a std library. 
+ stack.Description = "(unknown)" + if len(stack.Entry) > 0 { + stack.Description = stack.Entry[len(stack.Entry)-1].Function + } + + process.Stacks = append(process.Stacks, stack) + } + + // restrict to not show system processes + if noSystem { + for i := 0; i < len(processes); i++ { + process := processes[i] + if process.Type != SystemProcessType && process.Type != NoneProcessType { + continue + } + processes[len(processes)-1], processes[i] = processes[i], processes[len(processes)-1] + processes = append(processes[:len(processes)-1], process.Children...) + i-- + } + } + + // Now finally re-sort the processes. Newest process appears first + after := func(processes []*Process) func(i, j int) bool { + return func(i, j int) bool { + left, right := processes[i], processes[j] + return left.Start.After(right.Start) + } + } + sort.Slice(processes, after(processes)) + if !flat { + + var sortChildren func(process *Process) + + sortChildren = func(process *Process) { + sort.Slice(process.Children, after(process.Children)) + for _, child := range process.Children { + sortChildren(child) + } + } + } + + return processes, processCount, goroutineCount, err +} diff --git a/modules/process/manager_test.go b/modules/process/manager_test.go index 152c7a9235..30eabeb37a 100644 --- a/modules/process/manager_test.go +++ b/modules/process/manager_test.go @@ -22,7 +22,7 @@ func TestGetManager(t *testing.T) { } func TestManager_AddContext(t *testing.T) { - pm := Manager{processes: make(map[IDType]*Process), next: 1} + pm := Manager{processMap: make(map[IDType]*process), next: 1} ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -41,7 +41,7 @@ func TestManager_AddContext(t *testing.T) { } func TestManager_Cancel(t *testing.T) { - pm := Manager{processes: make(map[IDType]*Process), next: 1} + pm := Manager{processMap: make(map[IDType]*process), next: 1} ctx, _, finished := pm.AddContext(context.Background(), "foo") defer finished() @@ -69,7 +69,7 @@ func TestManager_Cancel(t *testing.T) { } func TestManager_Remove(t *testing.T) { - pm := Manager{processes: make(map[IDType]*Process), next: 1} + pm := Manager{processMap: make(map[IDType]*process), next: 1} ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -85,7 +85,7 @@ func TestManager_Remove(t *testing.T) { pm.Remove(GetPID(p2Ctx)) - _, exists := pm.processes[GetPID(p2Ctx)] + _, exists := pm.processMap[GetPID(p2Ctx)] assert.False(t, exists, "PID %d is in the list but shouldn't", GetPID(p2Ctx)) } diff --git a/modules/process/process.go b/modules/process/process.go index 662f878d7f..2f7ea18373 100644 --- a/modules/process/process.go +++ b/modules/process/process.go @@ -6,61 +6,34 @@ package process import ( "context" - "sync" "time" ) -// Process represents a working process inheriting from Gitea. -type Process struct { +var ( + SystemProcessType = "system" + RequestProcessType = "request" + NormalProcessType = "normal" + NoneProcessType = "none" +) + +// process represents a working process inheriting from Gitea. +type process struct { PID IDType // Process ID, not system one. 
ParentPID IDType Description string Start time.Time Cancel context.CancelFunc - - lock sync.Mutex - children []*Process + Type string } -// Children gets the children of the process -// Note: this function will behave nicely even if p is nil -func (p *Process) Children() (children []*Process) { - if p == nil { - return - } - - p.lock.Lock() - defer p.lock.Unlock() - children = make([]*Process, len(p.children)) - copy(children, p.children) - return children -} - -// AddChild adds a child process -// Note: this function will behave nicely even if p is nil -func (p *Process) AddChild(child *Process) { - if p == nil { - return - } - - p.lock.Lock() - defer p.lock.Unlock() - p.children = append(p.children, child) -} - -// RemoveChild removes a child process -// Note: this function will behave nicely even if p is nil -func (p *Process) RemoveChild(process *Process) { - if p == nil { - return - } - - p.lock.Lock() - defer p.lock.Unlock() - for i, child := range p.children { - if child == process { - p.children = append(p.children[:i], p.children[i+1:]...) - return - } +// ToProcess converts a process to a externally usable Process +func (p *process) toProcess() *Process { + process := &Process{ + PID: p.PID, + ParentPID: p.ParentPID, + Description: p.Description, + Start: p.Start, + Type: p.Type, } + return process } diff --git a/modules/public/public_bindata.go b/modules/public/public_bindata.go index 25c3c0d2a1..fe250c6454 100644 --- a/modules/public/public_bindata.go +++ b/modules/public/public_bindata.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build bindata -// +build bindata package public diff --git a/modules/public/serve_dynamic.go b/modules/public/serve_dynamic.go index 955c01e510..672924a636 100644 --- a/modules/public/serve_dynamic.go +++ b/modules/public/serve_dynamic.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !bindata -// +build !bindata package public diff --git a/modules/public/serve_static.go b/modules/public/serve_static.go index 8e82175e39..9666880adf 100644 --- a/modules/public/serve_static.go +++ b/modules/public/serve_static.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
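The build-constraint hunks here and below simply drop the legacy `// +build` lines. Those are only consulted by toolchains older than Go 1.17, so once such toolchains are no longer supported the single `//go:build` expression is sufficient, for example:

```go
//go:build bindata

package public
```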
//go:build bindata -// +build bindata package public diff --git a/modules/queue/queue_bytefifo.go b/modules/queue/queue_bytefifo.go index ead3828f33..99c6428abc 100644 --- a/modules/queue/queue_bytefifo.go +++ b/modules/queue/queue_bytefifo.go @@ -7,6 +7,7 @@ package queue import ( "context" "fmt" + "runtime/pprof" "sync" "sync/atomic" "time" @@ -20,7 +21,6 @@ import ( type ByteFIFOQueueConfiguration struct { WorkerPoolConfiguration Workers int - Name string WaitOnEmpty bool } @@ -153,6 +153,7 @@ func (q *ByteFIFOQueue) Flush(timeout time.Duration) error { // Run runs the bytefifo queue func (q *ByteFIFOQueue) Run(atShutdown, atTerminate func(func())) { + pprof.SetGoroutineLabels(q.baseCtx) atShutdown(q.Shutdown) atTerminate(q.Terminate) log.Debug("%s: %s Starting", q.typ, q.name) @@ -355,6 +356,7 @@ func (q *ByteFIFOQueue) Terminate() { if err := q.byteFIFO.Close(); err != nil { log.Error("Error whilst closing internal byte fifo in %s: %s: %v", q.typ, q.name, err) } + q.baseCtxFinished() log.Debug("%s: %s Terminated", q.typ, q.name) } diff --git a/modules/queue/queue_channel.go b/modules/queue/queue_channel.go index 5469c03100..028023d500 100644 --- a/modules/queue/queue_channel.go +++ b/modules/queue/queue_channel.go @@ -7,6 +7,7 @@ package queue import ( "context" "fmt" + "runtime/pprof" "sync/atomic" "time" @@ -20,7 +21,6 @@ const ChannelQueueType Type = "channel" type ChannelQueueConfiguration struct { WorkerPoolConfiguration Workers int - Name string } // ChannelQueue implements Queue @@ -84,6 +84,7 @@ func NewChannelQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue, erro // Run starts to run the queue func (q *ChannelQueue) Run(atShutdown, atTerminate func(func())) { + pprof.SetGoroutineLabels(q.baseCtx) atShutdown(q.Shutdown) atTerminate(q.Terminate) log.Debug("ChannelQueue: %s Starting", q.name) @@ -169,6 +170,7 @@ func (q *ChannelQueue) Terminate() { default: } q.terminateCtxCancel() + q.baseCtxFinished() log.Debug("ChannelQueue: %s Terminated", q.name) } diff --git a/modules/queue/queue_channel_test.go b/modules/queue/queue_channel_test.go index 26a635b918..949c452893 100644 --- a/modules/queue/queue_channel_test.go +++ b/modules/queue/queue_channel_test.go @@ -34,9 +34,9 @@ func TestChannelQueue(t *testing.T) { BlockTimeout: 1 * time.Second, BoostTimeout: 5 * time.Minute, BoostWorkers: 5, + Name: "TestChannelQueue", }, Workers: 0, - Name: "TestChannelQueue", }, &testData{}) assert.NoError(t, err) @@ -128,6 +128,8 @@ func TestChannelQueue_Pause(t *testing.T) { queueShutdown := []func(){} queueTerminate := []func(){} + terminated := make(chan struct{}) + queue, err = NewChannelQueue(handle, ChannelQueueConfiguration{ WorkerPoolConfiguration: WorkerPoolConfiguration{ @@ -142,15 +144,18 @@ func TestChannelQueue_Pause(t *testing.T) { }, &testData{}) assert.NoError(t, err) - go queue.Run(func(shutdown func()) { - lock.Lock() - defer lock.Unlock() - queueShutdown = append(queueShutdown, shutdown) - }, func(terminate func()) { - lock.Lock() - defer lock.Unlock() - queueTerminate = append(queueTerminate, terminate) - }) + go func() { + queue.Run(func(shutdown func()) { + lock.Lock() + defer lock.Unlock() + queueShutdown = append(queueShutdown, shutdown) + }, func(terminate func()) { + lock.Lock() + defer lock.Unlock() + queueTerminate = append(queueTerminate, terminate) + }) + close(terminated) + }() // Shutdown and Terminate in defer defer func() { @@ -278,4 +283,30 @@ func TestChannelQueue_Pause(t *testing.T) { } assert.Equal(t, test1.TestString, result1.TestString) 
assert.Equal(t, test1.TestInt, result1.TestInt) + + lock.Lock() + callbacks := make([]func(), len(queueShutdown)) + copy(callbacks, queueShutdown) + queueShutdown = queueShutdown[:0] + lock.Unlock() + // Now shutdown the queue + for _, callback := range callbacks { + callback() + } + + // terminate the queue + lock.Lock() + callbacks = make([]func(), len(queueTerminate)) + copy(callbacks, queueTerminate) + queueShutdown = queueTerminate[:0] + lock.Unlock() + for _, callback := range callbacks { + callback() + } + select { + case <-terminated: + case <-time.After(10 * time.Second): + assert.Fail(t, "Queue should have terminated") + return + } } diff --git a/modules/queue/queue_disk_channel.go b/modules/queue/queue_disk_channel.go index 0494698e0e..014d93f5b5 100644 --- a/modules/queue/queue_disk_channel.go +++ b/modules/queue/queue_disk_channel.go @@ -7,6 +7,7 @@ package queue import ( "context" "fmt" + "runtime/pprof" "sync" "sync/atomic" "time" @@ -72,9 +73,9 @@ func NewPersistableChannelQueue(handle HandlerFunc, cfg, exemplar interface{}) ( BoostTimeout: config.BoostTimeout, BoostWorkers: config.BoostWorkers, MaxWorkers: config.MaxWorkers, + Name: config.Name + "-channel", }, Workers: config.Workers, - Name: config.Name + "-channel", }, exemplar) if err != nil { return nil, err @@ -90,9 +91,9 @@ func NewPersistableChannelQueue(handle HandlerFunc, cfg, exemplar interface{}) ( BoostTimeout: 5 * time.Minute, BoostWorkers: 1, MaxWorkers: 5, + Name: config.Name + "-level", }, Workers: 0, - Name: config.Name + "-level", }, DataDir: config.DataDir, } @@ -154,6 +155,7 @@ func (q *PersistableChannelQueue) PushBack(data Data) error { // Run starts to run the queue func (q *PersistableChannelQueue) Run(atShutdown, atTerminate func(func())) { + pprof.SetGoroutineLabels(q.channelQueue.baseCtx) log.Debug("PersistableChannelQueue: %s Starting", q.delayedStarter.name) _ = q.channelQueue.AddWorkers(q.channelQueue.workers, 0) diff --git a/modules/queue/queue_disk_channel_test.go b/modules/queue/queue_disk_channel_test.go index f092bb1f56..22b4f0f452 100644 --- a/modules/queue/queue_disk_channel_test.go +++ b/modules/queue/queue_disk_channel_test.go @@ -221,6 +221,7 @@ func TestPersistableChannelQueue_Pause(t *testing.T) { queueShutdown := []func(){} queueTerminate := []func(){} + terminated := make(chan struct{}) tmpDir, err := os.MkdirTemp("", "persistable-channel-queue-pause-test-data") assert.NoError(t, err) @@ -237,15 +238,18 @@ func TestPersistableChannelQueue_Pause(t *testing.T) { }, &testData{}) assert.NoError(t, err) - go queue.Run(func(shutdown func()) { - lock.Lock() - defer lock.Unlock() - queueShutdown = append(queueShutdown, shutdown) - }, func(terminate func()) { - lock.Lock() - defer lock.Unlock() - queueTerminate = append(queueTerminate, terminate) - }) + go func() { + queue.Run(func(shutdown func()) { + lock.Lock() + defer lock.Unlock() + queueShutdown = append(queueShutdown, shutdown) + }, func(terminate func()) { + lock.Lock() + defer lock.Unlock() + queueTerminate = append(queueTerminate, terminate) + }) + close(terminated) + }() // Shutdown and Terminate in defer defer func() { @@ -417,7 +421,10 @@ func TestPersistableChannelQueue_Pause(t *testing.T) { case <-handleChan: assert.Fail(t, "Handler processing should have stopped") return - default: + case <-terminated: + case <-time.After(10 * time.Second): + assert.Fail(t, "Queue should have terminated") + return } lock.Lock() @@ -425,6 +432,7 @@ func TestPersistableChannelQueue_Pause(t *testing.T) { lock.Unlock() // Reopen queue + 
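The test changes above stop launching queue.Run directly with `go` and instead wrap it so a `terminated` channel is closed when Run returns; the registered shutdown/terminate callbacks are then drained under the lock and the test waits on `terminated` with a timeout. A condensed, self-contained sketch of that pattern, with a stand-in `run` function in place of a real queue:

```go
package main

import (
	"fmt"
	"sync"
	"time"
)

// run stands in for queue.Run: it hands out its shutdown/terminate hooks via
// the registration callbacks and blocks until shutdown is invoked.
func run(atShutdown, atTerminate func(func())) {
	done := make(chan struct{})
	atShutdown(func() { close(done) })
	atTerminate(func() {})
	<-done
}

func main() {
	var lock sync.Mutex
	var queueShutdown, queueTerminate []func()
	terminated := make(chan struct{})

	// Run in a goroutine and close terminated when Run returns, so the caller
	// can assert that the queue actually stopped.
	go func() {
		run(func(shutdown func()) {
			lock.Lock()
			defer lock.Unlock()
			queueShutdown = append(queueShutdown, shutdown)
		}, func(terminate func()) {
			lock.Lock()
			defer lock.Unlock()
			queueTerminate = append(queueTerminate, terminate)
		})
		close(terminated)
	}()

	time.Sleep(10 * time.Millisecond) // crude: give run a moment to register its callbacks

	// Copy the registered callbacks under the lock, then invoke them outside it.
	lock.Lock()
	callbacks := append([]func(){}, queueShutdown...)
	callbacks = append(callbacks, queueTerminate...)
	lock.Unlock()
	for _, cb := range callbacks {
		cb()
	}

	select {
	case <-terminated:
		fmt.Println("queue terminated")
	case <-time.After(10 * time.Second):
		fmt.Println("queue failed to terminate")
	}
}
```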
terminated = make(chan struct{}) queue, err = NewPersistableChannelQueue(handle, PersistableChannelQueueConfiguration{ DataDir: tmpDir, BatchLength: 1, @@ -442,15 +450,18 @@ func TestPersistableChannelQueue_Pause(t *testing.T) { paused, _ = pausable.IsPausedIsResumed() - go queue.Run(func(shutdown func()) { - lock.Lock() - defer lock.Unlock() - queueShutdown = append(queueShutdown, shutdown) - }, func(terminate func()) { - lock.Lock() - defer lock.Unlock() - queueTerminate = append(queueTerminate, terminate) - }) + go func() { + queue.Run(func(shutdown func()) { + lock.Lock() + defer lock.Unlock() + queueShutdown = append(queueShutdown, shutdown) + }, func(terminate func()) { + lock.Lock() + defer lock.Unlock() + queueTerminate = append(queueTerminate, terminate) + }) + close(terminated) + }() select { case <-handleChan: @@ -510,4 +521,31 @@ func TestPersistableChannelQueue_Pause(t *testing.T) { assert.Equal(t, test2.TestString, result4.TestString) assert.Equal(t, test2.TestInt, result4.TestInt) + + lock.Lock() + callbacks = make([]func(), len(queueShutdown)) + copy(callbacks, queueShutdown) + queueShutdown = queueShutdown[:0] + lock.Unlock() + // Now shutdown the queue + for _, callback := range callbacks { + callback() + } + + // terminate the queue + lock.Lock() + callbacks = make([]func(), len(queueTerminate)) + copy(callbacks, queueTerminate) + queueShutdown = queueTerminate[:0] + lock.Unlock() + for _, callback := range callbacks { + callback() + } + + select { + case <-time.After(10 * time.Second): + assert.Fail(t, "Queue should have terminated") + return + case <-terminated: + } } diff --git a/modules/queue/unique_queue_channel.go b/modules/queue/unique_queue_channel.go index b7282e6c6c..6e8d37a20c 100644 --- a/modules/queue/unique_queue_channel.go +++ b/modules/queue/unique_queue_channel.go @@ -7,6 +7,7 @@ package queue import ( "context" "fmt" + "runtime/pprof" "sync" "sync/atomic" "time" @@ -97,6 +98,7 @@ func NewChannelUniqueQueue(handle HandlerFunc, cfg, exemplar interface{}) (Queue // Run starts to run the queue func (q *ChannelUniqueQueue) Run(atShutdown, atTerminate func(func())) { + pprof.SetGoroutineLabels(q.baseCtx) atShutdown(q.Shutdown) atTerminate(q.Terminate) log.Debug("ChannelUniqueQueue: %s Starting", q.name) @@ -226,6 +228,7 @@ func (q *ChannelUniqueQueue) Terminate() { default: } q.terminateCtxCancel() + q.baseCtxFinished() log.Debug("ChannelUniqueQueue: %s Terminated", q.name) } diff --git a/modules/queue/unique_queue_channel_test.go b/modules/queue/unique_queue_channel_test.go index ef6752079e..6daf3fc96e 100644 --- a/modules/queue/unique_queue_channel_test.go +++ b/modules/queue/unique_queue_channel_test.go @@ -32,9 +32,9 @@ func TestChannelUniqueQueue(t *testing.T) { BlockTimeout: 1 * time.Second, BoostTimeout: 5 * time.Minute, BoostWorkers: 5, + Name: "TestChannelQueue", }, Workers: 0, - Name: "TestChannelQueue", }, &testData{}) assert.NoError(t, err) diff --git a/modules/queue/unique_queue_disk_channel.go b/modules/queue/unique_queue_disk_channel.go index 5ee1c396fc..6ab03094ba 100644 --- a/modules/queue/unique_queue_disk_channel.go +++ b/modules/queue/unique_queue_disk_channel.go @@ -6,6 +6,7 @@ package queue import ( "context" + "runtime/pprof" "sync" "time" @@ -72,9 +73,9 @@ func NewPersistableChannelUniqueQueue(handle HandlerFunc, cfg, exemplar interfac BoostTimeout: config.BoostTimeout, BoostWorkers: config.BoostWorkers, MaxWorkers: config.MaxWorkers, + Name: config.Name + "-channel", }, Workers: config.Workers, - Name: config.Name + "-channel", }, 
exemplar) if err != nil { return nil, err @@ -90,9 +91,9 @@ func NewPersistableChannelUniqueQueue(handle HandlerFunc, cfg, exemplar interfac BoostTimeout: 5 * time.Minute, BoostWorkers: 1, MaxWorkers: 5, + Name: config.Name + "-level", }, Workers: 0, - Name: config.Name + "-level", }, DataDir: config.DataDir, } @@ -183,6 +184,7 @@ func (q *PersistableChannelUniqueQueue) Has(data Data) (bool, error) { // Run starts to run the queue func (q *PersistableChannelUniqueQueue) Run(atShutdown, atTerminate func(func())) { + pprof.SetGoroutineLabels(q.channelQueue.baseCtx) log.Debug("PersistableChannelUniqueQueue: %s Starting", q.delayedStarter.name) q.lock.Lock() @@ -301,6 +303,7 @@ func (q *PersistableChannelUniqueQueue) Terminate() { if q.internal != nil { q.internal.(*LevelUniqueQueue).Terminate() } + q.channelQueue.baseCtxFinished() log.Debug("PersistableChannelUniqueQueue: %s Terminated", q.delayedStarter.name) } diff --git a/modules/queue/workerpool.go b/modules/queue/workerpool.go index 5f6ec18710..bdf04a363b 100644 --- a/modules/queue/workerpool.go +++ b/modules/queue/workerpool.go @@ -6,11 +6,14 @@ package queue import ( "context" + "fmt" + "runtime/pprof" "sync" "sync/atomic" "time" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/util" ) @@ -19,9 +22,14 @@ import ( // they use to detect if there is a block and will grow and shrink in // response to demand as per configuration. type WorkerPool struct { + // This field requires to be the first one in the struct. + // This is to allow 64 bit atomic operations on 32-bit machines. + // See: https://pkg.go.dev/sync/atomic#pkg-note-BUG & Gitea issue 19518 + numInQueue int64 lock sync.Mutex baseCtx context.Context baseCtxCancel context.CancelFunc + baseCtxFinished process.FinishedFunc paused chan struct{} resumed chan struct{} cond *sync.Cond @@ -34,7 +42,6 @@ type WorkerPool struct { blockTimeout time.Duration boostTimeout time.Duration boostWorkers int - numInQueue int64 } var ( @@ -44,6 +51,7 @@ var ( // WorkerPoolConfiguration is the basic configuration for a WorkerPool type WorkerPoolConfiguration struct { + Name string QueueLength int BatchLength int BlockTimeout time.Duration @@ -54,12 +62,13 @@ type WorkerPoolConfiguration struct { // NewWorkerPool creates a new worker pool func NewWorkerPool(handle HandlerFunc, config WorkerPoolConfiguration) *WorkerPool { - ctx, cancel := context.WithCancel(context.Background()) + ctx, cancel, finished := process.GetManager().AddTypedContext(context.Background(), fmt.Sprintf("Queue: %s", config.Name), process.SystemProcessType, false) dataChan := make(chan Data, config.QueueLength) pool := &WorkerPool{ baseCtx: ctx, baseCtxCancel: cancel, + baseCtxFinished: finished, batchLength: config.BatchLength, dataChan: dataChan, resumed: closedChan, @@ -299,6 +308,7 @@ func (p *WorkerPool) addWorkers(ctx context.Context, cancel context.CancelFunc, p.numberOfWorkers++ p.lock.Unlock() go func() { + pprof.SetGoroutineLabels(ctx) p.doWork(ctx) p.lock.Lock() @@ -476,6 +486,7 @@ func (p *WorkerPool) FlushWithContext(ctx context.Context) error { } func (p *WorkerPool) doWork(ctx context.Context) { + pprof.SetGoroutineLabels(ctx) delay := time.Millisecond * 300 // Create a common timer - we will use this elsewhere diff --git a/modules/repository/create.go b/modules/repository/create.go index 6409cc55ce..21d45c896e 100644 --- a/modules/repository/create.go +++ b/modules/repository/create.go @@ -33,7 +33,7 @@ func CreateRepository(doer, u *user_model.User, opts 
models.CreateRepoOptions) ( // Check if label template exist if len(opts.IssueLabels) > 0 { - if _, err := models.GetLabelTemplateFile(opts.IssueLabels); err != nil { + if _, err := GetLabelTemplateFile(opts.IssueLabels); err != nil { return nil, err } } @@ -54,6 +54,7 @@ func CreateRepository(doer, u *user_model.User, opts models.CreateRepoOptions) ( Status: opts.Status, IsEmpty: !opts.AutoInit, TrustModel: opts.TrustModel, + IsMirror: opts.IsMirror, } var rollbackRepo *repo_model.Repository @@ -100,7 +101,7 @@ func CreateRepository(doer, u *user_model.User, opts models.CreateRepoOptions) ( // Initialize Issue Labels if selected if len(opts.IssueLabels) > 0 { - if err = models.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil { + if err = InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil { rollbackRepo = repo rollbackRepo.OwnerID = u.ID return fmt.Errorf("InitializeLabels: %v", err) @@ -111,9 +112,9 @@ func CreateRepository(doer, u *user_model.User, opts models.CreateRepoOptions) ( return fmt.Errorf("checkDaemonExportOK: %v", err) } - if stdout, err := git.NewCommand(ctx, "update-server-info"). + if stdout, _, err := git.NewCommand(ctx, "update-server-info"). SetDescription(fmt.Sprintf("CreateRepository(git update-server-info): %s", repoPath)). - RunInDir(repoPath); err != nil { + RunStdString(&git.RunOpts{Dir: repoPath}); err != nil { log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) rollbackRepo = repo rollbackRepo.OwnerID = u.ID diff --git a/modules/repository/create_test.go b/modules/repository/create_test.go index 4e232a8609..b6a89a7ed6 100644 --- a/modules/repository/create_test.go +++ b/modules/repository/create_test.go @@ -10,6 +10,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -22,13 +23,13 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) testTeamRepositories := func(teamID int64, repoIds []int64) { - team := unittest.AssertExistsAndLoadBean(t, &models.Team{ID: teamID}).(*models.Team) - assert.NoError(t, team.GetRepositories(&models.SearchOrgTeamOptions{}), "%s: GetRepositories", team.Name) + team := unittest.AssertExistsAndLoadBean(t, &organization.Team{ID: teamID}).(*organization.Team) + assert.NoError(t, team.GetRepositoriesCtx(db.DefaultContext), "%s: GetRepositories", team.Name) assert.Len(t, team.Repos, team.NumRepos, "%s: len repo", team.Name) assert.Len(t, team.Repos, len(repoIds), "%s: repo count", team.Name) for i, rid := range repoIds { if rid > 0 { - assert.True(t, team.HasRepository(rid), "%s: HasRepository(%d) %d", rid, i) + assert.True(t, models.HasRepository(team, rid), "%s: HasRepository(%d) %d", rid, i) } } } @@ -38,13 +39,13 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { assert.NoError(t, err, "GetUserByID") // Create org. - org := &models.Organization{ + org := &organization.Organization{ Name: "All_repo", IsActive: true, Type: user_model.UserTypeOrganization, Visibility: structs.VisibleTypePublic, } - assert.NoError(t, models.CreateOrganization(org, user), "CreateOrganization") + assert.NoError(t, organization.CreateOrganization(org, user), "CreateOrganization") // Check Owner team. 
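Most of the remaining hunks in this patch are mechanical conversions from the old `RunInDir*` helpers to `RunStdString(&git.RunOpts{...})`, which carries the working directory and environment in one options struct and returns stdout and stderr separately. A hedged sketch of the new call shape; the repository path and description are placeholders:

```go
package main

import (
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

func updateServerInfo(ctx context.Context, repoPath string) error {
	// RunStdString replaces RunInDir/RunInDirWithEnv: Dir and Env travel in
	// RunOpts, and stderr comes back explicitly instead of being folded into
	// the error message.
	stdout, stderr, err := git.NewCommand(ctx, "update-server-info").
		SetDescription(fmt.Sprintf("example(git update-server-info): %s", repoPath)).
		RunStdString(&git.RunOpts{Dir: repoPath})
	if err != nil {
		return fmt.Errorf("git update-server-info in %s: stdout: %s, stderr: %s, err: %w", repoPath, stdout, stderr, err)
	}
	return nil
}

func main() {
	_ = updateServerInfo(context.Background(), "/tmp/example.git") // placeholder path
}
```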
ownerTeam, err := org.GetOwnerTeam() @@ -65,7 +66,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { assert.NoError(t, err, "GetOwnerTeam") // Create teams and check repositories. - teams := []*models.Team{ + teams := []*organization.Team{ ownerTeam, { OrgID: org.ID, @@ -144,5 +145,5 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) { assert.NoError(t, models.DeleteRepository(user, org.ID, rid), "DeleteRepository %d", i) } } - assert.NoError(t, models.DeleteOrganization(db.DefaultContext, org), "DeleteOrganization") + assert.NoError(t, organization.DeleteOrganization(db.DefaultContext, org), "DeleteOrganization") } diff --git a/models/helper_environment.go b/modules/repository/env.go similarity index 95% rename from models/helper_environment.go rename to modules/repository/env.go index 57ec3ea1e9..e86e0d4535 100644 --- a/models/helper_environment.go +++ b/modules/repository/env.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package models +package repository import ( "fmt" @@ -23,8 +23,8 @@ const ( EnvPusherName = "GITEA_PUSHER_NAME" EnvPusherEmail = "GITEA_PUSHER_EMAIL" EnvPusherID = "GITEA_PUSHER_ID" - EnvKeyID = "GITEA_KEY_ID" - EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" + EnvKeyID = "GITEA_KEY_ID" // public key ID + EnvDeployKeyID = "GITEA_DEPLOY_KEY_ID" EnvPRID = "GITEA_PR_ID" EnvIsInternal = "GITEA_INTERNAL_PUSH" EnvAppURL = "GITEA_ROOT_URL" diff --git a/modules/repository/generate.go b/modules/repository/generate.go index d0b5fa0820..1436d764f0 100644 --- a/modules/repository/generate.go +++ b/modules/repository/generate.go @@ -177,14 +177,20 @@ func generateRepoCommit(ctx context.Context, repo, templateRepo, generateRepo *r } repoPath := repo.RepoPath() - if stdout, err := git.NewCommand(ctx, "remote", "add", "origin", repoPath). + if stdout, _, err := git.NewCommand(ctx, "remote", "add", "origin", repoPath). SetDescription(fmt.Sprintf("generateRepoCommit (git remote add): %s to %s", templateRepoPath, tmpDir)). 
- RunInDirWithEnv(tmpDir, env); err != nil { + RunStdString(&git.RunOpts{Dir: tmpDir, Env: env}); err != nil { log.Error("Unable to add %v as remote origin to temporary repo to %s: stdout %s\nError: %v", repo, tmpDir, stdout, err) return fmt.Errorf("git remote add: %v", err) } - return initRepoCommit(ctx, tmpDir, repo, repo.Owner, templateRepo.DefaultBranch) + // set default branch based on whether it's specified in the newly generated repo or not + defaultBranch := repo.DefaultBranch + if strings.TrimSpace(defaultBranch) == "" { + defaultBranch = templateRepo.DefaultBranch + } + + return initRepoCommit(ctx, tmpDir, repo, repo.Owner, defaultBranch) } func generateGitContent(ctx context.Context, repo, templateRepo, generateRepo *repo_model.Repository) (err error) { @@ -208,8 +214,12 @@ func generateGitContent(ctx context.Context, repo, templateRepo, generateRepo *r return fmt.Errorf("getRepositoryByID: %v", err) } - repo.DefaultBranch = templateRepo.DefaultBranch - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + // if there was no default branch supplied when generating the repo, use the default one from the template + if strings.TrimSpace(repo.DefaultBranch) == "" { + repo.DefaultBranch = templateRepo.DefaultBranch + } + + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { return fmt.Errorf("openRepository: %v", err) } @@ -249,6 +259,7 @@ func GenerateRepository(ctx context.Context, doer, owner *user_model.User, templ Name: opts.Name, LowerName: strings.ToLower(opts.Name), Description: opts.Description, + DefaultBranch: opts.DefaultBranch, IsPrivate: opts.Private, IsEmpty: !opts.GitContent || templateRepo.IsEmpty, IsFsckEnabled: templateRepo.IsFsckEnabled, @@ -281,9 +292,9 @@ func GenerateRepository(ctx context.Context, doer, owner *user_model.User, templ return generateRepo, fmt.Errorf("checkDaemonExportOK: %v", err) } - if stdout, err := git.NewCommand(ctx, "update-server-info"). + if stdout, _, err := git.NewCommand(ctx, "update-server-info"). SetDescription(fmt.Sprintf("GenerateRepository(git update-server-info): %s", repoPath)). - RunInDir(repoPath); err != nil { + RunStdString(&git.RunOpts{Dir: repoPath}); err != nil { log.Error("GenerateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", generateRepo, stdout, err) return generateRepo, fmt.Errorf("error in GenerateRepository(git update-server-info): %v", err) } diff --git a/modules/repository/init.go b/modules/repository/init.go index 66d464ef13..845a61ed0a 100644 --- a/modules/repository/init.go +++ b/modules/repository/init.go @@ -9,7 +9,9 @@ import ( "context" "fmt" "os" + "path" "path/filepath" + "sort" "strings" "time" @@ -18,13 +20,199 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/options" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/templates/vars" "code.gitea.io/gitea/modules/util" asymkey_service "code.gitea.io/gitea/services/asymkey" - - "github.com/unknwon/com" ) +var ( + // Gitignores contains the gitiginore files + Gitignores []string + + // Licenses contains the license files + Licenses []string + + // Readmes contains the readme files + Readmes []string + + // LabelTemplates contains the label template files and the list of labels for each file + LabelTemplates map[string]string +) + +// ErrIssueLabelTemplateLoad represents a "ErrIssueLabelTemplateLoad" kind of error. 
+type ErrIssueLabelTemplateLoad struct { + TemplateFile string + OriginalError error +} + +// IsErrIssueLabelTemplateLoad checks if an error is a ErrIssueLabelTemplateLoad. +func IsErrIssueLabelTemplateLoad(err error) bool { + _, ok := err.(ErrIssueLabelTemplateLoad) + return ok +} + +func (err ErrIssueLabelTemplateLoad) Error() string { + return fmt.Sprintf("Failed to load label template file '%s': %v", err.TemplateFile, err.OriginalError) +} + +// GetRepoInitFile returns repository init files +func GetRepoInitFile(tp, name string) ([]byte, error) { + cleanedName := strings.TrimLeft(path.Clean("/"+name), "/") + relPath := path.Join("options", tp, cleanedName) + + // Use custom file when available. + customPath := path.Join(setting.CustomPath, relPath) + isFile, err := util.IsFile(customPath) + if err != nil { + log.Error("Unable to check if %s is a file. Error: %v", customPath, err) + } + if isFile { + return os.ReadFile(customPath) + } + + switch tp { + case "readme": + return options.Readme(cleanedName) + case "gitignore": + return options.Gitignore(cleanedName) + case "license": + return options.License(cleanedName) + case "label": + return options.Labels(cleanedName) + default: + return []byte{}, fmt.Errorf("Invalid init file type") + } +} + +// GetLabelTemplateFile loads the label template file by given name, +// then parses and returns a list of name-color pairs and optionally description. +func GetLabelTemplateFile(name string) ([][3]string, error) { + data, err := GetRepoInitFile("label", name) + if err != nil { + return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("GetRepoInitFile: %v", err)} + } + + lines := strings.Split(string(data), "\n") + list := make([][3]string, 0, len(lines)) + for i := 0; i < len(lines); i++ { + line := strings.TrimSpace(lines[i]) + if len(line) == 0 { + continue + } + + parts := strings.SplitN(line, ";", 2) + + fields := strings.SplitN(parts[0], " ", 2) + if len(fields) != 2 { + return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("line is malformed: %s", line)} + } + + color := strings.Trim(fields[0], " ") + if len(color) == 6 { + color = "#" + color + } + if !models.LabelColorPattern.MatchString(color) { + return nil, ErrIssueLabelTemplateLoad{name, fmt.Errorf("bad HTML color code in line: %s", line)} + } + + var description string + + if len(parts) > 1 { + description = strings.TrimSpace(parts[1]) + } + + fields[1] = strings.TrimSpace(fields[1]) + list = append(list, [3]string{fields[1], color, description}) + } + + return list, nil +} + +func loadLabels(labelTemplate string) ([]string, error) { + list, err := GetLabelTemplateFile(labelTemplate) + if err != nil { + return nil, err + } + + labels := make([]string, len(list)) + for i := 0; i < len(list); i++ { + labels[i] = list[i][0] + } + return labels, nil +} + +// LoadLabelsFormatted loads the labels' list of a template file as a string separated by comma +func LoadLabelsFormatted(labelTemplate string) (string, error) { + labels, err := loadLabels(labelTemplate) + return strings.Join(labels, ", "), err +} + +// LoadRepoConfig loads the repository config +func LoadRepoConfig() { + // Load .gitignore and license files and readme templates. 
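GetRepoInitFile and GetLabelTemplateFile now live in modules/repository rather than models. A label template is a plain text file with one `color name ; optional description` entry per line, and the parser returns `[name, color, description]` triples. A sketch, assuming a template named "Default" exists under options/label:

```go
package main

import (
	"fmt"
	"log"

	"code.gitea.io/gitea/modules/repository"
)

func main() {
	// Each line of the template looks like:
	//   #ee0701 bug ; Something isn't working
	// i.e. a colour, a space, the label name, and an optional ";"-separated description.
	list, err := repository.GetLabelTemplateFile("Default") // "Default" is an assumed template name
	if err != nil {
		log.Fatalf("load label template: %v", err)
	}
	for _, entry := range list {
		fmt.Printf("name=%q color=%q description=%q\n", entry[0], entry[1], entry[2])
	}
}
```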
+ types := []string{"gitignore", "license", "readme", "label"} + typeFiles := make([][]string, 4) + for i, t := range types { + files, err := options.Dir(t) + if err != nil { + log.Fatal("Failed to get %s files: %v", t, err) + } + customPath := path.Join(setting.CustomPath, "options", t) + isDir, err := util.IsDir(customPath) + if err != nil { + log.Fatal("Failed to get custom %s files: %v", t, err) + } + if isDir { + customFiles, err := util.StatDir(customPath) + if err != nil { + log.Fatal("Failed to get custom %s files: %v", t, err) + } + + for _, f := range customFiles { + if !util.IsStringInSlice(f, files, true) { + files = append(files, f) + } + } + } + typeFiles[i] = files + } + + Gitignores = typeFiles[0] + Licenses = typeFiles[1] + Readmes = typeFiles[2] + LabelTemplatesFiles := typeFiles[3] + sort.Strings(Gitignores) + sort.Strings(Licenses) + sort.Strings(Readmes) + sort.Strings(LabelTemplatesFiles) + + // Load label templates + LabelTemplates = make(map[string]string) + for _, templateFile := range LabelTemplatesFiles { + labels, err := LoadLabelsFormatted(templateFile) + if err != nil { + log.Error("Failed to load labels: %v", err) + } + LabelTemplates[templateFile] = labels + } + + // Filter out invalid names and promote preferred licenses. + sortedLicenses := make([]string, 0, len(Licenses)) + for _, name := range setting.Repository.PreferredLicenses { + if util.IsStringInSlice(name, Licenses, true) { + sortedLicenses = append(sortedLicenses, name) + } + } + for _, name := range Licenses { + if !util.IsStringInSlice(name, setting.Repository.PreferredLicenses, true) { + sortedLicenses = append(sortedLicenses, name) + } + } + Licenses = sortedLicenses +} + func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir, repoPath string, opts models.CreateRepoOptions) error { commitTimeStr := time.Now().Format(time.RFC3339) authorSig := repo.Owner.NewGitSig() @@ -40,15 +228,15 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir, ) // Clone to temporary path and do the init commit. - if stdout, err := git.NewCommand(ctx, "clone", repoPath, tmpDir). + if stdout, _, err := git.NewCommand(ctx, "clone", repoPath, tmpDir). SetDescription(fmt.Sprintf("prepareRepoCommit (git clone): %s to %s", repoPath, tmpDir)). 
- RunInDirWithEnv("", env); err != nil { + RunStdString(&git.RunOpts{Dir: "", Env: env}); err != nil { log.Error("Failed to clone from %v into %s: stdout: %s\nError: %v", repo, tmpDir, stdout, err) return fmt.Errorf("git clone: %v", err) } // README - data, err := models.GetRepoInitFile("readme", opts.Readme) + data, err := GetRepoInitFile("readme", opts.Readme) if err != nil { return fmt.Errorf("GetRepoInitFile[%s]: %v", opts.Readme, err) } @@ -61,8 +249,13 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir, "CloneURL.HTTPS": cloneLink.HTTPS, "OwnerName": repo.OwnerName, } + res, err := vars.Expand(string(data), match) + if err != nil { + // here we could just log the error and continue the rendering + log.Error("unable to expand template vars for repo README: %s, err: %v", opts.Readme, err) + } if err = os.WriteFile(filepath.Join(tmpDir, "README.md"), - []byte(com.Expand(string(data), match)), 0o644); err != nil { + []byte(res), 0o644); err != nil { return fmt.Errorf("write README.md: %v", err) } @@ -71,7 +264,7 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir, var buf bytes.Buffer names := strings.Split(opts.Gitignores, ",") for _, name := range names { - data, err = models.GetRepoInitFile("gitignore", name) + data, err = GetRepoInitFile("gitignore", name) if err != nil { return fmt.Errorf("GetRepoInitFile[%s]: %v", name, err) } @@ -89,7 +282,7 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir, // LICENSE if len(opts.License) > 0 { - data, err = models.GetRepoInitFile("license", opts.License) + data, err = GetRepoInitFile("license", opts.License) if err != nil { return fmt.Errorf("GetRepoInitFile[%s]: %v", opts.License, err) } @@ -117,9 +310,9 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi committerName := sig.Name committerEmail := sig.Email - if stdout, err := git.NewCommand(ctx, "add", "--all"). + if stdout, _, err := git.NewCommand(ctx, "add", "--all"). SetDescription(fmt.Sprintf("initRepoCommit (git add): %s", tmpPath)). - RunInDir(tmpPath); err != nil { + RunStdString(&git.RunOpts{Dir: tmpPath}); err != nil { log.Error("git add --all failed: Stdout: %s\nError: %v", stdout, err) return fmt.Errorf("git add --all: %v", err) } @@ -154,9 +347,9 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi "GIT_COMMITTER_EMAIL="+committerEmail, ) - if stdout, err := git.NewCommand(ctx, args...). + if stdout, _, err := git.NewCommand(ctx, args...). SetDescription(fmt.Sprintf("initRepoCommit (git commit): %s", tmpPath)). - RunInDirWithEnv(tmpPath, env); err != nil { + RunStdString(&git.RunOpts{Dir: tmpPath, Env: env}); err != nil { log.Error("Failed to commit: %v: Stdout: %s\nError: %v", args, stdout, err) return fmt.Errorf("git commit: %v", err) } @@ -165,9 +358,9 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi defaultBranch = setting.Repository.DefaultBranch } - if stdout, err := git.NewCommand(ctx, "push", "origin", "HEAD:"+defaultBranch). + if stdout, _, err := git.NewCommand(ctx, "push", "origin", "HEAD:"+defaultBranch). SetDescription(fmt.Sprintf("initRepoCommit (git push): %s", tmpPath)). 
- RunInDirWithEnv(tmpPath, models.InternalPushingEnvironment(u, repo)); err != nil { + RunStdString(&git.RunOpts{Dir: tmpPath, Env: InternalPushingEnvironment(u, repo)}); err != nil { log.Error("Failed to push back to HEAD: Stdout: %s\nError: %v", stdout, err) return fmt.Errorf("git push: %v", err) } @@ -241,7 +434,7 @@ func initRepository(ctx context.Context, repoPath string, u *user_model.User, re if len(opts.DefaultBranch) > 0 { repo.DefaultBranch = opts.DefaultBranch - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { return fmt.Errorf("openRepository: %v", err) } @@ -257,3 +450,31 @@ func initRepository(ctx context.Context, repoPath string, u *user_model.User, re return nil } + +// InitializeLabels adds a label set to a repository using a template +func InitializeLabels(ctx context.Context, id int64, labelTemplate string, isOrg bool) error { + list, err := GetLabelTemplateFile(labelTemplate) + if err != nil { + return err + } + + labels := make([]*models.Label, len(list)) + for i := 0; i < len(list); i++ { + labels[i] = &models.Label{ + Name: list[i][0], + Description: list[i][2], + Color: list[i][1], + } + if isOrg { + labels[i].OrgID = id + } else { + labels[i].RepoID = id + } + } + for _, label := range labels { + if err = models.NewLabel(ctx, label); err != nil { + return err + } + } + return nil +} diff --git a/modules/repository/main_test.go b/modules/repository/main_test.go index 262d339481..42134fa7ad 100644 --- a/modules/repository/main_test.go +++ b/modules/repository/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } diff --git a/modules/repository/push.go b/modules/repository/push.go index aa94a3e242..e1dbd5d2fc 100644 --- a/modules/repository/push.go +++ b/modules/repository/push.go @@ -104,8 +104,8 @@ func IsForcePush(ctx context.Context, opts *PushUpdateOptions) (bool, error) { return false, nil } - output, err := git.NewCommand(ctx, "rev-list", "--max-count=1", opts.OldCommitID, "^"+opts.NewCommitID). - RunInDir(repo_model.RepoPath(opts.RepoUserName, opts.RepoName)) + output, _, err := git.NewCommand(ctx, "rev-list", "--max-count=1", opts.OldCommitID, "^"+opts.NewCommitID). 
+ RunStdString(&git.RunOpts{Dir: repo_model.RepoPath(opts.RepoUserName, opts.RepoName)}) if err != nil { return false, err } else if len(output) > 0 { diff --git a/modules/repository/repo.go b/modules/repository/repo.go index ed9ed508be..30ca6fdff8 100644 --- a/modules/repository/repo.go +++ b/modules/repository/repo.go @@ -15,6 +15,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" @@ -55,7 +56,7 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, repoPath := repo_model.RepoPath(u.Name, opts.RepoName) if u.IsOrganization() { - t, err := models.OrgFromUser(u).GetOwnerTeam() + t, err := organization.OrgFromUser(u).GetOwnerTeam() if err != nil { return nil, err } @@ -72,13 +73,18 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, } if err = git.Clone(ctx, opts.CloneAddr, repoPath, git.CloneRepoOptions{ - Mirror: true, - Quiet: true, - Timeout: migrateTimeout, + Mirror: true, + Quiet: true, + Timeout: migrateTimeout, + SkipTLSVerify: setting.Migrations.SkipTLSVerify, }); err != nil { return repo, fmt.Errorf("Clone: %v", err) } + if err := git.WriteCommitGraph(ctx, repoPath); err != nil { + return repo, err + } + if opts.Wiki { wikiPath := repo_model.WikiPath(u.Name, opts.RepoName) wikiRemotePath := WikiRemoteURL(ctx, opts.CloneAddr) @@ -87,16 +93,21 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, return repo, fmt.Errorf("Failed to remove %s: %v", wikiPath, err) } - if err = git.Clone(ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{ - Mirror: true, - Quiet: true, - Timeout: migrateTimeout, - Branch: "master", + if err := git.Clone(ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{ + Mirror: true, + Quiet: true, + Timeout: migrateTimeout, + Branch: "master", + SkipTLSVerify: setting.Migrations.SkipTLSVerify, }); err != nil { log.Warn("Clone wiki: %v", err) if err := util.RemoveAll(wikiPath); err != nil { return repo, fmt.Errorf("Failed to remove %s: %v", wikiPath, err) } + } else { + if err := git.WriteCommitGraph(ctx, wikiPath); err != nil { + return repo, err + } } } } @@ -109,14 +120,14 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, return repo, fmt.Errorf("checkDaemonExportOK: %v", err) } - if stdout, err := git.NewCommand(ctx, "update-server-info"). + if stdout, _, err := git.NewCommand(ctx, "update-server-info"). SetDescription(fmt.Sprintf("MigrateRepositoryGitData(git update-server-info): %s", repoPath)). 
- RunInDir(repoPath); err != nil { + RunStdString(&git.RunOpts{Dir: repoPath}); err != nil { log.Error("MigrateRepositoryGitData(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) return repo, fmt.Errorf("error in MigrateRepositoryGitData(git update-server-info): %v", err) } - gitRepo, err := git.OpenRepositoryCtx(ctx, repoPath) + gitRepo, err := git.OpenRepository(ctx, repoPath) if err != nil { return repo, fmt.Errorf("OpenRepository: %v", err) } @@ -140,6 +151,9 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, } if !opts.Releases { + // note: this will greatly improve release (tag) sync + // for pull-mirrors with many tags + repo.IsMirror = opts.Mirror if err = SyncReleasesWithTags(repo, gitRepo); err != nil { log.Error("Failed to synchronize tags to releases for repository: %v", err) } @@ -154,7 +168,7 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User, } } - if err = models.UpdateRepoSize(db.DefaultContext, repo); err != nil { + if err = models.UpdateRepoSize(ctx, repo); err != nil { log.Error("Failed to update size for repository: %v", err) } @@ -228,7 +242,7 @@ func CleanUpMigrateInfo(ctx context.Context, repo *repo_model.Repository) (*repo } } - _, err := git.NewCommand(ctx, "remote", "rm", "origin").RunInDir(repoPath) + _, _, err := git.NewCommand(ctx, "remote", "rm", "origin").RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { return repo, fmt.Errorf("CleanUpMigrateInfo: %v", err) } @@ -244,6 +258,14 @@ func CleanUpMigrateInfo(ctx context.Context, repo *repo_model.Repository) (*repo // SyncReleasesWithTags synchronizes release table with repository tags func SyncReleasesWithTags(repo *repo_model.Repository, gitRepo *git.Repository) error { + log.Debug("SyncReleasesWithTags: in Repo[%d:%s/%s]", repo.ID, repo.OwnerName, repo.Name) + + // optimized procedure for pull-mirrors which saves a lot of time (in + // particular for repos with many tags). 
+ if repo.IsMirror { + return pullMirrorReleaseSync(repo, gitRepo) + } + existingRelTags := make(map[string]struct{}) opts := models.FindReleasesOptions{ IncludeDrafts: true, @@ -276,23 +298,25 @@ func SyncReleasesWithTags(repo *repo_model.Repository, gitRepo *git.Repository) } } } - tags, err := gitRepo.GetTags(0, 0) - if err != nil { - return fmt.Errorf("unable to GetTags in Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err) - } - for _, tagName := range tags { - if _, ok := existingRelTags[strings.ToLower(tagName)]; !ok { - if err := PushUpdateAddTag(repo, gitRepo, tagName); err != nil { - return fmt.Errorf("unable to PushUpdateAddTag: %q to Repo[%d:%s/%s]: %w", tagName, repo.ID, repo.OwnerName, repo.Name, err) - } + + _, err := gitRepo.WalkReferences(git.ObjectTag, 0, 0, func(sha1, refname string) error { + tagName := strings.TrimPrefix(refname, git.TagPrefix) + if _, ok := existingRelTags[strings.ToLower(tagName)]; ok { + return nil } - } - return nil + + if err := PushUpdateAddTag(repo, gitRepo, tagName, sha1, refname); err != nil { + return fmt.Errorf("unable to PushUpdateAddTag: %q to Repo[%d:%s/%s]: %w", tagName, repo.ID, repo.OwnerName, repo.Name, err) + } + + return nil + }) + return err } // PushUpdateAddTag must be called for any push actions to add tag -func PushUpdateAddTag(repo *repo_model.Repository, gitRepo *git.Repository, tagName string) error { - tag, err := gitRepo.GetTag(tagName) +func PushUpdateAddTag(repo *repo_model.Repository, gitRepo *git.Repository, tagName, sha1, refname string) error { + tag, err := gitRepo.GetTagWithID(sha1, tagName) if err != nil { return fmt.Errorf("unable to GetTag: %w", err) } @@ -438,3 +462,52 @@ func StoreMissingLfsObjectsInRepository(ctx context.Context, repo *repo_model.Re return nil } + +// pullMirrorReleaseSync is a pull-mirror specific tag<->release table +// synchronization which overwrites all Releases from the repository tags. This +// can be relied on since a pull-mirror is always identical to its +// upstream. Hence, after each sync we want the pull-mirror release set to be +// identical to the upstream tag set. This is much more efficient for +// repositories like https://github.com/vim/vim (with over 13000 tags). 
+func pullMirrorReleaseSync(repo *repo_model.Repository, gitRepo *git.Repository) error { + log.Trace("pullMirrorReleaseSync: rebuilding releases for pull-mirror Repo[%d:%s/%s]", repo.ID, repo.OwnerName, repo.Name) + tags, numTags, err := gitRepo.GetTagInfos(0, 0) + if err != nil { + return fmt.Errorf("unable to GetTagInfos in pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err) + } + err = db.WithTx(func(ctx context.Context) error { + // + // clear out existing releases + // + if _, err := db.DeleteByBean(ctx, &models.Release{RepoID: repo.ID}); err != nil { + return fmt.Errorf("unable to clear releases for pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err) + } + // + // make release set identical to upstream tags + // + for _, tag := range tags { + release := models.Release{ + RepoID: repo.ID, + TagName: tag.Name, + LowerTagName: strings.ToLower(tag.Name), + Sha1: tag.Object.String(), + // NOTE: ignored, since NumCommits are unused + // for pull-mirrors (only relevant when + // displaying releases, IsTag: false) + NumCommits: -1, + CreatedUnix: timeutil.TimeStamp(tag.Tagger.When.Unix()), + IsTag: true, + } + if err := db.Insert(ctx, release); err != nil { + return fmt.Errorf("unable to insert tag %s for pull-mirror Repo[%d:%s/%s]: %w", tag.Name, repo.ID, repo.OwnerName, repo.Name, err) + } + } + return nil + }) + if err != nil { + return fmt.Errorf("unable to rebuild release table for pull-mirror Repo[%d:%s/%s]: %w", repo.ID, repo.OwnerName, repo.Name, err) + } + + log.Trace("pullMirrorReleaseSync: done rebuilding %d releases", numTags) + return nil +} diff --git a/models/helper_directory.go b/modules/repository/temp.go similarity index 98% rename from models/helper_directory.go rename to modules/repository/temp.go index 10114959ef..5947d29965 100644 --- a/models/helper_directory.go +++ b/modules/repository/temp.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. -package models +package repository import ( "fmt" diff --git a/modules/setting/database_sqlite.go b/modules/setting/database_sqlite.go index 12c60cc86c..1f18868d8e 100644 --- a/modules/setting/database_sqlite.go +++ b/modules/setting/database_sqlite.go @@ -1,5 +1,4 @@ //go:build sqlite -// +build sqlite // Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style diff --git a/modules/setting/federation.go b/modules/setting/federation.go index 8aa481cd98..2fca3c2882 100644 --- a/modules/setting/federation.go +++ b/modules/setting/federation.go @@ -13,17 +13,19 @@ import ( // Federation settings var ( Federation = struct { - Enabled bool - Algorithms []string - DigestAlgorithm string - GetHeaders []string - PostHeaders []string + Enabled bool + ShareUserStatistics bool + Algorithms []string + DigestAlgorithm string + GetHeaders []string + PostHeaders []string }{ - Enabled: true, - Algorithms: []string{"rsa-sha256", "rsa-sha512"}, - DigestAlgorithm: "SHA-256", - GetHeaders: []string{"(request-target)", "Date"}, - PostHeaders: []string{"(request-target)", "Date", "Digest"}, + Enabled: true, + ShareUserStatistics: true, + Algorithms: []string{"rsa-sha256", "rsa-sha512"}, + DigestAlgorithm: "SHA-256", + GetHeaders: []string{"(request-target)", "Date"}, + PostHeaders: []string{"(request-target)", "Date", "Digest"}, } ) diff --git a/modules/setting/log.go b/modules/setting/log.go index e666e2a027..008a419b09 100644 --- a/modules/setting/log.go +++ b/modules/setting/log.go @@ -32,9 +32,8 @@ func GetLogDescriptions() map[string]*LogDescription { descs := make(map[string]*LogDescription, len(logDescriptions)) for k, v := range logDescriptions { subLogDescriptions := make([]SubLogDescription, len(v.SubLogDescriptions)) - for i, s := range v.SubLogDescriptions { - subLogDescriptions[i] = s - } + copy(subLogDescriptions, v.SubLogDescriptions) + descs[k] = &LogDescription{ Name: v.Name, SubLogDescriptions: subLogDescriptions, diff --git a/modules/setting/packages.go b/modules/setting/packages.go new file mode 100644 index 0000000000..5e0f2a3b03 --- /dev/null +++ b/modules/setting/packages.go @@ -0,0 +1,46 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
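The new [packages] settings that follow are consumed together with the storage.Packages handle initialised later in this patch; the sketch below is illustrative only (the helper name, writer, and object key are invented) and merely shows roughly how the two pieces fit together.

package example // illustrative sketch, not part of the patch

import (
	"io"
	"os"

	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/storage"
)

// servePackageBlob is an invented helper: the [packages] ini section is mapped
// onto setting.Packages, and uploaded package files live in the
// storage.Packages object storage set up by initPackages() later in this patch.
func servePackageBlob(key string, w io.Writer) error {
	if !setting.Packages.Enabled {
		return os.ErrNotExist // packages support switched off via ENABLED = false
	}
	obj, err := storage.Packages.Open(key)
	if err != nil {
		return err
	}
	defer obj.Close()
	_, err = io.Copy(w, obj)
	return err
}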
+ +package setting + +import ( + "net/url" + "os" + "path/filepath" + + "code.gitea.io/gitea/modules/log" +) + +// Package registry settings +var ( + Packages = struct { + Storage + Enabled bool + ChunkedUploadPath string + RegistryHost string + }{ + Enabled: true, + } +) + +func newPackages() { + sec := Cfg.Section("packages") + if err := sec.MapTo(&Packages); err != nil { + log.Fatal("Failed to map Packages settings: %v", err) + } + + Packages.Storage = getStorage("packages", "", nil) + + appURL, _ := url.Parse(AppURL) + Packages.RegistryHost = appURL.Host + + Packages.ChunkedUploadPath = filepath.ToSlash(sec.Key("CHUNKED_UPLOAD_PATH").MustString("tmp/package-upload")) + if !filepath.IsAbs(Packages.ChunkedUploadPath) { + Packages.ChunkedUploadPath = filepath.ToSlash(filepath.Join(AppDataPath, Packages.ChunkedUploadPath)) + } + + if err := os.MkdirAll(Packages.ChunkedUploadPath, os.ModePerm); err != nil { + log.Error("Unable to create chunked upload directory: %s (%v)", Packages.ChunkedUploadPath, err) + } +} diff --git a/modules/setting/repository.go b/modules/setting/repository.go index f4a2f4ad66..f24bc841d6 100644 --- a/modules/setting/repository.go +++ b/modules/setting/repository.go @@ -154,7 +154,7 @@ var ( PrefixArchiveFiles: true, DisableMigrations: false, DisableStars: false, - DefaultBranch: "master", + DefaultBranch: "main", // Repository editor settings Editor: struct { @@ -295,6 +295,10 @@ func newRepository() { log.Fatal("Failed to map Repository.PullRequest settings: %v", err) } + if !Cfg.Section("packages").Key("ENABLED").MustBool(false) { + Repository.DisabledRepoUnits = append(Repository.DisabledRepoUnits, "repo.packages") + } + // Handle default trustmodel settings Repository.Signing.DefaultTrustModel = strings.ToLower(strings.TrimSpace(Repository.Signing.DefaultTrustModel)) if Repository.Signing.DefaultTrustModel == "default" { diff --git a/modules/setting/setting.go b/modules/setting/setting.go index f93dc53c32..5e317b39ea 100644 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -27,7 +27,6 @@ import ( "code.gitea.io/gitea/modules/user" "code.gitea.io/gitea/modules/util" - "github.com/unknwon/com" gossh "golang.org/x/crypto/ssh" ini "gopkg.in/ini.v1" ) @@ -89,13 +88,15 @@ var ( // AppDataPath is the default path for storing data. // It maps to ini:"APP_DATA_PATH" and defaults to AppWorkPath + "/data" AppDataPath string + // LocalURL is the url for locally running applications to contact Gitea. 
It always has a '/' suffix + // It maps to ini:"LOCAL_ROOT_URL" + LocalURL string // Server settings Protocol Scheme Domain string HTTPAddr string HTTPPort string - LocalURL string RedirectOtherPort bool PortToRedirect string OfflineMode bool @@ -105,6 +106,7 @@ var ( StaticCacheTime time.Duration EnableGzip bool LandingPageURL LandingPage + LandingPageCustom string UnixSocketPermission uint32 EnablePprof bool PprofDataPath string @@ -164,7 +166,7 @@ var ( Domain: "", Port: 22, ServerCiphers: []string{"chacha20-poly1305@openssh.com", "aes128-ctr", "aes192-ctr", "aes256-ctr", "aes128-gcm@openssh.com", "aes256-gcm@openssh.com"}, - ServerKeyExchanges: []string{"curve25519-sha256@libssh.org", "ecdh-sha2-nistp256", "ecdh-sha2-nistp384", "ecdh-sha2-nistp521", "diffie-hellman-group14-sha1"}, + ServerKeyExchanges: []string{"curve25519-sha256", "ecdh-sha2-nistp256", "ecdh-sha2-nistp384", "ecdh-sha2-nistp521", "diffie-hellman-group14-sha256", "diffie-hellman-group14-sha1"}, ServerMACs: []string{"hmac-sha2-256-etm@openssh.com", "hmac-sha2-256", "hmac-sha1"}, KeygenPath: "ssh-keygen", MinimumKeySizeCheck: true, @@ -195,6 +197,13 @@ var ( PasswordCheckPwn bool SuccessfulTokensCacheSize int + Camo = struct { + Enabled bool + ServerURL string `ini:"SERVER_URL"` + HMACKey string `ini:"HMAC_KEY"` + Allways bool + }{} + // UI settings UI = struct { ExplorePagingNum int @@ -203,6 +212,7 @@ var ( MembersPagingNum int FeedMaxCommitNum int FeedPagingNum int + PackagesPagingNum int GraphMaxCommitNum int CodeCommentLines int ReactionMaxUserNum int @@ -255,6 +265,7 @@ var ( MembersPagingNum: 20, FeedMaxCommitNum: 5, FeedPagingNum: 20, + PackagesPagingNum: 20, GraphMaxCommitNum: 100, CodeCommentLines: 4, ReactionMaxUserNum: 10, @@ -601,7 +612,7 @@ func loadFromConf(allowEmpty bool, extraConfig string) { Cfg.NameMapper = ini.SnackCase - homeDir, err := com.HomeDir() + homeDir, err := util.HomeDir() if err != nil { log.Fatal("Failed to get home directory: %v", err) } @@ -747,6 +758,7 @@ func loadFromConf(allowEmpty bool, extraConfig string) { } } LocalURL = sec.Key("LOCAL_ROOT_URL").MustString(defaultLocalURL) + LocalURL = strings.TrimRight(LocalURL, "/") + "/" RedirectOtherPort = sec.Key("REDIRECT_OTHER_PORT").MustBool(false) PortToRedirect = sec.Key("PORT_TO_REDIRECT").MustString("80") OfflineMode = sec.Key("OFFLINE_MODE").MustBool() @@ -765,15 +777,19 @@ func loadFromConf(allowEmpty bool, extraConfig string) { PprofDataPath = filepath.Join(AppWorkPath, PprofDataPath) } - switch sec.Key("LANDING_PAGE").MustString("home") { + landingPage := sec.Key("LANDING_PAGE").MustString("home") + switch landingPage { case "explore": LandingPageURL = LandingPageExplore case "organizations": LandingPageURL = LandingPageOrganizations case "login": LandingPageURL = LandingPageLogin - default: + case "": + case "home": LandingPageURL = LandingPageHome + default: + LandingPageURL = LandingPage(landingPage) } if len(SSH.Domain) == 0 { @@ -1006,6 +1022,8 @@ func loadFromConf(allowEmpty bool, extraConfig string) { newPictureService() + newPackages() + if err = Cfg.Section("ui").MapTo(&UI); err != nil { log.Fatal("Failed to map UI settings: %v", err) } else if err = Cfg.Section("markdown").MapTo(&Markdown); err != nil { @@ -1016,6 +1034,14 @@ func loadFromConf(allowEmpty bool, extraConfig string) { log.Fatal("Failed to map API settings: %v", err) } else if err = Cfg.Section("metrics").MapTo(&Metrics); err != nil { log.Fatal("Failed to map Metrics settings: %v", err) + } else if err = Cfg.Section("camo").MapTo(&Camo); err != nil { + 
log.Fatal("Failed to map Camo settings: %v", err) + } + + if Camo.Enabled { + if Camo.ServerURL == "" || Camo.HMACKey == "" { + log.Fatal(`Camo settings require "SERVER_URL" and HMAC_KEY`) + } } u := *appURL diff --git a/modules/ssh/ssh.go b/modules/ssh/ssh.go index 6f4e993457..44ed431c93 100644 --- a/modules/ssh/ssh.go +++ b/modules/ssh/ssh.go @@ -23,7 +23,9 @@ import ( "syscall" asymkey_model "code.gitea.io/gitea/models/asymkey" + "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" @@ -317,63 +319,11 @@ func Listen(host string, port int, ciphers, keyExchanges, macs []string) { } } - // Workaround slightly broken behaviour in x/crypto/ssh/handshake.go:458-463 - // - // Fundamentally the issue here is that HostKeyAlgos make the incorrect assumption - // that the PublicKey().Type() matches the signature algorithm. - // - // Therefore we need to add duplicates for the RSA with different signing algorithms. - signers := make([]ssh.Signer, 0, len(srv.HostSigners)) - for _, signer := range srv.HostSigners { - if signer.PublicKey().Type() == "ssh-rsa" { - signers = append(signers, - &wrapSigner{ - Signer: signer, - algorithm: gossh.SigAlgoRSASHA2512, - }, - &wrapSigner{ - Signer: signer, - algorithm: gossh.SigAlgoRSASHA2256, - }, - ) - } - signers = append(signers, signer) - } - srv.HostSigners = signers - - go listen(&srv) -} - -// wrapSigner wraps a signer and overrides its public key type with the provided algorithm -type wrapSigner struct { - ssh.Signer - algorithm string -} - -// PublicKey returns an associated PublicKey instance. -func (s *wrapSigner) PublicKey() gossh.PublicKey { - return &wrapPublicKey{ - PublicKey: s.Signer.PublicKey(), - algorithm: s.algorithm, - } -} - -// Sign returns raw signature for the given data. This method -// will apply the hash specified for the keytype to the data using -// the algorithm assigned for this key -func (s *wrapSigner) Sign(rand io.Reader, data []byte) (*gossh.Signature, error) { - return s.Signer.(gossh.AlgorithmSigner).SignWithAlgorithm(rand, data, s.algorithm) -} - -// wrapPublicKey wraps a PublicKey and overrides its type -type wrapPublicKey struct { - gossh.PublicKey - algorithm string -} - -// Type returns the algorithm -func (k *wrapPublicKey) Type() string { - return k.algorithm + go func() { + _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "Service: Built-in SSH server", process.SystemProcessType, true) + defer finished() + listen(&srv) + }() } // GenKeyPair make a pair of public and private keys for SSH access. 
diff --git a/modules/storage/local.go b/modules/storage/local.go index 022e6186d4..701b0b1a9f 100644 --- a/modules/storage/local.go +++ b/modules/storage/local.go @@ -9,7 +9,9 @@ import ( "io" "net/url" "os" + "path" "path/filepath" + "strings" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/util" @@ -57,14 +59,18 @@ func NewLocalStorage(ctx context.Context, cfg interface{}) (ObjectStorage, error }, nil } +func (l *LocalStorage) buildLocalPath(p string) string { + return filepath.Join(l.dir, path.Clean("/" + strings.ReplaceAll(p, "\\", "/"))[1:]) +} + // Open a file func (l *LocalStorage) Open(path string) (Object, error) { - return os.Open(filepath.Join(l.dir, path)) + return os.Open(l.buildLocalPath(path)) } // Save a file func (l *LocalStorage) Save(path string, r io.Reader, size int64) (int64, error) { - p := filepath.Join(l.dir, path) + p := l.buildLocalPath(path) if err := os.MkdirAll(filepath.Dir(p), os.ModePerm); err != nil { return 0, err } @@ -104,13 +110,12 @@ func (l *LocalStorage) Save(path string, r io.Reader, size int64) (int64, error) // Stat returns the info of the file func (l *LocalStorage) Stat(path string) (os.FileInfo, error) { - return os.Stat(filepath.Join(l.dir, path)) + return os.Stat(l.buildLocalPath(path)) } // Delete delete a file func (l *LocalStorage) Delete(path string) error { - p := filepath.Join(l.dir, path) - return util.Remove(p) + return util.Remove(l.buildLocalPath(path)) } // URL gets the redirect URL to a file diff --git a/modules/storage/local_test.go b/modules/storage/local_test.go new file mode 100644 index 0000000000..0749036cb7 --- /dev/null +++ b/modules/storage/local_test.go @@ -0,0 +1,53 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
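The buildLocalPath change above neutralises path traversal by anchoring every key at a virtual root before cleaning it. A small standalone sketch of the same expression (paths are made up; the test cases that follow cover the real storage keys):

package main // illustrative sketch, not part of the patch

import (
	"fmt"
	"path"
	"path/filepath"
	"strings"
)

func main() {
	// Anchoring the key at "/" before Clean means ".." can never climb above
	// the storage directory, which is what buildLocalPath relies on.
	in := `a\..\0\blob`
	cleaned := path.Clean("/"+strings.ReplaceAll(in, "\\", "/"))[1:]
	fmt.Println(cleaned)                                  // 0/blob
	fmt.Println(filepath.Join("/var/lib/gitea", cleaned)) // /var/lib/gitea/0/blob
}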
+ +package storage + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBuildLocalPath(t *testing.T) { + kases := []struct { + localDir string + path string + expected string + }{ + { + "a", + "0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + "a/0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + }, + { + "a", + "../0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + "a/0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + }, + { + "a", + "0\\a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + "a/0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + }, + { + "b", + "a/../0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + "b/0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + }, + { + "b", + "a\\..\\0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + "b/0/a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a14", + }, + } + + for _, k := range kases { + t.Run(k.path, func(t *testing.T) { + l := LocalStorage{dir: k.localDir} + + assert.EqualValues(t, k.expected, l.buildLocalPath(k.path)) + }) + } +} diff --git a/modules/storage/minio.go b/modules/storage/minio.go index f35f4092a9..f7b42d674c 100644 --- a/modules/storage/minio.go +++ b/modules/storage/minio.go @@ -117,7 +117,7 @@ func NewMinioStorage(ctx context.Context, cfg interface{}) (ObjectStorage, error } func (m *MinioStorage) buildMinioPath(p string) string { - return strings.TrimPrefix(path.Join(m.basePath, p), "/") + return strings.TrimPrefix(path.Join(m.basePath, path.Clean("/" + strings.ReplaceAll(p, "\\", "/"))[1:]), "/") } // Open open a file diff --git a/modules/storage/storage.go b/modules/storage/storage.go index f11e1ac743..ef7f6029a5 100644 --- a/modules/storage/storage.go +++ b/modules/storage/storage.go @@ -123,6 +123,9 @@ var ( // RepoArchives represents repository archives storage RepoArchives ObjectStorage + + // Packages represents packages storage + Packages ObjectStorage ) // Init init the stoarge @@ -143,7 +146,11 @@ func Init() error { return err } - return initRepoArchives() + if err := initRepoArchives(); err != nil { + return err + } + + return initPackages() } // NewStorage takes a storage type and some config and returns an ObjectStorage or an error @@ -188,3 +195,9 @@ func initRepoArchives() (err error) { RepoArchives, err = NewStorage(setting.RepoArchive.Storage.Type, &setting.RepoArchive.Storage) return } + +func initPackages() (err error) { + log.Info("Initialising Packages storage with type: %s", setting.Packages.Storage.Type) + Packages, err = NewStorage(setting.Packages.Storage.Type, &setting.Packages.Storage) + return +} diff --git a/modules/structs/admin_user.go b/modules/structs/admin_user.go index facf16a395..eccbf29a46 100644 --- a/modules/structs/admin_user.go +++ b/modules/structs/admin_user.go @@ -19,6 +19,7 @@ type CreateUserOption struct { Password string `json:"password" binding:"Required;MaxSize(255)"` MustChangePassword *bool `json:"must_change_password"` SendNotify bool `json:"send_notify"` + Restricted *bool `json:"restricted"` Visibility string `json:"visibility" binding:"In(,public,limited,private)"` } diff --git a/modules/structs/hook.go b/modules/structs/hook.go index e4d7652c72..07d51915de 100644 --- a/modules/structs/hook.go +++ b/modules/structs/hook.go @@ -110,6 +110,7 @@ var ( _ Payloader = &PullRequestPayload{} _ Payloader = &RepositoryPayload{} _ Payloader = &ReleasePayload{} + _ Payloader = &PackagePayload{} ) // _________ __ @@ -425,3 +426,27 @@ type RepositoryPayload struct { func (p *RepositoryPayload) JSONPayload() ([]byte, error) { return json.MarshalIndent(p, "", " ") } + +// HookPackageAction an action that happens to a package +type 
HookPackageAction string + +const ( + // HookPackageCreated created + HookPackageCreated HookPackageAction = "created" + // HookPackageDeleted deleted + HookPackageDeleted HookPackageAction = "deleted" +) + +// PackagePayload represents a package payload +type PackagePayload struct { + Action HookPackageAction `json:"action"` + Repository *Repository `json:"repository"` + Package *Package `json:"package"` + Organization *User `json:"organization"` + Sender *User `json:"sender"` +} + +// JSONPayload implements Payload +func (p *PackagePayload) JSONPayload() ([]byte, error) { + return json.MarshalIndent(p, "", " ") +} diff --git a/modules/structs/package.go b/modules/structs/package.go new file mode 100644 index 0000000000..fbdd6c90aa --- /dev/null +++ b/modules/structs/package.go @@ -0,0 +1,33 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package structs + +import ( + "time" +) + +// Package represents a package +type Package struct { + ID int64 `json:"id"` + Owner *User `json:"owner"` + Repository *Repository `json:"repository"` + Creator *User `json:"creator"` + Type string `json:"type"` + Name string `json:"name"` + Version string `json:"version"` + // swagger:strfmt date-time + CreatedAt time.Time `json:"created_at"` +} + +// PackageFile represents a package file +type PackageFile struct { + ID int64 `json:"id"` + Size int64 + Name string `json:"name"` + HashMD5 string `json:"md5"` + HashSHA1 string `json:"sha1"` + HashSHA256 string `json:"sha256"` + HashSHA512 string `json:"sha512"` +} diff --git a/modules/structs/pull.go b/modules/structs/pull.go index 653091b2f4..b63b3edfd3 100644 --- a/modules/structs/pull.go +++ b/modules/structs/pull.go @@ -31,9 +31,10 @@ type PullRequest struct { Mergeable bool `json:"mergeable"` HasMerged bool `json:"merged"` // swagger:strfmt date-time - Merged *time.Time `json:"merged_at"` - MergedCommitID *string `json:"merge_commit_sha"` - MergedBy *User `json:"merged_by"` + Merged *time.Time `json:"merged_at"` + MergedCommitID *string `json:"merge_commit_sha"` + MergedBy *User `json:"merged_by"` + AllowMaintainerEdit bool `json:"allow_maintainer_edit"` Base *PRBranchInfo `json:"base"` Head *PRBranchInfo `json:"head"` @@ -90,6 +91,7 @@ type EditPullRequestOption struct { Labels []int64 `json:"labels"` State *string `json:"state"` // swagger:strfmt date-time - Deadline *time.Time `json:"due_date"` - RemoveDeadline *bool `json:"unset_due_date"` + Deadline *time.Time `json:"due_date"` + RemoveDeadline *bool `json:"unset_due_date"` + AllowMaintainerEdit *bool `json:"allow_maintainer_edit"` } diff --git a/modules/structs/repo.go b/modules/structs/repo.go index 27d0b4f3ba..ef247ebc9c 100644 --- a/modules/structs/repo.go +++ b/modules/structs/repo.go @@ -187,6 +187,8 @@ type EditRepoOption struct { Archived *bool `json:"archived,omitempty"` // set to a string like `8h30m0s` to set the mirror interval time MirrorInterval *string `json:"mirror_interval,omitempty"` + // enable prune - remove obsolete remote-tracking references + EnablePrune *bool `json:"enable_prune,omitempty"` } // GenerateRepoOption options when creating repository using a template @@ -201,6 +203,8 @@ type GenerateRepoOption struct { // required: true // unique: true Name string `json:"name" binding:"Required;AlphaDashDot;MaxSize(100)"` + // Default branch of the new repository + DefaultBranch string `json:"default_branch"` // Description of the repository to create 
Description string `json:"description" binding:"MaxSize(255)"` // Whether the repository is private diff --git a/modules/structs/repo_collaborator.go b/modules/structs/repo_collaborator.go index 2b4fa390d2..2f9c8992a1 100644 --- a/modules/structs/repo_collaborator.go +++ b/modules/structs/repo_collaborator.go @@ -8,3 +8,10 @@ package structs type AddCollaboratorOption struct { Permission *string `json:"permission"` } + +// RepoCollaboratorPermission to get repository permission for a collaborator +type RepoCollaboratorPermission struct { + Permission string `json:"permission"` + RoleName string `json:"role_name"` + User *User `json:"user"` +} diff --git a/modules/structs/repo_commit.go b/modules/structs/repo_commit.go index f5c5f1b940..55a516a975 100644 --- a/modules/structs/repo_commit.go +++ b/modules/structs/repo_commit.go @@ -32,11 +32,19 @@ type CommitUser struct { // RepoCommit contains information of a commit in the context of a repository. type RepoCommit struct { - URL string `json:"url"` - Author *CommitUser `json:"author"` - Committer *CommitUser `json:"committer"` - Message string `json:"message"` - Tree *CommitMeta `json:"tree"` + URL string `json:"url"` + Author *CommitUser `json:"author"` + Committer *CommitUser `json:"committer"` + Message string `json:"message"` + Tree *CommitMeta `json:"tree"` + Verification *PayloadCommitVerification `json:"verification"` +} + +// CommitStats is statistics for a RepoCommit +type CommitStats struct { + Total int `json:"total"` + Additions int `json:"additions"` + Deletions int `json:"deletions"` } // Commit contains information generated from a Git commit. @@ -48,6 +56,7 @@ type Commit struct { Committer *User `json:"committer"` Parents []*CommitMeta `json:"parents"` Files []*CommitAffectedFiles `json:"files"` + Stats *CommitStats `json:"stats"` } // CommitDateOptions store dates for GIT_AUTHOR_DATE and GIT_COMMITTER_DATE diff --git a/modules/structs/repo_file.go b/modules/structs/repo_file.go index e2947bf7ac..135e6484cd 100644 --- a/modules/structs/repo_file.go +++ b/modules/structs/repo_file.go @@ -30,6 +30,11 @@ type CreateFileOptions struct { Content string `json:"content"` } +// Branch returns branch name +func (o *CreateFileOptions) Branch() string { + return o.FileOptions.BranchName +} + // DeleteFileOptions options for deleting files (used for other File structs below) // Note: `author` and `committer` are optional (if only one is given, it will be used for the other, otherwise the authenticated user will be used) type DeleteFileOptions struct { @@ -39,6 +44,11 @@ type DeleteFileOptions struct { SHA string `json:"sha" binding:"Required"` } +// Branch returns branch name +func (o *DeleteFileOptions) Branch() string { + return o.FileOptions.BranchName +} + // UpdateFileOptions options for updating files // Note: `author` and `committer` are optional (if only one is given, it will be used for the other, otherwise the authenticated user will be used) type UpdateFileOptions struct { @@ -50,6 +60,16 @@ type UpdateFileOptions struct { FromPath string `json:"from_path" binding:"MaxSize(500)"` } +// Branch returns branch name +func (o *UpdateFileOptions) Branch() string { + return o.FileOptions.BranchName +} + +// FileOptionInterface provides a unified interface for the different file options +type FileOptionInterface interface { + Branch() string +} + // ApplyDiffPatchFileOptions options for applying a diff patch // Note: `author` and `committer` are optional (if only one is given, it will be used for the other, otherwise the 
authenticated user will be used) type ApplyDiffPatchFileOptions struct { diff --git a/modules/svg/discover_bindata.go b/modules/svg/discover_bindata.go index e11951ff7e..cca1de76a7 100644 --- a/modules/svg/discover_bindata.go +++ b/modules/svg/discover_bindata.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build bindata -// +build bindata package svg diff --git a/modules/svg/discover_nobindata.go b/modules/svg/discover_nobindata.go index e3f13ddf6c..ef01fbcc3e 100644 --- a/modules/svg/discover_nobindata.go +++ b/modules/svg/discover_nobindata.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !bindata -// +build !bindata package svg diff --git a/modules/sync/unique_queue.go b/modules/sync/unique_queue.go deleted file mode 100644 index d41726b5af..0000000000 --- a/modules/sync/unique_queue.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2016 The Gogs Authors. All rights reserved. -// Copyright 2019 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -package sync - -import ( - "github.com/unknwon/com" -) - -// UniqueQueue is a queue which guarantees only one instance of same -// identity is in the line. Instances with same identity will be -// discarded if there is already one in the line. -// -// This queue is particularly useful for preventing duplicated task -// of same purpose. -type UniqueQueue struct { - table *StatusTable - queue chan string - closed chan struct{} -} - -// NewUniqueQueue initializes and returns a new UniqueQueue object. -func NewUniqueQueue(queueLength int) *UniqueQueue { - if queueLength <= 0 { - queueLength = 100 - } - - return &UniqueQueue{ - table: NewStatusTable(), - queue: make(chan string, queueLength), - closed: make(chan struct{}), - } -} - -// Close closes this queue -func (q *UniqueQueue) Close() { - select { - case <-q.closed: - default: - q.table.lock.Lock() - select { - case <-q.closed: - default: - close(q.closed) - } - q.table.lock.Unlock() - } -} - -// IsClosed returns a channel that is closed when this Queue is closed -func (q *UniqueQueue) IsClosed() <-chan struct{} { - return q.closed -} - -// IDs returns the current ids in the pool -func (q *UniqueQueue) IDs() []interface{} { - q.table.lock.Lock() - defer q.table.lock.Unlock() - ids := make([]interface{}, 0, len(q.table.pool)) - for id := range q.table.pool { - ids = append(ids, id) - } - return ids -} - -// Queue returns channel of queue for retrieving instances. -func (q *UniqueQueue) Queue() <-chan string { - return q.queue -} - -// Exist returns true if there is an instance with given identity -// exists in the queue. -func (q *UniqueQueue) Exist(id interface{}) bool { - return q.table.IsRunning(com.ToStr(id)) -} - -// AddFunc adds new instance to the queue with a custom runnable function, -// the queue is blocked until the function exits. -func (q *UniqueQueue) AddFunc(id interface{}, fn func()) { - idStr := com.ToStr(id) - q.table.lock.Lock() - if _, ok := q.table.pool[idStr]; ok { - q.table.lock.Unlock() - return - } - q.table.pool[idStr] = struct{}{} - if fn != nil { - fn() - } - q.table.lock.Unlock() - select { - case <-q.closed: - return - case q.queue <- idStr: - return - } -} - -// Add adds new instance to the queue. -func (q *UniqueQueue) Add(id interface{}) { - q.AddFunc(id, nil) -} - -// Remove removes instance from the queue. 
-func (q *UniqueQueue) Remove(id interface{}) { - q.table.Stop(com.ToStr(id)) -} diff --git a/modules/templates/dynamic.go b/modules/templates/dynamic.go index c6c47a6c88..de6968c314 100644 --- a/modules/templates/dynamic.go +++ b/modules/templates/dynamic.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !bindata -// +build !bindata package templates diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 63c165bc8b..682459d94a 100644 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -25,6 +25,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/avatars" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" @@ -33,6 +34,7 @@ import ( "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/svg" @@ -144,7 +146,6 @@ func NewFuncMap() []template.FuncMap { "EllipsisString": base.EllipsisString, "DiffTypeToStr": DiffTypeToStr, "DiffLineTypeToStr": DiffLineTypeToStr, - "Sha1": Sha1, "ShortSha": base.ShortSha, "MD5": base.EncodeMD5, "ActionContent2Commits": ActionContent2Commits, @@ -160,7 +161,16 @@ func NewFuncMap() []template.FuncMap { "RenderEmojiPlain": emoji.ReplaceAliases, "ReactionToEmoji": ReactionToEmoji, "RenderNote": RenderNote, - "IsMultilineCommitMessage": IsMultilineCommitMessage, + "RenderMarkdownToHtml": func(input string) template.HTML { + output, err := markdown.RenderString(&markup.RenderContext{ + URLPrefix: setting.AppSubURL, + }, input) + if err != nil { + log.Error("RenderString: %v", err) + } + return template.HTML(output) + }, + "IsMultilineCommitMessage": IsMultilineCommitMessage, "ThemeColorMetaTag": func() string { return setting.UI.ThemeColorMetaTag }, @@ -379,6 +389,7 @@ func NewFuncMap() []template.FuncMap { }, "Join": strings.Join, "QueryEscape": url.QueryEscape, + "DotEscape": DotEscape, }} } @@ -568,7 +579,7 @@ func Avatar(item interface{}, others ...interface{}) template.HTML { if src != "" { return AvatarHTML(src, size, class, t.DisplayName()) } - case *models.Organization: + case *organization.Organization: src := t.AsUser().AvatarLinkWithSize(size * setting.Avatar.RenderedSizeFactor) if src != "" { return AvatarHTML(src, size, class, t.AsUser().DisplayName()) @@ -632,9 +643,9 @@ func JSEscape(raw string) string { return template.JSEscapeString(raw) } -// Sha1 returns sha1 sum of string -func Sha1(str string) string { - return base.EncodeSha1(str) +// DotEscape wraps a dots in names with ZWJ [U+200D] in order to prevent autolinkers from detecting these as urls +func DotEscape(raw string) string { + return strings.ReplaceAll(raw, ".", "\u200d.\u200d") } // RenderCommitMessage renders commit message with XSS-safe and special links. diff --git a/modules/templates/static.go b/modules/templates/static.go index cb2978c2ef..351e48b4da 100644 --- a/modules/templates/static.go +++ b/modules/templates/static.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
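The DotEscape helper added above is small enough to show standalone; the following sketch (not part of the patch) reproduces its effect, with U+200D being the zero-width joiner:

package main // illustrative sketch, not part of the patch

import (
	"fmt"
	"strings"
)

// dotEscape mirrors the template helper added above: every dot is wrapped in
// zero-width joiners, so autolinkers no longer treat display names such as
// "gitea.io" as URLs while the visible text stays the same.
func dotEscape(raw string) string {
	return strings.ReplaceAll(raw, ".", "\u200d.\u200d")
}

func main() {
	fmt.Printf("%q\n", dotEscape("gitea.io")) // "gitea\u200d.\u200dio"
}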
//go:build bindata -// +build bindata package templates diff --git a/modules/templates/templates_bindata.go b/modules/templates/templates_bindata.go index 5b59e4447e..bcb2cbaf3f 100644 --- a/modules/templates/templates_bindata.go +++ b/modules/templates/templates_bindata.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build bindata -// +build bindata package templates diff --git a/modules/templates/vars/vars.go b/modules/templates/vars/vars.go new file mode 100644 index 0000000000..a22ea4d777 --- /dev/null +++ b/modules/templates/vars/vars.go @@ -0,0 +1,93 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package vars + +import ( + "fmt" + "strings" + "unicode" + "unicode/utf8" +) + +// ErrWrongSyntax represents a wrong syntax with a template +type ErrWrongSyntax struct { + Template string +} + +func (err ErrWrongSyntax) Error() string { + return fmt.Sprintf("wrong syntax found in %s", err.Template) +} + +// ErrVarMissing represents an error that no matched variable +type ErrVarMissing struct { + Template string + Var string +} + +func (err ErrVarMissing) Error() string { + return fmt.Sprintf("the variable %s is missing for %s", err.Var, err.Template) +} + +// Expand replaces all variables like {var} by `vars` map, it always returns the expanded string regardless of errors +// if error occurs, the error part doesn't change and is returned as it is. +func Expand(template string, vars map[string]string) (string, error) { + // in the future, if necessary, we can introduce some escape-char, + // for example: it will use `#' as a reversed char, templates will use `{#{}` to do escape and output char '{'. + var buf strings.Builder + var err error + + posBegin := 0 + strLen := len(template) + for posBegin < strLen { + // find the next `{` + pos := strings.IndexByte(template[posBegin:], '{') + if pos == -1 { + buf.WriteString(template[posBegin:]) + break + } + + // copy texts between vars + buf.WriteString(template[posBegin : posBegin+pos]) + + // find the var between `{` and `}`/end + posBegin += pos + posEnd := posBegin + 1 + for posEnd < strLen { + if template[posEnd] == '}' { + posEnd++ + break + } // in the future, if we need to support escape chars, we can do: if (isEscapeChar) { posEnd+=2 } + posEnd++ + } + + // the var part, it can be "{", "{}", "{..." or or "{...}" + part := template[posBegin:posEnd] + posBegin = posEnd + if part == "{}" || part[len(part)-1] != '}' { + // treat "{}" or "{..." as error + err = ErrWrongSyntax{Template: template} + buf.WriteString(part) + } else { + // now we get a valid key "{...}" + key := part[1 : len(part)-1] + keyFirst, _ := utf8.DecodeRuneInString(key) + if unicode.IsSpace(keyFirst) || unicode.IsPunct(keyFirst) || unicode.IsControl(keyFirst) { + // the if key doesn't start with a letter, then we do not treat it as a var now + buf.WriteString(part) + } else { + // look up in the map + if val, ok := vars[key]; ok { + buf.WriteString(val) + } else { + // write the non-existing var as it is + buf.WriteString(part) + err = ErrVarMissing{Template: template, Var: key} + } + } + } + } + + return buf.String(), err +} diff --git a/modules/templates/vars/vars_test.go b/modules/templates/vars/vars_test.go new file mode 100644 index 0000000000..1cd7669c00 --- /dev/null +++ b/modules/templates/vars/vars_test.go @@ -0,0 +1,72 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. 
+// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package vars + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExpandVars(t *testing.T) { + kases := []struct { + tmpl string + data map[string]string + out string + error bool + }{ + { + tmpl: "{a}", + data: map[string]string{ + "a": "1", + }, + out: "1", + }, + { + tmpl: "expand {a}, {b} and {c}, with non-var { } {#}", + data: map[string]string{ + "a": "1", + "b": "2", + "c": "3", + }, + out: "expand 1, 2 and 3, with non-var { } {#}", + }, + { + tmpl: "中文内容 {一}, {二} 和 {三} 中文结尾", + data: map[string]string{ + "一": "11", + "二": "22", + "三": "33", + }, + out: "中文内容 11, 22 和 33 中文结尾", + }, + { + tmpl: "expand {{a}, {b} and {c}", + data: map[string]string{ + "a": "foo", + "b": "bar", + }, + out: "expand {{a}, bar and {c}", + error: true, + }, + { + tmpl: "expand } {} and {", + out: "expand } {} and {", + error: true, + }, + } + + for _, kase := range kases { + t.Run(kase.tmpl, func(t *testing.T) { + res, err := Expand(kase.tmpl, kase.data) + assert.EqualValues(t, kase.out, res) + if kase.error { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} diff --git a/modules/test/context_tests.go b/modules/test/context_tests.go index e6c78bcaa5..c745a106c5 100644 --- a/modules/test/context_tests.go +++ b/modules/test/context_tests.go @@ -14,7 +14,6 @@ import ( "testing" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -39,6 +38,7 @@ func MockContext(t *testing.T, path string) *context.Context { Resp: context.NewResponse(resp), Locale: &mockLocale{}, } + defer ctx.Close() requestURL, err := url.Parse(path) assert.NoError(t, err) @@ -61,13 +61,13 @@ func LoadRepo(t *testing.T, ctx *context.Context, repoID int64) { ctx.Repo.Owner, err = user_model.GetUserByID(ctx.Repo.Repository.OwnerID) assert.NoError(t, err) ctx.Repo.RepoLink = ctx.Repo.Repository.Link() - ctx.Repo.Permission, err = models.GetUserRepoPermission(ctx.Repo.Repository, ctx.User) + ctx.Repo.Permission, err = models.GetUserRepoPermission(ctx, ctx.Repo.Repository, ctx.Doer) assert.NoError(t, err) } // LoadRepoCommit loads a repo's commit into a test context. func LoadRepoCommit(t *testing.T, ctx *context.Context) { - gitRepo, err := git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, ctx.Repo.Repository.RepoPath()) assert.NoError(t, err) defer gitRepo.Close() branch, err := gitRepo.GetHEADBranch() @@ -81,15 +81,15 @@ func LoadRepoCommit(t *testing.T, ctx *context.Context) { // LoadUser load a user into a test context. func LoadUser(t *testing.T, ctx *context.Context, userID int64) { - ctx.User = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID}).(*user_model.User) + ctx.Doer = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID}).(*user_model.User) } // LoadGitRepo load a git repo into a test context. Requires that ctx.Repo has // already been populated. 
func LoadGitRepo(t *testing.T, ctx *context.Context) { - assert.NoError(t, ctx.Repo.Repository.GetOwner(db.DefaultContext)) + assert.NoError(t, ctx.Repo.Repository.GetOwner(ctx)) var err error - ctx.Repo.GitRepo, err = git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.RepoPath()) + ctx.Repo.GitRepo, err = git.OpenRepository(ctx, ctx.Repo.Repository.RepoPath()) assert.NoError(t, err) } diff --git a/modules/timeutil/since.go b/modules/timeutil/since.go index c0240907ae..38b12829ad 100644 --- a/modules/timeutil/since.go +++ b/modules/timeutil/since.go @@ -12,8 +12,7 @@ import ( "time" "code.gitea.io/gitea/modules/setting" - - "github.com/unknwon/i18n" + "code.gitea.io/gitea/modules/translation/i18n" ) // Seconds-based time units diff --git a/modules/timeutil/since_test.go b/modules/timeutil/since_test.go index 1379e71c3d..49951b6e41 100644 --- a/modules/timeutil/since_test.go +++ b/modules/timeutil/since_test.go @@ -12,9 +12,9 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/translation/i18n" "github.com/stretchr/testify/assert" - "github.com/unknwon/i18n" ) var BaseDate time.Time diff --git a/modules/translation/i18n/i18n.go b/modules/translation/i18n/i18n.go new file mode 100644 index 0000000000..664e457ecf --- /dev/null +++ b/modules/translation/i18n/i18n.go @@ -0,0 +1,143 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package i18n + +import ( + "errors" + "fmt" + "reflect" + "strings" + + "code.gitea.io/gitea/modules/log" + + "gopkg.in/ini.v1" +) + +var ( + ErrLocaleAlreadyExist = errors.New("lang already exists") + + DefaultLocales = NewLocaleStore() +) + +type locale struct { + store *LocaleStore + langName string + langDesc string + messages *ini.File +} + +type LocaleStore struct { + // at the moment, all these fields are readonly after initialization + langNames []string + langDescs []string + localeMap map[string]*locale + defaultLang string +} + +func NewLocaleStore() *LocaleStore { + return &LocaleStore{localeMap: make(map[string]*locale)} +} + +// AddLocaleByIni adds locale by ini into the store +func (ls *LocaleStore) AddLocaleByIni(langName, langDesc string, localeFile interface{}, otherLocaleFiles ...interface{}) error { + if _, ok := ls.localeMap[langName]; ok { + return ErrLocaleAlreadyExist + } + iniFile, err := ini.LoadSources(ini.LoadOptions{ + IgnoreInlineComment: true, + UnescapeValueCommentSymbols: true, + }, localeFile, otherLocaleFiles...) + if err == nil { + iniFile.BlockMode = false + lc := &locale{store: ls, langName: langName, langDesc: langDesc, messages: iniFile} + ls.langNames = append(ls.langNames, lc.langName) + ls.langDescs = append(ls.langDescs, lc.langDesc) + ls.localeMap[lc.langName] = lc + } + return err +} + +func (ls *LocaleStore) HasLang(langName string) bool { + _, ok := ls.localeMap[langName] + return ok +} + +func (ls *LocaleStore) ListLangNameDesc() (names, desc []string) { + return ls.langNames, ls.langDescs +} + +// SetDefaultLang sets default language as a fallback +func (ls *LocaleStore) SetDefaultLang(lang string) { + ls.defaultLang = lang +} + +// Tr translates content to target language. fall back to default language. +func (ls *LocaleStore) Tr(lang, trKey string, trArgs ...interface{}) string { + l, ok := ls.localeMap[lang] + if !ok { + l, ok = ls.localeMap[ls.defaultLang] + } + if ok { + return l.Tr(trKey, trArgs...) 
+ } + return trKey +} + +// Tr translates content to locale language. fall back to default language. +func (l *locale) Tr(trKey string, trArgs ...interface{}) string { + var section string + + idx := strings.IndexByte(trKey, '.') + if idx > 0 { + section = trKey[:idx] + trKey = trKey[idx+1:] + } + + trMsg := trKey + if trIni, err := l.messages.Section(section).GetKey(trKey); err == nil { + trMsg = trIni.Value() + } else if l.store.defaultLang != "" && l.langName != l.store.defaultLang { + // try to fall back to default + if defaultLocale, ok := l.store.localeMap[l.store.defaultLang]; ok { + if trIni, err = defaultLocale.messages.Section(section).GetKey(trKey); err == nil { + trMsg = trIni.Value() + } + } + } + + if len(trArgs) > 0 { + fmtArgs := make([]interface{}, 0, len(trArgs)) + for _, arg := range trArgs { + val := reflect.ValueOf(arg) + if val.Kind() == reflect.Slice { + // before, it can accept Tr(lang, key, a, [b, c], d, [e, f]) as Sprintf(msg, a, b, c, d, e, f), it's an unstable behavior + // now, we restrict the strange behavior and only support: + // 1. Tr(lang, key, [slice-items]) as Sprintf(msg, items...) + // 2. Tr(lang, key, args...) as Sprintf(msg, args...) + if len(trArgs) == 1 { + for i := 0; i < val.Len(); i++ { + fmtArgs = append(fmtArgs, val.Index(i).Interface()) + } + } else { + log.Error("the args for i18n shouldn't contain uncertain slices, key=%q, args=%v", trKey, trArgs) + break + } + } else { + fmtArgs = append(fmtArgs, arg) + } + } + return fmt.Sprintf(trMsg, fmtArgs...) + } + return trMsg +} + +func ResetDefaultLocales() { + DefaultLocales = NewLocaleStore() +} + +// Tr use default locales to translate content to target language. +func Tr(lang, trKey string, trArgs ...interface{}) string { + return DefaultLocales.Tr(lang, trKey, trArgs...) +} diff --git a/modules/translation/i18n/i18n_test.go b/modules/translation/i18n/i18n_test.go new file mode 100644 index 0000000000..70066016cf --- /dev/null +++ b/modules/translation/i18n/i18n_test.go @@ -0,0 +1,56 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
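The Tr argument handling described in the comments above accepts either plain variadic arguments or a single slice; a hedged sketch with a hypothetical translation key:

package example // illustrative sketch, not part of the patch

import "code.gitea.io/gitea/modules/translation/i18n"

// Assuming "some.key" exists in the loaded en-US locale and contains two format
// verbs, both call forms below produce the same result; mixing a slice with
// further arguments is now logged as an error instead of being flattened.
func example() (string, string) {
	a := i18n.Tr("en-US", "some.key", 3, "repo")                // plain variadic arguments
	b := i18n.Tr("en-US", "some.key", []interface{}{3, "repo"}) // a single slice is expanded
	return a, b
}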
+ +package i18n + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_Tr(t *testing.T) { + testData1 := []byte(` +.dot.name = Dot Name +fmt = %[1]s %[2]s + +[section] +sub = Sub String +mixed = test value; more text +`) + + testData2 := []byte(` +fmt = %[2]s %[1]s + +[section] +sub = Changed Sub String +`) + + ls := NewLocaleStore() + assert.NoError(t, ls.AddLocaleByIni("lang1", "Lang1", testData1)) + assert.NoError(t, ls.AddLocaleByIni("lang2", "Lang2", testData2)) + ls.SetDefaultLang("lang1") + + result := ls.Tr("lang1", "fmt", "a", "b") + assert.Equal(t, "a b", result) + + result = ls.Tr("lang2", "fmt", "a", "b") + assert.Equal(t, "b a", result) + + result = ls.Tr("lang1", "section.sub") + assert.Equal(t, "Sub String", result) + + result = ls.Tr("lang2", "section.sub") + assert.Equal(t, "Changed Sub String", result) + + result = ls.Tr("", ".dot.name") + assert.Equal(t, "Dot Name", result) + + result = ls.Tr("lang2", "section.mixed") + assert.Equal(t, `test value; more text`, result) + + langs, descs := ls.ListLangNameDesc() + assert.Equal(t, []string{"lang1", "lang2"}, langs) + assert.Equal(t, []string{"Lang1", "Lang2"}, descs) +} diff --git a/modules/translation/translation.go b/modules/translation/translation.go index fd38e4d510..da9d9b9b68 100644 --- a/modules/translation/translation.go +++ b/modules/translation/translation.go @@ -11,8 +11,8 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/options" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/translation/i18n" - "github.com/unknwon/i18n" "golang.org/x/text/language" ) @@ -54,13 +54,13 @@ func TryTr(lang, format string, args ...interface{}) (string, bool) { // InitLocales loads the locales func InitLocales() { - i18n.Reset() + i18n.ResetDefaultLocales() localeNames, err := options.Dir("locale") if err != nil { log.Fatal("Failed to list locale files: %v", err) } - localFiles := make(map[string][]byte) + localFiles := make(map[string][]byte, len(localeNames)) for _, name := range localeNames { localFiles[name], err = options.Locale(name) if err != nil { @@ -76,16 +76,21 @@ func InitLocales() { matcher = language.NewMatcher(supportedTags) for i := range setting.Names { key := "locale_" + setting.Langs[i] + ".ini" - if err = i18n.SetMessageWithDesc(setting.Langs[i], setting.Names[i], localFiles[key]); err != nil { + if err = i18n.DefaultLocales.AddLocaleByIni(setting.Langs[i], setting.Names[i], localFiles[key]); err != nil { log.Error("Failed to set messages to %s: %v", setting.Langs[i], err) } } - i18n.SetDefaultLang("en-US") + if len(setting.Langs) != 0 { + defaultLangName := setting.Langs[0] + if defaultLangName != "en-US" { + log.Info("Use the first locale (%s) in LANGS setting option as default", defaultLangName) + } + i18n.DefaultLocales.SetDefaultLang(defaultLangName) + } - allLangs = make([]*LangType, 0, i18n.Count()) + langs, descs := i18n.DefaultLocales.ListLangNameDesc() + allLangs = make([]*LangType, 0, len(langs)) allLangMap = map[string]*LangType{} - langs := i18n.ListLangs() - descs := i18n.ListLangDescs() for i, v := range langs { l := &LangType{v, descs[i]} allLangs = append(allLangs, l) diff --git a/modules/util/copy.go b/modules/util/copy.go deleted file mode 100644 index 46765849dc..0000000000 --- a/modules/util/copy.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2020 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. 
- -package util - -import ( - "github.com/unknwon/com" -) - -// CopyFile copies file from source to target path. -func CopyFile(src, dest string) error { - return com.Copy(src, dest) -} - -// CopyDir copy files recursively from source to target directory. -// It returns error when error occurs in underlying functions. -func CopyDir(srcPath, destPath string) error { - return com.CopyDir(srcPath, destPath) -} diff --git a/modules/util/filebuffer/file_backed_buffer.go b/modules/util/filebuffer/file_backed_buffer.go new file mode 100644 index 0000000000..128030b4c5 --- /dev/null +++ b/modules/util/filebuffer/file_backed_buffer.go @@ -0,0 +1,147 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package filebuffer + +import ( + "bytes" + "errors" + "io" + "os" +) + +const maxInt = int(^uint(0) >> 1) // taken from bytes.Buffer + +var ( + // ErrInvalidMemorySize occurs if the memory size is not in a valid range + ErrInvalidMemorySize = errors.New("Memory size must be greater 0 and lower math.MaxInt32") + // ErrWriteAfterRead occurs if Write is called after a read operation + ErrWriteAfterRead = errors.New("Write is unsupported after a read operation") +) + +type readAtSeeker interface { + io.ReadSeeker + io.ReaderAt +} + +// FileBackedBuffer uses a memory buffer with a fixed size. +// If more data is written a temporary file is used instead. +// It implements io.ReadWriteCloser, io.ReadSeekCloser and io.ReaderAt +type FileBackedBuffer struct { + maxMemorySize int64 + size int64 + buffer bytes.Buffer + file *os.File + reader readAtSeeker +} + +// New creates a file backed buffer with a specific maximum memory size +func New(maxMemorySize int) (*FileBackedBuffer, error) { + if maxMemorySize < 0 || maxMemorySize > maxInt { + return nil, ErrInvalidMemorySize + } + + return &FileBackedBuffer{ + maxMemorySize: int64(maxMemorySize), + }, nil +} + +// CreateFromReader creates a file backed buffer and copies the provided reader data into it. 
+func CreateFromReader(r io.Reader, maxMemorySize int) (*FileBackedBuffer, error) { + b, err := New(maxMemorySize) + if err != nil { + return nil, err + } + + _, err = io.Copy(b, r) + if err != nil { + return nil, err + } + + return b, nil +} + +// Write implements io.Writer +func (b *FileBackedBuffer) Write(p []byte) (int, error) { + if b.reader != nil { + return 0, ErrWriteAfterRead + } + + var n int + var err error + + if b.file != nil { + n, err = b.file.Write(p) + } else { + if b.size+int64(len(p)) > b.maxMemorySize { + b.file, err = os.CreateTemp("", "gitea-buffer-") + if err != nil { + return 0, err + } + + _, err = io.Copy(b.file, &b.buffer) + if err != nil { + return 0, err + } + + return b.Write(p) + } + + n, err = b.buffer.Write(p) + } + + if err != nil { + return n, err + } + b.size += int64(n) + return n, nil +} + +// Size returns the byte size of the buffered data +func (b *FileBackedBuffer) Size() int64 { + return b.size +} + +func (b *FileBackedBuffer) switchToReader() { + if b.reader != nil { + return + } + + if b.file != nil { + b.reader = b.file + } else { + b.reader = bytes.NewReader(b.buffer.Bytes()) + } +} + +// Read implements io.Reader +func (b *FileBackedBuffer) Read(p []byte) (int, error) { + b.switchToReader() + + return b.reader.Read(p) +} + +// ReadAt implements io.ReaderAt +func (b *FileBackedBuffer) ReadAt(p []byte, off int64) (int, error) { + b.switchToReader() + + return b.reader.ReadAt(p, off) +} + +// Seek implements io.Seeker +func (b *FileBackedBuffer) Seek(offset int64, whence int) (int64, error) { + b.switchToReader() + + return b.reader.Seek(offset, whence) +} + +// Close implements io.Closer +func (b *FileBackedBuffer) Close() error { + if b.file != nil { + err := b.file.Close() + os.Remove(b.file.Name()) + return err + } + return nil +} diff --git a/modules/util/io.go b/modules/util/io.go index b467c0ac8a..0c677c359f 100644 --- a/modules/util/io.go +++ b/modules/util/io.go @@ -9,7 +9,7 @@ import ( ) // ReadAtMost reads at most len(buf) bytes from r into buf. -// It returns the number of bytes copied. n is only less then len(buf) if r provides fewer bytes. +// It returns the number of bytes copied. n is only less than len(buf) if r provides fewer bytes. // If EOF occurs while reading, err will be nil. func ReadAtMost(r io.Reader, buf []byte) (n int, err error) { n, err = io.ReadFull(r, buf) diff --git a/modules/util/legacy.go b/modules/util/legacy.go new file mode 100644 index 0000000000..d319faad09 --- /dev/null +++ b/modules/util/legacy.go @@ -0,0 +1,92 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package util + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "errors" + "io" + "os" +) + +// CopyFile copies file from source to target path. +func CopyFile(src, dest string) error { + si, err := os.Lstat(src) + if err != nil { + return err + } + + sr, err := os.Open(src) + if err != nil { + return err + } + defer sr.Close() + + dw, err := os.Create(dest) + if err != nil { + return err + } + defer dw.Close() + + if _, err = io.Copy(dw, sr); err != nil { + return err + } + + if err = os.Chtimes(dest, si.ModTime(), si.ModTime()); err != nil { + return err + } + return os.Chmod(dest, si.Mode()) +} + +// AESGCMEncrypt (from legacy package): encrypts plaintext with the given key using AES in GCM mode. should be replaced. 
+func AESGCMEncrypt(key, plaintext []byte) ([]byte, error) { + block, err := aes.NewCipher(key) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + nonce := make([]byte, gcm.NonceSize()) + if _, err := rand.Read(nonce); err != nil { + return nil, err + } + + ciphertext := gcm.Seal(nil, nonce, plaintext, nil) + return append(nonce, ciphertext...), nil +} + +// AESGCMDecrypt (from legacy package): decrypts ciphertext with the given key using AES in GCM mode. should be replaced. +func AESGCMDecrypt(key, ciphertext []byte) ([]byte, error) { + block, err := aes.NewCipher(key) + if err != nil { + return nil, err + } + + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + + size := gcm.NonceSize() + if len(ciphertext)-size <= 0 { + return nil, errors.New("ciphertext is empty") + } + + nonce := ciphertext[:size] + ciphertext = ciphertext[size:] + + plainText, err := gcm.Open(nil, nonce, ciphertext, nil) + if err != nil { + return nil, err + } + + return plainText, nil +} diff --git a/modules/util/legacy_test.go b/modules/util/legacy_test.go new file mode 100644 index 0000000000..c41f7a008c --- /dev/null +++ b/modules/util/legacy_test.go @@ -0,0 +1,58 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package util + +import ( + "crypto/aes" + "crypto/rand" + "fmt" + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestCopyFile(t *testing.T) { + testContent := []byte("hello") + + tmpDir := os.TempDir() + now := time.Now() + srcFile := fmt.Sprintf("%s/copy-test-%d-src.txt", tmpDir, now.UnixMicro()) + dstFile := fmt.Sprintf("%s/copy-test-%d-dst.txt", tmpDir, now.UnixMicro()) + + _ = os.Remove(srcFile) + _ = os.Remove(dstFile) + defer func() { + _ = os.Remove(srcFile) + _ = os.Remove(dstFile) + }() + + err := os.WriteFile(srcFile, testContent, 0o777) + assert.NoError(t, err) + err = CopyFile(srcFile, dstFile) + assert.NoError(t, err) + dstContent, err := os.ReadFile(dstFile) + assert.NoError(t, err) + assert.Equal(t, testContent, dstContent) +} + +func TestAESGCM(t *testing.T) { + t.Parallel() + + key := make([]byte, aes.BlockSize) + _, err := rand.Read(key) + assert.NoError(t, err) + + plaintext := []byte("this will be encrypted") + + ciphertext, err := AESGCMEncrypt(key, plaintext) + assert.NoError(t, err) + + decrypted, err := AESGCMDecrypt(key, ciphertext) + assert.NoError(t, err) + + assert.Equal(t, plaintext, decrypted) +} diff --git a/modules/util/net.go b/modules/util/net.go deleted file mode 100644 index 54c0a2ca39..0000000000 --- a/modules/util/net.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2021 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -package util - -import ( - "net" -) - -// IsIPPrivate for net.IP.IsPrivate. 
TODO: replace with `ip.IsPrivate()` if min go version is bumped to 1.17 -func IsIPPrivate(ip net.IP) bool { - if ip4 := ip.To4(); ip4 != nil { - return ip4[0] == 10 || - (ip4[0] == 172 && ip4[1]&0xf0 == 16) || - (ip4[0] == 192 && ip4[1] == 168) - } - return len(ip) == net.IPv6len && ip[0]&0xfe == 0xfc -} diff --git a/modules/util/path.go b/modules/util/path.go index f4acf92ba9..ed7cc62699 100644 --- a/modules/util/path.go +++ b/modules/util/path.go @@ -154,6 +154,10 @@ func StatDir(rootPath string, includeDir ...bool) ([]string, error) { return statDir(rootPath, "", isIncludeDir, false, false) } +func isOSWindows() bool { + return runtime.GOOS == "windows" +} + // FileURLToPath extracts the path information from a file://... url. func FileURLToPath(u *url.URL) (string, error) { if u.Scheme != "file" { @@ -162,7 +166,7 @@ func FileURLToPath(u *url.URL) (string, error) { path := u.Path - if runtime.GOOS != "windows" { + if !isOSWindows() { return path, nil } @@ -173,3 +177,24 @@ func FileURLToPath(u *url.URL) (string, error) { } return path, nil } + +// HomeDir returns path of '~'(in Linux) on Windows, +// it returns error when the variable does not exist. +func HomeDir() (home string, err error) { + // TODO: some users run Gitea with mismatched uid and "HOME=xxx" (they set HOME=xxx by environment manually) + // so at the moment we can not use `user.Current().HomeDir` + if isOSWindows() { + home = os.Getenv("USERPROFILE") + if home == "" { + home = os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") + } + } else { + home = os.Getenv("HOME") + } + + if home == "" { + return "", errors.New("cannot get home directory") + } + + return home, nil +} diff --git a/modules/util/sanitize.go b/modules/util/sanitize.go index a782fcf512..685b2699b0 100644 --- a/modules/util/sanitize.go +++ b/modules/util/sanitize.go @@ -5,59 +5,71 @@ package util import ( - "net/url" - "strings" -) + "bytes" + "unicode" -const ( - userPlaceholder = "sanitized-credential" - unparsableURL = "(unparsable url)" + "github.com/yuin/goldmark/util" ) type sanitizedError struct { - err error - replacer *strings.Replacer + err error } func (err sanitizedError) Error() string { - return err.replacer.Replace(err.err.Error()) + return SanitizeCredentialURLs(err.err.Error()) } -// NewSanitizedError wraps an error and replaces all old, new string pairs in the message text. -func NewSanitizedError(err error, oldnew ...string) error { - return sanitizedError{err: err, replacer: strings.NewReplacer(oldnew...)} +func (err sanitizedError) Unwrap() error { + return err.err } -// NewURLSanitizedError wraps an error and replaces the url credential or removes them. -func NewURLSanitizedError(err error, u *url.URL, usePlaceholder bool) error { - return sanitizedError{err: err, replacer: NewURLSanitizer(u, usePlaceholder)} +// SanitizeErrorCredentialURLs wraps the error and make sure the returned error message doesn't contain sensitive credentials in URLs +func SanitizeErrorCredentialURLs(err error) error { + return sanitizedError{err: err} } -// NewStringURLSanitizedError wraps an error and replaces the url credential or removes them. -// If the url can't get parsed it gets replaced with a placeholder string. -func NewStringURLSanitizedError(err error, unsanitizedURL string, usePlaceholder bool) error { - return sanitizedError{err: err, replacer: NewStringURLSanitizer(unsanitizedURL, usePlaceholder)} -} +const userPlaceholder = "sanitized-credential" -// NewURLSanitizer creates a replacer for the url with the credential sanitized or removed. 
-func NewURLSanitizer(u *url.URL, usePlaceholder bool) *strings.Replacer { - old := u.String() +var schemeSep = []byte("://") - if u.User != nil && usePlaceholder { - u.User = url.User(userPlaceholder) - } else { - u.User = nil +// SanitizeCredentialURLs remove all credentials in URLs (starting with "scheme://") for the input string: "https://user:pass@domain.com" => "https://sanitized-credential@domain.com" +func SanitizeCredentialURLs(s string) string { + bs := util.StringToReadOnlyBytes(s) + schemeSepPos := bytes.Index(bs, schemeSep) + if schemeSepPos == -1 || bytes.IndexByte(bs[schemeSepPos:], '@') == -1 { + return s // fast return if there is no URL scheme or no userinfo } - return strings.NewReplacer(old, u.String()) -} - -// NewStringURLSanitizer creates a replacer for the url with the credential sanitized or removed. -// If the url can't get parsed it gets replaced with a placeholder string -func NewStringURLSanitizer(unsanitizedURL string, usePlaceholder bool) *strings.Replacer { - u, err := url.Parse(unsanitizedURL) - if err != nil { - // don't log the error, since it might contain unsanitized URL. - return strings.NewReplacer(unsanitizedURL, unparsableURL) + out := make([]byte, 0, len(bs)+len(userPlaceholder)) + for schemeSepPos != -1 { + schemeSepPos += 3 // skip the "://" + sepAtPos := -1 // the possible '@' position: "https://foo@[^here]host" + sepEndPos := schemeSepPos // the possible end position: "The https://host[^here] in log for test" + sepLoop: + for ; sepEndPos < len(bs); sepEndPos++ { + c := bs[sepEndPos] + if ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z') || ('0' <= c && c <= '9') { + continue + } + switch c { + case '@': + sepAtPos = sepEndPos + case '-', '.', '_', '~', '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '%': + continue // due to RFC 3986, userinfo can contain - . _ ~ ! $ & ' ( ) * + , ; = : and any percent-encoded chars + default: + break sepLoop // if it is an invalid char for URL (eg: space, '/', and others), stop the loop + } + } + // if there is '@', and the string is like "s://u@h", then hide the "u" part + if sepAtPos != -1 && (schemeSepPos >= 4 && unicode.IsLetter(rune(bs[schemeSepPos-4]))) && sepAtPos-schemeSepPos > 0 && sepEndPos-sepAtPos > 0 { + out = append(out, bs[:schemeSepPos]...) + out = append(out, userPlaceholder...) + out = append(out, bs[sepAtPos:sepEndPos]...) + } else { + out = append(out, bs[:sepEndPos]...) + } + bs = bs[sepEndPos:] + schemeSepPos = bytes.Index(bs, schemeSep) } - return NewURLSanitizer(u, usePlaceholder) + out = append(out, bs...) + return util.BytesToReadOnlyString(out) } diff --git a/modules/util/sanitize_test.go b/modules/util/sanitize_test.go index c141f5e947..78166cfdff 100644 --- a/modules/util/sanitize_test.go +++ b/modules/util/sanitize_test.go @@ -11,154 +11,65 @@ import ( "github.com/stretchr/testify/assert" ) -func TestNewSanitizedError(t *testing.T) { - err := errors.New("error while secret on test") - err2 := NewSanitizedError(err) - assert.Equal(t, err.Error(), err2.Error()) - - cases := []struct { - input error - oldnew []string - expected string - }{ - // case 0 - { - errors.New("error while secret on test"), - []string{"secret", "replaced"}, - "error while replaced on test", - }, - // case 1 - { - errors.New("error while sec-ret on test"), - []string{"secret", "replaced"}, - "error while sec-ret on test", - }, - } - - for n, c := range cases { - err := NewSanitizedError(c.input, c.oldnew...) 
- - assert.Equal(t, c.expected, err.Error(), "case %d: error should match", n) - } +func TestSanitizeErrorCredentialURLs(t *testing.T) { + err := errors.New("error with https://a@b.com") + se := SanitizeErrorCredentialURLs(err) + assert.Equal(t, "error with https://"+userPlaceholder+"@b.com", se.Error()) } -func TestNewStringURLSanitizer(t *testing.T) { +func TestSanitizeCredentialURLs(t *testing.T) { cases := []struct { - input string - placeholder bool - expected string + input string + expected string }{ - // case 0 { "https://github.com/go-gitea/test_repo.git", - true, "https://github.com/go-gitea/test_repo.git", }, - // case 1 - { - "https://github.com/go-gitea/test_repo.git", - false, - "https://github.com/go-gitea/test_repo.git", - }, - // case 2 { "https://mytoken@github.com/go-gitea/test_repo.git", - true, "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", }, - // case 3 - { - "https://mytoken@github.com/go-gitea/test_repo.git", - false, - "https://github.com/go-gitea/test_repo.git", - }, - // case 4 { "https://user:password@github.com/go-gitea/test_repo.git", - true, "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", }, - // case 5 { - "https://user:password@github.com/go-gitea/test_repo.git", - false, - "https://github.com/go-gitea/test_repo.git", + "ftp://x@", + "ftp://" + userPlaceholder + "@", }, - // case 6 { - "https://gi\nthub.com/go-gitea/test_repo.git", - false, - unparsableURL, + "ftp://x/@", + "ftp://x/@", + }, + { + "ftp://u@x/@", // test multiple @ chars + "ftp://" + userPlaceholder + "@x/@", + }, + { + "😊ftp://u@x😊", // test unicode + "😊ftp://" + userPlaceholder + "@x😊", + }, + { + "://@", + "://@", + }, + { + "//u:p@h", // do not process URLs without explicit scheme, they are not treated as "valid" URLs because there is no scheme context in string + "//u:p@h", + }, + { + "s://u@h", // the minimal pattern to be sanitized + "s://" + userPlaceholder + "@h", + }, + { + "URLs in log https://u:b@h and https://u:b@h:80/, with https://h.com and u@h.com", + "URLs in log https://" + userPlaceholder + "@h and https://" + userPlaceholder + "@h:80/, with https://h.com and u@h.com", }, } for n, c := range cases { - // uses NewURLSanitizer internally - result := NewStringURLSanitizer(c.input, c.placeholder).Replace(c.input) - + result := SanitizeCredentialURLs(c.input) assert.Equal(t, c.expected, result, "case %d: error should match", n) } } - -func TestNewStringURLSanitizedError(t *testing.T) { - cases := []struct { - input string - placeholder bool - expected string - }{ - // case 0 - { - "https://github.com/go-gitea/test_repo.git", - true, - "https://github.com/go-gitea/test_repo.git", - }, - // case 1 - { - "https://github.com/go-gitea/test_repo.git", - false, - "https://github.com/go-gitea/test_repo.git", - }, - // case 2 - { - "https://mytoken@github.com/go-gitea/test_repo.git", - true, - "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", - }, - // case 3 - { - "https://mytoken@github.com/go-gitea/test_repo.git", - false, - "https://github.com/go-gitea/test_repo.git", - }, - // case 4 - { - "https://user:password@github.com/go-gitea/test_repo.git", - true, - "https://" + userPlaceholder + "@github.com/go-gitea/test_repo.git", - }, - // case 5 - { - "https://user:password@github.com/go-gitea/test_repo.git", - false, - "https://github.com/go-gitea/test_repo.git", - }, - // case 6 - { - "https://gi\nthub.com/go-gitea/test_repo.git", - false, - unparsableURL, - }, - } - - encloseText := func(input string) string { - return 
"test " + input + " test" - } - - for n, c := range cases { - err := errors.New(encloseText(c.input)) - - result := NewStringURLSanitizedError(err, c.input, c.placeholder) - - assert.Equal(t, encloseText(c.expected), result.Error(), "case %d: error should match", n) - } -} diff --git a/modules/util/slice.go b/modules/util/slice.go new file mode 100644 index 0000000000..552f5b866a --- /dev/null +++ b/modules/util/slice.go @@ -0,0 +1,18 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package util + +// RemoveIDFromList removes the given ID from the slice, if found. +// It does not preserve order, and assumes the ID is unique. +func RemoveIDFromList(list []int64, id int64) ([]int64, bool) { + n := len(list) - 1 + for i, item := range list { + if item == id { + list[i] = list[n] + return list[:n], true + } + } + return list, false +} diff --git a/modules/util/string.go b/modules/util/string.go new file mode 100644 index 0000000000..4301f75f99 --- /dev/null +++ b/modules/util/string.go @@ -0,0 +1,88 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package util + +import "github.com/yuin/goldmark/util" + +func isSnakeCaseUpper(c byte) bool { + return 'A' <= c && c <= 'Z' +} + +func isSnakeCaseLowerOrNumber(c byte) bool { + return 'a' <= c && c <= 'z' || '0' <= c && c <= '9' +} + +// ToSnakeCase convert the input string to snake_case format. +// +// Some samples. +// "FirstName" => "first_name" +// "HTTPServer" => "http_server" +// "NoHTTPS" => "no_https" +// "GO_PATH" => "go_path" +// "GO PATH" => "go_path" // space is converted to underscore. +// "GO-PATH" => "go_path" // hyphen is converted to underscore. +// +func ToSnakeCase(input string) string { + if len(input) == 0 { + return "" + } + + var res []byte + if len(input) == 1 { + c := input[0] + if isSnakeCaseUpper(c) { + res = []byte{c + 'a' - 'A'} + } else if isSnakeCaseLowerOrNumber(c) { + res = []byte{c} + } else { + res = []byte{'_'} + } + } else { + res = make([]byte, 0, len(input)*4/3) + pos := 0 + needSep := false + for pos < len(input) { + c := input[pos] + if c >= 0x80 { + res = append(res, c) + pos++ + continue + } + isUpper := isSnakeCaseUpper(c) + if isUpper || isSnakeCaseLowerOrNumber(c) { + end := pos + 1 + if isUpper { + // skip the following upper letters + for end < len(input) && isSnakeCaseUpper(input[end]) { + end++ + } + if end-pos > 1 && end < len(input) && isSnakeCaseLowerOrNumber(input[end]) { + end-- + } + } + // skip the following lower or number letters + for end < len(input) && (isSnakeCaseLowerOrNumber(input[end]) || input[end] >= 0x80) { + end++ + } + if needSep { + res = append(res, '_') + } + res = append(res, input[pos:end]...) + pos = end + needSep = true + } else { + res = append(res, '_') + pos++ + needSep = false + } + } + for i := 0; i < len(res); i++ { + if isSnakeCaseUpper(res[i]) { + res[i] += 'a' - 'A' + } + } + } + return util.BytesToReadOnlyString(res) +} diff --git a/modules/util/string_test.go b/modules/util/string_test.go new file mode 100644 index 0000000000..49de29ab67 --- /dev/null +++ b/modules/util/string_test.go @@ -0,0 +1,48 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package util + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestToSnakeCase(t *testing.T) { + cases := map[string]string{ + // all old cases from the legacy package + "HTTPServer": "http_server", + "_camelCase": "_camel_case", + "NoHTTPS": "no_https", + "Wi_thF": "wi_th_f", + "_AnotherTES_TCaseP": "_another_tes_t_case_p", + "ALL": "all", + "_HELLO_WORLD_": "_hello_world_", + "HELLO_WORLD": "hello_world", + "HELLO____WORLD": "hello____world", + "TW": "tw", + "_C": "_c", + + " sentence case ": "__sentence_case__", + " Mixed-hyphen case _and SENTENCE_case and UPPER-case": "_mixed_hyphen_case__and_sentence_case_and_upper_case", + + // new cases + " ": "_", + "A": "a", + "A0": "a0", + "a0": "a0", + "Aa0": "aa0", + "啊": "啊", + "A啊": "a啊", + "Aa啊b": "aa啊b", + "A啊B": "a啊_b", + "Aa啊B": "aa啊_b", + "TheCase2": "the_case2", + "ObjIDs": "obj_i_ds", // the strange database column name which already exists + } + for input, expected := range cases { + assert.Equal(t, expected, ToSnakeCase(input)) + } +} diff --git a/modules/validation/helpers.go b/modules/validation/helpers.go index 617ec3578c..484b12b2a2 100644 --- a/modules/validation/helpers.go +++ b/modules/validation/helpers.go @@ -13,32 +13,10 @@ import ( "code.gitea.io/gitea/modules/setting" ) -var loopbackIPBlocks []*net.IPNet - var externalTrackerRegex = regexp.MustCompile(`({?)(?:user|repo|index)+?(}?)`) -func init() { - for _, cidr := range []string{ - "127.0.0.0/8", // IPv4 loopback - "::1/128", // IPv6 loopback - } { - if _, block, err := net.ParseCIDR(cidr); err == nil { - loopbackIPBlocks = append(loopbackIPBlocks, block) - } - } -} - func isLoopbackIP(ip string) bool { - pip := net.ParseIP(ip) - if pip == nil { - return false - } - for _, block := range loopbackIPBlocks { - if block.Contains(pip) { - return true - } - } - return false + return net.ParseIP(ip).IsLoopback() } // IsValidURL checks if URL is valid diff --git a/modules/web/middleware/binding.go b/modules/web/middleware/binding.go index 9b0b1d7784..c9dc4a8f59 100644 --- a/modules/web/middleware/binding.go +++ b/modules/web/middleware/binding.go @@ -10,10 +10,10 @@ import ( "strings" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/validation" "gitea.com/go-chi/binding" - "github.com/unknwon/com" ) // Form form binding interface @@ -22,7 +22,7 @@ type Form interface { } func init() { - binding.SetNameMapper(com.ToSnakeCase) + binding.SetNameMapper(util.ToSnakeCase) } // AssignForm assign form values back to the template data. 
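For reference, a minimal sketch (not part of the patch) of the name mapping that the binding hunk above switches to: form-field keys are now derived with util.ToSnakeCase instead of com.ToSnakeCase. The inputs and expected outputs below come straight from the test table added in modules/util/string_test.go, and the import path is the same one the binding.go hunk itself uses.

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/util"
)

func main() {
	// Field names taken from the ToSnakeCase test table in this patch.
	for _, field := range []string{"HTTPServer", "NoHTTPS", "TheCase2", "ObjIDs"} {
		fmt.Printf("%s => %s\n", field, util.ToSnakeCase(field))
	}
	// HTTPServer => http_server
	// NoHTTPS    => no_https
	// TheCase2   => the_case2
	// ObjIDs     => obj_i_ds
}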
@@ -43,7 +43,7 @@ func AssignForm(form interface{}, data map[string]interface{}) { if fieldName == "-" { continue } else if len(fieldName) == 0 { - fieldName = com.ToSnakeCase(field.Name) + fieldName = util.ToSnakeCase(field.Name) } data[fieldName] = val.Field(i).Interface() diff --git a/modules/web/middleware/cookie.go b/modules/web/middleware/cookie.go index 80fe302137..b5904d6713 100644 --- a/modules/web/middleware/cookie.go +++ b/modules/web/middleware/cookie.go @@ -98,17 +98,6 @@ func DeleteRedirectToCookie(resp http.ResponseWriter) { SameSite(setting.SessionConfig.SameSite)) } -// DeleteSesionConfigPathCookie convenience function to delete SessionConfigPath cookies consistently -func DeleteSesionConfigPathCookie(resp http.ResponseWriter, name string) { - SetCookie(resp, name, "", - -1, - setting.SessionConfig.CookiePath, - setting.SessionConfig.Domain, - setting.SessionConfig.Secure, - true, - SameSite(setting.SessionConfig.SameSite)) -} - // DeleteCSRFCookie convenience function to delete SessionConfigPath cookies consistently func DeleteCSRFCookie(resp http.ResponseWriter) { SetCookie(resp, setting.CSRFCookieName, "", @@ -117,7 +106,7 @@ func DeleteCSRFCookie(resp http.ResponseWriter) { setting.SessionConfig.Domain) // FIXME: Do we need to set the Secure, httpOnly and SameSite values too? } -// SetCookie set the cookies +// SetCookie set the cookies. (name, value, lifetime, path, domain, secure, httponly, expires, {sameSite, ...}) // TODO: Copied from gitea.com/macaron/macaron and should be improved after macaron removed. func SetCookie(resp http.ResponseWriter, name, value string, others ...interface{}) { cookie := http.Cookie{} diff --git a/modules/web/middleware/locale.go b/modules/web/middleware/locale.go index 3daf5f32d4..de8e497965 100644 --- a/modules/web/middleware/locale.go +++ b/modules/web/middleware/locale.go @@ -9,8 +9,8 @@ import ( "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/translation" + "code.gitea.io/gitea/modules/translation/i18n" - "github.com/unknwon/i18n" "golang.org/x/text/language" ) @@ -28,8 +28,8 @@ func Locale(resp http.ResponseWriter, req *http.Request) translation.Locale { } } - // Check again in case someone modify by purpose. - if lang != "" && !i18n.IsExist(lang) { + // Check again in case someone changes the supported language list. 
+ if lang != "" && !i18n.DefaultLocales.HasLang(lang) { lang = "" changeLang = false } diff --git a/modules/web/route_test.go b/modules/web/route_test.go index a8470fec94..801afe92c9 100644 --- a/modules/web/route_test.go +++ b/modules/web/route_test.go @@ -67,7 +67,7 @@ func TestRoute2(t *testing.T) { route = 1 }) }, func(resp http.ResponseWriter, req *http.Request) { - resp.WriteHeader(200) + resp.WriteHeader(http.StatusOK) }) r.Group("/issues/{index}", func() { diff --git a/modules/web/routing/logger_manager.go b/modules/web/routing/logger_manager.go index cc434c338d..7715b0b5d3 100644 --- a/modules/web/routing/logger_manager.go +++ b/modules/web/routing/logger_manager.go @@ -11,6 +11,7 @@ import ( "time" "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/process" ) // Event indicates when the printer is triggered @@ -40,7 +41,9 @@ type requestRecordsManager struct { } func (manager *requestRecordsManager) startSlowQueryDetector(threshold time.Duration) { - go graceful.GetManager().RunWithShutdownContext(func(baseCtx context.Context) { + go graceful.GetManager().RunWithShutdownContext(func(ctx context.Context) { + ctx, _, finished := process.GetManager().AddTypedContext(ctx, "Service: SlowQueryDetector", process.SystemProcessType, true) + defer finished() // This go-routine checks all active requests every second. // If a request has been running for a long time (eg: /user/events), we also print a log with "still-executing" message // After the "still-executing" log is printed, the record will be removed from the map to prevent from duplicated logs in future @@ -49,7 +52,7 @@ func (manager *requestRecordsManager) startSlowQueryDetector(threshold time.Dura t := time.NewTicker(time.Second) for { select { - case <-baseCtx.Done(): + case <-ctx.Done(): return case <-t.C: now := time.Now() diff --git a/options/gitignore/Python b/options/gitignore/Python index de2d5e086e..68bc17f9ff 100644 --- a/options/gitignore/Python +++ b/options/gitignore/Python @@ -101,7 +101,15 @@ ipython_config.py # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control #poetry.lock -# PEP 582; used by e.g. github.com/David-OConnor/pyflow +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ # Celery stuff diff --git a/options/license/Baekmuk b/options/license/Baekmuk new file mode 100644 index 0000000000..b86efc04a0 --- /dev/null +++ b/options/license/Baekmuk @@ -0,0 +1,9 @@ +Copyright (c) 1986-2002 Kim Jeong-Hwan All rights reserved. + +Permission to use, copy, modify and distribute this font +is hereby granted, provided that both the copyright notice +and this permission notice appear in all copies of the +font, derivative works or modified versions, and that the +following acknowledgement appear in supporting documentation: +Baekmuk Batang, Baekmuk Dotum, Baekmuk Gulim, and Baekmuk +Headline are registered trademarks owned by Kim Jeong-Hwan. 
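A minimal sketch (not part of the patch) of why the simplified isLoopbackIP in modules/validation/helpers.go above can drop the explicit nil check: net.ParseIP returns nil for malformed input, and calling IsLoopback on a nil IP reports false rather than panicking, so invalid addresses are still treated as non-loopback, matching the old behaviour.

package main

import (
	"fmt"
	"net"
)

func main() {
	for _, s := range []string{"127.0.0.1", "::1", "192.168.1.1", "not-an-ip"} {
		// net.ParseIP("not-an-ip") returns nil; a nil IP's IsLoopback() is false.
		fmt.Printf("%-12s loopback=%v\n", s, net.ParseIP(s).IsLoopback())
	}
}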
diff --git a/options/license/KiCad-libraries-exception b/options/license/KiCad-libraries-exception new file mode 100644 index 0000000000..ae8854b119 --- /dev/null +++ b/options/license/KiCad-libraries-exception @@ -0,0 +1 @@ +To the extent that the creation of electronic designs that use 'Licensed Material' can be considered to be 'Adapted Material', then the copyright holder waives article 3 of the license with respect to these designs and any generated files which use data provided as part of the 'Licensed Material'. diff --git a/options/license/LGPL-3.0-only b/options/license/LGPL-3.0-only index c9287dd363..513d1c01fe 100644 --- a/options/license/LGPL-3.0-only +++ b/options/license/LGPL-3.0-only @@ -69,3 +69,236 @@ Each version is given a distinguishing version number. If the Library as you rec If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. + +GNU GENERAL PUBLIC LICENSE +Version 3, 29 June 2007 + +Copyright © 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +Preamble + +The GNU General Public License is a free, copyleft license for software and other kinds of works. + +The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. + +To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. + +For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. + +Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. + +For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. 
+ +Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. + +Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. + +The precise terms and conditions for copying, distribution and modification follow. + +TERMS AND CONDITIONS + +0. Definitions. + +“This License” refers to version 3 of the GNU General Public License. + +“Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. + +“The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations. + +To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work. + +A “covered work” means either the unmodified Program or a work based on the Program. + +To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. + +To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. + +An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. + +1. Source Code. +The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work. + +A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
+ +The “System Libraries” of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. + +The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. + +The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. + +The Corresponding Source for a work in source code form is that same work. + +2. Basic Permissions. +All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. + +You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. + +Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. + +3. Protecting Users' Legal Rights From Anti-Circumvention Law. +No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. 
+ +When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. + +4. Conveying Verbatim Copies. +You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. + +You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. + +5. Conveying Modified Source Versions. +You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all notices”. + + c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. + +A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. + +6. Conveying Non-Source Forms. +You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: + + a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. + +A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. + +A “User Product” is either (1) a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. + +“Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
+ +If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). + +The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. + +Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. + +7. Additional Terms. +“Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. + +When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
+ +Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. + +All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. + +If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. + +Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. + +8. Termination. +You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). + +However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. + +Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. + +Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. + +9. Acceptance Not Required for Having Copies. +You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. + +10. Automatic Licensing of Downstream Recipients. +Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. + +An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. + +You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. + +11. Patents. +A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor version”. + +A contributor's “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. + +Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. + +In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
+ +If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying” means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. + +If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. + +A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. + +Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. + +12. No Surrender of Others' Freedom. +If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. + +13. Use with the GNU Affero General Public License. +Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. + +14. Revised Versions of this License. +The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. + +If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. + +Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. + +15. Disclaimer of Warranty. +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +16. Limitation of Liability. +IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +17. Interpretation of Sections 15 and 16. +If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. + +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + +If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an “about box”. + +You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer” for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . + +The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . diff --git a/options/license/LGPL-3.0-or-later b/options/license/LGPL-3.0-or-later index c9287dd363..513d1c01fe 100644 --- a/options/license/LGPL-3.0-or-later +++ b/options/license/LGPL-3.0-or-later @@ -69,3 +69,236 @@ Each version is given a distinguishing version number. If the Library as you rec If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. + +GNU GENERAL PUBLIC LICENSE +Version 3, 29 June 2007 + +Copyright © 2007 Free Software Foundation, Inc. + +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. + +Preamble + +The GNU General Public License is a free, copyleft license for software and other kinds of works. + +The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. 
+ +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. + +To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. + +For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. + +Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. + +For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. + +Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. + +Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. + +The precise terms and conditions for copying, distribution and modification follow. + +TERMS AND CONDITIONS + +0. Definitions. + +“This License” refers to version 3 of the GNU General Public License. + +“Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. + +“The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations. + +To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work. + +A “covered work” means either the unmodified Program or a work based on the Program. 
+ +To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. + +To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. + +An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. + +1. Source Code. +The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work. + +A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. + +The “System Libraries” of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. + +The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. + +The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. + +The Corresponding Source for a work in source code form is that same work. + +2. Basic Permissions. +All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. + +You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. + +Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. + +3. Protecting Users' Legal Rights From Anti-Circumvention Law. +No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. + +When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. + +4. Conveying Verbatim Copies. +You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. + +You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. + +5. Conveying Modified Source Versions. +You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all notices”. + + c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. 
+ + d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. + +A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. + +6. Conveying Non-Source Forms. +You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: + + a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. + + d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. + +A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. 
+ +A “User Product” is either (1) a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. + +“Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. + +If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). + +The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. + +Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. + +7. Additional Terms. +“Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. + +When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) 
You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. + +Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or authors of the material; or + + e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. + +All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. + +If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. + +Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. + +8. Termination. +You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). + +However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. + +Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. 
+ +Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. + +9. Acceptance Not Required for Having Copies. +You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. + +10. Automatic Licensing of Downstream Recipients. +Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. + +An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. + +You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. + +11. Patents. +A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor version”. + +A contributor's “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. + +Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. 
+ +In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. + +If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying” means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. + +If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. + +A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. + +Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. + +12. No Surrender of Others' Freedom. +If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. + +13. 
Use with the GNU Affero General Public License. +Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. + +14. Revised Versions of this License. +The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. + +If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. + +Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. + +15. Disclaimer of Warranty. +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +16. Limitation of Liability. +IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +17. Interpretation of Sections 15 and 16. +If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. 
+ +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. + + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + +If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an “about box”. + +You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer” for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . + +The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . diff --git a/options/license/PDDL-1.0 b/options/license/PDDL-1.0 index 6a830d7df2..b5399f7c13 100644 --- a/options/license/PDDL-1.0 +++ b/options/license/PDDL-1.0 @@ -127,7 +127,7 @@ Please note that some jurisdictions do not allow for the waiver of moral rights, 6.0 General -6.1 If any provision of this Document is held to be invalid or unenforceable, that must not affect the cvalidity or enforceability of the remainder of the terms of this Document. +6.1 If any provision of this Document is held to be invalid or unenforceable, that must not affect the validity or enforceability of the remainder of the terms of this Document. 6.2 This Document is the entire agreement between the parties with respect to the Work covered here. It replaces any earlier understandings, agreements or representations with respect to the Work not specified here. diff --git a/options/license/mplus b/options/license/mplus new file mode 100644 index 0000000000..7ff7d504d1 --- /dev/null +++ b/options/license/mplus @@ -0,0 +1,6 @@ + +These fonts are free softwares. 
Unlimited permission is +granted to use, copy, and distribute it, with or without +modification, either commercially and noncommercially. +THESE FONTS ARE PROVIDED "AS IS" WITHOUT WARRANTY. + diff --git a/options/locale/locale_bg-BG.ini b/options/locale/locale_bg-BG.ini index a2bf0d0062..af6f019e9d 100644 --- a/options/locale/locale_bg-BG.ini +++ b/options/locale/locale_bg-BG.ini @@ -72,6 +72,7 @@ loading=Зареждане… error404=Страницата, която се опитвате да достъпите, не съществува или не сте оторизирани да я достъпите. + [error] [startpage] @@ -770,7 +771,6 @@ pulls.no_merge_wip=Тази заявка за сливане не може да ; %[2]s
%[3]s
pulls.status_checks_success=Всички проверявания бяха успешни - milestones.new=Нов етап milestones.open_tab=%d отворени milestones.close_tab=%d затворени @@ -1080,6 +1080,7 @@ repos.size=Големина + auths.name=Име auths.type=Тип auths.enabled=Активно @@ -1258,3 +1259,5 @@ mark_all_as_read=Бележа всичко като прочетено [units] +[packages] + diff --git a/options/locale/locale_cs-CZ.ini b/options/locale/locale_cs-CZ.ini index 8818f09351..098d47020e 100644 --- a/options/locale/locale_cs-CZ.ini +++ b/options/locale/locale_cs-CZ.ini @@ -34,6 +34,20 @@ twofa=Dvoufaktorové ověřování twofa_scratch=Dvoufaktorový pomocný kód passcode=Přístupový kód +webauthn_insert_key=Vložte svůj bezpečnostní klíč +webauthn_sign_in=Stiskněte tlačítko na svém bezpečnostním klíči. Pokud bezpečnostní klíč nemá žádné tlačítko, vložte jej znovu. +webauthn_press_button=Stiskněte prosím tlačítko na zabezpečovacím klíči… +webauthn_use_twofa=Použít dvoufaktorový kód z vašeho telefonu +webauthn_error=Nepodařilo se přečíst váš zabezpečovací klíč. +webauthn_unsupported_browser=Váš prohlížeč momentálně nepodporuje WebAuthn. +webauthn_error_unknown=Došlo k neznámé chybě. Opakujte akci. +webauthn_error_insecure=WebAuthn podporuje pouze zabezpečená připojení. Pro testování přes HTTP můžete použít výchozí "localhost" nebo "127.0.0.1" +webauthn_error_unable_to_process=Server nemohl zpracovat váš požadavek. +webauthn_error_duplicated=Zabezpečovací klíč není pro tento požadavek povolen. Prosím ujistěte se, zda klíč není již registrován. +webauthn_error_empty=Musíte nastavit název tohoto klíče. +webauthn_error_timeout=Požadavek vypršel dříve, než se podařilo přečíst váš klíč. Znovu načtěte tuto stránku a akci opakujte. +webauthn_u2f_deprecated=Klíč: „%s“ autentifikuje pomocí zastaralého procesu U2F. Měli byste znovu zaregistrovat tento klíč a zrušit starou registraci. +webauthn_reload=Znovu načíst repository=Repozitář organization=Organizace @@ -73,6 +87,11 @@ remove=Odstranit remove_all=Odstranit vše edit=Upravit +copy=Kopírovat +copy_url=Kopírovat URL +copy_branch=Kopírovat jméno větve +copy_success=Zkopírováno! +copy_error=Kopírování se nezdařilo write=Zapsat preview=Náhled @@ -86,8 +105,15 @@ error404=Stránka, kterou se snažíte zobrazit, buď neexistujeGitHub a v případě potřeby otevřete nový problém. missing_csrf=Špatný požadavek: Neexistuje CSRF token +invalid_csrf=Špatný požadavek: Neplatný CSRF token +not_found=Cíl nebyl nalezen. +network_error=Chyba sítě [startpage] app_desc=Snadno přístupný vlastní Git @@ -104,6 +130,7 @@ license_desc=Vše je na dokumentaci, než budete měnit jakákoliv nastavení. +require_db_desc=Gitea requires MySQL, PostgreSQL, MSSQL, SQLite3 or TiDB (MySQL protocol). db_title=Nastavení databáze db_type=Typ databáze host=Hostitel @@ -117,6 +144,9 @@ ssl_mode=SSL charset=Znaková sada path=Cesta sqlite_helper=Cesta k souboru SQLite3 databáze.
Pokud spouštíte Gitea jako službu, zadejte absolutní cestu. +reinstall_error=Pokoušíte se nainstalovat do existující databáze Gitea +reinstall_confirm_message=Přeinstalování s existující databází Gitea může způsobit více problémů. Ve většině případů byste měli použít existující „app.ini“ pro spuštění Gitea. Pokud víte, co děláte, potvrďte následující: +reinstall_confirm_check_1=Data šifrovaná pomocí SECRET_KEY v souboru api.ini mohou být ztracena: uživatelé nemusí být schopni se přihlásit s 2FA/OTP a zrcadla nemusí fungovat správně. Zaškrtnutím tohoto políčka potvrdíte, že aktuální soubor app.ini obsahuje správný SECRET_KEY. err_empty_db_path=Cesta k SQLite3 databázi nemůže být prázdná. no_admin_and_disable_registration=Nemůžete vypnout registraci účtů bez vytvoření účtu správce. err_empty_admin_password=Heslo administrátora nemůže být prázdné. @@ -134,6 +164,8 @@ lfs_path=Kořenový adresář Git LFS lfs_path_helper=V tomto adresáři budou uloženy soubory, které jsou sledovány Git LFS. Pokud ponecháte prázdné, LFS zakážete. run_user=Spustit jako uživatel run_user_helper=Zadejte uživatelské jméno, pod kterým Gitea běží v operačním systému. Pozor: tento uživatel musí mít přístup ke kořenovému adresáři repozitářů. +domain=Doména serveru +domain_helper=Adresa domény, nebo hostitele serveru. ssh_port=Port SSH serveru ssh_port_helper=Číslo portu, na kterém SSH server naslouchá. Když ponecháte prázdné, SSH server zakážete. http_port=Port, na kterém Gitea naslouchá HTTP protokolu @@ -182,6 +214,7 @@ sqlite3_not_available=Tato verze Gitea nepodporuje SQLite3. Stáhněte si ofici invalid_db_setting=Nastavení databáze je neplatné: %v invalid_repo_path=Kořenový adresář repozitářů není správný: %v run_user_not_match="Run as" uživatelské jméno není aktuální uživatelské jméno: %s -> %s +secret_key_failed=Nepodařilo se vytvořit tajný klíč: %v save_config_failed=Uložení konfigurace se nezdařilo: %v invalid_admin_setting=Nastavení účtu správce není správné: %v install_success=Vítejte! Děkujeme, že jste si vybrali Gitea. Štastné používání! @@ -231,6 +264,7 @@ search=Vyhledat code=Zdrojový kód search.fuzzy=Fuzzy search.match=Shoda +code_search_unavailable=V současné době není vyhledávání kódu dostupné. Obraťte se na správce webu. repo_no_results=Nebyly nalezeny žádné odpovídající repozitáře. user_no_results=Nebyly nalezeni žádní odpovídající uživatelé. org_no_results=Nebyly nalezeny žádné odpovídající organizace. @@ -244,6 +278,7 @@ register_helper_msg=Již máte účet? Přihlaste se! social_register_helper_msg=Již máte účet? Připojte ho! disable_register_prompt=Registrace jsou vypnuty. Prosíme, kontaktujte správce systému. disable_register_mail=E-mailové potvrzení o registraci je zakázané. +manual_activation_only=Pro dokončení aktivace kontaktujte správce webu. remember_me=Pamatovat si toto zařízení forgot_password_title=Zapomenuté heslo forgot_password=Zapomenuté heslo? @@ -282,6 +317,7 @@ oauth_signup_submit=Dokončit účet oauth_signin_tab=Propojit s existujícím účtem oauth_signin_title=Přihlaste se pro ověření propojeného účtu oauth_signin_submit=Propojit účet +oauth.signin.error.access_denied=Žádost o autorizaci byla zamítnuta. openid_connect_submit=Připojit openid_connect_title=Připojení k existujícímu účtu openid_connect_desc=Zvolené OpenID URI není známé. Přidružte nový účet zde. @@ -323,18 +359,31 @@ register_notify.text_2=Nyní se můžete přihlásit přes uživatelské jméno: register_notify.text_3=Pokud pro vás byl vytvořen tento účet, nejprve nastavte své heslo. 
reset_password=Obnovit váš účet +reset_password.title=%s, požádal jste o obnovení vašeho účtu reset_password.text=Klikněte prosím na následující odkaz pro obnovení vašeho účtu v rámci %s: register_success=Registrace byla úspěšná +issue_assigned.pull=@%[1]s vás přiřadil/a k požadavku na natažení %[2]v repozitáři %[3]s. +issue_assigned.issue=@%[1]s vás přiřadil/a k úkolu %[2]v repozitáři %[3]s. issue.x_mentioned_you=@%s vás zmínil/a: +issue.action.force_push=%[1]s vynutil/a nahrání %[2]s z %[3]do %[4]. issue.action.push_1=@%[1]s nahrál/a %[3]d commit do %[2]s issue.action.push_n=@%[1]s nahrál/a %[3]d commity do %[2]s issue.action.close=@%[1]s uzavřel/a #%[2]d. +issue.action.reopen=@%[1]s znovu otevřel/a #%[2]d. +issue.action.merge=@%[1]s sloučil/a #%[2]d do %[3]s. +issue.action.approve=@%[1]s schválil/a tento požadavek na natažení. +issue.action.reject=@%[1]s požadoval/a změny v tomto požadavku na natažení. +issue.action.review=@%[1]s okomentoval/a tento požadavek na natažení. +issue.action.review_dismissed=@%[1]s odmítl/a poslední kontrolu z %[2]s pro tento požadavek na natažení. +issue.action.ready_for_review=@%[1]s označil/a tento požadavek na natažení jako připravený ke kontrole. +issue.action.new=@%[1]s vytvořil/a #%[2]d. issue.in_tree_path=V %s: release.new.subject=%s v %s vydáno +release.new.text=@%[1]s vydal/a %[2]s v %[3]s release.title=Název: %s release.note=Poznámka: release.downloads=Soubory ke stažení: @@ -344,8 +393,10 @@ release.download.targz=Zdrojový kód (TAR.GZ) repo.transfer.subject_to=%s by chtěl převést „%s“ pro %s repo.transfer.subject_to_you=%s by Vám chtěl převést „%s“ repo.transfer.to_you=vám +repo.transfer.body=Chcete-li ji přijmout nebo odmítnout, navštivte %s nebo ji prostě ignorujte. repo.collaborator.added.subject=%s vás přidal do %s +repo.collaborator.added.text=Byl jste přidán jako spolupracovník repozitáře: [modal] yes=Ano @@ -386,6 +437,7 @@ email_error=` není správná e-mailová adresa.` url_error=` není správná URL.` include_error=` musí obsahovat řetězec „%s“.` glob_pattern_error=`zástupný vzor je neplatný: %s.` +regex_pattern_error=` regex vzor je neplatný: %s.` unknown_error=Neznámá chyba: captcha_incorrect=CAPTCHA kód není správný. password_not_match=Zadaná hesla nesouhlasí. @@ -456,6 +508,7 @@ form.name_chars_not_allowed=Uživatelské jméno ‚%s‘ obsahuje neplatné zna [settings] profile=Profil account=Účet +appearance=Vzhled password=Heslo security=Zabezpečení avatar=Avatar @@ -469,6 +522,7 @@ twofa=Dvoufaktorové ověřování account_link=Propojené účty organization=Organizace uid=UID +webauthn=Bezpečnostní klíče public_profile=Veřejný profil biography_placeholder=Řekněte nám něco o sobě @@ -479,7 +533,9 @@ website=Web location=Místo update_theme=Aktualizovat motiv vzhledu update_profile=Aktualizovat profil +update_language=Aktualizovat jazyk update_language_not_found=Jazyk „%s“ není k dispozici. +update_language_success=Jazyk byl aktualizován. update_profile_success=Váš profil byl aktualizován. change_username=Vaše uživatelské jméno bylo změněno. change_username_prompt=Poznámka: změna uživatelského jména změní také URL účtu. 
@@ -488,6 +544,19 @@ continue=Pokračovat cancel=Zrušit language=Jazyk ui=Motiv vzhledu +comment_type_group_reference=Reference +comment_type_group_label=Štítek +comment_type_group_milestone=Milník +comment_type_group_assignee=Zpracovatel +comment_type_group_title=Název +comment_type_group_branch=Větev +comment_type_group_time_tracking=Sledování času +comment_type_group_deadline=Uzávěrka +comment_type_group_dependency=Závislost +comment_type_group_lock=Stav zámku +comment_type_group_pull_request_push=Přidané commity +comment_type_group_project=Projekt +comment_type_group_issue_ref=Referenční číslo úkolu privacy=Soukromí keep_activity_private=Skrýt aktivitu z profilové stránky keep_activity_private_popup=Učinit aktivitu viditelnou pouze pro vás a administrátory @@ -501,6 +570,7 @@ delete_current_avatar=Smazat aktuální avatar uploaded_avatar_not_a_image=Nahraný soubor není obrázek. uploaded_avatar_is_too_big=Nahraný soubor překročil maximální velikost. update_avatar_success=Vaše avatar byl aktualizován. +update_user_avatar_success=Uživatelův avatar byl aktualizován. change_password=Aktualizovat heslo old_password=Stávající heslo @@ -562,6 +632,7 @@ ssh_key_name_used=SSH klíč se stejným jménem již u vašeho účtu existuje. ssh_principal_been_used=Tento SSH Principal certifikát již byl přidán na server. gpg_key_id_used=Veřejný GPG klíč se stejným ID již existuje. gpg_no_key_email_found=Tento GPG klíč neodpovídá žádné aktivované e-mailové adrese spojené s vaším účtem. Může být stále přidán, pokud podepíšete zadaný token. +gpg_key_matched_identities=Odpovídající identity: gpg_key_matched_identities_long=Vložené identity do tohoto klíče odpovídají následujícím aktivovaným e-mailovým adresám tohoto uživatele. Commity odpovídající těmto e-mailovým adresám lze ověřit pomocí tohoto klíče. gpg_key_verified=Ověřený klíč gpg_key_verified_long=Klíč byl ověřen pomocí tokenu a může být použit k ověření commitů shodujících se s libovolnou vaší aktivovanou e-mailovou adresou pro tohoto uživatele navíc k jakékoli odpovídající identitě tohoto klíče. @@ -571,6 +642,11 @@ gpg_token_required=Musíte zadat podpis pro níže uvedený token gpg_token=Token gpg_token_help=Podpis můžete vygenerovat pomocí: gpg_token_code=echo "%s" | gpg -a --default-key %s --detach-sig +key_signature_gpg_placeholder=Začíná s „-----BEGIN PGP SIGNATURE-----“ +ssh_key_verified=Ověřený klíč +ssh_key_verify=Ověřit +ssh_token=Token +verify_ssh_key_success=SSH klíč „%s“ byl ověřen. subkeys=Podklíče key_id=ID klíče key_name=Název klíče @@ -671,6 +747,9 @@ passcode_invalid=Přístupový kód není platný. Zkuste to znovu. twofa_enrolled=Ve vašem účtu bylo povoleno dvoufaktorové ověřování. Uložte si pomocný token (%s) na bezpečném místě, protože bude zobrazen pouze jednou! twofa_failed_get_secret=Nepodařilo se získat tajemství. +webauthn_register_key=Přidat bezpečnostní klíč +webauthn_nickname=Přezdívka +webauthn_delete_key=Odstranit bezpečnostní klíč manage_account_links=Správa propojených účtů manage_account_links_desc=Tyto externí účty jsou propojeny s vaším Gitea účtem. 
@@ -695,8 +774,12 @@ email_notifications.disable=Zakázat e-mailová oznámení email_notifications.submit=Nastavit předvolby e-mailu visibility=Viditelnost uživatele +visibility.public=Veřejný visibility.public_tooltip=Viditelné pro všechny uživatele +visibility.limited=Omezený visibility.limited_tooltip=Viditelné jen pro přihlášené uživatele +visibility.private=Soukromý +visibility.private_tooltip=Viditelné pouze pro členy organizace [repo] new_repo_helper=Repozitář obsahuje všechny projektové soubory, včetně historie revizí. Už ho máte jinde? Migrovat repozitář. @@ -717,10 +800,13 @@ visibility_fork_helper=(Změna tohoto ovlivní všechny rozštěpení repozitá clone_helper=Potřebujete pomoci s klonováním? Navštivte nápovědu. fork_repo=Rozštěpení repozitáře fork_from=Rozštěpit z +fork_to_different_account=Rozštěpit na jiný účet fork_visibility_helper=Viditelnost rozštěpeného repozitáře nemůže být změněna. use_template=Použít tuto šablonu +clone_in_vsc=Klonovat ve VS Code download_zip=Stáhnout ZIP download_tar=Stáhnout TAR.GZ +download_bundle=Stáhnout BUNDLE generate_repo=Generovat repozitář generate_from=Generovat z repo_desc=Popis @@ -750,6 +836,7 @@ mirror_prune_desc=Odstranit zastaralé reference na vzdálené sledování mirror_interval=Interval zrcadlení (platné časové jednotky jsou „h“, „m“ a „s“). 0 zakáže automatickou synchronizaci. mirror_interval_invalid=Interval zrcadlení není platný. mirror_address=Klonovat z URL +mirror_address_desc=Zadejte požadované přístupové údaje do sekce Ověření. mirror_address_url_invalid=Poskytnutá URL je neplatná. Všechny komponenty musíte správně nahradit escape sekvencí. mirror_address_protocol_invalid=Zadaná URL je neplatná. Mohou být zrcadleny pouze umístění http(s):// nebo git://. mirror_lfs=Úložiště velkých souborů (LFS) @@ -844,6 +931,22 @@ migrate.migrate=Migrovat z %s migrate.migrating=Probíhá migrace z %s ... migrate.migrating_failed=Migrace z %s se nezdařila. migrate.migrating_failed.error=Chyba: %s +migrate.migrating_failed_no_addr=Migrace se nezdařila. +migrate.github.description=Migrovat data z github.com nebo jiných GitHub instancí. +migrate.git.description=Migrovat pouze repozitář z libovolné služby Git. +migrate.gitlab.description=Migrovat data z gitlab.com nebo jiných GitLab instancí. +migrate.gitea.description=Migrovat data z gitea.com nebo jiných Gitea instancí. +migrate.gogs.description=Migrovat data z notabug.com nebo jiných Gogs instancí. +migrate.onedev.description=Migrovat data z code.onedev.io nebo jiných OneDev instancí. +migrate.codebase.description=Migrovat data z codebasehq.com. +migrate.gitbucket.description=Migrovat data z GitBucket instancí. +migrate.migrating_git=Migrování data gitu +migrate.migrating_topics=Migrování témat +migrate.migrating_milestones=Migrování milnků +migrate.migrating_labels=Migrování štítků +migrate.migrating_releases=Migrování vydání +migrate.migrating_issues=Migrování úkolů +migrate.migrating_pulls=Migrování požadavků na natažení mirror_from=zrcadlo forked_from=rozštěpen z @@ -889,6 +992,7 @@ release=Vydání releases=Vydání tag=Značka released_this=vydal/a toto +file.title=%s v %s file_raw=Surový file_history=Historie file_view_source=Zobrazit zdroj @@ -896,7 +1000,10 @@ file_view_rendered=Zobrazit vykreslené file_view_raw=Zobrazit v surovém stavu file_permalink=Trvalý odkaz file_too_large=Soubor je příliš velký pro zobrazení. 
+bidi_bad_header=`Tento soubor obsahuje neočekávané obousměrné znaky Unicode!` +line_unicode=`Tento řádek má skryté unicode znaky` +file_copy_permalink=Kopírovat trvalý odkaz video_not_supported_in_browser=Váš prohlížeč nepodporuje značku pro HTML5 video. audio_not_supported_in_browser=Váš prohlížeč nepodporuje značku pro HTML5 audio. stored_lfs=Uloženo pomocí Git LFS @@ -907,6 +1014,7 @@ commit_graph.hide_pr_refs=Skrýt požadavky na natažení commit_graph.monochrome=Černobílé commit_graph.color=Barva blame=Blame +download_file=Stáhnout soubor normal_view=Normální zobrazení line=řádek lines=řádky @@ -985,7 +1093,14 @@ commits.signed_by=Podepsáno commits.signed_by_untrusted_user=Podepsáno nedůvěryhodným uživatelem commits.signed_by_untrusted_user_unmatched=Podepsáno nedůvěryhodným uživatelem, který nesouhlasí s přispěvatelem commits.gpg_key_id=ID GPG klíče +commits.ssh_key_fingerprint=Otisk klíče SSH +commit.actions=Akce +commit.revert=Vrátit +commit.revert-header=Vrátit: %s +commit.revert-content=Vyberte větev pro návrat na: +commit.cherry-pick=Cherry-pick +commit.cherry-pick-header=Cherry-pick: %s ext_issues.desc=Odkaz na externí systém úkolů. @@ -1023,6 +1138,7 @@ projects.board.deletion_desc=Smazáním projektové nástěnky přesune všechny projects.board.color=Barva projects.open=Otevřít projects.close=Zavřít +projects.board.assigned_to=Přiřazeno k issues.desc=Organizování hlášení chyb, úkolů a milníků. issues.filter_assignees=Filtrovat zpracovatele @@ -1087,6 +1203,8 @@ issues.add_assignee_at=`byl přiřazen %s %s` issues.remove_assignee_at=`byl odstraněn z přiřazení %s %s` issues.remove_self_assignment=`odstranil/a jejich přiřazení %s` issues.change_title_at=`změnil/a název z %s na %s %s` +issues.remove_ref_at=`odstranil/a referenci %s %s` +issues.add_ref_at=`přidal/a referenci %s %s` issues.delete_branch_at=`odstranil/a větev %s %s` issues.open_tab=%d otevřených issues.close_tab=%d zavřených @@ -1124,6 +1242,8 @@ issues.action_milestone_no_select=Žádný milník issues.action_assignee=Zpracovatel issues.action_assignee_no_select=Bez zpracovatele issues.opened_by=otevřeno %[1]s uživatelem %[3]s +issues.opened_by_fake=otevřeno %[1]s uživatelem %[2]s +issues.closed_by_fake=od %[2]s byl uzavřen %[1]s issues.previous=Předchozí issues.next=Další issues.open_title=otevřený @@ -1206,6 +1326,9 @@ issues.lock.reason=Důvod pro uzamčení issues.lock.title=Uzamknout konverzaci u tohoto úkolu. issues.unlock.title=Odemknout konverzaci u tohoto úkolu. issues.comment_on_locked=Nemůžete komentovat uzamčený úkol. +issues.delete=Smazat +issues.delete.title=Smazat tento úkol? +issues.delete.text=Opravdu chcete tento úkol smazat? (Tím se trvale odstraní veškerý obsah. Pokud jej hodláte archivovat, zvažte raději jeho uzavření.) issues.tracker=Sledování času issues.start_tracking_short=Spustit časovač issues.start_tracking=Spustit sledování času @@ -1246,14 +1369,16 @@ issues.due_date_remove=odstranil/a termín dokončení %s %s issues.due_date_overdue=Zpožděné issues.due_date_invalid=Termín dokončení není platný nebo je mimo rozsah. Použijte prosím formát „rrrr-mm-dd“. issues.dependency.title=Závislosti -issues.dependency.issue_no_dependencies=Tento úkol momentálně nemá žádné závislosti. -issues.dependency.pr_no_dependencies=Tento požadavek na natažení momentálně nemá žádné závislosti. +issues.dependency.issue_no_dependencies=Nejsou nastaveny žádné závislosti. +issues.dependency.pr_no_dependencies=Nejsou nastaveny žádné závislosti. 
issues.dependency.add=Přidat závislost… issues.dependency.cancel=Zrušit issues.dependency.remove=Odstranit issues.dependency.remove_info=Odstranit tuto závislost issues.dependency.added_dependency=`přidal/a novou závislost %s` issues.dependency.removed_dependency=`odstranil/a závislost %s` +issues.dependency.pr_closing_blockedby=Uzavření tohoto požadavku na natažení je blokováno následujícími úkoly +issues.dependency.issue_closing_blockedby=Uzavření tohoto úkolu je blokováno následujícími úkoly issues.dependency.issue_close_blocks=Tento úkol blokuje uzavření následujících úkolů issues.dependency.pr_close_blocks=Tento požadavek na natažení blokuje uzavření následujících úkolů issues.dependency.issue_close_blocked=Musíte zavřít všechny úkoly, které blokují tento úkol, aby jej bylo možné zavřít. @@ -1296,6 +1421,8 @@ issues.review.un_resolve_conversation=Nevyřešit konverzaci issues.review.resolved_by=označil tuto konverzaci jako vyřešenou issues.assignee.error=Ne všichni zpracovatelé byli přidáni z důvodu neočekávané chyby. issues.reference_issue.body=Tělo zprávy +issues.content_history.deleted=vymazáno +issues.content_history.edited=upraveno issues.content_history.created=vytvořeno issues.content_history.delete_from_history=Smazat z historie issues.content_history.delete_from_history_confirm=Smazat z historie? @@ -1306,10 +1433,15 @@ compare.compare_head=porovnat pulls.desc=Povolit požadavky na natažení a posuzování kódu. pulls.new=Nový požadavek na natažení +pulls.view=Zobrazit požadavek na natažení pulls.compare_changes=Nový požadavek na natažení +pulls.allow_edits_from_maintainers=Povolit úpravy od správců +pulls.allow_edits_from_maintainers_desc=Uživatelé s přístupem k zápisu do základní větve mohou také nahrávat do této větve +pulls.allow_edits_from_maintainers_err=Aktualizace se nezdařila pulls.compare_changes_desc=Vyberte větev pro sloučení a větev pro natažení. pulls.compare_base=sloučit do pulls.compare_compare=natáhnout z +pulls.switch_comparison_type=Přepnout typ porovnání pulls.filter_branch=Filtrovat větev pulls.no_results=Nebyly nalezeny žádné výsledky. pulls.nothing_to_compare=Tyto větve jsou stejné. Není potřeba vytvářet požadavek na natažení. @@ -1388,13 +1520,14 @@ pulls.status_checks_failure=Některé kontroly se nezdařily pulls.status_checks_error=Některé kontroly nahlásily chyby pulls.status_checks_requested=Požadováno pulls.status_checks_details=Podrobnosti +pulls.update_branch=Aktualizovat větev sloučením +pulls.update_branch_rebase=Aktualizovat větev pomocí rebase pulls.update_branch_success=Aktualizace větve byla úspěšná pulls.update_not_allowed=Nemáte oprávnění aktualizovat větev pulls.outdated_with_base_branch=Tato větev je zastaralá oproti základní větvi pulls.closed_at=`uzavřel/a tento požadavek na natažení %[2]s` pulls.reopened_at=`znovuotevřel/a tento požadavek na natažení %[2]s` pulls.merge_instruction_hint=`Můžete také zobrazit instrukce příkazové řádky.` - pulls.merge_instruction_step1_desc=Z vašeho repositáře projektu se podívejte na novou větev a vyzkoušejte změny. pulls.merge_instruction_step2_desc=Slučte změny a aktualizujte je na Gitea. @@ -1444,6 +1577,7 @@ signing.wont_sign.commitssigned=Sloučení nebude podepsáno, protože všechny signing.wont_sign.approved=Sloučení nebude podepsáno, protože požadavek na natažení není schválen signing.wont_sign.not_signed_in=Nejste přihlášeni +ext_wiki=Přístup k externí Wiki ext_wiki.desc=Odkaz do externí Wiki. 
wiki=Wiki @@ -1554,8 +1688,11 @@ settings.hooks=Webové háčky settings.githooks=Háčky Gitu settings.basic_settings=Základní nastavení settings.mirror_settings=Nastavení zrcadla +settings.mirror_settings.mirrored_repository=Zrcadlený repozitář +settings.mirror_settings.direction=Směr settings.mirror_settings.direction.pull=Natáhnout settings.mirror_settings.direction.push=Nahrát +settings.mirror_settings.last_update=Poslední aktualizace settings.mirror_settings.push_mirror.none=Nenastavena žádná zrcadla pro nahrání settings.mirror_settings.push_mirror.remote_url=URL vzdáleného Git repozitáře settings.mirror_settings.push_mirror.add=Přidat zrcadlo pro nahrání @@ -1600,6 +1737,12 @@ settings.pulls.enable_autodetect_manual_merge=Povolit autodetekci ručních slou settings.projects_desc=Povolit projekty v repozitáři settings.admin_settings=Nastavení správce settings.admin_enable_health_check=Povolit kontrolu stavu repozitáře (git fsck) +settings.admin_code_indexer=Indexování kódu +settings.admin_stats_indexer=Index statistiky kódu +settings.admin_indexer_commit_sha=Poslední indexovaná SHA +settings.admin_indexer_unindexed=Neindexováno +settings.reindex_button=Přidat do fronty reindexace +settings.reindex_requested=Požadováno reindexování settings.admin_enable_close_issues_via_commit_in_any_branch=Zavřít úkol pomocí commitu v jiné než výchozí větvi settings.danger_zone=Nebezpečná zóna settings.new_owner_has_same_repo=Nový vlastník již repozitář se stejným názvem má. Vyberte prosím jiné jméno. @@ -1687,6 +1830,7 @@ settings.webhook.response=Odpověď settings.webhook.headers=Hlavičky settings.webhook.payload=Obsah settings.webhook.body=Tělo zprávy +settings.webhook.replay.description=Zopakovat tento webový háček. settings.githook_edit_desc=Je-li háček neaktivní, bude zobrazen vzorový obsah. Nebude-li zadán žádný obsah, háček bude vypnut. settings.githook_name=Název háčku settings.githook_content=Obsah háčku @@ -1744,6 +1888,8 @@ settings.event_pull_request_review=Požadavek na natažení přezkoumán settings.event_pull_request_review_desc=Požadavek na natažení schválen, odmítnut nebo zkontrolován. settings.event_pull_request_sync=Požadavek na natažení synchronizován settings.event_pull_request_sync_desc=Požadavek na natažení synchronizován. +settings.event_package=Balíček +settings.event_package_desc=Balíček vytvořen nebo odstraněn v repozitáři. settings.branch_filter=Filtr větví settings.active=Aktivní settings.active_helper=Informace o spuštěných událostech budou odeslány na URL webového háčku. 
@@ -1756,6 +1902,22 @@ settings.hook_type=Typ háčku settings.slack_token=Poukázka settings.slack_domain=Doména settings.slack_channel=Kanál +settings.web_hook_name_gitea=Gitea +settings.web_hook_name_gogs=Gogs +settings.web_hook_name_slack=Slack +settings.web_hook_name_discord=Discord +settings.web_hook_name_dingtalk=DingTalk +settings.web_hook_name_telegram=Telegram +settings.web_hook_name_matrix=Matrix +settings.web_hook_name_msteams=Microsoft Teams +settings.web_hook_name_feishu_or_larksuite=Feishu / Lark Suite +settings.web_hook_name_feishu=Feishu +settings.web_hook_name_larksuite=Lark Suite +settings.web_hook_name_wechatwork=WeCom (Wechat Work) +settings.web_hook_name_packagist=Packagist +settings.packagist_username=Uživatelské jméno pro Packagist +settings.packagist_api_token=API token +settings.packagist_package_url=Packagist URL balíčku settings.deploy_keys=Klíče pro nasazení settings.add_deploy_key=Přidat klíč pro nasazení settings.deploy_key_desc=Klíče pro nasazení mají k tomuto repozitáři přístup pouze pro čtení. @@ -1807,6 +1969,8 @@ settings.dismiss_stale_approvals_desc=Pokud budou do větve nahrány nové reviz settings.require_signed_commits=Vyžadovat podepsané revize settings.require_signed_commits_desc=Odmítnout nahrání do této větve pokud nejsou podepsaná nebo jsou neověřitelná. settings.protect_protected_file_patterns=Chráněné vzory souborů (oddělené středníkem „\;“): +settings.protect_protected_file_patterns_desc=Chráněné soubory, které nemají povoleno být měněny přímo, i když uživatel má právo přidávat, upravovat nebo mazat soubory v této větvi. Více vzorů lze oddělit pomocí středníku („\;“). Podívejte se na github.com/gobwas/glob dokumentaci pro syntaxi vzoru. Příklady: .drone.yml, /docs/**/*.txt. +settings.protect_unprotected_file_patterns=Nechráněné vzory souborů (oddělené středníkem „\;“): settings.add_protected_branch=Zapnout ochranu settings.delete_protected_branch=Vypnout ochranu settings.update_protect_branch_success=Ochrana větví pro větev „%s“ byla aktualizována. @@ -1828,6 +1992,7 @@ settings.protected_branch_required_approvals_min=Požadovaná schválení nesmí settings.tags=Značky settings.tags.protection=Ochrana značek settings.tags.protection.pattern=Vzor značky +settings.tags.protection.allowed=Povoleno settings.tags.protection.allowed.users=Povolení uživatelé settings.tags.protection.allowed.teams=Povolené týmy settings.tags.protection.allowed.noone=Nikdo @@ -1878,6 +2043,12 @@ settings.lfs_pointers.inRepo=V repozitáři settings.lfs_pointers.exists=Existuje v úložišti settings.lfs_pointers.accessible=Přístupné uživateli settings.lfs_pointers.associateAccessible=Přiřadit přístupné %d OID +settings.rename_branch_failed_exist=Nelze přejmenovat větev, protože cílová větev %s existuje. +settings.rename_branch_failed_not_exist=Větev %s nelze přejmenovat, protože neexistuje. +settings.rename_branch_success=Větev %s byla úspěšně přejmenována na %s. 
+settings.rename_branch_from=starý název větve +settings.rename_branch_to=nový název větve +settings.rename_branch=Přejmenovat větev diff.browse_source=Procházet zdrojové kódy diff.parent=rodič @@ -1907,6 +2078,9 @@ diff.file_image_height=Výška diff.file_byte_size=Velikost diff.file_suppressed=Rozdílový obsah nebyl zobrazen, protože je příliš veliký diff.file_suppressed_line_too_long=Rozdílový obsah nebyl zobrazen, protože některé řádky jsou příliš dlouhá +diff.too_many_files=Některé soubory nejsou zobrazny, neboť je v této revizi změněno mnoho souborů +diff.show_more=Zobrazit více +diff.generated=vygenerováno diff.comment.placeholder=Zanechat komentář diff.comment.markdown_info=Je podporována úprava vzhledu pomocí markdown. diff.comment.add_single_comment=Přidat jeden komentář @@ -1924,6 +2098,7 @@ diff.protected=Chráněno diff.image.side_by_side=Vedle sebe diff.image.swipe=Posunout diff.image.overlay=Překrytí +diff.has_escaped=Tento řádek má skryté znaky Unicode releases.desc=Sledování verzí projektu a souborů ke stažení. release.releases=Vydání @@ -1994,10 +2169,15 @@ branch.included_desc=Tato větev je součástí výchozí větve branch.included=Zahrnuje branch.create_new_branch=Vytvořit větev z větve: branch.confirm_create_branch=Vytvořit větev +branch.create_branch_operation=Vytvořit větev branch.new_branch=Vytvořit novou větev branch.new_branch_from=Vytvořit novou větev z „%s“ +branch.renamed=Větev %s byla přejmenována na %s. tag.create_tag=Vytvořit značku %s +tag.create_tag_operation=Vytvořit značku +tag.confirm_create_tag=Vytvořit značku +tag.create_tag_from=Vytvořit novou značku z „%s“ tag.create_success=Značka „%s“ byla vytvořena. @@ -2074,15 +2254,22 @@ members.member_role=Role člena: members.owner=Vlastník members.member=Člen members.remove=Smazat +members.remove.detail=Odstranit %[1]s z %[2]? members.leave=Opustit +members.leave.detail=Opustit %s? members.invite_desc=Přidat nového člena do %s: members.invite_now=Pozvat teď teams.join=Připojit teams.leave=Opustit +teams.leave.detail=Opustit %s? teams.can_create_org_repo=Vytvořit repozitáře teams.can_create_org_repo_helper=Členové mohou vytvářet nové repozitáře v organizaci. Tvůrce získá přístup správce do nového repozitáře. +teams.none_access=Bez přístupu +teams.general_access=Obecný přístup +teams.read_access=Čtení teams.read_access_helper=Členové mohou zobrazit a klonovat repozitáře týmu. +teams.write_access=Zápis teams.write_access_helper=Členové mohou číst a nahrávat do repozitářů týmu. teams.admin_access=Přístup správce teams.admin_access_helper=Členové mohou natahovat i nahrávat do repozitářů týmu a mohou přidávat spolupracovníky. @@ -2175,6 +2362,7 @@ dashboard.resync_all_hooks=Znovu synchronizovat háčky před přijetím, aktual dashboard.reinit_missing_repos=Znovu inicializovat všechny chybějící repozitáře, pro které existují záznamy dashboard.sync_external_users=Synchronizovat externí uživatelská data dashboard.cleanup_hook_task_table=Vyčistit tabulku hook_task +dashboard.cleanup_packages=Vyčistit prošlé balíčky dashboard.server_uptime=Doba provozu serveru dashboard.current_goroutine=Aktuální Goroutines dashboard.current_memory_usage=Aktuální využití paměti @@ -2206,6 +2394,7 @@ dashboard.last_gc_pause=Poslední pauza GC dashboard.gc_times=Časy GC dashboard.delete_old_actions=Odstranit všechny staré akce z databáze dashboard.delete_old_actions.started=Začalo odstraňování všech starých akcí z databáze. 
+dashboard.update_checker=Kontrola aktualizací users.user_manage_panel=Správa uživatelských účtů users.new_account=Vytvořit uživatelský účet @@ -2244,6 +2433,18 @@ users.still_own_repo=Tento uživatel stále vlastní jeden nebo více repozitá users.still_has_org=Uživatel je člen organizace. Nejprve odstraňte uživatele ze všech organizací. users.deletion_success=Uživatelský účet byl smazán. users.reset_2fa=Resetovat 2FA +users.list_status_filter.menu_text=Filtr +users.list_status_filter.reset=Obnovit +users.list_status_filter.is_active=Aktivní +users.list_status_filter.not_active=Neaktivní +users.list_status_filter.is_admin=Administrátor +users.list_status_filter.not_admin=Není administrátor +users.list_status_filter.is_restricted=Omezeno +users.list_status_filter.not_restricted=Není omezeno +users.list_status_filter.is_prohibit_login=Zakázat přihlášení +users.list_status_filter.not_prohibit_login=Povolit přihlášení +users.list_status_filter.is_2fa_enabled=2FA povoleno +users.list_status_filter.not_2fa_enabled=2FA zakázáno emails.email_manage_panel=Správa e-mailů uživatele emails.primary=Hlavní @@ -2276,6 +2477,17 @@ repos.forks=Rozštěpení repos.issues=Úkoly repos.size=Velikost +packages.package_manage_panel=Správa balíčků +packages.total_size=Celková velikost: %s +packages.owner=Vlastník +packages.creator=Tvůrce +packages.name=Název +packages.version=Verze +packages.type=Typ +packages.repository=Repozitář +packages.size=Velikost +packages.published=Publikováno + defaulthooks=Výchozí webové háčky defaulthooks.desc=Webové háčky automaticky vytvářejí HTTP POST dotazy na server při určitých Gitea událostech. Webové háčky definované zde jsou výchozí a budou zkopírovány do všech nových repozitářů. Přečtěte si více v průvodci webovými háčky. defaulthooks.add_webhook=Přidat výchozí webový háček @@ -2320,6 +2532,9 @@ auths.restricted_filter_helper=Ponechte prázdné, pokud nechcete nastavit žád auths.group_search_base=Základní DN pro hledání skupin auths.group_attribute_list_users=Skupinový atribut obsahující seznam uživatelů auths.user_attribute_in_group=Atribut uživatele ve skupině +auths.map_group_to_team=Mapovat LDAP skupiny do týmů organizace (ponechte pole prázdné pro přeskočení) +auths.map_group_to_team_removal=Odebrat uživatele z synchronizovaných týmů, pokud uživatel nepatří do odpovídající LDAP skupiny +auths.enable_ldap_groups=Povolit LDAP skupiny auths.ms_ad_sa=Atributy vyhledávání MS AD auths.smtp_auth=Typ ověření SMTP auths.smtphost=Server SMTP @@ -2342,6 +2557,7 @@ auths.oauth2_tokenURL=URL poukázky auths.oauth2_authURL=Autorizační URL auths.oauth2_profileURL=URL profilu auths.oauth2_emailURL=URL e-mailu +auths.skip_local_two_fa=Přeskočit lokální 2FA auths.enable_auto_register=Povolit zaregistrování se auths.sspi_auto_create_users=Automaticky vytvářet uživatele auths.sspi_auto_create_users_helper=Povolit SSPI autentizační metodě automaticky vytvářet nové účty pro uživatele, kteří se poprvé přihlásili @@ -2389,6 +2605,7 @@ config.app_ver=Verze Gitea config.app_url=Základní URL Gitea config.custom_conf=Cesta ke konfiguračnímu souboru config.custom_file_root_path=Kořenový adresář vlastních souborů +config.domain=Doména serveru config.offline_mode=Lokální režim config.disable_router_log=Vypnout log směrovače config.run_user=Spustit jako uživatel @@ -2404,6 +2621,7 @@ config.reverse_auth_user=Uživatel obráceného ověření config.ssh_config=Nastavení SSH config.ssh_enabled=Zapnutý config.ssh_start_builtin_server=Použít vestavěný server +config.ssh_domain=Doména SSH serveru config.ssh_port=Port 
config.ssh_listen_port=Port pro naslouchání config.ssh_root_path=Kořenová cesta @@ -2526,6 +2744,7 @@ monitor.process=Spuštěné procesy monitor.desc=Popis monitor.start=Čas zahájení monitor.execute_time=Doba provádění +monitor.last_execution_result=Výsledek monitor.process.cancel=Zrušit proces monitor.process.cancel_desc=Zrušení procesu může způsobit ztrátu dat monitor.process.cancel_notices=Zrušit: %s? @@ -2555,6 +2774,12 @@ monitor.queue.pool.flush.title=Vyprázdnit frontu monitor.queue.pool.flush.desc=Vyprázdnění přidá workera, který bude ukončen po vyprázdnění fronty nebo po vypršení časového limitu. monitor.queue.pool.flush.submit=Přidat workera pro vyprázdnění monitor.queue.pool.flush.added=Worker pro vyprázdnění přidán pro %[1]s +monitor.queue.pool.pause.title=Pozastavit frontu +monitor.queue.pool.pause.desc=Pozastavení fronty ji zabrání ve zpracovávání dat +monitor.queue.pool.pause.submit=Pozastavit frontu +monitor.queue.pool.resume.title=Pokračovat ve frontě +monitor.queue.pool.resume.desc=Nastavte tuto frontu pro pokračování práce +monitor.queue.pool.resume.submit=Pokračovat ve frontě monitor.queue.settings.title=Nastavení fondu monitor.queue.settings.desc=Fondy dynamicky rostou se zesílením v závislosti na blokování jejich pracovní fronty. Tyto změny neovlivní aktuální pracovní skupiny. @@ -2600,14 +2825,29 @@ notices.delete_success=Systémové upozornění bylo smazáno. [action] create_repo=vytvořil/a repozitář %s rename_repo=přejmenoval/a repozitář z %[1]s na %[3]s +commit_repo=nahrál/a do %[3]s v %[4]s +create_issue=`otevřel/a úkol %[3]s#%[2]s` +close_issue=`uzavřel/a úkol %[3]s#%[2]s` +reopen_issue=`znovuotevřel/a úkol %[3]s#%[2]s` +create_pull_request=`vytvořil/a požadavek na natažení %[3]s#%[2]s` +close_pull_request=`uzavřel/a požadavek na natažení %[3]s#%[2]s` +reopen_pull_request=`znovuotevřel/a požadavek na natažení %[3]s#%[2]s` +comment_issue=`okomentoval/a problém %[3]s#%[2]s` +comment_pull=`okomentoval/a požadavek na natažení %[3]s#%[2]s` +merge_pull_request=`sloučil/a požadavek na natažení %[3]s#%[2]s` transfer_repo=předal/a repozitář %s uživateli/organizaci %s delete_tag=smazána značka %[2]s z %[3]s delete_branch=smazal/a větev %[2]s z %[3]s compare_branch=Porovnat compare_commits=Porovnat %d revizí compare_commits_general=Porovnat revize +mirror_sync_push=synchronizoval/a commity do %[3]s v %[4]s ze zrcadla mirror_sync_delete=synchronizoval/a a smazal/a referenci %[2]s v %[3]s ze zrcadla +approve_pull_request=`schválil/a %[3]s#%[2]s` +publish_release=`vydal/a "%[4]s" v %[3]s` +review_dismissed=`zamítl/a posouzení z %[4]s pro %[3]s#%[2]s` review_dismissed_reason=Důvod: +create_branch=vytvořil/a větev %[3]s v %[4]s [tool] ago=před %s @@ -2660,6 +2900,81 @@ error.probable_bad_signature=VAROVÁNÍ! Přestože v databázi existuje klíč error.probable_bad_default_signature=VAROVÁNÍ! Ačkoli výchozí klíč má toto ID, neověřuje tuto revizi! Tato revize je PODEZŘELÁ. [units] +unit=Jednotka error.no_unit_allowed_repo=Nejste oprávněni přistupovat k žádné části tohoto repozitáře. error.unit_not_allowed=Nejste oprávněni přistupovat k této části repozitáře. +[packages] +title=Balíčky +desc=Správa balíčků repozitáře. +empty=Zatím nejsou žádné balíčky. +filter.type=Typ +filter.type.all=Vše +filter.no_result=Váš filtr nepřinesl žádné výsledky. 
+installation=Instalace +about=O tomto balíčku +requirements=Požadavky +dependencies=Závislosti +keywords=Klíčová slova +details=Podrobnosti +details.author=Autor +details.project_site=Stránka projektu +details.license=Licence +versions=Verze +versions.view_all=Zobrazit všechny +dependency.id=ID +dependency.version=Verze +composer.dependencies=Závislosti +composer.dependencies.development=Vývojové závislosti +conan.details.repository=Repozitář +conan.registry=Nastavte tento registr z příkazového řádku: +container.details.type=Typ obrazu +container.details.platform=Platforma +container.details.repository_site=Stránka repositáře +container.details.documentation_site=Stránka dokumentace +container.pull=Stáhněte obraz z příkazové řádky: +container.layers=Vrstvy obrazů +container.labels=Štítky +container.labels.key=Klíč +container.labels.value=Hodnota +generic.download=Stáhnout balíček z příkazové řádky: +helm.registry=Nastavte tento registr z příkazového řádku: +maven.registry=Nastavte tento registr ve vašem projektu pom.xml souboru: +maven.install=Pro použití balíčku uveďte následující v bloku dependencies v souboru pom.xml: +maven.install2=Spustit pomocí příkazové řádky: +maven.download=Chcete-li stáhnout závislost, spusťte přes příkazový řádek: +maven.documentation=Další informace o registru Maven naleznete v dokumentaci. +nuget.registry=Nastavte tento registr z příkazového řádku: +nuget.install=Chcete-li nainstalovat balíček pomocí NuGet, spusťte následující příkaz: +nuget.documentation=Další informace o registru NuGet naleznete v dokumentaci. +nuget.dependency.framework=Cílový Framework +npm.registry=Nastavte tento registr ve vašem projektu v souboru .npmrc: +npm.install=Pro instalaci balíčku pomocí npm spusťte následující příkaz: +npm.install2=nebo ho přidejte do souboru package.json: +npm.documentation=Další informace o npm registru naleznete v dokumentaci. +npm.dependencies=Závislosti +npm.dependencies.development=Vývojové závislosti +npm.dependencies.optional=Volitelné závislosti +npm.details.tag=Značka +pypi.requires=Vyžaduje Python +pypi.install=Pro instalaci balíčku pomocí pip spusťte následující příkaz: +pypi.documentation=Další informace o registru PyPI naleznete v dokumentaci. +rubygems.install=Pro instalaci balíčku pomocí gem spusťte následující příkaz: +rubygems.install2=nebo ho přidejte do Gemfie: +rubygems.dependencies.runtime=Běhové závislosti +rubygems.dependencies.development=Vývojové závislosti +rubygems.required.ruby=Vyžaduje verzi Ruby +rubygems.required.rubygems=Vyžaduje verzi RubyGem +rubygems.documentation=Další informace o registru RubyGems naleznete v dokumentaci. +settings.link=Propojit tento balíček s repozitářem +settings.link.description=Pokud propojíte balíček s repozitářem, je tento balíček uveden v seznamu balíčků repozitáře. +settings.link.select=Vybrat repozitář +settings.link.button=Aktualizovat odkaz na repozitář +settings.link.success=Odkaz na repozitář byl úspěšně aktualizován. +settings.link.error=Nepodařilo se aktualizovat odkaz na repozitář. +settings.delete=Odstranit balíček +settings.delete.description=Smazání balíčku je trvalé a nelze ho vrátit zpět. +settings.delete.notice=Chystáte se odstranit %s (%s). Tato operace je nevratná, jste si jisti? +settings.delete.success=Balíček byl odstraněn. +settings.delete.error=Nepodařilo se odstranit balíček. 
+ diff --git a/options/locale/locale_de-DE.ini b/options/locale/locale_de-DE.ini index 1441903e59..576117672d 100644 --- a/options/locale/locale_de-DE.ini +++ b/options/locale/locale_de-DE.ini @@ -103,6 +103,7 @@ error404=Die Seite, die du gerade versuchst aufzurufen, existiert entwed never=Niemals + [error] occurred=Ein Fehler ist aufgetreten report_message=Wenn du dir sicher bist, dass dies ein Fehler von Gitea ist, suche bitte auf GitHub nach diesem Fehler und erstelle gegebenenfalls ein neues Issue. @@ -547,9 +548,17 @@ language=Sprache ui=Theme comment_type_group_label=Label comment_type_group_milestone=Meilenstein +comment_type_group_title=Titel comment_type_group_branch=Branch +comment_type_group_time_tracking=Zeiterfassung comment_type_group_deadline=Frist +comment_type_group_dependency=Abhängigkeit +comment_type_group_lock=Sperrstatus comment_type_group_review_request=Angeforderte Reviews +comment_type_group_pull_request_push=Hinzugefügte Commits +comment_type_group_project=Projekt +comment_type_group_issue_ref=Issue-Referenz +saved_successfully=Die Einstellungen wurden erfolgreich gespeichert. privacy=Datenschutz keep_activity_private=Aktivität auf der Profilseite ausblenden keep_activity_private_popup=Macht die Aktivität nur für dich und die Administratoren sichtbar @@ -645,7 +654,6 @@ ssh_invalid_token_signature=Der gegebene SSH-Schlüssel, Signatur oder Token sti ssh_token_required=Du musst eine Signatur für den Token unten angeben ssh_token=Token ssh_token_help=Du kannst eine Signatur wie folgt generieren: -ssh_token_code=echo -n "%s" | ssh-keygen -Y sign -n gitea -f /pfad_zu_deinem_oeffentlichen_Schluessel ssh_token_signature=SSH Textsignatur (armored signature) key_signature_ssh_placeholder=Beginnt mit „-----BEGIN PGP SIGNATURE-----“ verify_ssh_key_success=Der SSH-Key "%s" wurde verifiziert. @@ -804,6 +812,7 @@ clone_helper=Benötigst du Hilfe beim Klonen? Öffne die %s“ einchecken. @@ -1092,6 +1105,7 @@ editor.cannot_commit_to_protected_branch=Commit in den geschützten Branch „%s editor.no_commit_to_branch=Kann nicht direkt zum Branch committen, da: editor.user_no_push_to_branch=Benutzer kann nicht in die Branch pushen editor.require_signed_commit=Branch erfordert einen signierten Commit +editor.cherry_pick=Cherry-Picke %s von: commits.desc=Durchsuche die Quellcode-Änderungshistorie. commits.commits=Commits @@ -1112,6 +1126,8 @@ commits.signed_by_untrusted_user_unmatched=Signiert von nicht vertrauenswürdige commits.gpg_key_id=GPG-Schlüssel-ID commits.ssh_key_fingerprint=SSH-Key-Fingerabdruck +commit.actions=Aktionen +commit.revert=Zurücksetzen ext_issues=Zugriff auf Externe Issues ext_issues.desc=Link zu externem Issuetracker. @@ -1381,8 +1397,6 @@ issues.due_date_remove=hat %[2]s das Fälligkeitsdatum %[1]s entfernt issues.due_date_overdue=Überfällig issues.due_date_invalid=Das Fälligkeitsdatum ist ungültig oder außerhalb des zulässigen Bereichs. Bitte verwende das Format „jjjj-mm-tt“. issues.dependency.title=Abhängigkeiten -issues.dependency.issue_no_dependencies=Dieses Issue hat momentan keine Abhängigkeiten. -issues.dependency.pr_no_dependencies=Dieser Pull-Request hat momentan keine Abhängigkeiten. 
issues.dependency.add=Abhängigkeit hinzufügen… issues.dependency.cancel=Abbrechen issues.dependency.remove=Entfernen @@ -1543,7 +1557,6 @@ pulls.outdated_with_base_branch=Dieser Branch enthält nicht die neusten Commits pulls.closed_at=`hat diesen Pull-Request %[2]s geschlossen` pulls.reopened_at=`hat diesen Pull-Request %[2]s wieder geöffnet` pulls.merge_instruction_hint=`Siehe auch die Anleitung für die Kommandozeile.` - pulls.merge_instruction_step1_desc=Wechsle auf einen neuen Branch in deinem lokalen Repository und teste die Änderungen. pulls.merge_instruction_step2_desc=Führe die Änderungen zusammen und aktualisiere den Stand online auf Gitea. @@ -2499,6 +2512,7 @@ repos.forks=Forks repos.issues=Issues repos.size=Größe + defaulthooks=Standard-Webhooks defaulthooks.desc=Webhooks senden automatisch eine HTTP POST Anfrage an einen Server, wenn bestimmte Gitea Events ausgelöst werden. Hier definierte Webhooks sind die Standardwerte, die in alle neuen Repositories kopiert werden. Mehr Infos findest du in der Webhooks Anleitung (auf englisch). defaulthooks.add_webhook=Standard-Webhook hinzufügen @@ -2843,7 +2857,7 @@ commit_repo=hat %[3]s auf %[4]s gepusht create_issue=`hat Ticket %[3]s#%[2]s geöffnet` close_issue=`Ticket %[3]s#%[2]s geschlossen` reopen_issue=`Ticket %[3]s#%[2]s wiedereröffnet` -create_pull_request=`Pull-Request %[3]s#%[2]s wurde erstellt` +create_pull_request=`hat den Pull-Request %[3]s#%[2]s erstellt` close_pull_request=`Pull-Request %[3]s#%[2]s wurde geschlossen` reopen_pull_request=`Pull-Request %[3]s#%[2]s wurde wiedereröffnet` comment_issue=`Ticket %[3]s#%[2]s wurde kommentiert` @@ -2923,3 +2937,5 @@ unit=Einheit error.no_unit_allowed_repo=Du hast keine Berechtigung, um auf irgendeinen Bereich dieses Repositories zuzugreifen. error.unit_not_allowed=Du hast keine Berechtigung, um auf diesen Repository-Bereich zuzugreifen. +[packages] + diff --git a/options/locale/locale_el-GR.ini b/options/locale/locale_el-GR.ini index 21b48d8d08..f802f50cfa 100644 --- a/options/locale/locale_el-GR.ini +++ b/options/locale/locale_el-GR.ini @@ -105,6 +105,7 @@ error404=Η σελίδα που προσπαθείτε να φτάσετε εί never=Ποτέ + [error] occurred=Παρουσιάστηκε ένα σφάλμα report_message=Αν είστε σίγουροι ότι πρόκειται για ένα πρόβλημα στο Gitea, παρακαλώ αναζητήστε στα ζητήματα στο GitHub ή ανοίξτε ένα νέο ζήτημα εάν είναι απαραίτητο. @@ -658,7 +659,6 @@ ssh_invalid_token_signature=Το παρεχόμενο κλειδί SSH, υπογ ssh_token_required=Πρέπει να δώσετε μια υπογραφή για το παρακάτω διακριτικό ssh_token=Διακριτικό ssh_token_help=Μπορείτε να δημιουργήσετε μια υπογραφή χρησιμοποιώντας: -ssh_token_code=echo -n "%s" "ssh-keygen -Y sign -n gitea -f /path_to_your_pubkey ssh_token_signature=Θωρακισμένη υπογραφή SSH key_signature_ssh_placeholder=Ξεκινά με '-----BEGIN SSH SIGNATURE-----' verify_ssh_key_success=Το SSH κλειδί '%s' επαληθεύτηκε. @@ -1397,8 +1397,6 @@ issues.due_date_remove=αφαίρεσε την ημερομηνία παράδο issues.due_date_overdue=Εκπρόθεσμο issues.due_date_invalid=Η ημερομηνία παράδοσης δεν είναι έγκυρη ή εκτός εύρους. Παρακαλούμε χρησιμοποιήστε τη μορφή 'εεεε-μμ-ηη'. issues.dependency.title=Εξαρτήσεις -issues.dependency.issue_no_dependencies=Αυτό το ζήτημα προς το παρόν δεν έχει καμία εξάρτηση. -issues.dependency.pr_no_dependencies=Αυτό το pull request προς το παρόν δεν έχει εξαρτήσεις. 
issues.dependency.add=Προσθήκη εξάρτησης… issues.dependency.cancel=Ακύρωση issues.dependency.remove=Διαγραφή @@ -1559,7 +1557,6 @@ pulls.outdated_with_base_branch=Αυτός ο κλάδος δεν είναι ε pulls.closed_at=`έκλεισε αυτό το pull request %[2]s` pulls.reopened_at=`άνοιξε ξανά αυτό το pull request %[2]s` pulls.merge_instruction_hint=`Μπορείτε επίσης να δείτε τις οδηγίες της γραμμής εντολών.` - pulls.merge_instruction_step1_desc=Από το αποθετήριο του έργου σας, ελέγξτε έναν νέο κλάδο και τεστάρετε τις αλλαγές. pulls.merge_instruction_step2_desc=Συγχώνευσε τις αλλαγές και ενημέρωσε στο Gitea. @@ -2517,6 +2514,7 @@ repos.forks=Forks repos.issues=Ζητήματα repos.size=Μέγεθος + defaulthooks=Προεπιλεγμένα Webhooks defaulthooks.desc=Τα Webhooks κάνουν αυτόματα αιτήσεις HTTP POST σε ένα διακομιστή όταν συμβαίνουν ορισμένα γεγονότα στο Gitea. Τα Webhooks που ορίζονται εδώ είναι προεπιλογή και θα αντιγραφούν σε όλα τα νέα αποθετήρια. Διαβάστε περισσότερα στον οδηγό webhooks. defaulthooks.add_webhook=Προσθήκη Προεπιλεγμένου Webhook @@ -2948,3 +2946,5 @@ unit=Μονάδα error.no_unit_allowed_repo=Δεν σας επιτρέπεται να έχετε πρόσβαση σε οποιαδήποτε ενότητα αυτού του αποθετηρίου. error.unit_not_allowed=Δεν σας επιτρέπεται να έχετε πρόσβαση σε αυτήν την ενότητα αποθετηρίου. +[packages] + diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 90564c63b1..d43e34dd82 100644 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -105,6 +105,8 @@ error404 = The page you are trying to reach either does not existGitHub or open a new issue if necessary. @@ -282,6 +284,7 @@ register_helper_msg = Already have an account? Sign in now! social_register_helper_msg = Already have an account? Link it now! disable_register_prompt = Registration is disabled. Please contact your site administrator. disable_register_mail = Email confirmation for registration is disabled. +manual_activation_only = Contact your site administrator to complete activation. remember_me = Remember this Device forgot_password_title= Forgot Password forgot_password = Forgot password? @@ -487,7 +490,9 @@ auth_failed = Authentication failed: %v still_own_repo = "Your account owns one or more repositories; delete or transfer them first." still_has_org = "Your account is a member of one or more organizations; leave them first." +still_own_packages = "Your account owns one or more packages; delete them first." org_still_own_repo = "This organization still owns one or more repositories; delete or transfer them first." +org_still_own_packages = "This organization still owns one or more packages; delete them first." target_branch_not_exist = Target branch does not exist. @@ -661,7 +666,6 @@ ssh_invalid_token_signature = The provided SSH key, signature or token do not ma ssh_token_required = You must provide a signature for the below token ssh_token = Token ssh_token_help = You can generate a signature using: -ssh_token_code = echo -n "%s" | ssh-keygen -Y sign -n gitea -f /path_to_your_pubkey ssh_token_signature = Armored SSH signature key_signature_ssh_placeholder = Begins with '-----BEGIN SSH SIGNATURE-----' verify_ssh_key_success = SSH key '%s' has been verified. 
@@ -1007,6 +1011,7 @@ tags = Tags issues = Issues pulls = Pull Requests project_board = Projects +packages = Packages labels = Labels org_labels_desc = Organization level labels that can be used with all repositories under this organization org_labels_desc_manage = manage @@ -1037,6 +1042,7 @@ line_unicode = `This line has hidden unicode characters` escape_control_characters = Escape unescape_control_characters = Unescape file_copy_permalink = Copy Permalink +view_git_blame = View Git Blame video_not_supported_in_browser = Your browser does not support the HTML5 'video' tag. audio_not_supported_in_browser = Your browser does not support the HTML5 'audio' tag. stored_lfs = Stored with Git LFS @@ -1417,8 +1423,8 @@ issues.due_date_remove = "removed the due date %s %s" issues.due_date_overdue = "Overdue" issues.due_date_invalid = "The due date is invalid or out of range. Please use the format 'yyyy-mm-dd'." issues.dependency.title = Dependencies -issues.dependency.issue_no_dependencies = This issue currently doesn't have any dependencies. -issues.dependency.pr_no_dependencies = This pull request currently doesn't have any dependencies. +issues.dependency.issue_no_dependencies = No dependencies set. +issues.dependency.pr_no_dependencies = No dependencies set. issues.dependency.add = Add dependency… issues.dependency.cancel = Cancel issues.dependency.remove = Remove @@ -1475,6 +1481,7 @@ issues.content_history.created = created issues.content_history.delete_from_history = Delete from history issues.content_history.delete_from_history_confirm = Delete from history? issues.content_history.options = Options +issues.reference_link = Reference: %s compare.compare_base = base compare.compare_head = compare @@ -1483,7 +1490,13 @@ pulls.desc = Enable pull requests and code reviews. pulls.new = New Pull Request pulls.view = View Pull Request pulls.compare_changes = New Pull Request +pulls.allow_edits_from_maintainers = Allow edits from maintainers +pulls.allow_edits_from_maintainers_desc = Users with write access to the base branch can also push to this branch +pulls.allow_edits_from_maintainers_err = Updating failed pulls.compare_changes_desc = Select the branch to merge into and the branch to pull from. +pulls.has_viewed_file = Viewed +pulls.has_changed_since_last_review = Changed since your last review +pulls.viewed_files_label = %[1]d / %[2]d files viewed pulls.compare_base = merge into pulls.compare_compare = pull from pulls.switch_comparison_type = Switch comparison type @@ -1551,6 +1564,14 @@ pulls.squash_merge_pull_request = Create squash commit pulls.merge_manually = Manually merged pulls.merge_commit_id = The merge commit ID pulls.require_signed_wont_sign = The branch requires signed commits but this merge will not be signed +pulls.merge_pull_request_now = Merge Pull Request Now +pulls.rebase_merge_pull_request_now = Rebase and Merge Now +pulls.rebase_merge_commit_pull_request_now = Rebase and Merge Now (--no-ff) +pulls.squash_merge_pull_request_now = Squash and Merge Now +pulls.merge_pull_request_on_status_success = Merge Pull Request When All Checks Succeed +pulls.rebase_merge_pull_request_on_status_success = Rebase and Merge When All Checks Succeed +pulls.rebase_merge_commit_pull_request_on_status_success = Rebase and Merge (--no-ff) When All Checks Succeed +pulls.squash_merge_pull_request_on_status_success = Squash and Merge When All Checks Succeed pulls.invalid_merge_option = You cannot use this merge option for this pull request. 
pulls.merge_conflict = Merge Failed: There was a conflict whilst merging. Hint: Try a different strategy pulls.merge_conflict_summary = Error Message @@ -1579,9 +1600,16 @@ pulls.outdated_with_base_branch = This branch is out-of-date with the base branc pulls.closed_at = `closed this pull request %[2]s` pulls.reopened_at = `reopened this pull request %[2]s` pulls.merge_instruction_hint = `You can also view command line instructions.` - pulls.merge_instruction_step1_desc = From your project repository, check out a new branch and test the changes. pulls.merge_instruction_step2_desc = Merge the changes and update on Gitea. +pulls.merge_on_status_success = The pull request was scheduled to merge when all checks succeed. +pulls.merge_on_status_success_already_scheduled = This pull request is already scheduled to merge when all checks succeed. +pulls.pr_has_pending_merge_on_success = %[1]s scheduled this pull request to auto merge when all checks succeed %[2]s. +pulls.merge_pull_on_success_cancel = Cancel auto merge +pulls.pull_request_not_scheduled = This pull request is not scheduled to auto merge. +pulls.pull_request_schedule_canceled = The auto merge was canceled for this pull request. +pulls.pull_request_scheduled_auto_merge = `scheduled this pull request to auto merge when all checks succeed %[1]s` +pulls.pull_request_canceled_scheduled_auto_merge = `canceled auto merging this pull request when all checks succeed %[1]s` milestones.new = New Milestone milestones.open_tab = %d Open @@ -1792,6 +1820,7 @@ settings.pulls.allow_manual_merge = Enable Mark PR as manually merged settings.pulls.enable_autodetect_manual_merge = Enable autodetect manual merge (Note: In some special cases, misjudgments can occur) settings.pulls.allow_rebase_update = Enable updating pull request branch by rebase settings.pulls.default_delete_branch_after_merge = Delete pull request branch after merge by default +settings.packages_desc = Enable Repository Packages Registry settings.projects_desc = Enable Repository Projects settings.admin_settings = Administrator Settings settings.admin_enable_health_check = Enable Repository Health Checks (git fsck) @@ -1949,6 +1978,8 @@ settings.event_pull_request_review = Pull Request Reviewed settings.event_pull_request_review_desc = Pull request approved, rejected, or review comment. settings.event_pull_request_sync = Pull Request Synchronized settings.event_pull_request_sync_desc = Pull request synchronized. +settings.event_package = Package +settings.event_package_desc = Package created or deleted in a repository. settings.branch_filter = Branch filter settings.branch_filter_desc = Branch whitelist for push, branch creation and branch deletion events, specified as glob pattern. If empty or *, events for all branches are reported. See github.com/gobwas/glob documentation for syntax. Examples: master, {master,release*}. 
settings.active = Active @@ -2430,6 +2461,7 @@ dashboard.resync_all_hooks = Resynchronize pre-receive, update and post-receive dashboard.reinit_missing_repos = Reinitialize all missing Git repositories for which records exist dashboard.sync_external_users = Synchronize external user data dashboard.cleanup_hook_task_table = Cleanup hook_task table +dashboard.cleanup_packages = Cleanup expired packages dashboard.server_uptime = Server Uptime dashboard.current_goroutine = Current Goroutines dashboard.current_memory_usage = Current Memory Usage @@ -2462,6 +2494,7 @@ dashboard.gc_times = GC Times dashboard.delete_old_actions = Delete all old actions from database dashboard.delete_old_actions.started = Delete all old actions from database started. dashboard.update_checker = Update checker +dashboard.delete_old_system_notices = Delete all old system notices from database users.user_manage_panel = User Account Management users.new_account = Create User Account @@ -2496,8 +2529,10 @@ users.allow_import_local = May Import Local Repositories users.allow_create_organization = May Create Organizations users.update_profile = Update User Account users.delete_account = Delete User Account +users.cannot_delete_self = "You cannot delete yourself" users.still_own_repo = This user still owns one or more repositories. Delete or transfer these repositories first. users.still_has_org = This user is a member of an organization. Remove the user from any organizations first. +users.still_own_packages = This user still owns one or more packages. Delete these packages first. users.deletion_success = The user account has been deleted. users.reset_2fa = Reset 2FA users.list_status_filter.menu_text = Filter @@ -2544,6 +2579,17 @@ repos.forks = Forks repos.issues = Issues repos.size = Size +packages.package_manage_panel = Package Management +packages.total_size = Total Size: %s +packages.owner = Owner +packages.creator = Creator +packages.name = Name +packages.version = Version +packages.type = Type +packages.repository = Repository +packages.size = Size +packages.published = Published + defaulthooks = Default Webhooks defaulthooks.desc = Webhooks automatically make HTTP POST requests to a server when certain Gitea events trigger. Webhooks defined here are defaults and will be copied into all new repositories. Read more in the webhooks guide. defaulthooks.add_webhook = Add Default Webhook @@ -2811,9 +2857,12 @@ monitor.next = Next Time monitor.previous = Previous Time monitor.execute_times = Executions monitor.process = Running Processes +monitor.stacktrace = Stacktraces +monitor.goroutines = %d Goroutines monitor.desc = Description monitor.start = Start Time monitor.execute_time = Execution Time +monitor.last_execution_result = Result monitor.process.cancel = Cancel process monitor.process.cancel_desc = Cancelling a process may cause data loss monitor.process.cancel_notices = Cancel: %s? @@ -2979,3 +3028,95 @@ error.probable_bad_default_signature = "WARNING! Although the default key has th unit = Unit error.no_unit_allowed_repo = You are not allowed to access any section of this repository. error.unit_not_allowed = You are not allowed to access this repository section. + +[packages] +title = Packages +desc = Manage repository packages. +empty = There are no packages yet. +empty.documentation = For more information on the package registry, see the documentation. +filter.type = Type +filter.type.all = All +filter.no_result = Your filter produced no results. 
+filter.container.tagged = Tagged +filter.container.untagged = Untagged +published_by = Published %[1]s by %[3]s +published_by_in = Published %[1]s by %[3]s in %[5]s +installation = Installation +about = About this package +requirements = Requirements +dependencies = Dependencies +keywords = Keywords +details = Details +details.author = Author +details.project_site = Project Site +details.license = License +assets = Assets +versions = Versions +versions.on = on +versions.view_all = View all +dependency.id = ID +dependency.version = Version +composer.registry = Setup this registry in your ~/.composer/config.json file: +composer.install = To install the package using Composer, run the following command: +composer.documentation = For more information on the Composer registry, see the documentation. +composer.dependencies = Dependencies +composer.dependencies.development = Development Dependencies +conan.details.repository = Repository +conan.registry = Setup this registry from the command line: +conan.install = To install the package using Conan, run the following command: +conan.documentation = For more information on the Conan registry, see the documentation. +container.details.type = Image Type +container.details.platform = Platform +container.details.repository_site = Repository Site +container.details.documentation_site = Documentation Site +container.pull = Pull the image from the command line: +container.documentation = For more information on the Container registry, see the documentation. +container.multi_arch = OS / Arch +container.layers = Image Layers +container.labels = Labels +container.labels.key = Key +container.labels.value = Value +generic.download = Download package from the command line: +generic.documentation = For more information on the generic registry, see the documentation. +helm.registry = Setup this registry from the command line: +helm.install = To install the package, run the following command: +helm.documentation = For more information on the Helm registry, see the documentation. +maven.registry = Setup this registry in your project pom.xml file: +maven.install = To use the package include the following in the dependencies block in the pom.xml file: +maven.install2 = Run via command line: +maven.download = To download the dependency, run via command line: +maven.documentation = For more information on the Maven registry, see the documentation. +nuget.registry = Setup this registry from the command line: +nuget.install = To install the package using NuGet, run the following command: +nuget.documentation = For more information on the NuGet registry, see the documentation. +nuget.dependency.framework = Target Framework +npm.registry = Setup this registry in your project .npmrc file: +npm.install = To install the package using npm, run the following command: +npm.install2 = or add it to the package.json file: +npm.documentation = For more information on the npm registry, see the documentation. +npm.dependencies = Dependencies +npm.dependencies.development = Development Dependencies +npm.dependencies.peer = Peer Dependencies +npm.dependencies.optional = Optional Dependencies +npm.details.tag = Tag +pypi.requires = Requires Python +pypi.install = To install the package using pip, run the following command: +pypi.documentation = For more information on the PyPI registry, see the documentation. 
+rubygems.install = To install the package using gem, run the following command: +rubygems.install2 = or add it to the Gemfile: +rubygems.dependencies.runtime = Runtime Dependencies +rubygems.dependencies.development = Development Dependencies +rubygems.required.ruby = Requires Ruby version +rubygems.required.rubygems = Requires RubyGem version +rubygems.documentation = For more information on the RubyGems registry, see the documentation. +settings.link = Link this package to a repository +settings.link.description = If you link a package with a repository, the package is listed in the repository's package list. +settings.link.select = Select Repository +settings.link.button = Update Repository Link +settings.link.success = Repository link was successfully updated. +settings.link.error = Failed to update repository link. +settings.delete = Delete package +settings.delete.description = Deleting a package is permanent and cannot be undone. +settings.delete.notice = You are about to delete %s (%s). This operation is irreversible, are you sure? +settings.delete.success = The package has been deleted. +settings.delete.error = Failed to delete the package. diff --git a/options/locale/locale_es-ES.ini b/options/locale/locale_es-ES.ini index c9ad797a19..d8f0deabe0 100644 --- a/options/locale/locale_es-ES.ini +++ b/options/locale/locale_es-ES.ini @@ -105,6 +105,7 @@ error404=La página a la que está intentando acceder o no existeGitHub y abre un nuevo problema si es necesario. @@ -268,6 +269,7 @@ search=Buscar code=Código search.fuzzy=Parcial search.match=Coincidir +code_search_unavailable=Actualmente la búsqueda de código no está disponible. Póngase en contacto con el administrador de su sitio. repo_no_results=No se ha encontrado ningún repositorio coincidente. user_no_results=No se ha encontrado ningún usuario coincidente. org_no_results=No se ha encontrado ninguna organización coincidente. @@ -281,6 +283,7 @@ register_helper_msg=¿Ya tienes una cuenta? ¡Inicia sesión! social_register_helper_msg=¿Ya tienes una cuenta? ¡Enlázala! disable_register_prompt=Registro deshabilitado. Por favor, póngase en contacto con el administrador del sitio. disable_register_mail=Correo electrónico de confirmación de registro deshabilitado. +manual_activation_only=Póngase en contacto con el administrador del sitio para completar la activación. remember_me=Recordar este Dispositivo forgot_password_title=He olvidado mi contraseña forgot_password=¿Has olvidado tu contraseña? 
@@ -549,6 +552,16 @@ continue=Continuar cancel=Cancelar language=Idioma ui=Tema +hidden_comment_types=Tipos de comentarios ocultos +comment_type_group_reference=Referencia +comment_type_group_label=Etiqueta +comment_type_group_milestone=Hito +comment_type_group_assignee=Asignado +comment_type_group_title=Título +comment_type_group_branch=Rama +comment_type_group_time_tracking=Seguimiento de Tiempo +comment_type_group_deadline=Fecha límite +comment_type_group_dependency=Dependencia privacy=Privacidad keep_activity_private=Ocultar la actividad de la página del perfil keep_activity_private_popup=Hace la actividad visible sólo para ti y los administradores @@ -644,7 +657,6 @@ ssh_invalid_token_signature=La clave SSH proporcionada, la firma o el token no c ssh_token_required=Debe proporcionar una firma para el token de abajo ssh_token=Token ssh_token_help=Puede generar una firma de la siguiente manera: -ssh_token_code=echo -n "%s" | ssh-keygen -Y sign -n gitea -f /ruta_a_su_clave_publico ssh_token_signature=Firma SSH armadura key_signature_ssh_placeholder=Comienza con '-----BEGIN SSH SIGNATURE-----' verify_ssh_key_success=La clave SSH '%s' ha sido verificada. @@ -1382,8 +1394,6 @@ issues.due_date_remove=eliminó la fecha de vencimiento %s %s issues.due_date_overdue=Vencido issues.due_date_invalid=La fecha de vencimiento es inválida o está fuera de rango. Por favor utilice el formato 'aaaa-mm-dd'. issues.dependency.title=Dependencias -issues.dependency.issue_no_dependencies=Esta incidencia actualmente no tiene ninguna dependencia. -issues.dependency.pr_no_dependencies=Este pull request actualmente no tiene ninguna dependencia. issues.dependency.add=Añadir dependencia… issues.dependency.cancel=Cancelar issues.dependency.remove=Eliminar @@ -1544,7 +1554,6 @@ pulls.outdated_with_base_branch=Esta rama está desactualizada con la rama base pulls.closed_at=`cerró este pull request %[2]s` pulls.reopened_at=`reabrió este pull request %[2]s` pulls.merge_instruction_hint=`También puede ver instrucciones de línea de comandos.` - pulls.merge_instruction_step1_desc=Desde el repositorio de su proyecto, revisa una nueva rama y prueba los cambios. pulls.merge_instruction_step2_desc=Combine los cambios y actualice en Gitea. @@ -2483,6 +2492,7 @@ repos.forks=Forks repos.issues=Incidencias repos.size=Tamaño + defaulthooks=Webhooks por defecto defaulthooks.desc=Los Webhooks automáticamente hacen peticiones HTTP POST a un servidor cuando ciertos eventos de Gitea se activan. Los ganchos definidos aquí son predeterminados y serán copiados en todos los nuevos repositorios. Leer más en la guía webhooks. defaulthooks.add_webhook=Añadir Webhook por defecto @@ -2779,6 +2789,9 @@ monitor.queue.pool.flush.title=Vaciar cola monitor.queue.pool.flush.desc=Al vaciar la cola se añadirá un worker que terminará una vez que la cola esté vacía, o se agote. monitor.queue.pool.flush.submit=Añadir trabajador de vaciado monitor.queue.pool.flush.added=Trabajador de vaciado añadido por %[1]s +monitor.queue.pool.pause.title=Pausar cola +monitor.queue.pool.pause.submit=Pausar cola +monitor.queue.pool.resume.submit=Reanudar cola monitor.queue.settings.title=Ajustes del grupo monitor.queue.settings.desc=Los grupos de trabajadores se crean dinámicamente como un impulso en respuesta al bloqueo de la cola de sus trabajadores. Estos cambios no afectarán a los grupos de trabajadores actuales. @@ -2908,3 +2921,5 @@ unit=Unidad error.no_unit_allowed_repo=No tiene permisos para acceder a ninguna sección de este repositorio. 
error.unit_not_allowed=No tiene permisos para acceder a esta sección del repositorio. +[packages] + diff --git a/options/locale/locale_fa-IR.ini b/options/locale/locale_fa-IR.ini index 060eddfb2b..93a756fa5e 100644 --- a/options/locale/locale_fa-IR.ini +++ b/options/locale/locale_fa-IR.ini @@ -91,6 +91,7 @@ error404=صفحه موردنظر شما یا وجود ندارد%[2]s` pulls.reopened_at=`این درخواست pull را بازگشایی کرد %[2]s` pulls.merge_instruction_hint=`همچنین می‌توانید دستورالعمل‌های خط فرمان را مشاهده کنید.` - pulls.merge_instruction_step1_desc=از انبار پروژه خود، یک شاخه جدید را بگیرید و تغییرات را آزمایش کنید. pulls.merge_instruction_step2_desc=تغییرات را ادغام کنید و در Gitea به روز کنید. @@ -2400,6 +2398,7 @@ repos.forks=انشعاب‌ها repos.issues=مسائل repos.size=اندازه + defaulthooks=وب هوک های پیش فرض defaulthooks.desc=هنگامی که برخی رویدادهای Gitea فعال می شوند، Webhook ها به طور خودکار درخواست های HTTP POST را به سرور ارسال می کنند. هوک های تعریف شده در اینجا پیش فرض هستند و در تمام مخازن جدید کپی می شوند. در راهنمای هوک‌های وب بیشتر بخوانید. defaulthooks.add_webhook=اضافه کردن Webhook پیش فرض @@ -2817,3 +2816,5 @@ error.probable_bad_default_signature=هشدار! اگرچه اینجا یک کل error.no_unit_allowed_repo=شما اجازه دسترسی به هیچ قسمت از این مخزن را ندارید. error.unit_not_allowed=شما اجازه دسترسی به این قسمت مخزن را ندارید. +[packages] + diff --git a/options/locale/locale_fi-FI.ini b/options/locale/locale_fi-FI.ini index df78f526b7..361ba76e74 100644 --- a/options/locale/locale_fi-FI.ini +++ b/options/locale/locale_fi-FI.ini @@ -75,6 +75,7 @@ loading=Ladataan… error404=Sivu, jota yrität nähdä, joko ei löydy tai et ole oikeutettu katsomaan sitä. + [error] [startpage] @@ -731,7 +732,6 @@ issues.due_date_form_edit=Muokkaa issues.due_date_form_remove=Poista issues.due_date_not_set=Määräpäivää ei asetettu. issues.dependency.title=Riippuvuudet -issues.dependency.pr_no_dependencies=Tällä vetopyynnöllä ei tällä hetkellä ole riippuvuuksia. issues.dependency.add=Lisää riippuvuus… issues.dependency.cancel=Peru issues.dependency.remove=Poista @@ -756,7 +756,6 @@ pulls.can_auto_merge_desc=Tämä pull-pyyntö voidaan yhdistää automaattisesti ; %[2]s
%[3]s
- milestones.new=Uusi merkkipaalu milestones.open_tab=%d avoinna milestones.close_tab=%d suljettu @@ -1137,6 +1136,7 @@ repos.size=Koko + auths.new=Lisää todennuslähde auths.name=Nimi auths.type=Tyyppi @@ -1327,3 +1327,5 @@ mark_all_as_read=Merkitse kaikki luetuiksi [units] +[packages] + diff --git a/options/locale/locale_fr-FR.ini b/options/locale/locale_fr-FR.ini index ce6ba75212..496b3b3c0f 100644 --- a/options/locale/locale_fr-FR.ini +++ b/options/locale/locale_fr-FR.ini @@ -91,6 +91,7 @@ error404=La page que vous essayez d'atteindre n'existe pas ou < never=Jamais + [error] missing_csrf=Requête incorrecte: aucun jeton CSRF présent @@ -1253,8 +1254,6 @@ issues.due_date_remove=a supprimé l'échéance %s %s issues.due_date_overdue=En retard issues.due_date_invalid=La date d’échéance est invalide ou hors plage. Veuillez utiliser le format 'aaaa-mm-dd'. issues.dependency.title=Dépendances -issues.dependency.issue_no_dependencies=Ce ticket n'a actuellement pas de dépendance. -issues.dependency.pr_no_dependencies=La demande de fusion n'a actuellement pas de dépendance. issues.dependency.add=Ajouter une dépendance… issues.dependency.cancel=Annuler issues.dependency.remove=Supprimer @@ -1391,7 +1390,6 @@ pulls.outdated_with_base_branch=Cette branche est désynchronisée avec la branc pulls.closed_at=`a fermé cette pull request %[2]s` pulls.reopened_at=`a réouvert cette pull request %[2]s` pulls.merge_instruction_hint=`Vous pouvez également voir les instructions en ligne de commande.` - pulls.merge_instruction_step1_desc=Depuis le dépôt de votre projet, sélectionnez une nouvelle branche et testez les modifications. pulls.merge_instruction_step2_desc=Fusionner les modifications et mettre à jour sur Gitea. @@ -2218,6 +2216,7 @@ repos.issues=Tickets repos.size=Taille + systemhooks=Rappels système systemhooks.desc=Les Webhooks font automatiquement des requêtes HTTP POST à un serveur lorsque certains événements Gitea se déclenchent. Les Webhooks définis ici agiront sur tous les dépots du système, donc veuillez prendre en compte les implications en termes de performances que cela peut avoir. Lire la suite dans le guide des Webhooks. systemhooks.add_webhook=Ajouter un rappel système @@ -2590,3 +2589,5 @@ error.probable_bad_default_signature=AVERTISSEMENT ! Bien que la clé par défau error.no_unit_allowed_repo=Vous n'êtes pas autorisé à accéder à n'importe quelle section de ce dépôt. error.unit_not_allowed=Vous n'êtes pas autorisé à accéder à cette section du dépôt. +[packages] + diff --git a/options/locale/locale_hu-HU.ini b/options/locale/locale_hu-HU.ini index 2a593c56a0..9b85f57e16 100644 --- a/options/locale/locale_hu-HU.ini +++ b/options/locale/locale_hu-HU.ini @@ -80,6 +80,7 @@ step2=2. lépés: error404=Az elérni kívánt oldal vagy nem létezik, vagy nincs jogosultsága a megtekintéséhez. + [error] [startpage] @@ -930,7 +931,6 @@ issues.due_date_modified=határidő módosítva %s-ről %s %s-re issues.due_date_remove=%s %s-es határidő eltávolítva issues.due_date_overdue=Lejárt issues.dependency.title=Függőségek -issues.dependency.issue_no_dependencies=Ennek a hibajegynek jelenleg nincsenek függőségei. 
issues.dependency.add=Függőség hozzáadása… issues.dependency.cancel=Mégse issues.dependency.remove=Eltávolítás @@ -977,7 +977,6 @@ pulls.can_auto_merge_desc=Ez az egyesítési kérés automatikusan végrehajthat pulls.status_checking=Néhány ellenőrzés függőben van pulls.status_checks_success=Minden ellenőrzés sikeres volt - milestones.new=Új mérföldkő milestones.open_tab=%d Nyitott milestones.close_tab=%d Zárt @@ -1495,6 +1494,7 @@ repos.size=Méret + auths.new=Hitelesítési forrás hozzáadása auths.name=Név auths.type=Típus @@ -1772,3 +1772,5 @@ error.not_signed_commit=Nem aláírt commit [units] +[packages] + diff --git a/options/locale/locale_id-ID.ini b/options/locale/locale_id-ID.ini index 5a3780e3cb..77df83eb38 100644 --- a/options/locale/locale_id-ID.ini +++ b/options/locale/locale_id-ID.ini @@ -73,6 +73,7 @@ loading=Memuat… + [error] [startpage] @@ -840,7 +841,6 @@ pulls.can_auto_merge_desc=Permintaan tarik ini dapat digabung secara otomatis. ; %[2]s
%[3]s
- milestones.new=Milestone Baru milestones.open_tab=%d Terbuka milestones.close_tab=%d Tertutup @@ -1141,6 +1141,7 @@ repos.size=Ukuran + auths.name=Nama auths.type=Jenis auths.enabled=Aktif @@ -1394,3 +1395,5 @@ error.not_signed_commit=Bukan melakukan yang ditandatangani error.no_unit_allowed_repo=Anda tidak diijinkan untuk melihat semua unit dari repositori ini. error.unit_not_allowed=Anda tidak diizinkan untuk mengunjungi unit repositori ini. +[packages] + diff --git a/options/locale/locale_is-IS.ini b/options/locale/locale_is-IS.ini new file mode 100644 index 0000000000..b3d570c801 --- /dev/null +++ b/options/locale/locale_is-IS.ini @@ -0,0 +1,1372 @@ +home=Forsíða +dashboard=Stjórnborð +explore=Vafra +help=Hjálp +sign_in=Skrá Inn +sign_in_with=Skrá Inn Með +sign_out=Skrá Út +sign_up=Nýskráning +link_account=Tengja Notanda +register=Nýskráning +website=Vefsíða +version=Útgáfa +powered_by=Keyrt af %s +page=Síða +template=Sniðmát +language=Tungumál +notifications=Tilkynningar +active_stopwatch=Virk Tímamæling +create_new=Skapa… +user_profile_and_more=Notandasíða og Stillingar… +signed_in_as=Skráð(ur) inn sem +enable_javascript=Þessi vefsíða virkar betur með JavaScript virkt. +toc=Efnisyfirlit +licenses=Hugbúnaðarleyfi +return_to_gitea=Til baka að Gitea + +username=Notandanafn +email=Netfang +password=Lykilorð +access_token=Aðgangslykill +re_type=Endurtaktu Lykilorðið +captcha=CAPTCHA +twofa=Tvíþætt Auðkenning +twofa_scratch=Tveggja-Þátta Skrapkóði +passcode=Aðgangstala + +webauthn_insert_key=Settu öryggislykilinn þinn inn +webauthn_sign_in=Ýttu á hnappinn á öryggislyklinum þínum. Ef öryggislykillinn þinn hefur engan hnapp skaltu setja hann aftur inn. +webauthn_press_button=Vinsamlegast ýttu á hnappinn á öryggislyklinum þínum… +webauthn_use_twofa=Notaðu tveggja-þátta kóða úr símanum þínum +webauthn_error=Gat ekki lesið öryggislykilinn þinn. +webauthn_unsupported_browser=Vafrinn þinn styður ekki WebAuthn eins og er. +webauthn_error_unknown=Óþekkt villa kom upp. Vinsamlegast reyndu aftur. +webauthn_error_insecure=WebAuthn styður aðeins öruggar tengingar. Til að prófa yfir HTTP geturðu notað upprunann „localhost“ eða „127.0.0.1“ +webauthn_error_unable_to_process=Netþjónninn gat ekki ráðið við beiðni þína. +webauthn_error_duplicated=Öryggislykillinn er ekki leyfður fyrir þessa beiðni. Gakktu úr skugga um að lykillinn sé ekki þegar skráður. +webauthn_error_empty=Þú verður að setja nafn fyrir þennan lykil. +webauthn_error_timeout=Tímamörk náð áður en hægt var að lesa lykilinn þinn. Vinsamlegast endurhlaðið þessa síðu og reyndu aftur. +webauthn_u2f_deprecated=Lykillinn: „%s“ auðkennir með því að nota úrelta U2F aðferð. Þú ættir að endurskrá þennan lykil og fjarlægja gömlu skráninguna. 
+webauthn_reload=Endurhlaða + +repository=Hugbúnaðarsafn +organization=Stofnun +mirror=Speglun +new_repo=Nýtt Hugbúnaðarsafn +new_migrate=Nýr Flutningur +new_mirror=Ný Speglun +new_fork=Ný Hugbúnaðarskipting +new_org=Ný Stofnun +new_project=Nýtt Verkefni +new_project_board=Stjórn Nýs Verkefnis +manage_org=Stjórna Stofnunum +admin_panel=Stjórnborð +account_settings=Notandastillingar +settings=Stillingar +your_profile=Notandasíða +your_starred=Eftirlæti +your_settings=Stillingar + +all=Allt +sources=Eigin +mirrors=Speglanir +collaborative=Samstörf +forks=Skiptingar + +activities=Virkni +pull_requests=Sameiningarbeiðnir +issues=Vandamál +milestones=Tímamót + +ok=Í lagi +cancel=Hætta við +save=Vista +add=Bæta við +add_all=Bæta Öllu Við +remove=Fjarlægja +remove_all=Fjarlægja Allt +edit=Breyta + +copy=Afrita +copy_url=Afrita vefslóð +copy_branch=Afritaðu heiti greinar +copy_success=Afritað! +copy_error=Afritun mistókst + +write=Skrifa +preview=Forskoða +loading=Hleður… + +step1=Skref 1: +step2=Skref 2: + +error=Villa +error404=Síðan sem þú ert að reyna að fá annað hvort er ekki til eða þú hefur ekki heimild til að skoða hana. + +never=Aldrei + + +[error] +occurred=Villa kom upp +report_message=Ef þú ert viss um að þetta sé villa í Gitea þá skaltu leita að vandamálum á GitHub eða opna nýtt vandamál ef þörf krefst. +missing_csrf=Slæm beiðni: enginn CSRF lykill +invalid_csrf=Slæm beiðni: ógildur CSRF lykill +not_found=Markmiðið fannst ekki. +network_error=Netkerfisvilla + +[startpage] +app_desc=Þrautalaus og sjálfhýst Git þjónusta +install=Einföld uppsetning +install_desc=Einfaldlega keyrðu forritiðfyrir vettvanginn þinn, Docker, eða fáðu það í pakka. +platform=Fjölvettvangur +platform_desc=Gitea virkar hvar sem að Go gerir: Linux, macOS, Windows, ARM o. s. frv. Veldu það sem þú vilt! +lightweight=Létt +lightweight_desc=Gitea hefur lágar lágmarkskröfur og getur keyrt á ódýrum Raspberry Pi. Sparaðu orku! +license=Frjáls Hugbúnaður +license_desc=Sæktu code.gitea.io/gitea! Gakktu til liðs með því að taka þátt til þess að gera þetta verkefni jafnvel betra! Vertu ekki feimin(n) við að verða þátttakandi! + +[install] +install=Uppsetning +title=Upphafleg Uppsetning +docker_helper=Ef þú keyrir Gitea inni í Docker þá viltu vinsamlegast lesa leiðbeiningaritið áður en þú breytir stillingum. +require_db_desc=Gitea krefst MySQL, PostgreSQL, MSSQL, SQLite3 eða TiDB (MySQL samskiptareglur). +db_title=Gagnagrunnsstillingar +db_type=Tegund Gagnagrunns +host=Hýsill +user=Notandanafn +password=Lykilorð +db_name=Gagnagrunnsheiti +db_helper=Athugið MySQL notendur: vinsamlegast notið InnoDB geymsluvélina og ef þið notið „utf8mb4,“ verður InnoDB útgáfan ykkar að vera yfir 5.6 . +db_schema=Uppdráttur +db_schema_helper=Skildu eftir autt fyrir sjálfgefinn gagnagrunn („public“). +ssl_mode=SSL +charset=Stafatafla +path=Slóð +sqlite_helper=Skráarslóð fyrir SQLite3 gagnagrunninn.
Sláðu inn algjöra slóð ef þú keyrir Gitea sem þjónustu. +reinstall_error=Þú ert að reyna að setja upp í núverandi Gitea gagnagrunn +reinstall_confirm_message=Enduruppsetning með núverandi Gitea gagnagrunni getur valdið mörgum vandamálum. Í flestum tilfellum ættir þú að nota núverandi "app.ini" til að keyra Gitea. Ef þú veist hvað þú ert að gera skaltu staðfesta eftirfarandi: +reinstall_confirm_check_1=Gögnin sem eru dulkóðuð með SECRET_KEY í app.ini gætu glatast: notendur gætu hugsanlega ekki skráð sig inn með 2FA/OTP og speglar virka kannski ekki rétt. Með því að haka við þennan reit staðfestirðu að núverandi app.ini skrá inniheldur réttan SECRET_KEY. +reinstall_confirm_check_2=Hugbúnaðarsöfn og stillingar gætu þurft að endursamstilla. Með því að haka við þennan reit staðfestir þú að þú endursamstillir krókana fyrir hugbúnaðarsöfn og authorized_keys skrána handvirkt. Þú staðfestir að þú tryggir að hugbúnaðarsafns- og spegilstillingar séu réttar. +reinstall_confirm_check_3=Þú staðfestir að þú sért alveg viss um að þetta Gitea sé í gangi með réttri app.ini staðsetningu og að þú sért viss um að þú þurfir að setja það upp aftur. Þú staðfestir að þú viðurkennir ofangreindar áhættur. +err_empty_db_path=SQLite3 gagnagrunnsslóðin má ekki vera tóm. +no_admin_and_disable_registration=Þú getur ekki slökkt á sjálfsskráningu notenda án þess að búa til stjórnandanotanda. +err_empty_admin_password=Lykilorð stjórnanda má ekki vera tómt. +err_empty_admin_email=Netfang stjórnanda má ekki vera tómt. +err_admin_name_is_reserved=Notandanafn stjórnanda er ógilt. Notandanafnið er frátekið +err_admin_name_pattern_not_allowed=Notandanafn stjórnanda er ógilt. Notandanafnið passar við frátekið mynstur +err_admin_name_is_invalid=Notandanafn Stjórnanda er ógilt + +general_title=Almennar Stillingar +app_name=Heiti vefsvæðis +app_name_helper=Þú getur slegið inn nafn fyrirtækis þíns hér. +repo_path=Grunnsslóð Hugbúnaðarsafns +repo_path_helper=Fjarlægar Git hugbúnaðarsöfn verða vistaðar í þessari möppu. +lfs_path=Git LFS Grunnsslóð +lfs_path_helper=Skrár sem Git LFS rekur verða geymdar í þessari möppu. Skildu eftir tómt til að slökkva á. +run_user=Keyra Sem Notandanafn +run_user_helper=Sláðu inn notandanafn stýrikerfisins sem Gitea keyrir sem. Athugaðu að þessi notandi verður að hafa aðgang að grunnsslóð gugbúnaðarsafna. +domain=Lén Netþjóns +domain_helper=Lén eða hýsilfang fyrir netþjóninn. +ssh_port=SSH Netþjónsgátt +ssh_port_helper=Gátt sem SSH þjónninn þinn hlustar á. Skildu eftir tómt til að slökkva á. +http_port=Gitea HTTP Hlustunargátt +http_port_helper=Gátt sem Gitea vefþjónninn mun hlusta á. +app_url=Grunnvefslóð Gitea +app_url_helper=Grunnvistfang fyrir HTTP(S) afrit slóð og tölvupósttilkynningar. +log_root_path=Slóð Annáls +log_root_path_helper=Annálaskrár verða skrifaðar í þessa möppu. + +optional_title=Valfrjálsar Stillingar +email_title=Tölvupóstsstillingar +smtp_host=SMTP Hýsill +smtp_from=Senda Tölvupóst Sem +smtp_from_helper=Netfang sem Gitea mun nota. Sláðu inn venjulegt netfang eða notaðu „Nafn“ sniðið. +mailer_user=SMTP Notandanafn +mailer_password=SMTP Lykilorð +register_confirm=Krefjast Staðfestingar Tölvupósts Til Að Nýskrá +mail_notify=Virkja Tölvupósttilkynningar +server_service_title=Stillingar Netþjóns og Þriðja Aðila +offline_mode=Virkjaðu Staðbundin Ham +offline_mode_popup=Slökktu á efnisafhendingarnetum þriðja aðila og þjónaðu öllum gögnum á staðnum. +disable_gravatar=Óvirkja Gravatar +disable_gravatar_popup=Slökkva á Gravatar og notandamyndar þjónustum. 
Sjálfgefin notandamynd verður notuð ef notandi hleður ekki upp sína eigin. +federated_avatar_lookup=Virkja Samtök Notandamyndar +openid_signin=Virkja OpenID Innskráningu +openid_signin_popup=Virkja OpenID innskráningu notenda. +enable_captcha=Virkja CAPTCHA innskráningu +admin_name=Notandanafn Stjórnanda +admin_password=Lykilorð +confirm_password=Staðfestu Lykilorðið +admin_email=Netfang +install_btn_confirm=Setja upp Gitea +test_git_failed=Gat ekki prófað „git“ skipunina: %v +sqlite3_not_available=Þessi Gitea útgáfa styður ekki SQLite3. Vinsamlegast sæktu útgáfunni okkar frá %s (ekki „gobuild“ útgáfunna). +invalid_db_setting=Gagnagrunnsstillingarnar eru ógildar: %v +invalid_db_table=Gagnagrunnstaflan „%s“ er ógild: %v +invalid_repo_path=Grunnsslóð hugbúnaðarsafns er ógild: %v +invalid_log_root_path=Slóð annáls er ógild: %v +default_keep_email_private_popup=Fela sjálfgefið netföng nýrra notendareikninga. +no_reply_address_helper=Lén fyrir notendur með falið netfang. Til dæmis notandanafnið „joe“ verður skráð í Git sem „joe@noreply.example.org“ ef falið tölvupóstlén er stillt á „noreply.example.org“. + +[home] +uname_holder=Notandanafn eða Netfang +password_holder=Lykilorð +my_repos=Hugbúnaðarsöfn +show_more_repos=Sýna fleiri hugbúnaðarsöfn… +my_orgs=Stofnanir Mínar +my_mirrors=Speglanir Mínar +view_home=Skoða %s +search_repos=Finna hugbúnaðarsafn… +filter=Aðrar Síur + +show_archived=Safnvistað + +show_private=Einka +show_only_private=Að sýna aðeins einka +show_only_public=Að sýna aðeins opinber + +issues.in_your_repos=Í hugbúnaðarsöfnum þínum + +[explore] +repos=Hugbúnaðarsöfn +users=Notendur +organizations=Stofnanir +search=Leita +code=Kóði +search.fuzzy=Óljóst +code_search_unavailable=Sem stendur er kóðaleit ekki í boði. Vinsamlegast hafðu samband við síðustjórann þinn. +repo_no_results=Engin samsvarandi hugbúnaðarsöfn fundust. +user_no_results=Engir samsvarandi notendur fundust. +org_no_results=Engar samsvarandi stofnanir fundust. +code_no_results=Enginn samsvarandi frumkóði fannst eftur þínum leitarorðum. +code_search_results=Leitarniðurstöður fyrir „%s“ + +[auth] +create_new_account=Skrá Notanda +register_helper_msg=Ertu nú þegar með notanda? Skráðu þig inn núna! +social_register_helper_msg=Ertu nú þegar með reikning? Tengdu hann núna! +manual_activation_only=Hafðu samband við stjórnanda vefsvæðisins til að ljúka virkjun. +remember_me=Muna eftir þessu Tæki +forgot_password_title=Gleymt Lykilorð +forgot_password=Gleymdirðu Lykilorðinu? +sign_up_now=Vantar þig notanda? Nýskráðu núna! +sign_up_successful=Sköpun notanda tókst. +must_change_password=Uppfærðu lykilorðið þitt +active_your_account=Virkjaðu Aðganginn Þinn +account_activated=Aðgangur hefur verið virkjaður +prohibit_login=Nýskráningar Óheimilar +has_unconfirmed_mail=Halló, %s, þú ert með óstaðfest netfang (%s). Ef þú hefur ekki fengið staðfestingarpóst eða þarft nýjan, vinsamlegast smelltu á hnappinn hér að neðan. +resend_mail=Smelltu hér til að endursenda virkjunarpóstinn þinn +send_reset_mail=Senda Tölvupóst Til að Endurheimta Reikning +reset_password=Endurheimt Reiknings +reset_password_helper=Endurheimta Reikning +verify=Staðfesta +scratch_code=Skrapkóði +use_scratch_code=Nota skrapkóða +twofa_scratch_token_incorrect=Skrapkóði þinn er rangur. 
+login_userpass=Skrá Inn +login_openid=OpenID +oauth_signup_tab=Skrá Nýjan Notanda +oauth_signup_title=Klára Nýjum Notanda +oauth_signup_submit=Klára Notanda +oauth_signin_tab=Tengja Núverandi Reikning +oauth_signin_submit=Tengja Notanda +openid_connect_submit=Tengjast +openid_register_title=Skrá nýjan notanda +disable_forgot_password_mail=Endurheimting reiknings er óvirk vegna þess að enginn tölvupóstur er uppsettur. Vinsamlegast hafðu samband við síðustjórann þinn. +disable_forgot_password_mail_admin=Endurheimting reiknings er aðeins virk þegar tölvupóstur er uppsettur. Vinsamlegast settu upp tölvupóst til að virkja endurheimting reikningar. +authorize_application=Heimilda Forrit +authorize_application_created_by=Þetta forrit var stofnað af %s. +authorize_title=Veita „%s“ aðgang að reikningnum þínum? +authorization_failed=Heimild mistókst +authorization_failed_desc=Heimildin mistókst vegna þess að við fundum ógilda beiðni. Vinsamlegast hafðu samband við umsjónarmann forritsins sem þú hefur reynt að heimila. +sspi_auth_failed=SSPI auðkenning mistókst +password_pwned=Lykilorðið sem þú valdir er á lista yfir stolin lykilorð sem áður hafa verið afhjúpuð í opinberum gagnabrotum. Vinsamlegast reyndu aftur með öðru lykilorði. +password_pwned_err=Gat ekki klárað beiðni til HaveIBeenPwned + +[mail] +view_it_on=Skoða þetta á %s +link_not_working_do_paste=Virkar ekki? Prófaðu að afrita og líma slóðina í vafrann þinn. +hi_user_x=Halló, %s, + +activate_account=Vinsamlegast virkjaðu aðganginn þinn +activate_account.title=%s, vinsamlegast virkjaðu aðganginn þinn +activate_account.text_1=Halló, %[1]s, takk fyrir að nýskrá á %[2]s! +activate_account.text_2=Vinsamlegast smelltu á eftirfarandi tengil til að virkja reikninginn þinn innan %s: + +activate_email=Staðfestu netfangið þitt +activate_email.title=%s, vinsamlegast staðfestu netfangið þitt +activate_email.text=Vinsamlegast smelltu á eftirfarandi tengil til að staðfesta netfangið þitt innan %s: + +register_notify=Velkomin(n) í Gitea +register_notify.title=%[1]s, velkomin(n) í %[2]s +register_notify.text_1=þetta er staðfestingarpóstur þinn fyrir skráningu á %s! +register_notify.text_2=Þú getur nú skráð þig inn með notandanafni: %s. +register_notify.text_3=Ef þessi reikningur hefur verið búinn til fyrir þig, vinsamlegast stilltu lykilorðið þitt fyrst. + +reset_password=Endurheimta reikning þinn +reset_password.title=%s, þú hefur beðið um að endurheimta reikninginn þinn +reset_password.text=Vinsamlegast smelltu á eftirfarandi tengil til að endurheimta reikninginn þinn innan %s: + +register_success=Nýskráning tókst + +issue_assigned.pull=@%[1]s úthlutaði þér að sameiningarbeiðni %[2]s í hugbúnaðarsafni %[3]s. +issue_assigned.issue=@%[1]s úthlutaði þér að vandamáli %[2]s í hugbúnaðarsafni %[3]s. + +issue.x_mentioned_you=@%s minntist á þig: +issue.action.push_1=@%[1]s bætti við %[3]d framlag í %[2]s +issue.action.push_n=@%[1]s bætti við %[3]d framlög í %[2]s +issue.action.close=@%[1]s lokaði #%[2]d. +issue.action.reopen=@%[1]s enduropnaði #%[2]d. +issue.action.merge=@%[1]s sameinaði #%[2]d inni í %[3]s. +issue.action.approve=@%[1]s samþykkti þessa sameiningarbeiðni. +issue.action.reject=@%[1]s óskaði eftir breytingum á þessa sameiningarbeiðni. +issue.action.review=@%[1]s gerði ummæli á þessa sameiningarbeiðni. +issue.action.new=@%[1]s skapaði #%[2]d. 
+issue.in_tree_path=Í %s: + +release.new.subject=%s í %s útgefið +release.new.text=@%[1]s gaf út %[2]s í %[3]s +release.title=Heiti: %s +release.note=Athugasemd: +release.downloads=Niðurhöl: +release.download.zip=Frumkóði (ZIP) +release.download.targz=Frumkóði (TAR.GZ) + +repo.transfer.subject_to=%s langar að flytja „%s“ til %s +repo.transfer.subject_to_you=%s langar að flytja „%s“ til þín +repo.transfer.to_you=þig +repo.transfer.body=Til að samþykkja eða hafna því skaltu fara á %s eða hunsa það bara. + +repo.collaborator.added.subject=%s bætti þér við í %s +repo.collaborator.added.text=Þér hefur verið bætt við sem aðila hugbúnaðarsafns: + +[modal] +yes=Já +no=Nei +modify=Uppfæra + +[form] +UserName=Notandanafn +RepoName=Heiti Hugbúnaðarsafns +Email=Netfang +Password=Lykilorð +Retype=Endurtaktu Lykilorðið +HttpsUrl=HTTPS vefslóð +TeamName=Liðsheiti +AdminEmail=Netfang stjórnanda + +NewBranchName=Heiti nýjar greinar +CommitSummary=Framlagsútdráttur +CommitMessage=Framlagsskilaboð +CommitChoice=Framlagsval +TreeName=Skráarslóð +Content=Innihald + +SSPISeparatorReplacement=Aðgreinir +SSPIDefaultLanguage=Sjálfgefið Tungumál + +require_error=` Getur ekki verið tómt.` +alpha_dash_dot_error=` ætti aðeins að innihalda tölustafi, strik ('-'), undirstrik ('_') og punkta ('.').` +size_error=` þarf að vera stærð %s.` +email_error=` er ekki gilt netfang.` +url_error=` er ekki gild vefslóð.` +unknown_error=Óþekkt villa: +captcha_incorrect=CAPTCHA kóðinn er rangur. +password_not_match=Lykilorðin passa ekki saman. +lang_select_error=Veldu tungumál af listanum. + +username_been_taken=Notandanafnið er þegar í notkun. +username_change_not_local_user=Notendum utan staðarins er ekki heimilt að breyta notendanafni sínu. +repo_name_been_taken=Hugbúnaðarsafnsheiti er þegar notað. +org_name_been_taken=Stofnunarinnarheiti er þegar tekið. +team_name_been_taken=Liðsheiti er þegar í notkun. +email_been_used=Netfangið er þegar í notkun. +email_invalid=Netfang ógilt. +openid_been_used=OpenID vistfangið „%s“ er þegar notað. +username_password_incorrect=Notandanafn eða lykilorð er rangt. +password_complexity=Lykilorðið er ekki nógu flókið: +password_lowercase_one=Að minnsta kosti einn lágstafur +password_uppercase_one=Að minnsta kosti einn hástafur +password_digit_one=Að minnsta kosti einn tölustafur +password_special_one=Að minnsta kosti einn sérstafur (greinarmerki, sviga, gæsalappir, o. s. frv.) +enterred_invalid_repo_name=Hugbúnaðarsafnsheitið sem þú slóst inn er rangt. +enterred_invalid_org_name=Stofnunarinnarheitið sem þú slóst inn er rangt. +user_not_exist=Notandinn er ekki til. +team_not_exist=Liðið er ekki til. + + +org_still_own_repo=Þessi stofnun á enn eina eða fleiri hugbúnaðarsöfn; eyddu þeim eða flyttu þær fyrst. + + +[user] +change_avatar=Breyttu notandamyndinni þinni… +join_on=Gerðist meðlimi +repositories=Hugbúnaðarsöfn +activity=Opinber Virkni +followers=Fylgjendur +starred=Hugbúnaðarsöfn í Eftirlæti +watched=Hugbúnaðarsöfn í Áhorfi +projects=Verkefni +following=Fylgir +follow=Fylgja +unfollow=Affylgja +heatmap.loading=Hleð Hitakorti… +user_bio=Lífssaga +disabled_public_activity=Þessi notandi hefur slökkt á opinberum sýnileika virkninnar. + +form.name_reserved=Notandanafnið „%s“ er frátekið. +form.name_pattern_not_allowed=Mynstrið „%s“ er ekki leyft í notandanafni. +form.name_chars_not_allowed=Notandanafnið „%s“ inniheldur ógilda stafi. 
+ +[settings] +profile=Notandasíða +account=Reikningur +appearance=Útlit +password=Lykilorð +security=Öryggi +avatar=Notandamynd +ssh_gpg_keys=SSH og GPG Lyklar +social=Félagsreikningar +applications=Forrit +orgs=Stjórna Stofnunum +repos=Hugbúnaðarsöfn +delete=Eyða Reikningi +twofa=Tvíþætt Auðkenning +account_link=Tengdir Reikningar +organization=Stofnanir +uid=Notandaauðkenni +webauthn=Öryggislyklar + +public_profile=Opinber Notandasíða +biography_placeholder=Segðu okkur svolítið um þig +profile_desc=Netfangið þitt verður notað fyrir tilkynningar og aðrar aðgerðir. +password_username_disabled=Notendum utan staðarins er ekki heimilt að breyta notendanafni sínu. Vinsamlegast hafðu samband við síðustjórann þinn til að fá frekari upplýsingar. +full_name=Fullt Nafn +website=Vefsíða +location=Staðsetning +update_theme=Uppfæra Þemu +update_profile=Uppfæra Notandasíðu +update_language=Uppfæra Tungumál +update_language_not_found=Tungumálið „%s“ er ekki í boði. +update_language_success=Tungumálið hefur verið uppfært. +update_profile_success=Notandasíða þín hefur verið uppfærð. +change_username=Notandanafninu þínu hefur verið breytt. +change_username_prompt=Athugaðu: breytingar á notendanafni breyta einnig vefslóð reikningsins þíns. +change_username_redirect_prompt=Gamla notendanafnið mun áframsenda á meðan það er í boði. +continue=Halda áfram +cancel=Hætta við +language=Tungumál +ui=Þema +comment_type_group_reference=Tilvísun +comment_type_group_label=Lýsing +comment_type_group_milestone=Tímamót +comment_type_group_assignee=Úthlutað að +comment_type_group_title=Heiti +comment_type_group_branch=Grein +comment_type_group_time_tracking=Tímamæling +comment_type_group_deadline=Frestur +comment_type_group_pull_request_push=Bætti við framlögum +comment_type_group_project=Verkefni +privacy=Friðhelgi +keep_activity_private=Fela virkni frá notandasíðu + +lookup_avatar_by_mail=Leita Efitr Notandamynd Með Netfangi +enable_custom_avatar=Nota Sérsniða Notandamynd +choose_new_avatar=Veldu nýja notandamynd +update_avatar=Uppfæra Notandamynd +delete_current_avatar=Eyða Núverandi Notandamynd +uploaded_avatar_not_a_image=Skráin sem hlaðin var upp er ekki mynd. +uploaded_avatar_is_too_big=Skráin sem hlaðin var upp er yfir hámarksstærð. +update_avatar_success=Notandamynd þín hefur verið uppfærð. +update_user_avatar_success=Notandamynd þessara notanda hefur verið uppfærð. + +change_password=Uppfæra Lykilorð +old_password=Núverandi Lykilorð +new_password=Nýtt Lykilorð +retype_new_password=Endurtaktu Nýja Lykilorðið +password_incorrect=Núverandi lykilorðið er rangt. + +emails=Netföng +manage_emails=Stjórna Netföngum +email_desc=Aðal netfangið þitt verður notað fyrir tilkynningar og aðrar aðgerðir. +primary=Aðal +requires_activation=Krefst virkjunar +primary_email=Gerа Аðal +activate_email=Senda Virkjun +activations_pending=Virkjanir í Bið +delete_email=Fjarlægja +email_deletion=Fjarlægja Netfang +email_deletion_desc=Netfangið og tengdar upplýsingar verða fjarlægðar af reikningnum þínum. Git framlög með þessu netfangi verða óbreyttar. Halda áfram? +email_deletion_success=Netfangið hefur verið fjarlægt. +theme_update_success=Þeman þín var uppfærð. +theme_update_error=Valin þema er ekki til. +openid_deletion=Fjarlægja OpenID Netfang +add_new_email=Bæta við Nýju Netfangi +add_email=Bæta við Netfangi +add_email_confirmation_sent=Staðfestingarpóstur hefur verið sendur á „%s“. Vinsamlegast athugaðu pósthólfið þitt innan næstu %s til að staðfesta netfangið þitt. +add_email_success=Nýja netfangið hefur verið bætt við. 
+email_preference_set_success=Val á tölvupósti uppfært. +keep_email_private=Fela Netfang +keep_email_private_popup=Netfangið þitt verður falið öðrum notendum. + +manage_ssh_keys=Stjórna SSH Lyklum +manage_gpg_keys=Stjórna GPG Lyklum +add_key=Bæta við lykli +ssh_helper=Vantar þér aðstoð? Skoðaðu leiðbeiningarnar frá GitHub um að skapa þína eigin SSH lykla eða um að laga algeng vandamál þú getur rekist á við þegar þú ert að vinna með SSH. +gpg_helper=Vantar þér aðstoð? Skoðaðu leiðbeiningarnar frá GitHub um GPG. +add_new_key=Bæta við SSH lykli +key_content_ssh_placeholder=Byrjar með 'ssh-ed25519', 'ssh-rsa', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', 'ecdsa-sha2-nistp521', 'sk-ecdsa-sha2-nistp256@openssh.com' eða 'sk-ssh-ed25519@openssh.com' +gpg_key_verify=Staðfesta +gpg_token=Táknlykill +ssh_key_verified=Staðfestur Lykill +ssh_key_verify=Staðfesta +ssh_token=Táknlykill +key_signature_ssh_placeholder=Byrjar með „-----BEGIN SSH SIGNATURE-----“ +verify_ssh_key_success=SSH lykill „%s“ hefur verið staðfestur. +key_id=Lykilauðkenni +key_content=Innihald +principal_content=Innihald +add_key_success=SSH lyklinum „%s“ hefur verið bætt við. +add_gpg_key_success=GPG lyklinum „%s“ hefur verið bætt við. +delete_key=Fjarlægja +ssh_key_deletion=Fjarlæga SSH Lykil +gpg_key_deletion=Fjarlæga GPG Lykil +ssh_key_deletion_success=SSH lykillinn hefur verið fjarlægður. +gpg_key_deletion_success=GPG lykillinn hefur verið fjarlægður. +add_on=Bætt við +valid_until=Gildur til +valid_forever=Gildur að eilífu +last_used=Síðast notað +can_read_info=Lesa +can_write_info=Skrifa +show_openid=Sýna á notandasíðu +hide_openid=Fela frá notandasíðu +ssh_disabled=SSH Óvirkt +unbind=Aftengja + +token_name=Táknlykills Heiti +generate_token=Mynda Táknlykil +delete_token=Eyða +access_token_deletion=Eyða Aðgangslykli + +create_oauth2_application_button=Skapa Forrit +update_oauth2_application_success=Þú hefur uppfært OAuth2 forritið. +oauth2_application_name=Forritsheiti +oauth2_select_type=Hvaða forritsgerð passar? +oauth2_type_web=Net (t.d. Node.JS, Tomcat, Go) +oauth2_type_native=Á kerfi (t.d. síma, tölvu, vafra) +oauth2_redirect_uri=Áframsendingar Vefslóð +save_application=Vista +oauth2_client_id=Auðkenni Notanda +oauth2_client_secret=Leyndarmál Notanda +oauth2_application_edit=Breyta + +authorized_oauth2_applications_description=Þú hefur veitt þessum forritum aðgang að þínum Gitea reikningi. Vinsamlegast afturkallaðu aðgang fyrir forrit sem ekki er lengur þörf á. +revoke_key=Afturkalla +revoke_oauth2_grant=Afturkalla Aðgang + +twofa_disable=Óvirkja Tveggja-Þátta Auðkenningu +twofa_scratch_token_regenerate=Endurgera Skrapkóða +or_enter_secret=Eða sláðu inn leyndarmálið: %s + +webauthn_nickname=Gælunafn + + + + +email_notifications.enable=Virkja Tölvupósttilkynningar +email_notifications.onmention=Aðeins Tölvupóst Þegar Minnst Er á Mig +email_notifications.disable=Óvirkja Tölvupósttilkynningar +email_notifications.submit=Stilla Val á Tölvupósti + +visibility=Sýnileiki notanda +visibility.public=Opinbert +visibility.private=Einka + +[repo] +new_repo_helper=Hugbúnaðarsafn inniheldur allar verkefnaskrár, þar á meðal útgáfuferil. Ertu nú þegar með það annars staðar? Flytja hugbúnaðarsafn +owner=Eigandi +template=Sniðmát +visibility=Sýnileiki +visibility_helper=Gera Hugbúnaðarsafn Einka +visibility_fork_helper=(Að breyta þessu mun hafa áhrif á allar skiptingar.) +clone_helper=Þarftu hjálp við afritun? Fáðu aðstoð. 
+fork_repo=Tvískipta Hugbúnaðarsafni +download_zip=Sækja ZIP +generate_repo=Mynda Hugbúnaðarsafn +repo_desc=Lýsing +repo_desc_helper=Sláðu inn stutta lýsingu (valfrjálst) +repo_lang=Tungumál +repo_gitignore_helper=Velja .gitignore sniðmát. +repo_gitignore_helper_desc=Veldu hvaða skrár á ekki að rekja af lista sniðmáta fyrir algeng tungumál. Dæmagert rusl sem myndast af byggingarverkfærum hvers tungumáls er sjálfgefið í .gitignore. +issue_labels=Vandamálslýsingar +issue_labels_helper=Veldu vandamálslýsingarsett. +license=Hugbúnaðarleyfi +license_helper=Veldu hugbúnaðarleyfisskrá. +license_helper_desc=Hugbúnaðarleyfi stjórnar því hvað aðrir mega og mega ekki gera við frumkóðann þinn. Ertu ekki viss um hvað er rétt fyrir verkefnið þitt? Sjáðu Veldu leyfi. +readme=LESTUMIG +readme_helper=Veldu LESTUMIG skráarsniðmát. +auto_init=Frumstilla Hugbúnaðarsafn. Bætir við skránum: .gitignore, License og README +create_repo=Skapa Hugbúnaðarsafn +default_branch=Sjálfgefin Grein +mirror_prune=Snyrta +mirror_address=Afrita Frá Vefslóð +mirror_address_protocol_invalid=Uppgefin vefslóð er ógild. Aðeins http(s):// eða git:// staðsetningar er hægt að spegla frá. +mirror_lfs=Stór Skráargeymsla (LFS) +mirror_lfs_endpoint=LFS Endapunktur +mirror_last_synced=Síðast Samstillt +mirror_password_placeholder=(Óbreytt) +mirror_password_blank_placeholder=(Óstillt) +mirror_password_help=Breyttu notandanafninu til að eyða vistuðu lykilorði. +watchers=Fylgjendur +forks=Skiptingar +reactions_more=og %d fleiri +language_other=Annað +delete_preexisting_label=Eyða +delete_preexisting_content=Eyða skrám í %s + + +desc.private=Einka +desc.public=Opinbert +desc.private_template=Einka sniðmát +desc.public_template=Sniðmát +desc.internal=Innra +desc.internal_template=Innra sniðmát +desc.archived=Safnvistað + +template.git_content=Git Innihald (Sjálfgefin Grein) +template.git_hooks=Git krókar +template.webhooks=Vefkrókar +template.topics=Viðfangsefni +template.avatar=Auðkennismynd +template.issue_labels=Vandamálslýsingar + + + +migrate_options_lfs=Flytja LFS skrár +migrate_options_lfs_endpoint.label=LFS Endapunktur +migrate_items_wiki=Handbók +migrate_items_milestones=Tímamót +migrate_items_labels=Skýringar +migrate_items_issues=Vandamál +migrate_items_pullrequests=Sameiningarbeiðnir +migrate_items_merge_requests=Sameiningarbeiðnir +migrate_items_releases=Útgáfur +migrate_repo=Flytja Hugbúnaðarsafn +migrate.migrate=Flytja Frá %s +migrate.migrating_failed.error=Villa: %s +migrate.git.description=Flytja hugbúnaðarsafn aðeins frá Git þjónustu. 
+migrate.migrating_labels=Að færa Lýsingar + +mirror_from=speglun af +forked_from=tvískipt frá +generated_from=myndað frá +unwatch=Hætta að fylgjast með +watch=Fylgjast með +unstar=Fjarlægja eftirlæti +star=Bæta við eftirlæti +fork=Tvískipta +download_archive=Hlaða Miður Geymslu + +no_desc=Engin Lýsing +quick_guide=Stuttar Leiðbeiningar +clone_this_repo=Afrita þetta hugbúnaðarsafn +create_new_repo_command=Að búa til nýja geymslu með skipanalínu +push_exist_repo=Að senda inn núverandi geymslu með skipanalínu + +code=Kóði +branch=Grein +tree=Tré +find_tag=Finna merki +branches=Greinar +tags=Merki +issues=Vandamál +pulls=Sameiningarbeiðnir +project_board=Verkefni +labels=Skýringar + +milestones=Tímamót +commits=Framlög +commit=Framlag +release=Útgáfa +releases=Útgáfur +tag=Merki +file.title=%s í %s +file_raw=Hrátt +file_history=Saga +file_view_source=Skoða Frumkóða +file_view_rendered=Skoða Unnið + +file_copy_permalink=Afrita Varanlega Slóð +stored_lfs=Geymt með Git LFS +commit_graph.hide_pr_refs=Fela Sameiningarbeiðnir +commit_graph.monochrome=Einlitað +commit_graph.color=Litað +blame=Ásaka +download_file=Sækja skrá +line=lína +lines=línur + +editor.new_file=Ný Skrá +editor.upload_file=Uppfæra Skrá +editor.edit_file=Breyta Skrá +editor.preview_changes=Forskoða Breytingar +editor.edit_this_file=Breyta Skrá +editor.this_file_locked=Skrá er læst +editor.delete_this_file=Eyða Skrá +editor.name_your_file=Nefndu skrána þína… +editor.or=eða +editor.cancel_lower=Hætta við +editor.add_tmpl=Bæta við „“ +editor.add=Bæta við „%s“ +editor.update=Uppfæra „%s“ +editor.delete=Eyða „%s“ +editor.create_new_branch=Búðu til nýja grein og sameiningarbeiðni fyrir þetta framlag. +editor.create_new_branch_np=Búðu til nýja grein fyrir þetta framlag. +editor.new_branch_name_desc=Heiti nýjar greinar… +editor.cancel=Hætta við +editor.fail_to_update_file=Skrá „%s“ mistókst að skapa eða uppfæra. +editor.fail_to_update_file_summary=Villuskilaboð: + +commits.commits=Framlög +commits.find=Leita +commits.author=Höfundur +commits.message=Skilaboð +commits.date=Dagsetning +commits.older=Eldri +commits.newer=Nýrri + + + +projects=Verkefni +projects.description=Lýsing (valfrjálst) +projects.description_placeholder=Lýsing +projects.create=Stofna Verkefni +projects.title=Heiti +projects.new=Nýtt verkefni +projects.deletion=Eyða Verkefni +projects.deletion_success=Verkefninu hefur verið eytt. +projects.edit=Breyta Verkefnum +projects.modify=Uppfæra Verkefni +projects.type.none=Ekkert +projects.type.uncategorized=Óflokkuð +projects.board.new_submit=Staðfesta +projects.board.set_default=Stilla Sjálfgildi +projects.board.color=Litur +projects.open=Opna +projects.close=Loka +projects.board.assigned_to=Úthlutað til + +issues.filter_projects=Sía Verkefni +issues.filter_labels=Sía Lýsingar +issues.new=Nýtt Vandamál +issues.new.labels=Lýsingar +issues.new.no_label=Engin Lýsing +issues.new.clear_labels=Tæma lýsingar +issues.new.projects=Verkefni +issues.new.clear_projects=Tæma Verkefnum +issues.new.no_projects=Ekkert verkefni +issues.new.open_projects=Opin Verkefni +issues.new.closed_projects=Lokuð Verkefni +issues.new.milestone=Tímamót +issues.choose.get_started=Hefjast Handa +issues.choose.blank=Sjálfgefið +issues.no_ref=Engin Grein eða Merki Tilgreint +issues.create=Skapa Vandamálsumræðu +issues.new_label=Ný Lýsing +issues.new_label_placeholder=Lýsingarheiti +issues.new_label_desc_placeholder=Lýsing +issues.create_label=Skapa Lýsingu +issues.label_templates.info=Engin merki eru til ennþá. 
Búðu til merki með „Nýtt merki“ eða notaðu fyrirfram skilgreint merkisett: +issues.open_tab=%d Opin +issues.close_tab=%d Lokuð +issues.filter_label=Lýsing +issues.filter_label_no_select=Allar lýsingar +issues.filter_milestone=Tímamót +issues.filter_milestone_no_select=Öll tímamót +issues.filter_type=Tegund +issues.filter_type.all_issues=Öll vandamál +issues.filter_type.assigned_to_you=Úthlutuð til þín +issues.filter_type.created_by_you=Búin til af þér +issues.filter_type.mentioning_you=Minnast á þig +issues.filter_sort=Raða +issues.filter_sort.latest=Nýjustu +issues.filter_sort.oldest=Elstu +issues.filter_sort.recentupdate=Nýlega uppfærð +issues.filter_sort.leastupdate=Síðast uppfærð +issues.filter_sort.mostcomment=Flest ummæli +issues.filter_sort.leastcomment=Fæst ummæli +issues.filter_sort.mostforks=Flestar skiptingar +issues.filter_sort.fewestforks=Fæstar skiptingar +issues.action_open=Opna +issues.action_close=Loka +issues.action_label=Lýsing +issues.action_milestone=Tímamót +issues.action_milestone_no_select=Ekkert tímamót +issues.opened_by=opnað %[1]s af %[3]s +issues.opened_by_fake=opnað %[1] af %[2]s +issues.previous=Fyrri +issues.next=Áfram +issues.open_title=Opið +issues.closed_title=Lokað +issues.num_comments=%d ummæli +issues.commented_at=`gerði ummæli %s` +issues.context.edit=Breyta +issues.context.delete=Eyða +issues.close_issue=Loka +issues.manually_pull_merged_at=`sameinaði framlag %[2]s inni í %[3]s handvirkt %[4]s` +issues.close_comment_issue=Senda ummæli og Loka +issues.reopen_issue=Enduropna +issues.reopen_comment_issue=Senda ummæli og Enduropna +issues.create_comment=Senda Ummæli +issues.closed_at=`lokaði þessu vandamáli %[2]s` +issues.reopened_at=`enduropnaði þetta vandamál %[2]s` +issues.ref_reopened_from=`enduropnaði þetta vandamál %[4]s %[2]s` +issues.owner=Eigandi +issues.edit=Breyta +issues.cancel=Hætta við +issues.save=Vista +issues.label_title=Lýsingarheiti +issues.label_description=Lýsingarskýring +issues.label_color=Lýsingarlitur +issues.label_count=%d lýsingar +issues.label_open_issues=%d opin vandamál +issues.label_edit=Breyta +issues.label_delete=Eyða +issues.label_modify=Breytta Lýsingu +issues.label_deletion=Eyða Lýsingu +issues.label_deletion_desc=Ef lýsing er eytt er hún fjarlægð af öllum vandamálum. Halda áfram? +issues.label_deletion_success=Þessi lýsing hefur verið eytt. +issues.label.filter_sort.alphabetically=Stafrófsröð +issues.label.filter_sort.by_size=Minnsta stærð +issues.label.filter_sort.reverse_by_size=Stærsta stærð +issues.subscribe=Gerast áskrifandi +issues.unsubscribe=Afturkalla áskrift +issues.lock=Læsa umræðu +issues.unlock=Aflæsa umræðu +issues.unlock_comment=aflæsti þessa umræðu %s +issues.lock_confirm=Læsa +issues.unlock_confirm=Aflæsa +issues.delete=Eyða +issues.start_tracking_short=Ræsa Tímamælir +issues.add_time=Bæta Bið Tíma Handvirkt +issues.add_time_short=Bæta Bið Tíma +issues.add_time_cancel=Hætta við +issues.add_time_history=`bætti við eyddum tíma %s` +issues.del_time_history=`fjarlægði eyddum tíma %s` +issues.add_time_hours=Klukkutímar +issues.add_time_minutes=Mínútur +issues.add_time_sum_to_small=Enginn tími var sleginn inn. 
+issues.time_spent_total=Heildartíma Eytt +issues.time_spent_from_all_authors=`Heildartíma Eytt: %s` +issues.due_date=Eindagi +issues.push_commit_1=bætti við %d framlag %s +issues.push_commits_n=bætti við %d framlög %s +issues.due_date_form=áááá-mm-dd +issues.due_date_form_add=Bæta við eindaga +issues.due_date_form_edit=Breyta +issues.due_date_form_remove=Fjarlægja +issues.due_date_not_set=Enginn eindagi settur. +issues.due_date_added=bætti við eindagi %s %s +issues.due_date_remove=fjarlægði eindagi %s %s +issues.due_date_overdue=Gjaldfallið +issues.dependency.title=Kröfur +issues.dependency.issue_no_dependencies=Engar kröfur innsettar. +issues.dependency.pr_no_dependencies=Engar kröfur innsettar. +issues.dependency.add=Bæta við kröfu… +issues.dependency.cancel=Hætta við +issues.dependency.remove=Fjarlægja +issues.dependency.remove_info=Fjarlægja þessa kröfu +issues.dependency.added_dependency=`bætti við nýja kröfu %s` +issues.dependency.removed_dependency=`fjarlægði kröfu %s` +issues.dependency.blocked_by_short=Krefur +issues.dependency.remove_header=Fjarlægja Kröfu +issues.dependency.add_error_dep_not_exist=Krafa er ekki til. +issues.dependency.add_error_dep_exists=Krafa er nú þegar til. +issues.review.approve=samþykkti þessar breytingar %s +issues.review.comment=fór yfir %s +issues.review.dismissed_label=Hunsað +issues.review.left_comment=gerði ummæli +issues.review.pending=Í bið +issues.review.outdated=Úrelt +issues.review.show_outdated=Sýna úrelt +issues.review.hide_outdated=Fela úreld +issues.reference_issue.body=Meginmál +issues.content_history.deleted=eytt +issues.content_history.edited=breytt +issues.content_history.created=skapað +issues.content_history.delete_from_history=Eyða úr ferlinum +issues.content_history.delete_from_history_confirm=Eyða úr ferlinum? +issues.content_history.options=Valkostir + +compare.compare_base=grunnur +compare.compare_head=bera saman + +pulls.new=Ný Sameiningarbeiðni +pulls.view=Skoða Sameiningarbeiðni +pulls.compare_changes=Ný Sameiningarbeiðni +pulls.create=Skapa Sameiningarbeiðni +pulls.title_desc=vill sameina %[1]d framlög frá %[2]s í %[3]s +pulls.tab_conversation=Umræða +pulls.tab_commits=Framlög +pulls.tab_files=Skráum Breytt +pulls.merged=Sameinað +pulls.manually_merged=Sameinað handvirkt +pulls.is_closed=Sameiningarbeiðnin hefur verið lokuð. +pulls.blocked_by_approvals=Þessi Sameiningarbeiðni hefur ekki nóg samþykki ennþá. %d/%d samþykki. +pulls.num_conflicting_files_1=%d skrá í átökum +pulls.num_conflicting_files_n=%d skrár í átökum +pulls.approve_count_1=%d samþykki +pulls.approve_count_n=%d samþykki +pulls.reject_count_1=%d breytingarbeiðni +pulls.reject_count_n=%d breytingarbeiðnir +pulls.waiting_count_1=%d bíður endurskoðunar +pulls.waiting_count_n=%d bíða endurskoðunar + +pulls.merge_manually=Sameinað handvirkt +; %[2]s
%[3]s
+pulls.status_checks_requested=Nauðsynlegt +pulls.status_checks_details=Nánar + +milestones.new=Nýtt tímamót +milestones.open_tab=%d Opin +milestones.close_tab=%d Lokuð +milestones.closed=Lokaði %s +milestones.update_ago=Uppfært fyrir %s +milestones.no_due_date=Enginn eindagi +milestones.open=Opna +milestones.close=Loka +milestones.title=Heiti +milestones.desc=Lýsing +milestones.due_date=Eindagi (valfrjálst) +milestones.clear=Hreinsa +milestones.cancel=Hætta við +milestones.filter_sort.most_issues=Flest vandamál +milestones.filter_sort.least_issues=Fæst vandamál + + + +wiki=Handbók +wiki.welcome=Velkomin(n) í handbókina. +wiki.create_first_page=Skapa Fyrstu Síðu +wiki.page=Síða +wiki.new_page=Síða +wiki.save_page=Vista Síðu +wiki.last_commit_info=%s breytti þessari síðu %s +wiki.edit_page_button=Breyta +wiki.new_page_button=Ný Síða +wiki.back_to_wiki=Aftur að handbókssíðu +wiki.delete_page_button=Eyða Síðu +wiki.page_already_exists=Handbókssíða með sömu nafni er þegar til. +wiki.reserved_page=Handbókssíðuheiti „%s“ er frátekið. +wiki.pages=Síður +wiki.last_updated=Síðast uppfært: %s + +activity=Virkni +activity.period.filter_label=Tímabil: +activity.period.daily=1 dagur +activity.period.halfweekly=3 dagar +activity.period.weekly=1 vika +activity.period.monthly=1 mánuður +activity.period.quarterly=3 mánuðir +activity.period.semiyearly=6 mánuðir +activity.period.yearly=1 ár +activity.overview=Yfirlit +activity.opened_prs_count_1=Fyrirhugað Sameiningarbeiðni +activity.opened_prs_count_n=Fyrirhuguð Sameiningarbeiðnir +activity.title.user_1=%d notandi +activity.title.user_n=%d notandar +activity.title.prs_1=%d Sameiningarbeiðni +activity.title.prs_n=%d Sameiningarbeiðnir +activity.title.prs_opened_by=%s lagt til af %s +activity.merged_prs_label=Sameinað +activity.opened_prs_label=Tillaga +activity.closed_issues_count_1=Lokað Vandamál +activity.closed_issues_count_n=Lokuð Vandamál +activity.title.issues_1=%d Vandamál +activity.title.issues_n=%d Vandamál +activity.closed_issue_label=Lokað +activity.new_issues_count_1=Nýtt Vandamál +activity.new_issues_count_n=Ný Vandamál +activity.new_issue_label=Opnað +activity.unresolved_conv_label=Opið +activity.title.releases_1=%d Útgáfa +activity.title.releases_n=%d Útgáfur +activity.git_stats_author_1=%d höfundur +activity.git_stats_author_n=%d höfundar +activity.git_stats_commit_1=%d framlag +activity.git_stats_commit_n=%d framlög +activity.git_stats_on_default_branch=Á %s, +activity.git_stats_file_1=%d skrá +activity.git_stats_file_n=%d skrár +activity.git_stats_files_changed_1=hefur breyst +activity.git_stats_files_changed_n=hafa breyst +activity.git_stats_addition_1=%d viðbót +activity.git_stats_addition_n=%d viðbætur +activity.git_stats_and_deletions=og +activity.git_stats_deletion_1=%d eyðing +activity.git_stats_deletion_n=%d eyðingar + +search=Leita +search.code_no_results=Enginn samsvarandi frumkóði fannst eftur þínum leitarorðum. 
+ +settings=Stillingar +settings.options=Hugbúnaðarsafn +settings.collaboration.write=Skrifa +settings.collaboration.read=Lesa +settings.collaboration.owner=Eigandi +settings.collaboration.undefined=Óskilgreint +settings.hooks=Vefkrókar +settings.githooks=Git Krókar +settings.basic_settings=Grunnstillingar +settings.mirror_settings=Speglunarstillingar +settings.mirror_settings.mirrored_repository=Speglað hugbúnaðarsafn +settings.mirror_settings.direction=Stefna +settings.mirror_settings.direction.pull=Pull +settings.mirror_settings.direction.push=Push +settings.mirror_settings.last_update=Síðasta uppfærsla +settings.mirror_settings.push_mirror.remote_url=Vefslóð Git Fjarhugbúnaðarsafns +settings.email_notifications.enable=Virkja Tölvupósttilkynningar +settings.email_notifications.onmention=Aðeins Tölvupóst Þegar Minnst Er á Mig +settings.email_notifications.disable=Óvirkja Tölvupósttilkynningar +settings.email_notifications.submit=Stilla Val á Tölvupósti +settings.site=Vefsíða +settings.update_settings=Uppfæra Stillingar +settings.branches.update_default_branch=Uppfæra Sjálfgefna Grein +settings.wiki_desc=Virkja Handbók Hugbúnaðarsafns +settings.use_internal_wiki=Nota Innbyggða Handbók +settings.use_external_wiki=Nota Utanaðkomandi Handbók +settings.tracker_issue_style.numeric=Tölugildi +settings.danger_zone=Hættusvæði +settings.convert_notices_1=Þessi aðgerð mun breyta speglinum í venjulegt hugbúnaðarsafn og ekki er hægt að afturkalla hana. +settings.transfer=Flytja Eignarhald +settings.trust_model.collaboratorcommitter.desc=Gildar undirskriftir frá samstarfsaðilum hugbúnaðarsafnsins verða merktar „traust“ ef þær passa við framlagandan. Að öðrum kosti verða gildar undirskriftir merktar „ótraust“ ef undirskriftin passar við framlagandan og „ósamþykkt“ að öðru leyti. Þetta mun neyða Gitea til að vera merkt sem framlagandi á undirrituðum framlögum með raunverulega framlagandan merktan sem Co-Authored-By: og Co-Committed-By: í framlaginu. Sjálfgefinn Gitea lykill verður að passa við notanda í gagnagrunninum. +settings.wiki_delete_desc=Að eyða handbókargögn er varanlegt og ekki er hægt að afturkalla það. +settings.delete=Eyða Þetta Hugbúnaðarsafn +settings.delete_desc=Að eyða hugbúnaðarsafni er varanlegt og ekki er hægt að afturkalla það. +settings.delete_notices_fork_1=— Skiptingar þessara hugbúnaðarsafns verða sjálfstæðar eftir eyðingu. +settings.delete_collaborator=Fjarlægja +settings.teams=Lið +settings.add_webhook=Bæta við Vefkróki +settings.webhook.request=Beiðni +settings.webhook.headers=Hausar +settings.webhook.body=Meginmál +settings.update_githook=Uppfæra Krók +settings.slack_username=Notandanafn +settings.slack_icon_url=Táknmyndarvefslóð +settings.slack_color=Litur +settings.discord_username=Notandanafn +settings.discord_icon_url=Táknmyndarvefslóð +settings.event_delete=Eyða +settings.event_fork=Tvískipta +settings.event_release=Útgáfa +settings.event_repository=Hugbúnaðarsafn +settings.event_issues=Vandamál +settings.event_issues_desc=Vandamál opið, lokað, enduropnað eða breytt. +settings.event_issue_label=Vandamál Lýst +settings.event_issue_comment=Ummæli um Vandamál +settings.event_pull_request=Sameiningarbeiðni +settings.event_pull_request_desc=Sameiningarbeiðni opnuð, lokuð, enduropnuð eða breytt. 
+settings.active=Virkt +settings.update_webhook=Uppfæra Vefkrók +settings.slack_token=Táknlykill +settings.slack_channel=Rás +settings.web_hook_name_gitea=Gitea +settings.web_hook_name_discord=Discord +settings.web_hook_name_dingtalk=DingTalk +settings.web_hook_name_telegram=Telegram +settings.web_hook_name_msteams=Microsoft Teams +settings.web_hook_name_feishu=Feishu +settings.title=Heiti +settings.deploy_key_content=Innihald +settings.branches=Greinar +settings.edit_protected_branch=Breyta +settings.tags=Merki +settings.tags.protection.allowed.users=Leyfðir notendur +settings.tags.protection.allowed.teams=Leyfð lið +settings.tags.protection.allowed.noone=Enginn +settings.lfs=LFS +settings.lfs_locks=Lásar +settings.lfs_lock=Læsa +settings.lfs_pointers.oid=OID + +diff.commit=framlag +diff.git-notes=Athugasemd +diff.whitespace_button=Hvítbil +diff.whitespace_ignore_all_whitespace=Hunsa hvítbil þegar línur eru bornar saman +diff.stats_desc= %d breyttar skrár með %d viðbætur og %d eyðingar +diff.bin=Tvíundarkóði +diff.view_file=Skoða Skrá +diff.file_before=Fyrir +diff.file_after=Eftir +diff.file_image_width=Breidd +diff.file_image_height=Hæð +diff.file_byte_size=Stærð +diff.comment.add_single_comment=Bæta við eitt ummæli +diff.comment.add_review_comment=Leggja inn ummæli +diff.comment.reply=Svara +diff.review.comment=Senda Ummæli +diff.image.overlay=Yfirleggja + +release.releases=Útgáfur +release.tags=Merki +release.draft=Uppkast +release.compare=Bera saman +release.edit=breyta +release.tag_name=Merkisheiti +release.title=Heiti +release.content=Innihald +release.cancel=Hætta við +release.delete_tag=Eyða Merki +release.downloads=Niðurhöl + +branch.delete_head=Eyða +branch.tag_collision=Ekki er hægt að skapa grein »%s“ þar sem merki með sama nafni er þegar til í hugbúnaðarsafninu. + +tag.create_tag_operation=Skapa merki +tag.confirm_create_tag=Skapa merki + + +topic.done=Í lagi + + +[org] +repo_updated=Uppfært +people=Fólk +teams=Lið +lower_members=meðlimar +lower_repositories=hugbúnaðarsöfn +create_new_team=Nýtt Lið +org_desc=Lýsing +team_name=Liðsheiti +team_desc=Lýsing + + +settings=Stillingar +settings.full_name=Fullt nafn +settings.website=Vefsíða +settings.location=Staðsetning +settings.visibility=Sýnileiki +settings.visibility.public=Opinbert +settings.visibility.private_shortname=Einka + +settings.update_settings=Uppfæra Stillingar + + +members.private=Faldir +members.owner=Eigandi +members.member=Meðlimur +members.remove=Fjarlægja +members.leave=Yfirgefa +members.leave.detail=Viltu yfirgefa %s? +members.invite_desc=Bæta nýjum meðlimi við í %s: +members.invite_now=Bjóða Núna + +teams.join=Gerast meðlimur +teams.leave=Yfirgefa +teams.leave.detail=Viltu yfirgefa %s? +teams.can_create_org_repo=Skapa Hugbúnaðarsöfn +teams.none_access=Engin Aðgangur +teams.read_access=Lesa +teams.write_access=Skrifa +teams.settings=Stillingar +teams.update_settings=Uppfæra Stillingar +teams.all_repositories=Öll hugbúnaðarsöfn + +[admin] +repositories=Hugbúnaðarsöfn +config=Stilling +first_page=Byrjun +last_page=Síðasta +total=Samtals: %d + +dashboard.new_version_hint=Gitea %s er nú í boði en þú ert að keyra %s. Athugaðu bloggsíðuna fyrir frekari upplýsingar. 
+dashboard.statistic=Yfirlit +dashboard.statistic_info=Gagnasafn Gitea inniheldur %d notendur, %d stofnanir, %d dreifilykla, %d hugbúnaðarsöfn, %d áhorfir, %d eftirlæti, %d aðgerðir, %d aðganga, %d vandamál, %d ummæli, %d félagsreikningum, %d fylgjanir, %d speglanir, %d útgæfur, %d auðkenningarheimildir, %d vefkrókar, %d tímamót, %d skýringar, %d krókaverkefni, %d lið, %d uppfærsluaðgerðir, %d viðhengi. +dashboard.operation_switch=Skipta +dashboard.operation_run=Keyra +dashboard.update_mirrors=Uppfæra Speglanir +dashboard.server_uptime=Uppitími Netþjóns +dashboard.total_memory_allocated=Heildarminni úthlutað + +users.name=Notandanafn +users.full_name=Fullt Nafn +users.admin=Stjórnandi +users.2fa=Tvíþætt auðkenning +users.repos=Söfn +users.created=Búið til +users.edit=Breyta +users.local=Staðbundið +users.list_status_filter.menu_text=Sía +users.list_status_filter.reset=Endurstilla +users.list_status_filter.is_prohibit_login=Stöðva Innskráningu +users.list_status_filter.not_prohibit_login=Leyfa Innskráningu +users.list_status_filter.not_2fa_enabled=Tvíþætt Auðkenning Óvirk + +emails.primary=Aðal +emails.filter_sort.email=Tölvupóstur +emails.filter_sort.name=Notandanafn +emails.updated=Netfang uppfært + +orgs.name=Heiti +orgs.teams=Lið +orgs.members=Meðlimar + +repos.owner=Eigandi +repos.name=Heiti +repos.watches=Fylgist með +repos.stars=Eftirlæti +repos.forks=Skiptingar +repos.issues=Vandamál +repos.size=Stærð + +packages.total_size=Heildarstærð: %s +packages.name=Heiti +packages.version=Útgáfa +packages.type=Tegund +packages.repository=Hugbúnaðarsafn +packages.size=Stærð + +defaulthooks.desc=Vefkrókar senda sjálfkrafa HTTP POST beiðnir til netþjóns þegar ákveðnir Gitea atburðir koma af stað. Vefkrókar sem eru skilgreindir hér eru sjálfgefnir og verða afritaðir í allar nýjar geymslur. Frekari upplýsingar eru í handbókini. + + +auths.name=Heiti +auths.type=Tegund +auths.updated=Uppfært +auths.domain=Lén +auths.host=Hýsill +auths.port=Gátt +auths.search_page_size=Síðustærð +auths.smtphost=SMTP Hýsill +auths.smtpport=SMTP Gátt +auths.oauth2_icon_url=Táknmyndarvefslóð +auths.oauth2_profileURL=Notandasíðuslóð +auths.tips=Ábendingar +auths.tip.dropbox=Búðu til nýtt forrit á https://www.dropbox.com/developers/apps +auths.tip.yandex=Búðu til nýja umsókn á https://oauth.yandex.com/client/new. 
Veldu eftirfarandi heimildir úr „Yandex.Passport API“ kaflanum: "Aðgangur að netfangi", "Aðgangur að notandamynd" og "Aðgangur að notendanafni, fornafni og eftirnafni, kyni" + +config.app_name=Heiti Vefsvæðis +config.app_ver=Útgáfu Gitea +config.git_version=Útgáfa Git +config.repo_root_path=Grunnsslóð Hugbúnaðarsafns +config.lfs_root_path=LFS Grunnsslóð +config.log_file_root_path=Slóð Annáls + +config.ssh_port=Gátt +config.ssh_listen_port=Hlustunargátt +config.ssh_root_path=Grunnsslóð + +config.lfs_enabled=Virkt + +config.db_type=Tegund +config.db_host=Hýsill +config.db_name=Heiti +config.db_user=Notandanafn +config.db_schema=Uppdráttur +config.db_ssl_mode=SSL +config.db_path=Slóð + + + +config.mailer_name=Heiti +config.mailer_host=Hýsill +config.mailer_user=Notandi + + + +config.https_only=Aðeins HTTPS + + + + +monitor.name=Heiti +monitor.goroutines=%d Górútínur +monitor.desc=Lýsing +monitor.process.children=Börn +monitor.queues=Raðir +monitor.queue.name=Heiti +monitor.queue.type=Tegund + +monitor.queue.settings.submit=Uppfæra Stillingar +monitor.queue.settings.changed=Stillingar Uppfærðar +monitor.queue.settings.blocktimeout.value=%[1]v + + +notices.type=Tegund +notices.type_1=Hugbúnaðarsafn +notices.type_2=Verkefni +notices.desc=Lýsing + +[action] +create_issue=`opnaði vandamál %[3]s#%[2]s` +reopen_issue=`enduropnaði vandamál %[3]s#%[2]s` +reopen_pull_request=`enduropnaði sameiningarbeiðni %[3]s#%[2]s` +comment_issue=`gerði ummæli á vandamál %[3]s#%[2]s` +comment_pull=`gerði ummæli á sameiningarbeiðni %[3]s#%[2]s` +review_dismissed_reason=Ástæða: + +[tool] +ago=%s síðan +now=núna +future=í framtíð +1s=1 sekúnda +1m=1 mínúta +1h=1 klukkutími +1d=1 dagur +1w=1 vika +1mon=1 mánuður +1y=1 ár +seconds=%d sekúndur +minutes=%d mínútur +hours=%d klukkutímar +days=%d dagar +weeks=%d vikur +months=%d mánuðir +years=%d ár +raw_seconds=sekúndur +raw_minutes=mínútur + +[dropzone] + +[notification] +notifications=Tilkynningar +unread=Ólesnar +read=Lesnar + +[gpg] + +[units] + +[packages] +title=Pakkar +empty.documentation=Frekari upplýsingar um pakka skrána er hægt að finna hér. +filter.type=Tegund +filter.type.all=Allir +installation=Uppsetning +keywords=Stikkorð +details=Nánar +details.author=Höfundur +details.license=Hugbúnaðarleyfi +versions=Útgáfur +versions.on=á +versions.view_all=Sjá allar +dependency.id=Auðkenni +dependency.version=Útgáfa +composer.documentation=Frekari upplýsingar um Composer skrána er hægt að finna hér. +conan.details.repository=Hugbúnaðarsafn +conan.documentation=Frekari upplýsingar um Conan skrána er hægt að finna hér. +container.details.platform=Vettvangur +container.documentation=Frekari upplýsingar um Container skrána er hægt að finna hér. +container.labels=Lýsingar +container.labels.key=Lykill +container.labels.value=Gildi +generic.documentation=Frekari upplýsingar um almennu skrána er hægt að finna hér. +maven.documentation=Frekari upplýsingar um Maven skrána er hægt að finna hér. +nuget.documentation=Frekari upplýsingar um NuGet skrána er hægt að finna hér. +npm.documentation=Frekari upplýsingar um npm skrána er hægt að finna hér. +npm.details.tag=Merki +pypi.requires=Þarfnast Python +pypi.documentation=Frekari upplýsingar um PyPI skrána er hægt að finna hér. +rubygems.documentation=Frekari upplýsingar um RubyGems skrána er hægt að finna hér. 
+ diff --git a/options/locale/locale_it-IT.ini b/options/locale/locale_it-IT.ini index 948b83cf2a..c93606c6a9 100644 --- a/options/locale/locale_it-IT.ini +++ b/options/locale/locale_it-IT.ini @@ -83,6 +83,7 @@ step2=Passo 2: error404=La pagina che stai cercando di raggiungere non esiste oppure non sei autorizzato a visualizzarla. + [error] [startpage] @@ -1135,8 +1136,6 @@ issues.due_date_remove=rimossa la data di scadenza %s %s issues.due_date_overdue=Scaduto issues.due_date_invalid=La data di scadenza non è valida o fuori intervallo. Si prega di utilizzare il formato 'aaaa-mm-dd'. issues.dependency.title=Dipendenze -issues.dependency.issue_no_dependencies=Questo problema attualmente non ha alcuna dipendenza. -issues.dependency.pr_no_dependencies=Questo problema attualmente non ha alcuna dipendenza. issues.dependency.add=Aggiungi dipendenza… issues.dependency.cancel=Annulla issues.dependency.remove=Rimuovi @@ -1251,7 +1250,6 @@ pulls.update_branch_success=Brench aggiornato con successo pulls.update_not_allowed=Non sei abilitato ad aggiornare il branch pulls.outdated_with_base_branch=Questo brench non è aggiornato con il branch di base - milestones.new=Nuova Milestone milestones.open_tab=%d Aperti milestones.close_tab=%d Chiusi @@ -2036,6 +2034,7 @@ repos.issues=Problemi repos.size=Dimensione + systemhooks=Webhooks di Sistema systemhooks.add_webhook=Aggiungi Webhook di Sistema systemhooks.update_webhook=Aggiorna Webhook di Sistema @@ -2404,3 +2403,5 @@ error.probable_bad_default_signature=ATTENZIONE! Anche se la chiave predefinita error.no_unit_allowed_repo=Non possiedi il permesso di accedere ad alcuna sezione di questo repository. error.unit_not_allowed=Non possiedi il permesso di accedere a questa sezione di repository. +[packages] + diff --git a/options/locale/locale_ja-JP.ini b/options/locale/locale_ja-JP.ini index 91d1de1ca3..63a2d95ed0 100644 --- a/options/locale/locale_ja-JP.ini +++ b/options/locale/locale_ja-JP.ini @@ -105,6 +105,8 @@ error404=アクセスしようとしたページは存在しないGitHubでIssueを検索して、見つからなければ新しいIssueを作成してください。 @@ -282,6 +284,7 @@ register_helper_msg=既にアカウントをお持ちですか? 今すぐサ social_register_helper_msg=既にアカウントをお持ちですか? 今すぐ連携しましょう! disable_register_prompt=登録は無効になっています。 サイト管理者にお問い合わせください。 disable_register_mail=登録でのメール確認は無効になっています。 +manual_activation_only=アクティベーションを完了するにはサイト管理者に連絡してください。 remember_me=このデバイスで自動サインイン forgot_password_title=パスワードを忘れた forgot_password=パスワードをお忘れですか? 
@@ -487,7 +490,9 @@ auth_failed=認証に失敗しました: %v still_own_repo=あなたのアカウントは1つ以上のリポジトリを所有しています。 先にそれらを削除するか移転してください。 still_has_org=あなたのアカウントは1つ以上の組織に参加しています。 先にそれらから脱退してください。 +still_own_packages=あなたのアカウントは1つ以上のパッケージを所有しています。 先にそれらを削除してください。 org_still_own_repo=組織はまだ1つ以上のリポジトリを所有しています。 先にそれらを削除するか移転してください。 +org_still_own_packages=組織はまだ1つ以上のパッケージを所有しています。 先にそれらを削除してください。 target_branch_not_exist=ターゲットのブランチが存在していません。 @@ -661,7 +666,6 @@ ssh_invalid_token_signature=入力されたSSH 鍵、署名、トークンが合 ssh_token_required=以下のトークンの署名を入力する必要があります ssh_token=トークン ssh_token_help=署名はこの方法で生成できます: -ssh_token_code=echo -n "%s" | ssh-keygen -Y sign -n gitea -f /path_to_your_pubkey ssh_token_signature=Armor形式のSSH署名 key_signature_ssh_placeholder=先頭は '-----BEGIN SSH SIGNATURE-----' verify_ssh_key_success=SSH 鍵 '%s' を確認しました。 @@ -1037,6 +1041,7 @@ line_unicode=`この行には不可視Unicode文字があります` escape_control_characters=エスケープ unescape_control_characters=エスケープ解除 file_copy_permalink=パーマリンクをコピー +view_git_blame=Git Blameを表示 video_not_supported_in_browser=このブラウザはHTML5のvideoタグをサポートしていません。 audio_not_supported_in_browser=このブラウザーはHTML5のaudioタグをサポートしていません。 stored_lfs=Git LFSで保管されています @@ -1374,6 +1379,9 @@ issues.lock.reason=ロックする理由 issues.lock.title=このイシューの会話をロックします。 issues.unlock.title=このイシューの会話をアンロックします。 issues.comment_on_locked=ロックされているイシューにコメントはできません。 +issues.delete=削除 +issues.delete.title=このイシューを削除しますか? +issues.delete.text=本当にこのイシューを削除しますか? (これはすべてのコンテンツを完全に削除します。 保存しておきたい場合は、代わりにクローズすることを検討してください) issues.tracker=タイムトラッカー issues.start_tracking_short=タイマー 開始 issues.start_tracking=タイムトラッキングを開始 @@ -1414,8 +1422,8 @@ issues.due_date_remove=が期日 %s を削除 %s issues.due_date_overdue=期日は過ぎています issues.due_date_invalid=期日が正しくないか範囲を超えています。 'yyyy-mm-dd' の形式で入力してください。 issues.dependency.title=依存関係 -issues.dependency.issue_no_dependencies=このイシューに依存関係はありません。 -issues.dependency.pr_no_dependencies=このプルリクエストに依存関係はありません。 +issues.dependency.issue_no_dependencies=依存関係が設定されていません。 +issues.dependency.pr_no_dependencies=依存関係が設定されていません。 issues.dependency.add=依存関係を追加... 
issues.dependency.cancel=キャンセル issues.dependency.remove=削除 @@ -1480,6 +1488,9 @@ pulls.desc=プルリクエストとコードレビューの有効化。 pulls.new=新しいプルリクエスト pulls.view=プルリクエストを表示 pulls.compare_changes=新規プルリクエスト +pulls.allow_edits_from_maintainers=メンテナーからの編集を許可する +pulls.allow_edits_from_maintainers_desc=ベースブランチへの書き込みアクセス権を持つユーザーは、このブランチにプッシュすることもできます +pulls.allow_edits_from_maintainers_err=更新に失敗しました pulls.compare_changes_desc=マージ先ブランチとプル元ブランチを選択。 pulls.compare_base=マージ先 pulls.compare_compare=プル元 @@ -1576,7 +1587,6 @@ pulls.outdated_with_base_branch=このブランチはベースブランチに対 pulls.closed_at=`がプルリクエストをクローズ %[2]s` pulls.reopened_at=`がプルリクエストを再オープン %[2]s` pulls.merge_instruction_hint=`コマンドラインの手順も確認できます。` - pulls.merge_instruction_step1_desc=あなたのプロジェクトリポジトリで新しいブランチをチェックアウトし、変更内容をテストします。 pulls.merge_instruction_step2_desc=変更内容をマージして、Giteaに反映します。 @@ -1789,6 +1799,7 @@ settings.pulls.allow_manual_merge=プルリクエストを手動マージ済み settings.pulls.enable_autodetect_manual_merge=手動マージの自動検出を有効にする (注意: 特殊なケースでは判定ミスが発生する場合があります) settings.pulls.allow_rebase_update=リベースでプルリクエストのブランチの更新を可能にする settings.pulls.default_delete_branch_after_merge=デフォルトでプルリクエストのブランチをマージ後に削除する +settings.packages_desc=リポジトリパッケージレジストリを有効にする settings.projects_desc=リポジトリプロジェクトを有効にする settings.admin_settings=管理者用設定 settings.admin_enable_health_check=リポジトリのヘルスチェックを有効にする (git fsck) @@ -1946,6 +1957,8 @@ settings.event_pull_request_review=プルリクエストのレビュー settings.event_pull_request_review_desc=プルリクエストの承認・拒否、またはレビューコメントが付いたとき。 settings.event_pull_request_sync=プルリクエストの同期 settings.event_pull_request_sync_desc=プルリクエストが同期されたとき。 +settings.event_package=パッケージ +settings.event_package_desc=リポジトリにパッケージが作成または削除されたとき。 settings.branch_filter=ブランチ フィルター settings.branch_filter_desc=プッシュ、ブランチ作成、ブランチ削除のイベントを通知するブランチを、globパターンで指定するホワイトリストです。 空か*のときは、すべてのブランチのイベントを通知します。 文法については github.com/gobwas/glob を参照してください。 例: master{master,release*} settings.active=有効 @@ -2427,6 +2440,7 @@ dashboard.resync_all_hooks=すべてのリポジトリの pre-receive, update, p dashboard.reinit_missing_repos=レコードが存在するが見当たらないすべてのGitリポジトリを再初期化する dashboard.sync_external_users=外部ユーザーデータの同期 dashboard.cleanup_hook_task_table=hook_taskテーブルのクリーンアップ +dashboard.cleanup_packages=期限切れパッケージのクリーンアップ dashboard.server_uptime=サーバーの稼働時間 dashboard.current_goroutine=現在のGoroutine数 dashboard.current_memory_usage=現在のメモリ使用量 @@ -2459,6 +2473,7 @@ dashboard.gc_times=GC実行回数 dashboard.delete_old_actions=データベースから古い操作履歴をすべて削除 dashboard.delete_old_actions.started=データベースからの古い操作履歴の削除を開始しました。 dashboard.update_checker=更新チェック +dashboard.delete_old_system_notices=データベースから古いシステム通知をすべて削除 users.user_manage_panel=ユーザーアカウント管理 users.new_account=ユーザーアカウントを作成 @@ -2495,6 +2510,7 @@ users.update_profile=ユーザーアカウントを更新 users.delete_account=ユーザーアカウントを削除 users.still_own_repo=このユーザーはまだ1つ以上のリポジトリを所有しています。 先にそれらのリポジトリを削除するか移転してください。 users.still_has_org=このユーザーは組織のメンバーになっています。 先に組織からこのユーザーを削除してください。 +users.still_own_packages=このユーザーはまだ1つ以上のパッケージを所有しています。最初にそれらのパッケージを削除してください。 users.deletion_success=ユーザーアカウントを削除しました。 users.reset_2fa=2要素認証をリセット users.list_status_filter.menu_text=フィルター @@ -2541,6 +2557,17 @@ repos.forks=フォーク repos.issues=イシュー repos.size=サイズ +packages.package_manage_panel=パッケージ管理 +packages.total_size=合計サイズ: %s +packages.owner=オーナー +packages.creator=作成者 +packages.name=名前 +packages.version=バージョン +packages.type=タイプ +packages.repository=リポジトリ +packages.size=サイズ +packages.published=配布 + defaulthooks=デフォルトWebhook defaulthooks.desc=Webhookは、特定のGiteaイベントトリガーが発生した際に、自動的にHTTP POSTリクエストをサーバーへ送信するものです。 ここで定義されたWebhookはデフォルトとなり、全ての新規リポジトリにコピーされます。 詳しくはwebhooks guideをご覧下さい。 
defaulthooks.add_webhook=デフォルトWebhookの追加 @@ -2808,9 +2835,12 @@ monitor.next=次回 monitor.previous=前回 monitor.execute_times=実行回数 monitor.process=実行中のプロセス +monitor.stacktrace=スタックトレース +monitor.goroutines=%d 件のGoroutines monitor.desc=説明 monitor.start=開始日時 monitor.execute_time=実行時間 +monitor.last_execution_result=結果 monitor.process.cancel=処理をキャンセル monitor.process.cancel_desc=処理をキャンセルするとデータが失われる可能性があります monitor.process.cancel_notices=キャンセル: %s? @@ -2977,3 +3007,95 @@ unit=ユニット error.no_unit_allowed_repo=このリポジトリのどのセクションにもアクセスが許可されていません。 error.unit_not_allowed=このセクションへのアクセスが許可されていません。 +[packages] +title=パッケージ +desc=リポジトリ パッケージを管理します。 +empty=パッケージはまだありません。 +empty.documentation=パッケージレジストリの詳細については、 ドキュメント を参照してください。 +filter.type=タイプ +filter.type.all=すべて +filter.no_result=フィルタの結果、空になりました。 +filter.container.tagged=タグあり +filter.container.untagged=タグなし +published_by=%[1]sに%[3]sが配布 +published_by_in=%[1]sに%[3]s%[5]sで配布 +installation=インストール方法 +about=このパッケージについて +requirements=要求事項 +dependencies=依存関係 +keywords=キーワード +details=詳細 +details.author=著作者 +details.project_site=プロジェクトサイト +details.license=ライセンス +assets=アセット +versions=バージョン +versions.on=on +versions.view_all=すべて表示 +dependency.id=ID +dependency.version=バージョン +composer.registry=あなたの ~/.composer/config.json ファイルに、このレジストリをセットアップします: +composer.install=Composer を使用してパッケージをインストールするには、次のコマンドを実行します: +composer.documentation=Composer レジストリの詳細については、 ドキュメント を参照してください。 +composer.dependencies=依存関係 +composer.dependencies.development=開発用依存関係 +conan.details.repository=リポジトリ +conan.registry=このレジストリをコマンドラインからセットアップします: +conan.install=Conan を使用してパッケージをインストールするには、次のコマンドを実行します: +conan.documentation=Conan レジストリの詳細については、 ドキュメント を参照してください。 +container.details.type=イメージタイプ +container.details.platform=プラットフォーム +container.details.repository_site=リポジトリサイト +container.details.documentation_site=ドキュメントサイト +container.pull=コマンドラインでイメージを取得します: +container.documentation=Container レジストリの詳細については、 ドキュメント を参照してください。 +container.multi_arch=OS / アーキテクチャ +container.layers=イメージレイヤー +container.labels=ラベル +container.labels.key=キー +container.labels.value=値 +generic.download=コマンドラインでパッケージをダウンロードします: +generic.documentation=汎用 レジストリの詳細については、ドキュメント を参照してください。 +helm.registry=このレジストリをコマンドラインからセットアップします: +helm.install=パッケージをインストールするには、次のコマンドを実行します: +helm.documentation=Helm レジストリの詳細については、 ドキュメント を参照してください。 +maven.registry=あなたのプロジェクトの pom.xml ファイルに、このレジストリをセットアップします: +maven.install=パッケージを使用するため pom.xml ファイル内の dependencies ブロックに以下を含めます: +maven.install2=コマンドラインで実行します: +maven.download=依存関係をダウンロードするには、コマンドラインでこれを実行します: +maven.documentation=Mavenレジストリの詳細については、ドキュメント を参照してください。 +nuget.registry=このレジストリをコマンドラインからセットアップします: +nuget.install=NuGet を使用してパッケージをインストールするには、次のコマンドを実行します: +nuget.documentation=NuGetレジストリの詳細については、 ドキュメント を参照してください。 +nuget.dependency.framework=ターゲットフレームワーク +npm.registry=あなたのプロジェクトの .npmrc ファイルに、このレジストリをセットアップします: +npm.install=npm を使用してパッケージをインストールするには、次のコマンドを実行します: +npm.install2=または package.json ファイルに追加します: +npm.documentation=Npm レジストリの詳細については、 ドキュメント を参照してください。 +npm.dependencies=依存関係 +npm.dependencies.development=開発用依存関係 +npm.dependencies.peer=Peer依存関係 +npm.dependencies.optional=オプションの依存関係 +npm.details.tag=タグ +pypi.requires=必要なPython +pypi.install=pip を使用してパッケージをインストールするには、次のコマンドを実行します: +pypi.documentation=PyPI レジストリの詳細については、ドキュメント を参照してください。 +rubygems.install=gem を使用してパッケージをインストールするには、次のコマンドを実行します: +rubygems.install2=または Gemfile に追加します: +rubygems.dependencies.runtime=実行用依存関係 +rubygems.dependencies.development=開発用依存関係 +rubygems.required.ruby=必要なRubyバージョン +rubygems.required.rubygems=必要なRubyGemバージョン 
+rubygems.documentation=RubyGemsレジストリの詳細については、ドキュメント を参照してください。 +settings.link=このパッケージをリポジトリにリンク +settings.link.description=パッケージをリポジトリにリンクすると、リポジトリのパッケージリストに表示されるようになります。 +settings.link.select=リポジトリを選択 +settings.link.button=リポジトリのリンクを更新 +settings.link.success=リポジトリのリンクが正常に更新されました。 +settings.link.error=リポジトリのリンクの更新に失敗しました。 +settings.delete=パッケージ削除 +settings.delete.description=パッケージの削除は恒久的で元に戻すことはできません。 +settings.delete.notice=%s (%s) を削除しようとしています。この操作は元に戻せません。よろしいですか? +settings.delete.success=パッケージを削除しました。 +settings.delete.error=パッケージの削除に失敗しました。 + diff --git a/options/locale/locale_ko-KR.ini b/options/locale/locale_ko-KR.ini index b6c1334cca..36f616990d 100644 --- a/options/locale/locale_ko-KR.ini +++ b/options/locale/locale_ko-KR.ini @@ -74,6 +74,7 @@ loading=불러오는 중... + [error] [startpage] @@ -816,8 +817,6 @@ issues.due_date_remove=%s %s 마감일이 삭제되었습니다. issues.due_date_overdue=기한 초과 issues.due_date_invalid=기한이 올바르지 않거나 범위를 벗어났습니다. 'yyyy-mm-dd'형식을 사용해주십시오. issues.dependency.title=의존성 -issues.dependency.issue_no_dependencies=이 이슈는 어떠한 의존성도 가지지 않습니다. -issues.dependency.pr_no_dependencies=이 풀 리퀘스트는 어떠한 의존성도 가지지 않습니다. issues.dependency.add=의존성 추가... issues.dependency.cancel=취소 issues.dependency.remove=제거 @@ -865,7 +864,6 @@ pulls.no_merge_desc=모든 저장소 머지 옵션이 비활성화 되어있기 pulls.invalid_merge_option=이 풀 리퀘스트에서 설정한 머지 옵션을 사용하실 수 없습니다. ; %[2]s
%[3]s
- milestones.new=새로운 마일스톤 milestones.open_tab=%d개 열림 milestones.close_tab=%d개 닫힘 @@ -1338,6 +1336,7 @@ repos.size=크기 + auths.auth_manage_panel=인증 소스 관리 auths.new=인증 소스 추가 auths.name=이름 @@ -1586,3 +1585,5 @@ error.not_signed_commit=서명되지 않은 커밋입니다. error.no_unit_allowed_repo=이 저장소의 어떤 섹션에도 접근할 수 없습니다. error.unit_not_allowed=이 저장소 섹션에 접근할 수 없습니다. +[packages] + diff --git a/options/locale/locale_lv-LV.ini b/options/locale/locale_lv-LV.ini index 85d4487944..19c256b1e1 100644 --- a/options/locale/locale_lv-LV.ini +++ b/options/locale/locale_lv-LV.ini @@ -105,6 +105,7 @@ error404=Lapa, ko vēlaties atvērt, neeksistē vai arī GitHub vai ziņojiet par jaunu kļūdu, ja nepieciešams. @@ -644,7 +645,6 @@ ssh_invalid_token_signature=Norādītā SSH atslēga, paraksts un talons neatbil ssh_token_required=Jānorāda paraksts zemāk esošajam talonam ssh_token=Talons ssh_token_help=Parakstu ir iespējams uzģenerēt izmantojot komandu: -ssh_token_code=echo -n "%s" | ssh-keygen -Y sign -n gitea -f /ceļš/uz/atslēgu ssh_token_signature=Aizsargāts SSH paraksts key_signature_ssh_placeholder=Sākas ar '-----BEGIN SSH SIGNATURE-----' verify_ssh_key_success=SSH atslēga '%s' veiksmīgi pārbaudīta. @@ -1382,8 +1382,6 @@ issues.due_date_remove=noņēma izpildes termiņu %s %s issues.due_date_overdue=Nokavēts issues.due_date_invalid=Datums līdz nav korekts. Izmantojiet formātu 'gggg-mm-dd'. issues.dependency.title=Atkarības -issues.dependency.issue_no_dependencies=Šai problēmai pagaidām nav nevienas atkarības. -issues.dependency.pr_no_dependencies=Šim izmaiņu pieprasījumam pagaidām nav nevienas atkarības. issues.dependency.add=Pievienot atkarību… issues.dependency.cancel=Atcelt issues.dependency.remove=Noņemt @@ -1544,7 +1542,6 @@ pulls.outdated_with_base_branch=Atzars ir novecojis salīdzinot ar bāzes atzaru pulls.closed_at=`aizvēra šo izmaiņu pieprasījumu %[2]s` pulls.reopened_at=`atkārtoti atvēra šo izmaiņu pieprasījumu %[2]s` pulls.merge_instruction_hint=`Varat aplūkot arī komandrindas instrukcijas.` - pulls.merge_instruction_step1_desc=Projekta repozitorijā izveidojiet jaunu jaunu atzaru un pārbaudiet savas izmaiņas. pulls.merge_instruction_step2_desc=Sapludināt izmaiņas un atjaunot tās Gitea. @@ -2483,6 +2480,7 @@ repos.forks=Atdalītie repos.issues=Problēmas repos.size=Izmērs + defaulthooks=Noklusētie tīmekļa āķi defaulthooks.desc=Tīmekļa āķi ļauj paziņot ārējiem servisiem par noteiktiem notikumiem, kas notiek Gitea. Kad iestāsies kāds notikums, katram ārējā servisa URL tiks nosūtīts POST pieprasījums. Šeit izveidotie tīmekļa āķi tiks pievienoti visiem jaunajajiem repozitorijiem. Lai uzzinātu sīkāk skatieties tīmekļa āķu rokasgrāmatā. defaulthooks.add_webhook=Pievienot noklusēto tīmekļa āķi @@ -2908,3 +2906,5 @@ unit=Vienība error.no_unit_allowed_repo=Jums nav tiesību aplūkot nevienu šī repozitorija sadaļu. error.unit_not_allowed=Jums nav tiesību piekļūt šai repozitorija sadaļai. +[packages] + diff --git a/options/locale/locale_ml-IN.ini b/options/locale/locale_ml-IN.ini index c5392f1501..c5f7238de8 100644 --- a/options/locale/locale_ml-IN.ini +++ b/options/locale/locale_ml-IN.ini @@ -65,6 +65,7 @@ loading=ലഭ്യമാക്കുന്നു… + [error] [startpage] @@ -726,7 +727,6 @@ issues.dependency.add_error_dep_not_same_repo=രണ്ട് പ്രശ്ന ; %[2]s
%[3]s
- milestones.filter_sort.most_issues=മിക്ക ഇഷ്യൂകളും milestones.filter_sort.least_issues=കുറഞ്ഞ ഇഷ്യൂകളെങ്കിലും @@ -782,6 +782,7 @@ repos.issues=ഇഷ്യൂകള്‍ + [action] @@ -796,3 +797,5 @@ repos.issues=ഇഷ്യൂകള്‍ [units] +[packages] + diff --git a/options/locale/locale_nl-NL.ini b/options/locale/locale_nl-NL.ini index 88ded39de2..640c5364dd 100644 --- a/options/locale/locale_nl-NL.ini +++ b/options/locale/locale_nl-NL.ini @@ -90,6 +90,7 @@ error404=De pagina die u probeert te bereiken bestaat niet of < never=Nooit + [error] missing_csrf=Foutief verzoek: geen CSRF-token aanwezig @@ -1152,8 +1153,6 @@ issues.due_date_remove=heeft %[2]s de deadline %[1]s verwijderd issues.due_date_overdue=Over tijd issues.due_date_invalid=De deadline is ongeldig of buiten bereik. Gebruik het formaat 'jjjj-mm-dd'. issues.dependency.title=Afhankelijkheden -issues.dependency.issue_no_dependencies=Deze kwestie heeft momenteel geen afhankelijkheden. -issues.dependency.pr_no_dependencies=Deze pull-aanvraag heeft momenteel geen afhankelijkheden. issues.dependency.add=Voeg afhankelijkheid toe… issues.dependency.cancel=Annuleer issues.dependency.remove=Verwijder @@ -1280,7 +1279,6 @@ pulls.outdated_with_base_branch=Deze branch is verouderd met de basis branch pulls.closed_at=`heeft deze pull request gesloten %[2]s` pulls.reopened_at=`heropende deze pull request %[2]s` - milestones.new=Nieuwe mijlpaal milestones.open_tab=%d geopend milestones.close_tab=%d gesloten @@ -2100,6 +2098,7 @@ repos.issues=Kwesties repos.size=Grootte + systemhooks=Systeem webhooks systemhooks.add_webhook=Systeem Webhook toevoegen systemhooks.update_webhook=Systeem-webhook bijwerken @@ -2452,3 +2451,5 @@ error.probable_bad_default_signature=WAARSCHUWING! Hoewel de standaard sleutel d error.no_unit_allowed_repo=U heeft geen toegang tot een enkele sectie van deze repository. error.unit_not_allowed=U heeft geen toegang tot deze sectie van de repository. +[packages] + diff --git a/options/locale/locale_pl-PL.ini b/options/locale/locale_pl-PL.ini index 7f54c18c31..6967a919af 100644 --- a/options/locale/locale_pl-PL.ini +++ b/options/locale/locale_pl-PL.ini @@ -105,6 +105,7 @@ error404=Strona, do której próbujesz dotrzeć nie istnieje lu never=Nigdy + [error] occurred=Wystąpił błąd report_message=Jeśli jesteś pewien, że jest to błąd Gitea, poszukaj już istniejącego zgłoszenia na GitHub lub w razie potrzeby otwórz nowy problem. @@ -625,7 +626,6 @@ ssh_key_verify=Weryfikuj ssh_token_required=Musisz podać podpis poniższego tokenu ssh_token=Token ssh_token_help=Możesz wygenerować podpis używając: -ssh_token_code=echo -n "%s" | ssh-keygen -Y znak -n gitea -f /ścieżka_do_twojego_klucza_publicznego ssh_token_signature=Wzmocniony podpis SSH key_signature_ssh_placeholder=Zaczyna się od '-----BEGIN SSH SIGNATURE-----' verify_ssh_key_success=Klucz SSH '%s' został zweryfikowany. @@ -1312,8 +1312,6 @@ issues.due_date_remove=usuwa termin realizacji %s %s issues.due_date_overdue=Zaległe issues.due_date_invalid=Data realizacji jest niewłaściwa lub spoza zakresu. Użyj formatu 'yyyy-mm-dd'. issues.dependency.title=Zależności -issues.dependency.issue_no_dependencies=To zgłoszenie nie ma w tej chwili żadnych zależności. -issues.dependency.pr_no_dependencies=Ten Pull Request nie zawiera w tej chwili żadnych zależności. 
issues.dependency.add=Dodaj zależność… issues.dependency.cancel=Anuluj issues.dependency.remove=Usuń @@ -1451,7 +1449,6 @@ pulls.outdated_with_base_branch=Ta gałąź jest przestarzała w stosunku do ga pulls.closed_at=`zamknął(-ęła) ten pull request %[2]s` pulls.reopened_at=`otworzył(-a) ponownie ten Pull Request %[2]s` pulls.merge_instruction_hint=`Możesz także zobaczyć instrukcje wiersza poleceń.` - pulls.merge_instruction_step1_desc=Z repozytorium twojego projektu, sprawdź nową gałąź i przetestuj zmiany. pulls.merge_instruction_step2_desc=Połącz zmiany i zaktualizuj na Gitea. @@ -2333,6 +2330,7 @@ repos.forks=Forki repos.issues=Zgłoszenia repos.size=Rozmiar + defaulthooks=Domyślne Webhooki defaulthooks.desc=Webhooki automatycznie wysyłają zapytania HTTP POST na serwer, gdy niektóre zdarzenia Gitea je wyzwalają. Webhooki zdefiniowane tutaj są domyślne i zostaną skopiowane do wszystkich nowych repozytoriów. Przeczytaj więcej w przewodniku webhooków. defaulthooks.add_webhook=Dodaj domyślny Webhook @@ -2713,3 +2711,5 @@ error.probable_bad_default_signature=OSTRZEŻENIE! Pomimo, że domyślny klucz p error.no_unit_allowed_repo=Nie masz uprawnień do żadnej sekcji tego repozytorium. error.unit_not_allowed=Nie masz uprawnień do tej sekcji repozytorium. +[packages] + diff --git a/options/locale/locale_pt-BR.ini b/options/locale/locale_pt-BR.ini index 2ec5b3de41..607a915645 100644 --- a/options/locale/locale_pt-BR.ini +++ b/options/locale/locale_pt-BR.ini @@ -1,4 +1,4 @@ -home=Página inicial +home=Inicio dashboard=Painel explore=Explorar help=Ajuda @@ -34,6 +34,20 @@ twofa=Autenticação de dois fatores twofa_scratch=Código de backup da autenticação de dois fatores passcode=Senha +webauthn_insert_key=Insira sua chave de segurança +webauthn_sign_in=Pressione o botão na sua chave de segurança. Se a sua chave de segurança não tiver um botão, insira-a novamente. +webauthn_press_button=Por favor, pressione o botão na sua chave de segurança… +webauthn_use_twofa=Use um código de duas etapas do seu telefone +webauthn_error=Não foi possível ler sua chave de segurança. +webauthn_unsupported_browser=Seu navegador não oferece suporte ao WebAuthn. +webauthn_error_unknown=Ocorreu um erro desconhecido. Por favor, tente novamente. +webauthn_error_insecure=WebAuthn suporta apenas conexões seguras. Para testar via HTTP, você pode usar a origem "localhost" ou "127.0.0.1" +webauthn_error_unable_to_process=O servidor não pôde processar sua solicitação. +webauthn_error_duplicated=A chave de segurança não é permitida para esta solicitação. Por favor, certifique-se que a chave já não está registrada. +webauthn_error_empty=Você deve definir um nome para esta chave. +webauthn_error_timeout=Tempo limite atingido antes de sua chave poder ser lida. Por favor, recarregue esta página e tente novamente. +webauthn_u2f_deprecated=A chave: '%s' autentica utilizando o processo U2F descontinuado. Você deve registrar novamente esta chave e remover o registro antigo. +webauthn_reload=Recarregar repository=Repositório organization=Organização @@ -91,8 +105,14 @@ error404=A página que você está tentando acessar não existe never=Nunca + [error] +occurred=Ocorreu um erro +report_message=Se você tem certeza de que é um bug do Gitea, procure por issues no GitHub ou abra uma nova issue, se necessário. missing_csrf=Pedido inválido: não tem token CSRF presente +invalid_csrf=Requisição Inválida: token CSRF inválido +not_found=Não foi possível encontrar o destino. 
+network_error=Erro de rede [startpage] app_desc=Um serviço de hospedagem Git amigável @@ -109,6 +129,7 @@ license_desc=Está tudo no documentação cuidadosamente antes de alterar qualquer coisa nesta página. +require_db_desc=Gitea requer MySQL, PostgreSQL, MSSQL, SQLite3 ou TiDB (protocolo MySQL). db_title=Configurações de banco de dados db_type=Tipo de banco de dados host=Servidor @@ -122,6 +143,11 @@ ssl_mode=SSL charset=Charset path=Caminho sqlite_helper=Caminho do arquivo do banco de dados SQLite3.
Informe um caminho absoluto se você executar o Gitea como um serviço. +reinstall_error=Você está tentando instalar em um banco de dados existente do Gitea +reinstall_confirm_message=Reinstalar com um banco de dados Gitea existente pode causar vários problemas. Na maioria dos casos, você deve usar seu "app.ini" existente para executar o Gitea. Se você sabe o que está fazendo, confirme o seguinte: +reinstall_confirm_check_1=Os dados criptografados pelo SECRET_KEY no app.ini poderão ser perdidos: os usuários podem não conseguir fazer login com 2FA/OTP & espelhos podem não funcionar corretamente. Ao marcar esta caixa você confirma que o atual arquivo app.ini contém o SECRET_KEY correto. +reinstall_confirm_check_2=Os repositórios e configurações podem precisar ser re-sincronizados. Marcando esta caixa você confirma que irá sincronizar novamente os hooks para os repositórios e o arquivo authorized_keys manualmente. Você confirma que irá garantir que as configurações de repositório e espelhamento estão corretas. +reinstall_confirm_check_3=Você confirma que este Gitea está realmente executando com a localização correta do app.ini e que você tem certeza de que precisa reinstalar. Você confirma que tomou conhecimento dos riscos acima descritos. err_empty_db_path=O caminho do banco de dados SQLite3 não pode ser em branco. no_admin_and_disable_registration=Você não pode desabilitar o auto-cadastro do usuário sem criar uma conta de administrador. err_empty_admin_password=A senha do administrador não pode ser em branco. @@ -187,8 +213,12 @@ install_btn_confirm=Instalar Gitea test_git_failed=Falha ao testar o comando 'git': %v sqlite3_not_available=Esta versão do Gitea não suporta SQLite3. Por favor faça o download da versão binária oficial em %s (não utilize a versão 'gobuild'). invalid_db_setting=Configuração de banco de dados está inválida: %v +invalid_db_table=A tabela '%s' do banco de dados é inválida: %v invalid_repo_path=A raiz do repositório está inválida: %v +invalid_app_data_path=O caminho dos dados do aplicativo é inválido: %v run_user_not_match=O nome de usuário 'Executar como' não é o nome de usuário atual: %s -> %s +internal_token_failed=Falha ao gerar o token interno: %v +secret_key_failed=Falha ao gerar a chave secreta: %v save_config_failed=Falha ao salvar a configuração: %v invalid_admin_setting=Configuração da conta de administrador está inválida: %v install_success=Bem-vindo! Obrigado por escolher Gitea. Divertir-se. E, tome cuidado! @@ -197,8 +227,8 @@ default_keep_email_private=Ocultar endereços de e-mail por padrão default_keep_email_private_popup=Ocultar endereços de e-mail de novas contas de usuário por padrão. default_allow_create_organization=Permitir a criação de organizações por padrão default_allow_create_organization_popup=Permitir que novas contas de usuários criem organizações por padrão. -default_enable_timetracking=Habilitar o contador de tempo por padrão -default_enable_timetracking_popup=Habilitar o contador de tempo para novos repositórios por padrão. +default_enable_timetracking=Habilitar o Cronômetro por Padrão +default_enable_timetracking_popup=Habilitar o cronômetro para novos repositórios por padrão. no_reply_address=Domínio de e-mail oculto no_reply_address_helper=Nome de domínio para usuários com um endereço de e-mail oculto. Por exemplo, o nome de usuário 'joe' será registrado no Git como 'joe@noreply.example.org' se o domínio de e-mail oculto estiver definido como 'noreply.example.org'. 
password_algorithm=Algoritmo Hash de Senha @@ -217,6 +247,7 @@ view_home=Ver %s search_repos=Encontre um repositório… filter=Outros filtros filter_by_team_repositories=Filtrar por repositórios da equipe +feed_of=Feed de "%s" show_archived=Arquivado show_both_archived_unarchived=Mostrando arquivados e não arquivados @@ -237,6 +268,8 @@ organizations=Organizações search=Pesquisar code=Código search.fuzzy=Similar +search.match=Correspondência +code_search_unavailable=A pesquisa por código não está disponível no momento. Entre em contato com o administrador do site. repo_no_results=Nenhum repositório correspondente foi encontrado. user_no_results=Nenhum usuário correspondente foi encontrado. org_no_results=Nenhuma organização correspondente foi encontrada. @@ -250,6 +283,7 @@ register_helper_msg=Já tem uma conta? Acesse agora! social_register_helper_msg=Já tem uma conta? Vincule agora! disable_register_prompt=Cadastro está desabilitado. Entre em contato com o administrador do site. disable_register_mail=E-mail de confirmação de cadastro está desabilitado. +manual_activation_only=Entre em contato com o administrador do site para concluir a ativação. remember_me=Lembrar deste Dispositivo forgot_password_title=Esqueci minha senha forgot_password=Esqueceu sua senha? @@ -288,12 +322,17 @@ oauth_signup_submit=Completar conta oauth_signin_tab=Vincular à uma conta existente oauth_signin_title=Acesse com uma conta vinculada oauth_signin_submit=Vincular conta +oauth.signin.error=Ocorreu um erro durante o processamento do pedido de autorização. Se este erro persistir, contate o administrador. +oauth.signin.error.access_denied=O pedido de autorização foi negado. +oauth.signin.error.temporarily_unavailable=A autorização falhou porque o servidor de autenticação está temporariamente indisponível. Por favor, tente novamente mais tarde. openid_connect_submit=Conectar openid_connect_title=Conectar à uma conta existente openid_connect_desc=O URI do OpenID escolhido é desconhecido. Associe-o com uma nova conta aqui. openid_register_title=Criar uma nova conta openid_register_desc=O URI do OpenID escolhido é desconhecido. Associe-o com uma nova conta aqui. openid_signin_desc=Digite a URI do seu OpenID. Por exemplo: https://anne.me, bob.openid.org.cn ou gnusocial.net/carry. +disable_forgot_password_mail=A recuperação de conta está desativada porque nenhum e-mail está configurado. Por favor, contate o administrador do site. +disable_forgot_password_mail_admin=A recuperação de conta só está disponível quando o e-mail está configurado. Por favor, configure o e-mail para permitir a recuperação de conta. email_domain_blacklisted=Você não pode se cadastrar com seu endereço de e-mail. authorize_application=Autorizar aplicativo authorize_redirect_notice=Você será redirecionado para %s se você autorizar este aplicativo. @@ -307,27 +346,64 @@ password_pwned=A senha escolhida está em uma lista de defina sua senha primeiro. reset_password=Recuperar sua conta +reset_password.title=%s, você pediu para recuperar a sua conta +reset_password.text=Por favor clique no link a seguir para recuperar sua conta em %s: register_success=Cadastro bem-sucedido +issue_assigned.pull=@%[1]atribuiu a você o pull request %[2]s no repositório %[3]s. +issue_assigned.issue=@%[1]s atribuiu a você a issue %[2]s no repositório %[3]s. +issue.x_mentioned_you=@%s mencionou você: +issue.action.force_push=%[1]s forçou o push de %[2]s de %[3]s para %[4]s. 
+issue.action.push_1=@%[1]s fez o push de %[3]d commit para %[2]s +issue.action.push_n=@%[1]s fez o push de %[3]d commits para %[2]s +issue.action.close=@%[1]s fechou #%[2]d. +issue.action.reopen=@%[1]s reabriu #%[2]d. +issue.action.merge=@%[1]s aplicou o merge #%[2]d em %[3]s. +issue.action.approve=@%[1]s aprovou este pull request. +issue.action.reject=@%[1]s solicitou alterações neste pull request. +issue.action.review=@%[1]s fez um comentário neste pull request. +issue.action.review_dismissed=@%[1]s descartou a última revisão de %[2]s para este pull request. +issue.action.ready_for_review=@%[1]s marcou este pull request como pronto para revisão. +issue.action.new=@%[1]s criou #%[2]d. +issue.in_tree_path=Em %s: release.new.subject=%s em %s lançado release.new.text=@%[1]s lançou a versão %[2]s em %[3]s +release.title=Título: %s +release.note=Nota: +release.downloads=Downloads: +release.download.zip=Código fonte (ZIP) +release.download.targz=Código fonte (TAR.GZ) repo.transfer.subject_to=%s gostaria de transferir "%s" para %s repo.transfer.subject_to_you=%s gostaria de transferir "%s" para você repo.transfer.to_you=você +repo.transfer.body=Para o aceitar ou rejeitar visite %s, ou simplesmente o ignore. repo.collaborator.added.subject=%s adicionou você a %s +repo.collaborator.added.text=Você foi adicionado como um colaborador do repositório: [modal] yes=Sim @@ -368,6 +444,7 @@ email_error=` não é um endereço de e-mail válido.` url_error=`não é uma URL válida.` include_error=` deve conter '%s'.` glob_pattern_error=` padrão glob é inválido: %s.` +regex_pattern_error=` o regex é inválido: %s.` unknown_error=Erro desconhecido: captcha_incorrect=O código CAPTCHA está incorreto. password_not_match=As senhas não coincidem. @@ -376,6 +453,7 @@ lang_select_error=Selecione um idioma da lista. username_been_taken=O nome de usuário já está sendo usado. username_change_not_local_user=Usuários não-locais não são autorizados a alterar nome de usuário. repo_name_been_taken=O nome de repositório já está sendo usado. +repository_force_private=Forçar Privado está ativado: repositórios privados não podem ser tornados públicos. repository_files_already_exist=Arquivos já existem neste repositório. Contate o administrador. repository_files_already_exist.adopt=Arquivos já existem neste repositório e só podem ser adotados. repository_files_already_exist.delete=Arquivos já existem neste repositório. Você deve deletá-los. @@ -411,7 +489,9 @@ auth_failed=Autenticação falhou: %v still_own_repo=Sua conta possui um ou mais repositórios; você deve excluí-los ou transferi-los primeiro. still_has_org=Sua conta é um membro de uma ou mais organizações; você deve deixá-las primeiro. +still_own_packages=Sua conta possui um ou mais pacotes; você deve excluí-los primeiro. org_still_own_repo=Esta organização ainda possui repositórios; você deve excluí-los ou transferi-los primeiro. +org_still_own_packages=Esta organização ainda possui pacotes; você deve excluí-los primeiro. target_branch_not_exist=O branch de destino não existe. 
@@ -452,6 +532,7 @@ twofa=Autenticação de dois fatores account_link=Contas vinculadas organization=Organizações uid=Uid +webauthn=Chaves de segurança public_profile=Perfil público biography_placeholder=Nos conte um pouco sobre você @@ -473,6 +554,22 @@ continue=Continuar cancel=Cancelar language=Idioma ui=Tema +hidden_comment_types=Tipos de comentários ocultos +comment_type_group_reference=Referência +comment_type_group_label=Rótulo +comment_type_group_milestone=Marco +comment_type_group_assignee=Atribuído +comment_type_group_title=Título +comment_type_group_branch=Branch +comment_type_group_time_tracking=Contador de tempo +comment_type_group_deadline=Prazo final +comment_type_group_dependency=Dependência +comment_type_group_lock=Status de Bloqueio +comment_type_group_review_request=Revisar solicitação +comment_type_group_pull_request_push=Commits adicionados +comment_type_group_project=Projeto +comment_type_group_issue_ref=Referência do issue +saved_successfully=Suas configurações foram salvas com sucesso. privacy=Privacidade keep_activity_private=Ocultar a atividade da página de perfil keep_activity_private_popup=Torna a atividade visível somente para você e os administradores @@ -505,7 +602,7 @@ theme_desc=Este será o seu tema padrão em todo o site. primary=Principal activated=Ativado requires_activation=Requer ativação -primary_email=Tornar privado +primary_email=Tornar Principal activate_email=Enviar Ativação activations_pending=Ativações pendentes delete_email=Remover @@ -547,12 +644,30 @@ ssh_key_been_used=Esta chave SSH já foi adicionada ao servidor. ssh_key_name_used=Uma chave SSH com o mesmo nome já existe em sua conta. ssh_principal_been_used=Este nome principal já foi adicionada ao servidor. gpg_key_id_used=Uma chave GPG pública com a mesma ID já existe. +gpg_no_key_email_found=Esta chave GPG não corresponde a nenhum endereço de e-mail ativado associado à sua conta. Ela ainda pode ser adicionada se você assinar o token fornecido. +gpg_key_matched_identities=Identidades correspondentes: +gpg_key_matched_identities_long=As identidades incorporadas nesta chave coincidem com os seguintes endereços de email ativados para este usuário. Os commits correspondentes a estes endereços de e-mail podem ser verificados com esta chave. +gpg_key_verified=Chave validada +gpg_key_verified_long=A chave foi validada com um token e pode ser usada para verificar commits correspondentes a qualquer endereço de e-mail ativado para esse usuário, além de quaisquer identidades correspondentes para essa chave. +gpg_key_verify=Validar gpg_invalid_token_signature=A chave GPG fornecida, a assinatura ou o token não correspondem ou o token está desatualizado. gpg_token_required=Você tem que fornecer uma assinatura para o token abaixo gpg_token=Token gpg_token_help=Você pode gerar uma assinatura usando: gpg_token_code=echo "%s" | gpg -a --default-key %s --detach-sig +gpg_token_signature=Assinatura GPG blindada key_signature_gpg_placeholder=Começa com '-----BEGIN PGP SIGNATURE-----' +verify_gpg_key_success=A chave GPG '%s' foi validada. +ssh_key_verified=Chave validada +ssh_key_verified_long=A chave foi validada com um token e pode ser usada para validar commits que correspondam a qualquer dos endereços de e-mail ativados deste usuário. +ssh_key_verify=Validar +ssh_invalid_token_signature=A chave, assinatura ou token SSH fornecidos não coincidem, ou então o token expirou. 
+ssh_token_required=Você tem que fornecer uma assinatura para o token abaixo +ssh_token=Token +ssh_token_help=Você pode gerar uma assinatura usando: +ssh_token_signature=Assinatura SSH blindada +key_signature_ssh_placeholder=Começa com '-----BEGIN SSH SIGNATURE-----' +verify_ssh_key_success=A chave SSH '%s' foi validada. subkeys=Subchaves key_id=ID da chave key_name=Nome da Chave @@ -653,6 +768,11 @@ passcode_invalid=Esse código de acesso é inválido. Tente novamente. twofa_enrolled=Sua conta foi inscrita na autenticação de dois fatores. Armazene seu token de backup (%s) em um local seguro, pois ele é exibido apenas uma vez! twofa_failed_get_secret=Falha ao obter o segredo. +webauthn_desc=Chaves de segurança são dispositivos de hardware que contém chaves de criptografia. Elas podem ser usadas para autenticação de dois fatores. A chave de segurança deve suportar o padrão WebAuthnn Authenticator. +webauthn_register_key=Adicionar chave de segurança +webauthn_nickname=Apelido +webauthn_delete_key=Remover chave de segurança +webauthn_delete_key_desc=Se você remover uma chave de segurança, não poderá mais entrar com ela. Continuar? manage_account_links=Gerenciar contas vinculadas manage_account_links_desc=Estas contas externas estão vinculadas a sua conta de Gitea. @@ -703,6 +823,8 @@ visibility_fork_helper=(Esta alteração irá afetar todos os forks.) clone_helper=Precisa de ajuda com o clone? Visite a Ajuda. fork_repo=Fork do repositório fork_from=Fork de +already_forked=Você já fez o fork de %s +fork_to_different_account=Faça um fork para uma conta diferente fork_visibility_helper=A visibilidade do fork de um repositório não pode ser alterada. use_template=Usar este modelo clone_in_vsc=Clonar no VS Code @@ -728,9 +850,10 @@ auto_init=Inicializar o repositório (adicionando .gitignore, licença e LEIA-ME trust_model_helper=Selecione o modelo de confiança para verificação de assinatura. As opções possíveis são: trust_model_helper_collaborator=Colaborador: Confiar em assinaturas de colaboradores trust_model_helper_committer=Committer: Confiar em assinaturas que correspondem aos committers +trust_model_helper_collaborator_committer=Colaborador+Committer: Confiar em assinaturas dos colaboradores que correspondem ao committer trust_model_helper_default=Padrão: Usar o modelo de confiança padrão para esta instalação create_repo=Criar repositório -default_branch=Branch padrão +default_branch=Branch Padrão default_branch_helper=O branch padrão é o branch base para pull requests e commits de código. mirror_prune=Varrer mirror_prune_desc=Remover referências obsoletas de controle remoto @@ -784,6 +907,7 @@ desc.archived=Arquivado template.items=Itens do modelo template.git_content=Conteúdo Git (Branch padrão) template.git_hooks=Hooks do Git +template.git_hooks_tooltip=Atualmente você não pode modificar ou remover os Git Hooks adicionados. Selecione isso apenas se você confia no repositório modelo. template.webhooks=Webhooks template.topics=Tópicos template.avatar=Avatar @@ -821,6 +945,7 @@ migrate_items_releases=Versões migrate_repo=Migrar repositório migrate.clone_address=Migrar / Clonar de URL migrate.clone_address_desc=URL HTTP (S) ou Git 'clone' de um repositório existente +migrate.github_token_desc=Você pode colocar aqui um ou mais tokens separados por vírgulas para tornar a migração mais rápida para compensar o limite de taxa de API do GitHub. AVISO: abusar desse recurso pode violar a política do provedor de serviços e levar ao bloqueio da conta. 
migrate.clone_local_path=ou um caminho de servidor local migrate.permission_denied=Você não pode importar repositórios locais. migrate.permission_denied_blocked=Você não pode importar dos hosts não permitidos, por favor peça ao administrador para verificar as configurações ALLOWED_DOMAINS/ALLOW_LOCALNETWORKS/BLOCKED_DOMAINS. @@ -835,11 +960,13 @@ migrate.migrating=Migrando a partir de %s ... migrate.migrating_failed=Migração a partir de %s falhou. migrate.migrating_failed.error=Erro: %s migrate.migrating_failed_no_addr=A migração falhou. +migrate.github.description=Migrar dados de github.com ou de outras instâncias do GitHub. migrate.git.description=Migrar um repositório somente de qualquer serviço Git. migrate.gitlab.description=Migrar dados de gitlab.com ou de outras instâncias do GitLab. migrate.gitea.description=Migrar dados de gitea.com ou de outras instâncias do Gitea. migrate.gogs.description=Migrar dados de notabug.org ou de outras instâncias do Gogs. migrate.onedev.description=Migrar dados de code.onedev.io ou de outras instâncias do OneDev. +migrate.codebase.description=Migrar dados de codebasehq.com. migrate.gitbucket.description=Migrar dados de instâncias do GitBucket. migrate.migrating_git=Migrando dados Git migrate.migrating_topics=Migrando tópicos @@ -869,6 +996,7 @@ clone_this_repo=Clonar este repositório create_new_repo_command=Criando um novo repositório por linha de comando push_exist_repo=Realizando push para um repositório existente por linha de comando empty_message=Este repositório está vazio. +broken_message=Os dados Git subjacentes a este repositório não podem ser lidos. Entre em contato com o administrador desta instância ou exclua este repositório. code=Código code.desc=Acesso a código-fonte, arquivos, commits e branches. @@ -883,6 +1011,7 @@ issues=Issues pulls=Pull requests project_board=Projetos labels=Etiquetas +org_labels_desc=Rótulos de nível de organização que podem ser usados em todos os repositórios sob esta organização org_labels_desc_manage=gerenciar milestones=Marcos @@ -890,6 +1019,7 @@ commits=Commits commit=Commit release=Versão releases=Versões +tag=Tag released_this=lançou isto file.title=%s em %s file_raw=Original @@ -899,7 +1029,18 @@ file_view_rendered=Ver Renderizado file_view_raw=Ver original file_permalink=Link permanente file_too_large=O arquivo é muito grande para ser mostrado. +bidi_bad_header=`Este arquivo contém caracteres Unicode Bidirecionais inesperados!` +bidi_bad_description=`Este arquivo contém caracteres Unicode bidirecionais inesperados que podem ser processados de forma diferente do que aparece abaixo. Se seu caso de uso for intencional e legítimo, você pode ignorar com segurança esse aviso. Use o botão Escapar para revelar caracteres ocultos.` +bidi_bad_description_escaped=`Este arquivo contém caracteres Unicode Bidirecionais inesperados. Caracteres unicode ocultos estão escapados abaixo. Use o botão Desescapar para mostrar como eles são mostrados.` +unicode_header=`Este arquivo contém caracteres Unicode ocultos!` +unicode_description=`Este arquivo contém caracteres Unicode ocultos que podem ser processados de forma diferente do que aparece abaixo. Se seu caso de uso for intencional e legítimo, você pode ignorar com segurança esse aviso. Use o botão Escapar para revelar caracteres ocultos.` +unicode_description_escaped=`Este arquivo contém caracteres Unicode ocultos. Caracteres unicode ocultos estão escapados abaixo. 
Utilize o botão Desescapar para mostrar como eles são mostrados.` +line_unicode=`Esta linha possui caracteres unicode ocultos` +escape_control_characters=Escapar +unescape_control_characters=Desescapar +file_copy_permalink=Copiar Link Permanente +view_git_blame=Ver Git Blame video_not_supported_in_browser=Seu navegador não suporta a tag 'video' do HTML5. audio_not_supported_in_browser=Seu navegador não suporta a tag 'audio' do HTML5. stored_lfs=Armazenado com Git LFS @@ -938,6 +1079,10 @@ editor.add_tmpl=Adicionar '' editor.add=Adicionar '%s' editor.update=Atualizar '%s' editor.delete=Excluir '%s' +editor.patch=Aplicar Correção +editor.patching=Corrigindo: +editor.fail_to_apply_patch=Não foi possível aplicar a correção '%s' +editor.new_patch=Nova correção editor.commit_message_desc=Adicione uma descrição detalhada (opcional)... editor.signoff_desc=Adicione um assinado-por-committer no final do log do commit. editor.commit_directly_to_this_branch=Commit diretamente no branch %s. @@ -962,6 +1107,8 @@ editor.commit_empty_file_text=O arquivo que você está prestes fazer commit est editor.no_changes_to_show=Nenhuma alteração a mostrar. editor.fail_to_update_file=Falha ao atualizar/criar arquivo '%s'. editor.fail_to_update_file_summary=Mensagem de erro: +editor.push_rejected_no_message=A alteração foi rejeitada pelo servidor sem uma mensagem. Por favor, verifique os Hooks Git. +editor.push_rejected=A alteração foi rejeitada pelo servidor. Por favor, verifique os Hooks Git. editor.push_rejected_summary=Mensagem completa de rejeição: editor.add_subdir=Adicionar um subdiretório... editor.unable_to_upload_files=Houve erro ao fazer upload de arquivos para '%s': %v @@ -971,10 +1118,13 @@ editor.cannot_commit_to_protected_branch=Branch '%s' está protegido para commit editor.no_commit_to_branch=Não foi possível fazer commit diretamente no branch porque: editor.user_no_push_to_branch=O usuário não pode fazer push no branch editor.require_signed_commit=Branch requer um commit assinado +editor.cherry_pick=Cherry-pick %s para: +editor.revert=Reverter %s para: commits.desc=Veja o histórico de alterações do código de fonte. commits.commits=Commits commits.no_commits=Nenhum commit em comum. '%s' e '%s' tem histórias completamente diferentes. +commits.nothing_to_compare=Estes branches são iguais. commits.search=Pesquisar commits... commits.search.tooltip=Você pode prefixar palavras-chave com "author:", "committer:", "after:", ou "before:", por exemplo: "revert author:Alice before:2019-04-01". commits.find=Pesquisar @@ -984,12 +1134,21 @@ commits.message=Mensagem commits.date=Data commits.older=Mais Antigo commits.newer=Mais recente -commits.signed_by=Acessado por +commits.signed_by=Assinado por commits.signed_by_untrusted_user=Assinado por usuário não confiável commits.signed_by_untrusted_user_unmatched=Assinado por usuário não confiável que não corresponde ao autor da submissão commits.gpg_key_id=ID da chave GPG +commits.ssh_key_fingerprint=Impressão Digital da Chave SSH +commit.actions=Ações +commit.revert=Reverter +commit.revert-header=Reverter: %s +commit.revert-content=Selecione a branch para reverter para: +commit.cherry-pick=Cherry-pick +commit.cherry-pick-header=Cherry-pick: %s +commit.cherry-pick-content=Selecione o branch para receber o cherry-pick: +ext_issues=Acesso a Issues Externos ext_issues.desc=Link para o issue tracker externo. 
projects=Projetos @@ -1026,11 +1185,13 @@ projects.board.deletion_desc=Excluir um quadro de projeto move todas as issues r projects.board.color=Cor projects.open=Abrir projects.close=Fechar +projects.board.assigned_to=Atribuído a issues.desc=Organize relatórios de bugs, tarefas e marcos. issues.filter_assignees=Filtrar Atribuição issues.filter_milestones=Filtrar Marco issues.filter_projects=Filtrar Projeto +issues.filter_labels=Filtrar Rótulo issues.filter_reviewers=Filtrar Revisor issues.new=Nova issue issues.new.title_empty=Título não pode ser em branco @@ -1071,6 +1232,11 @@ issues.label_templates.info=Ainda não existem etiquetas. Crie uma etiqueta em ' issues.label_templates.helper=Selecione um conjunto de etiquetas issues.label_templates.use=Use o conjunto de etiquetas issues.label_templates.fail_to_load_file=Houve erro ao carregar arquivo de template '%s': %v +issues.add_label=adicionou o rótulo %s %s +issues.add_labels=adicionou os rótulos %s %s +issues.remove_label=removeu o rótulo %s %s +issues.remove_labels=removeu os rótulos %s %s +issues.add_remove_labels=adicionou o(s) rótulo(s) %s e removeu %s %s issues.add_milestone_at=`adicionou esta issue para o marco %s %s` issues.add_project_at=`adicionado ao projeto %s %s` issues.change_milestone_at=`modificou o marco de %s para %s %s` @@ -1116,6 +1282,7 @@ issues.filter_sort.moststars=Mais estrelas issues.filter_sort.feweststars=Menos estrelas issues.filter_sort.mostforks=Mais forks issues.filter_sort.fewestforks=Menos forks +issues.keyword_search_unavailable=A pesquisa por palavra-chave não está disponível no momento. Entre em contato com o administrador do site. issues.action_open=Abrir issues.action_close=Fechar issues.action_label=Etiqueta @@ -1124,6 +1291,11 @@ issues.action_milestone_no_select=Sem marco issues.action_assignee=Responsável issues.action_assignee_no_select=Sem responsável issues.opened_by=aberto por %[3]s %[1]s +pulls.merged_by=por %[3]s foi aplicado em %[1]s +pulls.merged_by_fake=por %[2]s foi aplicado %[1]s +issues.closed_by=por %[3]s foi fechada %[1]s +issues.opened_by_fake=%[1]s abertas por %[2]s +issues.closed_by_fake=por %[2]s foi fechada %[1]s issues.previous=Anterior issues.next=Próximo issues.open_title=Aberto @@ -1133,14 +1305,19 @@ issues.commented_at=`comentou %s` issues.delete_comment_confirm=Tem certeza que deseja excluir este comentário? issues.context.copy_link=Copiar link issues.context.quote_reply=Citar resposta +issues.context.reference_issue=Referência em uma nova issue issues.context.edit=Editar issues.context.delete=Excluir issues.no_content=Ainda não há conteúdo. 
issues.close_issue=Fechar +issues.pull_merged_at=`aplicou o merge do commit %[2]s em %[3]s %[4]s` +issues.manually_pull_merged_at=`aplicou o merge do commit %[2]s em %[3]s manualmente %[4]s` issues.close_comment_issue=Comentar e fechar issues.reopen_issue=Reabrir issues.reopen_comment_issue=Comentar e reabrir issues.create_comment=Comentar +issues.closed_at=`fechou esta issue %[2]s` +issues.reopened_at=`reabriu esta issue %[2]s` issues.commit_ref_at=`citou esta issue em um commit %[2]s` issues.ref_issue_from=`referenciado esta issue %[4]s %[2]s` issues.ref_pull_from=`referenciado este pull request %[4]s %[2]s` @@ -1152,7 +1329,12 @@ issues.ref_from=`de %[1]s` issues.poster=Autor issues.collaborator=Colaborador issues.owner=Proprietário +issues.re_request_review=Re-solicitar revisão +issues.is_stale=Houve alterações nessa PR desde essa revisão +issues.remove_request_review=Remover solicitação de revisão issues.remove_request_review_block=Não é possível remover a solicitação de revisão +issues.dismiss_review=Descartar revisão +issues.dismiss_review_warning=Tem certeza de que deseja descartar esta revisão? issues.sign_in_require_desc=Acesse para participar desta conversação. issues.edit=Editar issues.cancel=Cancelar @@ -1196,13 +1378,21 @@ issues.lock.reason=Motivo do bloqueio issues.lock.title=Conversação bloqueada para esta issue. issues.unlock.title=Conversação desbloqueada para esta issue. issues.comment_on_locked=Você não pode comentar em uma issue bloqueada. +issues.delete=Apagar +issues.delete.title=Apagar esta issue? +issues.delete.text=Você realmente deseja excluir esta issue? (Isto irá remover permanentemente todo o conteúdo. Considere fechá-la em vez disso, se você pretende mantê-la arquivado) issues.tracker=Contador de tempo -issues.start_tracking=Iniciar contador de tempo +issues.start_tracking_short=Iniciar Cronômetro +issues.start_tracking=Iniciar Cronômetro issues.start_tracking_history=`começou a trabalhar %s` issues.tracker_auto_close=Contador de tempo será parado automaticamente quando esta issue for fechada +issues.tracking_already_started=`Você já iniciou o cronômetro em outra issue!` +issues.stop_tracking=Parar Cronômetro issues.stop_tracking_history=`parou de trabalhar %s` -issues.cancel_tracking_history=`cancelou contador de tempo %s` +issues.cancel_tracking=Descartar +issues.cancel_tracking_history=`cancelou o cronômetro %s` issues.add_time=Adicionar tempo manualmente +issues.del_time=Apagar este registro de tempo issues.add_time_short=Adicionar tempo issues.add_time_cancel=Cancelar issues.add_time_history=`adicionou tempo gasto %s` @@ -1216,6 +1406,9 @@ issues.due_date=Data limite issues.invalid_due_date_format=Formato da data limite inválido, deve ser 'dd/mm/aaaa'. issues.error_modifying_due_date=Falha ao modificar a data limite. issues.error_removing_due_date=Falha ao remover a data limite. +issues.push_commit_1=adicionou %d commit %s +issues.push_commits_n=adicionou %d commits %s +issues.force_push_codes=`forçou o push %[1]s de %[2]s para %[4]s %[6]s` issues.due_date_form=dd/mm/aaaa issues.due_date_form_add=Adicionar data limite issues.due_date_form_edit=Editar @@ -1228,12 +1421,16 @@ issues.due_date_remove=removeu a data limite %s %s issues.due_date_overdue=Em atraso issues.due_date_invalid=A data limite é inválida ou está fora do intervalo. Por favor, use o formato 'dd/mm/aaaa'. issues.dependency.title=Dependências -issues.dependency.issue_no_dependencies=Esta issue atualmente não tem dependências. 
-issues.dependency.pr_no_dependencies=Atualmente este pull request não tem dependências. +issues.dependency.issue_no_dependencies=Nenhuma dependência definida. +issues.dependency.pr_no_dependencies=Nenhuma dependência definida. issues.dependency.add=Adicione… issues.dependency.cancel=Cancelar issues.dependency.remove=Remover issues.dependency.remove_info=Remover esta dependência +issues.dependency.added_dependency=`adicionou uma nova dependência %s` +issues.dependency.removed_dependency=`removeu uma dependência %s` +issues.dependency.pr_closing_blockedby=Fechamento deste pull request está bloqueado pelas seguintes issues +issues.dependency.issue_closing_blockedby=Fechamento desta issue está bloqueado pelas seguintes issues issues.dependency.issue_close_blocks=Esta issue bloqueia o fechamento das seguintes issues issues.dependency.pr_close_blocks=Este pull request bloqueia o fechamento das seguintes issues issues.dependency.issue_close_blocked=Você precisa fechar todas as issues que bloqueiam esta issue antes de poder fechá-la. @@ -1254,28 +1451,45 @@ issues.review.self.approval=Você não pode aprovar o seu próprio pull request. issues.review.self.rejection=Você não pode solicitar alterações em seu próprio pull request. issues.review.approve=aprovou estas alterações %s issues.review.comment=revisou %s +issues.review.dismissed=rejeitou a revisão de %s %s +issues.review.dismissed_label=Rejeitada +issues.review.left_comment=deixou um comentário issues.review.content.empty=Você precisa deixar um comentário indicando as alterações solicitadas. issues.review.reject=alterações solicitadas %s +issues.review.wait=foi solicitada para revisão %s +issues.review.add_review_request=solicitou revisão de %s %s +issues.review.remove_review_request=removeu a solicitação de revisão para %s %s +issues.review.remove_review_request_self=recusou revisar %s issues.review.pending=Pendente issues.review.review=Revisão issues.review.reviewers=Revisores +issues.review.outdated=Desatualizado issues.review.show_outdated=Mostrar desatualizado issues.review.hide_outdated=Ocultar desatualizado +issues.review.show_resolved=Mostrar resolvidas +issues.review.hide_resolved=Ocultar resolvidas +issues.review.resolve_conversation=Resolver conversa +issues.review.un_resolve_conversation=Conversa não resolvida +issues.review.resolved_by=marcou esta conversa como resolvida issues.assignee.error=Nem todos os responsáveis foram adicionados devido a um erro inesperado. +issues.reference_issue.body=Conteúdo issues.content_history.deleted=excluído issues.content_history.edited=editado issues.content_history.created=criado issues.content_history.delete_from_history=Excluir do histórico issues.content_history.delete_from_history_confirm=Excluir do histórico? issues.content_history.options=Opções +issues.reference_link=Referência: %s compare.compare_base=base compare.compare_head=comparar pulls.desc=Habilitar pull requests e revisões de código. pulls.new=Novo pull request +pulls.view=Ver Pull Request pulls.compare_changes=Novo pull request -pulls.compare_changes_desc=Selecione a branch de destino (push) e a branch de origem (pull) para o merge. +pulls.compare_changes_desc=Selecione o branch de destino (push) e o branch de origem (pull) para o merge. +pulls.has_viewed_file=Visto pulls.compare_base=merge em pulls.compare_compare=pull de pulls.switch_comparison_type=Mudar tipo de comparação @@ -1283,6 +1497,7 @@ pulls.switch_head_and_base=Trocar cabeça e base pulls.filter_branch=Filtrar branch pulls.no_results=Nada encontrado. 
pulls.nothing_to_compare=Estes branches são iguais. Não há nenhuma necessidade para criar um pull request. +pulls.nothing_to_compare_and_allow_empty_pr=Estes branches são iguais. Este PR ficará vazio. pulls.has_pull_request=`Um pull request entre esses branches já existe: %[2]s#%[3]d` pulls.create=Criar pull request pulls.title_desc=quer aplicar o merge de %[1]d commits de %[2]s em %[3]s @@ -1296,6 +1511,7 @@ pulls.cant_reopen_deleted_branch=Este pull request não pode ser reaberto porque pulls.merged=Merge aplicado pulls.merged_as=O pull request teve merge aplicado como %[2]s. pulls.manually_merged=Merge aplicado manualmente +pulls.manually_merged_as=O pull request foi aplicado manualmente como %[2]s. pulls.is_closed=O pull request foi fechado. pulls.has_merged=O merge deste pull request foi aplicado. pulls.title_wip_desc=`Inicie o título com o prefixo %s para prevenir o merge do pull request até que o mesmo esteja pronto.` @@ -1306,6 +1522,7 @@ pulls.remove_prefix=Remover o prefixo %s pulls.data_broken=Este pull request está quebrado devido a falta de informação do fork. pulls.files_conflicted=Este pull request tem alterações conflitantes com o branch de destino. pulls.is_checking=Verificação de conflitos do merge está em andamento. Tente novamente em alguns momentos. +pulls.is_empty=Este branch é igual ao branch de destino. pulls.required_status_check_failed=Algumas verificações necessárias não foram bem sucedidas. pulls.required_status_check_missing=Estão faltando algumas verificações necessárias. pulls.required_status_check_administrator=Como administrador, você ainda pode aplicar o merge deste pull request. @@ -1334,17 +1551,24 @@ pulls.no_merge_wip=O merge deste pull request não pode ser aplicado porque est pulls.no_merge_not_ready=Este pull request não está pronto para ser realizado o merge, verifique o status da revisão e as verificações de status. pulls.no_merge_access=Você não está autorizado para realizar o merge deste pull request. pulls.merge_pull_request=Criar commit de merge +pulls.rebase_merge_pull_request=Rebase e fast-forward +pulls.rebase_merge_commit_pull_request=Rebase e criar commit de merge pulls.squash_merge_pull_request=Criar commit de squash pulls.merge_manually=Merge feito manualmente pulls.merge_commit_id=A ID de merge commit pulls.require_signed_wont_sign=O branch requer commits assinados, mas este merge não será assinado pulls.invalid_merge_option=Você não pode usar esta opção de merge neste pull request. +pulls.merge_conflict=O merge falhou: Houve um conflito ao fazer merge. Dica: Tente uma estratégia diferente pulls.merge_conflict_summary=Mensagem de erro +pulls.rebase_conflict=O merge falhou: Houve um conflito durante o rebase do commit %[1]s. Dica: Tente uma estratégia diferente pulls.rebase_conflict_summary=Mensagem de Erro ; %[2]s
%[3]s
pulls.unrelated_histories=Merge falhou: O merge do principal e da base não compartilham uma história comum. Dica: Tente uma estratégia diferente pulls.merge_out_of_date=Merge falhou: durante a geração do merge, a base não foi atualizada. Dica: Tente novamente. +pulls.head_out_of_date=O merge falhou: Enquanto gerava o merge, a head foi atualizada. Dica: Tente novamente. +pulls.push_rejected=O merge falhou: O push foi rejeitado. Revise os Git Hooks para este repositório. pulls.push_rejected_summary=Mensagem completa da rejeição +pulls.push_rejected_no_message=O merge falhou: O push foi rejeitado mas não houve mensagem remota.
Revise os Git Hooks para este repositório pulls.open_unmerged_pull_exists=`Não é possível executar uma operação de reabertura pois há um pull request pendente (#%d) com propriedades idênticas.` pulls.status_checking=Algumas verificações estão pendentes pulls.status_checks_success=Todas as verificações foram bem sucedidas @@ -1353,14 +1577,17 @@ pulls.status_checks_failure=Algumas verificações falharam pulls.status_checks_error=Algumas verificações reportaram erros pulls.status_checks_requested=Obrigatário pulls.status_checks_details=Detalhes -pulls.update_branch_success=Atualização da branch foi bem-sucedida -pulls.update_not_allowed=Você não tem permissão para atualizar a branch -pulls.outdated_with_base_branch=Esta branch está desatualizado com a branch base +pulls.update_branch=Atualizar branch por merge +pulls.update_branch_rebase=Atualizar branch por rebase +pulls.update_branch_success=Atualização do branch foi bem-sucedida +pulls.update_not_allowed=Você não tem permissão para atualizar o branch +pulls.outdated_with_base_branch=Este branch está desatualizado com o branch base pulls.closed_at=`fechou este pull request %[2]s` pulls.reopened_at=`reabriu este pull request %[2]s` pulls.merge_instruction_hint=`Você também pode ver as instruções para a linha de comandos.` - +pulls.merge_instruction_step1_desc=No repositório do seu projeto, crie um novo branch e teste as alterações. pulls.merge_instruction_step2_desc=Faça merge das alterações e atualize no Gitea. +pulls.merge_pull_on_success_cancel=Cancelar merge automático milestones.new=Novo marco milestones.open_tab=%d Aberto @@ -1406,7 +1633,9 @@ signing.wont_sign.basesigned=O merge não será assinada porque o commit base n signing.wont_sign.headsigned=O merge não será assinado porque o commit principal não foi assinado signing.wont_sign.commitssigned=O merge não será assinado pois todos os commits associados não foram assinados signing.wont_sign.approved=O merge não será assinado pois o PR não foi aprovado +signing.wont_sign.not_signed_in=Você não está logado +ext_wiki=Acesso a Wiki Externo ext_wiki.desc=Link para uma wiki externa. wiki=Wiki @@ -1431,6 +1660,7 @@ wiki.page_already_exists=Uma página de wiki com o mesmo nome já existe. wiki.reserved_page=O nome da página wiki '%s' está reservada. wiki.pages=Páginas wiki.last_updated=Última atualização %s +wiki.page_name_desc=Digite um nome para esta página Wiki. Alguns nomes especiais são: 'Home', '_Sidebar' e '_Footer'. activity=Atividade activity.period.filter_label=Período: @@ -1462,6 +1692,7 @@ activity.closed_issues_count_1=Issue fechada activity.closed_issues_count_n=Issues fechadas activity.title.issues_1=+%d Issue activity.title.issues_n=+%d Issues +activity.title.issues_closed_from=%s fechada por %s activity.title.issues_created_by=%s criada por %s activity.closed_issue_label=Fechado activity.new_issues_count_1=Nova issue @@ -1483,9 +1714,9 @@ activity.git_stats_pushed_1=realizou push de activity.git_stats_pushed_n=realizaram push de activity.git_stats_commit_1=%d commit activity.git_stats_commit_n=%d commits -activity.git_stats_push_to_branch=para a %s e -activity.git_stats_push_to_all_branches=para todas as branches. -activity.git_stats_on_default_branch=Na %s, +activity.git_stats_push_to_branch=para o %s e +activity.git_stats_push_to_all_branches=para todos os branches. 
+activity.git_stats_on_default_branch=No %s, activity.git_stats_file_1=%d arquivo activity.git_stats_file_n=%d arquivos activity.git_stats_files_changed_1=foi modificado @@ -1500,7 +1731,10 @@ activity.git_stats_deletion_n=%d exclusões search=Pesquisar search.search_repo=Pesquisar no repositório... search.fuzzy=Aproximada +search.match=Corresponde search.results=Resultados da pesquisa para "%s" em %s +search.code_no_results=Nenhum código-fonte correspondente ao seu termo de pesquisa foi encontrado. +search.code_search_unavailable=A pesquisa por código não está disponível no momento. Entre em contato com o administrador do site. settings=Configurações settings.desc=Opções é onde você pode gerenciar as configurações para o repositório @@ -1515,7 +1749,15 @@ settings.hooks=Webhooks settings.githooks=Hooks do Git settings.basic_settings=Configurações básicas settings.mirror_settings=Opções de espelhamento +settings.mirror_settings.docs=Configure seu projeto para fazer push e/ou pull de alterações automaticamente para outro repositório. Branches, tags e commits serão sincronizados automaticamente. Como espelhar repositórios? settings.mirror_settings.mirrored_repository=Repositório espelhado +settings.mirror_settings.direction=Sentido +settings.mirror_settings.direction.pull=Pull +settings.mirror_settings.direction.push=Push +settings.mirror_settings.last_update=Última atualização +settings.mirror_settings.push_mirror.none=Nenhum espelhamento de push configurado +settings.mirror_settings.push_mirror.remote_url=URL do repositório do Git remoto +settings.mirror_settings.push_mirror.add=Adicionar Espelho de Push settings.sync_mirror=Sincronizar agora settings.mirror_sync_in_progress=Sincronização do espelhamento está em andamento. Verifique novamente em um minuto. settings.email_notifications.enable=Habilitar notificações de e-mail @@ -1524,6 +1766,7 @@ settings.email_notifications.disable=Desabilitar notificações de e-mail settings.email_notifications.submit=Atualizar preferências de e-mail settings.site=Site settings.update_settings=Atualizar configurações +settings.branches.update_default_branch=Atualizar Branch Padrão settings.advanced_settings=Configurações avançadas settings.wiki_desc=Habilitar a wiki do repositório settings.use_internal_wiki=Usar a wiki nativa @@ -1543,7 +1786,7 @@ settings.tracker_issue_style=Formato de número do issue tracker externo settings.tracker_issue_style.numeric=Numérico settings.tracker_issue_style.alphanumeric=Alfanumérico settings.tracker_url_format_desc=Use os espaços reservados {user}, {repo} e {index} para o nome de usuário, nome do repositório e o índice de problemas. 
-settings.enable_timetracker=Habilitar contador de tempo +settings.enable_timetracker=Habilitar Cronômetro settings.allow_only_contributors_to_track_time=Permitir que apenas os colaboradores acompanhem o contador de tempo settings.pulls_desc=Habilitar pull requests no repositório settings.pulls.ignore_whitespace=Ignorar espaço em branco em conflitos @@ -1551,8 +1794,20 @@ settings.pulls.allow_merge_commits=Habilitar commit no merge settings.pulls.allow_rebase_merge=Habilitar Rebasing em commits via merge settings.pulls.allow_rebase_merge_commit=Habilitar Rebasing com commits explícitos no merge (--no-ff) settings.pulls.allow_squash_commits=Habilitar Squashing em commits via merge +settings.pulls.allow_manual_merge=Habilitar Marcar PR como aplicado manualmente +settings.pulls.enable_autodetect_manual_merge=Habilitar a detecção automática de merge manual (Nota: Em alguns casos especiais, podem ocorrer julgamentos errados) +settings.pulls.allow_rebase_update=Ativar atualização do branch do pull request por rebase +settings.pulls.default_delete_branch_after_merge=Excluir o branch de pull request após o merge por padrão +settings.packages_desc=Habilitar Registro de Pacotes de Repositório +settings.projects_desc=Habilitar Projetos do Repositório settings.admin_settings=Configurações do administrador settings.admin_enable_health_check=Habilitar verificações de integridade (git fsck) no repositório +settings.admin_code_indexer=Indexador de código +settings.admin_stats_indexer=Indexador de Estatísticas do Código +settings.admin_indexer_commit_sha=Último SHA indexado +settings.admin_indexer_unindexed=Não indexado +settings.reindex_button=Adicionar à fila de reindexação +settings.reindex_requested=Reindexação requisitada settings.admin_enable_close_issues_via_commit_in_any_branch=Fechar issue via commit em um branch não padrão settings.danger_zone=Zona de perigo settings.new_owner_has_same_repo=O novo proprietário já tem um repositório com o mesmo nome. Por favor, escolha outro nome. @@ -1561,13 +1816,40 @@ settings.convert_desc=Você pode converter este espelhamento em um repositório settings.convert_notices_1=Esta operação vai converter este espelhamento em um repositório tradicional. Esta ação não pode ser desfeita. settings.convert_confirm=Converter o repositório settings.convert_succeed=O espelhamento foi convertido em um repositório tradicional. +settings.convert_fork=Converter Para Um Repositório Normal +settings.convert_fork_desc=Você pode converter este fork em um repositório normal. Esta ação não pode ser desfeita. +settings.convert_fork_notices_1=Esta operação irá converter o fork em um repositório normal e não pode ser desfeita. +settings.convert_fork_confirm=Converter repositório +settings.convert_fork_succeed=O fork foi convertido em um repositório normal. settings.transfer=Transferir propriedade +settings.transfer.rejected=A transferência do repositório foi rejeitada. +settings.transfer.success=A transferência do repositório foi bem sucedida. +settings.transfer_abort=Cancelar transferência +settings.transfer_abort_invalid=Não é possível cancelar uma transferência de repositório não existente. +settings.transfer_abort_success=A transferência de repositório para %s foi cancelada com sucesso. settings.transfer_desc=Transferir este repositório para outro usuário ou para uma organização onde você tem direitos de administrador. settings.transfer_form_title=Digite o nome do repositório para confirmar: +settings.transfer_in_progress=Há uma transferência em andamento. 
Por favor, cancele se você gostaria de transferir este repositório para outro usuário. settings.transfer_notices_1=- Você perderá o acesso ao repositório se transferir para um usuário individual. settings.transfer_notices_2=- Você manterá acesso ao repositório se transferi-lo para uma organização que você também é proprietário. +settings.transfer_notices_3=- Se o repositório for privado e for transferido para um usuário individual, esta ação certifica que o usuário tem pelo menos permissão de leitura (e altera as permissões se necessário). settings.transfer_owner=Novo proprietário +settings.transfer_perform=Executar Transferência +settings.transfer_started=Este repositório foi marcado para transferência e aguarda a confirmação de "%s" settings.transfer_succeed=O repositório foi transferido. +settings.signing_settings=Configurações de Verificação de Assinatura +settings.trust_model=Modelo de Confiança na Assinatura +settings.trust_model.default=Modelo Padrão de Confiança +settings.trust_model.default.desc=Use o modelo de confiança de repositório padrão para esta instalação. +settings.trust_model.collaborator=Colaborador +settings.trust_model.collaborator.long=Colaborador: Confiar em assinaturas feitas por colaboradores +settings.trust_model.collaborator.desc=Assinaturas válidas dos colaboradores deste repositório serão marcadas como "confiáveis" - (quer correspondam ao autor do commit ou não). Caso contrário, assinaturas válidas serão marcadas como "não confiáveis" se a assinatura corresponder ao autor do submissão e "não corresponde" se não corresponder. +settings.trust_model.committer=Committer +settings.trust_model.committer.long=Committer: Confiar nas assinaturas que correspondam aos committers (isso corresponde ao GitHub e forçará commits assinados pelo Gitea a ter o Gitea como o committer) +settings.trust_model.committer.desc=Assinaturas válidas só serão marcadas como "confiáveis" se corresponderem ao committer, caso contrário serão marcadas como "não correspondidas". Isso forçará o Gitea a ser o commiter nos commits assinados, com o autor real marcado como Co-authored-by: e Co-commited-by: no final do commit. A chave padrão do Gitea tem que corresponder a um usuário no banco de dados. +settings.trust_model.collaboratorcommitter=Colaborador+Commiter +settings.trust_model.collaboratorcommitter.long=Colaborador+Committer: Confiar na assinatura dos colaboradores que correspondem ao autor do commit +settings.trust_model.collaboratorcommitter.desc=Assinaturas válidas dos colaboradores deste repositório serão marcadas como "confiáveis" se corresponderem ao autor do commit. Caso contrário, as assinaturas válidas serão marcadas como "não confiáveis" se a assinatura corresponder ao autor do commit e "não corresponde" caso contrário. Isso forçará o Gitea a ser marcado como o autor do commit nos commits assinados com o autor marcado como Co-Authored-By: e o Committed-By: resumo do commit. A chave padrão do Gitea tem que corresponder a um usuário no banco de dados. settings.wiki_delete=Excluir dados da wiki settings.wiki_delete_desc=A exclusão de dados da wiki é permanente e não pode ser desfeita. settings.wiki_delete_notices_1=- Isso excluirá e desabilitará permanentemente a wiki do repositório %s. @@ -1593,8 +1875,13 @@ settings.search_user_placeholder=Pesquisar usuário... settings.org_not_allowed_to_be_collaborator=Organizações não podem ser adicionadas como um colaborador. 
settings.change_team_access_not_allowed=Alteração do acesso da equipe para o repositório está restrito ao proprietário da organização settings.team_not_in_organization=A equipe não está na mesma organização que o repositório +settings.teams=Equipes +settings.add_team=Adicionar Equipe settings.add_team_duplicate=A equipe já tem o repositório settings.add_team_success=A equipe agora tem acesso ao repositório. +settings.search_team=Pesquisar Equipe… +settings.change_team_permission_tip=A permissão da equipe está definida na página de configurações da equipe e não pode ser alterada por repositório +settings.delete_team_tip=Esta equipe tem acesso a todos os repositórios e não pode ser removida settings.remove_team_success=O acesso da equipe ao repositório foi removido. settings.add_webhook=Adicionar webhook settings.add_webhook.invalid_channel_name=Nome do canal no webhook não pode estar em branco e não pode conter somente o caractere #. @@ -1609,6 +1896,9 @@ settings.webhook.response=Resposta settings.webhook.headers=Cabeçalhos settings.webhook.payload=Conteúdo settings.webhook.body=Corpo +settings.webhook.replay.description=Executar novamente esse webhook. +settings.webhook.delivery.success=Um evento foi adicionado à fila de envio. Pode levar alguns segundos até que ele apareça no histórico de envio. +settings.githooks_desc=Hooks do Git são executados pelo próprio Git. Você pode editar arquivos de hook abaixo para configurar operações personalizadas. settings.githook_edit_desc=Se o hook não estiver ativo, o conteúdo de exemplo será apresentado. Deixar o conteúdo em branco irá desabilitar esse hook. settings.githook_name=Nome do Hook settings.githook_content=Conteúdo do Hook @@ -1627,21 +1917,49 @@ settings.event_desc=Acionado em: settings.event_push_only=Eventos de push settings.event_send_everything=Todos os eventos settings.event_choose=Eventos personalizados... +settings.event_header_repository=Eventos do Repositório settings.event_create=Criar settings.event_create_desc=Branch ou tag criado. settings.event_delete=Excluir +settings.event_delete_desc=Branch ou tag deletado. settings.event_fork=Fork +settings.event_fork_desc=Feito fork do repositório. settings.event_release=Versão settings.event_release_desc=Versão publicada, atualizada ou excluída em um repositório. settings.event_push=Push settings.event_push_desc=Git push para o repositório. settings.event_repository=Repositório settings.event_repository_desc=Repositório criado ou excluído. +settings.event_header_issue=Eventos da Issue settings.event_issues=Issues +settings.event_issues_desc=Issue aberta, fechada, reaberta ou editada. +settings.event_issue_assign=Issue Atribuída +settings.event_issue_assign_desc=Issue atribuída ou não atribuída. +settings.event_issue_label=Issue Rotulada +settings.event_issue_label_desc=Rótulos da issue atualizados ou removidos. +settings.event_issue_milestone=Marco Atribuído à Issue +settings.event_issue_milestone_desc=Marco atribuído ou desatribuído à Issue. settings.event_issue_comment=Comentário da issue settings.event_issue_comment_desc=Comentário da issue criado, editado ou excluído. +settings.event_header_pull_request=Eventos de Pull Request settings.event_pull_request=Pull request +settings.event_pull_request_desc=Pull request aberto, fechado, reaberto ou editado. +settings.event_pull_request_assign=Pull Request Atribuído +settings.event_pull_request_assign_desc=Pull request atribuído ou desatribuído. 
+settings.event_pull_request_label=Pull Request Rotulado +settings.event_pull_request_label_desc=Rótulos do pull request atualizados ou limpos. +settings.event_pull_request_milestone=Marco Atribuído ao Pull Request +settings.event_pull_request_milestone_desc=Marco atribuído ou desatribuído ao pull request. +settings.event_pull_request_comment=Comentário no Pull Request +settings.event_pull_request_comment_desc=Comentário criado, editado ou excluído no pull request. +settings.event_pull_request_review=Pull Request Revisado +settings.event_pull_request_review_desc=Pull request aprovado, rejeitado ou revisão comentada. +settings.event_pull_request_sync=Pull Request Sincronizado +settings.event_pull_request_sync_desc=Pull request sincronizado. +settings.event_package=Pacote +settings.event_package_desc=Pacote criado ou excluído em um repositório. settings.branch_filter=Filtro de branch +settings.branch_filter_desc=Lista dos branches a serem considerados nos eventos push, criação de branch e exclusão de branch, especificados como padrão glob. Se estiver vazio ou for *, eventos para todos os branches serão relatados. Veja github.com/gobwas/glob documentação da sintaxe. Exemplos: master, {master,release*}. settings.active=Ativo settings.active_helper=Informações sobre eventos disparados serão enviadas para esta URL do webhook. settings.add_hook_success=O webhook foi adicionado. @@ -1653,6 +1971,23 @@ settings.hook_type=Tipo de Hook settings.slack_token=Token settings.slack_domain=Domínio settings.slack_channel=Canal +settings.add_web_hook_desc=Integrar %s no seu repositório. +settings.web_hook_name_gitea=Gitea +settings.web_hook_name_gogs=Gogs +settings.web_hook_name_slack=Slack +settings.web_hook_name_discord=Discord +settings.web_hook_name_dingtalk=DingTalk +settings.web_hook_name_telegram=Telegram +settings.web_hook_name_matrix=Matrix +settings.web_hook_name_msteams=Microsoft Teams +settings.web_hook_name_feishu_or_larksuite=Feishu / Lark Suite +settings.web_hook_name_feishu=Feishu +settings.web_hook_name_larksuite=Lark Suite +settings.web_hook_name_wechatwork=WeCom (Wechat Work) +settings.web_hook_name_packagist=Packagist +settings.packagist_username=Nome de usuário no Packagist +settings.packagist_api_token=Token de API +settings.packagist_package_url=URL do pacote do Packagist settings.deploy_keys=Chaves de Deploy settings.add_deploy_key=Nova chave settings.deploy_key_desc=As chaves de deploy possuem somente acesso de leitura (pull) ao repositório. @@ -1672,8 +2007,8 @@ settings.protected_branch=Proteção de Branch settings.protected_branch_can_push=Permitir push? settings.protected_branch_can_push_yes=Você pode fazer push settings.protected_branch_can_push_no=Você não pode fazer push -settings.branch_protection=Proteção de branch para '%s' -settings.protect_this_branch=Habilitar proteção de branch +settings.branch_protection=Proteção de Branch para '%s' +settings.protect_this_branch=Habilitar Proteção de Branch settings.protect_this_branch_desc=Previne a exclusão e restringe o merge e push para o branch. settings.protect_disable_push=Desabilitar push settings.protect_disable_push_desc=Nenhum push será permitido neste branch. @@ -1681,6 +2016,7 @@ settings.protect_enable_push=Habilitar push settings.protect_enable_push_desc=Qualquer pessoa com acesso de escrita terá permissão para realizar push neste branch (mas não forçar o push). 
settings.protect_whitelist_committers=Lista permitida para push settings.protect_whitelist_committers_desc=Somente usuários ou equipes da lista permitida serão autorizados realizar push neste branch (mas não forçar o push). +settings.protect_whitelist_deploy_keys=Dar permissão às chaves de deploy com acesso de gravação para push. settings.protect_whitelist_users=Usuários com permissão para realizar push: settings.protect_whitelist_search_users=Pesquisar usuários... settings.protect_whitelist_teams=Equipes com permissão para realizar push: @@ -1690,6 +2026,7 @@ settings.protect_merge_whitelist_committers_desc=Permitir que determinados usuá settings.protect_merge_whitelist_users=Usuários com permissão para aplicar merge: settings.protect_merge_whitelist_teams=Equipes com permissão para aplicar merge: settings.protect_check_status_contexts=Habilitar verificação de status +settings.protect_check_status_contexts_desc=Exigir que as verificações de status passem antes de fazer merge. Escolha quais verificações de status devem passar antes que os branches possam ter o merge aplicado em um branch que corresponda a esta regra. Quando habilitado, os commits devem primeiro ser enviados para outro branch, então faça merge ou push diretamente para um branch que corresponde a esta regra após a verificação de status ter passado. Se nenhum contexto for selecionado, o último commit deve ser bem sucedido, independentemente do contexto. settings.protect_check_status_contexts_list=Verificações de status encontradas na última semana para este repositório settings.protect_required_approvals=Aprovações necessárias: settings.protect_required_approvals_desc=Permite apenas realizar merge do pull request com avaliações positivas suficientes. @@ -1700,6 +2037,9 @@ settings.protect_approvals_whitelist_teams=Equipes com permissão de revisão: settings.dismiss_stale_approvals=Descartar aprovações obsoletas settings.dismiss_stale_approvals_desc=Quando novos commits que mudam o conteúdo do pull request são enviados para o branch, as antigas aprovações serão descartadas. settings.require_signed_commits=Exibir commits assinados +settings.require_signed_commits_desc=Rejeitar pushes para este branch se não estiverem assinados ou não forem validáveis. +settings.protect_protected_file_patterns=Padrões de arquivos protegidos (separados usando ponto e vírgula '\;'): +settings.protect_protected_file_patterns_desc=Arquivos protegidos que não têm permissão para serem alterados diretamente, mesmo se o usuário tiver permissão para adicionar, editar ou apagar arquivos neste branch. Vários padrões podem ser separados usando ponto e vírgula ('\;'). Veja github.com/gobwas/glob documentação para sintaxe de padrões. Exemplos: .drone.yml, /docs/**/*.txt. settings.protect_unprotected_file_patterns=Padrões de arquivos desprotegidos (separados usando ponto e vírgula '\;'): settings.protect_unprotected_file_patterns_desc=Arquivos não protegidos que podem ser alterados diretamente se o usuário tiver acesso de gravação, ignorando as restrições de push. Vários padrões podem ser separados usando ponto e vírgula ('\;'). Veja github.com/gobwas/glob documentação para sintaxe de padrões. Exemplos: .drone.yml, /docs/**/*.txt. settings.add_protected_branch=Habilitar proteção @@ -1710,13 +2050,32 @@ settings.protected_branch_deletion=Desabilitar proteção de branch settings.protected_branch_deletion_desc=Desabilitar a proteção de branch permite que os usuários com permissão de escrita realizem push. Continuar? 
settings.block_rejected_reviews=Bloquear merge em revisões rejeitadas settings.block_rejected_reviews_desc=O merge não será possível quando são solicitadas alterações pelos revisores oficiais, mesmo que haja aprovação suficiente. +settings.block_on_official_review_requests=Bloquear merge em solicitações de revisão oficiais +settings.block_on_official_review_requests_desc=O merge não será possível quando tiver pedidos de revisão oficiais, mesmo que haja aprovações suficientes. +settings.block_outdated_branch=Bloquear o merge se o pull request estiver desatualizado +settings.block_outdated_branch_desc=O merge não será possível quando o branch de topo estiver atrás do branch base. settings.default_branch_desc=Selecione um branch padrão para pull requests e commits de código: +settings.default_merge_style_desc=Estilo de merge padrão para pull requests: settings.choose_branch=Escolha um branch... settings.no_protected_branch=Não há branches protegidos. settings.edit_protected_branch=Editar settings.protected_branch_required_approvals_min=Aprovações necessárias não podem ser negativas. +settings.tags=Tags +settings.tags.protection=Proteção das Tags +settings.tags.protection.pattern=Padrão de Tag +settings.tags.protection.allowed=Permitido +settings.tags.protection.allowed.users=Usuários permitidos +settings.tags.protection.allowed.teams=Equipes permitidas +settings.tags.protection.allowed.noone=Ninguém +settings.tags.protection.create=Proteger tag +settings.tags.protection.none=Não há tags protegidas. +settings.tags.protection.pattern.description=Você pode usar um só nome ou um padrão glob ou uma expressão regular para corresponder a várias tags. Para mais informações leia o guia das tags protegidas. settings.bot_token=Token do Bot settings.chat_id=ID do Chat +settings.matrix.homeserver_url=URL do Homeserver +settings.matrix.room_id=ID da Sala +settings.matrix.access_token=Token de Acesso +settings.matrix.message_type=Tipo de Mensagem settings.archive.button=Arquivar repositório settings.archive.header=Arquivar este repositório settings.archive.text=Arquivando um repositório fará com que o mesmo fique inteiramente em modo somente leitura. Ele ficará oculto no painel, não poderá receber commits e nem será possível criar issues e pull requests. @@ -1724,6 +2083,7 @@ settings.archive.success=O repositório foi arquivado com sucesso. settings.archive.error=Um erro ocorreu enquanto estava sendo arquivado o repositório. Veja o log para mais detalhes. settings.archive.error_ismirror=Você não pode arquivar um repositório espelhado. settings.archive.branchsettings_unavailable=Configurações do branch não estão disponíveis quando o repositório está arquivado. +settings.archive.tagsettings_unavailable=As configurações de tag não estão disponíveis se o repositório estiver arquivado. settings.unarchive.button=Desarquivar repositório settings.unarchive.header=Desarquivar este repositório settings.unarchive.text=Desarquivando um repositório irá restaurar a capacidade do mesmo receber commits, pushs, assim como novas issues e pull requests. @@ -1779,7 +2139,9 @@ diff.whitespace_ignore_all_whitespace=Ignorar todas as alterações de espaço e diff.whitespace_ignore_amount_changes=Ignorar alterações na quantidade de espaço em branco diff.whitespace_ignore_at_eol=Ignorar alterações com espaço em branco no final da linha diff.stats_desc= %d arquivos alterados com %d adições e %d exclusões +diff.stats_desc_file=%d alterações: %d adições e %d exclusões diff.bin=BIN +diff.bin_not_shown=Arquivo binário não exibido. 
diff.view_file=Ver arquivo diff.file_before=Antes diff.file_after=Depois @@ -1787,6 +2149,7 @@ diff.file_image_width=Largura diff.file_image_height=Altura diff.file_byte_size=Tamanho diff.file_suppressed=Diferenças do arquivo suprimidas por serem muito extensas +diff.file_suppressed_line_too_long=Diff do arquivo suprimido porque uma ou mais linhas são muito longas diff.too_many_files=Alguns arquivos não foram exibidos porque demasiados arquivos foram alterados neste diff diff.show_more=Mostrar mais diff.load=Carregar Diff @@ -1804,14 +2167,25 @@ diff.review.placeholder=Comentário da revisão diff.review.comment=Comentar diff.review.approve=Aprovar diff.review.reject=Solicitar alterações +diff.committed_by=commit de +diff.protected=Protegido +diff.image.side_by_side=Lado a Lado +diff.image.swipe=Deslizar +diff.image.overlay=Sobrepor +diff.has_escaped=Essa linha tem caracteres Unicode ocultos releases.desc=Acompanhe as versões e downloads do projeto. release.releases=Versões +release.detail=Detalhes da versão +release.tags=Tags release.new_release=Nova versão release.draft=Rascunho release.prerelease=Versão prévia release.stable=Estável +release.compare=Comparar release.edit=editar +release.ahead.commits=%d commits +release.ahead.target=para %s desde esta versão release.source_code=Código fonte release.new_subheader=Lançamentos organizam versões do projeto. release.edit_subheader=Lançamentos organizam versões do projeto. @@ -1827,46 +2201,68 @@ release.publish=Publicar versão release.save_draft=Salvar rascunho release.edit_release=Atualizar versão release.delete_release=Excluir versão +release.delete_tag=Apagar Tag release.deletion=Excluir versão +release.deletion_desc=A exclusão de uma versão só a remove do Gitea. As tags do Git, conteúdo e histórico do repositório permanecem inalterados. Continuar? release.deletion_success=A versão foi excluída. +release.deletion_tag_desc=A tag será excluída do repositório. Conteúdo do repositório e histórico permanecerão inalterados. Continuar? +release.deletion_tag_success=A tag foi excluída. release.tag_name_already_exist=Uma versão com esse nome de tag já existe. release.tag_name_invalid=O nome da tag não é válido. +release.tag_name_protected=O nome da tag está protegido. +release.tag_already_exist=Este nome de tag já existe. release.downloads=Downloads release.download_count=Downloads: %s +release.add_tag_msg=Use o título e o conteúdo do lançamento como mensagem da tag. +release.add_tag=Criar apenas a tag -branch.name=Nome da branch +branch.name=Nome do Branch branch.search=Pesquisar branches -branch.already_exists=Uma branch com o nome %s já existe. +branch.already_exists=Um branch com o nome %s já existe. branch.delete_head=Excluir -branch.delete=Excluir branch '%s' +branch.delete=Excluir Branch '%s' branch.delete_html=Excluir Branch -branch.delete_desc=A exclusão de uma branch é permanente. Isto NÃO PODERÁ ser desfeito. Continuar? -branch.deletion_success=A branch '%s' foi excluída. -branch.deletion_failed=Falha ao excluir a branch %s. -branch.delete_branch_has_new_commits=A branch %s não pode ser excluída porque há novos commits após o merge. +branch.delete_desc=A exclusão de um branch é permanente. Isto NÃO PODERÁ ser desfeito. Continuar? +branch.deletion_success=O branch '%s' foi excluído. +branch.deletion_failed=Falha ao excluir o branch %s. +branch.delete_branch_has_new_commits=O branch %s não pode ser excluído porque há novos commits após o merge. 
branch.create_branch=Criar branch %s branch.create_from=de %s -branch.create_success=A branch '%s' foi criada. +branch.create_success=O branch '%s' foi criado. branch.branch_already_exists=Branch '%s' já existe neste repositório. -branch.branch_name_conflict=O nome da branch '%s' está em conflito com a branch '%s'. -branch.tag_collision=A branch '%s' não pode ser criada como tag com o mesmo nome já existente neste repositório. +branch.branch_name_conflict=O nome do branch '%s' está em conflito com o branch '%s'. +branch.tag_collision=O branch '%s' não pode ser criado como tag com o mesmo nome já existente neste repositório. branch.deleted_by=Excluído por %s -branch.restore_success=A branch '%s' foi restaurada. -branch.restore_failed=Falha ao restaurar a branch %s. -branch.protected_deletion_failed=A branch '%s' está protegida. Ela não pode ser excluída. -branch.restore=Restaurar branch '%s' -branch.download=Baixar branch '%s' +branch.restore_success=O branch '%s' foi restaurado. +branch.restore_failed=Falha ao restaurar o branch %s. +branch.protected_deletion_failed=O branch '%s' está protegido. Ele não pode ser excluído. +branch.default_deletion_failed=Branch '%s' é o branch padrão. Não pode ser excluído. +branch.restore=Restaurar Branch '%s' +branch.download=Baixar Branch '%s' branch.included_desc=Este branch faz parte do branch padrão branch.included=Incluído +branch.create_new_branch=Criar branch a partir do branch: +branch.confirm_create_branch=Criar branch +branch.create_branch_operation=Criar branch +branch.new_branch=Criar novo branch +branch.new_branch_from=Criar novo branch a partir de '%s' branch.renamed=Branch %s foi renomeado para %s. +tag.create_tag=Criar tag %s +tag.create_tag_operation=Criar tag +tag.confirm_create_tag=Criar tag +tag.create_tag_from=Criar tag a partir de '%s' +tag.create_success=Tag '%s' foi criada. topic.manage_topics=Gerenciar Tópicos topic.done=Feito topic.count_prompt=Você não pode selecionar mais de 25 tópicos topic.format_prompt=Os tópicos devem começar com uma letra ou um número, podem incluir traços ('-') e podem ter até 35 caracteres. +error.csv.too_large=Não é possível renderizar este arquivo porque ele é muito grande. +error.csv.unexpected=Não é possível renderizar este arquivo porque ele contém um caractere inesperado na linha %d e coluna %d. +error.csv.invalid_field_count=Não é possível renderizar este arquivo porque ele tem um número errado de campos na linha %d. [org] org_name_holder=Nome da organização @@ -1904,11 +2300,14 @@ settings.repoadminchangeteam=O administrador do repositório pode adicionar e re settings.visibility=Visibilidade settings.visibility.public=Pública settings.visibility.limited=Limitada (Visível apenas para usuários registrados) +settings.visibility.limited_shortname=Limitado settings.visibility.private=Privada (Visível apenas para membros da organização) +settings.visibility.private_shortname=Privado settings.update_settings=Atualizar Configurações settings.update_setting_success=Configurações da organização foram atualizadas. settings.change_orgname_prompt=Nota: alterar o nome da organização também altera a URL da mesma. +settings.change_orgname_redirect_prompt=O nome antigo irá redirecionar até que seja reivindicado. settings.update_avatar_success=O avatar da organização foi atualizado. settings.delete=Excluir organização settings.delete_account=Excluir esta organização @@ -1918,6 +2317,7 @@ settings.delete_org_title=Excluir organização settings.delete_org_desc=Essa organização será excluída permanentemente. 
Continuar? settings.hooks_desc=Adicionar Webhooks que serão acionados para todos os repositórios desta organização. +settings.labels_desc=Adicionar rótulos que possam ser usadas em issues para todos os repositórios desta organização. members.membership_visibility=Visibilidade da associação: members.public=Público @@ -1939,7 +2339,13 @@ teams.leave=Deixar teams.leave.detail=Sair de %s? teams.can_create_org_repo=Criar repositórios teams.can_create_org_repo_helper=Membros podem criar novos repositórios na organização. O criador terá acesso administrativo ao novo repositório. +teams.none_access=Sem Acesso +teams.none_access_helper=Os membros não podem ver ou fazer qualquer outra ação nesta unidade. +teams.general_access=Acesso Geral +teams.general_access_helper=As permissões dos membros serão decididas pela tabela de permissões abaixo. +teams.read_access=Leitura teams.read_access_helper=Os membros podem ver e clonar os repositórios da equipe. +teams.write_access=Escrita teams.write_access_helper=Os membros podem ler e realizar push para os repositórios da equipe. teams.admin_access=Acesso de administrador teams.admin_access_helper=Os membros podem realizar pull e push em repositórios da equipe e adicionar colaboradores a eles. @@ -1980,7 +2386,9 @@ dashboard=Painel users=Contas de usuário organizations=Organizações repositories=Repositórios +hooks=Webhooks authentication=Fontes de autenticação +emails=E-mails do Usuário config=Configuração notices=Avisos do sistema monitor=Monitoramento @@ -1988,6 +2396,7 @@ first_page=Primeira last_page=Última total=Total: %d +dashboard.new_version_hint=Gitea %s está disponível, você está executando %s. Verifique o blog para mais detalhes. dashboard.statistic=Resumo dashboard.operations=Operações de manutenção dashboard.system_status=Status do sistema @@ -1997,17 +2406,39 @@ dashboard.operation_switch=Trocar dashboard.operation_run=Executar dashboard.clean_unbind_oauth=Limpar conexões OAuth não vinculadas dashboard.clean_unbind_oauth_success=Todas as conexões de OAuth não vinculadas foram excluídas. +dashboard.task.started=Tarefa Iniciada: %[1]s dashboard.task.process=Tarefa: %[1]s +dashboard.task.cancelled=Tarefa: %[1]s cancelada: %[3]s +dashboard.task.error=Erro na Tarefa: %[1]: %[3]s +dashboard.task.finished=Tarefa: %[1]s iniciada por %[2]s foi finalizada dashboard.task.unknown=Tarefa desconhecida: %[1]s +dashboard.cron.started=Cron Iniciado: %[1]s +dashboard.cron.process=Cron: %[1]s +dashboard.cron.cancelled=Cron: %s cancelado: %[3]s +dashboard.cron.error=Erro no Cron: %s: %[3]s +dashboard.cron.finished=Cron: %[1]s terminou dashboard.delete_inactive_accounts=Excluir todas as contas não ativadas dashboard.delete_inactive_accounts.started=A tarefa de apagar todas as contas não ativadas foi iniciada. +dashboard.delete_repo_archives=Excluir todos os arquivos dos repositórios (ZIP, TAR.GZ, etc..) +dashboard.delete_repo_archives.started=A tarefa de remover todos os arquivos foi iniciada. dashboard.delete_missing_repos=Excluir todos os repositórios que não possuem seus arquivos Git +dashboard.delete_missing_repos.started=Foi iniciada a tarefa de excluir todos os repositórios que não têm arquivos Git. 
dashboard.delete_generated_repository_avatars=Excluir avatares gerados do repositório dashboard.update_mirrors=Atualizar espelhamentos +dashboard.repo_health_check=Verificar estado de saúde de todos os repositórios +dashboard.check_repo_stats=Verificar estatísticas de todos os repositórios +dashboard.archive_cleanup=Apagar arquivos antigos de repositório +dashboard.deleted_branches_cleanup=Realizar limpeza de branches apagados dashboard.git_gc_repos=Coleta de lixo em todos os repositórios +dashboard.resync_all_sshkeys=Atualizar o arquivo '.ssh/authorized_keys' com as chaves SSH do Gitea. +dashboard.resync_all_sshkeys.desc=(Não necessário para o servidor SSH embutido.) +dashboard.resync_all_sshprincipals=Atualizar o arquivo '.ssh/authorized_principals' com os diretores do Gitea SSH. +dashboard.resync_all_sshprincipals.desc=(Não necessário para o servidor SSH embutido.) dashboard.resync_all_hooks=Ressincronizar hooks pre-receive, update e post-receive de todos os repositórios. dashboard.reinit_missing_repos=Reinicializar todos os repositórios Git perdidos cujos registros existem dashboard.sync_external_users=Sincronizar dados de usuário externo +dashboard.cleanup_hook_task_table=Limpar tabela hook_task +dashboard.cleanup_packages=Limpar pacotes expirados dashboard.server_uptime=Tempo de atividade do Servidor dashboard.current_goroutine=Goroutines Atuais dashboard.current_memory_usage=Uso de memória atual @@ -2037,10 +2468,15 @@ dashboard.total_gc_time=Pausa total do GC dashboard.total_gc_pause=Pausa total do GC dashboard.last_gc_pause=Última pausa do GC dashboard.gc_times=Nº de execuções do GC +dashboard.delete_old_actions=Excluir todas as ações antigas do banco de dados +dashboard.delete_old_actions.started=A exclusão de todas as ações antigas do banco de dados foi iniciada. +dashboard.update_checker=Verificador de atualização +dashboard.delete_old_system_notices=Excluir todos os avisos de sistema antigos do banco de dados users.user_manage_panel=Gerenciamento de conta de usuário users.new_account=Criar conta de usuário users.name=Nome de usuário +users.full_name=Nome Completo users.activated=Ativado users.admin=Administrador users.restricted=Restrito @@ -2065,12 +2501,14 @@ users.prohibit_login=Desabilitar acesso users.is_admin=É administrador users.is_restricted=Está restrito users.allow_git_hook=Pode criar hooks Git +users.allow_git_hook_tooltip=Hooks Git são executados como o usuário do SO que executa Gitea e terá o mesmo nível de acesso ao servidor. Como resultado, os usuários com esse privilégio especial de Hook do Git podem acessar e modificar todos os repositórios do Gitea, bem como o banco de dados usado pelo Gitea. Por conseguinte, podem também obter privilégios de administrador do Gitea. users.allow_import_local=Pode importar repositórios locais users.allow_create_organization=Pode criar organizações users.update_profile=Atualizar conta de usuário users.delete_account=Excluir conta de usuário users.still_own_repo=Este usuário ainda possui um ou mais repositórios. Exclua ou transfira esses repositórios primeiro. users.still_has_org=Este usuário é membro de uma organização. Remova o usuário de qualquer organização primeiro. +users.still_own_packages=Este usuário ainda possui um ou mais pacotes. Exclua esses pacotes primeiro. users.deletion_success=A conta de usuário foi excluída. 
users.reset_2fa=Reinicializar 2FA users.list_status_filter.menu_text=Filtro @@ -2086,7 +2524,14 @@ users.list_status_filter.not_prohibit_login=Permitir login users.list_status_filter.is_2fa_enabled=2FA Ativado users.list_status_filter.not_2fa_enabled=2FA Desativado +emails.email_manage_panel=Gerenciamento de E-mail de Usuário +emails.primary=Principal +emails.activated=Ativado emails.filter_sort.email=E-mail +emails.filter_sort.email_reverse=E-mail (reverso) +emails.filter_sort.name=Nome de Usuário +emails.filter_sort.name_reverse=Nome de Usuário (reverso) +emails.updated=E-mail atualizado emails.not_updated=Falha ao atualizar o endereço de e-mail solicitado: %v emails.duplicate_active=Este endereço de e-mail já está ativo para um usuário diferente. emails.change_email_header=Atualizar Propriedades do E-mail @@ -2099,6 +2544,8 @@ orgs.members=Membros orgs.new_orga=Nova organização repos.repo_manage_panel=Gerenciamento do repositório +repos.unadopted=Repositórios Não Adotados +repos.unadopted.no_more=Não foram encontrados mais repositórios não adotados repos.owner=Proprietário repos.name=Nome repos.private=Privado @@ -2108,7 +2555,26 @@ repos.forks=Forks repos.issues=Issues repos.size=Tamanho +packages.package_manage_panel=Gerenciamento de Pacotes +packages.total_size=Tamanho Total: %s +packages.owner=Proprietário +packages.creator=Criador +packages.name=Nome +packages.version=Versão +packages.type=Tipo +packages.repository=Repositório +packages.size=Tamanho +packages.published=Publicado +defaulthooks=Webhooks Padrões +defaulthooks.desc=Webhooks automaticamente fazem requisições HTTP POST para um servidor quando acionados por determinados eventos do Gitea. Webhooks definidos aqui são os padrões e serão copiados para todos os novos repositórios. Leia mais no guia de webhooks. +defaulthooks.add_webhook=Adicionar Webhook Padrão +defaulthooks.update_webhook=Atualizar Webhook Padrão + +systemhooks=Webhooks do Sistema +systemhooks.desc=Webhooks automaticamente fazem requisições HTTP POST para um servidor quando acionados por determinados eventos do Gitea. Webhooks definidos aqui agirão em todos os repositórios do sistema, então, por favor, considere quaisquer implicações de desempenho que isso possa ter. Leia mais no guia de webhooks. 
+systemhooks.add_webhook=Adicionar Webhook do Sistema +systemhooks.update_webhook=Atualizar Webhook do Sistema auths.auth_manage_panel=Gerenciamento de fonte de autenticação auths.new=Adicionar fonte de autenticação @@ -2140,6 +2606,8 @@ auths.use_paged_search=Use a pesquisa paginada auths.search_page_size=Tamanho da página auths.filter=Filtro de usuário auths.admin_filter=Filtro de administrador +auths.group_attribute_list_users=Atributo do Grupo que Contém a Lista de Usuários +auths.enable_ldap_groups=Habilitar grupos do LDAP auths.ms_ad_sa=Atributos de pesquisa do MS AD auths.smtp_auth=Tipo de autenticação SMTP auths.smtphost=Host SMTP @@ -2155,6 +2623,7 @@ auths.disable_helo=Desativar HELO auths.pam_service_name=Nome de Serviço PAM auths.pam_email_domain=Domínio de e-mail do PAM (opcional) auths.oauth2_provider=Provedor OAuth2 +auths.oauth2_icon_url=URL do Ícone auths.oauth2_clientID=ID do cliente (chave) auths.oauth2_clientSecret=Senha do cliente auths.openIdConnectAutoDiscoveryURL=URL do OpenID Connect Auto Discovery @@ -2165,6 +2634,8 @@ auths.oauth2_profileURL=URL do perfil auths.oauth2_emailURL=URL de e-mail auths.skip_local_two_fa=Pular 2FA local auths.skip_local_two_fa_helper=Deixar desligado significa que os usuários locais com 2FA ligada ainda terão que fazer login com 2FA +auths.oauth2_tenant=Tenant +auths.oauth2_scopes=Escopos Adicionais auths.enable_auto_register=Habilitar cadastro automático auths.sspi_auto_create_users=Criar usuários automaticamente auths.sspi_auto_create_users_helper=Permitir que o método de autenticação SSPI crie automaticamente novas contas para usuários que fazem o login pela primeira vez @@ -2181,6 +2652,7 @@ auths.tips.oauth2.general=Autenticação OAuth2 auths.tips.oauth2.general.tip=Ao cadastrar uma nova autenticação OAuth2, o retorno de chamada/redirecionamento URL deve ser: /user/oauth2//callback auths.tip.oauth2_provider=Provedor OAuth2 auths.tip.bitbucket=Cadastrar um novo consumidor de OAuth em https://bitbucket.org/account/user/ e adicionar a permissão 'Account' - 'Read' +auths.tip.nextcloud=Registre um novo consumidor OAuth em sua instância usando o seguinte menu "Configurações -> Segurança -> Cliente OAuth 2.0" auths.tip.dropbox=Criar um novo aplicativo em https://www.dropbox.com/developers/apps auths.tip.facebook=Cadastrar um novo aplicativo em https://developers.facebook.com/apps e adicionar o produto "Facebook Login" auths.tip.github=Cadastrar um novo aplicativo de OAuth na https://github.com/settings/applications/new @@ -2190,6 +2662,8 @@ auths.tip.openid_connect=Use o OpenID Connect Discovery URL (/.well-kn auths.tip.twitter=Vá em https://dev.twitter.com/apps, crie um aplicativo e certifique-se de que está habilitada a opção “Allow this application to be used to Sign in with Twitter“ auths.tip.discord=Cadastrar um novo aplicativo em https://discordapp.com/developers/applications/me auths.tip.gitea=Cadastrar um novo aplicativo OAuth2. Guia pode ser encontrado em https://docs.gitea.io/en-us/oauth2-provider/ +auths.tip.yandex=Crie um novo aplicativo em https://oauth.yandex.com/client/new. Selecione as seguintes permissões da seção "Yandex.Passport API": "Access to email address", "Access to user avatar" and "Access to username, first name and surname, gender" +auths.tip.mastodon=Insira a URL da instância personalizada do mastodon que você deseja usar para autenticar (ou use o padrão) auths.edit=Editar fonte de autenticação auths.activated=Esta fonte de autenticação está ativada auths.new_success=A autenticação '%s' foi adicionada. 
@@ -2251,6 +2725,7 @@ config.db_path=Caminho config.service_config=Configuração do serviço config.register_email_confirm=Exigir confirmação de e-mail para se cadastrar config.disable_register=Desabilitar auto-cadastro +config.allow_only_internal_registration=Permitir Registro Somente Através do Próprio Gitea config.allow_only_external_registration=Permitir cadastro somente por meio de serviços externos config.enable_openid_signup=Habilitar o auto-cadastro via OpenID config.enable_openid_signin=Habilitar acesso via OpenID @@ -2263,8 +2738,8 @@ config.active_code_lives=Ativar Code Lives config.reset_password_code_lives=Tempo de expiração do código de recuperação de conta config.default_keep_email_private=Ocultar endereços de e-mail por padrão config.default_allow_create_organization=Permitir a criação de organizações por padrão -config.enable_timetracking=Habilitar contador de tempo -config.default_enable_timetracking=Habilitar o contador de tempo por padrão +config.enable_timetracking=Habilitar Cronômetro +config.default_enable_timetracking=Habilitar o Cronômetro por Padrão config.default_allow_only_contributors_to_track_time=Permitir que apenas os colaboradores acompanhem o contador de tempo config.no_reply_address=Ocultar domínio de e-mail config.default_visibility_organization=Visibilidade padrão para novas organizações @@ -2285,6 +2760,7 @@ config.mailer_use_sendmail=Usar o Sendmail config.mailer_sendmail_path=Caminho do Sendmail config.mailer_sendmail_args=Argumentos extras para o Sendmail config.mailer_sendmail_timeout=Tempo limite do Sendmail +config.test_email_placeholder=E-mail (por exemplo, teste@exemplo.com.br) config.send_test_mail=Enviar e-mail de teste config.test_mail_failed=Falha ao enviar o e-mail de teste para '%s': %v config.test_mail_sent=O e-mail de teste foi enviado para '%s'. @@ -2343,9 +2819,12 @@ monitor.next=Próxima vez monitor.previous=Vez anterior monitor.execute_times=Execuções monitor.process=Processos em execução +monitor.stacktrace=Stacktraces +monitor.goroutines=%d Goroutines monitor.desc=Descrição monitor.start=Hora de início monitor.execute_time=Tempo de execução +monitor.last_execution_result=Resultado monitor.process.cancel=Cancelar processo monitor.process.cancel_desc=Cancelar um processo pode causar perda de dados monitor.process.cancel_notices=Cancelar: %s? @@ -2357,6 +2836,7 @@ monitor.queue.type=Tipo monitor.queue.exemplar=Tipo de modelo monitor.queue.numberworkers=Número de executores monitor.queue.maxnumberworkers=Número máximo de executores +monitor.queue.numberinqueue=Número na Fila monitor.queue.review=Revisar configuração monitor.queue.review_add=Revisar/Adicionar executores monitor.queue.configuration=Configuração inicial @@ -2376,6 +2856,12 @@ monitor.queue.pool.flush.title=Fila de liberação monitor.queue.pool.flush.desc=Liberação adicionará um executor que será encerrado assim que a fila estiver vazia ou atingir o tempo limite. 
monitor.queue.pool.flush.submit=Adicionar executor de liberação
monitor.queue.pool.flush.added=Executor de liberação adicionado para %[1]s
+monitor.queue.pool.pause.title=Pausar Fila
+monitor.queue.pool.pause.desc=Pausar uma Fila impedirá o processamento de dados
+monitor.queue.pool.pause.submit=Pausar Fila
+monitor.queue.pool.resume.title=Retomar fila
+monitor.queue.pool.resume.desc=Definir esta fila para retomar o trabalho
+monitor.queue.pool.resume.submit=Retomar fila
monitor.queue.settings.title=Configurações do conjunto
monitor.queue.settings.desc=Os conjuntos crescem dinamicamente com um aumento em resposta ao bloqueio da fila de executores. Essas alterações não afetarão os grupos de executores atuais.
@@ -2445,7 +2931,9 @@ approve_pull_request=`aprovou %[3]s#%[2]s`
reject_pull_request=`sugeriu modificações para %[3]s#%[2]s`
publish_release=`lançou a versão "%[4]s" em %[3]s`
review_dismissed=`descartou a revisão de %[4]s para %[3]s#%[2]s`
+review_dismissed_reason=Motivo:
create_branch=criou o branch %[3]s em %[4]s
+starred_repo=favoritou %[2]s
watched_repo=começou a observar %[2]s
[tool]
@@ -2499,6 +2987,99 @@ error.probable_bad_signature=AVISO! Embora exista uma chave com este ID no banco
error.probable_bad_default_signature=AVISO! Embora a chave padrão tenha este ID, ela não verifica este commit! Este commit é SUSPEITO.
[units]
+unit=Unidade
error.no_unit_allowed_repo=Você não tem permissão para acessar nenhuma seção deste repositório.
error.unit_not_allowed=Você não tem permissão para acessar esta seção do repositório.
+[packages]
+title=Pacotes
+desc=Gerenciar pacotes do repositório.
+empty=Não há pacotes ainda.
+empty.documentation=Para obter mais informações sobre o registro de pacote, consulte a documentação.
+filter.type=Tipo
+filter.type.all=Todos
+filter.no_result=Seu filtro não produziu resultados.
+filter.container.tagged=Marcado
+filter.container.untagged=Desmarcado
+published_by=Publicado %[1]s por %[3]s
+published_by_in=Publicado %[1]s por %[3]s em %[5]s
+installation=Instalação
+about=Sobre este pacote
+requirements=Requisitos
+dependencies=Dependências
+keywords=Palavras-chave
+details=Detalhes
+details.author=Autor
+details.project_site=Site do Projeto
+details.license=Licença
+assets=Recursos
+versions=Versões
+versions.on=em
+versions.view_all=Ver todas
+dependency.id=ID
+dependency.version=Versão
+composer.registry=Configure este registro em seu arquivo ~/.composer/config.json:
+composer.install=Para instalar o pacote usando o Composer, execute o seguinte comando:
+composer.documentation=Para obter mais informações sobre o registro do Composer, consulte a documentação.
+composer.dependencies=Dependências
+composer.dependencies.development=Dependências de Desenvolvimento
+conan.details.repository=Repositório
+conan.registry=Configure este registro pela linha de comando:
+conan.install=Para instalar o pacote usando o Conan, execute o seguinte comando:
+conan.documentation=Para obter mais informações sobre o registro Conan, consulte a documentação.
+container.details.type=Tipo de Imagem
+container.details.platform=Plataforma
+container.details.repository_site=Site do Repositório
+container.details.documentation_site=Site da Documentação
+container.pull=Puxe a imagem pela linha de comando:
+container.documentation=Para obter mais informações sobre o registro de Container, consulte a documentação.
+container.multi_arch=S.O. / Arquitetura
+container.layers=Camadas da Imagem
+container.labels=Rótulos
+container.labels.key=Chave
+container.labels.value=Valor
+generic.download=Baixar pacote pela linha de comando:
+generic.documentation=Para obter mais informações sobre o registro genérico, consulte a documentação.
+helm.registry=Configurar este registro pela linha de comando:
+helm.install=Para instalar o pacote, execute o seguinte comando:
+helm.documentation=Para obter mais informações sobre o registro Helm, consulte a documentação.
+maven.registry=Configure este registro no arquivo pom.xml do seu projeto:
+maven.install=Para usar o pacote inclua o seguinte no bloco de dependencies no arquivo pom.xml:
+maven.install2=Executar via linha de comando:
+maven.download=Para baixar a dependência, execute via linha de comando:
+maven.documentation=Para obter mais informações sobre o registro Maven, consulte a documentação.
+nuget.registry=Configurar este registro pela linha de comando:
+nuget.install=Para instalar o pacote usando NuGet, execute o seguinte comando:
+nuget.documentation=Para obter mais informações sobre o registro Nuget, consulte a documentação.
+nuget.dependency.framework=Estrutura Alvo
+npm.registry=Configure este registro no arquivo .npmrc do seu projeto:
+npm.install=Para instalar o pacote usando o npm, execute o seguinte comando:
+npm.install2=ou adicione-o ao arquivo package.json:
+npm.documentation=Para obter mais informações sobre o registro npm, consulte a documentação.
+npm.dependencies=Dependências
+npm.dependencies.development=Dependências de Desenvolvimento
+npm.dependencies.peer=Dependências Peer
+npm.dependencies.optional=Dependências Opcionais
+npm.details.tag=Tag
+pypi.requires=Requer Python
+pypi.install=Para instalar o pacote usando pip, execute o seguinte comando:
+pypi.documentation=Para obter mais informações sobre o registro PyPI, consulte a documentação.
+rubygems.install=Para instalar o pacote usando gem, execute o seguinte comando:
+rubygems.install2=ou adicione-o ao Gemfile:
+rubygems.dependencies.runtime=Dependências de Execução
+rubygems.dependencies.development=Dependências de Desenvolvimento
+rubygems.required.ruby=Requer o Ruby versão
+rubygems.required.rubygems=Requer o RubyGem versão
+rubygems.documentation=Para obter mais informações sobre o registro do RubyGems, consulte a documentação.
+settings.link=Vincular este pacote a um repositório
+settings.link.description=Se você vincular um pacote a um repositório, o pacote será listado na lista de pacotes do repositório.
+settings.link.select=Selecionar Repositório
+settings.link.button=Atualizar Link do Repositório
+settings.link.success=Link do repositório foi atualizado com sucesso.
+settings.link.error=Falha ao atualizar o link do repositório.
+settings.delete=Excluir o pacote
+settings.delete.description=A exclusão de um pacote é permanente e não pode ser desfeita.
+settings.delete.notice=Você está prestes a excluir %s (%s). Esta operação é irreversível, tem certeza?
+settings.delete.success=O pacote foi excluído.
+settings.delete.error=Falha ao excluir o pacote.
+
diff --git a/options/locale/locale_pt-PT.ini b/options/locale/locale_pt-PT.ini
index 384d70ee2c..96d782a27f 100644
--- a/options/locale/locale_pt-PT.ini
+++ b/options/locale/locale_pt-PT.ini
@@ -57,8 +57,8 @@ new_migrate=Nova migração
new_mirror=Novo espelho
new_fork=Nova derivação do repositório
new_org=Nova organização
-new_project=Novo projecto
-new_project_board=Novo painel para o projecto
+new_project=Novo planeamento
+new_project_board=Novo painel para o planeamento
manage_org=Gerir organizações
admin_panel=Administração do sítio
account_settings=Configurações da conta
@@ -105,6 +105,8 @@ error404=A página que pretende aceder não existe ou n
never=Nunca
+rss_feed=Fonte RSS
+
[error]
occurred=Ocorreu um erro
report_message=Se tiver certeza de que se trata de um erro do Gitea, procure, por favor, questões relacionadas no GitHub ou abra uma nova questão, se necessário.
@@ -282,6 +284,7 @@ register_helper_msg=Já tem uma conta? Inicie a sessão agora!
social_register_helper_msg=Já tem uma conta? Associe-a agora!
disable_register_prompt=As inscrições estão desabilitadas. Entre em contacto com o administrador do sítio.
disable_register_mail=A confirmação por email da inscrição está desabilitada.
+manual_activation_only=Contacte o administrador para completar a habilitação.
remember_me=Memorizar este dispositivo
forgot_password_title=Esqueci-me da senha
forgot_password=Esqueceu a sua senha?
@@ -487,7 +490,9 @@ auth_failed=Falhou a autenticação: %v
still_own_repo=A sua conta possui um ou mais repositórios; deve excluí-los ou transferi-los primeiro.
still_has_org=A sua conta é membro de uma ou mais organizações; deixe-as primeiro.
+still_own_packages=A sua conta possui um ou mais pacotes; elimine-os primeiro.
org_still_own_repo=Esta organização ainda possui repositórios; deve excluí-los ou transferi-los primeiro.
+org_still_own_packages=Esta organização ainda possui um ou mais pacotes; elimine-os primeiro.
target_branch_not_exist=O ramo de destino não existe.
@@ -499,7 +504,7 @@ activity=Trabalho público
followers=Seguidores
starred=Repositórios favoritos
watched=Repositórios sob vigilância
-projects=Projectos
+projects=Planeamentos
following=Que segue
follow=Seguir
unfollow=Deixar de seguir
@@ -562,7 +567,7 @@ comment_type_group_deadline=Prazo
comment_type_group_dependency=Dependência
comment_type_group_review_request=Pedido de revisão
comment_type_group_pull_request_push=Cometimentos adicionados
-comment_type_group_project=Projecto
+comment_type_group_project=Planeamento
comment_type_group_issue_ref=Referência da questão
saved_successfully=As suas configurações foram guardadas com sucesso.
privacy=Privacidade
@@ -660,7 +665,6 @@ ssh_invalid_token_signature=A chave SSH, assinatura ou código fornecidos não c
ssh_token_required=Tem que fornecer uma assinatura para o código abaixo
ssh_token=Código
ssh_token_help=Pode gerar uma assinatura usando o seguinte comando:
-ssh_token_code=echo -n "%s" | ssh-keygen -Y sign -n gitea -f /caminho_para_a_sua_chave_pública
ssh_token_signature=Assinatura SSH blindada (com armadura ASCII)
key_signature_ssh_placeholder=Começa com '-----BEGIN SSH SIGNATURE-----'
verify_ssh_key_success=A chave SSH '%s' foi validada.
@@ -801,7 +805,7 @@ visibility.private=Privada
visibility.private_tooltip=Visível apenas para membros da organização
[repo]
-new_repo_helper=Um repositório contém todos os ficheiros do projecto, incluindo o histórico das revisões. Já o tem noutro sítio? Migre o repositório.
+new_repo_helper=Um repositório contém todos os ficheiros do trabalho, incluindo o histórico das revisões. Já o tem noutro sítio? Migre o repositório. owner=Proprietário(a) owner_helper=Algumas organizações podem não aparecer na lista suspensa devido a um limite máximo de contagem de repositórios. repo_name=Nome do repositório @@ -838,10 +842,10 @@ issue_labels=Rótulos para as questões issue_labels_helper=Escolha um conjunto de rótulos para as questões. license=Licença license_helper=Escolha um ficheiro de licença. -license_helper_desc=Uma licença rege o que os outros podem, ou não, fazer com o seu código fonte. Não tem a certeza sobre qual a mais indicada para o seu projecto? Veja: Escolher uma licença. +license_helper_desc=Uma licença rege o que os outros podem, ou não, fazer com o seu código fonte. Não tem a certeza sobre qual a mais indicada para o seu trabalho? Veja: Escolher uma licença. readme=README readme_helper=Escolha um modelo de ficheiro README. -readme_helper_desc=Este é o sítio onde pode escrever uma descrição completa do seu projecto. +readme_helper_desc=Este é o sítio onde pode escrever uma descrição completa do seu trabalho. auto_init=Inicializar repositório (adiciona `.gitignore`, `LICENSE` e `README.md`) trust_model_helper=Escolha o modelo de confiança para a validação das assinaturas. As opções são: trust_model_helper_collaborator=Colaborador: Confiar nas assinaturas dos colaboradores @@ -1005,7 +1009,7 @@ branches=Ramos tags=Etiquetas issues=Questões pulls=Pedidos de integração -project_board=Projectos +project_board=Planeamentos labels=Rótulos org_labels_desc=Rótulos ao nível da organização que podem ser usados em todos os repositórios desta organização org_labels_desc_manage=gerir @@ -1146,27 +1150,27 @@ commit.cherry-pick-content=Escolha o ramo para onde vai escolher a dedo: ext_issues=Acesso a questões externas ext_issues.desc=Ligação para um rastreador de questões externo. -projects=Projectos -projects.desc=Gerir questões e integrações nos quadros do projecto. +projects=Planeamentos +projects.desc=Gerir questões e integrações nos quadros do planeamento. projects.description=Descrição (opcional) projects.description_placeholder=Descrição -projects.create=Criar projecto +projects.create=Criar planeamento projects.title=Título -projects.new=Novo projecto -projects.new_subheader=Coordene, acompanhe e modifique o seu trabalho num só lugar, para que os projectos se mantenham transparentes e cumpram o calendário. -projects.create_success=O projecto '%s' foi criado. -projects.deletion=Eliminar projecto -projects.deletion_desc=Eliminar um projecto remove-o de todas as questões relacionadas. Continuar? -projects.deletion_success=O projecto foi eliminado. -projects.edit=Editar projectos -projects.edit_subheader=Projectos organizam questões e acompanham o progresso. -projects.modify=Modificar projecto -projects.edit_success=O projecto '%s' foi modificado. +projects.new=Novo planeamento +projects.new_subheader=Coordene, acompanhe e modifique o seu trabalho num só lugar, para que os planeamentos se mantenham transparentes e cumpram o calendário. +projects.create_success=O planeamento '%s' foi criado. +projects.deletion=Eliminar planeamento +projects.deletion_desc=Eliminar um planeamento remove-o de todas as questões relacionadas. Continuar? +projects.deletion_success=O planeamento foi eliminado. +projects.edit=Editar planeamentos +projects.edit_subheader=Planeamentos organizam questões e acompanham o progresso. 
+projects.modify=Modificar planeamento +projects.edit_success=O planeamento '%s' foi modificado. projects.type.none=Nenhum projects.type.basic_kanban=Kanban básico projects.type.bug_triage=Triagem de erros -projects.template.desc=Modelo de projecto -projects.template.desc_helper=Escolha um modelo de projecto para começar +projects.template.desc=Modelo de planeamento +projects.template.desc_helper=Escolha um modelo de planeamento para começar projects.type.uncategorized=Sem categoria projects.board.edit=Editar painel projects.board.edit_title=Novo nome para o painel @@ -1176,7 +1180,7 @@ projects.board.new=Novo painel projects.board.set_default=Definir como padrão projects.board.set_default_desc=Definir este painel como padrão para questões e pedidos de integração não categorizados projects.board.delete=Eliminar painel -projects.board.deletion_desc=Eliminar um painel de projecto faz com que todas as questões nesse painel sejam movidas para o painel 'Sem categoria'. Continuar? +projects.board.deletion_desc=Eliminar um painel do planeamento faz com que todas as questões nesse painel sejam movidas para o painel 'Sem categoria'. Continuar? projects.board.color=Cor projects.open=Abrir projects.close=Fechar @@ -1185,7 +1189,7 @@ projects.board.assigned_to=Atribuído a issues.desc=Organize relatórios de erros, tarefas e etapas. issues.filter_assignees=Filtrar responsável issues.filter_milestones=Filtrar etapa -issues.filter_projects=Filtrar projecto +issues.filter_projects=Filtrar planeamento issues.filter_labels=Filtrar rótulo issues.filter_reviewers=Filtrar revisor issues.new=Questão nova @@ -1194,12 +1198,12 @@ issues.new.labels=Rótulos issues.new.add_labels_title=Aplicar rótulos issues.new.no_label=Sem rótulo issues.new.clear_labels=Retirar rótulos -issues.new.projects=Projectos -issues.new.add_project_title=Definir projecto -issues.new.clear_projects=Limpar projectos -issues.new.no_projects=Nenhum projecto -issues.new.open_projects=Projectos abertos -issues.new.closed_projects=Projectos fechados +issues.new.projects=Planeamentos +issues.new.add_project_title=Definir planeamento +issues.new.clear_projects=Limpar planeamentos +issues.new.no_projects=Nenhum planeamento +issues.new.open_projects=Planeamentos abertos +issues.new.closed_projects=Planeamentos fechados issues.new.no_items=Sem itens issues.new.milestone=Etapa issues.new.add_milestone_title=Definir etapa @@ -1233,11 +1237,11 @@ issues.remove_label=removeu o rótulo %s %s issues.remove_labels=removeu os rótulos %s %s issues.add_remove_labels=adicionou o(s) rótulo(s) %s e removeu %s %s issues.add_milestone_at=`adicionou esta questão à etapa %s %s` -issues.add_project_at=`adicionou esta questão ao projecto %s %s` +issues.add_project_at=`adicionou esta questão ao planeamento %s %s` issues.change_milestone_at=`modificou a etapa de %s para %s %s` -issues.change_project_at=`modificou o projecto de %s para %s %s` +issues.change_project_at=`modificou o planeamento de %s para %s %s` issues.remove_milestone_at=`removeu esta questão da etapa %s %s` -issues.remove_project_at=`removeu isto do projecto %s %s` +issues.remove_project_at=`removeu isto do planeamento %s %s` issues.deleted_milestone=`(eliminada)` issues.deleted_project=`(eliminado)` issues.self_assign_at=`atribuiu a si mesmo(a) esta questão %s` @@ -1373,6 +1377,9 @@ issues.lock.reason=Motivo do bloqueio issues.lock.title=Bloquear diálogo sobre esta questão. issues.unlock.title=Desbloquear diálogo sobre esta questão. issues.comment_on_locked=Não pode comentar numa questão bloqueada. 
+issues.delete=Eliminar +issues.delete.title=Pretende eliminar esta questão? +issues.delete.text=Tem a certeza que quer eliminar esta questão? Isso irá remover todo o conteúdo permanentemente. Como alternativa considere fechá-la, se pretender mantê-la em arquivo. issues.tracker=Gestor de tempo issues.start_tracking_short=Iniciar cronómetro issues.start_tracking=Iniciar contagem de tempo @@ -1413,8 +1420,8 @@ issues.due_date_remove=removeu a data de vencimento %s %s issues.due_date_overdue=Em atraso issues.due_date_invalid=A data de vencimento é inválida ou está fora do intervalo permitido. Por favor, use o formato 'aaaa-mm-dd'. issues.dependency.title=Dependências -issues.dependency.issue_no_dependencies=Esta questão não tem quaisquer dependências, neste momento. -issues.dependency.pr_no_dependencies=Este pedido de integração não tem quaisquer dependências, neste momento. +issues.dependency.issue_no_dependencies=Não estão definidas dependências. +issues.dependency.pr_no_dependencies=Não estão definidas dependências. issues.dependency.add=Adicionar dependência… issues.dependency.cancel=Cancelar issues.dependency.remove=Remover @@ -1471,6 +1478,7 @@ issues.content_history.created=criado issues.content_history.delete_from_history=Eliminar do histórico issues.content_history.delete_from_history_confirm=Eliminar do histórico? issues.content_history.options=Opções +issues.reference_link=Referência: %s compare.compare_base=base compare.compare_head=comparar @@ -1479,7 +1487,12 @@ pulls.desc=Habilitar pedidos de integração e revisão de código. pulls.new=Novo pedido de integração pulls.view=Ver pedido de integração pulls.compare_changes=Novo pedido de integração +pulls.allow_edits_from_maintainers_desc=Utilizadores com acesso de escrita no ramo base também podem fazer envios para este ramo +pulls.allow_edits_from_maintainers_err=Não foi possível fazer a modificação pulls.compare_changes_desc=Escolha o ramo de destino e o ramo de origem. +pulls.has_viewed_file=Visto +pulls.has_changed_since_last_review=Alterado desde a última revisão +pulls.viewed_files_label=%[1]d / %[2]d ficheiros vistos pulls.compare_base=integrar em pulls.compare_compare=puxar de pulls.switch_comparison_type=Trocar o tipo de comparação @@ -1547,6 +1560,14 @@ pulls.squash_merge_pull_request=Criar cometimento de compressão pulls.merge_manually=Integrado manualmente pulls.merge_commit_id=O ID de cometimento da integração pulls.require_signed_wont_sign=O ramo requer que os cometimentos sejam assinados mas esta integração não vai ser assinada +pulls.merge_pull_request_now=Executar agora a integração constante no pedido +pulls.rebase_merge_pull_request_now=Mudar a base e integrar agora +pulls.rebase_merge_commit_pull_request_now=Mudar a base e integrar agora (--no-ff) +pulls.squash_merge_pull_request_now=Condensar e integrar agora +pulls.merge_pull_request_on_status_success=Executar a integração constante no pedido quando todas as verificações forem bem sucedidas +pulls.rebase_merge_pull_request_on_status_success=Mudar a base e integrar quando todas as verificações forem bem sucedidas +pulls.rebase_merge_commit_pull_request_on_status_success=Mudar a base e integrar (--no-ff) quando todas as verificações forem bem sucedidas +pulls.squash_merge_pull_request_on_status_success=Condensar e integrar quando todas as verificações forem bem sucedidas pulls.invalid_merge_option=Não pode usar esta opção de integração neste pedido de integração. pulls.merge_conflict=A integração falhou: Houve um conflito durante a integração. 
Dica: tente uma estratégia diferente pulls.merge_conflict_summary=Mensagem de erro @@ -1575,9 +1596,16 @@ pulls.outdated_with_base_branch=Este ramo é obsoleto em relação ao ramo base pulls.closed_at=`fechou este pedido de integração %[2]s` pulls.reopened_at=`reabriu este pedido de integração %[2]s` pulls.merge_instruction_hint=`Também pode ver as instruções para a linha de comandos.` - -pulls.merge_instruction_step1_desc=No seu repositório do projecto, crie um novo ramo e teste as modificações. +pulls.merge_instruction_step1_desc=No seu repositório, crie um novo ramo e teste as modificações. pulls.merge_instruction_step2_desc=Integre as modificações e envie para o Gitea. +pulls.merge_on_status_success=O pedido de integração foi agendado para ser executado quando todas as verificações forem bem sucedidas. +pulls.merge_on_status_success_already_scheduled=Este pedido de integração já está agendado para ser executado quando todas as verificações forem bem sucedidas. +pulls.pr_has_pending_merge_on_success=%[1]s agendou este pedido de integração para ser executado automaticamente quando todas as verificações forem bem sucedidas %[2]s. +pulls.merge_pull_on_success_cancel=Cancelar a integração automática +pulls.pull_request_not_scheduled=Este pedido de integração não está agendado para ser executado automaticamente. +pulls.pull_request_schedule_canceled=A integração automática foi cancelada para este pedido de integração. +pulls.pull_request_scheduled_auto_merge=`agendou este pedido de integração para ser executado automaticamente quando todas as verificações forem bem sucedidas %[1]s` +pulls.pull_request_canceled_scheduled_auto_merge=`cancelou a execução automática deste pedido de integração que iria ser executando quando todas as verificações fossem bem sucedidas %[1]s` milestones.new=Nova etapa milestones.open_tab=%d abertas @@ -1788,7 +1816,8 @@ settings.pulls.allow_manual_merge=Habilitar a marcação dos pedidos de integra settings.pulls.enable_autodetect_manual_merge=Habilitar a identificação automática de integrações manuais (obs.: nalguns casos especiais a avaliação pode ser errada) settings.pulls.allow_rebase_update=Habilitar a modificação do ramo do pedido de integração através da mudança de base settings.pulls.default_delete_branch_after_merge=Eliminar o ramo do pedido de integração depois de finalizada a integração, como predefinição -settings.projects_desc=Habilitar projectos no repositório +settings.packages_desc=Habilitar o registo de pacotes do repositório +settings.projects_desc=Habilitar planeamentos no repositório settings.admin_settings=Configurações do administrador settings.admin_enable_health_check=Habilitar verificações de integridade (git fsck) no repositório settings.admin_code_indexer=Indexador de código @@ -1945,6 +1974,8 @@ settings.event_pull_request_review=Pedido de integração revisto settings.event_pull_request_review_desc=Pedido de integração aprovado, rejeitado ou comentado na revisão. settings.event_pull_request_sync=Pedido de integração sincronizado settings.event_pull_request_sync_desc=Pedido de integração sincronizado. +settings.event_package=Pacote +settings.event_package_desc=Pacote criado ou eliminado num repositório. settings.branch_filter=Filtro de ramos settings.branch_filter_desc=Lista dos ramos a serem considerados nos eventos de envio e de criação e eliminação de ramos, especificada como um padrão glob. Se estiver em branco ou for *, serão reportados eventos para todos os ramos. Veja a documentação github.com/gobwas/glob para ver os detalhes da sintaxe. 
Exemplos: trunk, {trunk,release*}. settings.active=Em funcionamento @@ -2426,6 +2457,7 @@ dashboard.resync_all_hooks=Voltar a sincronizar automatismos de pré-acolhimento dashboard.reinit_missing_repos=Reinicializar todos os repositórios Git em falta para os quais existam registos dashboard.sync_external_users=Sincronizar dados externos do utilizador dashboard.cleanup_hook_task_table=Limpar tabela hook_task +dashboard.cleanup_packages=Limpar pacotes expirados dashboard.server_uptime=Tempo em funcionamento contínuo do servidor dashboard.current_goroutine=Goroutines em execução dashboard.current_memory_usage=Utilização de memória corrente @@ -2458,6 +2490,7 @@ dashboard.gc_times=Tempos da recolha de lixo dashboard.delete_old_actions=Eliminar todas as operações antigas da base de dados dashboard.delete_old_actions.started=Foi iniciado o processo de eliminação de todas as operações antigas da base de dados. dashboard.update_checker=Verificador de novas versões +dashboard.delete_old_system_notices=Eliminar todas as notificações do sistema antigas da base de dados users.user_manage_panel=Gestão das contas de utilizadores users.new_account=Criar conta de utilizador @@ -2494,6 +2527,7 @@ users.update_profile=Modificar conta do utilizador users.delete_account=Eliminar conta de utilizador users.still_own_repo=Este utilizador ainda possui um ou mais repositórios. Elimine ou transfira esses repositórios primeiro. users.still_has_org=Este utilizador é membro de uma organização. Remova, primeiro, o utilizador de todas as organizações. +users.still_own_packages=Este utilizador ainda possui um ou mais pacotes. Elimine esses pacotes primeiro. users.deletion_success=A conta de utilizador foi eliminada. users.reset_2fa=Reinicializar a autenticação em dois passos users.list_status_filter.menu_text=Filtro @@ -2540,6 +2574,16 @@ repos.forks=Derivações repos.issues=Questões repos.size=Tamanho +packages.package_manage_panel=Gestão de pacotes +packages.total_size=Tamanho total: %s +packages.owner=Proprietário +packages.creator=Criador +packages.name=Nome +packages.version=Versão +packages.type=Tipo +packages.repository=Repositório +packages.size=Tamanho + defaulthooks=Automatismos web padrão defaulthooks.desc=Os automatismos web fazem pedidos HTTP POST automaticamente a um servidor quando são despoletados determinados eventos do Gitea. Os automatismos web definidos aqui são os padrões e serão copiados para todos os novos repositórios. Leia mais no guia de automatismos web. defaulthooks.add_webhook=Adicionar automatismo web padrão @@ -2810,6 +2854,7 @@ monitor.process=Processos em execução monitor.desc=Descrição monitor.start=Início monitor.execute_time=Tempo de execução +monitor.last_execution_result=Resultado monitor.process.cancel=Cancelar processo monitor.process.cancel_desc=Cancelar um processo pode resultar na perda de dados monitor.process.cancel_notices=Cancelar: %s? @@ -2976,3 +3021,81 @@ unit=Unidade error.no_unit_allowed_repo=Não tem permissão para aceder a nenhuma parte deste repositório. error.unit_not_allowed=Não tem permissão para aceder a esta parte do repositório. +[packages] +title=Pacotes +desc=Gerir pacotes do repositório. +empty=Ainda não há pacotes. +empty.documentation=Para obter mais informação sobre o registo de pacotes, veja a documentação. +filter.type=Tipo +filter.type.all=Todos +filter.no_result=O seu filtro não produziu quaisquer resultados. 
+published_by=Publicado %[1]s por %[3]s
+published_by_in=Publicado %[1]s por %[3]s em %[5]s
+installation=Instalação
+about=Sobre este pacote
+requirements=Requisitos
+dependencies=Dependências
+keywords=Palavras-chave
+details=Detalhes
+details.author=Autor(a)
+details.license=Licença
+assets=Recursos
+versions=Versões
+versions.view_all=Ver todas
+dependency.id=ID
+dependency.version=Versão
+composer.registry=Configure este registo no seu ficheiro ~/.composer/config.json:
+composer.install=Para instalar o pacote usando o Composer, execute o seguinte comando:
+composer.documentation=Para obter mais informações sobre o registo do Composer, consulte a documentação.
+composer.dependencies=Dependências
+composer.dependencies.development=Dependências de desenvolvimento
+conan.details.repository=Repositório
+conan.registry=Configurar este registo usando a linha de comandos:
+conan.install=Para instalar o pacote usando o Conan, execute o seguinte comando:
+conan.documentation=Para obter mais informações sobre o registo do Conan, consulte a documentação.
+container.details.type=Tipo de imagem
+container.details.platform=Plataforma
+container.details.repository_site=Página web do repositório
+container.details.documentation_site=Página web da documentação
+container.pull=Puxar a imagem usando a linha de comandos:
+container.documentation=Para obter mais informações sobre o registo do Container, consulte a documentação.
+container.multi_arch=S.O. / Arquit.
+container.labels=Rótulos
+container.labels.key=Chave
+container.labels.value=Valor
+generic.download=Descarregar pacote usando a linha de comandos:
+generic.documentation=Para obter mais informações sobre o registo genérico, consulte a documentação.
+helm.registry=Configurar este registo usando a linha de comandos:
+helm.install=Para instalar o pacote, execute o seguinte comando:
+helm.documentation=Para obter mais informações sobre o registo do Helm, consulte a documentação.
+maven.registry=Configure este registo no seu ficheiro pom.xml do projecto:
+maven.install=Para usar este pacote, inclua no bloco dependencies do ficheiro pom.xml o seguinte:
+maven.install2=Executar usando a linha de comandos:
+maven.download=Para descarregar a dependência, execute na linha de comandos:
+maven.documentation=Para obter mais informações sobre o registo do Maven, consulte a documentação.
+nuget.registry=Configurar este registo usando a linha de comandos:
+nuget.install=Para instalar o pacote usando NuGet, execute o seguinte comando:
+nuget.documentation=Para obter mais informações sobre o registo do Nuget, consulte a documentação.
+npm.install=Para instalar o pacote usando o npm, execute o seguinte comando:
+npm.documentation=Para obter mais informações sobre o registo do npm, consulte a documentação.
+npm.dependencies=Dependências
+npm.dependencies.development=Dependências de desenvolvimento
+npm.dependencies.optional=Dependências opcionais
+pypi.requires=Requer Python
+pypi.install=Para instalar o pacote usando o pip, execute o seguinte comando:
+pypi.documentation=Para obter mais informações sobre o registo do PyPI, consulte a documentação.
+rubygems.install=Para instalar o pacote usando o gem, execute o seguinte comando:
+rubygems.dependencies.development=Dependências de desenvolvimento
+rubygems.documentation=Para obter mais informações sobre o registo do RubyGems, consulte a documentação.
+settings.link=Vincular este pacote a um repositório
+settings.link.description=Se você vincular um pacote a um repositório, o pacote será listado na lista de pacotes do repositório.
+settings.link.select=Escolha o repositório
+settings.link.button=Modificar vínculo ao repositório
+settings.link.success=O vínculo ao repositório foi modificado com sucesso.
+settings.link.error=Falhou a modificação do vínculo ao repositório.
+settings.delete=Eliminar pacote
+settings.delete.description=Eliminar o pacote é permanente e não pode ser desfeito.
+settings.delete.notice=Está prestes a eliminar %s (%s). Esta operação é irreversível. Tem a certeza?
+settings.delete.success=O pacote foi eliminado.
+settings.delete.error=Falhou a eliminação do pacote.
+
diff --git a/options/locale/locale_ru-RU.ini b/options/locale/locale_ru-RU.ini
index 87b4f542c7..bf29e2b411 100644
--- a/options/locale/locale_ru-RU.ini
+++ b/options/locale/locale_ru-RU.ini
@@ -45,6 +45,8 @@ webauthn_error_insecure=WebAuthn поддерживает только безо
webauthn_error_unable_to_process=Сервер не смог обработать ваш запрос.
webauthn_error_duplicated=Представленный ключ не подходит для этого запроса. Если вы пытаетесь зарегистрировать его, убедитесь, что ключ ещё не зарегистрирован.
webauthn_error_empty=Вы должны указать имя для этого ключа.
+webauthn_error_timeout=Тайм-аут достигнут до того, как ваш ключ был прочитан. Перезагрузите эту страницу и повторите попытку.
+webauthn_reload=Обновить
repository=Репозиторий
organization=Организация
@@ -102,8 +104,13 @@ error404=Страница, которую вы пытаетесь открыть
never=Никогда
+rss_feed=RSS-лента
+
[error]
+occurred=Произошла ошибка
missing_csrf=Некорректный запрос: CSRF токен отсутствует
+invalid_csrf=Некорректный запрос: неверный CSRF токен
+network_error=Ошибка сети
[startpage]
app_desc=Удобный сервис собственного хостинга репозиториев Git
@@ -260,6 +267,7 @@ search=Поиск
code=Код
search.fuzzy=Неточный
search.match=Соответствие
+code_search_unavailable=В настоящее время поиск по коду недоступен. Обратитесь к администратору сайта.
repo_no_results=Подходящие репозитории не найдены.
user_no_results=Подходящие пользователи не найдены.
org_no_results=Подходящие организации не найдены.
@@ -273,6 +281,7 @@ register_helper_msg=Уже есть аккаунт? Авторизуйтесь!
social_register_helper_msg=Уже есть аккаунт? Свяжите его сейчас!
disable_register_prompt=Извините, возможность регистрации отключена. Пожалуйста, свяжитесь с администратором сайта.
disable_register_mail=Подтверждение регистрации по электронной почте отключено.
+manual_activation_only=Обратитесь к администратору сайта для завершения активации.
remember_me=Запомнить это устройство
forgot_password_title=Восстановить пароль
forgot_password=Забыли пароль?
@@ -311,6 +320,9 @@ oauth_signup_submit=Полная учётная запись
oauth_signin_tab=Ссылка на существующую учётную запись
oauth_signin_title=Войдите, чтобы авторизовать связанную учётную запись
oauth_signin_submit=Привязать учётную запись
+oauth.signin.error=Произошла ошибка при обработке запроса авторизации. Если эта ошибка повторяется, обратитесь к администратору сайта.
+oauth.signin.error.access_denied=Запрос на авторизацию был отклонен.
+oauth.signin.error.temporarily_unavailable=Ошибка авторизации, так как сервер временно недоступен. Пожалуйста, повторите попытку позже.
openid_connect_submit=Подключить
openid_connect_title=Подключение к существующей учетной записи
openid_connect_desc=Выбранный OpenID URI неизвестен. Свяжите с новой учетной записью здесь.
@@ -475,7 +487,9 @@ auth_failed=Ошибка аутентификации: %v still_own_repo=Ваша учётная запись владеет одним или несколькими репозиториями; сначала удалите или перенесите их. still_has_org=Ваша учётная запись является членом одной или нескольких организаций; сначала выйдите из них. +still_own_packages=Ваша учётная запись владеет одним или несколькими пакетами, сначала удалите их. org_still_own_repo=Эта организация по-прежнему владеет одним или несколькими репозиториями; сначала удалите или перенесите их. +org_still_own_packages=Эта организация всё ещё имеет пакеты, сначала удалите их. target_branch_not_exist=Целевая ветка не существует. @@ -516,6 +530,7 @@ twofa=Двухфакторная аутентификация account_link=Привязанные аккаунты organization=Организации uid=UID +webauthn=Ключи безопасности public_profile=Открытый профиль biography_placeholder=Расскажите немного о себе @@ -537,6 +552,19 @@ continue=Далее cancel=Отмена language=Язык ui=Тема +hidden_comment_types=Скрытые типы комментариев +comment_type_group_reference=Упоминания +comment_type_group_label=Операции с метками +comment_type_group_milestone=Этап +comment_type_group_assignee=Назначения +comment_type_group_title=Правки заголовков +comment_type_group_branch=Операции с ветками +comment_type_group_time_tracking=Отслеживание времени +comment_type_group_deadline=Модификации сроков выполнения +comment_type_group_dependency=Модификации зависимостей +comment_type_group_lock=Смена статуса ограничения на обсуждение +comment_type_group_review_request=Запросы на рецензию +saved_successfully=Ваши настройки успешно сохранены. privacy=Приватность keep_activity_private=Скрыть активность со страницы профиля keep_activity_private_popup=Делает активность видимой только для вас и администраторов @@ -632,7 +660,6 @@ ssh_invalid_token_signature=Предоставленный SSH ключ, под ssh_token_required=Вы должны предоставить подпись для токена ниже ssh_token=Токен ssh_token_help=Вы можете сгенерировать подпись с помощью: -ssh_token_code=echo -n "%s" | ssh-keygen -Y sign -n gitea -f /path_to_your_pubkey ssh_token_signature=Бронированная SSH подпись key_signature_ssh_placeholder=Начинается с '-----BEGIN SSH SIGNATURE-----' verify_ssh_key_success=SSH ключ '%s' проверен. @@ -737,6 +764,10 @@ twofa_enrolled=Для вашего аккаунта была включена д twofa_failed_get_secret=Не удалось получить ключ. webauthn_desc=Ключи безопасности - это аппаратные устройства, содержащие криптографические ключи. Они могут использоваться для двухфакторной аутентификации. Ключи безопасности должны поддерживать стандарт WebAuthn Authenticator. +webauthn_register_key=Добавить ключ безопасности +webauthn_nickname=Имя пользователя +webauthn_delete_key=Удалить ключ безопасности +webauthn_delete_key_desc=Если вы удалите ключ безопасности, вы больше не сможете войти с его помощью. Продолжить? manage_account_links=Управление привязанными аккаунтами manage_account_links_desc=Эти внешние аккаунты привязаны к вашему аккаунту Gitea. @@ -787,6 +818,7 @@ visibility_fork_helper=(Изменение этого повлияет на вс clone_helper=Нужна помощь в клонировании? Посетите страницу помощи. fork_repo=Форкнуть репозиторий fork_from=Форк от +already_forked=Вы уже форкнули %s fork_visibility_helper=Видимость форкнутого репозитория изменить нельзя. use_template=Использовать этот шаблон clone_in_vsc=Клонировать в VS Code @@ -921,6 +953,7 @@ migrate.migrating=Перенос из %s... migrate.migrating_failed=Перенос из %s не удался. migrate.migrating_failed.error=Ошибка: %s migrate.migrating_failed_no_addr=Миграция не удалась. 
+migrate.github.description=Переносите данные с github.com или других серверов GitHub. migrate.git.description=Перенести только репозиторий из любого Git сервиса. migrate.gitlab.description=Перенести данные с gitlab.com или других экземпляров GitLab. migrate.gitea.description=Перенести данные с gitea.com или других экземпляров Gitea. @@ -930,6 +963,7 @@ migrate.codebase.description=Перенос данных с codebasehq.com. migrate.gitbucket.description=Перенести данные из экземпляров GitBucket. migrate.migrating_git=Перенос Git данных migrate.migrating_topics=Миграция тем +migrate.migrating_milestones=Перенос этапов migrate.migrating_labels=Миграция меток migrate.migrating_issues=Миграция задач migrate.migrating_pulls=Миграция запросов на слияние @@ -1026,6 +1060,8 @@ editor.add_tmpl=Добавить '' editor.add=Создал(а) '%s' editor.update=Изменил(а) на '%s' editor.delete=Удалить '%s' +editor.patch=Применить патч +editor.new_patch=Новый патч editor.commit_message_desc=Добавьте необязательное расширенное описание… editor.signoff_desc=Добавить Signed-off-by коммитом в конце сообщения журнала коммитов. editor.commit_directly_to_this_branch=Сделайте коммит прямо в ветку %s. @@ -1078,6 +1114,12 @@ commits.signed_by_untrusted_user=Подписано ненадежным пол commits.signed_by_untrusted_user_unmatched=Подписан ненадежным пользователем, который не соответствует коммиту commits.gpg_key_id=Идентификатор GPG ключа +commit.revert=Откатить +commit.revert-header=Откат: %s +commit.revert-content=Выбрать ветку для отката: +commit.cherry-pick=Cherry-pick +commit.cherry-pick-header=Cherry-pick: %s +commit.cherry-pick-content=Выбрать ветку для cherry-pick: ext_issues.desc=Ссылка на внешнюю систему отслеживания ошибок. @@ -1303,6 +1345,9 @@ issues.lock.reason=Причина для ограничения issues.lock.title=Ограничить обсуждение данной задачи. issues.unlock.title=Снять ограничение обсуждения данной задачи. issues.comment_on_locked=Вы не можете оставить комментарий по задаче, ограниченной для обсуждения. +issues.delete=Удалить +issues.delete.title=Удалить эту задачу? +issues.delete.text=Вы действительно хотите удалить эту задачу? Это навсегда удалит всё содержимое. Возможно лучше закрыть её в архивных целях. issues.tracker=Отслеживание времени issues.start_tracking_short=Запустить таймер issues.start_tracking=Начать отслеживание времени @@ -1343,8 +1388,8 @@ issues.due_date_remove=удалён срок выполнения %s %s issues.due_date_overdue=Просроченные issues.due_date_invalid=Срок действия недействителен или находится за пределами допустимого диапазона. Пожалуйста, используйте формат 'гггг-мм-дд'. issues.dependency.title=Зависимости -issues.dependency.issue_no_dependencies=В настоящее время эта задача не имеет зависимостей. -issues.dependency.pr_no_dependencies=Этот запрос на слияние в настоящее время не имеет никаких зависимостей. +issues.dependency.issue_no_dependencies=Зависимостей нет. +issues.dependency.pr_no_dependencies=Зависимостей нет. issues.dependency.add=Добавить зависимость… issues.dependency.cancel=Отменить issues.dependency.remove=Удалить @@ -1410,7 +1455,7 @@ pulls.new=Новый запрос на слияние pulls.view=Просмотр запроса на слияние pulls.compare_changes=Новый запрос на слияние pulls.compare_changes_desc=Сравнить две ветки и создать запрос на слияние для изменений. 
-pulls.compare_base=родительская ветка +pulls.compare_base=базовая ветка pulls.compare_compare=взять из pulls.switch_comparison_type=Переключить тип сравнения pulls.switch_head_and_base=Поменять исходную и целевую ветки местами @@ -1434,7 +1479,7 @@ pulls.manually_merged=Слито вручную pulls.manually_merged_as=Запрос на слияние был объединён вручную, как %[2]s. pulls.is_closed=Запрос на слияние был закрыт. pulls.has_merged=Слияние этого запроса успешно завершено. -pulls.title_wip_desc=`Добавьте %s в начало заголовка для защиты от случайного досрочного принятия запроса на слияние +pulls.title_wip_desc=`Добавьте %s в начало заголовка для защиты от случайного досрочного принятия запроса на слияние` pulls.cannot_merge_work_in_progress=Этот запрос на слияние помечен как в процессе работы. pulls.still_in_progress=Всё ещё в процессе? pulls.add_prefix=Добавить %s префикс @@ -1485,7 +1530,9 @@ pulls.rebase_conflict_summary=Сообщение об ошибке ; %[2]s
%[3]s
pulls.unrelated_histories=Слияние не удалось: У источника и цели слияния нет общей истории. Совет: попробуйте другую стратегию
pulls.merge_out_of_date=Ошибка слияния: при создании слияния база данных была обновлена. Подсказка: попробуйте ещё раз.
+pulls.push_rejected=Слияние не удалось: push был отклонён. Проверьте Git-хуки для этого репозитория.
pulls.push_rejected_summary=Полная ошибка отклонения
+pulls.push_rejected_no_message=Слияние не удалось: push был отклонён, но сервер не указал причину.
Проверьте Git-хуки для этого репозитория pulls.open_unmerged_pull_exists=`Вы не можете снова открыть, поскольку уже существует запрос на слияние (#%d) из того же репозитория с той же информацией о слиянии и ожидающий слияния.` pulls.status_checking=Выполняются некоторые проверки pulls.status_checks_success=Все проверки выполнены успешно @@ -1502,7 +1549,6 @@ pulls.outdated_with_base_branch=Эта ветка отстает от базов pulls.closed_at=`закрыл этот запрос на слияние %[2]s` pulls.reopened_at=`переоткрыл этот запрос на слияние %[2]s` pulls.merge_instruction_hint=`Вы также можете просмотреть инструкции командной строки.` - pulls.merge_instruction_step1_desc=В репозитории вашего проекта посмотрите новую ветку и протестируйте изменения. pulls.merge_instruction_step2_desc=Объединить изменения и обновить на Gitea. @@ -1649,6 +1695,8 @@ search.search_repo=Поиск по репозиторию search.fuzzy=Неточный search.match=Соответствие search.results=Результаты поиска "%s" в %s +search.code_no_results=Не найдено исходного кода, соответствующего поисковому запросу. +search.code_search_unavailable=В настоящее время поиск по коду недоступен. Обратитесь к администратору сайта. settings=Настройки settings.desc=В настройках вы можете менять различные параметры этого репозитория @@ -1808,6 +1856,7 @@ settings.webhook.response=Ответ settings.webhook.headers=Заголовки settings.webhook.payload=Содержимое settings.webhook.body=Тело ответа +settings.webhook.replay.description=Повторить этот веб-хук. settings.githook_edit_desc=Если хук не активен, будет подставлен пример содержимого. Пустое значение в этом поле приведёт к отключению хука. settings.githook_name=Название Hook'a settings.githook_content=Содержание hook'а @@ -1865,6 +1914,8 @@ settings.event_pull_request_review=Запрос на слияние рассмо settings.event_pull_request_review_desc=Запрос на слияние утвержден, отклонён или оставлен комментарий. settings.event_pull_request_sync=Синхронизация запроса на слияние settings.event_pull_request_sync_desc=Запрос на слияние синхронизирован. +settings.event_package=Пакеты +settings.event_package_desc=Пакет создан или удален в репозитории. settings.branch_filter=Фильтр веток settings.branch_filter_desc=Белый список ветвей для событий Push, создания ветвей и удаления ветвей, указанных в виде глоб-шаблона. Если пустой или *, то все событий для всех ветвей будут зарегистрированы. Перейдите по ссылке github.com/gobwas/glob на документацию по синтаксису. Примеры: master, {master,release*}. settings.active=Активный @@ -1878,6 +1929,13 @@ settings.hook_type=Тип hook'а settings.slack_token=Slack токен settings.slack_domain=Домен settings.slack_channel=Канал +settings.web_hook_name_gitea=Gitea +settings.web_hook_name_gogs=Gogs +settings.web_hook_name_slack=Slack +settings.web_hook_name_discord=Discord +settings.web_hook_name_dingtalk=DingTalk +settings.web_hook_name_msteams=Microsoft Teams +settings.web_hook_name_feishu_or_larksuite=Feishu / Lark Suite settings.deploy_keys=Ключи развертывания settings.add_deploy_key=Добавить ключ развертывания settings.deploy_key_desc=Ключи развёртывания доступны только для чтения. Это не то же самое что и SSH-ключи аккаунта. @@ -2132,11 +2190,14 @@ branch.included_desc=Эта ветка является частью ветки branch.included=Включено branch.create_new_branch=Создать ветку из ветви: branch.confirm_create_branch=Создать ветку +branch.create_branch_operation=Создать ветку branch.new_branch=Создать новую ветку branch.new_branch_from=Создать новую ветку из '%s' branch.renamed=Ветка %s была переименована в %s. 
tag.create_tag=Создать тег %s +tag.create_tag_operation=Создать тег +tag.confirm_create_tag=Создать тег tag.create_success=Тег '%s' был создан. @@ -2224,7 +2285,9 @@ teams.leave=Выйти teams.leave.detail=Покинуть %s? teams.can_create_org_repo=Создать репозитории teams.can_create_org_repo_helper=Участники могут создавать новые репозитории в организации. Создатель получит администраторский доступ к новому репозиторию. +teams.read_access=Чтение teams.read_access_helper=Участники могут просматривать и клонировать командные репозитории. +teams.write_access=Запись teams.write_access_helper=Участники могут читать и выполнять push в командные репозитории. teams.admin_access=Доступ администратора teams.admin_access_helper=Участники могут выполнять pull, push в командные репозитории и добавлять соавторов в команду. @@ -2348,6 +2411,7 @@ dashboard.last_gc_pause=Последняя пауза сборщика мусо dashboard.gc_times=Количество сборок мусора dashboard.delete_old_actions=Удалите все старые действия из базы данных dashboard.delete_old_actions.started=Удалите все старые действия из запущенной базы данных. +dashboard.update_checker=Проверка обновлений users.user_manage_panel=Панель управления пользователями users.new_account=Создать новый аккаунт @@ -2430,6 +2494,15 @@ repos.forks=Форки repos.issues=Задачи repos.size=Размер +packages.owner=Владелец +packages.creator=Автор +packages.name=Наименование +packages.version=Версия +packages.type=Тип +packages.repository=Репозиторий +packages.size=Размер +packages.published=Опубликовано + defaulthooks=Стандартные Веб-хуки defaulthooks.desc=Вебхуки автоматически делают HTTP-POST запросы на сервер, когда вызываются определенные события Gitea. Вебхуки, определённые здесь, по умолчанию и будут скопированы во все новые репозитории. Подробнее читайте в руководстве по вебхукам. defaulthooks.add_webhook=Добавить стандартный Веб-хук @@ -2685,9 +2758,11 @@ monitor.next=Следующий раз monitor.previous=Предыдущий раз monitor.execute_times=Количество выполнений monitor.process=Запущенные процессы +monitor.goroutines=%d горутин monitor.desc=Описание monitor.start=Время начала monitor.execute_time=Время выполнения +monitor.last_execution_result=Результат monitor.process.cancel=Отменить процесс monitor.process.cancel_desc=Отмена процесса может привести к потере данных monitor.process.cancel_notices=Отменить: %s? @@ -2718,6 +2793,11 @@ monitor.queue.pool.flush.title=Очистить очередь monitor.queue.pool.flush.desc=При сбросе будет добавлен работник, который будет закрыт, когда очередь будет пустой, или истечет время время. monitor.queue.pool.flush.submit=Добавить чистящего работника monitor.queue.pool.flush.added=Добавлен чистящий рабочий на %[1]s +monitor.queue.pool.pause.desc=Приостановка очереди приостановит обработку данных +monitor.queue.pool.pause.submit=Приостановить очередь +monitor.queue.pool.resume.title=Возобновить очередь +monitor.queue.pool.resume.desc=Эта очередь возобновит работу +monitor.queue.pool.resume.submit=Возобновить очередь monitor.queue.settings.title=Настройки пула monitor.queue.settings.desc=Пулы динамично растут с ускорением в ответ на блокировку их рабочих очередей. Эти изменения не повлияют на текущие рабочие группы. @@ -2846,3 +2926,19 @@ error.probable_bad_default_signature=ПРЕДУПРЕЖДЕНИЕ! Хотя кл error.no_unit_allowed_repo=У вас нет доступа ни к одному разделу этого репозитория. error.unit_not_allowed=У вас нет доступа к этому разделу репозитория. +[packages] +title=Пакеты +desc=Управление пакетами репозитория. +empty=Пока нет пакетов. 
+empty.documentation=Дополнительную информацию о реестре пакетов можно найти в документации. +filter.type=Тип +filter.type.all=Все +filter.no_result=Фильтр не дал результатов. +installation=Установка +about=Об этом пакете +requirements=Требования +dependencies=Зависимости +container.multi_arch=ОС / архитектура +container.labels.key=Ключ +container.labels.value=Значение + diff --git a/options/locale/locale_si-LK.ini b/options/locale/locale_si-LK.ini index 8734fe79b1..f9aef5317c 100644 --- a/options/locale/locale_si-LK.ini +++ b/options/locale/locale_si-LK.ini @@ -91,6 +91,7 @@ error404=ඔබ ළඟා වීමට උත්සාහ කරන පිටු never=කිසි විටෙකත් + [error] missing_csrf=නරක ඉල්ලීම: CSRF ටෝකන් නොමැත @@ -1264,8 +1265,6 @@ issues.due_date_remove=නියමිත දිනය ඉවත් කරන issues.due_date_overdue=කල් ඉකුත්වීම issues.due_date_invalid=නියමිත දිනය අවලංගු හෝ පරාසයෙන් බැහැර වේ. කරුණාකර 'yyyy-mm-dd' ආකෘතිය භාවිතා කරන්න. issues.dependency.title=පරායත්ත -issues.dependency.issue_no_dependencies=මෙම ගැටළුව දැනට කිසිදු යැපීමක් නොමැත. -issues.dependency.pr_no_dependencies=මෙම අදින්න ඉල්ලීම දැනට කිසිදු යැපීමක් නොමැත. issues.dependency.add=පරායත්ත එක් කරන්න… issues.dependency.cancel=අවලංගු කරන්න issues.dependency.remove=ඉවත් කරන්න @@ -1422,7 +1421,6 @@ pulls.outdated_with_base_branch=මෙම ශාඛාව මූලික ශා pulls.closed_at=`මෙම අදින්න ඉල්ලීම වසා %[2]s` pulls.reopened_at=`මෙම අදින්න ඉල්ලීම නැවත විවෘත කරන ලදි %[2]s` pulls.merge_instruction_hint=`ඔබට විධාන රේඛා උපදෙස්ද නැරඹිය හැකිය. ` - pulls.merge_instruction_step1_desc=ඔබගේ ව්යාපෘති ගබඩාවෙන්, නව ශාඛාවක් පරීක්ෂා කර වෙනස්කම් පරීක්ෂා කරන්න. pulls.merge_instruction_step2_desc=Gitea හි වෙනස්කම් සහ යාවත්කාලීන කිරීම ඒකාබද්ධ කරන්න. @@ -2335,6 +2333,7 @@ repos.forks=දෙබලක repos.issues=ගැටළු repos.size=ප්‍රමාණය + defaulthooks=පෙරනිමි වෙබ් කොකු defaulthooks.desc=ඇතැම් Gitea සිදුවීම් අවුලුවාලන විට වෙබ් හූක්ස් ස්වයංක්රීයව සේවාදායකයකට HTTP පෝස්ට් ඉල්ලීම් කරයි. මෙහි අර්ථ දක්වා ඇති වෙබ්කොකු පැහැර හැරීම් වන අතර සියලු නව ගබඩාවන් වෙත පිටපත් කරනු ලැබේ. තව දුරටත් කියවන්න වෙබ් කොකු මාර්ගෝපදේශය. defaulthooks.add_webhook=පෙරනිමි වෙබ් හූක් එකතු කරන්න @@ -2727,3 +2726,5 @@ no_read=කියවූ දැනුම්දීම් නැත. [units] +[packages] + diff --git a/options/locale/locale_sv-SE.ini b/options/locale/locale_sv-SE.ini index 868cc9591f..e111a81d6a 100644 --- a/options/locale/locale_sv-SE.ini +++ b/options/locale/locale_sv-SE.ini @@ -79,6 +79,7 @@ loading=Laddar… error404=Sidan du försöker nå finns inte eller så har du inte behörighet att se den. + [error] [startpage] @@ -1079,8 +1080,6 @@ issues.due_date_remove=tog bort förfallodatumet %s %s issues.due_date_overdue=Försenad issues.due_date_invalid=Förfallodatumet är ogiltigt eller utanför gränserna. Använd formatet 'åååå-mm-dd'. issues.dependency.title=Beroenden -issues.dependency.issue_no_dependencies=Det här ärendet har för närvarande inga beroenden. -issues.dependency.pr_no_dependencies=Denna pull-förfrågan har för närvarande inga beroenden. 
issues.dependency.add=Lägg till beroende… issues.dependency.cancel=Avbryt issues.dependency.remove=Ta bort @@ -1168,7 +1167,6 @@ pulls.open_unmerged_pull_exists=`Du kan inte återuppliva denna pull-request då pulls.update_branch_success=Uppdatering av branchen lyckades pulls.outdated_with_base_branch=Denna branch är föråldrad gentemot bas-branchen - milestones.new=Ny milstolpe milestones.open_tab=%d Öppna milestones.close_tab=%d Stängda @@ -1844,6 +1842,7 @@ repos.issues=Ärenden repos.size=Storlek + systemhooks=Systemets webbhooks auths.auth_manage_panel=Hantering av autentiseringkälla @@ -2140,3 +2139,5 @@ error.probable_bad_default_signature=VARNING! Även om standard-nyckeln har dett error.no_unit_allowed_repo=Du tillåts inte åtkomst till någon del av denna utvecklingskatalog. error.unit_not_allowed=Du har inte åtkomst till denna del av utvecklingskatalogen. +[packages] + diff --git a/options/locale/locale_tr-TR.ini b/options/locale/locale_tr-TR.ini index e32ad0357a..df54622e43 100644 --- a/options/locale/locale_tr-TR.ini +++ b/options/locale/locale_tr-TR.ini @@ -91,6 +91,7 @@ error404=Ulaşmaya çalıştığınız sayfa mevcut değil veya never=Asla + [error] missing_csrf=Hatalı İstek: CSRF anahtarı yok @@ -1286,8 +1287,6 @@ issues.due_date_remove=%[2]s %[1]s bitiş tarihini kaldırdı issues.due_date_overdue=Süresi Geçmiş issues.due_date_invalid=Bitiş tarihi geçersiz veya aralık dışında. Lütfen 'yyyy-aa-gg' biçimini kullanın. issues.dependency.title=Bağımlılıklar -issues.dependency.issue_no_dependencies=Bu konu henüz bir bağımlılık içermiyor. -issues.dependency.pr_no_dependencies=Bu değişiklik isteği henüz bir bağımlılık içermiyor. issues.dependency.add=Bağımlılık ekle… issues.dependency.cancel=İptal issues.dependency.remove=Kaldır @@ -1437,7 +1436,6 @@ pulls.outdated_with_base_branch=Bu dal, temel dal ile güncel değil pulls.closed_at=`%[2]s değişiklik isteğini kapattı` pulls.reopened_at=`%[2]s değişiklik isteğini yeniden açtı` pulls.merge_instruction_hint=`komut satırı talimatlarını da görüntüleyebilirsiniz.` - pulls.merge_instruction_step1_desc=Proje deponuzdan yeni bir dala göz atın ve değişiklikleri test edin. pulls.merge_instruction_step2_desc=Gitea'daki değişiklikleri ve güncellemeleri birleştirin. @@ -2335,6 +2333,7 @@ repos.forks=Çatallar repos.issues=Konular repos.size=Boyut + defaulthooks=Varsayılan Web İstemcileri defaulthooks.desc=Web İstemcileri, belirli Gitea olayları tetiklendiğinde otomatik olarak HTTP POST isteklerini sunucuya yapar. Burada tanımlanan Web İstemcileri varsayılandır ve tüm yeni depolara kopyalanır. web istemcileri kılavuzunda daha fazla bilgi edinin. defaulthooks.add_webhook=Varsayılan Web İstemcisi Ekle @@ -2728,3 +2727,5 @@ error.probable_bad_default_signature=UYARI! Varsayılan anahtarın bu kimliği o error.no_unit_allowed_repo=Bu deponun hiçbir bölümüne erişme izniniz yok. error.unit_not_allowed=Bu depo bölümüne erişme izniniz yok. +[packages] + diff --git a/options/locale/locale_uk-UA.ini b/options/locale/locale_uk-UA.ini index d0a93e8508..1b320c9463 100644 --- a/options/locale/locale_uk-UA.ini +++ b/options/locale/locale_uk-UA.ini @@ -92,6 +92,7 @@ error404=Сторінка, до якої ви намагаєтеся зверн never=Ніколи + [error] occurred=Сталася помилка missing_csrf=Некоректний запит: токен CSRF не задано @@ -1328,8 +1329,6 @@ issues.due_date_remove=видалив(ла) дату завершення %s %s issues.due_date_overdue=Прострочено issues.due_date_invalid=Термін дії не дійсний або знаходиться за межами допустимого діапазону. Будь ласка використовуйте формат 'yyyy-mm-dd'. 
issues.dependency.title=Залежності -issues.dependency.issue_no_dependencies=Ця задача тепер не має залежностей. -issues.dependency.pr_no_dependencies=Цей запит на злиття в даний час не має залежностей. issues.dependency.add=Додати залежність… issues.dependency.cancel=Відмінити issues.dependency.remove=Видалити @@ -1486,7 +1485,6 @@ pulls.outdated_with_base_branch=Ця гілка застаріла віднос pulls.closed_at=`закрив цей запит на злиття %[2]s` pulls.reopened_at=`повторно відкрив цей запит на злиття %[2]s` pulls.merge_instruction_hint=`Також можна переглянути інструкції для командного рядка.` - pulls.merge_instruction_step1_desc=У репозиторії вашого проєкту перевірте нову гілку і протестуйте зміни. pulls.merge_instruction_step2_desc=Об'єднати зміни і оновити на Gitea. @@ -2408,6 +2406,7 @@ repos.forks=Форки repos.issues=Задачі repos.size=Розмір + defaulthooks=Веб-хуки за замовчуванням defaulthooks.desc=Веб-хуки автоматично створюють HTTP POST-запити до сервера, коли виконуються певні події Gitea. Визначені тут веб-хуки є типовими і копіюються у всі нові сховища. Детальніше читайте в інструкції по використанню web-хуків. defaulthooks.add_webhook=Додати веб-хук за замовчуванням @@ -2823,3 +2822,5 @@ error.probable_bad_default_signature=УВАГА! Хоча типовий клю error.no_unit_allowed_repo=У вас немає доступу до жодного розділу цього репозитория. error.unit_not_allowed=У вас немає доступу до жодного розділу цього репозитория. +[packages] + diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index b92151447d..7a5d8250e5 100644 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -105,6 +105,7 @@ error404=您正尝试访问的页面 不存在您 never=从不 + [error] occurred=发生了一个错误 report_message=如果您确定这是一个 Gitea bug,请在 GitHub 上搜索问题,或在必要时打开一个新问题。 @@ -282,6 +283,7 @@ register_helper_msg=已经注册?立即登录! social_register_helper_msg=已经注册?立即绑定! disable_register_prompt=对不起,注册功能已被关闭。请联系网站管理员。 disable_register_mail=已禁用注册的电子邮件确认。 +manual_activation_only=请联系您的站点管理员来完成激活。 remember_me=记住此设备 forgot_password_title=忘记密码 forgot_password=忘记密码? @@ -351,11 +353,11 @@ hi_user_x=%s 您好, activate_account=请激活您的帐户 activate_account.title=%s,请激活您的帐户 activate_account.text_1=%[1]s 您好,感谢注册 %[2]s ! -activate_account.text_2=请点击以下链接激活您在 %s 的帐户: +activate_account.text_2=请在 %s 时间内,点击以下链接激活您的账户: activate_email=请验证您的邮箱地址 activate_email.title=%s,请验证您的电子邮件地址 -activate_email.text=请点击以下链接,以验证你的电子邮件地址在 %s 中 +activate_email.text=请在 %s 时间内,点击以下链接,以验证你的电子邮件地址: register_notify=欢迎来到 Gitea register_notify.title=%[1]s,欢迎来到 %[2]s @@ -365,7 +367,7 @@ register_notify.text_3=如果此账户已为您创建,请先 设 reset_password=恢复您的账户 reset_password.title=%s,您已请求恢复您的帐户 -reset_password.text=请点击以下链接,恢复你在 %s 的账户: +reset_password.text=请在 %s 时间内,点击以下链接,恢复你的账户: register_success=注册成功 @@ -487,7 +489,9 @@ auth_failed=授权验证失败:%v still_own_repo=此帐户仍拥有至少一个仓库,您需要先删除或转移它们。 still_has_org=此帐户仍隶属于一个或多个组织,您需要退出他们。 +still_own_packages=您的账户拥有一个或多个软件包;请先删除它们。 org_still_own_repo=该组织仍然是某些仓库的拥有者,您必须先转移或删除它们才能执行删除组织操作! 
+org_still_own_packages=该组织仍然是一个或多个软件包的拥有者,您必须先删除它们。 target_branch_not_exist=目标分支不存在。 @@ -661,7 +665,6 @@ ssh_invalid_token_signature=提供的 SSH 密钥、签名或令牌不匹配或 ssh_token_required=您必须为下面的令牌提供签名 ssh_token=令牌 ssh_token_help=您可以使用以下方式生成签名: -ssh_token_code=echo -n "%s" | ssh-keygen -Y sign-n gitea -f /path_to_your_pubkey ssh_token_signature=增强 SSH 签名 key_signature_ssh_placeholder=以 '-----BEGIN SSH SIGNATURE -----' 开头 verify_ssh_key_success=SSH 密钥 '%s' 已被验证。 @@ -1181,6 +1184,7 @@ projects.board.deletion_desc=删除项目看板会将所有相关问题移至“ projects.board.color=颜色 projects.open=开启 projects.close=关闭 +projects.board.assigned_to=指派给 issues.desc=组织 bug 报告、任务和里程碑。 issues.filter_assignees=筛选指派人 @@ -1268,7 +1272,7 @@ issues.filter_sort=排序 issues.filter_sort.latest=最新创建 issues.filter_sort.oldest=最早创建 issues.filter_sort.recentupdate=最近更新 -issues.filter_sort.leastupdate=最少更新 +issues.filter_sort.leastupdate=最早更新 issues.filter_sort.mostcomment=最多评论 issues.filter_sort.leastcomment=最少评论 issues.filter_sort.nearduedate=到期日从近到远 @@ -1373,6 +1377,9 @@ issues.lock.reason=锁定原因 issues.lock.title=锁定有关此问题的对话。 issues.unlock.title=解锁有关此问题的对话。 issues.comment_on_locked=您不能对锁定的问题发表评论。 +issues.delete=删除 +issues.delete.title=是否删除工单? +issues.delete.text=您真的要删除这个工单吗?(该操作将会永久删除所有内容。如果您需要保留,请关闭它) issues.tracker=时间跟踪 issues.start_tracking_short=启动计时器 issues.start_tracking=开始时间跟踪 @@ -1413,8 +1420,8 @@ issues.due_date_remove=到期时间 %s %s 已删除 issues.due_date_overdue=过期 issues.due_date_invalid=到期日期无效或超出范围。请使用 'yyyy-mm-dd' 格式。 issues.dependency.title=依赖工单 -issues.dependency.issue_no_dependencies=此工单当前没有任何依赖。 -issues.dependency.pr_no_dependencies=此合并请求当前没有任何依赖。 +issues.dependency.issue_no_dependencies=没有设置依赖项。 +issues.dependency.pr_no_dependencies=没有设置依赖项。 issues.dependency.add=添加依赖工单... issues.dependency.cancel=取消 issues.dependency.remove=删除 @@ -1575,7 +1582,6 @@ pulls.outdated_with_base_branch=此分支相比基础分支已过期 pulls.closed_at=`于 %[2]s 关闭此合并请求 ` pulls.reopened_at=`重新打开此合并请求 %[2]s` pulls.merge_instruction_hint=`你也可以查看 命令行指令` - pulls.merge_instruction_step1_desc=从你的仓库中签出一个新的分支并测试变更。 pulls.merge_instruction_step2_desc=合并变更并更新到 Gitea 上 @@ -1786,7 +1792,9 @@ settings.pulls.allow_rebase_merge_commit=启用变基显式合并 (--no-ff) settings.pulls.allow_squash_commits=启用Squash合并提交 settings.pulls.allow_manual_merge=允许将合并请求标记为手动合并 settings.pulls.enable_autodetect_manual_merge=启用自动检测手动合并 (注意:在某些特殊情况下可能发生错误判断) +settings.pulls.allow_rebase_update=允许通过变基更新拉取请求分支 settings.pulls.default_delete_branch_after_merge=默认合并后删除合并请求分支 +settings.packages_desc=启用仓库软件包注册中心 settings.projects_desc=启用仓库项目 settings.admin_settings=管理员设置 settings.admin_enable_health_check=启用仓库健康检查 (git fsck) @@ -1944,6 +1952,8 @@ settings.event_pull_request_review=已审核的合并请求 settings.event_pull_request_review_desc=合并请求被批准、拒绝或提出审查意见 settings.event_pull_request_sync=合并请求被同步 settings.event_pull_request_sync_desc=合并请求被同步。 +settings.event_package=软件包 +settings.event_package_desc=软件包已在仓库中被创建或删除。 settings.branch_filter=分支过滤 settings.branch_filter_desc=推送、创建,删除分支事件的分支白名单,使用 glob 模式匹配指定。若为空或 *,则将报告所有分支的事件。语法文档见 github.com/gobwas/glob。示例:master,{master,release*}。 settings.active=激活 @@ -2425,6 +2435,7 @@ dashboard.resync_all_hooks=重新同步所有仓库的 pre-receive、update 和 dashboard.reinit_missing_repos=重新初始化所有丢失的 Git 仓库存在的记录 dashboard.sync_external_users=同步外部用户数据 dashboard.cleanup_hook_task_table=清理 hook_task 表 +dashboard.cleanup_packages=清理过期的软件包 dashboard.server_uptime=服务运行时间 dashboard.current_goroutine=当前 Goroutines 数量 dashboard.current_memory_usage=当前内存使用量 @@ -2457,6 +2468,7 @@ dashboard.gc_times=GC 执行次数 
dashboard.delete_old_actions=从数据库中删除所有旧操作记录 dashboard.delete_old_actions.started=已开始从数据库中删除所有旧操作记录。 dashboard.update_checker=更新检查器 +dashboard.delete_old_system_notices=从数据库中删除所有旧系统通知 users.user_manage_panel=用户帐户管理 users.new_account=创建新帐户 @@ -2493,6 +2505,7 @@ users.update_profile=更新帐户 users.delete_account=删除帐户 users.still_own_repo=此用户仍然拥有一个或多个仓库。必须首先删除或转让这些仓库。 users.still_has_org=此用户是组织的成员。必须先从组织中删除用户。 +users.still_own_packages=此用户仍然拥有一个或多个软件包。请先删除这些软件包。 users.deletion_success=用户帐户已被删除。 users.reset_2fa=重置两步验证 users.list_status_filter.menu_text=过滤 @@ -2539,6 +2552,17 @@ repos.forks=派生数 repos.issues=工单数 repos.size=大小 +packages.package_manage_panel=软件包管理 +packages.total_size=总大小:%s +packages.owner=所有者 +packages.creator=创建者 +packages.name=名称 +packages.version=版本 +packages.type=类型 +packages.repository=仓库 +packages.size=大小 +packages.published=已发布 + defaulthooks=默认Web钩子 defaulthooks.desc=当某些 Gitea 事件触发时,Web 钩子自动向服务器发出 HTTP POST 请求。这里定义的 Web 钩子是默认配置,将被复制到所有新的仓库中。详情请访问 Web 钩子指南。 defaulthooks.add_webhook=添加默认Web 钩子 @@ -2806,9 +2830,12 @@ monitor.next=下次执行时间 monitor.previous=上次执行时间 monitor.execute_times=执行次数 monitor.process=运行中进程 +monitor.stacktrace=调用栈踪迹 +monitor.goroutines=%d 个 Goroutine monitor.desc=进程描述 monitor.start=开始时间 monitor.execute_time=执行时长 +monitor.last_execution_result=结果 monitor.process.cancel=中止进程 monitor.process.cancel_desc=中止一个进程可能导致数据丢失 monitor.process.cancel_notices=中止:%s ? @@ -2975,3 +3002,95 @@ unit=单元 error.no_unit_allowed_repo=您没有被允许访问此仓库的任何单元。 error.unit_not_allowed=您没有权限访问此仓库单元 +[packages] +title=软件包 +desc=管理仓库软件包。 +empty=还没有软件包。 +empty.documentation=关于软件包注册中心的更多信息,请参阅 文档 。 +filter.type=类型 +filter.type.all=所有 +filter.no_result=您的过滤器没有产生任何结果。 +filter.container.tagged=已加标签 +filter.container.untagged=未加标签 +published_by=于 %[1]s 发布了 %[3]s +published_by_in=%[3]s 于 %[1]s 发布了 %[5]s +installation=安装 +about=关于这个软件包 +requirements=要求 +dependencies=依赖 +keywords=关键词 +details=详情 +details.author=作者 +details.project_site=项目站点 +details.license=许可协议 +assets=文件 +versions=版本 +versions.on=于 +versions.view_all=查看全部 +dependency.id=ID +dependency.version=版本 +composer.registry=在您的 ~/.composer/config.json 文件中设置此注册中心: +composer.install=要使用 Composer 安装软件包,请运行以下命令: +composer.documentation=关于 Composer 注册中心的更多信息,请参阅 文档 。 +composer.dependencies=依赖 +composer.dependencies.development=开发依赖 +conan.details.repository=仓库 +conan.registry=从命令行设置此注册中心: +conan.install=要使用 Conan 安装软件包,请运行以下命令: +conan.documentation=关于 Conan 注册中心的更多信息,请参阅 文档。 +container.details.type=镜像类型 +container.details.platform=平台 +container.details.repository_site=仓库站点 +container.details.documentation_site=文档网站 +container.pull=从命令行拉取镜像: +container.documentation=关于 Container 注册中心的更多信息,请参阅 文档。 +container.multi_arch=OS / Arch +container.layers=镜像层 +container.labels=标签 +container.labels.key=键 +container.labels.value=值 +generic.download=从命令行下载软件包: +generic.documentation=关于通用注册中心的更多信息,请参阅 文档。 +helm.registry=从命令行设置此注册中心: +helm.install=要安装包,请运行以下命令: +helm.documentation=关于 Helm 注册中心的更多信息,请参阅 文档。 +maven.registry=在您项目的 pom.xml 文件中设置此注册中心: +maven.install=要使用这个软件包,在 pom.xml 文件中的 依赖项 块中包含以下内容: +maven.install2=通过命令行运行: +maven.download=要下载依赖项,请通过命令行运行: +maven.documentation=关于 Maven 注册中心的更多信息,请参阅 文档。 +nuget.registry=从命令行设置此注册中心: +nuget.install=要使用 Nuget 安装软件包,请运行以下命令: +nuget.documentation=关于 Nuget 注册中心的更多信息,请参阅 文档。 +nuget.dependency.framework=目标框架 +npm.registry=在您项目的 .npmrc 文件中设置此注册中心: +npm.install=要使用 npm 安装软件包,请运行以下命令: +npm.install2=或将其添加到 package.json 文件: +npm.documentation=关于 npm 注册中心的更多信息,请参阅 文档。 +npm.dependencies=依赖项 +npm.dependencies.development=开发依赖 
+npm.dependencies.peer=Peer 依赖 +npm.dependencies.optional=可选依赖 +npm.details.tag=标签 +pypi.requires=需要 Python +pypi.install=要使用 pip 安装软件包,请运行以下命令: +pypi.documentation=关于 PyPI 注册中心的信息,请参阅 文档。 +rubygems.install=要使用 gem 安装软件包,请运行以下命令: +rubygems.install2=或将它添加到 Gemfile: +rubygems.dependencies.runtime=运行时依赖 +rubygems.dependencies.development=开发依赖 +rubygems.required.ruby=需要 Ruby 版本 +rubygems.required.rubygems=需要 RubyGem 版本 +rubygems.documentation=关于 RubyGems 注册中心的更多信息,请参阅 文档。 +settings.link=将此软件包链接到仓库 +settings.link.description=如果您将一个软件包与一个代码库链接起来,软件包将显示在代码库的软件包列表中。 +settings.link.select=选择仓库 +settings.link.button=更新仓库链接 +settings.link.success=仓库链接已成功更新。 +settings.link.error=更新仓库链接失败。 +settings.delete=删除软件包 +settings.delete.description=删除软件包是永久性的,无法撤消。 +settings.delete.notice=您将要删除 %s (%s)。此操作是不可逆的,您确定吗? +settings.delete.success=软件包已被删除。 +settings.delete.error=删除软件包失败。 + diff --git a/options/locale/locale_zh-HK.ini b/options/locale/locale_zh-HK.ini index 643c0c386f..b12fda1600 100644 --- a/options/locale/locale_zh-HK.ini +++ b/options/locale/locale_zh-HK.ini @@ -46,6 +46,7 @@ cancel=取消 + [error] [startpage] @@ -432,7 +433,6 @@ pulls.can_auto_merge_desc=這個拉請求可以自動合併。 ; %[2]s
%[3]s
- milestones.new=新的里程碑 milestones.open_tab=%d 開啟中 milestones.close_tab=%d 已關閉 @@ -677,6 +677,7 @@ repos.size=大小 + auths.name=認證名稱 auths.type=認證類型 auths.enabled=已啟用 @@ -873,3 +874,5 @@ error.not_signed_commit=未簽名的提交 [units] +[packages] + diff --git a/options/locale/locale_zh-TW.ini b/options/locale/locale_zh-TW.ini index 926d6c0875..7acfb5b810 100644 --- a/options/locale/locale_zh-TW.ini +++ b/options/locale/locale_zh-TW.ini @@ -105,6 +105,7 @@ error404=您正嘗試訪問的頁面 不存在您 never=從來沒有 + [error] occurred=發生錯誤 report_message=如果您確定這是一個 Gitea 的 bug,請到 GitHub 搜尋相關的問題,如果有需要您也可以建立新問題。 @@ -282,6 +283,7 @@ register_helper_msg=已經有帳戶了?立即登入! social_register_helper_msg=已經有帳戶了?立即連結! disable_register_prompt=註冊功能已停用。 請聯繫您的網站管理員。 disable_register_mail=已停用註冊確認電子郵件。 +manual_activation_only=請聯絡您的網站管理員以完成啟用程序。 remember_me=記得這個裝置 forgot_password_title=忘記密碼 forgot_password=忘記密碼? @@ -661,7 +663,6 @@ ssh_invalid_token_signature=提供的 SSH 金鑰、簽署、Token 不符合或 T ssh_token_required=您必須為下列的 Token 提供簽署 ssh_token=Token ssh_token_help=您可以使用以下方法產生簽署: -ssh_token_code=echo -n "%s" | ssh-keygen -Y sign -n gitea -f /path_to_your_pubkey ssh_token_signature=Armored SSH 簽署 key_signature_ssh_placeholder=以「-----BEGIN SSH SIGNATURE-----」開頭 verify_ssh_key_success=已驗證 SSH 金鑰「%s」。 @@ -1181,6 +1182,7 @@ projects.board.deletion_desc=刪除專案看板會將所有相關的問題移動 projects.board.color=顏色 projects.open=開啟 projects.close=關閉 +projects.board.assigned_to=指派給 issues.desc=管理錯誤報告、任務和里程碑。 issues.filter_assignees=篩選負責人 @@ -1373,6 +1375,9 @@ issues.lock.reason=鎖定原因 issues.lock.title=鎖定此問題的對話。 issues.unlock.title=解鎖此問題的對話。 issues.comment_on_locked=您無法在已鎖定的問題上留言。 +issues.delete=刪除 +issues.delete.title=刪除此問題? +issues.delete.text=您真的要刪除此問題嗎?(這將會永久移除所有內容。若您還想保留,請考慮改為關閉它。) issues.tracker=時間追蹤 issues.start_tracking_short=開始計時 issues.start_tracking=開始時間追蹤 @@ -1413,8 +1418,8 @@ issues.due_date_remove=移除了截止日期 %s %s issues.due_date_overdue=逾期 issues.due_date_invalid=截止日期無效或超出範圍,請使用「yyyy-mm-dd」的格式。 issues.dependency.title=先決條件 -issues.dependency.issue_no_dependencies=此問題目前沒有任何先決條件。 -issues.dependency.pr_no_dependencies=此合併求目前沒有任何先決條件。 +issues.dependency.issue_no_dependencies=未設定先決條件。 +issues.dependency.pr_no_dependencies=未設定先決條件。 issues.dependency.add=加入先決條件... 
issues.dependency.cancel=取消 issues.dependency.remove=移除 @@ -1575,7 +1580,6 @@ pulls.outdated_with_base_branch=相對於基底分支,此分支已過時 pulls.closed_at=`關閉了這個合併請求 %[2]s` pulls.reopened_at=`重新開放了這個合併請求 %[2]s` pulls.merge_instruction_hint=`您也可以查看命令列指南。` - pulls.merge_instruction_step1_desc=在您的儲存庫中切換到新分支並測試變更。 pulls.merge_instruction_step2_desc=合併變更並更新到 Gitea。 @@ -1786,6 +1790,7 @@ settings.pulls.allow_rebase_merge_commit=啟用 Rebase 顯式合併提交(--no-f settings.pulls.allow_squash_commits=啟用 Squash 合併提交 settings.pulls.allow_manual_merge=允許將合併請求標記為手動合併 settings.pulls.enable_autodetect_manual_merge=啟用自動偵測手動合併(注意:在某些特殊情況下可能發生誤判) +settings.pulls.allow_rebase_update=啟用透過 Rebase 更新合併請求分支 settings.pulls.default_delete_branch_after_merge=預設在合併後刪除合併請求分支 settings.projects_desc=啟用儲存庫專案 settings.admin_settings=管理員設定 @@ -2457,6 +2462,7 @@ dashboard.gc_times=GC 執行次數 dashboard.delete_old_actions=從資料庫刪除所有舊行為 dashboard.delete_old_actions.started=從資料庫刪除所有舊行為的任務已啟動。 dashboard.update_checker=更新檢查器 +dashboard.delete_old_system_notices=從資料庫刪除所有舊系統提示 users.user_manage_panel=使用者帳戶管理 users.new_account=建立使用者帳戶 @@ -2539,6 +2545,7 @@ repos.forks=Fork 數 repos.issues=問題數 repos.size=大小 + defaulthooks=預設 Webhook defaulthooks.desc=當觸發某些 Gitea 事件時,Webhook 會自動發出 HTTP POST 請求到指定的伺服器。這裡所定義的 Webhook 是預設的,並且會複製到所有新儲存庫。在 Webhook 指南閱讀更多內容。 defaulthooks.add_webhook=新增預設 Webhook @@ -2809,6 +2816,7 @@ monitor.process=執行中的處理程序 monitor.desc=描述 monitor.start=開始時間 monitor.execute_time=已執行時間 +monitor.last_execution_result=結果 monitor.process.cancel=結束處理程序 monitor.process.cancel_desc=結束處理程序可能造成資料遺失 monitor.process.cancel_notices=結束: %s? @@ -2975,3 +2983,5 @@ unit=單元 error.no_unit_allowed_repo=您未被允許存取此儲存庫的任何區域。 error.unit_not_allowed=您未被允許訪問此儲存庫區域 +[packages] + diff --git a/package-lock.json b/package-lock.json index da212eb839..8b63409e28 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,9 +8,9 @@ "license": "MIT", "dependencies": { "@claviska/jquery-minicolors": "2.3.6", - "@primer/octicons": "16.3.1", + "@primer/octicons": "17.0.0", "add-asset-webpack-plugin": "2.0.1", - "css-loader": "6.6.0", + "css-loader": "6.7.1", "dropzone": "6.0.0-beta.2", "easymde": "2.16.1", "esbuild-loader": "2.18.0", @@ -23,12 +23,12 @@ "less-loader": "10.2.0", "license-checker-webpack-plugin": "0.2.1", "mermaid": "8.14.0", - "mini-css-extract-plugin": "2.5.3", - "monaco-editor": "0.32.1", + "mini-css-extract-plugin": "2.6.0", + "monaco-editor": "0.33.0", "monaco-editor-webpack-plugin": "7.0.1", "pretty-ms": "7.0.1", - "sortablejs": "1.14.0", - "swagger-ui-dist": "4.5.2", + "sortablejs": "1.15.0", + "swagger-ui-dist": "4.10.0", "tributejs": "5.1.3", "uint8-to-base64": "0.2.0", "vue": "2.6.14", @@ -36,30 +36,30 @@ "vue-calendar-heatmap": "0.8.4", "vue-loader": "15.9.8", "vue-template-compiler": "2.6.14", - "webpack": "5.69.1", + "webpack": "5.70.0", "webpack-cli": "4.9.2", - "workbox-routing": "6.5.0", - "workbox-strategies": "6.5.0", + "workbox-routing": "6.5.2", + "workbox-strategies": "6.5.2", "worker-loader": "3.0.8", "wrap-ansi": "8.0.1" }, "devDependencies": { - "eslint": "8.9.0", + "eslint": "8.12.0", "eslint-plugin-html": "6.2.0", "eslint-plugin-import": "2.25.4", - "eslint-plugin-unicorn": "41.0.0", + "eslint-plugin-unicorn": "41.0.1", "eslint-plugin-vue": "8.5.0", "jest": "27.5.1", "jest-extended": "2.0.0", "jest-raw-loader": "1.0.1", "postcss-less": "6.0.0", - "stylelint": "14.5.3", + "stylelint": "14.6.1", "stylelint-config-standard": "25.0.0", "svgo": "2.8.0", - "updates": "13.0.2" + "updates": "13.0.4" }, "engines": { - "node": ">= 12.17.0" + "node": ">= 14" 
} }, "node_modules/@ampproject/remapping": { @@ -685,16 +685,16 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.1.0.tgz", - "integrity": "sha512-C1DfL7XX4nPqGd6jcP01W9pVM1HYCuUkFk1432D7F0v3JSlUIeOYn9oCoi3eoLZ+iwBSb29BMFxxny0YrrEZqg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.2.1.tgz", + "integrity": "sha512-bxvbYnBPN1Gibwyp6NrpnFzA3YtRL3BBAyEAFVIpNTm2Rn4Vy87GA5M4aSn3InRrlsbX5N0GW7XIx+U4SAEKdQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^9.3.1", "globals": "^13.9.0", - "ignore": "^4.0.6", + "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.0.4", @@ -704,15 +704,6 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/@eslint/eslintrc/node_modules/ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true, - "engines": { - "node": ">= 4" - } - }, "node_modules/@humanwhocodes/config-array": { "version": "0.9.3", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.3.tgz", @@ -1157,9 +1148,9 @@ } }, "node_modules/@primer/octicons": { - "version": "16.3.1", - "resolved": "https://registry.npmjs.org/@primer/octicons/-/octicons-16.3.1.tgz", - "integrity": "sha512-J3IlK0Ok88RQZVB//af7Lnl1Vw2buyyr5G3oEvK1wRSYTJi/E/HBm5JZUihmDAtm/unr85FC534DwA5e+4LR2w==", + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/@primer/octicons/-/octicons-17.0.0.tgz", + "integrity": "sha512-DiIjtous4XPuR2deTctD3/RVZy/vRzVYBgYYvHV313MmTfkbVP60qLH5txrT3/bYNvnb0poNDelLS6U0kqlvHA==", "dependencies": { "object-assign": "^4.1.1" } @@ -2434,12 +2425,12 @@ } }, "node_modules/css-loader": { - "version": "6.6.0", - "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.6.0.tgz", - "integrity": "sha512-FK7H2lisOixPT406s5gZM1S3l8GrfhEBT3ZiL2UX1Ng1XWs0y2GPllz/OTyvbaHe12VgQrIXIzuEGVlbUhodqg==", + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.7.1.tgz", + "integrity": "sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw==", "dependencies": { "icss-utils": "^5.1.0", - "postcss": "^8.4.5", + "postcss": "^8.4.7", "postcss-modules-extract-imports": "^3.0.0", "postcss-modules-local-by-default": "^4.0.0", "postcss-modules-scope": "^3.0.0", @@ -3247,9 +3238,9 @@ "integrity": "sha1-sgOOhG3DO6pXlhKNCAS0VbjB4h0=" }, "node_modules/debug": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", - "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -3539,9 +3530,9 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.8.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz", - "integrity": "sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA==", + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.9.2.tgz", + "integrity": 
"sha512-GIm3fQfwLJ8YZx2smuHpBKkXC1yOk+OBEmKckVyL0i/ea8mqDEykK3ld5dgH1QYPNyT/lIllxV2LULnxCHaHkA==", "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -4076,12 +4067,12 @@ } }, "node_modules/eslint": { - "version": "8.9.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.9.0.tgz", - "integrity": "sha512-PB09IGwv4F4b0/atrbcMFboF/giawbBLVC7fyDamk5Wtey4Jh2K+rYaBhCAbUyEI4QzB1ly09Uglc9iCtFaG2Q==", + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.12.0.tgz", + "integrity": "sha512-it1oBL9alZg1S8UycLm5YDMAkIhtH6FtAzuZs6YvoGVldWjbS08BkAdb/ymP9LlAyq8koANu32U7Ib/w+UNh8Q==", "dev": true, "dependencies": { - "@eslint/eslintrc": "^1.1.0", + "@eslint/eslintrc": "^1.2.1", "@humanwhocodes/config-array": "^0.9.2", "ajv": "^6.10.0", "chalk": "^4.0.0", @@ -4232,9 +4223,9 @@ "dev": true }, "node_modules/eslint-plugin-unicorn": { - "version": "41.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-41.0.0.tgz", - "integrity": "sha512-xoJCaRc1uy5REg9DkVga1BkZV57jJxoqOcrU28QHZB89Lk5LdSqdVyTIt9JQVfHNKaiyJ7X+3iLlIn+VEHWEzA==", + "version": "41.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-41.0.1.tgz", + "integrity": "sha512-gF5vo2dIj0YdNMQ/IMegiBkQdQ22GBFFVpdkJP+0og3w7XD4ypea0xQVRv6iofkLVR2w0phAdikcnU01ybd4Ow==", "dev": true, "dependencies": { "@babel/helper-validator-identifier": "^7.15.7", @@ -4787,9 +4778,9 @@ } }, "node_modules/globals": { - "version": "13.12.1", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.1.tgz", - "integrity": "sha512-317dFlgY2pdJZ9rspXDks7073GpDmXdfbM3vYYp0HAMKGDh1FfWPleI2ljVNLQX5M5lXcAslTcPTrOrMEFOjyw==", + "version": "13.13.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.13.0.tgz", + "integrity": "sha512-EQ7Q18AJlPwp3vUDL4mKA0KXrXyNIQyWon6T6XQiBQF0XHvRsiCSrWmmeATpUzdJN2HhWZU6Pdl0a9zdep5p6A==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -6800,9 +6791,9 @@ } }, "node_modules/mini-css-extract-plugin": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.5.3.tgz", - "integrity": "sha512-YseMB8cs8U/KCaAGQoqYmfUuhhGW0a9p9XvWXrxVOkE3/IiISTLw4ALNt7JR5B2eYauFM+PQGSbXMDmVbR7Tfw==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.0.tgz", + "integrity": "sha512-ndG8nxCEnAemsg4FSgS+yNyHKgkTB4nPKqCOgh65j3/30qqC5RaSQQXMm++Y6sb6E1zRSxPkztj9fqxhS1Eo6w==", "dependencies": { "schema-utils": "^4.0.0" }, @@ -6829,9 +6820,9 @@ } }, "node_modules/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" }, "node_modules/minimist-options": { "version": "4.1.0", @@ -6853,9 +6844,9 @@ "integrity": "sha512-9ARkWHBs+6YJIvrIp0Ik5tyTTtP9PoV0Ssu2Ocq5y9v8+NOOpWiRshAp8c4rZVWTOe+157on/5G+zj5pwIQFEQ==" }, "node_modules/monaco-editor": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.32.1.tgz", - "integrity": "sha512-LUt2wsUvQmEi2tfTOK+tjAPvt7eQ+K5C4rZPr6SeuyzjAuAHrIvlUloTcOiGjZW3fn3a/jFQCONrEJbNOaCqbA==" + "version": "0.33.0", + "resolved": 
"https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.33.0.tgz", + "integrity": "sha512-VcRWPSLIUEgQJQIE0pVT8FcGBIgFoxz7jtqctE+IiCxWugD0DwgyQBcZBhdSrdMC84eumoqMZsGl2GTreOzwqw==" }, "node_modules/monaco-editor-webpack-plugin": { "version": "7.0.1", @@ -6876,9 +6867,9 @@ "devOptional": true }, "node_modules/nanoid": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz", - "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.2.tgz", + "integrity": "sha512-CuHBogktKwpm5g2sRgv83jEy2ijFzBwMoYA60orPDR7ynsLijJDqgsi4RDGj3OJpy3Ieb+LYwiRmIOGyytgITA==", "bin": { "nanoid": "bin/nanoid.cjs" }, @@ -7364,20 +7355,26 @@ } }, "node_modules/postcss": { - "version": "8.4.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.6.tgz", - "integrity": "sha512-OovjwIzs9Te46vlEx7+uXB0PLijpwjXGKXjVGGPIGubGpq7uh5Xgf6D6FiJ/SzJMBosHDp6a2hiXOS97iBXcaA==", + "version": "8.4.12", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.12.tgz", + "integrity": "sha512-lg6eITwYe9v6Hr5CncVbK70SoioNQIq81nsaG86ev5hAidQvmOeETBqs7jm43K2F5/Ley3ytDtriImV6TpNiSg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + } + ], "dependencies": { - "nanoid": "^3.2.0", + "nanoid": "^3.3.1", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" }, "engines": { "node": "^10 || ^12 || >=14" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" } }, "node_modules/postcss-less": { @@ -8119,9 +8116,9 @@ } }, "node_modules/sortablejs": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/sortablejs/-/sortablejs-1.14.0.tgz", - "integrity": "sha512-pBXvQCs5/33fdN1/39pPL0NZF20LeRbLQ5jtnheIPN9JQAaufGjKdWduZn4U7wCtVuzKhmRkI0DFYHYRbB2H1w==" + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/sortablejs/-/sortablejs-1.15.0.tgz", + "integrity": "sha512-bv9qgVMjUMf89wAvM6AxVvS/4MX3sPeN0+agqShejLU5z5GX4C75ow1O2e5k4L6XItUyAK3gH6AxSbXrOM5e8w==" }, "node_modules/source-list-map": { "version": "2.0.1", @@ -8368,16 +8365,16 @@ "dev": true }, "node_modules/stylelint": { - "version": "14.5.3", - "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-14.5.3.tgz", - "integrity": "sha512-omHETL+kGHR+fCXFK1SkZD/A+emCP9esggAdWEl8GPjTNeyRYj+H6uetRDcU+7E451zwWiUYGVAX+lApsAZgsQ==", + "version": "14.6.1", + "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-14.6.1.tgz", + "integrity": "sha512-FfNdvZUZdzh9KDQxDnO7Opp+prKh8OQVuSW8S13cBtxrooCbm6J6royhUeb++53WPMt04VB+ZbOz/QmzAijs6Q==", "dev": true, "dependencies": { "balanced-match": "^2.0.0", "colord": "^2.9.2", "cosmiconfig": "^7.0.1", "css-functions-list": "^3.0.1", - "debug": "^4.3.3", + "debug": "^4.3.4", "execall": "^2.0.0", "fast-glob": "^3.2.11", "fastest-levenshtein": "^1.0.12", @@ -8398,7 +8395,7 @@ "normalize-path": "^3.0.0", "normalize-selector": "^0.2.0", "picocolors": "^1.0.0", - "postcss": "^8.4.6", + "postcss": "^8.4.12", "postcss-media-query-parser": "^0.2.3", "postcss-resolve-nested-selector": "^0.1.1", "postcss-safe-parser": "^6.0.0", @@ -8549,9 +8546,9 @@ } }, "node_modules/swagger-ui-dist": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-4.5.2.tgz", - "integrity": 
"sha512-wV4w54eW9z+VKbYJBJfULfqO05otCbM9jwgRIkwRl9CrfTVKelDzyhhEvdUQkGUzro+Ir8TOZPiZgKIdIdolWQ==" + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-4.10.0.tgz", + "integrity": "sha512-+RBJA/beHLg0hO4rJZIhgUdxmZE7AaNfc11PCSzZdnzkmwSJv8Qg0HZbr7BQPQjkC6z4xVWq2h1itOPk1FQBrA==" }, "node_modules/symbol-tree": { "version": "3.2.4", @@ -8915,9 +8912,9 @@ } }, "node_modules/updates": { - "version": "13.0.2", - "resolved": "https://registry.npmjs.org/updates/-/updates-13.0.2.tgz", - "integrity": "sha512-bTC+36YoHGzK8vdKHebToVYsa5XTHCBe7X41H39wUt1A9OK1GhoY7pGzkOfXgWVS6yvVK9BSWfqVg0VA98fahQ==", + "version": "13.0.4", + "resolved": "https://registry.npmjs.org/updates/-/updates-13.0.4.tgz", + "integrity": "sha512-RgHZnmTlcoRdn2yA8FZUwlRj7ltEANZQvh3ISAoSZcxunIv2s5EpFnZh8jgU7DigtX4ogm4XSn0r5O4u+cF7sg==", "dev": true, "bin": { "updates": "bin/updates.js" @@ -9198,9 +9195,9 @@ } }, "node_modules/webpack": { - "version": "5.69.1", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.69.1.tgz", - "integrity": "sha512-+VyvOSJXZMT2V5vLzOnDuMz5GxEqLk7hKWQ56YxPW/PQRUuKimPqmEIJOx8jHYeyo65pKbapbW464mvsKbaj4A==", + "version": "5.70.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.70.0.tgz", + "integrity": "sha512-ZMWWy8CeuTTjCxbeaQI21xSswseF2oNOwc70QSKNePvmxE7XW36i7vpBMYZFAUHPwQiEbNGCEYIOOlyRbdGmxw==", "dependencies": { "@types/eslint-scope": "^3.7.3", "@types/estree": "^0.0.51", @@ -9211,7 +9208,7 @@ "acorn-import-assertions": "^1.7.6", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.8.3", + "enhanced-resolve": "^5.9.2", "es-module-lexer": "^0.9.0", "eslint-scope": "5.1.1", "events": "^3.2.0", @@ -9440,24 +9437,24 @@ } }, "node_modules/workbox-core": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/workbox-core/-/workbox-core-6.5.0.tgz", - "integrity": "sha512-5SPwNipUzYBhrneLVT02JFA0fw3LG82jFAN/G2NzxkIW10t4MVZuML2nU94bbkgjq25u0fkY8+4JXzMfHgxEWQ==" + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/workbox-core/-/workbox-core-6.5.2.tgz", + "integrity": "sha512-IlxLGQf+wJHCR+NM0UWqDh4xe/Gu6sg2i4tfZk6WIij34IVk9BdOQgi6WvqSHd879jbQIUgL2fBdJUJyAP5ypQ==" }, "node_modules/workbox-routing": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/workbox-routing/-/workbox-routing-6.5.0.tgz", - "integrity": "sha512-w1A9OVa/yYStu9ds0Dj+TC6zOAoskKlczf+wZI5mrM9nFCt/KOMQiFp1/41DMFPrrN/8KlZTS3Cel/Ttutw93Q==", + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/workbox-routing/-/workbox-routing-6.5.2.tgz", + "integrity": "sha512-nR1w5PjF6IVwo0SX3oE88LhmGFmTnqqU7zpGJQQPZiKJfEKgDENQIM9mh3L1ksdFd9Y3CZVkusopHfxQvit/BA==", "dependencies": { - "workbox-core": "6.5.0" + "workbox-core": "6.5.2" } }, "node_modules/workbox-strategies": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/workbox-strategies/-/workbox-strategies-6.5.0.tgz", - "integrity": "sha512-Ngnwo+tfGw4uKSlTz3h1fYKb/lCV7SDI/dtTb8VaJzRl0N9XssloDGYERBmF6BN/DV/x3bnRsshfobnKI/3z0g==", + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/workbox-strategies/-/workbox-strategies-6.5.2.tgz", + "integrity": "sha512-fgbwaUMxbG39BHjJIs2y2X21C0bmf1Oq3vMQxJ1hr6y5JMJIm8rvKCcf1EIdAr+PjKdSk4ddmgyBQ4oO8be4Uw==", "dependencies": { - "workbox-core": "6.5.0" + "workbox-core": "6.5.2" } }, "node_modules/worker-loader": { @@ -10145,28 +10142,20 @@ "integrity": "sha512-ws57AidsDvREKrZKYffXddNkyaF14iHNHm8VQnZH6t99E8gczjNN0GpvcGny0imC80yQ0tHz1xVUKk/KFQSUyA==" }, "@eslint/eslintrc": { - "version": "1.1.0", - 
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.1.0.tgz", - "integrity": "sha512-C1DfL7XX4nPqGd6jcP01W9pVM1HYCuUkFk1432D7F0v3JSlUIeOYn9oCoi3eoLZ+iwBSb29BMFxxny0YrrEZqg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.2.1.tgz", + "integrity": "sha512-bxvbYnBPN1Gibwyp6NrpnFzA3YtRL3BBAyEAFVIpNTm2Rn4Vy87GA5M4aSn3InRrlsbX5N0GW7XIx+U4SAEKdQ==", "dev": true, "requires": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^9.3.1", "globals": "^13.9.0", - "ignore": "^4.0.6", + "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.0.4", "strip-json-comments": "^3.1.1" - }, - "dependencies": { - "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true - } } }, "@humanwhocodes/config-array": { @@ -10518,9 +10507,9 @@ } }, "@primer/octicons": { - "version": "16.3.1", - "resolved": "https://registry.npmjs.org/@primer/octicons/-/octicons-16.3.1.tgz", - "integrity": "sha512-J3IlK0Ok88RQZVB//af7Lnl1Vw2buyyr5G3oEvK1wRSYTJi/E/HBm5JZUihmDAtm/unr85FC534DwA5e+4LR2w==", + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/@primer/octicons/-/octicons-17.0.0.tgz", + "integrity": "sha512-DiIjtous4XPuR2deTctD3/RVZy/vRzVYBgYYvHV313MmTfkbVP60qLH5txrT3/bYNvnb0poNDelLS6U0kqlvHA==", "requires": { "object-assign": "^4.1.1" } @@ -11562,12 +11551,12 @@ "dev": true }, "css-loader": { - "version": "6.6.0", - "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.6.0.tgz", - "integrity": "sha512-FK7H2lisOixPT406s5gZM1S3l8GrfhEBT3ZiL2UX1Ng1XWs0y2GPllz/OTyvbaHe12VgQrIXIzuEGVlbUhodqg==", + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.7.1.tgz", + "integrity": "sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw==", "requires": { "icss-utils": "^5.1.0", - "postcss": "^8.4.5", + "postcss": "^8.4.7", "postcss-modules-extract-imports": "^3.0.0", "postcss-modules-local-by-default": "^4.0.0", "postcss-modules-scope": "^3.0.0", @@ -12221,9 +12210,9 @@ "integrity": "sha1-sgOOhG3DO6pXlhKNCAS0VbjB4h0=" }, "debug": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", - "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dev": true, "requires": { "ms": "2.1.2" @@ -12445,9 +12434,9 @@ "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==" }, "enhanced-resolve": { - "version": "5.8.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz", - "integrity": "sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA==", + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.9.2.tgz", + "integrity": "sha512-GIm3fQfwLJ8YZx2smuHpBKkXC1yOk+OBEmKckVyL0i/ea8mqDEykK3ld5dgH1QYPNyT/lIllxV2LULnxCHaHkA==", "requires": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -12743,12 +12732,12 @@ } }, "eslint": { - "version": "8.9.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.9.0.tgz", - "integrity": 
"sha512-PB09IGwv4F4b0/atrbcMFboF/giawbBLVC7fyDamk5Wtey4Jh2K+rYaBhCAbUyEI4QzB1ly09Uglc9iCtFaG2Q==", + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.12.0.tgz", + "integrity": "sha512-it1oBL9alZg1S8UycLm5YDMAkIhtH6FtAzuZs6YvoGVldWjbS08BkAdb/ymP9LlAyq8koANu32U7Ib/w+UNh8Q==", "dev": true, "requires": { - "@eslint/eslintrc": "^1.1.0", + "@eslint/eslintrc": "^1.2.1", "@humanwhocodes/config-array": "^0.9.2", "ajv": "^6.10.0", "chalk": "^4.0.0", @@ -12884,9 +12873,9 @@ } }, "eslint-plugin-unicorn": { - "version": "41.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-41.0.0.tgz", - "integrity": "sha512-xoJCaRc1uy5REg9DkVga1BkZV57jJxoqOcrU28QHZB89Lk5LdSqdVyTIt9JQVfHNKaiyJ7X+3iLlIn+VEHWEzA==", + "version": "41.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-41.0.1.tgz", + "integrity": "sha512-gF5vo2dIj0YdNMQ/IMegiBkQdQ22GBFFVpdkJP+0og3w7XD4ypea0xQVRv6iofkLVR2w0phAdikcnU01ybd4Ow==", "dev": true, "requires": { "@babel/helper-validator-identifier": "^7.15.7", @@ -13293,9 +13282,9 @@ } }, "globals": { - "version": "13.12.1", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.1.tgz", - "integrity": "sha512-317dFlgY2pdJZ9rspXDks7073GpDmXdfbM3vYYp0HAMKGDh1FfWPleI2ljVNLQX5M5lXcAslTcPTrOrMEFOjyw==", + "version": "13.13.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.13.0.tgz", + "integrity": "sha512-EQ7Q18AJlPwp3vUDL4mKA0KXrXyNIQyWon6T6XQiBQF0XHvRsiCSrWmmeATpUzdJN2HhWZU6Pdl0a9zdep5p6A==", "dev": true, "requires": { "type-fest": "^0.20.2" @@ -14791,9 +14780,9 @@ "dev": true }, "mini-css-extract-plugin": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.5.3.tgz", - "integrity": "sha512-YseMB8cs8U/KCaAGQoqYmfUuhhGW0a9p9XvWXrxVOkE3/IiISTLw4ALNt7JR5B2eYauFM+PQGSbXMDmVbR7Tfw==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.0.tgz", + "integrity": "sha512-ndG8nxCEnAemsg4FSgS+yNyHKgkTB4nPKqCOgh65j3/30qqC5RaSQQXMm++Y6sb6E1zRSxPkztj9fqxhS1Eo6w==", "requires": { "schema-utils": "^4.0.0" } @@ -14807,9 +14796,9 @@ } }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" }, "minimist-options": { "version": "4.1.0", @@ -14828,9 +14817,9 @@ "integrity": "sha512-9ARkWHBs+6YJIvrIp0Ik5tyTTtP9PoV0Ssu2Ocq5y9v8+NOOpWiRshAp8c4rZVWTOe+157on/5G+zj5pwIQFEQ==" }, "monaco-editor": { - "version": "0.32.1", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.32.1.tgz", - "integrity": "sha512-LUt2wsUvQmEi2tfTOK+tjAPvt7eQ+K5C4rZPr6SeuyzjAuAHrIvlUloTcOiGjZW3fn3a/jFQCONrEJbNOaCqbA==" + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.33.0.tgz", + "integrity": "sha512-VcRWPSLIUEgQJQIE0pVT8FcGBIgFoxz7jtqctE+IiCxWugD0DwgyQBcZBhdSrdMC84eumoqMZsGl2GTreOzwqw==" }, "monaco-editor-webpack-plugin": { "version": "7.0.1", @@ -14847,9 +14836,9 @@ "devOptional": true }, "nanoid": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz", - "integrity": 
"sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==" + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.2.tgz", + "integrity": "sha512-CuHBogktKwpm5g2sRgv83jEy2ijFzBwMoYA60orPDR7ynsLijJDqgsi4RDGj3OJpy3Ieb+LYwiRmIOGyytgITA==" }, "natural-compare": { "version": "1.4.0", @@ -15204,11 +15193,11 @@ "integrity": "sha512-Wb4p1J4zyFTbM+u6WuO4XstYx4Ky9Cewe4DWrel7B0w6VVICvPwdOpotjzcf6eD8TsckVnIMNONQyPIUFOUbCQ==" }, "postcss": { - "version": "8.4.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.6.tgz", - "integrity": "sha512-OovjwIzs9Te46vlEx7+uXB0PLijpwjXGKXjVGGPIGubGpq7uh5Xgf6D6FiJ/SzJMBosHDp6a2hiXOS97iBXcaA==", + "version": "8.4.12", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.12.tgz", + "integrity": "sha512-lg6eITwYe9v6Hr5CncVbK70SoioNQIq81nsaG86ev5hAidQvmOeETBqs7jm43K2F5/Ley3ytDtriImV6TpNiSg==", "requires": { - "nanoid": "^3.2.0", + "nanoid": "^3.3.1", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } @@ -15732,9 +15721,9 @@ } }, "sortablejs": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/sortablejs/-/sortablejs-1.14.0.tgz", - "integrity": "sha512-pBXvQCs5/33fdN1/39pPL0NZF20LeRbLQ5jtnheIPN9JQAaufGjKdWduZn4U7wCtVuzKhmRkI0DFYHYRbB2H1w==" + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/sortablejs/-/sortablejs-1.15.0.tgz", + "integrity": "sha512-bv9qgVMjUMf89wAvM6AxVvS/4MX3sPeN0+agqShejLU5z5GX4C75ow1O2e5k4L6XItUyAK3gH6AxSbXrOM5e8w==" }, "source-list-map": { "version": "2.0.1", @@ -15938,16 +15927,16 @@ "dev": true }, "stylelint": { - "version": "14.5.3", - "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-14.5.3.tgz", - "integrity": "sha512-omHETL+kGHR+fCXFK1SkZD/A+emCP9esggAdWEl8GPjTNeyRYj+H6uetRDcU+7E451zwWiUYGVAX+lApsAZgsQ==", + "version": "14.6.1", + "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-14.6.1.tgz", + "integrity": "sha512-FfNdvZUZdzh9KDQxDnO7Opp+prKh8OQVuSW8S13cBtxrooCbm6J6royhUeb++53WPMt04VB+ZbOz/QmzAijs6Q==", "dev": true, "requires": { "balanced-match": "^2.0.0", "colord": "^2.9.2", "cosmiconfig": "^7.0.1", "css-functions-list": "^3.0.1", - "debug": "^4.3.3", + "debug": "^4.3.4", "execall": "^2.0.0", "fast-glob": "^3.2.11", "fastest-levenshtein": "^1.0.12", @@ -15968,7 +15957,7 @@ "normalize-path": "^3.0.0", "normalize-selector": "^0.2.0", "picocolors": "^1.0.0", - "postcss": "^8.4.6", + "postcss": "^8.4.12", "postcss-media-query-parser": "^0.2.3", "postcss-resolve-nested-selector": "^0.1.1", "postcss-safe-parser": "^6.0.0", @@ -16082,9 +16071,9 @@ } }, "swagger-ui-dist": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-4.5.2.tgz", - "integrity": "sha512-wV4w54eW9z+VKbYJBJfULfqO05otCbM9jwgRIkwRl9CrfTVKelDzyhhEvdUQkGUzro+Ir8TOZPiZgKIdIdolWQ==" + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-4.10.0.tgz", + "integrity": "sha512-+RBJA/beHLg0hO4rJZIhgUdxmZE7AaNfc11PCSzZdnzkmwSJv8Qg0HZbr7BQPQjkC6z4xVWq2h1itOPk1FQBrA==" }, "symbol-tree": { "version": "3.2.4", @@ -16353,9 +16342,9 @@ "dev": true }, "updates": { - "version": "13.0.2", - "resolved": "https://registry.npmjs.org/updates/-/updates-13.0.2.tgz", - "integrity": "sha512-bTC+36YoHGzK8vdKHebToVYsa5XTHCBe7X41H39wUt1A9OK1GhoY7pGzkOfXgWVS6yvVK9BSWfqVg0VA98fahQ==", + "version": "13.0.4", + "resolved": "https://registry.npmjs.org/updates/-/updates-13.0.4.tgz", + "integrity": 
"sha512-RgHZnmTlcoRdn2yA8FZUwlRj7ltEANZQvh3ISAoSZcxunIv2s5EpFnZh8jgU7DigtX4ogm4XSn0r5O4u+cF7sg==", "dev": true }, "uri-js": { @@ -16585,9 +16574,9 @@ "dev": true }, "webpack": { - "version": "5.69.1", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.69.1.tgz", - "integrity": "sha512-+VyvOSJXZMT2V5vLzOnDuMz5GxEqLk7hKWQ56YxPW/PQRUuKimPqmEIJOx8jHYeyo65pKbapbW464mvsKbaj4A==", + "version": "5.70.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.70.0.tgz", + "integrity": "sha512-ZMWWy8CeuTTjCxbeaQI21xSswseF2oNOwc70QSKNePvmxE7XW36i7vpBMYZFAUHPwQiEbNGCEYIOOlyRbdGmxw==", "requires": { "@types/eslint-scope": "^3.7.3", "@types/estree": "^0.0.51", @@ -16598,7 +16587,7 @@ "acorn-import-assertions": "^1.7.6", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.8.3", + "enhanced-resolve": "^5.9.2", "es-module-lexer": "^0.9.0", "eslint-scope": "5.1.1", "events": "^3.2.0", @@ -16753,24 +16742,24 @@ "dev": true }, "workbox-core": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/workbox-core/-/workbox-core-6.5.0.tgz", - "integrity": "sha512-5SPwNipUzYBhrneLVT02JFA0fw3LG82jFAN/G2NzxkIW10t4MVZuML2nU94bbkgjq25u0fkY8+4JXzMfHgxEWQ==" + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/workbox-core/-/workbox-core-6.5.2.tgz", + "integrity": "sha512-IlxLGQf+wJHCR+NM0UWqDh4xe/Gu6sg2i4tfZk6WIij34IVk9BdOQgi6WvqSHd879jbQIUgL2fBdJUJyAP5ypQ==" }, "workbox-routing": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/workbox-routing/-/workbox-routing-6.5.0.tgz", - "integrity": "sha512-w1A9OVa/yYStu9ds0Dj+TC6zOAoskKlczf+wZI5mrM9nFCt/KOMQiFp1/41DMFPrrN/8KlZTS3Cel/Ttutw93Q==", + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/workbox-routing/-/workbox-routing-6.5.2.tgz", + "integrity": "sha512-nR1w5PjF6IVwo0SX3oE88LhmGFmTnqqU7zpGJQQPZiKJfEKgDENQIM9mh3L1ksdFd9Y3CZVkusopHfxQvit/BA==", "requires": { - "workbox-core": "6.5.0" + "workbox-core": "6.5.2" } }, "workbox-strategies": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/workbox-strategies/-/workbox-strategies-6.5.0.tgz", - "integrity": "sha512-Ngnwo+tfGw4uKSlTz3h1fYKb/lCV7SDI/dtTb8VaJzRl0N9XssloDGYERBmF6BN/DV/x3bnRsshfobnKI/3z0g==", + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/workbox-strategies/-/workbox-strategies-6.5.2.tgz", + "integrity": "sha512-fgbwaUMxbG39BHjJIs2y2X21C0bmf1Oq3vMQxJ1hr6y5JMJIm8rvKCcf1EIdAr+PjKdSk4ddmgyBQ4oO8be4Uw==", "requires": { - "workbox-core": "6.5.0" + "workbox-core": "6.5.2" } }, "worker-loader": { diff --git a/package.json b/package.json index b8ab231540..db3d877dc2 100644 --- a/package.json +++ b/package.json @@ -4,13 +4,13 @@ "private": true, "type": "module", "engines": { - "node": ">= 12.17.0" + "node": ">= 14" }, "dependencies": { "@claviska/jquery-minicolors": "2.3.6", - "@primer/octicons": "16.3.1", + "@primer/octicons": "17.0.0", "add-asset-webpack-plugin": "2.0.1", - "css-loader": "6.6.0", + "css-loader": "6.7.1", "dropzone": "6.0.0-beta.2", "easymde": "2.16.1", "esbuild-loader": "2.18.0", @@ -23,12 +23,12 @@ "less-loader": "10.2.0", "license-checker-webpack-plugin": "0.2.1", "mermaid": "8.14.0", - "mini-css-extract-plugin": "2.5.3", - "monaco-editor": "0.32.1", + "mini-css-extract-plugin": "2.6.0", + "monaco-editor": "0.33.0", "monaco-editor-webpack-plugin": "7.0.1", "pretty-ms": "7.0.1", - "sortablejs": "1.14.0", - "swagger-ui-dist": "4.5.2", + "sortablejs": "1.15.0", + "swagger-ui-dist": "4.10.0", "tributejs": "5.1.3", "uint8-to-base64": "0.2.0", "vue": "2.6.14", @@ -36,27 +36,27 
@@ "vue-calendar-heatmap": "0.8.4", "vue-loader": "15.9.8", "vue-template-compiler": "2.6.14", - "webpack": "5.69.1", + "webpack": "5.70.0", "webpack-cli": "4.9.2", - "workbox-routing": "6.5.0", - "workbox-strategies": "6.5.0", + "workbox-routing": "6.5.2", + "workbox-strategies": "6.5.2", "worker-loader": "3.0.8", "wrap-ansi": "8.0.1" }, "devDependencies": { - "eslint": "8.9.0", + "eslint": "8.12.0", "eslint-plugin-html": "6.2.0", "eslint-plugin-import": "2.25.4", - "eslint-plugin-unicorn": "41.0.0", + "eslint-plugin-unicorn": "41.0.1", "eslint-plugin-vue": "8.5.0", "jest": "27.5.1", "jest-extended": "2.0.0", "jest-raw-loader": "1.0.1", "postcss-less": "6.0.0", - "stylelint": "14.5.3", + "stylelint": "14.6.1", "stylelint-config-standard": "25.0.0", "svgo": "2.8.0", - "updates": "13.0.2" + "updates": "13.0.4" }, "browserslist": [ "defaults", diff --git a/public/img/svg/gitea-composer.svg b/public/img/svg/gitea-composer.svg new file mode 100644 index 0000000000..1285b1bf91 --- /dev/null +++ b/public/img/svg/gitea-composer.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/gitea-conan.svg b/public/img/svg/gitea-conan.svg new file mode 100644 index 0000000000..d7d5ad5f18 --- /dev/null +++ b/public/img/svg/gitea-conan.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/gitea-helm.svg b/public/img/svg/gitea-helm.svg new file mode 100644 index 0000000000..5ab50dd29e --- /dev/null +++ b/public/img/svg/gitea-helm.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/gitea-maven.svg b/public/img/svg/gitea-maven.svg new file mode 100644 index 0000000000..e83e728276 --- /dev/null +++ b/public/img/svg/gitea-maven.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/gitea-npm.svg b/public/img/svg/gitea-npm.svg new file mode 100644 index 0000000000..4435e092f2 --- /dev/null +++ b/public/img/svg/gitea-npm.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/gitea-nuget.svg b/public/img/svg/gitea-nuget.svg new file mode 100644 index 0000000000..a5e38de3f6 --- /dev/null +++ b/public/img/svg/gitea-nuget.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/gitea-python.svg b/public/img/svg/gitea-python.svg new file mode 100644 index 0000000000..07548897e6 --- /dev/null +++ b/public/img/svg/gitea-python.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/gitea-rubygems.svg b/public/img/svg/gitea-rubygems.svg new file mode 100644 index 0000000000..5f54dce48d --- /dev/null +++ b/public/img/svg/gitea-rubygems.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/octicon-feed-forked.svg b/public/img/svg/octicon-feed-forked.svg new file mode 100644 index 0000000000..d93d48aaf7 --- /dev/null +++ b/public/img/svg/octicon-feed-forked.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/octicon-feed-merged.svg b/public/img/svg/octicon-feed-merged.svg new file mode 100644 index 0000000000..9313931087 --- /dev/null +++ b/public/img/svg/octicon-feed-merged.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/octicon-feed-trophy.svg b/public/img/svg/octicon-feed-trophy.svg new file mode 100644 index 0000000000..b19b85afe5 --- /dev/null +++ b/public/img/svg/octicon-feed-trophy.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/octicon-file-directory-fill.svg b/public/img/svg/octicon-file-directory-fill.svg new file mode 100644 index 0000000000..7ec313489b --- /dev/null 
+++ b/public/img/svg/octicon-file-directory-fill.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/octicon-file-directory.svg b/public/img/svg/octicon-file-directory.svg index d5fbf1efbe..ca3345a4d3 100644 --- a/public/img/svg/octicon-file-directory.svg +++ b/public/img/svg/octicon-file-directory.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/public/img/svg/octicon-repo-locked.svg b/public/img/svg/octicon-repo-locked.svg new file mode 100644 index 0000000000..1da51110b2 --- /dev/null +++ b/public/img/svg/octicon-repo-locked.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/img/svg/octicon-trophy.svg b/public/img/svg/octicon-trophy.svg new file mode 100644 index 0000000000..57cf90ccb4 --- /dev/null +++ b/public/img/svg/octicon-trophy.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/routers/api/packages/api.go b/routers/api/packages/api.go new file mode 100644 index 0000000000..b5fdc739d7 --- /dev/null +++ b/routers/api/packages/api.go @@ -0,0 +1,403 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "net/http" + "regexp" + "strings" + + "code.gitea.io/gitea/models/perm" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/web" + "code.gitea.io/gitea/routers/api/packages/composer" + "code.gitea.io/gitea/routers/api/packages/conan" + "code.gitea.io/gitea/routers/api/packages/container" + "code.gitea.io/gitea/routers/api/packages/generic" + "code.gitea.io/gitea/routers/api/packages/helm" + "code.gitea.io/gitea/routers/api/packages/maven" + "code.gitea.io/gitea/routers/api/packages/npm" + "code.gitea.io/gitea/routers/api/packages/nuget" + "code.gitea.io/gitea/routers/api/packages/pypi" + "code.gitea.io/gitea/routers/api/packages/rubygems" + "code.gitea.io/gitea/services/auth" + context_service "code.gitea.io/gitea/services/context" +) + +func reqPackageAccess(accessMode perm.AccessMode) func(ctx *context.Context) { + return func(ctx *context.Context) { + if ctx.Package.AccessMode < accessMode && !ctx.IsUserSiteAdmin() { + ctx.Resp.Header().Set("WWW-Authenticate", `Basic realm="Gitea Package API"`) + ctx.Error(http.StatusUnauthorized, "reqPackageAccess", "user should have specific permission or be a site admin") + return + } + } +} + +func Routes() *web.Route { + r := web.NewRoute() + + r.Use(context.PackageContexter()) + + authMethods := []auth.Method{ + &auth.OAuth2{}, + &auth.Basic{}, + &conan.Auth{}, + } + if setting.Service.EnableReverseProxyAuth { + authMethods = append(authMethods, &auth.ReverseProxy{}) + } + + authGroup := auth.NewGroup(authMethods...) 
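+ // The group above is consulted once per request: Verify walks the configured
+ // methods in order and returns the first authenticated user, leaving ctx.Doer
+ // nil for anonymous requests; reqPackageAccess on the routes below then
+ // decides whether anonymous read access is enough for the endpoint being hit.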
+ r.Use(func(ctx *context.Context) { + ctx.Doer = authGroup.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) + }) + + r.Group("/{username}", func() { + r.Group("/composer", func() { + r.Get("/packages.json", composer.ServiceIndex) + r.Get("/search.json", composer.SearchPackages) + r.Get("/list.json", composer.EnumeratePackages) + r.Get("/p2/{vendorname}/{projectname}~dev.json", composer.PackageMetadata) + r.Get("/p2/{vendorname}/{projectname}.json", composer.PackageMetadata) + r.Get("/files/{package}/{version}/{filename}", composer.DownloadPackageFile) + r.Put("", reqPackageAccess(perm.AccessModeWrite), composer.UploadPackage) + }) + r.Group("/conan", func() { + r.Group("/v1", func() { + r.Get("/ping", conan.Ping) + r.Group("/users", func() { + r.Get("/authenticate", conan.Authenticate) + r.Get("/check_credentials", conan.CheckCredentials) + }) + r.Group("/conans", func() { + r.Get("/search", conan.SearchRecipes) + r.Group("/{name}/{version}/{user}/{channel}", func() { + r.Get("", conan.RecipeSnapshot) + r.Delete("", reqPackageAccess(perm.AccessModeWrite), conan.DeleteRecipeV1) + r.Get("/search", conan.SearchPackagesV1) + r.Get("/digest", conan.RecipeDownloadURLs) + r.Post("/upload_urls", reqPackageAccess(perm.AccessModeWrite), conan.RecipeUploadURLs) + r.Get("/download_urls", conan.RecipeDownloadURLs) + r.Group("/packages", func() { + r.Post("/delete", reqPackageAccess(perm.AccessModeWrite), conan.DeletePackageV1) + r.Group("/{package_reference}", func() { + r.Get("", conan.PackageSnapshot) + r.Get("/digest", conan.PackageDownloadURLs) + r.Post("/upload_urls", reqPackageAccess(perm.AccessModeWrite), conan.PackageUploadURLs) + r.Get("/download_urls", conan.PackageDownloadURLs) + }) + }) + }, conan.ExtractPathParameters) + }) + r.Group("/files/{name}/{version}/{user}/{channel}/{recipe_revision}", func() { + r.Group("/recipe/{filename}", func() { + r.Get("", conan.DownloadRecipeFile) + r.Put("", reqPackageAccess(perm.AccessModeWrite), conan.UploadRecipeFile) + }) + r.Group("/package/{package_reference}/{package_revision}/{filename}", func() { + r.Get("", conan.DownloadPackageFile) + r.Put("", reqPackageAccess(perm.AccessModeWrite), conan.UploadPackageFile) + }) + }, conan.ExtractPathParameters) + }) + r.Group("/v2", func() { + r.Get("/ping", conan.Ping) + r.Group("/users", func() { + r.Get("/authenticate", conan.Authenticate) + r.Get("/check_credentials", conan.CheckCredentials) + }) + r.Group("/conans", func() { + r.Get("/search", conan.SearchRecipes) + r.Group("/{name}/{version}/{user}/{channel}", func() { + r.Delete("", reqPackageAccess(perm.AccessModeWrite), conan.DeleteRecipeV2) + r.Get("/search", conan.SearchPackagesV2) + r.Get("/latest", conan.LatestRecipeRevision) + r.Group("/revisions", func() { + r.Get("", conan.ListRecipeRevisions) + r.Group("/{recipe_revision}", func() { + r.Delete("", reqPackageAccess(perm.AccessModeWrite), conan.DeleteRecipeV2) + r.Get("/search", conan.SearchPackagesV2) + r.Group("/files", func() { + r.Get("", conan.ListRecipeRevisionFiles) + r.Group("/{filename}", func() { + r.Get("", conan.DownloadRecipeFile) + r.Put("", reqPackageAccess(perm.AccessModeWrite), conan.UploadRecipeFile) + }) + }) + r.Group("/packages", func() { + r.Delete("", reqPackageAccess(perm.AccessModeWrite), conan.DeletePackageV2) + r.Group("/{package_reference}", func() { + r.Delete("", reqPackageAccess(perm.AccessModeWrite), conan.DeletePackageV2) + r.Get("/latest", conan.LatestPackageRevision) + r.Group("/revisions", func() { + r.Get("", conan.ListPackageRevisions) + 
r.Group("/{package_revision}", func() { + r.Delete("", reqPackageAccess(perm.AccessModeWrite), conan.DeletePackageV2) + r.Group("/files", func() { + r.Get("", conan.ListPackageRevisionFiles) + r.Group("/{filename}", func() { + r.Get("", conan.DownloadPackageFile) + r.Put("", reqPackageAccess(perm.AccessModeWrite), conan.UploadPackageFile) + }) + }) + }) + }) + }) + }) + }) + }) + }, conan.ExtractPathParameters) + }) + }) + }) + r.Group("/generic", func() { + r.Group("/{packagename}/{packageversion}/{filename}", func() { + r.Get("", generic.DownloadPackageFile) + r.Group("", func() { + r.Put("", generic.UploadPackage) + r.Delete("", generic.DeletePackage) + }, reqPackageAccess(perm.AccessModeWrite)) + }) + }) + r.Group("/helm", func() { + r.Get("/index.yaml", helm.Index) + r.Get("/{filename}", helm.DownloadPackageFile) + r.Post("/api/charts", reqPackageAccess(perm.AccessModeWrite), helm.UploadPackage) + }) + r.Group("/maven", func() { + r.Put("/*", reqPackageAccess(perm.AccessModeWrite), maven.UploadPackageFile) + r.Get("/*", maven.DownloadPackageFile) + }) + r.Group("/nuget", func() { + r.Get("/index.json", nuget.ServiceIndex) + r.Get("/query", nuget.SearchService) + r.Group("/registration/{id}", func() { + r.Get("/index.json", nuget.RegistrationIndex) + r.Get("/{version}", nuget.RegistrationLeaf) + }) + r.Group("/package/{id}", func() { + r.Get("/index.json", nuget.EnumeratePackageVersions) + r.Get("/{version}/{filename}", nuget.DownloadPackageFile) + }) + r.Group("", func() { + r.Put("/", nuget.UploadPackage) + r.Put("/symbolpackage", nuget.UploadSymbolPackage) + r.Delete("/{id}/{version}", nuget.DeletePackage) + }, reqPackageAccess(perm.AccessModeWrite)) + r.Get("/symbols/{filename}/{guid:[0-9a-f]{32}}FFFFFFFF/{filename2}", nuget.DownloadSymbolFile) + }) + r.Group("/npm", func() { + r.Group("/@{scope}/{id}", func() { + r.Get("", npm.PackageMetadata) + r.Put("", reqPackageAccess(perm.AccessModeWrite), npm.UploadPackage) + r.Get("/-/{version}/{filename}", npm.DownloadPackageFile) + }) + r.Group("/{id}", func() { + r.Get("", npm.PackageMetadata) + r.Put("", reqPackageAccess(perm.AccessModeWrite), npm.UploadPackage) + r.Get("/-/{version}/{filename}", npm.DownloadPackageFile) + }) + r.Group("/-/package/@{scope}/{id}/dist-tags", func() { + r.Get("", npm.ListPackageTags) + r.Group("/{tag}", func() { + r.Put("", npm.AddPackageTag) + r.Delete("", npm.DeletePackageTag) + }, reqPackageAccess(perm.AccessModeWrite)) + }) + r.Group("/-/package/{id}/dist-tags", func() { + r.Get("", npm.ListPackageTags) + r.Group("/{tag}", func() { + r.Put("", npm.AddPackageTag) + r.Delete("", npm.DeletePackageTag) + }, reqPackageAccess(perm.AccessModeWrite)) + }) + }) + r.Group("/pypi", func() { + r.Post("/", reqPackageAccess(perm.AccessModeWrite), pypi.UploadPackageFile) + r.Get("/files/{id}/{version}/{filename}", pypi.DownloadPackageFile) + r.Get("/simple/{id}", pypi.PackageMetadata) + }) + r.Group("/rubygems", func() { + r.Get("/specs.4.8.gz", rubygems.EnumeratePackages) + r.Get("/latest_specs.4.8.gz", rubygems.EnumeratePackagesLatest) + r.Get("/prerelease_specs.4.8.gz", rubygems.EnumeratePackagesPreRelease) + r.Get("/quick/Marshal.4.8/{filename}", rubygems.ServePackageSpecification) + r.Get("/gems/{filename}", rubygems.DownloadPackageFile) + r.Group("/api/v1/gems", func() { + r.Post("/", rubygems.UploadPackageFile) + r.Delete("/yank", rubygems.DeletePackage) + }, reqPackageAccess(perm.AccessModeWrite)) + }) + }, context_service.UserAssignmentWeb(), context.PackageAssignment(), 
reqPackageAccess(perm.AccessModeRead)) + + return r +} + +func ContainerRoutes() *web.Route { + r := web.NewRoute() + + r.Use(context.PackageContexter()) + + authMethods := []auth.Method{ + &auth.Basic{}, + &container.Auth{}, + } + if setting.Service.EnableReverseProxyAuth { + authMethods = append(authMethods, &auth.ReverseProxy{}) + } + + authGroup := auth.NewGroup(authMethods...) + r.Use(func(ctx *context.Context) { + ctx.Doer = authGroup.Verify(ctx.Req, ctx.Resp, ctx, ctx.Session) + }) + + r.Get("", container.ReqContainerAccess, container.DetermineSupport) + r.Get("/token", container.Authenticate) + r.Group("/{username}", func() { + r.Group("/{image}", func() { + r.Group("/blobs/uploads", func() { + r.Post("", container.InitiateUploadBlob) + r.Group("/{uuid}", func() { + r.Patch("", container.UploadBlob) + r.Put("", container.EndUploadBlob) + }) + }, reqPackageAccess(perm.AccessModeWrite)) + r.Group("/blobs/{digest}", func() { + r.Head("", container.HeadBlob) + r.Get("", container.GetBlob) + r.Delete("", reqPackageAccess(perm.AccessModeWrite), container.DeleteBlob) + }) + r.Group("/manifests/{reference}", func() { + r.Put("", reqPackageAccess(perm.AccessModeWrite), container.UploadManifest) + r.Head("", container.HeadManifest) + r.Get("", container.GetManifest) + r.Delete("", reqPackageAccess(perm.AccessModeWrite), container.DeleteManifest) + }) + r.Get("/tags/list", container.GetTagList) + }, container.VerifyImageName) + + var ( + blobsUploadsPattern = regexp.MustCompile(`\A(.+)/blobs/uploads/([a-zA-Z0-9-_.=]+)\z`) + blobsPattern = regexp.MustCompile(`\A(.+)/blobs/([^/]+)\z`) + manifestsPattern = regexp.MustCompile(`\A(.+)/manifests/([^/]+)\z`) + ) + + // Manual mapping of routes because {image} can contain slashes which chi does not support + r.Route("/*", "HEAD,GET,POST,PUT,PATCH,DELETE", func(ctx *context.Context) { + path := ctx.Params("*") + isHead := ctx.Req.Method == "HEAD" + isGet := ctx.Req.Method == "GET" + isPost := ctx.Req.Method == "POST" + isPut := ctx.Req.Method == "PUT" + isPatch := ctx.Req.Method == "PATCH" + isDelete := ctx.Req.Method == "DELETE" + + if isPost && strings.HasSuffix(path, "/blobs/uploads") { + reqPackageAccess(perm.AccessModeWrite)(ctx) + if ctx.Written() { + return + } + + ctx.SetParams("image", path[:len(path)-14]) + container.VerifyImageName(ctx) + if ctx.Written() { + return + } + + container.InitiateUploadBlob(ctx) + return + } + if isGet && strings.HasSuffix(path, "/tags/list") { + ctx.SetParams("image", path[:len(path)-10]) + container.VerifyImageName(ctx) + if ctx.Written() { + return + } + + container.GetTagList(ctx) + return + } + + m := blobsUploadsPattern.FindStringSubmatch(path) + if len(m) == 3 && (isPut || isPatch) { + reqPackageAccess(perm.AccessModeWrite)(ctx) + if ctx.Written() { + return + } + + ctx.SetParams("image", m[1]) + container.VerifyImageName(ctx) + if ctx.Written() { + return + } + + ctx.SetParams("uuid", m[2]) + + if isPatch { + container.UploadBlob(ctx) + } else { + container.EndUploadBlob(ctx) + } + return + } + m = blobsPattern.FindStringSubmatch(path) + if len(m) == 3 && (isHead || isGet || isDelete) { + ctx.SetParams("image", m[1]) + container.VerifyImageName(ctx) + if ctx.Written() { + return + } + + ctx.SetParams("digest", m[2]) + + if isHead { + container.HeadBlob(ctx) + } else if isGet { + container.GetBlob(ctx) + } else { + reqPackageAccess(perm.AccessModeWrite)(ctx) + if ctx.Written() { + return + } + container.DeleteBlob(ctx) + } + return + } + m = manifestsPattern.FindStringSubmatch(path) + if len(m) == 3 && 
(isHead || isGet || isPut || isDelete) { + ctx.SetParams("image", m[1]) + container.VerifyImageName(ctx) + if ctx.Written() { + return + } + + ctx.SetParams("reference", m[2]) + + if isHead { + container.HeadManifest(ctx) + } else if isGet { + container.GetManifest(ctx) + } else { + reqPackageAccess(perm.AccessModeWrite)(ctx) + if ctx.Written() { + return + } + if isPut { + container.UploadManifest(ctx) + } else { + container.DeleteManifest(ctx) + } + } + return + } + + ctx.Status(http.StatusNotFound) + }) + }, container.ReqContainerAccess, context_service.UserAssignmentWeb(), context.PackageAssignment(), reqPackageAccess(perm.AccessModeRead)) + + return r +} diff --git a/routers/api/packages/composer/api.go b/routers/api/packages/composer/api.go new file mode 100644 index 0000000000..d8f67d130c --- /dev/null +++ b/routers/api/packages/composer/api.go @@ -0,0 +1,118 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package composer + +import ( + "fmt" + "net/url" + "time" + + packages_model "code.gitea.io/gitea/models/packages" + composer_module "code.gitea.io/gitea/modules/packages/composer" +) + +// ServiceIndexResponse contains registry endpoints +type ServiceIndexResponse struct { + SearchTemplate string `json:"search"` + MetadataTemplate string `json:"metadata-url"` + PackageList string `json:"list"` +} + +func createServiceIndexResponse(registryURL string) *ServiceIndexResponse { + return &ServiceIndexResponse{ + SearchTemplate: registryURL + "/search.json?q=%query%&type=%type%", + MetadataTemplate: registryURL + "/p2/%package%.json", + PackageList: registryURL + "/list.json", + } +} + +// SearchResultResponse contains search results +type SearchResultResponse struct { + Total int64 `json:"total"` + Results []*SearchResult `json:"results"` + NextLink string `json:"next,omitempty"` +} + +// SearchResult contains a search result +type SearchResult struct { + Name string `json:"name"` + Description string `json:"description"` + Downloads int64 `json:"downloads"` +} + +func createSearchResultResponse(total int64, pds []*packages_model.PackageDescriptor, nextLink string) *SearchResultResponse { + results := make([]*SearchResult, 0, len(pds)) + + for _, pd := range pds { + results = append(results, &SearchResult{ + Name: pd.Package.Name, + Description: pd.Metadata.(*composer_module.Metadata).Description, + Downloads: pd.Version.DownloadCount, + }) + } + + return &SearchResultResponse{ + Total: total, + Results: results, + NextLink: nextLink, + } +} + +// PackageMetadataResponse contains packages metadata +type PackageMetadataResponse struct { + Minified string `json:"minified"` + Packages map[string][]*PackageVersionMetadata `json:"packages"` +} + +// PackageVersionMetadata contains package metadata +type PackageVersionMetadata struct { + *composer_module.Metadata + Name string `json:"name"` + Version string `json:"version"` + Type string `json:"type"` + Created time.Time `json:"time"` + Dist Dist `json:"dist"` +} + +// Dist contains package download informations +type Dist struct { + Type string `json:"type"` + URL string `json:"url"` + Checksum string `json:"shasum"` +} + +func createPackageMetadataResponse(registryURL string, pds []*packages_model.PackageDescriptor) *PackageMetadataResponse { + versions := make([]*PackageVersionMetadata, 0, len(pds)) + + for _, pd := range pds { + packageType := "" + for _, pvp := range pd.Properties { + if pvp.Name == 
composer_module.TypeProperty { + packageType = pvp.Value + break + } + } + + versions = append(versions, &PackageVersionMetadata{ + Name: pd.Package.Name, + Version: pd.Version.Version, + Type: packageType, + Created: time.Unix(int64(pd.Version.CreatedUnix), 0), + Metadata: pd.Metadata.(*composer_module.Metadata), + Dist: Dist{ + Type: "zip", + URL: fmt.Sprintf("%s/files/%s/%s/%s", registryURL, url.PathEscape(pd.Package.LowerName), url.PathEscape(pd.Version.LowerVersion), url.PathEscape(pd.Files[0].File.LowerName)), + Checksum: pd.Files[0].Blob.HashSHA1, + }, + }) + } + + return &PackageMetadataResponse{ + Minified: "composer/2.0", + Packages: map[string][]*PackageVersionMetadata{ + pds[0].Package.Name: versions, + }, + } +} diff --git a/routers/api/packages/composer/composer.go b/routers/api/packages/composer/composer.go new file mode 100644 index 0000000000..23de28c7f9 --- /dev/null +++ b/routers/api/packages/composer/composer.go @@ -0,0 +1,250 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package composer + +import ( + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" + packages_module "code.gitea.io/gitea/modules/packages" + composer_module "code.gitea.io/gitea/modules/packages/composer" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" + + "github.com/hashicorp/go-version" +) + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + type Error struct { + Status int `json:"status"` + Message string `json:"message"` + } + ctx.JSON(status, struct { + Errors []Error `json:"errors"` + }{ + Errors: []Error{ + {Status: status, Message: message}, + }, + }) + }) +} + +// ServiceIndex displays registry endpoints +func ServiceIndex(ctx *context.Context) { + resp := createServiceIndexResponse(setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/composer") + + ctx.JSON(http.StatusOK, resp) +} + +// SearchPackages searches packages, only "q" is supported +// https://packagist.org/apidoc#search-packages +func SearchPackages(ctx *context.Context) { + page := ctx.FormInt("page") + if page < 1 { + page = 1 + } + perPage := ctx.FormInt("per_page") + paginator := db.ListOptions{ + Page: page, + PageSize: convert.ToCorrectPageSize(perPage), + } + + opts := &packages_model.PackageSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Type: packages_model.TypeComposer, + Name: packages_model.SearchValue{Value: ctx.FormTrim("q")}, + Paginator: &paginator, + } + if ctx.FormTrim("type") != "" { + opts.Properties = map[string]string{ + composer_module.TypeProperty: ctx.FormTrim("type"), + } + } + + pvs, total, err := packages_model.SearchLatestVersions(ctx, opts) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + nextLink := "" + if len(pvs) == paginator.PageSize { + u, err := url.Parse(setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/composer/search.json") + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + q := u.Query() + q.Set("q", ctx.FormTrim("q")) + q.Set("type", ctx.FormTrim("type")) + q.Set("page", strconv.Itoa(page+1)) + if perPage 
!= 0 { + q.Set("per_page", strconv.Itoa(perPage)) + } + u.RawQuery = q.Encode() + + nextLink = u.String() + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + resp := createSearchResultResponse(total, pds, nextLink) + + ctx.JSON(http.StatusOK, resp) +} + +// EnumeratePackages lists all package names +// https://packagist.org/apidoc#list-packages +func EnumeratePackages(ctx *context.Context) { + ps, err := packages_model.GetPackagesByType(db.DefaultContext, ctx.Package.Owner.ID, packages_model.TypeComposer) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + names := make([]string, 0, len(ps)) + for _, p := range ps { + names = append(names, p.Name) + } + + ctx.JSON(http.StatusOK, map[string][]string{ + "packageNames": names, + }) +} + +// PackageMetadata returns the metadata for a single package +// https://packagist.org/apidoc#get-package-data +func PackageMetadata(ctx *context.Context) { + vendorName := ctx.Params("vendorname") + projectName := ctx.Params("projectname") + + pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeComposer, vendorName+"/"+projectName) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pvs) == 0 { + apiError(ctx, http.StatusNotFound, packages_model.ErrPackageNotExist) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + resp := createPackageMetadataResponse( + setting.AppURL+"api/packages/"+ctx.Package.Owner.Name+"/composer", + pds, + ) + + ctx.JSON(http.StatusOK, resp) +} + +// DownloadPackageFile serves the content of a package +func DownloadPackageFile(ctx *context.Context) { + s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion( + ctx, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeComposer, + Name: ctx.Params("package"), + Version: ctx.Params("version"), + }, + &packages_service.PackageFileInfo{ + Filename: ctx.Params("filename"), + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} + +// UploadPackage creates a new package +func UploadPackage(ctx *context.Context) { + buf, err := packages_module.CreateHashedBufferFromReader(ctx.Req.Body, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + cp, err := composer_module.ParsePackage(buf, buf.Size()) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if cp.Version == "" { + v, err := version.NewVersion(ctx.FormTrim("version")) + if err != nil { + apiError(ctx, http.StatusBadRequest, composer_module.ErrInvalidVersion) + return + } + cp.Version = v.String() + } + + _, _, err = packages_service.CreatePackageAndAddFile( + &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeComposer, + Name: cp.Name, + Version: cp.Version, + }, + SemverCompatible: true, + Creator: ctx.Doer, 
+ Metadata: cp.Metadata, + Properties: map[string]string{ + composer_module.TypeProperty: cp.Type, + }, + }, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: strings.ToLower(fmt.Sprintf("%s.%s.zip", strings.ReplaceAll(cp.Name, "/", "-"), cp.Version)), + }, + Data: buf, + IsLead: true, + }, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageVersion { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusCreated) +} diff --git a/routers/api/packages/conan/auth.go b/routers/api/packages/conan/auth.go new file mode 100644 index 0000000000..00855a97a4 --- /dev/null +++ b/routers/api/packages/conan/auth.go @@ -0,0 +1,41 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package conan + +import ( + "net/http" + + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/services/auth" + "code.gitea.io/gitea/services/packages" +) + +type Auth struct{} + +func (a *Auth) Name() string { + return "conan" +} + +// Verify extracts the user from the Bearer token +func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataStore, sess auth.SessionStore) *user_model.User { + uid, err := packages.ParseAuthorizationToken(req) + if err != nil { + log.Trace("ParseAuthorizationToken: %v", err) + return nil + } + + if uid == 0 { + return nil + } + + u, err := user_model.GetUserByID(uid) + if err != nil { + log.Error("GetUserByID: %v", err) + return nil + } + + return u +} diff --git a/routers/api/packages/conan/conan.go b/routers/api/packages/conan/conan.go new file mode 100644 index 0000000000..0a27f18fd1 --- /dev/null +++ b/routers/api/packages/conan/conan.go @@ -0,0 +1,818 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
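The Bearer-token handshake that conan/auth.go implements can be exercised with any plain HTTP client against the /v1/users endpoints registered earlier. What follows is a minimal client-side sketch, assuming a Gitea instance at gitea.example.com and a user alice (host, user name and password are placeholders); the endpoints and the Bearer scheme are the ones handled by the code in this patch.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder instance and package owner.
	base := "https://gitea.example.com/api/packages/alice/conan"

	// 1. Exchange basic credentials for a token (handled by conan.Authenticate).
	req, _ := http.NewRequest("GET", base+"/v1/users/authenticate", nil)
	req.SetBasicAuth("alice", "secret")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	tokenBytes, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	token := string(tokenBytes)

	// 2. Later requests carry the token as a Bearer header, which
	//    conan.Auth.Verify resolves back to the user via
	//    packages.ParseAuthorizationToken.
	req, _ = http.NewRequest("GET", base+"/v1/users/check_credentials", nil)
	req.Header.Set("Authorization", "Bearer "+token)
	resp, err = http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("check_credentials:", resp.Status) // "200 OK" while the token is valid
}

The Conan client goes through the same exchange itself when a remote requires authentication.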
+ +package conan + +import ( + "fmt" + "io" + "net/http" + "strings" + "time" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + conan_model "code.gitea.io/gitea/models/packages/conan" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/notification" + packages_module "code.gitea.io/gitea/modules/packages" + conan_module "code.gitea.io/gitea/modules/packages/conan" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" +) + +const ( + conanfileFile = "conanfile.py" + conaninfoFile = "conaninfo.txt" + + recipeReferenceKey = "RecipeReference" + packageReferenceKey = "PackageReference" +) + +type stringSet map[string]struct{} + +var ( + recipeFileList = stringSet{ + conanfileFile: struct{}{}, + "conanmanifest.txt": struct{}{}, + "conan_sources.tgz": struct{}{}, + "conan_export.tgz": struct{}{}, + } + packageFileList = stringSet{ + conaninfoFile: struct{}{}, + "conanmanifest.txt": struct{}{}, + "conan_package.tgz": struct{}{}, + } +) + +func jsonResponse(ctx *context.Context, status int, obj interface{}) { + // https://github.com/conan-io/conan/issues/6613 + ctx.Resp.Header().Set("Content-Type", "application/json") + ctx.Status(status) + if err := json.NewEncoder(ctx.Resp).Encode(obj); err != nil { + log.Error("JSON encode: %v", err) + } +} + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + jsonResponse(ctx, status, map[string]string{ + "message": message, + }) + }) +} + +func baseURL(ctx *context.Context) string { + return setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/conan" +} + +// ExtractPathParameters is a middleware to extract common parameters from path +func ExtractPathParameters(ctx *context.Context) { + rref, err := conan_module.NewRecipeReference( + ctx.Params("name"), + ctx.Params("version"), + ctx.Params("user"), + ctx.Params("channel"), + ctx.Params("recipe_revision"), + ) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + ctx.Data[recipeReferenceKey] = rref + + reference := ctx.Params("package_reference") + + var pref *conan_module.PackageReference + if reference != "" { + pref, err = conan_module.NewPackageReference( + rref, + reference, + ctx.Params("package_revision"), + ) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + } + + ctx.Data[packageReferenceKey] = pref +} + +// Ping reports the server capabilities +func Ping(ctx *context.Context) { + ctx.RespHeader().Add("X-Conan-Server-Capabilities", "revisions") // complex_search,checksum_deploy,matrix_params + + ctx.Status(http.StatusOK) +} + +// Authenticate creates an authentication token for the user +func Authenticate(ctx *context.Context) { + if ctx.Doer == nil { + apiError(ctx, http.StatusBadRequest, nil) + return + } + + token, err := packages_service.CreateAuthorizationToken(ctx.Doer) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.PlainText(http.StatusOK, token) +} + +// CheckCredentials tests if the provided authentication token is valid +func CheckCredentials(ctx *context.Context) { + if ctx.Doer == nil { + ctx.Status(http.StatusUnauthorized) + } else { + ctx.Status(http.StatusOK) + } +} + +// RecipeSnapshot displays the recipe files with their md5 hash +func RecipeSnapshot(ctx 
*context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + serveSnapshot(ctx, rref.AsKey()) +} + +// PackageSnapshot displays the package files with their md5 hash +func PackageSnapshot(ctx *context.Context) { + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + serveSnapshot(ctx, pref.AsKey()) +} + +func serveSnapshot(ctx *context.Context, fileKey string) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeConan, rref.Name, rref.Version) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + pfs, _, err := packages_model.SearchFiles(ctx, &packages_model.PackageFileSearchOptions{ + VersionID: pv.ID, + CompositeKey: fileKey, + }) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pfs) == 0 { + apiError(ctx, http.StatusNotFound, nil) + return + } + + files := make(map[string]string) + for _, pf := range pfs { + pb, err := packages_model.GetBlobByID(ctx, pf.BlobID) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + files[pf.Name] = pb.HashMD5 + } + + jsonResponse(ctx, http.StatusOK, files) +} + +// RecipeDownloadURLs displays the recipe files with their download url +func RecipeDownloadURLs(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + serveDownloadURLs( + ctx, + rref.AsKey(), + fmt.Sprintf(baseURL(ctx)+"/v1/files/%s/recipe", rref.LinkName()), + ) +} + +// PackageDownloadURLs displays the package files with their download url +func PackageDownloadURLs(ctx *context.Context) { + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + serveDownloadURLs( + ctx, + pref.AsKey(), + fmt.Sprintf(baseURL(ctx)+"/v1/files/%s/package/%s", pref.Recipe.LinkName(), pref.LinkName()), + ) +} + +func serveDownloadURLs(ctx *context.Context, fileKey, downloadURL string) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeConan, rref.Name, rref.Version) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + pfs, _, err := packages_model.SearchFiles(ctx, &packages_model.PackageFileSearchOptions{ + VersionID: pv.ID, + CompositeKey: fileKey, + }) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if len(pfs) == 0 { + apiError(ctx, http.StatusNotFound, nil) + return + } + + urls := make(map[string]string) + for _, pf := range pfs { + urls[pf.Name] = fmt.Sprintf("%s/%s", downloadURL, pf.Name) + } + + jsonResponse(ctx, http.StatusOK, urls) +} + +// RecipeUploadURLs displays the upload urls for the provided recipe files +func RecipeUploadURLs(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + serveUploadURLs( + ctx, + recipeFileList, + fmt.Sprintf(baseURL(ctx)+"/v1/files/%s/recipe", rref.LinkName()), + ) +} + +// PackageUploadURLs displays the upload urls for the provided package files +func PackageUploadURLs(ctx *context.Context) { + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + 
serveUploadURLs( + ctx, + packageFileList, + fmt.Sprintf(baseURL(ctx)+"/v1/files/%s/package/%s", pref.Recipe.LinkName(), pref.LinkName()), + ) +} + +func serveUploadURLs(ctx *context.Context, fileFilter stringSet, uploadURL string) { + defer ctx.Req.Body.Close() + + var files map[string]int64 + if err := json.NewDecoder(ctx.Req.Body).Decode(&files); err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + urls := make(map[string]string) + for file := range files { + if _, ok := fileFilter[file]; ok { + urls[file] = fmt.Sprintf("%s/%s", uploadURL, file) + } + } + + jsonResponse(ctx, http.StatusOK, urls) +} + +// UploadRecipeFile handles the upload of a recipe file +func UploadRecipeFile(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + uploadFile(ctx, recipeFileList, rref.AsKey()) +} + +// UploadPackageFile handles the upload of a package file +func UploadPackageFile(ctx *context.Context) { + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + uploadFile(ctx, packageFileList, pref.AsKey()) +} + +func uploadFile(ctx *context.Context, fileFilter stringSet, fileKey string) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + filename := ctx.Params("filename") + if _, ok := fileFilter[filename]; !ok { + apiError(ctx, http.StatusBadRequest, nil) + return + } + + upload, close, err := ctx.UploadStream() + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + if close { + defer upload.Close() + } + + buf, err := packages_module.CreateHashedBufferFromReader(upload, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + if buf.Size() == 0 { + // ignore empty uploads, second request contains content + jsonResponse(ctx, http.StatusOK, nil) + return + } + + isConanfileFile := filename == conanfileFile + + pci := &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeConan, + Name: rref.Name, + Version: rref.Version, + }, + SemverCompatible: true, + Creator: ctx.Doer, + } + pfci := &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: strings.ToLower(filename), + CompositeKey: fileKey, + }, + Data: buf, + IsLead: isConanfileFile, + Properties: map[string]string{ + conan_module.PropertyRecipeUser: rref.User, + conan_module.PropertyRecipeChannel: rref.Channel, + conan_module.PropertyRecipeRevision: rref.RevisionOrDefault(), + }, + OverwriteExisting: true, + } + + if pref != nil { + pfci.Properties[conan_module.PropertyPackageReference] = pref.Reference + pfci.Properties[conan_module.PropertyPackageRevision] = pref.RevisionOrDefault() + } + + if isConanfileFile || filename == conaninfoFile { + if isConanfileFile { + metadata, err := conan_module.ParseConanfile(buf) + if err != nil { + log.Error("Error parsing package metadata: %v", err) + apiError(ctx, http.StatusInternalServerError, err) + return + } + pv, err := packages_model.GetVersionByNameAndVersion(ctx, pci.Owner.ID, pci.PackageType, pci.Name, pci.Version) + if err != nil && err != packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if pv != nil { + raw, err := json.Marshal(metadata) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + pv.MetadataJSON = 
string(raw) + if err := packages_model.UpdateVersion(ctx, pv); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + } else { + pci.Metadata = metadata + } + } else { + info, err := conan_module.ParseConaninfo(buf) + if err != nil { + log.Error("Error parsing conan info: %v", err) + apiError(ctx, http.StatusInternalServerError, err) + return + } + raw, err := json.Marshal(info) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + pfci.Properties[conan_module.PropertyPackageInfo] = string(raw) + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + } + + _, _, err = packages_service.CreatePackageOrAddFileToExisting( + pci, + pfci, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageFile { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusCreated) +} + +// DownloadRecipeFile serves the content of the requested recipe file +func DownloadRecipeFile(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + downloadFile(ctx, recipeFileList, rref.AsKey()) +} + +// DownloadPackageFile serves the content of the requested package file +func DownloadPackageFile(ctx *context.Context) { + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + downloadFile(ctx, packageFileList, pref.AsKey()) +} + +func downloadFile(ctx *context.Context, fileFilter stringSet, fileKey string) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + filename := ctx.Params("filename") + if _, ok := fileFilter[filename]; !ok { + apiError(ctx, http.StatusBadRequest, nil) + return + } + + s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion( + ctx, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeConan, + Name: rref.Name, + Version: rref.Version, + }, + &packages_service.PackageFileInfo{ + Filename: filename, + CompositeKey: fileKey, + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} + +// DeleteRecipeV1 deletes the requested recipe(s) +func DeleteRecipeV1(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + if err := deleteRecipeOrPackage(ctx, rref, true, nil, false); err != nil { + if err == packages_model.ErrPackageNotExist || err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + ctx.Status(http.StatusOK) +} + +// DeleteRecipeV2 deletes the requested recipe(s) respecting its revisions +func DeleteRecipeV2(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + if err := deleteRecipeOrPackage(ctx, rref, rref.Revision == "", nil, false); err != nil { + if err == packages_model.ErrPackageNotExist || err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + ctx.Status(http.StatusOK) +} + +// DeletePackageV1 deletes the requested package(s) +func DeletePackageV1(ctx *context.Context) { + 
rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + type PackageReferences struct { + References []string `json:"package_ids"` + } + + var ids *PackageReferences + if err := json.NewDecoder(ctx.Req.Body).Decode(&ids); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + revisions, err := conan_model.GetRecipeRevisions(ctx, ctx.Package.Owner.ID, rref) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + for _, revision := range revisions { + currentRref := rref.WithRevision(revision.Value) + + var references []*conan_model.PropertyValue + if len(ids.References) == 0 { + if references, err = conan_model.GetPackageReferences(ctx, ctx.Package.Owner.ID, currentRref); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + } else { + for _, reference := range ids.References { + references = append(references, &conan_model.PropertyValue{Value: reference}) + } + } + + for _, reference := range references { + pref, _ := conan_module.NewPackageReference(currentRref, reference.Value, conan_module.DefaultRevision) + if err := deleteRecipeOrPackage(ctx, currentRref, true, pref, true); err != nil { + if err == packages_model.ErrPackageNotExist || err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + } + } + ctx.Status(http.StatusOK) +} + +// DeletePackageV2 deletes the requested package(s) respecting its revisions +func DeletePackageV2(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + if pref != nil { // has package reference + if err := deleteRecipeOrPackage(ctx, rref, false, pref, pref.Revision == ""); err != nil { + if err == packages_model.ErrPackageNotExist || err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + } else { + ctx.Status(http.StatusOK) + } + return + } + + references, err := conan_model.GetPackageReferences(ctx, ctx.Package.Owner.ID, rref) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(references) == 0 { + apiError(ctx, http.StatusNotFound, conan_model.ErrPackageReferenceNotExist) + return + } + + for _, reference := range references { + pref, _ := conan_module.NewPackageReference(rref, reference.Value, conan_module.DefaultRevision) + + if err := deleteRecipeOrPackage(ctx, rref, false, pref, true); err != nil { + if err == packages_model.ErrPackageNotExist || err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + } + + ctx.Status(http.StatusOK) +} + +func deleteRecipeOrPackage(apictx *context.Context, rref *conan_module.RecipeReference, ignoreRecipeRevision bool, pref *conan_module.PackageReference, ignorePackageRevision bool) error { + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + pv, err := packages_model.GetVersionByNameAndVersion(ctx, apictx.Package.Owner.ID, packages_model.TypeConan, rref.Name, rref.Version) + if err != nil { + return err + } + + pd, err := packages_model.GetPackageDescriptor(ctx, pv) + if err != nil { + return err + } + + filter := map[string]string{ + conan_module.PropertyRecipeUser: rref.User, 
+ conan_module.PropertyRecipeChannel: rref.Channel, + } + if !ignoreRecipeRevision { + filter[conan_module.PropertyRecipeRevision] = rref.RevisionOrDefault() + } + if pref != nil { + filter[conan_module.PropertyPackageReference] = pref.Reference + if !ignorePackageRevision { + filter[conan_module.PropertyPackageRevision] = pref.RevisionOrDefault() + } + } + + pfs, _, err := packages_model.SearchFiles(ctx, &packages_model.PackageFileSearchOptions{ + VersionID: pv.ID, + Properties: filter, + }) + if err != nil { + return err + } + if len(pfs) == 0 { + return conan_model.ErrPackageReferenceNotExist + } + + for _, pf := range pfs { + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeFile, pf.ID); err != nil { + return err + } + if err := packages_model.DeleteFileByID(ctx, pf.ID); err != nil { + return err + } + } + + versionDeleted := false + has, err := packages_model.HasVersionFileReferences(ctx, pv.ID) + if err != nil { + return err + } + if !has { + versionDeleted = true + + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeVersion, pv.ID); err != nil { + return err + } + + if err := packages_model.DeleteVersionByID(ctx, pv.ID); err != nil { + return err + } + } + + if err := committer.Commit(); err != nil { + return err + } + + if versionDeleted { + notification.NotifyPackageDelete(apictx.Doer, pd) + } + + return nil +} + +// ListRecipeRevisions gets a list of all recipe revisions +func ListRecipeRevisions(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + revisions, err := conan_model.GetRecipeRevisions(ctx, ctx.Package.Owner.ID, rref) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + listRevisions(ctx, revisions) +} + +// ListPackageRevisions gets a list of all package revisions +func ListPackageRevisions(ctx *context.Context) { + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + revisions, err := conan_model.GetPackageRevisions(ctx, ctx.Package.Owner.ID, pref) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + listRevisions(ctx, revisions) +} + +type revisionInfo struct { + Revision string `json:"revision"` + Time time.Time `json:"time"` +} + +func listRevisions(ctx *context.Context, revisions []*conan_model.PropertyValue) { + if len(revisions) == 0 { + apiError(ctx, http.StatusNotFound, conan_model.ErrRecipeReferenceNotExist) + return + } + + type RevisionList struct { + Revisions []*revisionInfo `json:"revisions"` + } + + revs := make([]*revisionInfo, 0, len(revisions)) + for _, rev := range revisions { + revs = append(revs, &revisionInfo{Revision: rev.Value, Time: time.Unix(int64(rev.CreatedUnix), 0)}) + } + + jsonResponse(ctx, http.StatusOK, &RevisionList{revs}) +} + +// LatestRecipeRevision gets the latest recipe revision +func LatestRecipeRevision(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + revision, err := conan_model.GetLastRecipeRevision(ctx, ctx.Package.Owner.ID, rref) + if err != nil { + if err == conan_model.ErrRecipeReferenceNotExist || err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + jsonResponse(ctx, http.StatusOK, &revisionInfo{Revision: revision.Value, Time: time.Unix(int64(revision.CreatedUnix), 0)}) +} + +// LatestPackageRevision gets the latest package revision +func 
LatestPackageRevision(ctx *context.Context) { + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + revision, err := conan_model.GetLastPackageRevision(ctx, ctx.Package.Owner.ID, pref) + if err != nil { + if err == conan_model.ErrRecipeReferenceNotExist || err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + jsonResponse(ctx, http.StatusOK, &revisionInfo{Revision: revision.Value, Time: time.Unix(int64(revision.CreatedUnix), 0)}) +} + +// ListRecipeRevisionFiles gets a list of all recipe revision files +func ListRecipeRevisionFiles(ctx *context.Context) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + listRevisionFiles(ctx, rref.AsKey()) +} + +// ListPackageRevisionFiles gets a list of all package revision files +func ListPackageRevisionFiles(ctx *context.Context) { + pref := ctx.Data[packageReferenceKey].(*conan_module.PackageReference) + + listRevisionFiles(ctx, pref.AsKey()) +} + +func listRevisionFiles(ctx *context.Context, fileKey string) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeConan, rref.Name, rref.Version) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + pfs, _, err := packages_model.SearchFiles(ctx, &packages_model.PackageFileSearchOptions{ + VersionID: pv.ID, + CompositeKey: fileKey, + }) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pfs) == 0 { + apiError(ctx, http.StatusNotFound, nil) + return + } + + files := make(map[string]interface{}) + for _, pf := range pfs { + files[pf.Name] = nil + } + + type FileList struct { + Files map[string]interface{} `json:"files"` + } + + jsonResponse(ctx, http.StatusOK, &FileList{ + Files: files, + }) +} diff --git a/routers/api/packages/conan/search.go b/routers/api/packages/conan/search.go new file mode 100644 index 0000000000..39dd6362aa --- /dev/null +++ b/routers/api/packages/conan/search.go @@ -0,0 +1,164 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package conan + +import ( + "net/http" + "strings" + + conan_model "code.gitea.io/gitea/models/packages/conan" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/json" + conan_module "code.gitea.io/gitea/modules/packages/conan" +) + +// SearchResult contains the found recipe names +type SearchResult struct { + Results []string `json:"results"` +} + +// SearchRecipes searches all recipes matching the query +func SearchRecipes(ctx *context.Context) { + q := ctx.FormTrim("q") + + opts := parseQuery(ctx.Package.Owner, q) + + results, err := conan_model.SearchRecipes(ctx, opts) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + jsonResponse(ctx, http.StatusOK, &SearchResult{ + Results: results, + }) +} + +// parseQuery creates search options for the given query +func parseQuery(owner *user_model.User, query string) *conan_model.RecipeSearchOptions { + opts := &conan_model.RecipeSearchOptions{ + OwnerID: owner.ID, + } + + if query != "" { + parts := strings.Split(strings.ReplaceAll(query, "@", "/"), "/") + + opts.Name = parts[0] + if len(parts) > 1 && parts[1] != "*" { + opts.Version = parts[1] + } + if len(parts) > 2 && parts[2] != "*" { + opts.User = parts[2] + } + if len(parts) > 3 && parts[3] != "*" { + opts.Channel = parts[3] + } + } + + return opts +} + +// SearchPackagesV1 searches all packages of a recipe (Conan v1 endpoint) +func SearchPackagesV1(ctx *context.Context) { + searchPackages(ctx, true) +} + +// SearchPackagesV2 searches all packages of a recipe (Conan v2 endpoint) +func SearchPackagesV2(ctx *context.Context) { + searchPackages(ctx, false) +} + +func searchPackages(ctx *context.Context, searchAllRevisions bool) { + rref := ctx.Data[recipeReferenceKey].(*conan_module.RecipeReference) + + if !searchAllRevisions && rref.Revision == "" { + lastRevision, err := conan_model.GetLastRecipeRevision(ctx, ctx.Package.Owner.ID, rref) + if err != nil { + if err == conan_model.ErrRecipeReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + rref = rref.WithRevision(lastRevision.Value) + } else { + has, err := conan_model.RecipeExists(ctx, ctx.Package.Owner.ID, rref) + if err != nil { + if err == conan_model.ErrRecipeReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + if !has { + apiError(ctx, http.StatusNotFound, nil) + return + } + } + + recipeRevisions := []*conan_model.PropertyValue{{Value: rref.Revision}} + if searchAllRevisions { + var err error + recipeRevisions, err = conan_model.GetRecipeRevisions(ctx, ctx.Package.Owner.ID, rref) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + } + + result := make(map[string]*conan_module.Conaninfo) + + for _, recipeRevision := range recipeRevisions { + currentRef := rref + if recipeRevision.Value != "" { + currentRef = rref.WithRevision(recipeRevision.Value) + } + packageReferences, err := conan_model.GetPackageReferences(ctx, ctx.Package.Owner.ID, currentRef) + if err != nil { + if err == conan_model.ErrRecipeReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + for _, packageReference := range packageReferences { + if _, ok := result[packageReference.Value]; ok { + continue + } + pref, _ := conan_module.NewPackageReference(currentRef, 
packageReference.Value, "") + lastPackageRevision, err := conan_model.GetLastPackageRevision(ctx, ctx.Package.Owner.ID, pref) + if err != nil { + if err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + pref = pref.WithRevision(lastPackageRevision.Value) + infoRaw, err := conan_model.GetPackageInfo(ctx, ctx.Package.Owner.ID, pref) + if err != nil { + if err == conan_model.ErrPackageReferenceNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + var info *conan_module.Conaninfo + if err := json.Unmarshal([]byte(infoRaw), &info); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + result[pref.Reference] = info + } + } + + jsonResponse(ctx, http.StatusOK, result) +} diff --git a/routers/api/packages/container/auth.go b/routers/api/packages/container/auth.go new file mode 100644 index 0000000000..770068a3bf --- /dev/null +++ b/routers/api/packages/container/auth.go @@ -0,0 +1,45 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package container + +import ( + "net/http" + + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/services/auth" + "code.gitea.io/gitea/services/packages" +) + +type Auth struct{} + +func (a *Auth) Name() string { + return "container" +} + +// Verify extracts the user from the Bearer token +// If it's an anonymous session a ghost user is returned +func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataStore, sess auth.SessionStore) *user_model.User { + uid, err := packages.ParseAuthorizationToken(req) + if err != nil { + log.Trace("ParseAuthorizationToken: %v", err) + return nil + } + + if uid == 0 { + return nil + } + if uid == -1 { + return user_model.NewGhostUser() + } + + u, err := user_model.GetUserByID(uid) + if err != nil { + log.Error("GetUserByID: %v", err) + return nil + } + + return u +} diff --git a/routers/api/packages/container/blob.go b/routers/api/packages/container/blob.go new file mode 100644 index 0000000000..8f6254f583 --- /dev/null +++ b/routers/api/packages/container/blob.go @@ -0,0 +1,136 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package container + +import ( + "context" + "encoding/hex" + "fmt" + "strings" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + container_model "code.gitea.io/gitea/models/packages/container" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + container_module "code.gitea.io/gitea/modules/packages/container" + packages_service "code.gitea.io/gitea/services/packages" +) + +// saveAsPackageBlob creates a package blob from an upload +// The uploaded blob gets stored in a special upload version to link them to the package/image +func saveAsPackageBlob(hsr packages_module.HashedSizeReader, pi *packages_service.PackageInfo) (*packages_model.PackageBlob, error) { + pb := packages_service.NewPackageBlob(hsr) + + exists := false + + contentStore := packages_module.NewContentStore() + + err := db.WithTx(func(ctx context.Context) error { + p := &packages_model.Package{ + OwnerID: pi.Owner.ID, + Type: packages_model.TypeContainer, + Name: strings.ToLower(pi.Name), + LowerName: strings.ToLower(pi.Name), + } + var err error + if p, err = packages_model.TryInsertPackage(ctx, p); err != nil { + if err != packages_model.ErrDuplicatePackage { + log.Error("Error inserting package: %v", err) + return err + } + } + + pv := &packages_model.PackageVersion{ + PackageID: p.ID, + CreatorID: pi.Owner.ID, + Version: container_model.UploadVersion, + LowerVersion: container_model.UploadVersion, + IsInternal: true, + MetadataJSON: "null", + } + if pv, err = packages_model.GetOrInsertVersion(ctx, pv); err != nil { + if err != packages_model.ErrDuplicatePackageVersion { + log.Error("Error inserting package: %v", err) + return err + } + } + + pb, exists, err = packages_model.GetOrInsertBlob(ctx, pb) + if err != nil { + log.Error("Error inserting package blob: %v", err) + return err + } + if !exists { + if err := contentStore.Save(packages_module.BlobHash256Key(pb.HashSHA256), hsr, hsr.Size()); err != nil { + log.Error("Error saving package blob in content store: %v", err) + return err + } + } + + filename := strings.ToLower(fmt.Sprintf("sha256_%s", pb.HashSHA256)) + + pf := &packages_model.PackageFile{ + VersionID: pv.ID, + BlobID: pb.ID, + Name: filename, + LowerName: filename, + CompositeKey: packages_model.EmptyFileKey, + } + if pf, err = packages_model.TryInsertFile(ctx, pf); err != nil { + if err == packages_model.ErrDuplicatePackageFile { + return nil + } + log.Error("Error inserting package file: %v", err) + return err + } + + if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeFile, pf.ID, container_module.PropertyDigest, digestFromPackageBlob(pb)); err != nil { + log.Error("Error setting package file property: %v", err) + return err + } + + return nil + }) + if err != nil { + if !exists { + if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { + log.Error("Error deleting package blob from content store: %v", err) + } + } + return nil, err + } + + return pb, nil +} + +func deleteBlob(ownerID int64, image, digest string) error { + return db.WithTx(func(ctx context.Context) error { + pfds, err := container_model.GetContainerBlobs(ctx, &container_model.BlobSearchOptions{ + OwnerID: ownerID, + Image: image, + Digest: digest, + }) + if err != nil { + return err + } + + for _, file := range pfds { + if err := packages_service.DeletePackageFile(ctx, file.File); err != nil { + return err + } + } + return nil + }) +} + +func digestFromHashSummer(h packages_module.HashSummer) 
string { + _, _, hashSHA256, _ := h.Sums() + return "sha256:" + hex.EncodeToString(hashSHA256) +} + +func digestFromPackageBlob(pb *packages_model.PackageBlob) string { + return "sha256:" + pb.HashSHA256 +} diff --git a/routers/api/packages/container/container.go b/routers/api/packages/container/container.go new file mode 100644 index 0000000000..08b6b421b0 --- /dev/null +++ b/routers/api/packages/container/container.go @@ -0,0 +1,613 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package container + +import ( + "errors" + "fmt" + "io" + "net/http" + "net/url" + "regexp" + "strconv" + "strings" + + packages_model "code.gitea.io/gitea/models/packages" + container_model "code.gitea.io/gitea/models/packages/container" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + container_module "code.gitea.io/gitea/modules/packages/container" + "code.gitea.io/gitea/modules/packages/container/oci" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" + container_service "code.gitea.io/gitea/services/packages/container" +) + +// maximum size of a container manifest +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pushing-manifests +const maxManifestSize = 10 * 1024 * 1024 + +var imageNamePattern = regexp.MustCompile(`\A[a-z0-9]+([._-][a-z0-9]+)*(/[a-z0-9]+([._-][a-z0-9]+)*)*\z`) + +type containerHeaders struct { + Status int + ContentDigest string + UploadUUID string + Range string + Location string + ContentType string + ContentLength int64 +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#legacy-docker-support-http-headers +func setResponseHeaders(resp http.ResponseWriter, h *containerHeaders) { + if h.Location != "" { + resp.Header().Set("Location", h.Location) + } + if h.Range != "" { + resp.Header().Set("Range", h.Range) + } + if h.ContentType != "" { + resp.Header().Set("Content-Type", h.ContentType) + } + if h.ContentLength != 0 { + resp.Header().Set("Content-Length", strconv.FormatInt(h.ContentLength, 10)) + } + if h.UploadUUID != "" { + resp.Header().Set("Docker-Upload-Uuid", h.UploadUUID) + } + if h.ContentDigest != "" { + resp.Header().Set("Docker-Content-Digest", h.ContentDigest) + resp.Header().Set("ETag", fmt.Sprintf(`"%s"`, h.ContentDigest)) + } + resp.Header().Set("Docker-Distribution-Api-Version", "registry/2.0") + resp.WriteHeader(h.Status) +} + +func jsonResponse(ctx *context.Context, status int, obj interface{}) { + setResponseHeaders(ctx.Resp, &containerHeaders{ + Status: status, + ContentType: "application/json", + }) + if err := json.NewEncoder(ctx.Resp).Encode(obj); err != nil { + log.Error("JSON encode: %v", err) + } +} + +func apiError(ctx *context.Context, status int, err error) { + helper.LogAndProcessError(ctx, status, err, func(message string) { + setResponseHeaders(ctx.Resp, &containerHeaders{ + Status: status, + }) + }) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#error-codes +func apiErrorDefined(ctx *context.Context, err *namedError) { + type ContainerError struct { + Code string `json:"code"` + Message string `json:"message"` + } + + type ContainerErrors struct { + Errors []ContainerError 
`json:"errors"` + } + + jsonResponse(ctx, err.StatusCode, ContainerErrors{ + Errors: []ContainerError{ + { + Code: err.Code, + Message: err.Message, + }, + }, + }) +} + +// ReqContainerAccess is a middleware which checks that the current user is valid (a real user, or the ghost user for anonymous access) +func ReqContainerAccess(ctx *context.Context) { + if ctx.Doer == nil { + ctx.Resp.Header().Add("WWW-Authenticate", `Bearer realm="`+setting.AppURL+`v2/token"`) + ctx.Resp.Header().Add("WWW-Authenticate", `Basic`) + apiErrorDefined(ctx, errUnauthorized) + } +} + +// VerifyImageName is a middleware which checks if the image name is allowed +func VerifyImageName(ctx *context.Context) { + if !imageNamePattern.MatchString(ctx.Params("image")) { + apiErrorDefined(ctx, errNameInvalid) + } +} + +// DetermineSupport is used to test if the registry supports OCI +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#determining-support +func DetermineSupport(ctx *context.Context) { + setResponseHeaders(ctx.Resp, &containerHeaders{ + Status: http.StatusOK, + }) +} + +// Authenticate creates a token for the current user +// If the current user is anonymous, the ghost user is used +func Authenticate(ctx *context.Context) { + u := ctx.Doer + if u == nil { + u = user_model.NewGhostUser() + } + + token, err := packages_service.CreateAuthorizationToken(u) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.JSON(http.StatusOK, map[string]string{ + "token": token, + }) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#mounting-a-blob-from-another-repository +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#single-post +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pushing-a-blob-in-chunks +func InitiateUploadBlob(ctx *context.Context) { + image := ctx.Params("image") + + mount := ctx.FormTrim("mount") + from := ctx.FormTrim("from") + if mount != "" { + blob, _ := container_model.GetContainerBlob(ctx, &container_model.BlobSearchOptions{ + Image: from, + Digest: mount, + }) + if blob != nil { + setResponseHeaders(ctx.Resp, &containerHeaders{ + Location: fmt.Sprintf("/v2/%s/%s/blobs/%s", ctx.Package.Owner.LowerName, image, mount), + ContentDigest: mount, + Status: http.StatusCreated, + }) + return + } + } + + digest := ctx.FormTrim("digest") + if digest != "" { + buf, err := packages_module.CreateHashedBufferFromReader(ctx.Req.Body, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + if digest != digestFromHashSummer(buf) { + apiErrorDefined(ctx, errDigestInvalid) + return + } + + if _, err := saveAsPackageBlob(buf, &packages_service.PackageInfo{Owner: ctx.Package.Owner, Name: image}); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + Location: fmt.Sprintf("/v2/%s/%s/blobs/%s", ctx.Package.Owner.LowerName, image, digest), + ContentDigest: digest, + Status: http.StatusCreated, + }) + return + } + + upload, err := packages_model.CreateBlobUpload(ctx) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + Location: fmt.Sprintf("/v2/%s/%s/blobs/uploads/%s", ctx.Package.Owner.LowerName, image, upload.ID), + Range: "0-0", + UploadUUID: upload.ID, + Status: http.StatusAccepted, + }) +} + +// 
https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pushing-a-blob-in-chunks +func UploadBlob(ctx *context.Context) { + image := ctx.Params("image") + + uploader, err := container_service.NewBlobUploader(ctx, ctx.Params("uuid")) + if err != nil { + if err == packages_model.ErrPackageBlobUploadNotExist { + apiErrorDefined(ctx, errBlobUploadUnknown) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + defer uploader.Close() + + contentRange := ctx.Req.Header.Get("Content-Range") + if contentRange != "" { + start, end := 0, 0 + if _, err := fmt.Sscanf(contentRange, "%d-%d", &start, &end); err != nil { + apiErrorDefined(ctx, errBlobUploadInvalid) + return + } + + if int64(start) != uploader.Size() { + apiErrorDefined(ctx, errBlobUploadInvalid.WithStatusCode(http.StatusRequestedRangeNotSatisfiable)) + return + } + } else if uploader.Size() != 0 { + apiErrorDefined(ctx, errBlobUploadInvalid.WithMessage("Stream uploads after first write are not allowed")) + return + } + + if err := uploader.Append(ctx, ctx.Req.Body); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + Location: fmt.Sprintf("/v2/%s/%s/blobs/uploads/%s", ctx.Package.Owner.LowerName, image, uploader.ID), + Range: fmt.Sprintf("0-%d", uploader.Size()-1), + UploadUUID: uploader.ID, + Status: http.StatusAccepted, + }) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pushing-a-blob-in-chunks +func EndUploadBlob(ctx *context.Context) { + image := ctx.Params("image") + + digest := ctx.FormTrim("digest") + if digest == "" { + apiErrorDefined(ctx, errDigestInvalid) + return + } + + uploader, err := container_service.NewBlobUploader(ctx, ctx.Params("uuid")) + if err != nil { + if err == packages_model.ErrPackageBlobUploadNotExist { + apiErrorDefined(ctx, errBlobUploadUnknown) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + close := true + defer func() { + if close { + uploader.Close() + } + }() + + if ctx.Req.Body != nil { + if err := uploader.Append(ctx, ctx.Req.Body); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + } + + if digest != digestFromHashSummer(uploader) { + apiErrorDefined(ctx, errDigestInvalid) + return + } + + if _, err := saveAsPackageBlob(uploader, &packages_service.PackageInfo{Owner: ctx.Package.Owner, Name: image}); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if err := uploader.Close(); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + close = false + + if err := container_service.RemoveBlobUploadByID(ctx, uploader.ID); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + Location: fmt.Sprintf("/v2/%s/%s/blobs/%s", ctx.Package.Owner.LowerName, image, digest), + ContentDigest: digest, + Status: http.StatusCreated, + }) +} + +func getBlobFromContext(ctx *context.Context) (*packages_model.PackageFileDescriptor, error) { + digest := ctx.Params("digest") + + if !oci.Digest(digest).Validate() { + return nil, container_model.ErrContainerBlobNotExist + } + + return container_model.GetContainerBlob(ctx, &container_model.BlobSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Image: ctx.Params("image"), + Digest: digest, + }) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#checking-if-content-exists-in-the-registry +func HeadBlob(ctx 
*context.Context) { + blob, err := getBlobFromContext(ctx) + if err != nil { + if err == container_model.ErrContainerBlobNotExist { + apiErrorDefined(ctx, errBlobUnknown) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + ContentDigest: blob.Properties.GetByName(container_module.PropertyDigest), + ContentLength: blob.Blob.Size, + Status: http.StatusOK, + }) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pulling-blobs +func GetBlob(ctx *context.Context) { + blob, err := getBlobFromContext(ctx) + if err != nil { + if err == container_model.ErrContainerBlobNotExist { + apiErrorDefined(ctx, errBlobUnknown) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + s, _, err := packages_service.GetPackageFileStream(ctx, blob.File) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + setResponseHeaders(ctx.Resp, &containerHeaders{ + ContentDigest: blob.Properties.GetByName(container_module.PropertyDigest), + ContentType: blob.Properties.GetByName(container_module.PropertyMediaType), + ContentLength: blob.Blob.Size, + Status: http.StatusOK, + }) + if _, err := io.Copy(ctx.Resp, s); err != nil { + log.Error("Error whilst copying content to response: %v", err) + } +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-blobs +func DeleteBlob(ctx *context.Context) { + digest := ctx.Params("digest") + + if !oci.Digest(digest).Validate() { + apiErrorDefined(ctx, errBlobUnknown) + return + } + + if err := deleteBlob(ctx.Package.Owner.ID, ctx.Params("image"), digest); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + Status: http.StatusAccepted, + }) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pushing-manifests +func UploadManifest(ctx *context.Context) { + reference := ctx.Params("reference") + + mci := &manifestCreationInfo{ + MediaType: oci.MediaType(ctx.Req.Header.Get("Content-Type")), + Owner: ctx.Package.Owner, + Creator: ctx.Doer, + Image: ctx.Params("image"), + Reference: reference, + IsTagged: !oci.Digest(reference).Validate(), + } + + if mci.IsTagged && !oci.Reference(reference).Validate() { + apiErrorDefined(ctx, errManifestInvalid.WithMessage("Tag is invalid")) + return + } + + maxSize := maxManifestSize + 1 + buf, err := packages_module.CreateHashedBufferFromReader(&io.LimitedReader{R: ctx.Req.Body, N: int64(maxSize)}, maxSize) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + if buf.Size() > maxManifestSize { + apiErrorDefined(ctx, errManifestInvalid.WithMessage("Manifest exceeds maximum size").WithStatusCode(http.StatusRequestEntityTooLarge)) + return + } + + digest, err := processManifest(mci, buf) + if err != nil { + var namedError *namedError + if errors.As(err, &namedError) { + apiErrorDefined(ctx, namedError) + } else if errors.Is(err, container_model.ErrContainerBlobNotExist) { + apiErrorDefined(ctx, errBlobUnknown) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + Location: fmt.Sprintf("/v2/%s/%s/manifests/%s", ctx.Package.Owner.LowerName, mci.Image, reference), + ContentDigest: digest, + Status: http.StatusCreated, + }) +} + +func getManifestFromContext(ctx *context.Context) 
(*packages_model.PackageFileDescriptor, error) { + reference := ctx.Params("reference") + + opts := &container_model.BlobSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Image: ctx.Params("image"), + IsManifest: true, + } + if oci.Digest(reference).Validate() { + opts.Digest = reference + } else if oci.Reference(reference).Validate() { + opts.Tag = reference + } else { + return nil, container_model.ErrContainerBlobNotExist + } + + return container_model.GetContainerBlob(ctx, opts) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#checking-if-content-exists-in-the-registry +func HeadManifest(ctx *context.Context) { + manifest, err := getManifestFromContext(ctx) + if err != nil { + if err == container_model.ErrContainerBlobNotExist { + apiErrorDefined(ctx, errManifestUnknown) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + ContentDigest: manifest.Properties.GetByName(container_module.PropertyDigest), + ContentType: manifest.Properties.GetByName(container_module.PropertyMediaType), + ContentLength: manifest.Blob.Size, + Status: http.StatusOK, + }) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#pulling-manifests +func GetManifest(ctx *context.Context) { + manifest, err := getManifestFromContext(ctx) + if err != nil { + if err == container_model.ErrContainerBlobNotExist { + apiErrorDefined(ctx, errManifestUnknown) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + s, _, err := packages_service.GetPackageFileStream(ctx, manifest.File) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + setResponseHeaders(ctx.Resp, &containerHeaders{ + ContentDigest: manifest.Properties.GetByName(container_module.PropertyDigest), + ContentType: manifest.Properties.GetByName(container_module.PropertyMediaType), + ContentLength: manifest.Blob.Size, + Status: http.StatusOK, + }) + if _, err := io.Copy(ctx.Resp, s); err != nil { + log.Error("Error whilst copying content to response: %v", err) + } +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-tags +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#deleting-manifests +func DeleteManifest(ctx *context.Context) { + reference := ctx.Params("reference") + + opts := &container_model.BlobSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Image: ctx.Params("image"), + IsManifest: true, + } + if oci.Digest(reference).Validate() { + opts.Digest = reference + } else if oci.Reference(reference).Validate() { + opts.Tag = reference + } else { + apiErrorDefined(ctx, errManifestUnknown) + return + } + + pvs, err := container_model.GetManifestVersions(ctx, opts) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if len(pvs) == 0 { + apiErrorDefined(ctx, errManifestUnknown) + return + } + + for _, pv := range pvs { + if err := packages_service.RemovePackageVersion(ctx.Doer, pv); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + } + + setResponseHeaders(ctx.Resp, &containerHeaders{ + Status: http.StatusAccepted, + }) +} + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#content-discovery +func GetTagList(ctx *context.Context) { + image := ctx.Params("image") + + if _, err := packages_model.GetPackageByName(ctx, ctx.Package.Owner.ID, packages_model.TypeContainer, image); err != nil { + if err == 
packages_model.ErrPackageNotExist { + apiErrorDefined(ctx, errNameUnknown) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + n := -1 + if ctx.FormTrim("n") != "" { + n = ctx.FormInt("n") + } + last := ctx.FormTrim("last") + + tags, err := container_model.GetImageTags(ctx, ctx.Package.Owner.ID, image, n, last) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + type TagList struct { + Name string `json:"name"` + Tags []string `json:"tags"` + } + + if len(tags) > 0 { + v := url.Values{} + if n > 0 { + v.Add("n", strconv.Itoa(n)) + } + v.Add("last", tags[len(tags)-1]) + + ctx.Resp.Header().Set("Link", fmt.Sprintf(`</v2/%s/%s/tags/list?%s>; rel="next"`, ctx.Package.Owner.LowerName, image, v.Encode())) + } + + jsonResponse(ctx, http.StatusOK, TagList{ + Name: strings.ToLower(ctx.Package.Owner.LowerName + "/" + image), + Tags: tags, + }) +} diff --git a/routers/api/packages/container/errors.go b/routers/api/packages/container/errors.go new file mode 100644 index 0000000000..0efbb081ca --- /dev/null +++ b/routers/api/packages/container/errors.go @@ -0,0 +1,53 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package container + +import ( + "net/http" +) + +// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#error-codes +var ( + errBlobUnknown = &namedError{Code: "BLOB_UNKNOWN", StatusCode: http.StatusNotFound} + errBlobUploadInvalid = &namedError{Code: "BLOB_UPLOAD_INVALID", StatusCode: http.StatusBadRequest} + errBlobUploadUnknown = &namedError{Code: "BLOB_UPLOAD_UNKNOWN", StatusCode: http.StatusNotFound} + errDigestInvalid = &namedError{Code: "DIGEST_INVALID", StatusCode: http.StatusBadRequest} + errManifestBlobUnknown = &namedError{Code: "MANIFEST_BLOB_UNKNOWN", StatusCode: http.StatusNotFound} + errManifestInvalid = &namedError{Code: "MANIFEST_INVALID", StatusCode: http.StatusBadRequest} + errManifestUnknown = &namedError{Code: "MANIFEST_UNKNOWN", StatusCode: http.StatusNotFound} + errNameInvalid = &namedError{Code: "NAME_INVALID", StatusCode: http.StatusBadRequest} + errNameUnknown = &namedError{Code: "NAME_UNKNOWN", StatusCode: http.StatusNotFound} + errSizeInvalid = &namedError{Code: "SIZE_INVALID", StatusCode: http.StatusBadRequest} + errUnauthorized = &namedError{Code: "UNAUTHORIZED", StatusCode: http.StatusUnauthorized} + errUnsupported = &namedError{Code: "UNSUPPORTED", StatusCode: http.StatusNotImplemented} +) + +type namedError struct { + Code string + StatusCode int + Message string +} + +func (e *namedError) Error() string { + return e.Message +} + +// WithMessage creates a new instance of the error with a different message +func (e *namedError) WithMessage(message string) *namedError { + return &namedError{ + Code: e.Code, + StatusCode: e.StatusCode, + Message: message, + } +} + +// WithStatusCode creates a new instance of the error with a different status code +func (e *namedError) WithStatusCode(statusCode int) *namedError { + return &namedError{ + Code: e.Code, + StatusCode: statusCode, + Message: e.Message, + } +} diff --git a/routers/api/packages/container/manifest.go b/routers/api/packages/container/manifest.go new file mode 100644 index 0000000000..d899ac8ee2 --- /dev/null +++ b/routers/api/packages/container/manifest.go @@ -0,0 +1,412 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. 
+// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package container + +import ( + "context" + "fmt" + "io" + "strings" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + container_model "code.gitea.io/gitea/models/packages/container" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + container_module "code.gitea.io/gitea/modules/packages/container" + "code.gitea.io/gitea/modules/packages/container/oci" + packages_service "code.gitea.io/gitea/services/packages" +) + +// manifestCreationInfo describes a manifest to create +type manifestCreationInfo struct { + MediaType oci.MediaType + Owner *user_model.User + Creator *user_model.User + Image string + Reference string + IsTagged bool + Properties map[string]string +} + +func processManifest(mci *manifestCreationInfo, buf *packages_module.HashedBuffer) (string, error) { + var schema oci.SchemaMediaBase + if err := json.NewDecoder(buf).Decode(&schema); err != nil { + return "", err + } + + if schema.SchemaVersion != 2 { + return "", errUnsupported.WithMessage("Schema version is not supported") + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + return "", err + } + + if !mci.MediaType.IsValid() { + mci.MediaType = schema.MediaType + if !mci.MediaType.IsValid() { + return "", errManifestInvalid.WithMessage("MediaType not recognized") + } + } + + if mci.MediaType.IsImageManifest() { + d, err := processImageManifest(mci, buf) + return d, err + } else if mci.MediaType.IsImageIndex() { + d, err := processImageManifestIndex(mci, buf) + return d, err + } + return "", errManifestInvalid +} + +func processImageManifest(mci *manifestCreationInfo, buf *packages_module.HashedBuffer) (string, error) { + manifestDigest := "" + + err := func() error { + var manifest oci.Manifest + if err := json.NewDecoder(buf).Decode(&manifest); err != nil { + return err + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + return err + } + + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + configDescriptor, err := container_model.GetContainerBlob(ctx, &container_model.BlobSearchOptions{ + OwnerID: mci.Owner.ID, + Image: mci.Image, + Digest: string(manifest.Config.Digest), + }) + if err != nil { + return err + } + + configReader, err := packages_module.NewContentStore().Get(packages_module.BlobHash256Key(configDescriptor.Blob.HashSHA256)) + if err != nil { + return err + } + defer configReader.Close() + + metadata, err := container_module.ParseImageConfig(manifest.Config.MediaType, configReader) + if err != nil { + return err + } + + blobReferences := make([]*blobReference, 0, 1+len(manifest.Layers)) + + blobReferences = append(blobReferences, &blobReference{ + Digest: manifest.Config.Digest, + MediaType: manifest.Config.MediaType, + File: configDescriptor, + ExpectedSize: manifest.Config.Size, + }) + + for _, layer := range manifest.Layers { + pfd, err := container_model.GetContainerBlob(ctx, &container_model.BlobSearchOptions{ + OwnerID: mci.Owner.ID, + Image: mci.Image, + Digest: string(layer.Digest), + }) + if err != nil { + return err + } + + blobReferences = append(blobReferences, &blobReference{ + Digest: layer.Digest, + MediaType: layer.MediaType, + File: pfd, + ExpectedSize: layer.Size, + }) + } + + pv, err := createPackageAndVersion(ctx, mci, metadata) + if err != nil 
{ + return err + } + + uploadVersion, err := packages_model.GetInternalVersionByNameAndVersion(ctx, mci.Owner.ID, packages_model.TypeContainer, mci.Image, container_model.UploadVersion) + if err != nil && err != packages_model.ErrPackageNotExist { + return err + } + + for _, ref := range blobReferences { + if err := createFileFromBlobReference(ctx, pv, uploadVersion, ref); err != nil { + return err + } + } + + pb, created, digest, err := createManifestBlob(ctx, mci, pv, buf) + removeBlob := false + defer func() { + if removeBlob { + contentStore := packages_module.NewContentStore() + if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { + log.Error("Error deleting package blob from content store: %v", err) + } + } + }() + if err != nil { + removeBlob = created + return err + } + + if err := committer.Commit(); err != nil { + removeBlob = created + return err + } + + manifestDigest = digest + + return nil + }() + if err != nil { + return "", err + } + + return manifestDigest, nil +} + +func processImageManifestIndex(mci *manifestCreationInfo, buf *packages_module.HashedBuffer) (string, error) { + manifestDigest := "" + + err := func() error { + var index oci.Index + if err := json.NewDecoder(buf).Decode(&index); err != nil { + return err + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + return err + } + + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + metadata := &container_module.Metadata{ + Type: container_module.TypeOCI, + MultiArch: make(map[string]string), + } + + for _, manifest := range index.Manifests { + if !manifest.MediaType.IsImageManifest() { + return errManifestInvalid + } + + platform := container_module.DefaultPlatform + if manifest.Platform != nil { + platform = fmt.Sprintf("%s/%s", manifest.Platform.OS, manifest.Platform.Architecture) + if manifest.Platform.Variant != "" { + platform = fmt.Sprintf("%s/%s", platform, manifest.Platform.Variant) + } + } + + _, err := container_model.GetContainerBlob(ctx, &container_model.BlobSearchOptions{ + OwnerID: mci.Owner.ID, + Image: mci.Image, + Digest: string(manifest.Digest), + IsManifest: true, + }) + if err != nil { + if err == container_model.ErrContainerBlobNotExist { + return errManifestBlobUnknown + } + return err + } + + metadata.MultiArch[platform] = string(manifest.Digest) + } + + pv, err := createPackageAndVersion(ctx, mci, metadata) + if err != nil { + return err + } + + pb, created, digest, err := createManifestBlob(ctx, mci, pv, buf) + removeBlob := false + defer func() { + if removeBlob { + contentStore := packages_module.NewContentStore() + if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { + log.Error("Error deleting package blob from content store: %v", err) + } + } + }() + if err != nil { + removeBlob = created + return err + } + + if err := committer.Commit(); err != nil { + removeBlob = created + return err + } + + manifestDigest = digest + + return nil + }() + if err != nil { + return "", err + } + + return manifestDigest, nil +} + +func createPackageAndVersion(ctx context.Context, mci *manifestCreationInfo, metadata *container_module.Metadata) (*packages_model.PackageVersion, error) { + p := &packages_model.Package{ + OwnerID: mci.Owner.ID, + Type: packages_model.TypeContainer, + Name: strings.ToLower(mci.Image), + LowerName: strings.ToLower(mci.Image), + } + var err error + if p, err = packages_model.TryInsertPackage(ctx, p); err != nil { + if err != 
packages_model.ErrDuplicatePackage { + log.Error("Error inserting package: %v", err) + return nil, err + } + } + + metadata.IsTagged = mci.IsTagged + + metadataJSON, err := json.Marshal(metadata) + if err != nil { + return nil, err + } + + _pv := &packages_model.PackageVersion{ + PackageID: p.ID, + CreatorID: mci.Creator.ID, + Version: strings.ToLower(mci.Reference), + LowerVersion: strings.ToLower(mci.Reference), + MetadataJSON: string(metadataJSON), + } + var pv *packages_model.PackageVersion + if pv, err = packages_model.GetOrInsertVersion(ctx, _pv); err != nil { + if err == packages_model.ErrDuplicatePackageVersion { + if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil { + return nil, err + } + + if pv, err = packages_model.GetOrInsertVersion(ctx, _pv); err != nil { + log.Error("Error inserting package: %v", err) + return nil, err + } + } else { + log.Error("Error inserting package: %v", err) + return nil, err + } + } + + if mci.IsTagged { + if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeVersion, pv.ID, container_module.PropertyManifestTagged, ""); err != nil { + log.Error("Error setting package version property: %v", err) + return nil, err + } + } + for _, digest := range metadata.MultiArch { + if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeVersion, pv.ID, container_module.PropertyManifestReference, digest); err != nil { + log.Error("Error setting package version property: %v", err) + return nil, err + } + } + + return pv, nil +} + +type blobReference struct { + Digest oci.Digest + MediaType oci.MediaType + Name string + File *packages_model.PackageFileDescriptor + ExpectedSize int64 + IsLead bool +} + +func createFileFromBlobReference(ctx context.Context, pv, uploadVersion *packages_model.PackageVersion, ref *blobReference) error { + if ref.File.Blob.Size != ref.ExpectedSize { + return errSizeInvalid + } + + if ref.Name == "" { + ref.Name = strings.ToLower(fmt.Sprintf("sha256_%s", ref.File.Blob.HashSHA256)) + } + + pf := &packages_model.PackageFile{ + VersionID: pv.ID, + BlobID: ref.File.Blob.ID, + Name: ref.Name, + LowerName: ref.Name, + IsLead: ref.IsLead, + } + var err error + if pf, err = packages_model.TryInsertFile(ctx, pf); err != nil { + if err == packages_model.ErrDuplicatePackageFile { + // Skip this blob because the manifest contains the same filesystem layer multiple times. 
+ return nil + } + log.Error("Error inserting package file: %v", err) + return err + } + + props := map[string]string{ + container_module.PropertyMediaType: string(ref.MediaType), + container_module.PropertyDigest: string(ref.Digest), + } + for name, value := range props { + if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeFile, pf.ID, name, value); err != nil { + log.Error("Error setting package file property: %v", err) + return err + } + } + + // Remove the file from the blob upload version + if uploadVersion != nil && ref.File.File != nil && uploadVersion.ID == ref.File.File.VersionID { + if err := packages_service.DeletePackageFile(ctx, ref.File.File); err != nil { + return err + } + } + + return nil +} + +func createManifestBlob(ctx context.Context, mci *manifestCreationInfo, pv *packages_model.PackageVersion, buf *packages_module.HashedBuffer) (*packages_model.PackageBlob, bool, string, error) { + pb, exists, err := packages_model.GetOrInsertBlob(ctx, packages_service.NewPackageBlob(buf)) + if err != nil { + log.Error("Error inserting package blob: %v", err) + return nil, false, "", err + } + if !exists { + contentStore := packages_module.NewContentStore() + if err := contentStore.Save(packages_module.BlobHash256Key(pb.HashSHA256), buf, buf.Size()); err != nil { + log.Error("Error saving package blob in content store: %v", err) + return nil, false, "", err + } + } + + manifestDigest := digestFromHashSummer(buf) + err = createFileFromBlobReference(ctx, pv, nil, &blobReference{ + Digest: oci.Digest(manifestDigest), + MediaType: mci.MediaType, + Name: container_model.ManifestFilename, + File: &packages_model.PackageFileDescriptor{Blob: pb}, + ExpectedSize: pb.Size, + IsLead: true, + }) + + return pb, !exists, manifestDigest, err +} diff --git a/routers/api/packages/generic/generic.go b/routers/api/packages/generic/generic.go new file mode 100644 index 0000000000..d862f77259 --- /dev/null +++ b/routers/api/packages/generic/generic.go @@ -0,0 +1,166 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package generic + +import ( + "errors" + "net/http" + "regexp" + + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" + + "github.com/hashicorp/go-version" +) + +var ( + packageNameRegex = regexp.MustCompile(`\A[A-Za-z0-9\.\_\-\+]+\z`) + filenameRegex = packageNameRegex +) + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + ctx.PlainText(status, message) + }) +} + +// DownloadPackageFile serves the specific generic package. 
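// The packagename, packageversion and filename path parameters are validated
// by sanitizeParameters below: the version must parse as semver, and the name
// and filename may only contain the characters A-Z, a-z, 0-9, ".", "_", "-" and "+".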
+func DownloadPackageFile(ctx *context.Context) { + packageName, packageVersion, filename, err := sanitizeParameters(ctx) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion( + ctx, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeGeneric, + Name: packageName, + Version: packageVersion, + }, + &packages_service.PackageFileInfo{ + Filename: filename, + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} + +// UploadPackage uploads the specific generic package. +// Duplicated packages get rejected. +func UploadPackage(ctx *context.Context) { + packageName, packageVersion, filename, err := sanitizeParameters(ctx) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + upload, close, err := ctx.UploadStream() + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if close { + defer upload.Close() + } + + buf, err := packages_module.CreateHashedBufferFromReader(upload, 32*1024*1024) + if err != nil { + log.Error("Error creating hashed buffer: %v", err) + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + _, _, err = packages_service.CreatePackageAndAddFile( + &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeGeneric, + Name: packageName, + Version: packageVersion, + }, + SemverCompatible: true, + Creator: ctx.Doer, + }, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: filename, + }, + Data: buf, + IsLead: true, + }, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageVersion { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusCreated) +} + +// DeletePackage deletes the specific generic package. 
+func DeletePackage(ctx *context.Context) { + packageName, packageVersion, _, err := sanitizeParameters(ctx) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + err = packages_service.RemovePackageVersionByNameAndVersion( + ctx.Doer, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeGeneric, + Name: packageName, + Version: packageVersion, + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusOK) +} + +func sanitizeParameters(ctx *context.Context) (string, string, string, error) { + packageName := ctx.Params("packagename") + filename := ctx.Params("filename") + + if !packageNameRegex.MatchString(packageName) || !filenameRegex.MatchString(filename) { + return "", "", "", errors.New("Invalid package name or filename") + } + + v, err := version.NewSemver(ctx.Params("packageversion")) + if err != nil { + return "", "", "", err + } + + return packageName, v.String(), filename, nil +} diff --git a/routers/api/packages/helm/helm.go b/routers/api/packages/helm/helm.go new file mode 100644 index 0000000000..ae0643a35a --- /dev/null +++ b/routers/api/packages/helm/helm.go @@ -0,0 +1,205 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package helm + +import ( + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + helm_module "code.gitea.io/gitea/modules/packages/helm" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" + + "gopkg.in/yaml.v2" +) + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + type Error struct { + Error string `json:"error"` + } + ctx.JSON(status, Error{ + Error: message, + }) + }) +} + +// Index generates the Helm charts index +func Index(ctx *context.Context) { + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Type: packages_model.TypeHelm, + }) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + baseURL := setting.AppURL + "api/packages/" + url.PathEscape(ctx.Package.Owner.Name) + "/helm" + + type ChartVersion struct { + helm_module.Metadata `yaml:",inline"` + URLs []string `yaml:"urls"` + Created time.Time `yaml:"created,omitempty"` + Removed bool `yaml:"removed,omitempty"` + Digest string `yaml:"digest,omitempty"` + } + + type ServerInfo struct { + ContextPath string `yaml:"contextPath,omitempty"` + } + + type Index struct { + APIVersion string `yaml:"apiVersion"` + Entries map[string][]*ChartVersion `yaml:"entries"` + Generated time.Time `yaml:"generated,omitempty"` + ServerInfo *ServerInfo `yaml:"serverInfo,omitempty"` + } + + entries := make(map[string][]*ChartVersion) + for _, pv := range pvs { + metadata := &helm_module.Metadata{} + if err := json.Unmarshal([]byte(pv.MetadataJSON), &metadata); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + 
entries[metadata.Name] = append(entries[metadata.Name], &ChartVersion{ + Metadata: *metadata, + Created: pv.CreatedUnix.AsTime(), + URLs: []string{fmt.Sprintf("%s/%s", baseURL, url.PathEscape(createFilename(metadata)))}, + }) + } + + ctx.Resp.WriteHeader(http.StatusOK) + if err := yaml.NewEncoder(ctx.Resp).Encode(&Index{ + APIVersion: "v1", + Entries: entries, + Generated: time.Now(), + ServerInfo: &ServerInfo{ + ContextPath: setting.AppSubURL + "/api/packages/" + url.PathEscape(ctx.Package.Owner.Name) + "/helm", + }, + }); err != nil { + log.Error("YAML encode failed: %v", err) + } +} + +// DownloadPackageFile serves the content of a package +func DownloadPackageFile(ctx *context.Context) { + filename := ctx.Params("filename") + + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Type: packages_model.TypeHelm, + Name: packages_model.SearchValue{ + ExactMatch: true, + Value: ctx.Params("package"), + }, + HasFileWithName: filename, + }) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pvs) != 1 { + apiError(ctx, http.StatusNotFound, nil) + return + } + + s, pf, err := packages_service.GetFileStreamByPackageVersion( + ctx, + pvs[0], + &packages_service.PackageFileInfo{ + Filename: filename, + }, + ) + if err != nil { + if err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} + +// UploadPackage creates a new package +func UploadPackage(ctx *context.Context) { + upload, needToClose, err := ctx.UploadStream() + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if needToClose { + defer upload.Close() + } + + buf, err := packages_module.CreateHashedBufferFromReader(upload, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + metadata, err := helm_module.ParseChartArchive(buf) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + _, _, err = packages_service.CreatePackageOrAddFileToExisting( + &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeHelm, + Name: metadata.Name, + Version: metadata.Version, + }, + SemverCompatible: true, + Creator: ctx.Doer, + Metadata: metadata, + }, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: createFilename(metadata), + }, + Data: buf, + IsLead: true, + OverwriteExisting: true, + }, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageVersion { + apiError(ctx, http.StatusConflict, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusCreated) +} + +func createFilename(metadata *helm_module.Metadata) string { + return strings.ToLower(fmt.Sprintf("%s-%s.tgz", metadata.Name, metadata.Version)) +} diff --git a/routers/api/packages/helper/helper.go b/routers/api/packages/helper/helper.go new file mode 100644 index 0000000000..8cde84023f --- /dev/null +++ b/routers/api/packages/helper/helper.go @@ -0,0 +1,38 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. 
+// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package helper + +import ( + "fmt" + "net/http" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" +) + +// LogAndProcessError logs an error and calls a custom callback with the processed error message. +// If the error is an InternalServerError the message is stripped if the user is not an admin. +func LogAndProcessError(ctx *context.Context, status int, obj interface{}, cb func(string)) { + var message string + if err, ok := obj.(error); ok { + message = err.Error() + } else if obj != nil { + message = fmt.Sprintf("%s", obj) + } + if status == http.StatusInternalServerError { + log.ErrorWithSkip(1, message) + + if setting.IsProd && (ctx.Doer == nil || !ctx.Doer.IsAdmin) { + message = "" + } + } else { + log.Debug(message) + } + + if cb != nil { + cb(message) + } +} diff --git a/routers/api/packages/maven/api.go b/routers/api/packages/maven/api.go new file mode 100644 index 0000000000..b60a317814 --- /dev/null +++ b/routers/api/packages/maven/api.go @@ -0,0 +1,56 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package maven + +import ( + "encoding/xml" + "sort" + "strings" + + packages_model "code.gitea.io/gitea/models/packages" + maven_module "code.gitea.io/gitea/modules/packages/maven" +) + +// MetadataResponse https://maven.apache.org/ref/3.2.5/maven-repository-metadata/repository-metadata.html +type MetadataResponse struct { + XMLName xml.Name `xml:"metadata"` + GroupID string `xml:"groupId"` + ArtifactID string `xml:"artifactId"` + Release string `xml:"versioning>release,omitempty"` + Latest string `xml:"versioning>latest"` + Version []string `xml:"versioning>versions>version"` +} + +func createMetadataResponse(pds []*packages_model.PackageDescriptor) *MetadataResponse { + sort.Slice(pds, func(i, j int) bool { + // Maven and Gradle order packages by their creation timestamp and not by their version string + return pds[i].Version.CreatedUnix < pds[j].Version.CreatedUnix + }) + + var release *packages_model.PackageDescriptor + + versions := make([]string, 0, len(pds)) + for _, pd := range pds { + if !strings.HasSuffix(pd.Version.Version, "-SNAPSHOT") { + release = pd + } + versions = append(versions, pd.Version.Version) + } + + latest := pds[len(pds)-1] + + metadata := latest.Metadata.(*maven_module.Metadata) + + resp := &MetadataResponse{ + GroupID: metadata.GroupID, + ArtifactID: metadata.ArtifactID, + Latest: latest.Version.Version, + Version: versions, + } + if release != nil { + resp.Release = release.Version.Version + } + return resp +} diff --git a/routers/api/packages/maven/maven.go b/routers/api/packages/maven/maven.go new file mode 100644 index 0000000000..bba4babf04 --- /dev/null +++ b/routers/api/packages/maven/maven.go @@ -0,0 +1,378 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
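For reference, createMetadataResponse above is what serveMavenMetadata (later in maven.go) passes to xml.Marshal. A minimal standalone sketch of the resulting document shape, with made-up coordinates and MarshalIndent used only for readability:

package main

import (
	"encoding/xml"
	"fmt"
)

// Same shape as MetadataResponse above; the values are illustrative only.
type metadata struct {
	XMLName    xml.Name `xml:"metadata"`
	GroupID    string   `xml:"groupId"`
	ArtifactID string   `xml:"artifactId"`
	Release    string   `xml:"versioning>release,omitempty"`
	Latest     string   `xml:"versioning>latest"`
	Version    []string `xml:"versioning>versions>version"`
}

func main() {
	out, _ := xml.MarshalIndent(&metadata{
		GroupID:    "com.example",
		ArtifactID: "demo",
		Release:    "1.0.1",             // newest non-SNAPSHOT version
		Latest:     "1.0.2-SNAPSHOT",    // newest version overall
		Version:    []string{"1.0.0", "1.0.1", "1.0.2-SNAPSHOT"},
	}, "", "  ")
	// Prints a <metadata> root with groupId/artifactId and a <versioning>
	// block containing <latest>, <release> and the ordered <versions> list.
	fmt.Println(xml.Header + string(out))
}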
+ +package maven + +import ( + "crypto/md5" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/xml" + "errors" + "fmt" + "io" + "net/http" + "path/filepath" + "regexp" + "strings" + + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + maven_module "code.gitea.io/gitea/modules/packages/maven" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" +) + +const ( + mavenMetadataFile = "maven-metadata.xml" + extensionMD5 = ".md5" + extensionSHA1 = ".sha1" + extensionSHA256 = ".sha256" + extensionSHA512 = ".sha512" +) + +var ( + errInvalidParameters = errors.New("request parameters are invalid") + illegalCharacters = regexp.MustCompile(`[\\/:"<>|?\*]`) +) + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + ctx.PlainText(status, message) + }) +} + +// DownloadPackageFile serves the content of a package +func DownloadPackageFile(ctx *context.Context) { + params, err := extractPathParameters(ctx) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + if params.IsMeta && params.Version == "" { + serveMavenMetadata(ctx, params) + } else { + servePackageFile(ctx, params) + } +} + +func serveMavenMetadata(ctx *context.Context, params parameters) { + // /com/foo/project/maven-metadata.xml[.md5/.sha1/.sha256/.sha512] + + packageName := params.GroupID + "-" + params.ArtifactID + pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeMaven, packageName) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pvs) == 0 { + apiError(ctx, http.StatusNotFound, packages_model.ErrPackageNotExist) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + xmlMetadata, err := xml.Marshal(createMetadataResponse(pds)) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + xmlMetadataWithHeader := append([]byte(xml.Header), xmlMetadata...) 
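	// What follows handles checksum requests against the generated metadata:
	// a client asking for maven-metadata.xml.sha1 (or .md5/.sha256/.sha512)
	// receives the hex-encoded digest of the XML document built above, while
	// a plain maven-metadata.xml request receives the document itself.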
+ + ext := strings.ToLower(filepath.Ext(params.Filename)) + if isChecksumExtension(ext) { + var hash []byte + switch ext { + case extensionMD5: + tmp := md5.Sum(xmlMetadataWithHeader) + hash = tmp[:] + case extensionSHA1: + tmp := sha1.Sum(xmlMetadataWithHeader) + hash = tmp[:] + case extensionSHA256: + tmp := sha256.Sum256(xmlMetadataWithHeader) + hash = tmp[:] + case extensionSHA512: + tmp := sha512.Sum512(xmlMetadataWithHeader) + hash = tmp[:] + } + ctx.PlainText(http.StatusOK, fmt.Sprintf("%x", hash)) + return + } + + ctx.PlainTextBytes(http.StatusOK, xmlMetadataWithHeader) +} + +func servePackageFile(ctx *context.Context, params parameters) { + packageName := params.GroupID + "-" + params.ArtifactID + + pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeMaven, packageName, params.Version) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + filename := params.Filename + + ext := strings.ToLower(filepath.Ext(filename)) + if isChecksumExtension(ext) { + filename = filename[:len(filename)-len(ext)] + } + + pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, filename, packages_model.EmptyFileKey) + if err != nil { + if err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + pb, err := packages_model.GetBlobByID(ctx, pf.BlobID) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if isChecksumExtension(ext) { + var hash string + switch ext { + case extensionMD5: + hash = pb.HashMD5 + case extensionSHA1: + hash = pb.HashSHA1 + case extensionSHA256: + hash = pb.HashSHA256 + case extensionSHA512: + hash = pb.HashSHA512 + } + ctx.PlainText(http.StatusOK, hash) + return + } + + s, err := packages_module.NewContentStore().Get(packages_module.BlobHash256Key(pb.HashSHA256)) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + } + defer s.Close() + + if pf.IsLead { + if err := packages_model.IncrementDownloadCounter(ctx, pv.ID); err != nil { + log.Error("Error incrementing download counter: %v", err) + } + } + + ctx.ServeStream(s, pf.Name) +} + +// UploadPackageFile adds a file to the package. If the package does not exist, it gets created. +func UploadPackageFile(ctx *context.Context) { + params, err := extractPathParameters(ctx) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + log.Trace("Parameters: %+v", params) + + // Ignore the package index //maven-metadata.xml + if params.IsMeta && params.Version == "" { + ctx.Status(http.StatusOK) + return + } + + packageName := params.GroupID + "-" + params.ArtifactID + + buf, err := packages_module.CreateHashedBufferFromReader(ctx.Req.Body, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + pvci := &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeMaven, + Name: packageName, + Version: params.Version, + }, + SemverCompatible: false, + Creator: ctx.Doer, + } + + ext := filepath.Ext(params.Filename) + + // Do not upload checksum files but compare the hashes. 
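	// For example, after uploading demo-1.0.jar a Maven client also PUTs
	// demo-1.0.jar.sha1 whose request body is the hex digest; the block below
	// looks up the already-stored blob, answers 400 on a mismatch and never
	// persists the checksum file itself.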
+ if isChecksumExtension(ext) { + pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvci.Owner.ID, pvci.PackageType, pvci.Name, pvci.Version) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, params.Filename[:len(params.Filename)-len(ext)], packages_model.EmptyFileKey) + if err != nil { + if err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + pb, err := packages_model.GetBlobByID(ctx, pf.BlobID) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + hash, err := io.ReadAll(buf) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if (ext == extensionMD5 && pb.HashMD5 != string(hash)) || + (ext == extensionSHA1 && pb.HashSHA1 != string(hash)) || + (ext == extensionSHA256 && pb.HashSHA256 != string(hash)) || + (ext == extensionSHA512 && pb.HashSHA512 != string(hash)) { + apiError(ctx, http.StatusBadRequest, "hash mismatch") + return + } + + ctx.Status(http.StatusOK) + return + } + + pfci := &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: params.Filename, + }, + Data: buf, + IsLead: false, + } + + // If it's the package pom file extract the metadata + if ext == ".pom" { + pfci.IsLead = true + + var err error + pvci.Metadata, err = maven_module.ParsePackageMetaData(buf) + if err != nil { + log.Error("Error parsing package metadata: %v", err) + } + + if pvci.Metadata != nil { + pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvci.Owner.ID, pvci.PackageType, pvci.Name, pvci.Version) + if err != nil && err != packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if pv != nil { + raw, err := json.Marshal(pvci.Metadata) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + pv.MetadataJSON = string(raw) + if err := packages_model.UpdateVersion(ctx, pv); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + } + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + } + + _, _, err = packages_service.CreatePackageOrAddFileToExisting( + pvci, + pfci, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageFile { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusCreated) +} + +func isChecksumExtension(ext string) bool { + return ext == extensionMD5 || ext == extensionSHA1 || ext == extensionSHA256 || ext == extensionSHA512 +} + +type parameters struct { + GroupID string + ArtifactID string + Version string + Filename string + IsMeta bool +} + +func extractPathParameters(ctx *context.Context) (parameters, error) { + parts := strings.Split(ctx.Params("*"), "/") + + p := parameters{ + Filename: parts[len(parts)-1], + } + + p.IsMeta = p.Filename == mavenMetadataFile || + p.Filename == mavenMetadataFile+extensionMD5 || + p.Filename == mavenMetadataFile+extensionSHA1 || + p.Filename == mavenMetadataFile+extensionSHA256 || + p.Filename == mavenMetadataFile+extensionSHA512 + + parts = parts[:len(parts)-1] + if len(parts) == 0 { + return p, errInvalidParameters + } + + 
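	// Worked example: for the upload path
	//   com/example/demo/1.0.1/demo-1.0.1.jar
	// Filename is "demo-1.0.1.jar", Version becomes "1.0.1", ArtifactID
	// "demo" and GroupID "com.example". For a non-SNAPSHOT metadata request
	// (com/example/demo/maven-metadata.xml) there is no version segment, so
	// Version is cleared below and only group/artifact are extracted.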
p.Version = parts[len(parts)-1] + if p.IsMeta && !strings.HasSuffix(p.Version, "-SNAPSHOT") { + p.Version = "" + } else { + parts = parts[:len(parts)-1] + } + + if illegalCharacters.MatchString(p.Version) { + return p, errInvalidParameters + } + + if len(parts) < 2 { + return p, errInvalidParameters + } + + p.ArtifactID = parts[len(parts)-1] + p.GroupID = strings.Join(parts[:len(parts)-1], ".") + + if illegalCharacters.MatchString(p.GroupID) || illegalCharacters.MatchString(p.ArtifactID) { + return p, errInvalidParameters + } + + return p, nil +} diff --git a/routers/api/packages/npm/api.go b/routers/api/packages/npm/api.go new file mode 100644 index 0000000000..56c8977043 --- /dev/null +++ b/routers/api/packages/npm/api.go @@ -0,0 +1,73 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package npm + +import ( + "encoding/base64" + "encoding/hex" + "fmt" + "net/url" + "sort" + + packages_model "code.gitea.io/gitea/models/packages" + npm_module "code.gitea.io/gitea/modules/packages/npm" +) + +func createPackageMetadataResponse(registryURL string, pds []*packages_model.PackageDescriptor) *npm_module.PackageMetadata { + sort.Slice(pds, func(i, j int) bool { + return pds[i].SemVer.LessThan(pds[j].SemVer) + }) + + versions := make(map[string]*npm_module.PackageMetadataVersion) + distTags := make(map[string]string) + for _, pd := range pds { + versions[pd.SemVer.String()] = createPackageMetadataVersion(registryURL, pd) + + for _, pvp := range pd.Properties { + if pvp.Name == npm_module.TagProperty { + distTags[pvp.Value] = pd.Version.Version + } + } + } + + latest := pds[len(pds)-1] + + metadata := latest.Metadata.(*npm_module.Metadata) + + return &npm_module.PackageMetadata{ + ID: latest.Package.Name, + Name: latest.Package.Name, + DistTags: distTags, + Description: metadata.Description, + Readme: metadata.Readme, + Homepage: metadata.ProjectURL, + Author: npm_module.User{Name: metadata.Author}, + License: metadata.License, + Versions: versions, + } +} + +func createPackageMetadataVersion(registryURL string, pd *packages_model.PackageDescriptor) *npm_module.PackageMetadataVersion { + hashBytes, _ := hex.DecodeString(pd.Files[0].Blob.HashSHA512) + + metadata := pd.Metadata.(*npm_module.Metadata) + + return &npm_module.PackageMetadataVersion{ + ID: fmt.Sprintf("%s@%s", pd.Package.Name, pd.Version.Version), + Name: pd.Package.Name, + Version: pd.Version.Version, + Description: metadata.Description, + Author: npm_module.User{Name: metadata.Author}, + Homepage: metadata.ProjectURL, + License: metadata.License, + Dependencies: metadata.Dependencies, + Readme: metadata.Readme, + Dist: npm_module.PackageDistribution{ + Shasum: pd.Files[0].Blob.HashSHA1, + Integrity: "sha512-" + base64.StdEncoding.EncodeToString(hashBytes), + Tarball: fmt.Sprintf("%s/%s/-/%s/%s", registryURL, url.QueryEscape(pd.Package.Name), url.PathEscape(pd.Version.Version), url.PathEscape(pd.Files[0].File.LowerName)), + }, + } +} diff --git a/routers/api/packages/npm/npm.go b/routers/api/packages/npm/npm.go new file mode 100644 index 0000000000..d127134d44 --- /dev/null +++ b/routers/api/packages/npm/npm.go @@ -0,0 +1,293 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
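createPackageMetadataVersion above derives the npm dist.integrity value from the stored SHA-512 blob hash, which is kept as hex in the database but sent base64-encoded on the wire. A minimal standalone sketch of that conversion, with an illustrative digest:

package main

import (
	"encoding/base64"
	"encoding/hex"
	"fmt"
)

func main() {
	// hexDigest stands in for pd.Files[0].Blob.HashSHA512 (illustrative value).
	hexDigest := "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"
	raw, err := hex.DecodeString(hexDigest)
	if err != nil {
		panic(err)
	}
	// npm expects "sha512-" followed by the base64-encoded raw digest,
	// not the hex string itself.
	fmt.Println("sha512-" + base64.StdEncoding.EncodeToString(raw))
}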
+ +package npm + +import ( + "bytes" + "errors" + "fmt" + "io" + "net/http" + "strings" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + packages_module "code.gitea.io/gitea/modules/packages" + npm_module "code.gitea.io/gitea/modules/packages/npm" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" + + "github.com/hashicorp/go-version" +) + +// errInvalidTagName indicates an invalid tag name +var errInvalidTagName = errors.New("The tag name is invalid") + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + ctx.JSON(status, map[string]string{ + "error": message, + }) + }) +} + +// packageNameFromParams gets the package name from the url parameters +// Variations: /name/, /@scope/name/, /@scope%2Fname/ +func packageNameFromParams(ctx *context.Context) string { + scope := ctx.Params("scope") + id := ctx.Params("id") + if scope != "" { + return fmt.Sprintf("@%s/%s", scope, id) + } + return id +} + +// PackageMetadata returns the metadata for a single package +func PackageMetadata(ctx *context.Context) { + packageName := packageNameFromParams(ctx) + + pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeNpm, packageName) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pvs) == 0 { + apiError(ctx, http.StatusNotFound, err) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + resp := createPackageMetadataResponse( + setting.AppURL+"api/packages/"+ctx.Package.Owner.Name+"/npm", + pds, + ) + + ctx.JSON(http.StatusOK, resp) +} + +// DownloadPackageFile serves the content of a package +func DownloadPackageFile(ctx *context.Context) { + packageName := packageNameFromParams(ctx) + packageVersion := ctx.Params("version") + filename := ctx.Params("filename") + + s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion( + ctx, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeNpm, + Name: packageName, + Version: packageVersion, + }, + &packages_service.PackageFileInfo{ + Filename: filename, + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} + +// UploadPackage creates a new package +func UploadPackage(ctx *context.Context) { + npmPackage, err := npm_module.ParsePackage(ctx.Req.Body) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + buf, err := packages_module.CreateHashedBufferFromReader(bytes.NewReader(npmPackage.Data), 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + pv, _, err := packages_service.CreatePackageAndAddFile( + &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeNpm, + Name: npmPackage.Name, + Version: npmPackage.Version, + }, + SemverCompatible: true, + Creator: ctx.Doer, + Metadata: npmPackage.Metadata, + }, + 
&packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: npmPackage.Filename, + }, + Data: buf, + IsLead: true, + }, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageVersion { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + for _, tag := range npmPackage.DistTags { + if err := setPackageTag(tag, pv, false); err != nil { + if err == errInvalidTagName { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + } + + ctx.Status(http.StatusCreated) +} + +// ListPackageTags returns all tags for a package +func ListPackageTags(ctx *context.Context) { + packageName := packageNameFromParams(ctx) + + pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeNpm, packageName) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + tags := make(map[string]string) + for _, pv := range pvs { + pvps, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeVersion, pv.ID, npm_module.TagProperty) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + for _, pvp := range pvps { + tags[pvp.Value] = pv.Version + } + } + + ctx.JSON(http.StatusOK, tags) +} + +// AddPackageTag adds a tag to the package +func AddPackageTag(ctx *context.Context) { + packageName := packageNameFromParams(ctx) + + body, err := io.ReadAll(ctx.Req.Body) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + version := strings.Trim(string(body), "\"") // is as "version" in the body + + pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeNpm, packageName, version) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if err := setPackageTag(ctx.Params("tag"), pv, false); err != nil { + if err == errInvalidTagName { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } +} + +// DeletePackageTag deletes a package tag +func DeletePackageTag(ctx *context.Context) { + packageName := packageNameFromParams(ctx) + + pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeNpm, packageName) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if len(pvs) != 0 { + if err := setPackageTag(ctx.Params("tag"), pvs[0], true); err != nil { + if err == errInvalidTagName { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + } +} + +func setPackageTag(tag string, pv *packages_model.PackageVersion, deleteOnly bool) error { + if tag == "" { + return errInvalidTagName + } + _, err := version.NewVersion(tag) + if err == nil { + return errInvalidTagName + } + + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + PackageID: pv.PackageID, + Properties: map[string]string{ + npm_module.TagProperty: tag, + }, + }) + if err != nil { + return err + } + + if len(pvs) == 1 { + pvps, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeVersion, pvs[0].ID, 
npm_module.TagProperty) + if err != nil { + return err + } + + for _, pvp := range pvps { + if pvp.Value == tag { + if err := packages_model.DeletePropertyByID(ctx, pvp.ID); err != nil { + return err + } + break + } + } + } + + if !deleteOnly { + _, err = packages_model.InsertProperty(ctx, packages_model.PropertyTypeVersion, pv.ID, npm_module.TagProperty, tag) + if err != nil { + return err + } + } + + return committer.Commit() +} diff --git a/routers/api/packages/nuget/api.go b/routers/api/packages/nuget/api.go new file mode 100644 index 0000000000..b449cfc5bb --- /dev/null +++ b/routers/api/packages/nuget/api.go @@ -0,0 +1,287 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package nuget + +import ( + "bytes" + "fmt" + "sort" + "time" + + packages_model "code.gitea.io/gitea/models/packages" + nuget_module "code.gitea.io/gitea/modules/packages/nuget" + + "github.com/hashicorp/go-version" +) + +// ServiceIndexResponse https://docs.microsoft.com/en-us/nuget/api/service-index#resources +type ServiceIndexResponse struct { + Version string `json:"version"` + Resources []ServiceResource `json:"resources"` +} + +// ServiceResource https://docs.microsoft.com/en-us/nuget/api/service-index#resource +type ServiceResource struct { + ID string `json:"@id"` + Type string `json:"@type"` +} + +func createServiceIndexResponse(root string) *ServiceIndexResponse { + return &ServiceIndexResponse{ + Version: "3.0.0", + Resources: []ServiceResource{ + {ID: root + "/query", Type: "SearchQueryService"}, + {ID: root + "/query", Type: "SearchQueryService/3.0.0-beta"}, + {ID: root + "/query", Type: "SearchQueryService/3.0.0-rc"}, + {ID: root + "/registration", Type: "RegistrationsBaseUrl"}, + {ID: root + "/registration", Type: "RegistrationsBaseUrl/3.0.0-beta"}, + {ID: root + "/registration", Type: "RegistrationsBaseUrl/3.0.0-rc"}, + {ID: root + "/package", Type: "PackageBaseAddress/3.0.0"}, + {ID: root, Type: "PackagePublish/2.0.0"}, + {ID: root + "/symbolpackage", Type: "SymbolPackagePublish/4.9.0"}, + }, + } +} + +// RegistrationIndexResponse https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#response +type RegistrationIndexResponse struct { + RegistrationIndexURL string `json:"@id"` + Type []string `json:"@type"` + Count int `json:"count"` + Pages []*RegistrationIndexPage `json:"items"` +} + +// RegistrationIndexPage https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#registration-page-object +type RegistrationIndexPage struct { + RegistrationPageURL string `json:"@id"` + Lower string `json:"lower"` + Upper string `json:"upper"` + Count int `json:"count"` + Items []*RegistrationIndexPageItem `json:"items"` +} + +// RegistrationIndexPageItem https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#registration-leaf-object-in-a-page +type RegistrationIndexPageItem struct { + RegistrationLeafURL string `json:"@id"` + PackageContentURL string `json:"packageContent"` + CatalogEntry *CatalogEntry `json:"catalogEntry"` +} + +// CatalogEntry https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#catalog-entry +type CatalogEntry struct { + CatalogLeafURL string `json:"@id"` + PackageContentURL string `json:"packageContent"` + ID string `json:"id"` + Version string `json:"version"` + Description string `json:"description"` + ReleaseNotes string `json:"releaseNotes"` + Authors string `json:"authors"` + 
RequireLicenseAcceptance bool `json:"requireLicenseAcceptance"` + ProjectURL string `json:"projectURL"` + DependencyGroups []*PackageDependencyGroup `json:"dependencyGroups"` +} + +// PackageDependencyGroup https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#package-dependency-group +type PackageDependencyGroup struct { + TargetFramework string `json:"targetFramework"` + Dependencies []*PackageDependency `json:"dependencies"` +} + +// PackageDependency https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#package-dependency +type PackageDependency struct { + ID string `json:"id"` + Range string `json:"range"` +} + +func createRegistrationIndexResponse(l *linkBuilder, pds []*packages_model.PackageDescriptor) *RegistrationIndexResponse { + sort.Slice(pds, func(i, j int) bool { + return pds[i].SemVer.LessThan(pds[j].SemVer) + }) + + items := make([]*RegistrationIndexPageItem, 0, len(pds)) + for _, p := range pds { + items = append(items, createRegistrationIndexPageItem(l, p)) + } + + return &RegistrationIndexResponse{ + RegistrationIndexURL: l.GetRegistrationIndexURL(pds[0].Package.Name), + Type: []string{"catalog:CatalogRoot", "PackageRegistration", "catalog:Permalink"}, + Count: 1, + Pages: []*RegistrationIndexPage{ + { + RegistrationPageURL: l.GetRegistrationIndexURL(pds[0].Package.Name), + Count: len(pds), + Lower: normalizeVersion(pds[0].SemVer), + Upper: normalizeVersion(pds[len(pds)-1].SemVer), + Items: items, + }, + }, + } +} + +func createRegistrationIndexPageItem(l *linkBuilder, pd *packages_model.PackageDescriptor) *RegistrationIndexPageItem { + metadata := pd.Metadata.(*nuget_module.Metadata) + + return &RegistrationIndexPageItem{ + RegistrationLeafURL: l.GetRegistrationLeafURL(pd.Package.Name, pd.Version.Version), + PackageContentURL: l.GetPackageDownloadURL(pd.Package.Name, pd.Version.Version), + CatalogEntry: &CatalogEntry{ + CatalogLeafURL: l.GetRegistrationLeafURL(pd.Package.Name, pd.Version.Version), + PackageContentURL: l.GetPackageDownloadURL(pd.Package.Name, pd.Version.Version), + ID: pd.Package.Name, + Version: pd.Version.Version, + Description: metadata.Description, + ReleaseNotes: metadata.ReleaseNotes, + Authors: metadata.Authors, + ProjectURL: metadata.ProjectURL, + DependencyGroups: createDependencyGroups(pd), + }, + } +} + +func createDependencyGroups(pd *packages_model.PackageDescriptor) []*PackageDependencyGroup { + metadata := pd.Metadata.(*nuget_module.Metadata) + + dependencyGroups := make([]*PackageDependencyGroup, 0, len(metadata.Dependencies)) + for k, v := range metadata.Dependencies { + dependencies := make([]*PackageDependency, 0, len(v)) + for _, dep := range v { + dependencies = append(dependencies, &PackageDependency{ + ID: dep.ID, + Range: dep.Version, + }) + } + + dependencyGroups = append(dependencyGroups, &PackageDependencyGroup{ + TargetFramework: k, + Dependencies: dependencies, + }) + } + return dependencyGroups +} + +// RegistrationLeafResponse https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#registration-leaf +type RegistrationLeafResponse struct { + RegistrationLeafURL string `json:"@id"` + Type []string `json:"@type"` + Listed bool `json:"listed"` + PackageContentURL string `json:"packageContent"` + Published time.Time `json:"published"` + RegistrationIndexURL string `json:"registration"` +} + +func createRegistrationLeafResponse(l *linkBuilder, pd *packages_model.PackageDescriptor) *RegistrationLeafResponse { + return &RegistrationLeafResponse{ + Type: []string{"Package", 
"http://schema.nuget.org/catalog#Permalink"}, + Listed: true, + Published: time.Unix(int64(pd.Version.CreatedUnix), 0), + RegistrationLeafURL: l.GetRegistrationLeafURL(pd.Package.Name, pd.Version.Version), + PackageContentURL: l.GetPackageDownloadURL(pd.Package.Name, pd.Version.Version), + RegistrationIndexURL: l.GetRegistrationIndexURL(pd.Package.Name), + } +} + +// PackageVersionsResponse https://docs.microsoft.com/en-us/nuget/api/package-base-address-resource#response +type PackageVersionsResponse struct { + Versions []string `json:"versions"` +} + +func createPackageVersionsResponse(pds []*packages_model.PackageDescriptor) *PackageVersionsResponse { + versions := make([]string, 0, len(pds)) + for _, pd := range pds { + versions = append(versions, normalizeVersion(pd.SemVer)) + } + + return &PackageVersionsResponse{ + Versions: versions, + } +} + +// SearchResultResponse https://docs.microsoft.com/en-us/nuget/api/search-query-service-resource#response +type SearchResultResponse struct { + TotalHits int64 `json:"totalHits"` + Data []*SearchResult `json:"data"` +} + +// SearchResult https://docs.microsoft.com/en-us/nuget/api/search-query-service-resource#search-result +type SearchResult struct { + ID string `json:"id"` + Version string `json:"version"` + Versions []*SearchResultVersion `json:"versions"` + Description string `json:"description"` + Authors string `json:"authors"` + ProjectURL string `json:"projectURL"` + RegistrationIndexURL string `json:"registration"` +} + +// SearchResultVersion https://docs.microsoft.com/en-us/nuget/api/search-query-service-resource#search-result +type SearchResultVersion struct { + RegistrationLeafURL string `json:"@id"` + Version string `json:"version"` + Downloads int64 `json:"downloads"` +} + +func createSearchResultResponse(l *linkBuilder, totalHits int64, pds []*packages_model.PackageDescriptor) *SearchResultResponse { + data := make([]*SearchResult, 0, len(pds)) + + if len(pds) > 0 { + groupID := pds[0].Package.Name + group := make([]*packages_model.PackageDescriptor, 0, 10) + + for i := 0; i < len(pds); i++ { + if groupID != pds[i].Package.Name { + data = append(data, createSearchResult(l, group)) + groupID = pds[i].Package.Name + group = group[:0] + } + group = append(group, pds[i]) + } + data = append(data, createSearchResult(l, group)) + } + + return &SearchResultResponse{ + TotalHits: totalHits, + Data: data, + } +} + +func createSearchResult(l *linkBuilder, pds []*packages_model.PackageDescriptor) *SearchResult { + latest := pds[0] + versions := make([]*SearchResultVersion, 0, len(pds)) + for _, pd := range pds { + if latest.SemVer.LessThan(pd.SemVer) { + latest = pd + } + + versions = append(versions, &SearchResultVersion{ + RegistrationLeafURL: l.GetRegistrationLeafURL(pd.Package.Name, pd.Version.Version), + Version: pd.Version.Version, + }) + } + + metadata := latest.Metadata.(*nuget_module.Metadata) + + return &SearchResult{ + ID: latest.Package.Name, + Version: latest.Version.Version, + Versions: versions, + Description: metadata.Description, + Authors: metadata.Authors, + ProjectURL: metadata.ProjectURL, + RegistrationIndexURL: l.GetRegistrationIndexURL(latest.Package.Name), + } +} + +// normalizeVersion removes the metadata +func normalizeVersion(v *version.Version) string { + var buf bytes.Buffer + segments := v.Segments64() + fmt.Fprintf(&buf, "%d.%d.%d", segments[0], segments[1], segments[2]) + pre := v.Prerelease() + if pre != "" { + fmt.Fprintf(&buf, "-%s", pre) + } + return buf.String() +} diff --git 
a/routers/api/packages/nuget/links.go b/routers/api/packages/nuget/links.go new file mode 100644 index 0000000000..f782c7f2cb --- /dev/null +++ b/routers/api/packages/nuget/links.go @@ -0,0 +1,28 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package nuget + +import ( + "fmt" +) + +type linkBuilder struct { + Base string +} + +// GetRegistrationIndexURL builds the registration index url +func (l *linkBuilder) GetRegistrationIndexURL(id string) string { + return fmt.Sprintf("%s/registration/%s/index.json", l.Base, id) +} + +// GetRegistrationLeafURL builds the registration leaf url +func (l *linkBuilder) GetRegistrationLeafURL(id, version string) string { + return fmt.Sprintf("%s/registration/%s/%s.json", l.Base, id, version) +} + +// GetPackageDownloadURL builds the download url +func (l *linkBuilder) GetPackageDownloadURL(id, version string) string { + return fmt.Sprintf("%s/package/%s/%s/%s.%s.nupkg", l.Base, id, version, id, version) +} diff --git a/routers/api/packages/nuget/nuget.go b/routers/api/packages/nuget/nuget.go new file mode 100644 index 0000000000..013c0c1e33 --- /dev/null +++ b/routers/api/packages/nuget/nuget.go @@ -0,0 +1,415 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package nuget + +import ( + "errors" + "fmt" + "io" + "net/http" + "strings" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + packages_module "code.gitea.io/gitea/modules/packages" + nuget_module "code.gitea.io/gitea/modules/packages/nuget" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" +) + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + ctx.JSON(status, map[string]string{ + "Message": message, + }) + }) +} + +// ServiceIndex https://docs.microsoft.com/en-us/nuget/api/service-index +func ServiceIndex(ctx *context.Context) { + resp := createServiceIndexResponse(setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/nuget") + + ctx.JSON(http.StatusOK, resp) +} + +// SearchService https://docs.microsoft.com/en-us/nuget/api/search-query-service-resource#search-for-packages +func SearchService(ctx *context.Context) { + pvs, count, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Type: packages_model.TypeNuGet, + Name: packages_model.SearchValue{Value: ctx.FormTrim("q")}, + Paginator: db.NewAbsoluteListOptions( + ctx.FormInt("skip"), + ctx.FormInt("take"), + ), + }) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + resp := createSearchResultResponse( + &linkBuilder{setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/nuget"}, + count, + pds, + ) + + ctx.JSON(http.StatusOK, resp) +} + +// RegistrationIndex https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#registration-index +func RegistrationIndex(ctx *context.Context) { + packageName := ctx.Params("id") + + pvs, err := 
packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeNuGet, packageName) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pvs) == 0 { + apiError(ctx, http.StatusNotFound, err) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + resp := createRegistrationIndexResponse( + &linkBuilder{setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/nuget"}, + pds, + ) + + ctx.JSON(http.StatusOK, resp) +} + +// RegistrationLeaf https://docs.microsoft.com/en-us/nuget/api/registration-base-url-resource#registration-leaf +func RegistrationLeaf(ctx *context.Context) { + packageName := ctx.Params("id") + packageVersion := strings.TrimSuffix(ctx.Params("version"), ".json") + + pv, err := packages_model.GetVersionByNameAndVersion(db.DefaultContext, ctx.Package.Owner.ID, packages_model.TypeNuGet, packageName, packageVersion) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + pd, err := packages_model.GetPackageDescriptor(ctx, pv) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + resp := createRegistrationLeafResponse( + &linkBuilder{setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/nuget"}, + pd, + ) + + ctx.JSON(http.StatusOK, resp) +} + +// EnumeratePackageVersions https://docs.microsoft.com/en-us/nuget/api/package-base-address-resource#enumerate-package-versions +func EnumeratePackageVersions(ctx *context.Context) { + packageName := ctx.Params("id") + + pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeNuGet, packageName) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pvs) == 0 { + apiError(ctx, http.StatusNotFound, err) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + resp := createPackageVersionsResponse(pds) + + ctx.JSON(http.StatusOK, resp) +} + +// DownloadPackageFile https://docs.microsoft.com/en-us/nuget/api/package-base-address-resource#download-package-content-nupkg +func DownloadPackageFile(ctx *context.Context) { + packageName := ctx.Params("id") + packageVersion := ctx.Params("version") + filename := ctx.Params("filename") + + s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion( + ctx, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeNuGet, + Name: packageName, + Version: packageVersion, + }, + &packages_service.PackageFileInfo{ + Filename: filename, + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} + +// UploadPackage creates a new package with the metadata contained in the uploaded nupgk file +// https://docs.microsoft.com/en-us/nuget/api/package-publish-resource#push-a-package +func UploadPackage(ctx *context.Context) { + np, buf, closables := processUploadedFile(ctx, nuget_module.DependencyPackage) + defer func() { + for _, c := range closables { + c.Close() + } + }() + if np == nil { + return + } + + _, 
_, err := packages_service.CreatePackageAndAddFile( + &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeNuGet, + Name: np.ID, + Version: np.Version, + }, + SemverCompatible: true, + Creator: ctx.Doer, + Metadata: np.Metadata, + }, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: strings.ToLower(fmt.Sprintf("%s.%s.nupkg", np.ID, np.Version)), + }, + Data: buf, + IsLead: true, + }, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageVersion { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusCreated) +} + +// UploadSymbolPackage adds a symbol package to an existing package +// https://docs.microsoft.com/en-us/nuget/api/symbol-package-publish-resource +func UploadSymbolPackage(ctx *context.Context) { + np, buf, closables := processUploadedFile(ctx, nuget_module.SymbolsPackage) + defer func() { + for _, c := range closables { + c.Close() + } + }() + if np == nil { + return + } + + pdbs, err := nuget_module.ExtractPortablePdb(buf, buf.Size()) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + defer pdbs.Close() + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + pi := &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeNuGet, + Name: np.ID, + Version: np.Version, + } + + _, _, err = packages_service.AddFileToExistingPackage( + pi, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: strings.ToLower(fmt.Sprintf("%s.%s.snupkg", np.ID, np.Version)), + }, + Data: buf, + IsLead: false, + }, + ) + if err != nil { + switch err { + case packages_model.ErrPackageNotExist: + apiError(ctx, http.StatusNotFound, err) + case packages_model.ErrDuplicatePackageFile: + apiError(ctx, http.StatusBadRequest, err) + default: + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + for _, pdb := range pdbs { + _, _, err := packages_service.AddFileToExistingPackage( + pi, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: strings.ToLower(pdb.Name), + CompositeKey: strings.ToLower(pdb.ID), + }, + Data: pdb.Content, + IsLead: false, + Properties: map[string]string{ + nuget_module.PropertySymbolID: strings.ToLower(pdb.ID), + }, + }, + ) + if err != nil { + switch err { + case packages_model.ErrDuplicatePackageFile: + apiError(ctx, http.StatusBadRequest, err) + default: + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + } + + ctx.Status(http.StatusCreated) +} + +func processUploadedFile(ctx *context.Context, expectedType nuget_module.PackageType) (*nuget_module.Package, *packages_module.HashedBuffer, []io.Closer) { + closables := make([]io.Closer, 0, 2) + + upload, close, err := ctx.UploadStream() + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return nil, nil, closables + } + + if close { + closables = append(closables, upload) + } + + buf, err := packages_module.CreateHashedBufferFromReader(upload, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return nil, nil, closables + } + closables = append(closables, buf) + + np, err := nuget_module.ParsePackageMetaData(buf, buf.Size()) + if err != nil { + if err == 
nuget_module.ErrMissingNuspecFile || err == nuget_module.ErrNuspecFileTooLarge || err == nuget_module.ErrNuspecInvalidID || err == nuget_module.ErrNuspecInvalidVersion { + apiError(ctx, http.StatusBadRequest, err) + } else { + apiError(ctx, http.StatusInternalServerError, err) + } + return nil, nil, closables + } + if np.PackageType != expectedType { + apiError(ctx, http.StatusBadRequest, errors.New("unexpected package type")) + return nil, nil, closables + } + if _, err := buf.Seek(0, io.SeekStart); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return nil, nil, closables + } + return np, buf, closables +} + +// DownloadSymbolFile https://github.com/dotnet/symstore/blob/main/docs/specs/Simple_Symbol_Query_Protocol.md#request +func DownloadSymbolFile(ctx *context.Context) { + filename := ctx.Params("filename") + guid := ctx.Params("guid") + filename2 := ctx.Params("filename2") + + if filename != filename2 { + apiError(ctx, http.StatusBadRequest, nil) + return + } + + pfs, _, err := packages_model.SearchFiles(ctx, &packages_model.PackageFileSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + PackageType: string(packages_model.TypeNuGet), + Query: filename, + Properties: map[string]string{ + nuget_module.PropertySymbolID: strings.ToLower(guid), + }, + }) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pfs) != 1 { + apiError(ctx, http.StatusNotFound, nil) + return + } + + s, _, err := packages_service.GetPackageFileStream(ctx, pfs[0]) + if err != nil { + if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pfs[0].Name) +} + +// DeletePackage hard deletes the package +// https://docs.microsoft.com/en-us/nuget/api/package-publish-resource#delete-a-package +func DeletePackage(ctx *context.Context) { + packageName := ctx.Params("id") + packageVersion := ctx.Params("version") + + err := packages_service.RemovePackageVersionByNameAndVersion( + ctx.Doer, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeNuGet, + Name: packageName, + Version: packageVersion, + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + } +} diff --git a/routers/api/packages/pypi/pypi.go b/routers/api/packages/pypi/pypi.go new file mode 100644 index 0000000000..9209c4edd5 --- /dev/null +++ b/routers/api/packages/pypi/pypi.go @@ -0,0 +1,174 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
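The NuGet handlers added above all follow the same error-to-status convention: a missing package or file maps to 404, a duplicate version or file to 400, and anything unexpected to 500. A minimal, self-contained sketch of that mapping, using stand-in sentinel errors rather than the real code.gitea.io/gitea/models/packages values:

```go
package main

import (
	"errors"
	"fmt"
	"net/http"
)

// Stand-ins for the sentinel errors of the packages model; the real values
// live in code.gitea.io/gitea/models/packages.
var (
	errPackageNotExist         = errors.New("package does not exist")
	errPackageFileNotExist     = errors.New("package file does not exist")
	errDuplicatePackageVersion = errors.New("duplicate package version")
	errDuplicatePackageFile    = errors.New("duplicate package file")
)

// statusFor mirrors the if/switch chains in the handlers: "not found" errors
// become 404, duplicates become 400, anything else is reported as 500.
func statusFor(err error) int {
	switch {
	case err == nil:
		return http.StatusOK
	case errors.Is(err, errPackageNotExist), errors.Is(err, errPackageFileNotExist):
		return http.StatusNotFound
	case errors.Is(err, errDuplicatePackageVersion), errors.Is(err, errDuplicatePackageFile):
		return http.StatusBadRequest
	default:
		return http.StatusInternalServerError
	}
}

func main() {
	for _, err := range []error{nil, errPackageNotExist, errDuplicatePackageVersion, errors.New("db down")} {
		fmt.Println(statusFor(err)) // 200, 404, 400, 500
	}
}
```

The handlers themselves compare the sentinels with ==; errors.Is behaves identically for unwrapped sentinel values, so the sketch keeps the same semantics.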
+ +package pypi + +import ( + "fmt" + "io" + "net/http" + "regexp" + "strings" + + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + packages_module "code.gitea.io/gitea/modules/packages" + pypi_module "code.gitea.io/gitea/modules/packages/pypi" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/templates" + "code.gitea.io/gitea/modules/validation" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" +) + +// https://www.python.org/dev/peps/pep-0503/#normalized-names +var normalizer = strings.NewReplacer(".", "-", "_", "-") +var nameMatcher = regexp.MustCompile(`\A[a-z0-9\.\-_]+\z`) + +// https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions +var versionMatcher = regexp.MustCompile(`^([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$`) + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + ctx.PlainText(status, message) + }) +} + +// PackageMetadata returns the metadata for a single package +func PackageMetadata(ctx *context.Context) { + packageName := normalizer.Replace(ctx.Params("id")) + + pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypePyPI, packageName) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pvs) == 0 { + apiError(ctx, http.StatusNotFound, err) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Data["RegistryURL"] = setting.AppURL + "api/packages/" + ctx.Package.Owner.Name + "/pypi" + ctx.Data["PackageDescriptor"] = pds[0] + ctx.Data["PackageDescriptors"] = pds + ctx.Render = templates.HTMLRenderer() + ctx.HTML(http.StatusOK, "api/packages/pypi/simple") +} + +// DownloadPackageFile serves the content of a package +func DownloadPackageFile(ctx *context.Context) { + packageName := normalizer.Replace(ctx.Params("id")) + packageVersion := ctx.Params("version") + filename := ctx.Params("filename") + + s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion( + ctx, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypePyPI, + Name: packageName, + Version: packageVersion, + }, + &packages_service.PackageFileInfo{ + Filename: filename, + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} + +// UploadPackageFile adds a file to the package. If the package does not exist, it gets created. 
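The PyPI routes above validate names and versions with two regular expressions taken from PEP 503 and PEP 440, after folding dots and underscores in the name to dashes. A standalone sketch, with the patterns reproduced verbatim from the patch, showing what they accept:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Same normalizer and patterns as declared in pypi.go above.
var (
	normalizer     = strings.NewReplacer(".", "-", "_", "-")
	nameMatcher    = regexp.MustCompile(`\A[a-z0-9\.\-_]+\z`)
	versionMatcher = regexp.MustCompile(`^([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$`)
)

func main() {
	// Dots and underscores in the requested package name are folded to
	// dashes before the name is matched.
	name := normalizer.Replace("typing_extensions")
	fmt.Println(name, nameMatcher.MatchString(name)) // typing-extensions true

	// PEP 440 style versions pass, arbitrary strings do not.
	for _, v := range []string{"1.21.0", "4.0.0rc1", "1.0.post2", "1.0-beta"} {
		fmt.Println(v, versionMatcher.MatchString(v)) // true, true, true, false
	}
}
```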
+func UploadPackageFile(ctx *context.Context) { + file, fileHeader, err := ctx.Req.FormFile("content") + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + defer file.Close() + + buf, err := packages_module.CreateHashedBufferFromReader(file, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + _, _, hashSHA256, _ := buf.Sums() + + if !strings.EqualFold(ctx.Req.FormValue("sha256_digest"), fmt.Sprintf("%x", hashSHA256)) { + apiError(ctx, http.StatusBadRequest, "hash mismatch") + return + } + + if _, err := buf.Seek(0, io.SeekStart); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + packageName := normalizer.Replace(ctx.Req.FormValue("name")) + packageVersion := ctx.Req.FormValue("version") + if !nameMatcher.MatchString(packageName) || !versionMatcher.MatchString(packageVersion) { + apiError(ctx, http.StatusBadRequest, "invalid name or version") + return + } + + projectURL := ctx.Req.FormValue("home_page") + if !validation.IsValidURL(projectURL) { + projectURL = "" + } + + _, _, err = packages_service.CreatePackageOrAddFileToExisting( + &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypePyPI, + Name: packageName, + Version: packageVersion, + }, + SemverCompatible: true, + Creator: ctx.Doer, + Metadata: &pypi_module.Metadata{ + Author: ctx.Req.FormValue("author"), + Description: ctx.Req.FormValue("description"), + LongDescription: ctx.Req.FormValue("long_description"), + Summary: ctx.Req.FormValue("summary"), + ProjectURL: projectURL, + License: ctx.Req.FormValue("license"), + RequiresPython: ctx.Req.FormValue("requires_python"), + }, + }, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: fileHeader.Filename, + }, + Data: buf, + IsLead: true, + }, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageFile { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusCreated) +} diff --git a/routers/api/packages/rubygems/rubygems.go b/routers/api/packages/rubygems/rubygems.go new file mode 100644 index 0000000000..6fdd03e8ea --- /dev/null +++ b/routers/api/packages/rubygems/rubygems.go @@ -0,0 +1,294 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
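The UploadPackageFile handler above rejects an upload unless the sha256_digest form field matches, case-insensitively, the hex SHA-256 it computes over the received bytes (via fmt.Sprintf("%x", ...)). A client therefore has to submit exactly that digest; a small sketch of producing it, using a hypothetical wheel path:

```go
package main

import (
	"crypto/sha256"
	"fmt"
	"io"
	"os"
)

// sha256Hex returns the hex-encoded SHA-256 of a file, in the same
// fmt.Sprintf("%x", ...) form the handler compares against.
func sha256Hex(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return "", err
	}
	return fmt.Sprintf("%x", h.Sum(nil)), nil
}

func main() {
	// Hypothetical artifact path, purely for illustration.
	digest, err := sha256Hex("dist/example-1.0.0-py3-none-any.whl")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(digest) // value to send as the sha256_digest form field
}
```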
+ +package rubygems + +import ( + "compress/gzip" + "compress/zlib" + "fmt" + "io" + "net/http" + "strings" + + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + packages_module "code.gitea.io/gitea/modules/packages" + rubygems_module "code.gitea.io/gitea/modules/packages/rubygems" + "code.gitea.io/gitea/routers/api/packages/helper" + packages_service "code.gitea.io/gitea/services/packages" +) + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + ctx.PlainText(status, message) + }) +} + +// EnumeratePackages serves the package list +func EnumeratePackages(ctx *context.Context) { + packages, err := packages_model.GetVersionsByPackageType(ctx, ctx.Package.Owner.ID, packages_model.TypeRubyGems) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + enumeratePackages(ctx, "specs.4.8", packages) +} + +// EnumeratePackagesLatest serves the list of the lastest version of every package +func EnumeratePackagesLatest(ctx *context.Context) { + pvs, _, err := packages_model.SearchLatestVersions(ctx, &packages_model.PackageSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Type: packages_model.TypeRubyGems, + }) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + enumeratePackages(ctx, "latest_specs.4.8", pvs) +} + +// EnumeratePackagesPreRelease is not supported and serves an empty list +func EnumeratePackagesPreRelease(ctx *context.Context) { + enumeratePackages(ctx, "prerelease_specs.4.8", []*packages_model.PackageVersion{}) +} + +func enumeratePackages(ctx *context.Context, filename string, pvs []*packages_model.PackageVersion) { + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + specs := make([]interface{}, 0, len(pds)) + for _, p := range pds { + specs = append(specs, []interface{}{ + p.Package.Name, + &rubygems_module.RubyUserMarshal{ + Name: "Gem::Version", + Value: []string{p.Version.Version}, + }, + p.Metadata.(*rubygems_module.Metadata).Platform, + }) + } + + ctx.SetServeHeaders(filename + ".gz") + + zw := gzip.NewWriter(ctx.Resp) + defer zw.Close() + + zw.Name = filename + + if err := rubygems_module.NewMarshalEncoder(zw).Encode(specs); err != nil { + ctx.ServerError("Download file failed", err) + } +} + +// ServePackageSpecification serves the compressed Gemspec file of a package +func ServePackageSpecification(ctx *context.Context) { + filename := ctx.Params("filename") + + if !strings.HasSuffix(filename, ".gemspec.rz") { + apiError(ctx, http.StatusNotImplemented, nil) + return + } + + pvs, err := getVersionsByFilename(ctx, filename[:len(filename)-10]+"gem") + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if len(pvs) != 1 { + apiError(ctx, http.StatusNotFound, nil) + return + } + + pd, err := packages_model.GetPackageDescriptor(ctx, pvs[0]) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.SetServeHeaders(filename) + + zw := zlib.NewWriter(ctx.Resp) + defer zw.Close() + + metadata := pd.Metadata.(*rubygems_module.Metadata) + + // create a Ruby Gem::Specification object + spec := &rubygems_module.RubyUserDef{ + Name: "Gem::Specification", + Value: []interface{}{ + "3.2.3", // @rubygems_version + 4, // @specification_version, + pd.Package.Name, + &rubygems_module.RubyUserMarshal{ + Name: "Gem::Version", + Value: 
[]string{pd.Version.Version}, + }, + nil, // date + metadata.Summary, // @summary + nil, // @required_ruby_version + nil, // @required_rubygems_version + metadata.Platform, // @original_platform + []interface{}{}, // @dependencies + nil, // rubyforge_project + "", // @email + metadata.Authors, + metadata.Description, + metadata.ProjectURL, + true, // has_rdoc + metadata.Platform, // @new_platform + nil, + metadata.Licenses, + }, + } + + if err := rubygems_module.NewMarshalEncoder(zw).Encode(spec); err != nil { + ctx.ServerError("Download file failed", err) + } +} + +// DownloadPackageFile serves the content of a package +func DownloadPackageFile(ctx *context.Context) { + filename := ctx.Params("filename") + + pvs, err := getVersionsByFilename(ctx, filename) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if len(pvs) != 1 { + apiError(ctx, http.StatusNotFound, nil) + return + } + + s, pf, err := packages_service.GetFileStreamByPackageVersion( + ctx, + pvs[0], + &packages_service.PackageFileInfo{ + Filename: filename, + }, + ) + if err != nil { + if err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} + +// UploadPackageFile adds a file to the package. If the package does not exist, it gets created. +func UploadPackageFile(ctx *context.Context) { + upload, close, err := ctx.UploadStream() + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + if close { + defer upload.Close() + } + + buf, err := packages_module.CreateHashedBufferFromReader(upload, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + rp, err := rubygems_module.ParsePackageMetaData(buf) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if _, err := buf.Seek(0, io.SeekStart); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + var filename string + if rp.Metadata.Platform == "" || rp.Metadata.Platform == "ruby" { + filename = strings.ToLower(fmt.Sprintf("%s-%s.gem", rp.Name, rp.Version)) + } else { + filename = strings.ToLower(fmt.Sprintf("%s-%s-%s.gem", rp.Name, rp.Version, rp.Metadata.Platform)) + } + + _, _, err = packages_service.CreatePackageAndAddFile( + &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeRubyGems, + Name: rp.Name, + Version: rp.Version, + }, + SemverCompatible: true, + Creator: ctx.Doer, + Metadata: rp.Metadata, + }, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: filename, + }, + Data: buf, + IsLead: true, + }, + ) + if err != nil { + if err == packages_model.ErrDuplicatePackageVersion { + apiError(ctx, http.StatusBadRequest, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.Status(http.StatusCreated) +} + +// DeletePackage deletes a package +func DeletePackage(ctx *context.Context) { + // Go populates the form only for POST, PUT and PATCH requests + if err := ctx.Req.ParseMultipartForm(32 << 20); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + packageName := ctx.FormString("gem_name") + packageVersion := ctx.FormString("version") + + err := packages_service.RemovePackageVersionByNameAndVersion( + ctx.Doer, + 
&packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeRubyGems, + Name: packageName, + Version: packageVersion, + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + } +} + +func getVersionsByFilename(ctx *context.Context, filename string) ([]*packages_model.PackageVersion, error) { + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Type: packages_model.TypeRubyGems, + HasFileWithName: filename, + }) + return pvs, err +} diff --git a/routers/api/v1/admin/adopt.go b/routers/api/v1/admin/adopt.go index db1754c8d0..3c39d7c2bc 100644 --- a/routers/api/v1/admin/adopt.go +++ b/routers/api/v1/admin/adopt.go @@ -110,7 +110,7 @@ func AdoptRepository(ctx *context.APIContext) { ctx.NotFound() return } - if _, err := repo_service.AdoptRepository(ctx.User, ctxUser, models.CreateRepoOptions{ + if _, err := repo_service.AdoptRepository(ctx.Doer, ctxUser, models.CreateRepoOptions{ Name: repoName, IsPrivate: true, }); err != nil { @@ -173,7 +173,7 @@ func DeleteUnadoptedRepository(ctx *context.APIContext) { return } - if err := repo_service.DeleteUnadoptedRepository(ctx.User, ctxUser, repoName); err != nil { + if err := repo_service.DeleteUnadoptedRepository(ctx.Doer, ctxUser, repoName); err != nil { ctx.InternalServerError(err) return } diff --git a/routers/api/v1/admin/cron.go b/routers/api/v1/admin/cron.go index 1476872a90..0c4333b892 100644 --- a/routers/api/v1/admin/cron.go +++ b/routers/api/v1/admin/cron.go @@ -81,7 +81,7 @@ func PostCronTask(ctx *context.APIContext) { return } task.Run() - log.Trace("Cron Task %s started by admin(%s)", task.Name, ctx.User.Name) + log.Trace("Cron Task %s started by admin(%s)", task.Name, ctx.Doer.Name) ctx.Status(http.StatusNoContent) } diff --git a/routers/api/v1/admin/org.go b/routers/api/v1/admin/org.go index aaa27afb9e..727f3193cf 100644 --- a/routers/api/v1/admin/org.go +++ b/routers/api/v1/admin/org.go @@ -8,14 +8,13 @@ package admin import ( "net/http" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web" - "code.gitea.io/gitea/routers/api/v1/user" "code.gitea.io/gitea/routers/api/v1/utils" ) @@ -45,18 +44,15 @@ func CreateOrg(ctx *context.APIContext) { // "$ref": "#/responses/forbidden" // "422": // "$ref": "#/responses/validationError" + form := web.GetForm(ctx).(*api.CreateOrgOption) - u := user.GetUserByParams(ctx) - if ctx.Written() { - return - } visibility := api.VisibleTypePublic if form.Visibility != "" { visibility = api.VisibilityModes[form.Visibility] } - org := &models.Organization{ + org := &organization.Organization{ Name: form.UserName, FullName: form.FullName, Description: form.Description, @@ -67,7 +63,7 @@ func CreateOrg(ctx *context.APIContext) { Visibility: visibility, } - if err := models.CreateOrganization(org, u); err != nil { + if err := organization.CreateOrganization(org, ctx.ContextUser); err != nil { if user_model.IsErrUserAlreadyExist(err) || db.IsErrNameReserved(err) || db.IsErrNameCharsNotAllowed(err) || @@ -107,7 +103,7 @@ func GetAllOrgs(ctx *context.APIContext) { listOptions := utils.GetListOptions(ctx) users, maxResults, err := 
user_model.SearchUsers(&user_model.SearchUserOptions{ - Actor: ctx.User, + Actor: ctx.Doer, Type: user_model.UserTypeOrganization, OrderBy: db.SearchOrderByAlphabetically, ListOptions: listOptions, @@ -119,7 +115,7 @@ func GetAllOrgs(ctx *context.APIContext) { } orgs := make([]*api.Organization, len(users)) for i := range users { - orgs[i] = convert.ToOrganization(models.OrgFromUser(users[i])) + orgs[i] = convert.ToOrganization(organization.OrgFromUser(users[i])) } ctx.SetLinkHeader(int(maxResults), listOptions.PageSize) diff --git a/routers/api/v1/admin/repo.go b/routers/api/v1/admin/repo.go index 467f8a22ff..712ced89c9 100644 --- a/routers/api/v1/admin/repo.go +++ b/routers/api/v1/admin/repo.go @@ -9,7 +9,6 @@ import ( api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/api/v1/repo" - "code.gitea.io/gitea/routers/api/v1/user" ) // CreateRepo api for creating a repository @@ -42,11 +41,8 @@ func CreateRepo(ctx *context.APIContext) { // "$ref": "#/responses/error" // "422": // "$ref": "#/responses/validationError" - form := web.GetForm(ctx).(*api.CreateRepoOption) - owner := user.GetUserByParams(ctx) - if ctx.Written() { - return - } - repo.CreateUserRepo(ctx, owner, *form) + form := web.GetForm(ctx).(*api.CreateRepoOption) + + repo.CreateUserRepo(ctx, ctx.ContextUser, *form) } diff --git a/routers/api/v1/admin/user.go b/routers/api/v1/admin/user.go index 0ecebad5d7..6263a67048 100644 --- a/routers/api/v1/admin/user.go +++ b/routers/api/v1/admin/user.go @@ -22,6 +22,7 @@ import ( "code.gitea.io/gitea/modules/password" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/api/v1/user" "code.gitea.io/gitea/routers/api/v1/utils" @@ -73,6 +74,7 @@ func CreateUser(ctx *context.APIContext) { // "$ref": "#/responses/forbidden" // "422": // "$ref": "#/responses/validationError" + form := web.GetForm(ctx).(*api.CreateUserOption) u := &user_model.User{ @@ -81,7 +83,6 @@ func CreateUser(ctx *context.APIContext) { Email: form.Email, Passwd: form.Password, MustChangePassword: true, - IsActive: true, LoginType: auth.Plain, } if form.MustChangePassword != nil { @@ -107,11 +108,17 @@ func CreateUser(ctx *context.APIContext) { return } - var overwriteDefault *user_model.CreateUserOverwriteOptions + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsActive: util.OptionalBoolTrue, + } + + if form.Restricted != nil { + overwriteDefault.IsRestricted = util.OptionalBoolOf(*form.Restricted) + } + if form.Visibility != "" { - overwriteDefault = &user_model.CreateUserOverwriteOptions{ - Visibility: api.VisibilityModes[form.Visibility], - } + visibility := api.VisibilityModes[form.Visibility] + overwriteDefault.Visibility = &visibility } if err := user_model.CreateUser(u, overwriteDefault); err != nil { @@ -119,6 +126,7 @@ func CreateUser(ctx *context.APIContext) { user_model.IsErrEmailAlreadyUsed(err) || db.IsErrNameReserved(err) || db.IsErrNameCharsNotAllowed(err) || + user_model.IsErrEmailCharIsNotSupported(err) || user_model.IsErrEmailInvalid(err) || db.IsErrNamePatternNotAllowed(err) { ctx.Error(http.StatusUnprocessableEntity, "", err) @@ -127,13 +135,13 @@ func CreateUser(ctx *context.APIContext) { } return } - log.Trace("Account created by admin (%s): %s", ctx.User.Name, u.Name) + log.Trace("Account created by admin (%s): %s", ctx.Doer.Name, u.Name) // Send email notification. 
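The CreateUser change above moves IsActive and IsRestricted onto tri-state OptionalBool overwrite options, so that "not specified" stays distinguishable from an explicit false. A standalone sketch of that tri-state idea with an invented optBool type, not the real code.gitea.io/gitea/modules/util API:

```go
package main

import "fmt"

// optBool is a stand-in for a tri-state boolean: unset, true or false.
type optBool int

const (
	optNone optBool = iota
	optTrue
	optFalse
)

// optOf converts a plain bool into the tri-state form, mirroring how the
// patch maps *form.Restricted onto the overwrite options.
func optOf(b bool) optBool {
	if b {
		return optTrue
	}
	return optFalse
}

// apply only overrides the current value when the option was actually set.
func apply(current bool, o optBool) bool {
	switch o {
	case optTrue:
		return true
	case optFalse:
		return false
	default:
		return current // unset: keep the existing value
	}
}

func main() {
	fmt.Println(apply(true, optNone))      // true  - nothing specified, keep default
	fmt.Println(apply(true, optOf(false))) // false - explicit override
}
```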
if form.SendNotify { mailer.SendRegisterNotifyMail(u) } - ctx.JSON(http.StatusCreated, convert.ToUser(u, ctx.User)) + ctx.JSON(http.StatusCreated, convert.ToUser(u, ctx.Doer)) } // EditUser api for modifying a user's information @@ -162,13 +170,10 @@ func EditUser(ctx *context.APIContext) { // "$ref": "#/responses/forbidden" // "422": // "$ref": "#/responses/validationError" - form := web.GetForm(ctx).(*api.EditUserOption) - u := user.GetUserByParams(ctx) - if ctx.Written() { - return - } - parseAuthSource(ctx, u, form.SourceID, form.LoginName) + form := web.GetForm(ctx).(*api.EditUserOption) + + parseAuthSource(ctx, ctx.ContextUser, form.SourceID, form.LoginName) if ctx.Written() { return } @@ -192,24 +197,24 @@ func EditUser(ctx *context.APIContext) { ctx.Error(http.StatusBadRequest, "PasswordPwned", errors.New("PasswordPwned")) return } - if u.Salt, err = user_model.GetUserSalt(); err != nil { + if ctx.ContextUser.Salt, err = user_model.GetUserSalt(); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateUser", err) return } - if err = u.SetPassword(form.Password); err != nil { + if err = ctx.ContextUser.SetPassword(form.Password); err != nil { ctx.InternalServerError(err) return } } if form.MustChangePassword != nil { - u.MustChangePassword = *form.MustChangePassword + ctx.ContextUser.MustChangePassword = *form.MustChangePassword } - u.LoginName = form.LoginName + ctx.ContextUser.LoginName = form.LoginName if form.FullName != nil { - u.FullName = *form.FullName + ctx.ContextUser.FullName = *form.FullName } var emailChanged bool if form.Email != nil { @@ -224,57 +229,59 @@ func EditUser(ctx *context.APIContext) { return } - emailChanged = !strings.EqualFold(u.Email, email) - u.Email = email + emailChanged = !strings.EqualFold(ctx.ContextUser.Email, email) + ctx.ContextUser.Email = email } if form.Website != nil { - u.Website = *form.Website + ctx.ContextUser.Website = *form.Website } if form.Location != nil { - u.Location = *form.Location + ctx.ContextUser.Location = *form.Location } if form.Description != nil { - u.Description = *form.Description + ctx.ContextUser.Description = *form.Description } if form.Active != nil { - u.IsActive = *form.Active + ctx.ContextUser.IsActive = *form.Active } if len(form.Visibility) != 0 { - u.Visibility = api.VisibilityModes[form.Visibility] + ctx.ContextUser.Visibility = api.VisibilityModes[form.Visibility] } if form.Admin != nil { - u.IsAdmin = *form.Admin + ctx.ContextUser.IsAdmin = *form.Admin } if form.AllowGitHook != nil { - u.AllowGitHook = *form.AllowGitHook + ctx.ContextUser.AllowGitHook = *form.AllowGitHook } if form.AllowImportLocal != nil { - u.AllowImportLocal = *form.AllowImportLocal + ctx.ContextUser.AllowImportLocal = *form.AllowImportLocal } if form.MaxRepoCreation != nil { - u.MaxRepoCreation = *form.MaxRepoCreation + ctx.ContextUser.MaxRepoCreation = *form.MaxRepoCreation } if form.AllowCreateOrganization != nil { - u.AllowCreateOrganization = *form.AllowCreateOrganization + ctx.ContextUser.AllowCreateOrganization = *form.AllowCreateOrganization } if form.ProhibitLogin != nil { - u.ProhibitLogin = *form.ProhibitLogin + ctx.ContextUser.ProhibitLogin = *form.ProhibitLogin } if form.Restricted != nil { - u.IsRestricted = *form.Restricted + ctx.ContextUser.IsRestricted = *form.Restricted } - if err := user_model.UpdateUser(u, emailChanged); err != nil { - if user_model.IsErrEmailAlreadyUsed(err) || user_model.IsErrEmailInvalid(err) { + if err := user_model.UpdateUser(ctx.ContextUser, emailChanged); err != nil { + if 
user_model.IsErrEmailAlreadyUsed(err) || + user_model.IsErrEmailCharIsNotSupported(err) || + user_model.IsErrEmailInvalid(err) { ctx.Error(http.StatusUnprocessableEntity, "", err) } else { ctx.Error(http.StatusInternalServerError, "UpdateUser", err) } return } - log.Trace("Account profile updated by admin (%s): %s", ctx.User.Name, u.Name) + log.Trace("Account profile updated by admin (%s): %s", ctx.Doer.Name, ctx.ContextUser.Name) - ctx.JSON(http.StatusOK, convert.ToUser(u, ctx.User)) + ctx.JSON(http.StatusOK, convert.ToUser(ctx.ContextUser, ctx.Doer)) } // DeleteUser api for deleting a user @@ -298,26 +305,28 @@ func DeleteUser(ctx *context.APIContext) { // "422": // "$ref": "#/responses/validationError" - u := user.GetUserByParams(ctx) - if ctx.Written() { + if ctx.ContextUser.IsOrganization() { + ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("%s is an organization not a user", ctx.ContextUser.Name)) return } - if u.IsOrganization() { - ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("%s is an organization not a user", u.Name)) + // admin should not delete themself + if ctx.ContextUser.ID == ctx.Doer.ID { + ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("you cannot delete yourself")) return } - if err := user_service.DeleteUser(u); err != nil { + if err := user_service.DeleteUser(ctx.ContextUser); err != nil { if models.IsErrUserOwnRepos(err) || - models.IsErrUserHasOrgs(err) { + models.IsErrUserHasOrgs(err) || + models.IsErrUserOwnPackages(err) { ctx.Error(http.StatusUnprocessableEntity, "", err) } else { ctx.Error(http.StatusInternalServerError, "DeleteUser", err) } return } - log.Trace("Account deleted by admin(%s): %s", ctx.User.Name, u.Name) + log.Trace("Account deleted by admin(%s): %s", ctx.Doer.Name, ctx.ContextUser.Name) ctx.Status(http.StatusNoContent) } @@ -348,12 +357,10 @@ func CreatePublicKey(ctx *context.APIContext) { // "$ref": "#/responses/forbidden" // "422": // "$ref": "#/responses/validationError" + form := web.GetForm(ctx).(*api.CreateKeyOption) - u := user.GetUserByParams(ctx) - if ctx.Written() { - return - } - user.CreateUserPublicKey(ctx, *form, u.ID) + + user.CreateUserPublicKey(ctx, *form, ctx.ContextUser.ID) } // DeleteUserPublicKey api for deleting a user's public key @@ -383,12 +390,7 @@ func DeleteUserPublicKey(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" - u := user.GetUserByParams(ctx) - if ctx.Written() { - return - } - - if err := asymkey_service.DeletePublicKey(u, ctx.ParamsInt64(":id")); err != nil { + if err := asymkey_service.DeletePublicKey(ctx.ContextUser, ctx.ParamsInt64(":id")); err != nil { if asymkey_model.IsErrKeyNotExist(err) { ctx.NotFound() } else if asymkey_model.IsErrKeyAccessDenied(err) { @@ -398,7 +400,7 @@ func DeleteUserPublicKey(ctx *context.APIContext) { } return } - log.Trace("Key deleted by admin(%s): %s", ctx.User.Name, u.Name) + log.Trace("Key deleted by admin(%s): %s", ctx.Doer.Name, ctx.ContextUser.Name) ctx.Status(http.StatusNoContent) } @@ -428,7 +430,7 @@ func GetAllUsers(ctx *context.APIContext) { listOptions := utils.GetListOptions(ctx) users, maxResults, err := user_model.SearchUsers(&user_model.SearchUserOptions{ - Actor: ctx.User, + Actor: ctx.Doer, Type: user_model.UserTypeIndividual, OrderBy: db.SearchOrderByAlphabetically, ListOptions: listOptions, @@ -440,7 +442,7 @@ func GetAllUsers(ctx *context.APIContext) { results := make([]*api.User, len(users)) for i := range users { - results[i] = convert.ToUser(users[i], ctx.User) + results[i] = 
convert.ToUser(users[i], ctx.Doer) } ctx.SetLinkHeader(int(maxResults), listOptions.PageSize) diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index fb1af75ec5..0134790e2b 100644 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -71,6 +71,8 @@ import ( "strings" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" + "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -84,10 +86,12 @@ import ( "code.gitea.io/gitea/routers/api/v1/misc" "code.gitea.io/gitea/routers/api/v1/notify" "code.gitea.io/gitea/routers/api/v1/org" + "code.gitea.io/gitea/routers/api/v1/packages" "code.gitea.io/gitea/routers/api/v1/repo" "code.gitea.io/gitea/routers/api/v1/settings" "code.gitea.io/gitea/routers/api/v1/user" "code.gitea.io/gitea/services/auth" + context_service "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/forms" _ "code.gitea.io/gitea/routers/api/v1/swagger" // for swagger generation @@ -104,7 +108,7 @@ func sudo() func(ctx *context.APIContext) { } if len(sudo) > 0 { - if ctx.IsSigned && ctx.User.IsAdmin { + if ctx.IsSigned && ctx.Doer.IsAdmin { user, err := user_model.GetUserByName(sudo) if err != nil { if user_model.IsErrUserNotExist(err) { @@ -114,8 +118,8 @@ func sudo() func(ctx *context.APIContext) { } return } - log.Trace("Sudo from (%s) to: %s", ctx.User.Name, user.Name) - ctx.User = user + log.Trace("Sudo from (%s) to: %s", ctx.Doer.Name, user.Name) + ctx.Doer = user } else { ctx.JSON(http.StatusForbidden, map[string]string{ "message": "Only administrators allowed to sudo.", @@ -137,8 +141,8 @@ func repoAssignment() func(ctx *context.APIContext) { ) // Check if the user is the same as the repository owner. - if ctx.IsSigned && ctx.User.LowerName == strings.ToLower(userName) { - owner = ctx.User + if ctx.IsSigned && ctx.Doer.LowerName == strings.ToLower(userName) { + owner = ctx.Doer } else { owner, err = user_model.GetUserByName(userName) if err != nil { @@ -157,6 +161,7 @@ func repoAssignment() func(ctx *context.APIContext) { } } ctx.Repo.Owner = owner + ctx.ContextUser = owner // Get repository. repo, err := repo_model.GetRepositoryByName(owner.ID, repoName) @@ -179,7 +184,7 @@ func repoAssignment() func(ctx *context.APIContext) { repo.Owner = owner ctx.Repo.Repository = repo - ctx.Repo.Permission, err = models.GetUserRepoPermission(repo, ctx.User) + ctx.Repo.Permission, err = models.GetUserRepoPermission(ctx, repo, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err) return @@ -192,6 +197,15 @@ func repoAssignment() func(ctx *context.APIContext) { } } +func reqPackageAccess(accessMode perm.AccessMode) func(ctx *context.APIContext) { + return func(ctx *context.APIContext) { + if ctx.Package.AccessMode < accessMode && !ctx.IsUserSiteAdmin() { + ctx.Error(http.StatusForbidden, "reqPackageAccess", "user should have specific permission or be a site admin") + return + } + } +} + // Contexter middleware already checks token for user sign in process. 
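reqPackageAccess above is a middleware factory: it captures the required access level and returns a per-request check, which the package routes later instantiate once with read access for the whole group and once with write access for deletion. A self-contained sketch of the same shape over plain net/http, with an invented accessLevel type standing in for perm.AccessMode:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

type accessLevel int

const (
	accessRead accessLevel = iota + 1
	accessWrite
)

// requireAccess returns a middleware that rejects requests whose granted
// level is below the required one, the same shape as reqPackageAccess.
func requireAccess(required accessLevel, granted func(*http.Request) accessLevel) func(http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			if granted(r) < required {
				http.Error(w, "insufficient package access", http.StatusForbidden)
				return
			}
			next.ServeHTTP(w, r)
		})
	}
}

func main() {
	// For illustration only: resolve the granted level from a request header.
	granted := func(r *http.Request) accessLevel {
		if r.Header.Get("X-Demo-Access") == "write" {
			return accessWrite
		}
		return accessRead
	}

	list := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { fmt.Fprintln(w, "list of packages") })
	remove := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { fmt.Fprintln(w, "package deleted") })

	mux := http.NewServeMux()
	mux.Handle("/packages", requireAccess(accessRead, granted)(list))            // readable with read access
	mux.Handle("/packages/delete", requireAccess(accessWrite, granted)(remove)) // needs write access

	log.Fatal(http.ListenAndServe(":8080", mux))
}
```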
func reqToken() func(ctx *context.APIContext) { return func(ctx *context.APIContext) { @@ -203,7 +217,6 @@ func reqToken() func(ctx *context.APIContext) { return } if ctx.IsSigned { - ctx.RequireCSRF() return } ctx.Error(http.StatusUnauthorized, "reqToken", "token is required") @@ -271,6 +284,15 @@ func reqRepoWriter(unitTypes ...unit.Type) func(ctx *context.APIContext) { } } +// reqRepoBranchWriter user should have a permission to write to a branch, or be a site admin +func reqRepoBranchWriter(ctx *context.APIContext) { + options, ok := web.GetForm(ctx).(api.FileOptionInterface) + if !ok || (!ctx.Repo.CanWriteToBranch(ctx.Doer, options.Branch()) && !ctx.IsUserSiteAdmin()) { + ctx.Error(http.StatusForbidden, "reqRepoBranchWriter", "user should have a permission to write to this branch") + return + } +} + // reqRepoReader user should have specific read permission or be a repo admin or a site admin func reqRepoReader(unitType unit.Type) func(ctx *context.APIContext) { return func(ctx *context.APIContext) { @@ -308,7 +330,7 @@ func reqOrgOwnership() func(ctx *context.APIContext) { return } - isOwner, err := models.IsOrganizationOwner(orgID, ctx.User.ID) + isOwner, err := organization.IsOrganizationOwner(ctx, orgID, ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrganizationOwner", err) return @@ -335,7 +357,7 @@ func reqTeamMembership() func(ctx *context.APIContext) { } orgID := ctx.Org.Team.OrgID - isOwner, err := models.IsOrganizationOwner(orgID, ctx.User.ID) + isOwner, err := organization.IsOrganizationOwner(ctx, orgID, ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrganizationOwner", err) return @@ -343,11 +365,11 @@ func reqTeamMembership() func(ctx *context.APIContext) { return } - if isTeamMember, err := models.IsTeamMember(orgID, ctx.Org.Team.ID, ctx.User.ID); err != nil { + if isTeamMember, err := organization.IsTeamMember(ctx, orgID, ctx.Org.Team.ID, ctx.Doer.ID); err != nil { ctx.Error(http.StatusInternalServerError, "IsTeamMember", err) return } else if !isTeamMember { - isOrgMember, err := models.IsOrganizationMember(orgID, ctx.User.ID) + isOrgMember, err := organization.IsOrganizationMember(ctx, orgID, ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrganizationMember", err) } else if isOrgMember { @@ -377,7 +399,7 @@ func reqOrgMembership() func(ctx *context.APIContext) { return } - if isMember, err := models.IsOrganizationMember(orgID, ctx.User.ID); err != nil { + if isMember, err := organization.IsOrganizationMember(ctx, orgID, ctx.Doer.ID); err != nil { ctx.Error(http.StatusInternalServerError, "IsOrganizationMember", err) return } else if !isMember { @@ -393,7 +415,7 @@ func reqOrgMembership() func(ctx *context.APIContext) { func reqGitHook() func(ctx *context.APIContext) { return func(ctx *context.APIContext) { - if !ctx.User.CanEditGitHook() { + if !ctx.Doer.CanEditGitHook() { ctx.Error(http.StatusForbidden, "", "must be allowed to edit Git hooks") return } @@ -426,9 +448,9 @@ func orgAssignment(args ...bool) func(ctx *context.APIContext) { var err error if assignOrg { - ctx.Org.Organization, err = models.GetOrgByName(ctx.Params(":org")) + ctx.Org.Organization, err = organization.GetOrgByName(ctx.Params(":org")) if err != nil { - if models.IsErrOrgNotExist(err) { + if organization.IsErrOrgNotExist(err) { redirectUserID, err := user_model.LookupUserRedirect(ctx.Params(":org")) if err == nil { context.RedirectToUser(ctx.Context, ctx.Params(":org"), redirectUserID) @@ -442,12 +464,13 @@ func 
orgAssignment(args ...bool) func(ctx *context.APIContext) { } return } + ctx.ContextUser = ctx.Org.Organization.AsUser() } if assignTeam { - ctx.Org.Team, err = models.GetTeamByID(ctx.ParamsInt64(":teamid")) + ctx.Org.Team, err = organization.GetTeamByID(ctx.ParamsInt64(":teamid")) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.NotFound() } else { ctx.Error(http.StatusInternalServerError, "GetTeamById", err) @@ -464,7 +487,7 @@ func mustEnableIssues(ctx *context.APIContext) { if ctx.IsSigned { log.Trace("Permission Denied: User %-v cannot read %-v in Repo %-v\n"+ "User in Repo has Permissions: %-+v", - ctx.User, + ctx.Doer, unit.TypeIssues, ctx.Repo.Repository, ctx.Repo.Permission) @@ -487,7 +510,7 @@ func mustAllowPulls(ctx *context.APIContext) { if ctx.IsSigned { log.Trace("Permission Denied: User %-v cannot read %-v in Repo %-v\n"+ "User in Repo has Permissions: %-+v", - ctx.User, + ctx.Doer, unit.TypePullRequests, ctx.Repo.Repository, ctx.Repo.Permission) @@ -511,7 +534,7 @@ func mustEnableIssuesOrPulls(ctx *context.APIContext) { if ctx.IsSigned { log.Trace("Permission Denied: User %-v cannot read %-v and %-v in Repo %-v\n"+ "User in Repo has Permissions: %-+v", - ctx.User, + ctx.Doer, unit.TypeIssues, unit.TypePullRequests, ctx.Repo.Repository, @@ -561,11 +584,28 @@ func bind(obj interface{}) http.HandlerFunc { }) } -// Routes registers all v1 APIs routes to web application. -func Routes(sessioner func(http.Handler) http.Handler) *web.Route { - m := web.NewRoute() +// The OAuth2 plugin is expected to be executed first, as it must ignore the user id stored +// in the session (if there is a user id stored in session other plugins might return the user +// object for that id). +// +// The Session plugin is expected to be executed second, in order to skip authentication +// for users that have already signed in. +func buildAuthGroup() *auth.Group { + group := auth.NewGroup( + &auth.OAuth2{}, + &auth.Basic{}, // FIXME: this should be removed once we don't allow basic auth in API + ) + if setting.Service.EnableReverseProxyAuth { + group.Add(&auth.ReverseProxy{}) + } + specialAdd(group) - m.Use(sessioner) + return group +} + +// Routes registers all v1 APIs routes to web application. +func Routes() *web.Route { + m := web.NewRoute() m.Use(securityHeaders()) if setting.CORSConfig.Enabled { @@ -575,14 +615,19 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { // setting.CORSConfig.AllowSubdomain // FIXME: the cors middleware needs allowSubdomain option AllowedMethods: setting.CORSConfig.Methods, AllowCredentials: setting.CORSConfig.AllowCredentials, - AllowedHeaders: []string{"Authorization", "X-CSRFToken", "X-Gitea-OTP"}, + AllowedHeaders: []string{"Authorization", "X-Gitea-OTP"}, MaxAge: int(setting.CORSConfig.MaxAge.Seconds()), })) } m.Use(context.APIContexter()) + group := buildAuthGroup() + if err := group.Init(); err != nil { + log.Error("Could not initialize '%s' auth method, error: %s", group.Name(), err) + } + // Get user from session if logged in. 
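buildAuthGroup above collects the API auth methods into one ordered group, and the surrounding comments stress that the order is significant (OAuth2 before the session, SSPI last). The underlying idea is a first-match chain; a sketch under a deliberately simplified method interface, not Gitea's actual auth.Group API:

```go
package main

import (
	"fmt"
	"net/http"
)

// method is a simplified stand-in for an auth method: it either identifies a
// user for the request or reports no match with "".
type method interface {
	Verify(r *http.Request) string
}

// headerAuth "authenticates" by reading a single header and returning its value.
type headerAuth struct{ header string }

func (h headerAuth) Verify(r *http.Request) string {
	return r.Header.Get(h.header)
}

// group tries every method in registration order and stops at the first hit,
// which is why the order the methods are added in matters.
type group struct{ methods []method }

func (g *group) Add(m method) {
	g.methods = append(g.methods, m)
}

func (g *group) Verify(r *http.Request) string {
	for _, m := range g.methods {
		if user := m.Verify(r); user != "" {
			return user
		}
	}
	return ""
}

func main() {
	g := &group{}
	g.Add(headerAuth{"Authorization"})    // consulted first
	g.Add(headerAuth{"X-Forwarded-User"}) // only reached if the first method misses

	req, _ := http.NewRequest(http.MethodGet, "/", nil)
	req.Header.Set("X-Forwarded-User", "alice")
	fmt.Println(g.Verify(req)) // alice
}
```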
- m.Use(context.APIAuth(auth.NewGroup(auth.Methods()...))) + m.Use(context.APIAuth(group)) m.Use(context.ToggleAPI(&context.ToggleOptions{ SignInRequired: setting.Service.RequireSignInView, @@ -641,7 +686,7 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { Post(bind(api.CreateAccessTokenOption{}), user.CreateAccessToken) m.Combo("/{id}").Delete(user.DeleteAccessToken) }, reqBasicOrRevProxyAuth()) - }) + }, context_service.UserAssignmentAPI()) }) m.Group("/users", func() { @@ -658,7 +703,7 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { m.Get("/starred", user.GetStarredRepos) m.Get("/subscriptions", user.GetWatchedRepos) - }) + }, context_service.UserAssignmentAPI()) }, reqToken()) m.Group("/user", func() { @@ -674,7 +719,11 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { m.Get("/followers", user.ListMyFollowers) m.Group("/following", func() { m.Get("", user.ListMyFollowing) - m.Combo("/{username}").Get(user.CheckMyFollowing).Put(user.Follow).Delete(user.Unfollow) + m.Group("/{username}", func() { + m.Get("", user.CheckMyFollowing) + m.Put("", user.Follow) + m.Delete("", user.Unfollow) + }, context_service.UserAssignmentAPI()) }) m.Group("/keys", func() { @@ -761,14 +810,17 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { m.Combo("").Get(repo.GetHook). Patch(bind(api.EditHookOption{}), repo.EditHook). Delete(repo.DeleteHook) - m.Post("/tests", context.RepoRefForAPI, repo.TestHook) + m.Post("/tests", context.ReferencesGitRepo(), context.RepoRefForAPI, repo.TestHook) }) }, reqToken(), reqAdmin(), reqWebhooksEnabled()) m.Group("/collaborators", func() { m.Get("", reqAnyRepoReader(), repo.ListCollaborators) - m.Combo("/{collaborator}").Get(reqAnyRepoReader(), repo.IsCollaborator). - Put(reqAdmin(), bind(api.AddCollaboratorOption{}), repo.AddCollaborator). - Delete(reqAdmin(), repo.DeleteCollaborator) + m.Group("/{collaborator}", func() { + m.Combo("").Get(reqAnyRepoReader(), repo.IsCollaborator). + Put(reqAdmin(), bind(api.AddCollaboratorOption{}), repo.AddCollaborator). + Delete(reqAdmin(), repo.DeleteCollaborator) + m.Get("/permission", repo.GetRepoPermissions) + }, reqToken()) }, reqToken()) m.Get("/assignees", reqToken(), reqAnyRepoReader(), repo.GetAssignees) m.Get("/reviewers", reqToken(), reqAnyRepoReader(), repo.GetReviewers) @@ -778,16 +830,16 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { Put(reqAdmin(), repo.AddTeam). Delete(reqAdmin(), repo.DeleteTeam) }, reqToken()) - m.Get("/raw/*", context.RepoRefForAPI, reqRepoReader(unit.TypeCode), repo.GetRawFile) + m.Get("/raw/*", context.ReferencesGitRepo(), context.RepoRefForAPI, reqRepoReader(unit.TypeCode), repo.GetRawFile) m.Get("/archive/*", reqRepoReader(unit.TypeCode), repo.GetArchive) m.Combo("/forks").Get(repo.ListForks). 
Post(reqToken(), reqRepoReader(unit.TypeCode), bind(api.CreateForkOption{}), repo.CreateFork) m.Group("/branches", func() { - m.Get("", context.ReferencesGitRepo(false), repo.ListBranches) - m.Get("/*", context.ReferencesGitRepo(false), repo.GetBranch) - m.Delete("/*", reqRepoWriter(unit.TypeCode), context.ReferencesGitRepo(false), repo.DeleteBranch) - m.Post("", reqRepoWriter(unit.TypeCode), context.ReferencesGitRepo(false), bind(api.CreateBranchRepoOption{}), repo.CreateBranch) - }, reqRepoReader(unit.TypeCode)) + m.Get("", repo.ListBranches) + m.Get("/*", repo.GetBranch) + m.Delete("/*", reqRepoWriter(unit.TypeCode), repo.DeleteBranch) + m.Post("", reqRepoWriter(unit.TypeCode), bind(api.CreateBranchRepoOption{}), repo.CreateBranch) + }, context.ReferencesGitRepo(), reqRepoReader(unit.TypeCode)) m.Group("/branch_protections", func() { m.Get("", repo.ListBranchProtections) m.Post("", bind(api.CreateBranchProtectionOption{}), repo.CreateBranchProtection) @@ -906,10 +958,10 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { }) m.Group("/releases", func() { m.Combo("").Get(repo.ListReleases). - Post(reqToken(), reqRepoWriter(unit.TypeReleases), context.ReferencesGitRepo(false), bind(api.CreateReleaseOption{}), repo.CreateRelease) + Post(reqToken(), reqRepoWriter(unit.TypeReleases), context.ReferencesGitRepo(), bind(api.CreateReleaseOption{}), repo.CreateRelease) m.Group("/{id}", func() { m.Combo("").Get(repo.GetRelease). - Patch(reqToken(), reqRepoWriter(unit.TypeReleases), context.ReferencesGitRepo(false), bind(api.EditReleaseOption{}), repo.EditRelease). + Patch(reqToken(), reqRepoWriter(unit.TypeReleases), context.ReferencesGitRepo(), bind(api.EditReleaseOption{}), repo.EditRelease). Delete(reqToken(), reqRepoWriter(unit.TypeReleases), repo.DeleteRelease) m.Group("/assets", func() { m.Combo("").Get(repo.ListReleaseAttachments). @@ -926,7 +978,7 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { }) }, reqRepoReader(unit.TypeReleases)) m.Post("/mirror-sync", reqToken(), reqRepoWriter(unit.TypeCode), repo.MirrorSync) - m.Get("/editorconfig/{filename}", context.RepoRefForAPI, reqRepoReader(unit.TypeCode), repo.GetEditorconfig) + m.Get("/editorconfig/{filename}", context.ReferencesGitRepo(), context.RepoRefForAPI, reqRepoReader(unit.TypeCode), repo.GetEditorconfig) m.Group("/pulls", func() { m.Combo("").Get(repo.ListPullRequests). Post(reqToken(), mustNotBeArchived, bind(api.CreatePullRequestOption{}), repo.CreatePullRequest) @@ -937,7 +989,8 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { m.Post("/update", reqToken(), repo.UpdatePullRequest) m.Get("/commits", repo.GetPullRequestCommits) m.Combo("/merge").Get(repo.IsPullRequestMerged). - Post(reqToken(), mustNotBeArchived, bind(forms.MergePullRequestForm{}), repo.MergePullRequest) + Post(reqToken(), mustNotBeArchived, bind(forms.MergePullRequestForm{}), repo.MergePullRequest). + Delete(reqToken(), mustNotBeArchived, repo.CancelScheduledAutoMerge) m.Group("/reviews", func() { m.Combo(""). Get(repo.ListPullReviews). @@ -957,39 +1010,39 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { Delete(reqToken(), bind(api.PullReviewRequestOptions{}), repo.DeleteReviewRequests). 
Post(reqToken(), bind(api.PullReviewRequestOptions{}), repo.CreateReviewRequests) }) - }, mustAllowPulls, reqRepoReader(unit.TypeCode), context.ReferencesGitRepo(false)) + }, mustAllowPulls, reqRepoReader(unit.TypeCode), context.ReferencesGitRepo()) m.Group("/statuses", func() { m.Combo("/{sha}").Get(repo.GetCommitStatuses). Post(reqToken(), bind(api.CreateStatusOption{}), repo.NewCommitStatus) }, reqRepoReader(unit.TypeCode)) m.Group("/commits", func() { - m.Get("", context.ReferencesGitRepo(false), repo.GetAllCommits) + m.Get("", context.ReferencesGitRepo(), repo.GetAllCommits) m.Group("/{ref}", func() { m.Get("/status", repo.GetCombinedCommitStatusByRef) m.Get("/statuses", repo.GetCommitStatusesByRef) - }) + }, context.ReferencesGitRepo()) }, reqRepoReader(unit.TypeCode)) m.Group("/git", func() { m.Group("/commits", func() { - m.Get("/{sha}", context.ReferencesGitRepo(false), repo.GetSingleCommit) + m.Get("/{sha}", repo.GetSingleCommit) m.Get("/{sha}.{diffType:diff|patch}", repo.DownloadCommitDiffOrPatch) }) m.Get("/refs", repo.GetGitAllRefs) m.Get("/refs/*", repo.GetGitRefs) - m.Get("/trees/{sha}", context.RepoRefForAPI, repo.GetTree) - m.Get("/blobs/{sha}", context.RepoRefForAPI, repo.GetBlob) - m.Get("/tags/{sha}", context.RepoRefForAPI, repo.GetAnnotatedTag) + m.Get("/trees/{sha}", repo.GetTree) + m.Get("/blobs/{sha}", repo.GetBlob) + m.Get("/tags/{sha}", repo.GetAnnotatedTag) m.Get("/notes/{sha}", repo.GetNote) - }, reqRepoReader(unit.TypeCode)) + }, context.ReferencesGitRepo(), reqRepoReader(unit.TypeCode)) m.Post("/diffpatch", reqRepoWriter(unit.TypeCode), reqToken(), bind(api.ApplyDiffPatchFileOptions{}), repo.ApplyDiffPatch) m.Group("/contents", func() { m.Get("", repo.GetContentsList) m.Get("/*", repo.GetContents) m.Group("/*", func() { - m.Post("", bind(api.CreateFileOptions{}), repo.CreateFile) - m.Put("", bind(api.UpdateFileOptions{}), repo.UpdateFile) - m.Delete("", bind(api.DeleteFileOptions{}), repo.DeleteFile) - }, reqRepoWriter(unit.TypeCode), reqToken()) + m.Post("", bind(api.CreateFileOptions{}), reqRepoBranchWriter, repo.CreateFile) + m.Put("", bind(api.UpdateFileOptions{}), reqRepoBranchWriter, repo.UpdateFile) + m.Delete("", bind(api.DeleteFileOptions{}), reqRepoBranchWriter, repo.DeleteFile) + }, reqToken()) }, reqRepoReader(unit.TypeCode)) m.Get("/signing-key.gpg", misc.SigningKey) m.Group("/topics", func() { @@ -1000,17 +1053,26 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { Delete(reqToken(), repo.DeleteTopic) }, reqAdmin()) }, reqAnyRepoReader()) - m.Get("/issue_templates", context.ReferencesGitRepo(false), repo.GetIssueTemplates) + m.Get("/issue_templates", context.ReferencesGitRepo(), repo.GetIssueTemplates) m.Get("/languages", reqRepoReader(unit.TypeCode), repo.GetLanguages) }, repoAssignment()) }) + m.Group("/packages/{username}", func() { + m.Group("/{type}/{name}/{version}", func() { + m.Get("", packages.GetPackage) + m.Delete("", reqPackageAccess(perm.AccessModeWrite), packages.DeletePackage) + m.Get("/files", packages.ListPackageFiles) + }) + m.Get("/", packages.ListPackages) + }, context_service.UserAssignmentAPI(), context.PackageAssignmentAPI(), reqPackageAccess(perm.AccessModeRead)) + // Organizations m.Get("/user/orgs", reqToken(), org.ListMyOrgs) m.Group("/users/{username}/orgs", func() { m.Get("", org.ListUserOrgs) m.Get("/{org}/permissions", reqToken(), org.GetUserOrgsPermissions) - }) + }, context_service.UserAssignmentAPI()) m.Post("/orgs", reqToken(), bind(api.CreateOrgOption{}), org.Create) m.Get("/orgs", 
org.GetAll) m.Group("/orgs/{org}", func() { @@ -1065,7 +1127,8 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { m.Get("", org.GetTeamRepos) m.Combo("/{org}/{reponame}"). Put(org.AddTeamRepository). - Delete(org.RemoveTeamRepository) + Delete(org.RemoveTeamRepository). + Get(org.GetTeamRepo) }) }, orgAssignment(false, true), reqToken(), reqTeamMembership()) @@ -1088,7 +1151,7 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { m.Get("/orgs", org.ListUserOrgs) m.Post("/orgs", bind(api.CreateOrgOption{}), admin.CreateOrg) m.Post("/repos", bind(api.CreateRepoOption{}), admin.CreateRepo) - }) + }, context_service.UserAssignmentAPI()) }) m.Group("/unadopted", func() { m.Get("", admin.ListUnadoptedRepositories) diff --git a/routers/api/v1/auth.go b/routers/api/v1/auth.go new file mode 100644 index 0000000000..becf45f643 --- /dev/null +++ b/routers/api/v1/auth.go @@ -0,0 +1,11 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +//go:build !windows + +package v1 + +import auth_service "code.gitea.io/gitea/services/auth" + +func specialAdd(group *auth_service.Group) {} diff --git a/routers/api/v1/auth_windows.go b/routers/api/v1/auth_windows.go new file mode 100644 index 0000000000..d41c4bb223 --- /dev/null +++ b/routers/api/v1/auth_windows.go @@ -0,0 +1,20 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package v1 + +import ( + "code.gitea.io/gitea/models/auth" + auth_service "code.gitea.io/gitea/services/auth" +) + +// specialAdd registers the SSPI auth method as the last method in the list. +// The SSPI plugin is expected to be executed last, as it returns 401 status code if negotiation +// fails (or if negotiation should continue), which would prevent other authentication methods +// to execute at all. 
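The new auth.go / auth_windows.go pair introduced here relies on Go build constraints so that SSPI is only wired in on Windows: the non-Windows file ships a no-op specialAdd, the Windows file registers the method last. A minimal two-file sketch of the same pattern, with invented package and function names:

```go
// file: extras_other.go - compiled on every platform except Windows.

//go:build !windows

package extras

// platformAdd is a no-op where the platform-specific method is unavailable.
func platformAdd(methods *[]string) {}
```

```go
// file: extras_windows.go - compiled only on Windows.

//go:build windows

package extras

// platformAdd appends the Windows-only method as the last entry in the chain.
func platformAdd(methods *[]string) {
	*methods = append(*methods, "sspi")
}
```

Because exactly one of the two files is selected per target platform, callers can invoke platformAdd unconditionally and the build never sees both definitions at once.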
+func specialAdd(group *auth_service.Group) { + if auth.IsSSPIEnabled() { + group.Add(&auth_service.SSPI{}) + } +} diff --git a/routers/api/v1/misc/markdown_test.go b/routers/api/v1/misc/markdown_test.go index 349498e2ab..9beb88be16 100644 --- a/routers/api/v1/misc/markdown_test.go +++ b/routers/api/v1/misc/markdown_test.go @@ -37,6 +37,8 @@ func createContext(req *http.Request) (*context.Context, *httptest.ResponseRecor Render: rnd, Data: make(map[string]interface{}), } + defer c.Close() + return c, resp } diff --git a/routers/api/v1/misc/nodeinfo.go b/routers/api/v1/misc/nodeinfo.go index bc36fa1be1..ce1f9ec0f7 100644 --- a/routers/api/v1/misc/nodeinfo.go +++ b/routers/api/v1/misc/nodeinfo.go @@ -6,12 +6,17 @@ package misc import ( "net/http" + "time" + "code.gitea.io/gitea/models" + user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" ) +const cacheKeyNodeInfoUsage = "API_NodeInfoUsage" + // NodeInfo returns the NodeInfo for the Gitea instance to allow for federation func NodeInfo(ctx *context.APIContext) { // swagger:operation GET /nodeinfo miscellaneous getNodeInfo @@ -23,6 +28,37 @@ func NodeInfo(ctx *context.APIContext) { // "200": // "$ref": "#/responses/NodeInfo" + nodeInfoUsage := structs.NodeInfoUsage{} + if setting.Federation.ShareUserStatistics { + info, ok := ctx.Cache.Get(cacheKeyNodeInfoUsage).(structs.NodeInfoUsage) + if !ok { + usersTotal := int(user_model.CountUsers(nil)) + now := time.Now() + timeOneMonthAgo := now.AddDate(0, -1, 0).Unix() + timeHaveYearAgo := now.AddDate(0, -6, 0).Unix() + usersActiveMonth := int(user_model.CountUsers(&user_model.CountUserFilter{LastLoginSince: &timeOneMonthAgo})) + usersActiveHalfyear := int(user_model.CountUsers(&user_model.CountUserFilter{LastLoginSince: &timeHaveYearAgo})) + + allIssues, _ := models.CountIssues(&models.IssuesOptions{}) + allComments, _ := models.CountComments(&models.FindCommentsOptions{}) + + info = structs.NodeInfoUsage{ + Users: structs.NodeInfoUsageUsers{ + Total: usersTotal, + ActiveMonth: usersActiveMonth, + ActiveHalfyear: usersActiveHalfyear, + }, + LocalPosts: int(allIssues), + LocalComments: int(allComments), + } + if err := ctx.Cache.Put(cacheKeyNodeInfoUsage, nodeInfoUsage, 180); err != nil { + ctx.InternalServerError(err) + return + } + } + nodeInfoUsage = info + } + nodeInfo := &structs.NodeInfo{ Version: "2.1", Software: structs.NodeInfoSoftware{ @@ -34,12 +70,10 @@ func NodeInfo(ctx *context.APIContext) { Protocols: []string{"activitypub"}, Services: structs.NodeInfoServices{ Inbound: []string{}, - Outbound: []string{}, + Outbound: []string{"rss2.0"}, }, OpenRegistrations: setting.Service.ShowRegistrationButton, - Usage: structs.NodeInfoUsage{ - Users: structs.NodeInfoUsageUsers{}, - }, + Usage: nodeInfoUsage, } ctx.JSON(http.StatusOK, nodeInfo) } diff --git a/routers/api/v1/notify/notifications.go b/routers/api/v1/notify/notifications.go index 9dd9da85c5..c707cf4524 100644 --- a/routers/api/v1/notify/notifications.go +++ b/routers/api/v1/notify/notifications.go @@ -22,18 +22,18 @@ func NewAvailable(ctx *context.APIContext) { // responses: // "200": // "$ref": "#/responses/NotificationCount" - ctx.JSON(http.StatusOK, api.NotificationCount{New: models.CountUnread(ctx.User)}) + ctx.JSON(http.StatusOK, api.NotificationCount{New: models.CountUnread(ctx.Doer)}) } func getFindNotificationOptions(ctx *context.APIContext) *models.FindNotificationOptions { - before, since, err := 
utils.GetQueryBeforeSince(ctx) + before, since, err := context.GetQueryBeforeSince(ctx.Context) if err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return nil } opts := &models.FindNotificationOptions{ ListOptions: utils.GetListOptions(ctx), - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, UpdatedBeforeUnix: before, UpdatedAfterUnix: since, } diff --git a/routers/api/v1/notify/repo.go b/routers/api/v1/notify/repo.go index 30357ebd3f..0f6b90b05d 100644 --- a/routers/api/v1/notify/repo.go +++ b/routers/api/v1/notify/repo.go @@ -193,7 +193,7 @@ func ReadRepoNotifications(ctx *context.APIContext) { } opts := &models.FindNotificationOptions{ - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, RepoID: ctx.Repo.Repository.ID, UpdatedBeforeUnix: lastRead, } @@ -214,10 +214,10 @@ func ReadRepoNotifications(ctx *context.APIContext) { targetStatus = models.NotificationStatusRead } - changed := make([]*structs.NotificationThread, len(nl)) + changed := make([]*structs.NotificationThread, 0, len(nl)) for _, n := range nl { - notif, err := models.SetNotificationStatus(n.ID, ctx.User, targetStatus) + notif, err := models.SetNotificationStatus(n.ID, ctx.Doer, targetStatus) if err != nil { ctx.InternalServerError(err) return diff --git a/routers/api/v1/notify/threads.go b/routers/api/v1/notify/threads.go index 5bfdd4d963..4effd6b3e0 100644 --- a/routers/api/v1/notify/threads.go +++ b/routers/api/v1/notify/threads.go @@ -9,6 +9,7 @@ import ( "net/http" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" ) @@ -87,7 +88,7 @@ func ReadThread(ctx *context.APIContext) { targetStatus = models.NotificationStatusRead } - notif, err := models.SetNotificationStatus(n.ID, ctx.User, targetStatus) + notif, err := models.SetNotificationStatus(n.ID, ctx.Doer, targetStatus) if err != nil { ctx.InternalServerError(err) return @@ -102,14 +103,14 @@ func ReadThread(ctx *context.APIContext) { func getThread(ctx *context.APIContext) *models.Notification { n, err := models.GetNotificationByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrNotExist(err) { + if db.IsErrNotExist(err) { ctx.Error(http.StatusNotFound, "GetNotificationByID", err) } else { ctx.InternalServerError(err) } return nil } - if n.UserID != ctx.User.ID && !ctx.User.IsAdmin { + if n.UserID != ctx.Doer.ID && !ctx.Doer.IsAdmin { ctx.Error(http.StatusForbidden, "GetNotificationByID", fmt.Errorf("only user itself and admin are allowed to read/change this thread %d", n.ID)) return nil } diff --git a/routers/api/v1/notify/user.go b/routers/api/v1/notify/user.go index 6e4c19d1bf..ac3d0591d0 100644 --- a/routers/api/v1/notify/user.go +++ b/routers/api/v1/notify/user.go @@ -141,7 +141,7 @@ func ReadNotifications(ctx *context.APIContext) { } } opts := &models.FindNotificationOptions{ - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, UpdatedBeforeUnix: lastRead, } if !ctx.FormBool("all") { @@ -162,7 +162,7 @@ func ReadNotifications(ctx *context.APIContext) { changed := make([]*structs.NotificationThread, 0, len(nl)) for _, n := range nl { - notif, err := models.SetNotificationStatus(n.ID, ctx.User, targetStatus) + notif, err := models.SetNotificationStatus(n.ID, ctx.Doer, targetStatus) if err != nil { ctx.InternalServerError(err) return diff --git a/routers/api/v1/org/label.go b/routers/api/v1/org/label.go index 7d4c12d364..d36b1d9a98 100644 --- a/routers/api/v1/org/label.go +++ b/routers/api/v1/org/label.go @@ -99,7 +99,7 @@ func CreateLabel(ctx 
*context.APIContext) { OrgID: ctx.Org.Organization.ID, Description: form.Description, } - if err := models.NewLabel(label); err != nil { + if err := models.NewLabel(ctx, label); err != nil { ctx.Error(http.StatusInternalServerError, "NewLabel", err) return } diff --git a/routers/api/v1/org/member.go b/routers/api/v1/org/member.go index d818321790..85fe2ded4d 100644 --- a/routers/api/v1/org/member.go +++ b/routers/api/v1/org/member.go @@ -9,6 +9,7 @@ import ( "net/url" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/modules/setting" @@ -19,19 +20,19 @@ import ( // listMembers list an organization's members func listMembers(ctx *context.APIContext, publicOnly bool) { - opts := &models.FindOrgMembersOpts{ + opts := &organization.FindOrgMembersOpts{ OrgID: ctx.Org.Organization.ID, PublicOnly: publicOnly, ListOptions: utils.GetListOptions(ctx), } - count, err := models.CountOrgMembers(opts) + count, err := organization.CountOrgMembers(opts) if err != nil { ctx.InternalServerError(err) return } - members, _, err := models.FindOrgMembers(opts) + members, _, err := organization.FindOrgMembers(opts) if err != nil { ctx.InternalServerError(err) return @@ -39,7 +40,7 @@ func listMembers(ctx *context.APIContext, publicOnly bool) { apiMembers := make([]*api.User, len(members)) for i, member := range members { - apiMembers[i] = convert.ToUser(member, ctx.User) + apiMembers[i] = convert.ToUser(member, ctx.Doer) } ctx.SetTotalCountHeader(count) @@ -72,13 +73,13 @@ func ListMembers(ctx *context.APIContext) { // "$ref": "#/responses/UserList" publicOnly := true - if ctx.User != nil { - isMember, err := ctx.Org.Organization.IsOrgMember(ctx.User.ID) + if ctx.Doer != nil { + isMember, err := ctx.Org.Organization.IsOrgMember(ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrgMember", err) return } - publicOnly = !isMember && !ctx.User.IsAdmin + publicOnly = !isMember && !ctx.Doer.IsAdmin } listMembers(ctx, publicOnly) } @@ -130,7 +131,7 @@ func IsMember(ctx *context.APIContext) { // responses: // "204": // description: user is a member - // "302": + // "303": // description: redirection to /orgs/{org}/public_members/{username} // "404": // description: user is not a member @@ -139,12 +140,12 @@ func IsMember(ctx *context.APIContext) { if ctx.Written() { return } - if ctx.User != nil { - userIsMember, err := ctx.Org.Organization.IsOrgMember(ctx.User.ID) + if ctx.Doer != nil { + userIsMember, err := ctx.Org.Organization.IsOrgMember(ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrgMember", err) return - } else if userIsMember || ctx.User.IsAdmin { + } else if userIsMember || ctx.Doer.IsAdmin { userToCheckIsMember, err := ctx.Org.Organization.IsOrgMember(userToCheck.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrgMember", err) @@ -154,14 +155,14 @@ func IsMember(ctx *context.APIContext) { ctx.NotFound() } return - } else if ctx.User.ID == userToCheck.ID { + } else if ctx.Doer.ID == userToCheck.ID { ctx.NotFound() return } } redirectURL := setting.AppSubURL + "/api/v1/orgs/" + url.PathEscape(ctx.Org.Organization.Name) + "/public_members/" + url.PathEscape(userToCheck.Name) - ctx.Redirect(redirectURL, 302) + ctx.Redirect(redirectURL) } // IsPublicMember check if a user is a public member of an organization @@ -190,7 +191,7 @@ func IsPublicMember(ctx *context.APIContext) { if ctx.Written() { return } - is, err := 
models.IsPublicMembership(ctx.Org.Organization.ID, userToCheck.ID) + is, err := organization.IsPublicMembership(ctx.Org.Organization.ID, userToCheck.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsPublicMembership", err) return @@ -230,11 +231,11 @@ func PublicizeMember(ctx *context.APIContext) { if ctx.Written() { return } - if userToPublicize.ID != ctx.User.ID { + if userToPublicize.ID != ctx.Doer.ID { ctx.Error(http.StatusForbidden, "", "Cannot publicize another member") return } - err := models.ChangeOrgUserStatus(ctx.Org.Organization.ID, userToPublicize.ID, true) + err := organization.ChangeOrgUserStatus(ctx.Org.Organization.ID, userToPublicize.ID, true) if err != nil { ctx.Error(http.StatusInternalServerError, "ChangeOrgUserStatus", err) return @@ -270,11 +271,11 @@ func ConcealMember(ctx *context.APIContext) { if ctx.Written() { return } - if userToConceal.ID != ctx.User.ID { + if userToConceal.ID != ctx.Doer.ID { ctx.Error(http.StatusForbidden, "", "Cannot conceal another member") return } - err := models.ChangeOrgUserStatus(ctx.Org.Organization.ID, userToConceal.ID, false) + err := organization.ChangeOrgUserStatus(ctx.Org.Organization.ID, userToConceal.ID, false) if err != nil { ctx.Error(http.StatusInternalServerError, "ChangeOrgUserStatus", err) return @@ -308,8 +309,8 @@ func DeleteMember(ctx *context.APIContext) { if ctx.Written() { return } - if err := ctx.Org.Organization.RemoveMember(member.ID); err != nil { - ctx.Error(http.StatusInternalServerError, "RemoveMember", err) + if err := models.RemoveOrgUser(ctx.Org.Organization.ID, member.ID); err != nil { + ctx.Error(http.StatusInternalServerError, "RemoveOrgUser", err) } ctx.Status(http.StatusNoContent) } diff --git a/routers/api/v1/org/org.go b/routers/api/v1/org/org.go index 1f097225f2..d55a4a4514 100644 --- a/routers/api/v1/org/org.go +++ b/routers/api/v1/org/org.go @@ -8,8 +8,8 @@ package org import ( "net/http" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" @@ -23,19 +23,19 @@ import ( func listUserOrgs(ctx *context.APIContext, u *user_model.User) { listOptions := utils.GetListOptions(ctx) - showPrivate := ctx.IsSigned && (ctx.User.IsAdmin || ctx.User.ID == u.ID) + showPrivate := ctx.IsSigned && (ctx.Doer.IsAdmin || ctx.Doer.ID == u.ID) - opts := models.FindOrgOptions{ + opts := organization.FindOrgOptions{ ListOptions: listOptions, UserID: u.ID, IncludePrivate: showPrivate, } - orgs, err := models.FindOrgs(opts) + orgs, err := organization.FindOrgs(opts) if err != nil { ctx.Error(http.StatusInternalServerError, "FindOrgs", err) return } - maxResults, err := models.CountOrgs(opts) + maxResults, err := organization.CountOrgs(opts) if err != nil { ctx.Error(http.StatusInternalServerError, "CountOrgs", err) return @@ -71,7 +71,7 @@ func ListMyOrgs(ctx *context.APIContext) { // "200": // "$ref": "#/responses/OrganizationList" - listUserOrgs(ctx, ctx.User) + listUserOrgs(ctx, ctx.Doer) } // ListUserOrgs list user's orgs @@ -99,11 +99,7 @@ func ListUserOrgs(ctx *context.APIContext) { // "200": // "$ref": "#/responses/OrganizationList" - u := user.GetUserByParams(ctx) - if ctx.Written() { - return - } - listUserOrgs(ctx, u) + listUserOrgs(ctx, ctx.ContextUser) } // GetUserOrgsPermissions get user permissions in organization @@ -132,11 +128,6 @@ func GetUserOrgsPermissions(ctx *context.APIContext) { // "404": // "$ref": 
"#/responses/notFound" - var u *user_model.User - if u = user.GetUserByParams(ctx); u == nil { - return - } - var o *user_model.User if o = user.GetUserByParamsName(ctx, ":org"); o == nil { return @@ -144,13 +135,13 @@ func GetUserOrgsPermissions(ctx *context.APIContext) { op := api.OrganizationPermissions{} - if !models.HasOrgOrUserVisible(o, u) { + if !organization.HasOrgOrUserVisible(ctx, o, ctx.ContextUser) { ctx.NotFound("HasOrgOrUserVisible", nil) return } - org := models.OrgFromUser(o) - authorizeLevel, err := org.GetOrgUserMaxAuthorizeLevel(u.ID) + org := organization.OrgFromUser(o) + authorizeLevel, err := org.GetOrgUserMaxAuthorizeLevel(ctx.ContextUser.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "GetOrgUserAuthorizeLevel", err) return @@ -169,7 +160,7 @@ func GetUserOrgsPermissions(ctx *context.APIContext) { op.IsOwner = true } - op.CanCreateRepository, err = org.CanCreateOrgRepo(u.ID) + op.CanCreateRepository, err = org.CanCreateOrgRepo(ctx.ContextUser.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "CanCreateOrgRepo", err) return @@ -201,7 +192,7 @@ func GetAll(ctx *context.APIContext) { vMode := []api.VisibleType{api.VisibleTypePublic} if ctx.IsSigned { vMode = append(vMode, api.VisibleTypeLimited) - if ctx.User.IsAdmin { + if ctx.Doer.IsAdmin { vMode = append(vMode, api.VisibleTypePrivate) } } @@ -209,7 +200,7 @@ func GetAll(ctx *context.APIContext) { listOptions := utils.GetListOptions(ctx) publicOrgs, maxResults, err := user_model.SearchUsers(&user_model.SearchUserOptions{ - Actor: ctx.User, + Actor: ctx.Doer, ListOptions: listOptions, Type: user_model.UserTypeOrganization, OrderBy: db.SearchOrderByAlphabetically, @@ -221,7 +212,7 @@ func GetAll(ctx *context.APIContext) { } orgs := make([]*api.Organization, len(publicOrgs)) for i := range publicOrgs { - orgs[i] = convert.ToOrganization(models.OrgFromUser(publicOrgs[i])) + orgs[i] = convert.ToOrganization(organization.OrgFromUser(publicOrgs[i])) } ctx.SetLinkHeader(int(maxResults), listOptions.PageSize) @@ -251,7 +242,7 @@ func Create(ctx *context.APIContext) { // "422": // "$ref": "#/responses/validationError" form := web.GetForm(ctx).(*api.CreateOrgOption) - if !ctx.User.CanCreateOrganization() { + if !ctx.Doer.CanCreateOrganization() { ctx.Error(http.StatusForbidden, "Create organization not allowed", nil) return } @@ -261,7 +252,7 @@ func Create(ctx *context.APIContext) { visibility = api.VisibilityModes[form.Visibility] } - org := &models.Organization{ + org := &organization.Organization{ Name: form.UserName, FullName: form.FullName, Description: form.Description, @@ -272,7 +263,7 @@ func Create(ctx *context.APIContext) { Visibility: visibility, RepoAdminChangeTeamAccess: form.RepoAdminChangeTeamAccess, } - if err := models.CreateOrganization(org, ctx.User); err != nil { + if err := organization.CreateOrganization(org, ctx.Doer); err != nil { if user_model.IsErrUserAlreadyExist(err) || db.IsErrNameReserved(err) || db.IsErrNameCharsNotAllowed(err) || @@ -304,7 +295,7 @@ func Get(ctx *context.APIContext) { // "200": // "$ref": "#/responses/Organization" - if !models.HasOrgOrUserVisible(ctx.Org.Organization.AsUser(), ctx.User) { + if !organization.HasOrgOrUserVisible(ctx, ctx.Org.Organization.AsUser(), ctx.Doer) { ctx.NotFound("HasOrgOrUserVisible", nil) return } @@ -346,7 +337,7 @@ func Edit(ctx *context.APIContext) { if form.RepoAdminChangeTeamAccess != nil { org.RepoAdminChangeTeamAccess = *form.RepoAdminChangeTeamAccess } - if err := user_model.UpdateUserCols(db.DefaultContext, 
org.AsUser(), + if err := user_model.UpdateUserCols(ctx, org.AsUser(), "full_name", "description", "website", "location", "visibility", "repo_admin_change_team_access", ); err != nil { diff --git a/routers/api/v1/org/team.go b/routers/api/v1/org/team.go index 62e6c0a6b4..b24c8a6235 100644 --- a/routers/api/v1/org/team.go +++ b/routers/api/v1/org/team.go @@ -10,6 +10,7 @@ import ( "net/http" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" unit_model "code.gitea.io/gitea/models/unit" @@ -47,7 +48,7 @@ func ListTeams(ctx *context.APIContext) { // "200": // "$ref": "#/responses/TeamList" - teams, count, err := models.SearchOrgTeams(&models.SearchOrgTeamOptions{ + teams, count, err := organization.SearchTeam(&organization.SearchTeamOptions{ ListOptions: utils.GetListOptions(ctx), OrgID: ctx.Org.Organization.ID, }) @@ -90,9 +91,9 @@ func ListUserTeams(ctx *context.APIContext) { // "200": // "$ref": "#/responses/TeamList" - teams, count, err := models.GetUserTeams(&models.GetUserTeamOptions{ + teams, count, err := organization.SearchTeam(&organization.SearchTeamOptions{ ListOptions: utils.GetListOptions(ctx), - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, }) if err != nil { ctx.Error(http.StatusInternalServerError, "GetUserTeams", err) @@ -104,7 +105,7 @@ func ListUserTeams(ctx *context.APIContext) { for i := range teams { apiOrg, ok := cache[teams[i].OrgID] if !ok { - org, err := models.GetOrgByID(teams[i].OrgID) + org, err := organization.GetOrgByID(teams[i].OrgID) if err != nil { ctx.Error(http.StatusInternalServerError, "GetUserByID", err) return @@ -150,11 +151,11 @@ func GetTeam(ctx *context.APIContext) { ctx.JSON(http.StatusOK, convert.ToTeam(ctx.Org.Team)) } -func attachTeamUnits(team *models.Team, units []string) { +func attachTeamUnits(team *organization.Team, units []string) { unitTypes := unit_model.FindUnitTypes(units...) 
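
Both ListTeams and ListUserTeams above now funnel through the same organization.SearchTeam helper, differing only in which filter field is set. Below is a minimal sketch of that call shape, using only names visible in the hunks (imports mirror the ones team.go already has); everything beyond those names is illustrative, not part of the patch:

func listTeamsSketch(ctx *context.APIContext) {
	// Teams of one organization, as in ListTeams:
	teams, count, err := organization.SearchTeam(&organization.SearchTeamOptions{
		ListOptions: utils.GetListOptions(ctx),
		OrgID:       ctx.Org.Organization.ID,
	})
	if err != nil {
		ctx.Error(http.StatusInternalServerError, "SearchTeam", err)
		return
	}
	ctx.SetTotalCountHeader(count)
	_ = teams

	// Teams of the signed-in user, as in ListUserTeams: same helper, UserID instead of OrgID.
	_, _, _ = organization.SearchTeam(&organization.SearchTeamOptions{
		ListOptions: utils.GetListOptions(ctx),
		UserID:      ctx.Doer.ID,
	})
}
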
- team.Units = make([]*models.TeamUnit, 0, len(units)) + team.Units = make([]*organization.TeamUnit, 0, len(units)) for _, tp := range unitTypes { - team.Units = append(team.Units, &models.TeamUnit{ + team.Units = append(team.Units, &organization.TeamUnit{ OrgID: team.OrgID, Type: tp, AccessMode: team.AccessMode, @@ -170,10 +171,10 @@ func convertUnitsMap(unitsMap map[string]string) map[unit_model.Type]perm.Access return res } -func attachTeamUnitsMap(team *models.Team, unitsMap map[string]string) { - team.Units = make([]*models.TeamUnit, 0, len(unitsMap)) +func attachTeamUnitsMap(team *organization.Team, unitsMap map[string]string) { + team.Units = make([]*organization.TeamUnit, 0, len(unitsMap)) for unitKey, p := range unitsMap { - team.Units = append(team.Units, &models.TeamUnit{ + team.Units = append(team.Units, &organization.TeamUnit{ OrgID: team.OrgID, Type: unit_model.TypeFromKey(unitKey), AccessMode: perm.ParseAccessMode(p), @@ -210,7 +211,7 @@ func CreateTeam(ctx *context.APIContext) { if p < perm.AccessModeAdmin && len(form.UnitsMap) > 0 { p = unit_model.MinUnitAccessMode(convertUnitsMap(form.UnitsMap)) } - team := &models.Team{ + team := &organization.Team{ OrgID: ctx.Org.Organization.ID, Name: form.Name, Description: form.Description, @@ -231,7 +232,7 @@ func CreateTeam(ctx *context.APIContext) { } if err := models.NewTeam(team); err != nil { - if models.IsErrTeamAlreadyExist(err) { + if organization.IsErrTeamAlreadyExist(err) { ctx.Error(http.StatusUnprocessableEntity, "", err) } else { ctx.Error(http.StatusInternalServerError, "NewTeam", err) @@ -368,24 +369,27 @@ func GetTeamMembers(ctx *context.APIContext) { // "200": // "$ref": "#/responses/UserList" - isMember, err := models.IsOrganizationMember(ctx.Org.Team.OrgID, ctx.User.ID) + isMember, err := organization.IsOrganizationMember(ctx, ctx.Org.Team.OrgID, ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrganizationMember", err) return - } else if !isMember && !ctx.User.IsAdmin { + } else if !isMember && !ctx.Doer.IsAdmin { ctx.NotFound() return } - if err := ctx.Org.Team.GetMembers(&models.SearchMembersOptions{ + teamMembers, err := organization.GetTeamMembers(ctx, &organization.SearchMembersOptions{ ListOptions: utils.GetListOptions(ctx), - }); err != nil { + TeamID: ctx.Org.Team.ID, + }) + if err != nil { ctx.Error(http.StatusInternalServerError, "GetTeamMembers", err) return } - members := make([]*api.User, len(ctx.Org.Team.Members)) - for i, member := range ctx.Org.Team.Members { - members[i] = convert.ToUser(member, ctx.User) + + members := make([]*api.User, len(teamMembers)) + for i, member := range teamMembers { + members[i] = convert.ToUser(member, ctx.Doer) } ctx.SetTotalCountHeader(int64(ctx.Org.Team.NumMembers)) @@ -422,7 +426,7 @@ func GetTeamMember(ctx *context.APIContext) { return } teamID := ctx.ParamsInt64("teamid") - isTeamMember, err := models.IsUserInTeams(u.ID, []int64{teamID}) + isTeamMember, err := organization.IsUserInTeams(ctx, u.ID, []int64{teamID}) if err != nil { ctx.Error(http.StatusInternalServerError, "IsUserInTeams", err) return @@ -430,7 +434,7 @@ func GetTeamMember(ctx *context.APIContext) { ctx.NotFound() return } - ctx.JSON(http.StatusOK, convert.ToUser(u, ctx.User)) + ctx.JSON(http.StatusOK, convert.ToUser(u, ctx.Doer)) } // AddTeamMember api for add a member to a team @@ -462,7 +466,7 @@ func AddTeamMember(ctx *context.APIContext) { if ctx.Written() { return } - if err := ctx.Org.Team.AddMember(u.ID); err != nil { + if err := models.AddTeamMember(ctx.Org.Team, 
u.ID); err != nil { ctx.Error(http.StatusInternalServerError, "AddMember", err) return } @@ -499,8 +503,8 @@ func RemoveTeamMember(ctx *context.APIContext) { return } - if err := ctx.Org.Team.RemoveMember(u.ID); err != nil { - ctx.Error(http.StatusInternalServerError, "RemoveMember", err) + if err := models.RemoveTeamMember(ctx.Org.Team, u.ID); err != nil { + ctx.Error(http.StatusInternalServerError, "RemoveTeamMember", err) return } ctx.Status(http.StatusNoContent) @@ -533,14 +537,17 @@ func GetTeamRepos(ctx *context.APIContext) { // "$ref": "#/responses/RepositoryList" team := ctx.Org.Team - if err := team.GetRepositories(&models.SearchOrgTeamOptions{ + teamRepos, err := organization.GetTeamRepositories(ctx, &organization.SearchTeamRepoOptions{ ListOptions: utils.GetListOptions(ctx), - }); err != nil { + TeamID: team.ID, + }) + if err != nil { ctx.Error(http.StatusInternalServerError, "GetTeamRepos", err) + return } - repos := make([]*api.Repository, len(team.Repos)) - for i, repo := range team.Repos { - access, err := models.AccessLevel(ctx.User, repo) + repos := make([]*api.Repository, len(teamRepos)) + for i, repo := range teamRepos { + access, err := models.AccessLevel(ctx.Doer, repo) if err != nil { ctx.Error(http.StatusInternalServerError, "GetTeamRepos", err) return @@ -551,6 +558,55 @@ func GetTeamRepos(ctx *context.APIContext) { ctx.JSON(http.StatusOK, repos) } +// GetTeamRepo api for get a particular repo of team +func GetTeamRepo(ctx *context.APIContext) { + // swagger:operation GET /teams/{id}/repos/{org}/{repo} organization orgListTeamRepo + // --- + // summary: List a particular repo of team + // produces: + // - application/json + // parameters: + // - name: id + // in: path + // description: id of the team + // type: integer + // format: int64 + // required: true + // - name: org + // in: path + // description: organization that owns the repo to list + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo to list + // type: string + // required: true + // responses: + // "200": + // "$ref": "#/responses/Repository" + // "404": + // "$ref": "#/responses/notFound" + + repo := getRepositoryByParams(ctx) + if ctx.Written() { + return + } + + if !organization.HasTeamRepo(ctx, ctx.Org.Team.OrgID, ctx.Org.Team.ID, repo.ID) { + ctx.NotFound() + return + } + + access, err := models.AccessLevel(ctx.Doer, repo) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetTeamRepos", err) + return + } + + ctx.JSON(http.StatusOK, convert.ToRepo(repo, access)) +} + // getRepositoryByParams get repository by a team's organization ID and repo name func getRepositoryByParams(ctx *context.APIContext) *repo_model.Repository { repo, err := repo_model.GetRepositoryByName(ctx.Org.Team.OrgID, ctx.Params(":reponame")) @@ -599,14 +655,14 @@ func AddTeamRepository(ctx *context.APIContext) { if ctx.Written() { return } - if access, err := models.AccessLevel(ctx.User, repo); err != nil { + if access, err := models.AccessLevel(ctx.Doer, repo); err != nil { ctx.Error(http.StatusInternalServerError, "AccessLevel", err) return } else if access < perm.AccessModeAdmin { ctx.Error(http.StatusForbidden, "", "Must have admin-level access to the repository") return } - if err := ctx.Org.Team.AddRepository(repo); err != nil { + if err := models.AddRepository(ctx.Org.Team, repo); err != nil { ctx.Error(http.StatusInternalServerError, "AddRepository", err) return } @@ -649,14 +705,14 @@ func RemoveTeamRepository(ctx *context.APIContext) { if ctx.Written() { 
return } - if access, err := models.AccessLevel(ctx.User, repo); err != nil { + if access, err := models.AccessLevel(ctx.Doer, repo); err != nil { ctx.Error(http.StatusInternalServerError, "AccessLevel", err) return } else if access < perm.AccessModeAdmin { ctx.Error(http.StatusForbidden, "", "Must have admin-level access to the repository") return } - if err := ctx.Org.Team.RemoveRepository(repo.ID); err != nil { + if err := models.RemoveRepository(ctx.Org.Team, repo.ID); err != nil { ctx.Error(http.StatusInternalServerError, "RemoveRepository", err) return } @@ -707,14 +763,15 @@ func SearchTeam(ctx *context.APIContext) { listOptions := utils.GetListOptions(ctx) - opts := &models.SearchOrgTeamOptions{ + opts := &organization.SearchTeamOptions{ + UserID: ctx.Doer.ID, Keyword: ctx.FormTrim("q"), OrgID: ctx.Org.Organization.ID, IncludeDesc: ctx.FormString("include_desc") == "" || ctx.FormBool("include_desc"), ListOptions: listOptions, } - teams, maxResults, err := models.SearchOrgTeams(opts) + teams, maxResults, err := organization.SearchTeam(opts) if err != nil { log.Error("SearchTeam failed: %v", err) ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ diff --git a/routers/api/v1/packages/package.go b/routers/api/v1/packages/package.go new file mode 100644 index 0000000000..038924737a --- /dev/null +++ b/routers/api/v1/packages/package.go @@ -0,0 +1,212 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "net/http" + + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" + api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/routers/api/v1/utils" + packages_service "code.gitea.io/gitea/services/packages" +) + +// ListPackages gets all packages of an owner +func ListPackages(ctx *context.APIContext) { + // swagger:operation GET /packages/{owner} package listPackages + // --- + // summary: Gets all packages of an owner + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the packages + // type: string + // required: true + // - name: page + // in: query + // description: page number of results to return (1-based) + // type: integer + // - name: limit + // in: query + // description: page size of results + // type: integer + // - name: type + // in: query + // description: package type filter + // type: string + // enum: [composer, conan, container, generic, helm, maven, npm, nuget, pypi, rubygems] + // - name: q + // in: query + // description: name filter + // type: string + // responses: + // "200": + // "$ref": "#/responses/PackageList" + + listOptions := utils.GetListOptions(ctx) + + packageType := ctx.FormTrim("type") + query := ctx.FormTrim("q") + + pvs, count, err := packages.SearchVersions(ctx, &packages.PackageSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Type: packages.Type(packageType), + Name: packages.SearchValue{Value: query}, + Paginator: &listOptions, + }) + if err != nil { + ctx.Error(http.StatusInternalServerError, "SearchVersions", err) + return + } + + pds, err := packages.GetPackageDescriptors(ctx, pvs) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetPackageDescriptors", err) + return + } + + apiPackages := make([]*api.Package, 0, len(pds)) + for _, pd := range pds { + apiPackage, err := convert.ToPackage(ctx, pd, ctx.Doer) + if err != nil { + 
ctx.Error(http.StatusInternalServerError, "Error converting package for api", err) + return + } + apiPackages = append(apiPackages, apiPackage) + } + + ctx.SetLinkHeader(int(count), listOptions.PageSize) + ctx.SetTotalCountHeader(count) + ctx.JSON(http.StatusOK, apiPackages) +} + +// GetPackage gets a package +func GetPackage(ctx *context.APIContext) { + // swagger:operation GET /packages/{owner}/{type}/{name}/{version} package getPackage + // --- + // summary: Gets a package + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the package + // type: string + // required: true + // - name: type + // in: path + // description: type of the package + // type: string + // required: true + // - name: name + // in: path + // description: name of the package + // type: string + // required: true + // - name: version + // in: path + // description: version of the package + // type: string + // required: true + // responses: + // "200": + // "$ref": "#/responses/Package" + // "404": + // "$ref": "#/responses/notFound" + + apiPackage, err := convert.ToPackage(ctx, ctx.Package.Descriptor, ctx.Doer) + if err != nil { + ctx.Error(http.StatusInternalServerError, "Error converting package for api", err) + return + } + + ctx.JSON(http.StatusOK, apiPackage) +} + +// DeletePackage deletes a package +func DeletePackage(ctx *context.APIContext) { + // swagger:operation DELETE /packages/{owner}/{type}/{name}/{version} package deletePackage + // --- + // summary: Delete a package + // parameters: + // - name: owner + // in: path + // description: owner of the package + // type: string + // required: true + // - name: type + // in: path + // description: type of the package + // type: string + // required: true + // - name: name + // in: path + // description: name of the package + // type: string + // required: true + // - name: version + // in: path + // description: version of the package + // type: string + // required: true + // responses: + // "204": + // "$ref": "#/responses/empty" + // "404": + // "$ref": "#/responses/notFound" + + err := packages_service.RemovePackageVersion(ctx.Doer, ctx.Package.Descriptor.Version) + if err != nil { + ctx.Error(http.StatusInternalServerError, "RemovePackageVersion", err) + return + } + ctx.Status(http.StatusNoContent) +} + +// ListPackageFiles gets all files of a package +func ListPackageFiles(ctx *context.APIContext) { + // swagger:operation GET /packages/{owner}/{type}/{name}/{version}/files package listPackageFiles + // --- + // summary: Gets all files of a package + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the package + // type: string + // required: true + // - name: type + // in: path + // description: type of the package + // type: string + // required: true + // - name: name + // in: path + // description: name of the package + // type: string + // required: true + // - name: version + // in: path + // description: version of the package + // type: string + // required: true + // responses: + // "200": + // "$ref": "#/responses/PackageFileList" + // "404": + // "$ref": "#/responses/notFound" + + apiPackageFiles := make([]*api.PackageFile, 0, len(ctx.Package.Descriptor.Files)) + for _, pfd := range ctx.Package.Descriptor.Files { + apiPackageFiles = append(apiPackageFiles, convert.ToPackageFile(pfd)) + } + + ctx.JSON(http.StatusOK, apiPackageFiles) +} diff --git a/routers/api/v1/repo/blob.go b/routers/api/v1/repo/blob.go index 
19d893a68b..035f2dc1e1 100644 --- a/routers/api/v1/repo/blob.go +++ b/routers/api/v1/repo/blob.go @@ -45,7 +45,8 @@ func GetBlob(ctx *context.APIContext) { ctx.Error(http.StatusBadRequest, "", "sha not provided") return } - if blob, err := files_service.GetBlobBySHA(ctx, ctx.Repo.Repository, sha); err != nil { + + if blob, err := files_service.GetBlobBySHA(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, sha); err != nil { ctx.Error(http.StatusBadRequest, "", err) } else { ctx.JSON(http.StatusOK, blob) diff --git a/routers/api/v1/repo/branch.go b/routers/api/v1/repo/branch.go index 3a0c3201ac..c030a896a7 100644 --- a/routers/api/v1/repo/branch.go +++ b/routers/api/v1/repo/branch.go @@ -11,6 +11,7 @@ import ( "net/http" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" @@ -75,7 +76,7 @@ func GetBranch(ctx *context.APIContext) { return } - br, err := convert.ToBranch(ctx.Repo.Repository, branch, c, branchProtection, ctx.User, ctx.Repo.IsAdmin()) + br, err := convert.ToBranch(ctx.Repo.Repository, branch, c, branchProtection, ctx.Doer, ctx.Repo.IsAdmin()) if err != nil { ctx.Error(http.StatusInternalServerError, "convert.ToBranch", err) return @@ -117,7 +118,7 @@ func DeleteBranch(ctx *context.APIContext) { branchName := ctx.Params("*") - if err := repo_service.DeleteBranch(ctx.User, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil { + if err := repo_service.DeleteBranch(ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil { switch { case git.IsErrBranchNotExist(err): ctx.NotFound(err) @@ -176,7 +177,7 @@ func CreateBranch(ctx *context.APIContext) { opt.OldBranchName = ctx.Repo.Repository.DefaultBranch } - err := repo_service.CreateNewBranch(ctx, ctx.User, ctx.Repo.Repository, opt.OldBranchName, opt.BranchName) + err := repo_service.CreateNewBranch(ctx, ctx.Doer, ctx.Repo.Repository, opt.OldBranchName, opt.BranchName) if err != nil { if models.IsErrBranchDoesNotExist(err) { ctx.Error(http.StatusNotFound, "", "The old branch does not exist") @@ -211,7 +212,7 @@ func CreateBranch(ctx *context.APIContext) { return } - br, err := convert.ToBranch(ctx.Repo.Repository, branch, commit, branchProtection, ctx.User, ctx.Repo.IsAdmin()) + br, err := convert.ToBranch(ctx.Repo.Repository, branch, commit, branchProtection, ctx.Doer, ctx.Repo.IsAdmin()) if err != nil { ctx.Error(http.StatusInternalServerError, "convert.ToBranch", err) return @@ -258,10 +259,15 @@ func ListBranches(ctx *context.APIContext) { return } - apiBranches := make([]*api.Branch, len(branches)) + apiBranches := make([]*api.Branch, 0, len(branches)) for i := range branches { c, err := branches[i].GetCommit() if err != nil { + // Skip if this branch doesn't exist anymore. 
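
The new packages/package.go above exposes a plain REST listing at GET /api/v1/packages/{owner} with type, q, page and limit query parameters, and the total is returned via SetTotalCountHeader. Here is a self-contained sketch of calling it with nothing but net/http; the host, owner and token are placeholders, and token authentication is an assumption:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder host and owner; the query parameters come from the swagger comments above.
	url := "https://gitea.example.com/api/v1/packages/some-owner?type=container&q=alpine&page=1&limit=20"

	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "token YOUR_TOKEN") // assumed: personal access token auth

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println("total:", resp.Header.Get("X-Total-Count")) // set by ctx.SetTotalCountHeader
	fmt.Println(string(body))                               // JSON array of packages
}
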
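
The ListBranches hunk here (like the ReadRepoNotifications hunk earlier) switches from indexing into a pre-sized slice to appending onto a zero-length slice with capacity, so skipped entries no longer leave nil holes in the JSON output. A generic, runnable illustration of the difference:

package main

import "fmt"

func main() {
	src := []int{1, 2, 3, 4}

	// Pre-sized slice + index assignment: skipping an element leaves a nil hole behind.
	withHoles := make([]*int, len(src))
	for i, v := range src {
		if v == 3 {
			continue // withHoles[i] stays nil
		}
		v := v
		withHoles[i] = &v
	}

	// Zero length + capacity + append: the result contains only the kept elements.
	compact := make([]*int, 0, len(src))
	for _, v := range src {
		if v == 3 {
			continue
		}
		v := v
		compact = append(compact, &v)
	}

	fmt.Println(len(withHoles), withHoles[2] == nil) // 4 true
	fmt.Println(len(compact))                        // 3
}
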
+ if git.IsErrNotExist(err) { + totalNumOfBranches-- + continue + } ctx.Error(http.StatusInternalServerError, "GetCommit", err) return } @@ -270,11 +276,12 @@ func ListBranches(ctx *context.APIContext) { ctx.Error(http.StatusInternalServerError, "GetBranchProtection", err) return } - apiBranches[i], err = convert.ToBranch(ctx.Repo.Repository, branches[i], c, branchProtection, ctx.User, ctx.Repo.IsAdmin()) + apiBranch, err := convert.ToBranch(ctx.Repo.Repository, branches[i], c, branchProtection, ctx.Doer, ctx.Repo.IsAdmin()) if err != nil { ctx.Error(http.StatusInternalServerError, "convert.ToBranch", err) return } + apiBranches = append(apiBranches, apiBranch) } ctx.SetLinkHeader(totalNumOfBranches, listOptions.PageSize) @@ -448,27 +455,27 @@ func CreateBranchProtection(ctx *context.APIContext) { } var whitelistTeams, mergeWhitelistTeams, approvalsWhitelistTeams []int64 if repo.Owner.IsOrganization() { - whitelistTeams, err = models.GetTeamIDsByNames(repo.OwnerID, form.PushWhitelistTeams, false) + whitelistTeams, err = organization.GetTeamIDsByNames(repo.OwnerID, form.PushWhitelistTeams, false) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "Team does not exist", err) return } ctx.Error(http.StatusInternalServerError, "GetTeamIDsByNames", err) return } - mergeWhitelistTeams, err = models.GetTeamIDsByNames(repo.OwnerID, form.MergeWhitelistTeams, false) + mergeWhitelistTeams, err = organization.GetTeamIDsByNames(repo.OwnerID, form.MergeWhitelistTeams, false) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "Team does not exist", err) return } ctx.Error(http.StatusInternalServerError, "GetTeamIDsByNames", err) return } - approvalsWhitelistTeams, err = models.GetTeamIDsByNames(repo.OwnerID, form.ApprovalsWhitelistTeams, false) + approvalsWhitelistTeams, err = organization.GetTeamIDsByNames(repo.OwnerID, form.ApprovalsWhitelistTeams, false) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "Team does not exist", err) return } @@ -497,7 +504,7 @@ func CreateBranchProtection(ctx *context.APIContext) { BlockOnOutdatedBranch: form.BlockOnOutdatedBranch, } - err = models.UpdateProtectBranch(ctx.Repo.Repository, protectBranch, models.WhitelistOptions{ + err = models.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, models.WhitelistOptions{ UserIDs: whitelistUsers, TeamIDs: whitelistTeams, MergeUserIDs: mergeWhitelistUsers, @@ -692,9 +699,9 @@ func EditBranchProtection(ctx *context.APIContext) { var whitelistTeams, mergeWhitelistTeams, approvalsWhitelistTeams []int64 if repo.Owner.IsOrganization() { if form.PushWhitelistTeams != nil { - whitelistTeams, err = models.GetTeamIDsByNames(repo.OwnerID, form.PushWhitelistTeams, false) + whitelistTeams, err = organization.GetTeamIDsByNames(repo.OwnerID, form.PushWhitelistTeams, false) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "Team does not exist", err) return } @@ -705,9 +712,9 @@ func EditBranchProtection(ctx *context.APIContext) { whitelistTeams = protectBranch.WhitelistTeamIDs } if form.MergeWhitelistTeams != nil { - mergeWhitelistTeams, err = models.GetTeamIDsByNames(repo.OwnerID, form.MergeWhitelistTeams, false) + mergeWhitelistTeams, err = organization.GetTeamIDsByNames(repo.OwnerID, 
form.MergeWhitelistTeams, false) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "Team does not exist", err) return } @@ -718,9 +725,9 @@ func EditBranchProtection(ctx *context.APIContext) { mergeWhitelistTeams = protectBranch.MergeWhitelistTeamIDs } if form.ApprovalsWhitelistTeams != nil { - approvalsWhitelistTeams, err = models.GetTeamIDsByNames(repo.OwnerID, form.ApprovalsWhitelistTeams, false) + approvalsWhitelistTeams, err = organization.GetTeamIDsByNames(repo.OwnerID, form.ApprovalsWhitelistTeams, false) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "Team does not exist", err) return } @@ -732,7 +739,7 @@ func EditBranchProtection(ctx *context.APIContext) { } } - err = models.UpdateProtectBranch(ctx.Repo.Repository, protectBranch, models.WhitelistOptions{ + err = models.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, models.WhitelistOptions{ UserIDs: whitelistUsers, TeamIDs: whitelistTeams, MergeUserIDs: mergeWhitelistUsers, diff --git a/routers/api/v1/repo/collaborators.go b/routers/api/v1/repo/collaborators.go index d49b6357bd..2db1724b2a 100644 --- a/routers/api/v1/repo/collaborators.go +++ b/routers/api/v1/repo/collaborators.go @@ -63,7 +63,7 @@ func ListCollaborators(ctx *context.APIContext) { users := make([]*api.User, len(collaborators)) for i, collaborator := range collaborators { - users[i] = convert.ToUser(collaborator.User, ctx.User) + users[i] = convert.ToUser(collaborator.User, ctx.Doer) } ctx.SetTotalCountHeader(count) @@ -233,6 +233,61 @@ func DeleteCollaborator(ctx *context.APIContext) { ctx.Status(http.StatusNoContent) } +// GetRepoPermissions gets repository permissions for a user +func GetRepoPermissions(ctx *context.APIContext) { + // swagger:operation GET /repos/{owner}/{repo}/collaborators/{collaborator}/permission repository repoGetRepoPermissions + // --- + // summary: Get repository permissions for a user + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: collaborator + // in: path + // description: username of the collaborator + // type: string + // required: true + // responses: + // "200": + // "$ref": "#/responses/RepoCollaboratorPermission" + // "404": + // "$ref": "#/responses/notFound" + // "403": + // "$ref": "#/responses/forbidden" + + if !ctx.Doer.IsAdmin && ctx.Doer.LoginName != ctx.Params(":collaborator") && !ctx.IsUserRepoAdmin() { + ctx.Error(http.StatusForbidden, "User", "Only admins can query all permissions, repo admins can query all repo permissions, collaborators can query only their own") + return + } + + collaborator, err := user_model.GetUserByName(ctx.Params(":collaborator")) + if err != nil { + if user_model.IsErrUserNotExist(err) { + ctx.Error(http.StatusNotFound, "GetUserByName", err) + } else { + ctx.Error(http.StatusInternalServerError, "GetUserByName", err) + } + return + } + + permission, err := models.GetUserRepoPermission(ctx, ctx.Repo.Repository, collaborator) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err) + return + } + + ctx.JSON(http.StatusOK, convert.ToUserAndPermission(collaborator, ctx.ContextUser, permission.AccessMode)) +} + // GetReviewers return all users 
that can be requested to review in this repo func GetReviewers(ctx *context.APIContext) { // swagger:operation GET /repos/{owner}/{repo}/reviewers repository repoGetReviewers @@ -255,12 +310,12 @@ func GetReviewers(ctx *context.APIContext) { // "200": // "$ref": "#/responses/UserList" - reviewers, err := models.GetReviewers(ctx.Repo.Repository, ctx.User.ID, 0) + reviewers, err := models.GetReviewers(ctx.Repo.Repository, ctx.Doer.ID, 0) if err != nil { ctx.Error(http.StatusInternalServerError, "ListCollaborators", err) return } - ctx.JSON(http.StatusOK, convert.ToUsers(ctx.User, reviewers)) + ctx.JSON(http.StatusOK, convert.ToUsers(ctx.Doer, reviewers)) } // GetAssignees return all users that have write access and can be assigned to issues @@ -290,5 +345,5 @@ func GetAssignees(ctx *context.APIContext) { ctx.Error(http.StatusInternalServerError, "ListCollaborators", err) return } - ctx.JSON(http.StatusOK, convert.ToUsers(ctx.User, assignees)) + ctx.JSON(http.StatusOK, convert.ToUsers(ctx.Doer, assignees)) } diff --git a/routers/api/v1/repo/commits.go b/routers/api/v1/repo/commits.go index b6c47e0685..b196ce9774 100644 --- a/routers/api/v1/repo/commits.go +++ b/routers/api/v1/repo/commits.go @@ -11,7 +11,6 @@ import ( "net/http" "strconv" - repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" @@ -268,16 +267,12 @@ func DownloadCommitDiffOrPatch(ctx *context.APIContext) { // "$ref": "#/responses/string" // "404": // "$ref": "#/responses/notFound" - repoPath := repo_model.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name) - if err := git.GetRawDiff( - ctx, - repoPath, - ctx.Params(":sha"), - git.RawDiffType(ctx.Params(":diffType")), - ctx.Resp, - ); err != nil { + sha := ctx.Params(":sha") + diffType := git.RawDiffType(ctx.Params(":diffType")) + + if err := git.GetRawDiff(ctx.Repo.GitRepo, sha, diffType, ctx.Resp); err != nil { if git.IsErrNotExist(err) { - ctx.NotFound(ctx.Params(":sha")) + ctx.NotFound(sha) return } ctx.Error(http.StatusInternalServerError, "DownloadCommitDiffOrPatch", err) diff --git a/routers/api/v1/repo/file.go b/routers/api/v1/repo/file.go index a27e383bc3..2a4c4ad979 100644 --- a/routers/api/v1/repo/file.go +++ b/routers/api/v1/repo/file.go @@ -9,13 +9,16 @@ import ( "encoding/base64" "fmt" "net/http" + "path" "time" "code.gitea.io/gitea/models" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" + "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/common" @@ -53,7 +56,7 @@ func GetRawFile(ctx *context.APIContext) { // required: false // responses: // 200: - // description: success + // description: Returns raw file content. 
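
Per the commits.go hunk above, git.GetRawDiff now takes the already-open repository instead of a path, and elsewhere in this patch repositories are opened with git.OpenRepository(ctx, path). A small sketch combining the two signatures as they appear in the hunks (imports: context, io, code.gitea.io/gitea/modules/git); the path, SHA and diff type value are placeholders, and closing the repository is assumed to work as it does elsewhere in the codebase:

func writeRawDiff(ctx context.Context, repoPath, sha string, w io.Writer) error {
	gitRepo, err := git.OpenRepository(ctx, repoPath)
	if err != nil {
		return err
	}
	defer gitRepo.Close() // assumption: open repositories are closed after use

	// ":diffType" in the route is "diff" or "patch"; "diff" is used here as a placeholder.
	return git.GetRawDiff(gitRepo, sha, git.RawDiffType("diff"), w)
}
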
// "404": // "$ref": "#/responses/notFound" @@ -62,33 +65,50 @@ func GetRawFile(ctx *context.APIContext) { return } - commit := ctx.Repo.Commit - - if ref := ctx.FormTrim("ref"); len(ref) > 0 { - var err error - commit, err = ctx.Repo.GitRepo.GetCommit(ref) - if err != nil { - if git.IsErrNotExist(err) { - ctx.NotFound() - } else { - ctx.Error(http.StatusInternalServerError, "GetBlobByPath", err) - } - return - } + blob, lastModified := getBlobForEntry(ctx) + if ctx.Written() { + return } - blob, err := commit.GetBlobByPath(ctx.Repo.TreePath) + if err := common.ServeBlob(ctx.Context, blob, lastModified); err != nil { + ctx.Error(http.StatusInternalServerError, "ServeBlob", err) + } +} + +func getBlobForEntry(ctx *context.APIContext) (blob *git.Blob, lastModified time.Time) { + entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath) if err != nil { if git.IsErrNotExist(err) { ctx.NotFound() } else { - ctx.Error(http.StatusInternalServerError, "GetBlobByPath", err) + ctx.Error(http.StatusInternalServerError, "GetTreeEntryByPath", err) } return } - if err = common.ServeBlob(ctx.Context, blob); err != nil { - ctx.Error(http.StatusInternalServerError, "ServeBlob", err) + + if entry.IsDir() || entry.IsSubModule() { + ctx.NotFound("getBlobForEntry", nil) + return } + + var c *git.LastCommitCache + if setting.CacheService.LastCommit.Enabled && ctx.Repo.CommitsCount >= setting.CacheService.LastCommit.CommitsCount { + c = git.NewLastCommitCache(ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, setting.LastCommitCacheTTLSeconds, cache.GetCache()) + } + + info, _, err := git.Entries([]*git.TreeEntry{entry}).GetCommitsInfo(ctx, ctx.Repo.Commit, path.Dir("/" + ctx.Repo.TreePath)[1:], c) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetCommitsInfo", err) + return + } + + if len(info) == 1 { + // Not Modified + lastModified = info[0].Commit.Committer.When + } + blob = entry.Blob() + + return } // GetArchive get archive of a repository @@ -122,7 +142,7 @@ func GetArchive(ctx *context.APIContext) { repoPath := repo_model.RepoPath(ctx.Params(":username"), ctx.Params(":reponame")) if ctx.Repo.GitRepo == nil { - gitRepo, err := git.OpenRepositoryCtx(ctx, repoPath) + gitRepo, err := git.OpenRepository(ctx, repoPath) if err != nil { ctx.Error(http.StatusInternalServerError, "OpenRepository", err) return @@ -157,13 +177,18 @@ func GetEditorconfig(ctx *context.APIContext) { // description: filepath of file to get // type: string // required: true + // - name: ref + // in: query + // description: "The name of the commit/branch/tag. Default the repository’s default branch (usually master)" + // type: string + // required: false // responses: // 200: // description: success // "404": // "$ref": "#/responses/notFound" - ec, err := ctx.Repo.GetEditorconfig() + ec, err := ctx.Repo.GetEditorconfig(ctx.Repo.Commit) if err != nil { if git.IsErrNotExist(err) { ctx.NotFound(err) @@ -183,8 +208,10 @@ func GetEditorconfig(ctx *context.APIContext) { } // canWriteFiles returns true if repository is editable and user has proper access level. -func canWriteFiles(r *context.Repository) bool { - return r.Permission.CanWrite(unit.TypeCode) && !r.Repository.IsMirror && !r.Repository.IsArchived +func canWriteFiles(ctx *context.APIContext, branch string) bool { + return ctx.Repo.Permission.CanWriteToBranch(ctx.Doer, branch) && + !ctx.Repo.Repository.IsMirror && + !ctx.Repo.Repository.IsArchived } // canReadFiles returns true if repository is readable and user has proper access level. 
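
The rewritten GetRawFile above computes a lastModified time from the last commit touching the path so the blob can be served with HTTP caching semantics (note the "Not Modified" comment in the hunk). The standard library shows the same idea in miniature: http.ServeContent sets Last-Modified and answers a matching If-Modified-Since with 304. A self-contained illustration; common.ServeBlob is presumably doing something comparable internally:

package main

import (
	"bytes"
	"net/http"
	"time"
)

func main() {
	content := []byte("raw file content")
	lastModified := time.Date(2022, time.March, 1, 12, 0, 0, 0, time.UTC)

	http.HandleFunc("/raw/README.md", func(w http.ResponseWriter, r *http.Request) {
		// ServeContent emits Last-Modified and replies 304 Not Modified
		// when the client's If-Modified-Since is up to date.
		http.ServeContent(w, r, "README.md", lastModified, bytes.NewReader(content))
	})

	_ = http.ListenAndServe("127.0.0.1:8080", nil)
}
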
@@ -233,9 +260,6 @@ func CreateFile(ctx *context.APIContext) { // "$ref": "#/responses/error" apiOpts := web.GetForm(ctx).(*api.CreateFileOptions) - if ctx.Repo.Repository.IsEmpty { - ctx.Error(http.StatusUnprocessableEntity, "RepoIsEmpty", fmt.Errorf("repo is empty")) - } if apiOpts.BranchName == "" { apiOpts.BranchName = ctx.Repo.Repository.DefaultBranch @@ -389,9 +413,9 @@ func handleCreateOrUpdateFileError(ctx *context.APIContext, err error) { // Called from both CreateFile or UpdateFile to handle both func createOrUpdateFile(ctx *context.APIContext, opts *files_service.UpdateRepoFileOptions) (*api.FileResponse, error) { - if !canWriteFiles(ctx.Repo) { + if !canWriteFiles(ctx, opts.OldBranch) { return nil, models.ErrUserDoesNotHaveAccessToRepo{ - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, RepoName: ctx.Repo.Repository.LowerName, } } @@ -402,7 +426,7 @@ func createOrUpdateFile(ctx *context.APIContext, opts *files_service.UpdateRepoF } opts.Content = string(content) - return files_service.CreateOrUpdateRepoFile(ctx, ctx.Repo.Repository, ctx.User, opts) + return files_service.CreateOrUpdateRepoFile(ctx, ctx.Repo.Repository, ctx.Doer, opts) } // DeleteFile Delete a file in a repository @@ -446,9 +470,9 @@ func DeleteFile(ctx *context.APIContext) { // "$ref": "#/responses/error" apiOpts := web.GetForm(ctx).(*api.DeleteFileOptions) - if !canWriteFiles(ctx.Repo) { + if !canWriteFiles(ctx, apiOpts.BranchName) { ctx.Error(http.StatusForbidden, "DeleteFile", models.ErrUserDoesNotHaveAccessToRepo{ - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, RepoName: ctx.Repo.Repository.LowerName, }) return @@ -489,7 +513,7 @@ func DeleteFile(ctx *context.APIContext) { opts.Message = ctx.Tr("repo.editor.delete", opts.TreePath) } - if fileResponse, err := files_service.DeleteRepoFile(ctx, ctx.Repo.Repository, ctx.User, opts); err != nil { + if fileResponse, err := files_service.DeleteRepoFile(ctx, ctx.Repo.Repository, ctx.Doer, opts); err != nil { if git.IsErrBranchNotExist(err) || models.IsErrRepoFileDoesNotExist(err) || git.IsErrNotExist(err) { ctx.Error(http.StatusNotFound, "DeleteFile", err) return @@ -546,7 +570,7 @@ func GetContents(ctx *context.APIContext) { if !canReadFiles(ctx.Repo) { ctx.Error(http.StatusInternalServerError, "GetContentsOrList", models.ErrUserDoesNotHaveAccessToRepo{ - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, RepoName: ctx.Repo.Repository.LowerName, }) return diff --git a/routers/api/v1/repo/fork.go b/routers/api/v1/repo/fork.go index d814ae909e..10c05e5503 100644 --- a/routers/api/v1/repo/fork.go +++ b/routers/api/v1/repo/fork.go @@ -10,6 +10,7 @@ import ( "net/http" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -58,7 +59,7 @@ func ListForks(ctx *context.APIContext) { } apiForks := make([]*api.Repository, len(forks)) for i, fork := range forks { - access, err := models.AccessLevel(ctx.User, fork) + access, err := models.AccessLevel(ctx.Doer, fork) if err != nil { ctx.Error(http.StatusInternalServerError, "AccessLevel", err) return @@ -106,18 +107,18 @@ func CreateFork(ctx *context.APIContext) { repo := ctx.Repo.Repository var forker *user_model.User // user/org that will own the fork if form.Organization == nil { - forker = ctx.User + forker = ctx.Doer } else { - org, err := models.GetOrgByName(*form.Organization) + org, err := organization.GetOrgByName(*form.Organization) if err != nil { - if models.IsErrOrgNotExist(err) { + if 
organization.IsErrOrgNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "", err) } else { ctx.Error(http.StatusInternalServerError, "GetOrgByName", err) } return } - isMember, err := org.IsOrgMember(ctx.User.ID) + isMember, err := org.IsOrgMember(ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrgMember", err) return @@ -135,7 +136,7 @@ func CreateFork(ctx *context.APIContext) { name = *form.Name } - fork, err := repo_service.ForkRepository(ctx.User, forker, repo_service.ForkRepoOptions{ + fork, err := repo_service.ForkRepository(ctx, ctx.Doer, forker, repo_service.ForkRepoOptions{ BaseRepo: repo, Name: name, Description: repo.Description, diff --git a/routers/api/v1/repo/hook.go b/routers/api/v1/repo/hook.go index fdcaf5e389..7ec6cd88ab 100644 --- a/routers/api/v1/repo/hook.go +++ b/routers/api/v1/repo/hook.go @@ -138,6 +138,11 @@ func TestHook(ctx *context.APIContext) { // type: integer // format: int64 // required: true + // - name: ref + // in: query + // description: "The name of the commit/branch/tag. Default the repository’s default branch (usually master)" + // type: string + // required: false // responses: // "204": // "$ref": "#/responses/empty" @@ -163,8 +168,8 @@ func TestHook(ctx *context.APIContext) { Commits: []*api.PayloadCommit{commit}, HeadCommit: commit, Repo: convert.ToRepo(ctx.Repo.Repository, perm.AccessModeNone), - Pusher: convert.ToUserWithAccessMode(ctx.User, perm.AccessModeNone), - Sender: convert.ToUserWithAccessMode(ctx.User, perm.AccessModeNone), + Pusher: convert.ToUserWithAccessMode(ctx.Doer, perm.AccessModeNone), + Sender: convert.ToUserWithAccessMode(ctx.Doer, perm.AccessModeNone), }); err != nil { ctx.Error(http.StatusInternalServerError, "PrepareWebhook: ", err) return diff --git a/routers/api/v1/repo/issue.go b/routers/api/v1/repo/issue.go index 9e550c4c47..9654b270c0 100644 --- a/routers/api/v1/repo/issue.go +++ b/routers/api/v1/repo/issue.go @@ -14,6 +14,8 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" @@ -110,7 +112,7 @@ func SearchIssues(ctx *context.APIContext) { // "200": // "$ref": "#/responses/IssueList" - before, since, err := utils.GetQueryBeforeSince(ctx) + before, since, err := context.GetQueryBeforeSince(ctx.Context) if err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return @@ -135,7 +137,7 @@ func SearchIssues(ctx *context.APIContext) { // This needs to be a column that is not nil in fixtures or // MySQL will return different results when sorting by null in some cases OrderBy: db.SearchOrderByAlphabetically, - Actor: ctx.User, + Actor: ctx.Doer, } if ctx.IsSigned { opts.Private = true @@ -161,9 +163,9 @@ func SearchIssues(ctx *context.APIContext) { ctx.Error(http.StatusBadRequest, "", "Owner organisation is required for filtering on team") return } - team, err := models.GetTeam(opts.OwnerID, ctx.FormString("team")) + team, err := organization.GetTeam(opts.OwnerID, ctx.FormString("team")) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Error(http.StatusBadRequest, "Team not found", err) } else { ctx.Error(http.StatusInternalServerError, "GetUserByName", err) @@ -173,6 +175,7 @@ func SearchIssues(ctx *context.APIContext) { opts.TeamID = team.ID } + repoCond := 
models.SearchRepositoryCondition(opts) repoIDs, _, err := models.SearchRepositoryIDs(opts) if err != nil { ctx.Error(http.StatusInternalServerError, "SearchRepositoryByName", err) @@ -233,7 +236,7 @@ func SearchIssues(ctx *context.APIContext) { Page: ctx.FormInt("page"), PageSize: limit, }, - RepoIDs: repoIDs, + RepoCond: repoCond, IsClosed: isClosed, IssueIDs: issueIDs, IncludedLabelNames: includedLabelNames, @@ -245,18 +248,23 @@ func SearchIssues(ctx *context.APIContext) { UpdatedAfterUnix: since, } + ctxUserID := int64(0) + if ctx.IsSigned { + ctxUserID = ctx.Doer.ID + } + // Filter for: Created by User, Assigned to User, Mentioning User, Review of User Requested if ctx.FormBool("created") { - issuesOpt.PosterID = ctx.User.ID + issuesOpt.PosterID = ctxUserID } if ctx.FormBool("assigned") { - issuesOpt.AssigneeID = ctx.User.ID + issuesOpt.AssigneeID = ctxUserID } if ctx.FormBool("mentioned") { - issuesOpt.MentionedID = ctx.User.ID + issuesOpt.MentionedID = ctxUserID } if ctx.FormBool("review_requested") { - issuesOpt.ReviewRequestedID = ctx.User.ID + issuesOpt.ReviewRequestedID = ctxUserID } if issues, err = models.Issues(issuesOpt); err != nil { @@ -353,7 +361,7 @@ func ListIssues(ctx *context.APIContext) { // responses: // "200": // "$ref": "#/responses/IssueList" - before, since, err := utils.GetQueryBeforeSince(ctx) + before, since, err := context.GetQueryBeforeSince(ctx.Context) if err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return @@ -399,12 +407,12 @@ func ListIssues(ctx *context.APIContext) { for i := range part { // uses names and fall back to ids // non existent milestones are discarded - mile, err := models.GetMilestoneByRepoIDANDName(ctx.Repo.Repository.ID, part[i]) + mile, err := issues_model.GetMilestoneByRepoIDANDName(ctx.Repo.Repository.ID, part[i]) if err == nil { mileIDs = append(mileIDs, mile.ID) continue } - if !models.IsErrMilestoneNotExist(err) { + if !issues_model.IsErrMilestoneNotExist(err) { ctx.Error(http.StatusInternalServerError, "GetMilestoneByRepoIDANDName", err) return } @@ -412,12 +420,12 @@ func ListIssues(ctx *context.APIContext) { if err != nil { continue } - mile, err = models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, id) + mile, err = issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, id) if err == nil { mileIDs = append(mileIDs, mile.ID) continue } - if models.IsErrMilestoneNotExist(err) { + if issues_model.IsErrMilestoneNotExist(err) { continue } ctx.Error(http.StatusInternalServerError, "GetMilestoneByRepoID", err) @@ -455,7 +463,7 @@ func ListIssues(ctx *context.APIContext) { if len(keyword) == 0 || len(issueIDs) > 0 || len(labelIDs) > 0 { issuesOpt := &models.IssuesOptions{ ListOptions: listOptions, - RepoIDs: []int64{ctx.Repo.Repository.ID}, + RepoID: ctx.Repo.Repository.ID, IsClosed: isClosed, IssueIDs: issueIDs, LabelIDs: labelIDs, @@ -592,8 +600,8 @@ func CreateIssue(ctx *context.APIContext) { RepoID: ctx.Repo.Repository.ID, Repo: ctx.Repo.Repository, Title: form.Title, - PosterID: ctx.User.ID, - Poster: ctx.User, + PosterID: ctx.Doer.ID, + Poster: ctx.Doer, Content: form.Body, Ref: form.Ref, DeadlineUnix: deadlineUnix, @@ -646,7 +654,7 @@ func CreateIssue(ctx *context.APIContext) { } if form.Closed { - if err := issue_service.ChangeStatus(issue, ctx.User, true); err != nil { + if err := issue_service.ChangeStatus(issue, ctx.Doer, true); err != nil { if models.IsErrDependenciesLeft(err) { ctx.Error(http.StatusPreconditionFailed, "DependenciesLeft", "cannot close this issue because it 
still has open dependencies") return @@ -724,7 +732,7 @@ func EditIssue(ctx *context.APIContext) { return } - if !issue.IsPoster(ctx.User.ID) && !canWrite { + if !issue.IsPoster(ctx.Doer.ID) && !canWrite { ctx.Status(http.StatusForbidden) return } @@ -737,7 +745,7 @@ func EditIssue(ctx *context.APIContext) { issue.Content = *form.Body } if form.Ref != nil { - err = issue_service.ChangeIssueRef(issue, ctx.User, *form.Ref) + err = issue_service.ChangeIssueRef(issue, ctx.Doer, *form.Ref) if err != nil { ctx.Error(http.StatusInternalServerError, "UpdateRef", err) return @@ -754,7 +762,7 @@ func EditIssue(ctx *context.APIContext) { deadlineUnix = timeutil.TimeStamp(deadline.Unix()) } - if err := models.UpdateIssueDeadline(issue, deadlineUnix, ctx.User); err != nil { + if err := models.UpdateIssueDeadline(issue, deadlineUnix, ctx.Doer); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateIssueDeadline", err) return } @@ -775,7 +783,7 @@ func EditIssue(ctx *context.APIContext) { oneAssignee = *form.Assignee } - err = issue_service.UpdateAssignees(issue, oneAssignee, form.Assignees, ctx.User) + err = issue_service.UpdateAssignees(issue, oneAssignee, form.Assignees, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "UpdateAssignees", err) return @@ -786,7 +794,7 @@ func EditIssue(ctx *context.APIContext) { issue.MilestoneID != *form.Milestone { oldMilestoneID := issue.MilestoneID issue.MilestoneID = *form.Milestone - if err = issue_service.ChangeMilestoneAssign(issue, ctx.User, oldMilestoneID); err != nil { + if err = issue_service.ChangeMilestoneAssign(issue, ctx.Doer, oldMilestoneID); err != nil { ctx.Error(http.StatusInternalServerError, "ChangeMilestoneAssign", err) return } @@ -803,7 +811,7 @@ func EditIssue(ctx *context.APIContext) { } issue.IsClosed = api.StateClosed == api.StateType(*form.State) } - statusChangeComment, titleChanged, err := models.UpdateIssueByAPI(issue, ctx.User) + statusChangeComment, titleChanged, err := models.UpdateIssueByAPI(issue, ctx.Doer) if err != nil { if models.IsErrDependenciesLeft(err) { ctx.Error(http.StatusPreconditionFailed, "DependenciesLeft", "cannot close this issue because it still has open dependencies") @@ -814,11 +822,11 @@ func EditIssue(ctx *context.APIContext) { } if titleChanged { - notification.NotifyIssueChangeTitle(ctx.User, issue, oldTitle) + notification.NotifyIssueChangeTitle(ctx.Doer, issue, oldTitle) } if statusChangeComment != nil { - notification.NotifyIssueChangeStatus(ctx.User, issue, statusChangeComment, issue.IsClosed) + notification.NotifyIssueChangeStatus(ctx.Doer, issue, statusChangeComment, issue.IsClosed) } // Refetch from database to assign some automatic values @@ -872,7 +880,7 @@ func DeleteIssue(ctx *context.APIContext) { return } - if err = issue_service.DeleteIssue(ctx.User, ctx.Repo.GitRepo, issue); err != nil { + if err = issue_service.DeleteIssue(ctx.Doer, ctx.Repo.GitRepo, issue); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteIssueByID", err) return } @@ -941,7 +949,7 @@ func UpdateIssueDeadline(ctx *context.APIContext) { deadlineUnix = timeutil.TimeStamp(deadline.Unix()) } - if err := models.UpdateIssueDeadline(issue, deadlineUnix, ctx.User); err != nil { + if err := models.UpdateIssueDeadline(issue, deadlineUnix, ctx.Doer); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateIssueDeadline", err) return } diff --git a/routers/api/v1/repo/issue_comment.go b/routers/api/v1/repo/issue_comment.go index f90028a0ab..bc68cb396b 100644 --- 
a/routers/api/v1/repo/issue_comment.go +++ b/routers/api/v1/repo/issue_comment.go @@ -6,6 +6,7 @@ package repo import ( + stdCtx "context" "errors" "net/http" @@ -58,7 +59,7 @@ func ListIssueComments(ctx *context.APIContext) { // "200": // "$ref": "#/responses/CommentList" - before, since, err := utils.GetQueryBeforeSince(ctx) + before, since, err := context.GetQueryBeforeSince(ctx.Context) if err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return @@ -150,7 +151,7 @@ func ListIssueCommentsAndTimeline(ctx *context.APIContext) { // "200": // "$ref": "#/responses/TimelineList" - before, since, err := utils.GetQueryBeforeSince(ctx) + before, since, err := context.GetQueryBeforeSince(ctx.Context) if err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return @@ -183,9 +184,9 @@ func ListIssueCommentsAndTimeline(ctx *context.APIContext) { var apiComments []*api.TimelineComment for _, comment := range comments { - if comment.Type != models.CommentTypeCode && isXRefCommentAccessible(ctx.User, comment, issue.RepoID) { + if comment.Type != models.CommentTypeCode && isXRefCommentAccessible(ctx, ctx.Doer, comment, issue.RepoID) { comment.Issue = issue - apiComments = append(apiComments, convert.ToTimelineComment(comment, ctx.User)) + apiComments = append(apiComments, convert.ToTimelineComment(comment, ctx.Doer)) } } @@ -193,16 +194,16 @@ func ListIssueCommentsAndTimeline(ctx *context.APIContext) { ctx.JSON(http.StatusOK, &apiComments) } -func isXRefCommentAccessible(user *user_model.User, c *models.Comment, issueRepoID int64) bool { +func isXRefCommentAccessible(ctx stdCtx.Context, user *user_model.User, c *models.Comment, issueRepoID int64) bool { // Remove comments that the user has no permissions to see if models.CommentTypeIsRef(c.Type) && c.RefRepoID != issueRepoID && c.RefRepoID != 0 { var err error // Set RefRepo for description in template - c.RefRepo, err = repo_model.GetRepositoryByID(c.RefRepoID) + c.RefRepo, err = repo_model.GetRepositoryByIDCtx(ctx, c.RefRepoID) if err != nil { return false } - perm, err := models.GetUserRepoPermission(c.RefRepo, user) + perm, err := models.GetUserRepoPermission(ctx, c.RefRepo, user) if err != nil { return false } @@ -253,7 +254,7 @@ func ListRepoIssueComments(ctx *context.APIContext) { // "200": // "$ref": "#/responses/CommentList" - before, since, err := utils.GetQueryBeforeSince(ctx) + before, since, err := context.GetQueryBeforeSince(ctx.Context) if err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return @@ -347,12 +348,12 @@ func CreateIssueComment(ctx *context.APIContext) { return } - if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.User.IsAdmin { + if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.Doer.IsAdmin { ctx.Error(http.StatusForbidden, "CreateIssueComment", errors.New(ctx.Tr("repo.issues.comment_on_locked"))) return } - comment, err := comment_service.CreateIssueComment(ctx.User, ctx.Repo.Repository, issue, form.Body, nil) + comment, err := comment_service.CreateIssueComment(ctx.Doer, ctx.Repo.Repository, issue, form.Body, nil) if err != nil { ctx.Error(http.StatusInternalServerError, "CreateIssueComment", err) return @@ -534,7 +535,7 @@ func editIssueComment(ctx *context.APIContext, form api.EditIssueCommentOption) return } - if !ctx.IsSigned || (ctx.User.ID != comment.PosterID && !ctx.Repo.IsAdmin()) { + if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.IsAdmin()) { 
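// Condensed sketch of the relocated "before"/"since" parsing used by the comment
// listings in this file: GetQueryBeforeSince now lives in modules/context rather than
// routers/api/v1/utils, and it takes the embedded *context.Context (ctx.Context)
// instead of the *context.APIContext. This fragment reuses the handler's ctx exactly
// as the hunks above do.
before, since, err := context.GetQueryBeforeSince(ctx.Context)
if err != nil {
	ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err)
	return
}
// before/since are then fed into the comment-listing options unchanged; judging by the
// CreatedBeforeUnix/CreatedAfterUnix assignments elsewhere in this patch they are
// Unix-style timestamp bounds, with a zero value meaning "no filter".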
ctx.Status(http.StatusForbidden) return } @@ -546,7 +547,7 @@ func editIssueComment(ctx *context.APIContext, form api.EditIssueCommentOption) oldContent := comment.Content comment.Content = form.Body - if err := comment_service.UpdateComment(comment, ctx.User, oldContent); err != nil { + if err := comment_service.UpdateComment(comment, ctx.Doer, oldContent); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateComment", err) return } @@ -637,7 +638,7 @@ func deleteIssueComment(ctx *context.APIContext) { return } - if !ctx.IsSigned || (ctx.User.ID != comment.PosterID && !ctx.Repo.IsAdmin()) { + if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.IsAdmin()) { ctx.Status(http.StatusForbidden) return } else if comment.Type != models.CommentTypeComment { @@ -645,7 +646,7 @@ func deleteIssueComment(ctx *context.APIContext) { return } - if err = comment_service.DeleteComment(ctx.User, comment); err != nil { + if err = comment_service.DeleteComment(ctx.Doer, comment); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteCommentByID", err) return } diff --git a/routers/api/v1/repo/issue_label.go b/routers/api/v1/repo/issue_label.go index 0469ae247c..e314e756dd 100644 --- a/routers/api/v1/repo/issue_label.go +++ b/routers/api/v1/repo/issue_label.go @@ -106,7 +106,7 @@ func AddIssueLabels(ctx *context.APIContext) { return } - if err = issue_service.AddLabels(issue, ctx.User, labels); err != nil { + if err = issue_service.AddLabels(issue, ctx.Doer, labels); err != nil { ctx.Error(http.StatusInternalServerError, "AddLabels", err) return } @@ -183,7 +183,7 @@ func DeleteIssueLabel(ctx *context.APIContext) { return } - if err := issue_service.RemoveLabel(issue, ctx.User, label); err != nil { + if err := issue_service.RemoveLabel(issue, ctx.Doer, label); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteIssueLabel", err) return } @@ -232,7 +232,7 @@ func ReplaceIssueLabels(ctx *context.APIContext) { return } - if err := issue_service.ReplaceLabels(issue, ctx.User, labels); err != nil { + if err := issue_service.ReplaceLabels(issue, ctx.Doer, labels); err != nil { ctx.Error(http.StatusInternalServerError, "ReplaceLabels", err) return } @@ -291,7 +291,7 @@ func ClearIssueLabels(ctx *context.APIContext) { return } - if err := issue_service.ClearLabels(issue, ctx.User); err != nil { + if err := issue_service.ClearLabels(issue, ctx.Doer); err != nil { ctx.Error(http.StatusInternalServerError, "ClearLabels", err) return } diff --git a/routers/api/v1/repo/issue_reaction.go b/routers/api/v1/repo/issue_reaction.go index 9c5086700c..5aa7366796 100644 --- a/routers/api/v1/repo/issue_reaction.go +++ b/routers/api/v1/repo/issue_reaction.go @@ -9,6 +9,7 @@ import ( "net/http" "code.gitea.io/gitea/models" + issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" api "code.gitea.io/gitea/modules/structs" @@ -67,12 +68,12 @@ func GetIssueCommentReactions(ctx *context.APIContext) { return } - reactions, _, err := models.FindCommentReactions(comment) + reactions, _, err := issues_model.FindCommentReactions(comment.IssueID, comment.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "FindCommentReactions", err) return } - _, err = reactions.LoadUsers(ctx.Repo.Repository) + _, err = reactions.LoadUsers(ctx, ctx.Repo.Repository) if err != nil { ctx.Error(http.StatusInternalServerError, "ReactionList.LoadUsers()", err) return @@ -81,7 +82,7 @@ func GetIssueCommentReactions(ctx *context.APIContext) { var 
result []api.Reaction for _, r := range reactions { result = append(result, api.Reaction{ - User: convert.ToUser(r.User, ctx.User), + User: convert.ToUser(r.User, ctx.Doer), Reaction: r.Type, Created: r.CreatedUnix.AsTime(), }) @@ -197,13 +198,13 @@ func changeIssueCommentReaction(ctx *context.APIContext, form api.EditReactionOp if isCreateType { // PostIssueCommentReaction part - reaction, err := models.CreateCommentReaction(ctx.User, comment.Issue, comment, form.Reaction) + reaction, err := issues_model.CreateCommentReaction(ctx.Doer.ID, comment.Issue.ID, comment.ID, form.Reaction) if err != nil { - if models.IsErrForbiddenIssueReaction(err) { + if issues_model.IsErrForbiddenIssueReaction(err) { ctx.Error(http.StatusForbidden, err.Error(), err) - } else if models.IsErrReactionAlreadyExist(err) { + } else if issues_model.IsErrReactionAlreadyExist(err) { ctx.JSON(http.StatusOK, api.Reaction{ - User: convert.ToUser(ctx.User, ctx.User), + User: convert.ToUser(ctx.Doer, ctx.Doer), Reaction: reaction.Type, Created: reaction.CreatedUnix.AsTime(), }) @@ -214,13 +215,13 @@ func changeIssueCommentReaction(ctx *context.APIContext, form api.EditReactionOp } ctx.JSON(http.StatusCreated, api.Reaction{ - User: convert.ToUser(ctx.User, ctx.User), + User: convert.ToUser(ctx.Doer, ctx.Doer), Reaction: reaction.Type, Created: reaction.CreatedUnix.AsTime(), }) } else { // DeleteIssueCommentReaction part - err = models.DeleteCommentReaction(ctx.User, comment.Issue, comment, form.Reaction) + err = issues_model.DeleteCommentReaction(ctx.Doer.ID, comment.Issue.ID, comment.ID, form.Reaction) if err != nil { ctx.Error(http.StatusInternalServerError, "DeleteCommentReaction", err) return @@ -285,12 +286,12 @@ func GetIssueReactions(ctx *context.APIContext) { return } - reactions, count, err := models.FindIssueReactions(issue, utils.GetListOptions(ctx)) + reactions, count, err := issues_model.FindIssueReactions(issue.ID, utils.GetListOptions(ctx)) if err != nil { ctx.Error(http.StatusInternalServerError, "FindIssueReactions", err) return } - _, err = reactions.LoadUsers(ctx.Repo.Repository) + _, err = reactions.LoadUsers(ctx, ctx.Repo.Repository) if err != nil { ctx.Error(http.StatusInternalServerError, "ReactionList.LoadUsers()", err) return @@ -299,7 +300,7 @@ func GetIssueReactions(ctx *context.APIContext) { var result []api.Reaction for _, r := range reactions { result = append(result, api.Reaction{ - User: convert.ToUser(r.User, ctx.User), + User: convert.ToUser(r.User, ctx.Doer), Reaction: r.Type, Created: r.CreatedUnix.AsTime(), }) @@ -407,13 +408,13 @@ func changeIssueReaction(ctx *context.APIContext, form api.EditReactionOption, i if isCreateType { // PostIssueReaction part - reaction, err := models.CreateIssueReaction(ctx.User, issue, form.Reaction) + reaction, err := issues_model.CreateIssueReaction(ctx.Doer.ID, issue.ID, form.Reaction) if err != nil { - if models.IsErrForbiddenIssueReaction(err) { + if issues_model.IsErrForbiddenIssueReaction(err) { ctx.Error(http.StatusForbidden, err.Error(), err) - } else if models.IsErrReactionAlreadyExist(err) { + } else if issues_model.IsErrReactionAlreadyExist(err) { ctx.JSON(http.StatusOK, api.Reaction{ - User: convert.ToUser(ctx.User, ctx.User), + User: convert.ToUser(ctx.Doer, ctx.Doer), Reaction: reaction.Type, Created: reaction.CreatedUnix.AsTime(), }) @@ -424,13 +425,13 @@ func changeIssueReaction(ctx *context.APIContext, form api.EditReactionOption, i } ctx.JSON(http.StatusCreated, api.Reaction{ - User: convert.ToUser(ctx.User, ctx.User), + User: 
convert.ToUser(ctx.Doer, ctx.Doer), Reaction: reaction.Type, Created: reaction.CreatedUnix.AsTime(), }) } else { // DeleteIssueReaction part - err = models.DeleteIssueReaction(ctx.User, issue, form.Reaction) + err = issues_model.DeleteIssueReaction(ctx.Doer.ID, issue.ID, form.Reaction) if err != nil { ctx.Error(http.StatusInternalServerError, "DeleteIssueReaction", err) return diff --git a/routers/api/v1/repo/issue_stopwatch.go b/routers/api/v1/repo/issue_stopwatch.go index ce80182511..382f294346 100644 --- a/routers/api/v1/repo/issue_stopwatch.go +++ b/routers/api/v1/repo/issue_stopwatch.go @@ -9,7 +9,6 @@ import ( "net/http" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/routers/api/v1/utils" @@ -56,7 +55,7 @@ func StartIssueStopwatch(ctx *context.APIContext) { return } - if err := models.CreateIssueStopwatch(db.DefaultContext, ctx.User, issue); err != nil { + if err := models.CreateIssueStopwatch(ctx, ctx.Doer, issue); err != nil { ctx.Error(http.StatusInternalServerError, "CreateOrStopIssueStopwatch", err) return } @@ -105,7 +104,7 @@ func StopIssueStopwatch(ctx *context.APIContext) { return } - if err := models.FinishIssueStopwatch(db.DefaultContext, ctx.User, issue); err != nil { + if err := models.FinishIssueStopwatch(ctx, ctx.Doer, issue); err != nil { ctx.Error(http.StatusInternalServerError, "CreateOrStopIssueStopwatch", err) return } @@ -154,7 +153,7 @@ func DeleteIssueStopwatch(ctx *context.APIContext) { return } - if err := models.CancelStopwatch(ctx.User, issue); err != nil { + if err := models.CancelStopwatch(ctx.Doer, issue); err != nil { ctx.Error(http.StatusInternalServerError, "CancelStopwatch", err) return } @@ -179,12 +178,12 @@ func prepareIssueStopwatch(ctx *context.APIContext, shouldExist bool) (*models.I return nil, errors.New("Unable to write to PRs") } - if !ctx.Repo.CanUseTimetracker(issue, ctx.User) { + if !ctx.Repo.CanUseTimetracker(issue, ctx.Doer) { ctx.Status(http.StatusForbidden) return nil, errors.New("Cannot use time tracker") } - if models.StopwatchExists(ctx.User.ID, issue.ID) != shouldExist { + if models.StopwatchExists(ctx.Doer.ID, issue.ID) != shouldExist { if shouldExist { ctx.Error(http.StatusConflict, "StopwatchExists", "cannot stop/cancel a non existent stopwatch") err = errors.New("cannot stop/cancel a non existent stopwatch") @@ -220,13 +219,13 @@ func GetStopwatches(ctx *context.APIContext) { // "200": // "$ref": "#/responses/StopWatchList" - sws, err := models.GetUserStopwatches(ctx.User.ID, utils.GetListOptions(ctx)) + sws, err := models.GetUserStopwatches(ctx.Doer.ID, utils.GetListOptions(ctx)) if err != nil { ctx.Error(http.StatusInternalServerError, "GetUserStopwatches", err) return } - count, err := models.CountUserStopwatches(ctx.User.ID) + count, err := models.CountUserStopwatches(ctx.Doer.ID) if err != nil { ctx.InternalServerError(err) return diff --git a/routers/api/v1/repo/issue_subscription.go b/routers/api/v1/repo/issue_subscription.go index 76c668697e..f00c85b126 100644 --- a/routers/api/v1/repo/issue_subscription.go +++ b/routers/api/v1/repo/issue_subscription.go @@ -128,8 +128,8 @@ func setIssueSubscription(ctx *context.APIContext, watch bool) { } // only admin and user for itself can change subscription - if user.ID != ctx.User.ID && !ctx.User.IsAdmin { - ctx.Error(http.StatusForbidden, "User", fmt.Errorf("%s is not permitted to change subscriptions for %s", ctx.User.Name, user.Name)) + if user.ID != ctx.Doer.ID 
&& !ctx.Doer.IsAdmin { + ctx.Error(http.StatusForbidden, "User", fmt.Errorf("%s is not permitted to change subscriptions for %s", ctx.Doer.Name, user.Name)) return } @@ -197,7 +197,7 @@ func CheckIssueSubscription(ctx *context.APIContext) { return } - watching, err := models.CheckIssueWatch(ctx.User, issue) + watching, err := models.CheckIssueWatch(ctx.Doer, issue) if err != nil { ctx.InternalServerError(err) return @@ -281,7 +281,7 @@ func GetIssueSubscribers(ctx *context.APIContext) { } apiUsers := make([]*api.User, 0, len(users)) for _, v := range users { - apiUsers = append(apiUsers, convert.ToUser(v, ctx.User)) + apiUsers = append(apiUsers, convert.ToUser(v, ctx.Doer)) } count, err := models.CountIssueWatchers(issue.ID) diff --git a/routers/api/v1/repo/issue_tracked_time.go b/routers/api/v1/repo/issue_tracked_time.go index 79ba59996c..8ccad87838 100644 --- a/routers/api/v1/repo/issue_tracked_time.go +++ b/routers/api/v1/repo/issue_tracked_time.go @@ -10,6 +10,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" @@ -103,18 +104,18 @@ func ListTrackedTimes(ctx *context.APIContext) { opts.UserID = user.ID } - if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil { + if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = context.GetQueryBeforeSince(ctx.Context); err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return } - cantSetUser := !ctx.User.IsAdmin && - opts.UserID != ctx.User.ID && + cantSetUser := !ctx.Doer.IsAdmin && + opts.UserID != ctx.Doer.ID && !ctx.IsUserRepoWriter([]unit.Type{unit.TypeIssues}) if cantSetUser { if opts.UserID == 0 { - opts.UserID = ctx.User.ID + opts.UserID = ctx.Doer.ID } else { ctx.Error(http.StatusForbidden, "", fmt.Errorf("query by user not allowed; not enough rights")) return @@ -189,7 +190,7 @@ func AddTime(ctx *context.APIContext) { return } - if !ctx.Repo.CanUseTimetracker(issue, ctx.User) { + if !ctx.Repo.CanUseTimetracker(issue, ctx.Doer) { if !ctx.Repo.Repository.IsTimetrackerEnabled() { ctx.Error(http.StatusBadRequest, "", "time tracking disabled") return @@ -198,9 +199,9 @@ func AddTime(ctx *context.APIContext) { return } - user := ctx.User + user := ctx.Doer if form.User != "" { - if (ctx.IsUserRepoAdmin() && ctx.User.Name != form.User) || ctx.User.IsAdmin { + if (ctx.IsUserRepoAdmin() && ctx.Doer.Name != form.User) || ctx.Doer.IsAdmin { // allow only RepoAdmin, Admin and User to add time user, err = user_model.GetUserByName(form.User) if err != nil { @@ -270,7 +271,7 @@ func ResetIssueTime(ctx *context.APIContext) { return } - if !ctx.Repo.CanUseTimetracker(issue, ctx.User) { + if !ctx.Repo.CanUseTimetracker(issue, ctx.Doer) { if !ctx.Repo.Repository.IsTimetrackerEnabled() { ctx.JSON(http.StatusBadRequest, struct{ Message string }{Message: "time tracking disabled"}) return @@ -279,16 +280,16 @@ func ResetIssueTime(ctx *context.APIContext) { return } - err = models.DeleteIssueUserTimes(issue, ctx.User) + err = models.DeleteIssueUserTimes(issue, ctx.Doer) if err != nil { - if models.IsErrNotExist(err) { + if db.IsErrNotExist(err) { ctx.Error(http.StatusNotFound, "DeleteIssueUserTimes", err) } else { ctx.Error(http.StatusInternalServerError, "DeleteIssueUserTimes", err) } return } - ctx.Status(204) + ctx.Status(http.StatusNoContent) } // DeleteTime delete a specific time by id @@ -341,7 +342,7 @@ func DeleteTime(ctx 
*context.APIContext) { return } - if !ctx.Repo.CanUseTimetracker(issue, ctx.User) { + if !ctx.Repo.CanUseTimetracker(issue, ctx.Doer) { if !ctx.Repo.Repository.IsTimetrackerEnabled() { ctx.JSON(http.StatusBadRequest, struct{ Message string }{Message: "time tracking disabled"}) return @@ -352,7 +353,7 @@ func DeleteTime(ctx *context.APIContext) { time, err := models.GetTrackedTimeByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrNotExist(err) { + if db.IsErrNotExist(err) { ctx.NotFound(err) return } @@ -364,7 +365,7 @@ func DeleteTime(ctx *context.APIContext) { return } - if !ctx.User.IsAdmin && time.UserID != ctx.User.ID { + if !ctx.Doer.IsAdmin && time.UserID != ctx.Doer.ID { // Only Admin and User itself can delete their time ctx.Status(http.StatusForbidden) return @@ -428,7 +429,7 @@ func ListTrackedTimesByUser(ctx *context.APIContext) { return } - if !ctx.IsUserRepoAdmin() && !ctx.User.IsAdmin && ctx.User.ID != user.ID { + if !ctx.IsUserRepoAdmin() && !ctx.Doer.IsAdmin && ctx.Doer.ID != user.ID { ctx.Error(http.StatusForbidden, "", fmt.Errorf("query by user not allowed; not enough rights")) return } @@ -522,18 +523,18 @@ func ListTrackedTimesByRepository(ctx *context.APIContext) { } var err error - if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil { + if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = context.GetQueryBeforeSince(ctx.Context); err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return } - cantSetUser := !ctx.User.IsAdmin && - opts.UserID != ctx.User.ID && + cantSetUser := !ctx.Doer.IsAdmin && + opts.UserID != ctx.Doer.ID && !ctx.IsUserRepoWriter([]unit.Type{unit.TypeIssues}) if cantSetUser { if opts.UserID == 0 { - opts.UserID = ctx.User.ID + opts.UserID = ctx.Doer.ID } else { ctx.Error(http.StatusForbidden, "", fmt.Errorf("query by user not allowed; not enough rights")) return @@ -593,11 +594,11 @@ func ListMyTrackedTimes(ctx *context.APIContext) { opts := &models.FindTrackedTimesOptions{ ListOptions: utils.GetListOptions(ctx), - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, } var err error - if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = utils.GetQueryBeforeSince(ctx); err != nil { + if opts.CreatedBeforeUnix, opts.CreatedAfterUnix, err = context.GetQueryBeforeSince(ctx.Context); err != nil { ctx.Error(http.StatusUnprocessableEntity, "GetQueryBeforeSince", err) return } diff --git a/routers/api/v1/repo/key.go b/routers/api/v1/repo/key.go index 669cc7c51c..0c780eb97d 100644 --- a/routers/api/v1/repo/key.go +++ b/routers/api/v1/repo/key.go @@ -87,7 +87,7 @@ func ListDeployKeys(ctx *context.APIContext) { Fingerprint: ctx.FormString("fingerprint"), } - keys, err := asymkey_model.ListDeployKeys(db.DefaultContext, opts) + keys, err := asymkey_model.ListDeployKeys(ctx, opts) if err != nil { ctx.InternalServerError(err) return @@ -107,7 +107,7 @@ func ListDeployKeys(ctx *context.APIContext) { return } apiKeys[i] = convert.ToDeployKey(apiLink, keys[i]) - if ctx.User.IsAdmin || ((ctx.Repo.Repository.ID == keys[i].RepoID) && (ctx.User.ID == ctx.Repo.Owner.ID)) { + if ctx.Doer.IsAdmin || ((ctx.Repo.Repository.ID == keys[i].RepoID) && (ctx.Doer.ID == ctx.Repo.Owner.ID)) { apiKeys[i], _ = appendPrivateInformation(apiKeys[i], keys[i], ctx.Repo.Repository) } } @@ -144,7 +144,7 @@ func GetDeployKey(ctx *context.APIContext) { // "200": // "$ref": "#/responses/DeployKey" - key, err := asymkey_model.GetDeployKeyByID(db.DefaultContext, ctx.ParamsInt64(":id")) + key, err := 
asymkey_model.GetDeployKeyByID(ctx, ctx.ParamsInt64(":id")) if err != nil { if asymkey_model.IsErrDeployKeyNotExist(err) { ctx.NotFound() @@ -161,7 +161,7 @@ func GetDeployKey(ctx *context.APIContext) { apiLink := composeDeployKeysAPILink(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name) apiKey := convert.ToDeployKey(apiLink, key) - if ctx.User.IsAdmin || ((ctx.Repo.Repository.ID == key.RepoID) && (ctx.User.ID == ctx.Repo.Owner.ID)) { + if ctx.Doer.IsAdmin || ((ctx.Repo.Repository.ID == key.RepoID) && (ctx.Doer.ID == ctx.Repo.Owner.ID)) { apiKey, _ = appendPrivateInformation(apiKey, key, ctx.Repo.Repository) } ctx.JSON(http.StatusOK, apiKey) @@ -270,7 +270,7 @@ func DeleteDeploykey(ctx *context.APIContext) { // "403": // "$ref": "#/responses/forbidden" - if err := asymkey_service.DeleteDeployKey(ctx.User, ctx.ParamsInt64(":id")); err != nil { + if err := asymkey_service.DeleteDeployKey(ctx.Doer, ctx.ParamsInt64(":id")); err != nil { if asymkey_model.IsErrKeyAccessDenied(err) { ctx.Error(http.StatusForbidden, "", "You do not have access to this key") } else { diff --git a/routers/api/v1/repo/label.go b/routers/api/v1/repo/label.go index 67682fc60d..ab559a2eed 100644 --- a/routers/api/v1/repo/label.go +++ b/routers/api/v1/repo/label.go @@ -161,7 +161,7 @@ func CreateLabel(ctx *context.APIContext) { RepoID: ctx.Repo.Repository.ID, Description: form.Description, } - if err := models.NewLabel(label); err != nil { + if err := models.NewLabel(ctx, label); err != nil { ctx.Error(http.StatusInternalServerError, "NewLabel", err) return } diff --git a/routers/api/v1/repo/language.go b/routers/api/v1/repo/language.go index 427a8fd6b5..f47b0a0e78 100644 --- a/routers/api/v1/repo/language.go +++ b/routers/api/v1/repo/language.go @@ -76,9 +76,7 @@ func GetLanguages(ctx *context.APIContext) { } resp := make(languageResponse, len(langs)) - for i, v := range langs { - resp[i] = v - } + copy(resp, langs) ctx.JSON(http.StatusOK, resp) } diff --git a/routers/api/v1/repo/main_test.go b/routers/api/v1/repo/main_test.go index f9ed886999..1f91a24937 100644 --- a/routers/api/v1/repo/main_test.go +++ b/routers/api/v1/repo/main_test.go @@ -9,8 +9,15 @@ import ( "testing" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/setting" + webhook_service "code.gitea.io/gitea/services/webhook" ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..", "..")) + setting.LoadForTest() + setting.NewQueueService() + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", "..", ".."), + SetUp: webhook_service.Init, + }) } diff --git a/routers/api/v1/repo/migrate.go b/routers/api/v1/repo/migrate.go index 26da835546..f5851bfcae 100644 --- a/routers/api/v1/repo/migrate.go +++ b/routers/api/v1/repo/migrate.go @@ -13,6 +13,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -51,6 +52,8 @@ func Migrate(ctx *context.APIContext) { // "$ref": "#/responses/Repository" // "403": // "$ref": "#/responses/forbidden" + // "409": + // description: The repository with the same name already exists. 
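// A minimal sketch of the context-threading pattern applied to the deploy-key routes
// above: model lookups such as GetDeployKeyByID now receive the request's ctx instead
// of db.DefaultContext, presumably so cancellation and any surrounding database
// session are honored. The internal-error branch is condensed from the handler and its
// error key is assumed, since the hunk above truncates that branch.
key, err := asymkey_model.GetDeployKeyByID(ctx, ctx.ParamsInt64(":id"))
if err != nil {
	if asymkey_model.IsErrDeployKeyNotExist(err) {
		ctx.NotFound()
		return
	}
	ctx.Error(http.StatusInternalServerError, "GetDeployKeyByID", err) // assumed label
	return
}
_ = key // the handler then converts the key and appends private data for admins/owners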
// "422": // "$ref": "#/responses/validationError" @@ -66,7 +69,7 @@ func Migrate(ctx *context.APIContext) { } else if form.RepoOwnerID != 0 { repoOwner, err = user_model.GetUserByID(form.RepoOwnerID) } else { - repoOwner = ctx.User + repoOwner = ctx.Doer } if err != nil { if user_model.IsErrUserNotExist(err) { @@ -82,15 +85,15 @@ func Migrate(ctx *context.APIContext) { return } - if !ctx.User.IsAdmin { - if !repoOwner.IsOrganization() && ctx.User.ID != repoOwner.ID { + if !ctx.Doer.IsAdmin { + if !repoOwner.IsOrganization() && ctx.Doer.ID != repoOwner.ID { ctx.Error(http.StatusForbidden, "", "Given user is not an organization.") return } if repoOwner.IsOrganization() { // Check ownership of organization. - isOwner, err := models.OrgFromUser(repoOwner).IsOwnedBy(ctx.User.ID) + isOwner, err := organization.OrgFromUser(repoOwner).IsOwnedBy(ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOwnedBy", err) return @@ -103,7 +106,7 @@ func Migrate(ctx *context.APIContext) { remoteAddr, err := forms.ParseRemoteAddr(form.CloneAddr, form.AuthUsername, form.AuthPassword) if err == nil { - err = migrations.IsMigrateURLAllowed(remoteAddr, ctx.User) + err = migrations.IsMigrateURLAllowed(remoteAddr, ctx.Doer) } if err != nil { handleRemoteAddrError(ctx, err) @@ -130,7 +133,7 @@ func Migrate(ctx *context.APIContext) { ctx.Error(http.StatusInternalServerError, "", ctx.Tr("repo.migrate.invalid_lfs_endpoint")) return } - err = migrations.IsMigrateURLAllowed(ep.String(), ctx.User) + err = migrations.IsMigrateURLAllowed(ep.String(), ctx.Doer) if err != nil { handleRemoteAddrError(ctx, err) return @@ -167,7 +170,7 @@ func Migrate(ctx *context.APIContext) { opts.Releases = false } - repo, err := repo_module.CreateRepository(ctx.User, repoOwner, models.CreateRepoOptions{ + repo, err := repo_module.CreateRepository(ctx.Doer, repoOwner, models.CreateRepoOptions{ Name: opts.RepoName, Description: opts.Description, OriginalURL: form.CloneAddr, @@ -192,18 +195,18 @@ func Migrate(ctx *context.APIContext) { } if err == nil { - notification.NotifyMigrateRepository(ctx.User, repoOwner, repo) + notification.NotifyMigrateRepository(ctx.Doer, repoOwner, repo) return } if repo != nil { - if errDelete := models.DeleteRepository(ctx.User, repoOwner.ID, repo.ID); errDelete != nil { + if errDelete := models.DeleteRepository(ctx.Doer, repoOwner.ID, repo.ID); errDelete != nil { log.Error("DeleteRepository: %v", errDelete) } } }() - if repo, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), ctx.User, repoOwner.Name, opts, nil); err != nil { + if repo, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), ctx.Doer, repoOwner.Name, opts, nil); err != nil { handleMigrateError(ctx, repoOwner, remoteAddr, err) return } @@ -235,7 +238,7 @@ func handleMigrateError(ctx *context.APIContext, repoOwner *user_model.User, rem case base.IsErrNotSupported(err): ctx.Error(http.StatusUnprocessableEntity, "", err) default: - err = util.NewStringURLSanitizedError(err, remoteAddr, true) + err = util.SanitizeErrorCredentialURLs(err) if strings.Contains(err.Error(), "Authentication failed") || strings.Contains(err.Error(), "Bad credentials") || strings.Contains(err.Error(), "could not read Username") { diff --git a/routers/api/v1/repo/milestone.go b/routers/api/v1/repo/milestone.go index 3b4b85158c..ce6aa7f46f 100644 --- a/routers/api/v1/repo/milestone.go +++ b/routers/api/v1/repo/milestone.go @@ -10,7 +10,7 @@ import ( "strconv" "time" - "code.gitea.io/gitea/models" + issues_model 
"code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" api "code.gitea.io/gitea/modules/structs" @@ -57,7 +57,7 @@ func ListMilestones(ctx *context.APIContext) { // "200": // "$ref": "#/responses/MilestoneList" - milestones, total, err := models.GetMilestones(models.GetMilestonesOption{ + milestones, total, err := issues_model.GetMilestones(issues_model.GetMilestonesOption{ ListOptions: utils.GetListOptions(ctx), RepoID: ctx.Repo.Repository.ID, State: api.StateType(ctx.FormString("state")), @@ -146,7 +146,7 @@ func CreateMilestone(ctx *context.APIContext) { form.Deadline = &defaultDeadline } - milestone := &models.Milestone{ + milestone := &issues_model.Milestone{ RepoID: ctx.Repo.Repository.ID, Name: form.Title, Content: form.Description, @@ -158,7 +158,7 @@ func CreateMilestone(ctx *context.APIContext) { milestone.ClosedDateUnix = timeutil.TimeStampNow() } - if err := models.NewMilestone(milestone); err != nil { + if err := issues_model.NewMilestone(milestone); err != nil { ctx.Error(http.StatusInternalServerError, "NewMilestone", err) return } @@ -218,7 +218,7 @@ func EditMilestone(ctx *context.APIContext) { milestone.IsClosed = *form.State == string(api.StateClosed) } - if err := models.UpdateMilestone(milestone, oldIsClosed); err != nil { + if err := issues_model.UpdateMilestone(milestone, oldIsClosed); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateMilestone", err) return } @@ -255,7 +255,7 @@ func DeleteMilestone(ctx *context.APIContext) { return } - if err := models.DeleteMilestoneByRepoID(ctx.Repo.Repository.ID, m.ID); err != nil { + if err := issues_model.DeleteMilestoneByRepoID(ctx.Repo.Repository.ID, m.ID); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteMilestoneByRepoID", err) return } @@ -263,23 +263,23 @@ func DeleteMilestone(ctx *context.APIContext) { } // getMilestoneByIDOrName get milestone by ID and if not available by name -func getMilestoneByIDOrName(ctx *context.APIContext) *models.Milestone { +func getMilestoneByIDOrName(ctx *context.APIContext) *issues_model.Milestone { mile := ctx.Params(":id") mileID, _ := strconv.ParseInt(mile, 0, 64) if mileID != 0 { - milestone, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, mileID) + milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, mileID) if err == nil { return milestone - } else if !models.IsErrMilestoneNotExist(err) { + } else if !issues_model.IsErrMilestoneNotExist(err) { ctx.Error(http.StatusInternalServerError, "GetMilestoneByRepoID", err) return nil } } - milestone, err := models.GetMilestoneByRepoIDANDName(ctx.Repo.Repository.ID, mile) + milestone, err := issues_model.GetMilestoneByRepoIDANDName(ctx.Repo.Repository.ID, mile) if err != nil { - if models.IsErrMilestoneNotExist(err) { + if issues_model.IsErrMilestoneNotExist(err) { ctx.NotFound() return nil } diff --git a/routers/api/v1/repo/mirror.go b/routers/api/v1/repo/mirror.go index c9ac3e8292..d7facd24d9 100644 --- a/routers/api/v1/repo/mirror.go +++ b/routers/api/v1/repo/mirror.go @@ -5,8 +5,10 @@ package repo import ( + "errors" "net/http" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" @@ -48,6 +50,15 @@ func MirrorSync(ctx *context.APIContext) { return } + if _, err := repo_model.GetMirrorByRepoID(repo.ID); err != nil { + if errors.Is(err, repo_model.ErrMirrorNotExist) { + ctx.Error(http.StatusBadRequest, "MirrorSync", 
"Repository is not a mirror") + return + } + ctx.Error(http.StatusInternalServerError, "MirrorSync", err) + return + } + mirror_service.StartToMirror(repo.ID) ctx.Status(http.StatusOK) diff --git a/routers/api/v1/repo/notes.go b/routers/api/v1/repo/notes.go index edcb27b1ec..bd8e27e40b 100644 --- a/routers/api/v1/repo/notes.go +++ b/routers/api/v1/repo/notes.go @@ -55,15 +55,13 @@ func GetNote(ctx *context.APIContext) { } func getNote(ctx *context.APIContext, identifier string) { - gitRepo, err := git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.RepoPath()) - if err != nil { - ctx.Error(http.StatusInternalServerError, "OpenRepository", err) + if ctx.Repo.GitRepo == nil { + ctx.InternalServerError(fmt.Errorf("no open git repo")) return } - defer gitRepo.Close() + var note git.Note - err = git.GetNote(ctx, gitRepo, identifier, ¬e) - if err != nil { + if err := git.GetNote(ctx, ctx.Repo.GitRepo, identifier, ¬e); err != nil { if git.IsErrNotExist(err) { ctx.NotFound(identifier) return @@ -72,7 +70,7 @@ func getNote(ctx *context.APIContext, identifier string) { return } - cmt, err := convert.ToCommit(ctx.Repo.Repository, gitRepo, note.Commit, nil) + cmt, err := convert.ToCommit(ctx.Repo.Repository, ctx.Repo.GitRepo, note.Commit, nil) if err != nil { ctx.Error(http.StatusInternalServerError, "ToCommit", err) return diff --git a/routers/api/v1/repo/patch.go b/routers/api/v1/repo/patch.go index 64a7a32d16..6dbf979701 100644 --- a/routers/api/v1/repo/patch.go +++ b/routers/api/v1/repo/patch.go @@ -77,15 +77,15 @@ func ApplyDiffPatch(ctx *context.APIContext) { opts.Message = "apply-patch" } - if !canWriteFiles(ctx.Repo) { + if !canWriteFiles(ctx, apiOpts.BranchName) { ctx.Error(http.StatusInternalServerError, "ApplyPatch", models.ErrUserDoesNotHaveAccessToRepo{ - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, RepoName: ctx.Repo.Repository.LowerName, }) return } - fileResponse, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.User, opts) + fileResponse, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, opts) if err != nil { if models.IsErrUserCannotCommit(err) || models.IsErrFilePathProtected(err) { ctx.Error(http.StatusForbidden, "Access", err) diff --git a/routers/api/v1/repo/pull.go b/routers/api/v1/repo/pull.go index a494cb06cc..f95bc6b16b 100644 --- a/routers/api/v1/repo/pull.go +++ b/routers/api/v1/repo/pull.go @@ -14,6 +14,8 @@ import ( "time" "code.gitea.io/gitea/models" + issues_model "code.gitea.io/gitea/models/issues" + pull_model "code.gitea.io/gitea/models/pull" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -27,6 +29,7 @@ import ( "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/api/v1/utils" asymkey_service "code.gitea.io/gitea/services/asymkey" + "code.gitea.io/gitea/services/automerge" "code.gitea.io/gitea/services/forms" issue_service "code.gitea.io/gitea/services/issue" pull_service "code.gitea.io/gitea/services/pull" @@ -110,15 +113,15 @@ func ListPullRequests(ctx *context.APIContext) { ctx.Error(http.StatusInternalServerError, "LoadAttributes", err) return } - if err = prs[i].LoadBaseRepo(); err != nil { + if err = prs[i].LoadBaseRepoCtx(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadBaseRepo", err) return } - if err = prs[i].LoadHeadRepo(); err != nil { + if err = prs[i].LoadHeadRepoCtx(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadHeadRepo", err) return } - apiPrs[i] = convert.ToAPIPullRequest(ctx, prs[i], ctx.User) + apiPrs[i] = 
convert.ToAPIPullRequest(ctx, prs[i], ctx.Doer) } ctx.SetLinkHeader(int(maxResults), listOptions.PageSize) @@ -166,15 +169,15 @@ func GetPullRequest(ctx *context.APIContext) { return } - if err = pr.LoadBaseRepo(); err != nil { + if err = pr.LoadBaseRepoCtx(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadBaseRepo", err) return } - if err = pr.LoadHeadRepo(); err != nil { + if err = pr.LoadHeadRepoCtx(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadHeadRepo", err) return } - ctx.JSON(http.StatusOK, convert.ToAPIPullRequest(ctx, pr, ctx.User)) + ctx.JSON(http.StatusOK, convert.ToAPIPullRequest(ctx, pr, ctx.Doer)) } // DownloadPullDiffOrPatch render a pull's raw diff or patch @@ -342,9 +345,9 @@ func CreatePullRequest(ctx *context.APIContext) { } if form.Milestone > 0 { - milestone, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, form.Milestone) + milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, form.Milestone) if err != nil { - if models.IsErrMilestoneNotExist(err) { + if issues_model.IsErrMilestoneNotExist(err) { ctx.NotFound() } else { ctx.Error(http.StatusInternalServerError, "GetMilestoneByRepoID", err) @@ -363,8 +366,8 @@ func CreatePullRequest(ctx *context.APIContext) { prIssue := &models.Issue{ RepoID: repo.ID, Title: form.Title, - PosterID: ctx.User.ID, - Poster: ctx.User, + PosterID: ctx.Doer.ID, + Poster: ctx.Doer, MilestoneID: milestoneID, IsPull: true, Content: form.Body, @@ -420,7 +423,7 @@ func CreatePullRequest(ctx *context.APIContext) { } log.Trace("Pull request created: %d/%d", repo.ID, prIssue.ID) - ctx.JSON(http.StatusCreated, convert.ToAPIPullRequest(ctx, pr, ctx.User)) + ctx.JSON(http.StatusCreated, convert.ToAPIPullRequest(ctx, pr, ctx.Doer)) } // EditPullRequest does what it says @@ -484,7 +487,7 @@ func EditPullRequest(ctx *context.APIContext) { issue := pr.Issue issue.Repo = ctx.Repo.Repository - if !issue.IsPoster(ctx.User.ID) && !ctx.Repo.CanWrite(unit.TypePullRequests) { + if !issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWrite(unit.TypePullRequests) { ctx.Status(http.StatusForbidden) return } @@ -506,7 +509,7 @@ func EditPullRequest(ctx *context.APIContext) { deadlineUnix = timeutil.TimeStamp(deadline.Unix()) } - if err := models.UpdateIssueDeadline(issue, deadlineUnix, ctx.User); err != nil { + if err := models.UpdateIssueDeadline(issue, deadlineUnix, ctx.Doer); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateIssueDeadline", err) return } @@ -522,7 +525,7 @@ func EditPullRequest(ctx *context.APIContext) { // Send an empty array ([]) to clear all assignees from the Issue. 
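// Condensed sketch of the milestone resolution in CreatePullRequest above, after the
// move from the models package to models/issues: the lookup now takes the request
// context as its first argument and the not-exist case is detected with the
// issues_model error helper. The trailing assignment into milestoneID is inferred from
// the later "MilestoneID: milestoneID" field of the created issue.
var milestoneID int64
if form.Milestone > 0 {
	milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, form.Milestone)
	if err != nil {
		if issues_model.IsErrMilestoneNotExist(err) {
			ctx.NotFound()
		} else {
			ctx.Error(http.StatusInternalServerError, "GetMilestoneByRepoID", err)
		}
		return
	}
	milestoneID = milestone.ID // inferred; not shown verbatim in the hunk above
}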
if ctx.Repo.CanWrite(unit.TypePullRequests) && (form.Assignees != nil || len(form.Assignee) > 0) { - err = issue_service.UpdateAssignees(issue, form.Assignee, form.Assignees, ctx.User) + err = issue_service.UpdateAssignees(issue, form.Assignee, form.Assignees, ctx.Doer) if err != nil { if user_model.IsErrUserNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "", fmt.Sprintf("Assignee does not exist: [name: %s]", err)) @@ -537,7 +540,7 @@ func EditPullRequest(ctx *context.APIContext) { issue.MilestoneID != form.Milestone { oldMilestoneID := issue.MilestoneID issue.MilestoneID = form.Milestone - if err = issue_service.ChangeMilestoneAssign(issue, ctx.User, oldMilestoneID); err != nil { + if err = issue_service.ChangeMilestoneAssign(issue, ctx.Doer, oldMilestoneID); err != nil { ctx.Error(http.StatusInternalServerError, "ChangeMilestoneAssign", err) return } @@ -560,7 +563,7 @@ func EditPullRequest(ctx *context.APIContext) { labels = append(labels, orgLabels...) } - if err = issue.ReplaceLabels(labels, ctx.User); err != nil { + if err = models.ReplaceIssueLabels(issue, labels, ctx.Doer); err != nil { ctx.Error(http.StatusInternalServerError, "ReplaceLabelsError", err) return } @@ -573,7 +576,7 @@ func EditPullRequest(ctx *context.APIContext) { } issue.IsClosed = api.StateClosed == api.StateType(*form.State) } - statusChangeComment, titleChanged, err := models.UpdateIssueByAPI(issue, ctx.User) + statusChangeComment, titleChanged, err := models.UpdateIssueByAPI(issue, ctx.Doer) if err != nil { if models.IsErrDependenciesLeft(err) { ctx.Error(http.StatusPreconditionFailed, "DependenciesLeft", "cannot close this pull request because it still has open dependencies") @@ -584,11 +587,11 @@ func EditPullRequest(ctx *context.APIContext) { } if titleChanged { - notification.NotifyIssueChangeTitle(ctx.User, issue, oldTitle) + notification.NotifyIssueChangeTitle(ctx.Doer, issue, oldTitle) } if statusChangeComment != nil { - notification.NotifyIssueChangeStatus(ctx.User, issue, statusChangeComment, issue.IsClosed) + notification.NotifyIssueChangeStatus(ctx.Doer, issue, statusChangeComment, issue.IsClosed) } // change pull target branch @@ -597,7 +600,7 @@ func EditPullRequest(ctx *context.APIContext) { ctx.Error(http.StatusNotFound, "NewBaseBranchNotExist", fmt.Errorf("new base '%s' not exist", form.Base)) return } - if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.User, form.Base); err != nil { + if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.Doer, form.Base); err != nil { if models.IsErrPullRequestAlreadyExists(err) { ctx.Error(http.StatusConflict, "IsErrPullRequestAlreadyExists", err) return @@ -612,7 +615,19 @@ func EditPullRequest(ctx *context.APIContext) { } return } - notification.NotifyPullRequestChangeTargetBranch(ctx.User, pr, form.Base) + notification.NotifyPullRequestChangeTargetBranch(ctx.Doer, pr, form.Base) + } + + // update allow edits + if form.AllowMaintainerEdit != nil { + if err := pull_service.SetAllowEdits(ctx, ctx.Doer, pr, *form.AllowMaintainerEdit); err != nil { + if errors.Is(pull_service.ErrUserHasNoPermissionForAction, err) { + ctx.Error(http.StatusForbidden, "SetAllowEdits", fmt.Sprintf("SetAllowEdits: %s", err)) + return + } + ctx.ServerError("SetAllowEdits", err) + return + } } // Refetch from database @@ -627,7 +642,7 @@ func EditPullRequest(ctx *context.APIContext) { } // TODO this should be 200, not 201 - ctx.JSON(http.StatusCreated, convert.ToAPIPullRequest(ctx, pr, ctx.User)) + ctx.JSON(http.StatusCreated, convert.ToAPIPullRequest(ctx, pr, 
ctx.Doer)) } // IsPullRequestMerged checks if a PR exists given an index @@ -713,7 +728,8 @@ func MergePullRequest(ctx *context.APIContext) { // "$ref": "#/responses/error" form := web.GetForm(ctx).(*forms.MergePullRequestForm) - pr, err := models.GetPullRequestByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) + + pr, err := models.GetPullRequestByIndexCtx(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) if err != nil { if models.IsErrPullRequestNotExist(err) { ctx.NotFound("GetPullRequestByIndex", err) @@ -723,13 +739,12 @@ func MergePullRequest(ctx *context.APIContext) { return } - if err = pr.LoadHeadRepo(); err != nil { + if err := pr.LoadHeadRepoCtx(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadHeadRepo", err) return } - err = pr.LoadIssue() - if err != nil { + if err := pr.LoadIssueCtx(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadIssue", err) return } @@ -737,35 +752,40 @@ func MergePullRequest(ctx *context.APIContext) { if ctx.IsSigned { // Update issue-user. - if err = pr.Issue.ReadBy(ctx.User.ID); err != nil { + if err = pr.Issue.ReadBy(ctx, ctx.Doer.ID); err != nil { ctx.Error(http.StatusInternalServerError, "ReadBy", err) return } } - if pr.Issue.IsClosed { - ctx.NotFound() - return - } + manuallMerge := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged + force := form.ForceMerge != nil && *form.ForceMerge - allowedMerge, err := pull_service.IsUserAllowedToMerge(pr, ctx.Repo.Permission, ctx.User) - if err != nil { - ctx.Error(http.StatusInternalServerError, "IsUSerAllowedToMerge", err) - return - } - if !allowedMerge { - ctx.Error(http.StatusMethodNotAllowed, "Merge", "User not allowed to merge PR") - return - } - - if pr.HasMerged { - ctx.Error(http.StatusMethodNotAllowed, "PR already merged", "") + // start with merging by checking + if err := pull_service.CheckPullMergable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, manuallMerge, force); err != nil { + if errors.Is(err, pull_service.ErrIsClosed) { + ctx.NotFound() + } else if errors.Is(err, pull_service.ErrUserNotAllowedToMerge) { + ctx.Error(http.StatusMethodNotAllowed, "Merge", "User not allowed to merge PR") + } else if errors.Is(err, pull_service.ErrHasMerged) { + ctx.Error(http.StatusMethodNotAllowed, "PR already merged", "") + } else if errors.Is(err, pull_service.ErrIsWorkInProgress) { + ctx.Error(http.StatusMethodNotAllowed, "PR is a work in progress", "Work in progress PRs cannot be merged") + } else if errors.Is(err, pull_service.ErrNotMergableState) { + ctx.Error(http.StatusMethodNotAllowed, "PR not in mergeable state", "Please try again later") + } else if models.IsErrDisallowedToMerge(err) { + ctx.Error(http.StatusMethodNotAllowed, "PR is not ready to be merged", err) + } else if asymkey_service.IsErrWontSign(err) { + ctx.Error(http.StatusMethodNotAllowed, fmt.Sprintf("Protected branch %s requires signed commits but this merge would not be signed", pr.BaseBranch), err) + } else { + ctx.InternalServerError(err) + } return } // handle manually-merged mark - if repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged { - if err = pull_service.MergedManually(pr, ctx.User, ctx.Repo.GitRepo, form.MergeCommitID); err != nil { + if manuallMerge { + if err := pull_service.MergedManually(pr, ctx.Doer, ctx.Repo.GitRepo, form.MergeCommitID); err != nil { if models.IsErrInvalidMergeStyle(err) { ctx.Error(http.StatusMethodNotAllowed, "Invalid merge style", fmt.Errorf("%s is not allowed an allowed merge style for this repository", 
repo_model.MergeStyle(form.Do))) return @@ -781,54 +801,16 @@ func MergePullRequest(ctx *context.APIContext) { return } - if !pr.CanAutoMerge() { - ctx.Error(http.StatusMethodNotAllowed, "PR not in mergeable state", "Please try again later") - return - } - - if pr.IsWorkInProgress() { - ctx.Error(http.StatusMethodNotAllowed, "PR is a work in progress", "Work in progress PRs cannot be merged") - return - } - - if err := pull_service.CheckPRReadyToMerge(ctx, pr, false); err != nil { - if !models.IsErrNotAllowedToMerge(err) { - ctx.Error(http.StatusInternalServerError, "CheckPRReadyToMerge", err) - return - } - if form.ForceMerge != nil && *form.ForceMerge { - if isRepoAdmin, err := models.IsUserRepoAdmin(pr.BaseRepo, ctx.User); err != nil { - ctx.Error(http.StatusInternalServerError, "IsUserRepoAdmin", err) - return - } else if !isRepoAdmin { - ctx.Error(http.StatusMethodNotAllowed, "Merge", "Only repository admin can merge if not all checks are ok (force merge)") - } - } else { - ctx.Error(http.StatusMethodNotAllowed, "PR is not ready to be merged", err) - return - } - } - - if _, err := pull_service.IsSignedIfRequired(ctx, pr, ctx.User); err != nil { - if !asymkey_service.IsErrWontSign(err) { - ctx.Error(http.StatusInternalServerError, "IsSignedIfRequired", err) - return - } - ctx.Error(http.StatusMethodNotAllowed, fmt.Sprintf("Protected branch %s requires signed commits but this merge would not be signed", pr.BaseBranch), err) - return - } - if len(form.Do) == 0 { form.Do = string(repo_model.MergeStyleMerge) } message := strings.TrimSpace(form.MergeTitleField) if len(message) == 0 { - if repo_model.MergeStyle(form.Do) == repo_model.MergeStyleMerge { - message = pr.GetDefaultMergeMessage() - } - if repo_model.MergeStyle(form.Do) == repo_model.MergeStyleSquash { - message = pr.GetDefaultSquashMessage() + message, err = pull_service.GetDefaultMergeMessage(ctx.Repo.GitRepo, pr, repo_model.MergeStyle(form.Do)) + if err != nil { + ctx.Error(http.StatusInternalServerError, "GetDefaultMergeMessage", err) + return } } @@ -837,10 +819,25 @@ func MergePullRequest(ctx *context.APIContext) { message += "\n\n" + form.MergeMessageField } - if err := pull_service.Merge(ctx, pr, ctx.User, ctx.Repo.GitRepo, repo_model.MergeStyle(form.Do), form.HeadCommitID, message); err != nil { + if form.MergeWhenChecksSucceed { + scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message) + if err != nil { + if pull_model.IsErrAlreadyScheduledToAutoMerge(err) { + ctx.Error(http.StatusConflict, "ScheduleAutoMerge", err) + return + } + ctx.Error(http.StatusInternalServerError, "ScheduleAutoMerge", err) + return + } else if scheduled { + // nothing more to do ... 
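// A condensed sketch of the auto-merge path introduced in MergePullRequest: when the
// merge form sets MergeWhenChecksSucceed, the handler asks the automerge service to
// queue the merge instead of performing it right away, answering 409 if a merge is
// already scheduled and 201 once scheduling succeeds; only when nothing was scheduled
// does it fall through to the immediate pull_service.Merge call shown further below.
if form.MergeWhenChecksSucceed {
	scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message)
	if err != nil {
		if pull_model.IsErrAlreadyScheduledToAutoMerge(err) {
			ctx.Error(http.StatusConflict, "ScheduleAutoMerge", err)
			return
		}
		ctx.Error(http.StatusInternalServerError, "ScheduleAutoMerge", err)
		return
	}
	if scheduled {
		ctx.Status(http.StatusCreated) // scheduled for later; nothing merged yet
		return
	}
}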
+ ctx.Status(http.StatusCreated) + return + } + } + + if err := pull_service.Merge(ctx, pr, ctx.Doer, ctx.Repo.GitRepo, repo_model.MergeStyle(form.Do), form.HeadCommitID, message); err != nil { if models.IsErrInvalidMergeStyle(err) { ctx.Error(http.StatusMethodNotAllowed, "Invalid merge style", fmt.Errorf("%s is not allowed an allowed merge style for this repository", repo_model.MergeStyle(form.Do))) - return } else if models.IsErrMergeConflicts(err) { conflictError := err.(models.ErrMergeConflicts) ctx.JSON(http.StatusConflict, conflictError) @@ -852,28 +849,25 @@ func MergePullRequest(ctx *context.APIContext) { ctx.JSON(http.StatusConflict, conflictError) } else if git.IsErrPushOutOfDate(err) { ctx.Error(http.StatusConflict, "Merge", "merge push out of date") - return } else if models.IsErrSHADoesNotMatch(err) { ctx.Error(http.StatusConflict, "Merge", "head out of date") - return } else if git.IsErrPushRejected(err) { errPushRej := err.(*git.ErrPushRejected) if len(errPushRej.Message) == 0 { ctx.Error(http.StatusConflict, "Merge", "PushRejected without remote error message") - return + } else { + ctx.Error(http.StatusConflict, "Merge", "PushRejected with remote message: "+errPushRej.Message) } - ctx.Error(http.StatusConflict, "Merge", "PushRejected with remote message: "+errPushRej.Message) - return + } else { + ctx.Error(http.StatusInternalServerError, "Merge", err) } - ctx.Error(http.StatusInternalServerError, "Merge", err) return } - log.Trace("Pull request merged: %d", pr.ID) if form.DeleteBranchAfterMerge { // Don't cleanup when there are other PR's that use this branch as head branch. - exist, err := models.HasUnmergedPullRequestsByHeadInfo(pr.HeadRepoID, pr.HeadBranch) + exist, err := models.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch) if err != nil { ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err) return @@ -887,14 +881,14 @@ func MergePullRequest(ctx *context.APIContext) { if ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.HeadRepoID && ctx.Repo.GitRepo != nil { headRepo = ctx.Repo.GitRepo } else { - headRepo, err = git.OpenRepositoryCtx(ctx, pr.HeadRepo.RepoPath()) + headRepo, err = git.OpenRepository(ctx, pr.HeadRepo.RepoPath()) if err != nil { ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.RepoPath()), err) return } defer headRepo.Close() } - if err := repo_service.DeleteBranch(ctx.User, pr.HeadRepo, headRepo, pr.HeadBranch); err != nil { + if err := repo_service.DeleteBranch(ctx.Doer, pr.HeadRepo, headRepo, pr.HeadBranch); err != nil { switch { case git.IsErrBranchNotExist(err): ctx.NotFound(err) @@ -907,7 +901,7 @@ func MergePullRequest(ctx *context.APIContext) { } return } - if err := models.AddDeletePRBranchComment(ctx.User, pr.BaseRepo, pr.Issue.ID, pr.HeadBranch); err != nil { + if err := models.AddDeletePRBranchComment(ctx, ctx.Doer, pr.BaseRepo, pr.Issue.ID, pr.HeadBranch); err != nil { // Do not fail here as branch has already been deleted log.Error("DeleteBranch: %v", err) } @@ -981,7 +975,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) headRepo = ctx.Repo.Repository headGitRepo = ctx.Repo.GitRepo } else { - headGitRepo, err = git.OpenRepositoryCtx(ctx, repo_model.RepoPath(headUser.Name, headRepo.Name)) + headGitRepo, err = git.OpenRepository(ctx, repo_model.RepoPath(headUser.Name, headRepo.Name)) if err != nil { ctx.Error(http.StatusInternalServerError, "OpenRepository", err) return nil, nil, nil, nil, "", "" @@ -989,7 +983,7 @@ func 
parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) } // user should have permission to read baseRepo's codes and pulls, NOT headRepo's - permBase, err := models.GetUserRepoPermission(baseRepo, ctx.User) + permBase, err := models.GetUserRepoPermission(ctx, baseRepo, ctx.Doer) if err != nil { headGitRepo.Close() ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err) @@ -998,7 +992,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) if !permBase.CanReadIssuesOrPulls(true) || !permBase.CanRead(unit.TypeCode) { if log.IsTrace() { log.Trace("Permission Denied: User %-v cannot create/read pull requests or cannot read code in Repo %-v\nUser in baseRepo has Permissions: %-+v", - ctx.User, + ctx.Doer, baseRepo, permBase) } @@ -1008,7 +1002,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) } // user should have permission to read headrepo's codes - permHead, err := models.GetUserRepoPermission(headRepo, ctx.User) + permHead, err := models.GetUserRepoPermission(ctx, headRepo, ctx.Doer) if err != nil { headGitRepo.Close() ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err) @@ -1017,7 +1011,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) if !permHead.CanRead(unit.TypeCode) { if log.IsTrace() { log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v", - ctx.User, + ctx.Doer, headRepo, permHead) } @@ -1109,18 +1103,18 @@ func UpdatePullRequest(ctx *context.APIContext) { return } - if err = pr.LoadBaseRepo(); err != nil { + if err = pr.LoadBaseRepoCtx(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadBaseRepo", err) return } - if err = pr.LoadHeadRepo(); err != nil { + if err = pr.LoadHeadRepoCtx(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadHeadRepo", err) return } rebase := ctx.FormString("style") == "rebase" - allowedUpdateByMerge, allowedUpdateByRebase, err := pull_service.IsUserAllowedToUpdate(pr, ctx.User) + allowedUpdateByMerge, allowedUpdateByRebase, err := pull_service.IsUserAllowedToUpdate(ctx, pr, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "IsUserAllowedToMerge", err) return @@ -1134,7 +1128,7 @@ func UpdatePullRequest(ctx *context.APIContext) { // default merge commit message message := fmt.Sprintf("Merge branch '%s' into %s", pr.BaseBranch, pr.HeadBranch) - if err = pull_service.Update(ctx, pr, ctx.User, message, rebase); err != nil { + if err = pull_service.Update(ctx, pr, ctx.Doer, message, rebase); err != nil { if models.IsErrMergeConflicts(err) { ctx.Error(http.StatusConflict, "Update", "merge failed because of conflict") return @@ -1149,6 +1143,78 @@ func UpdatePullRequest(ctx *context.APIContext) { ctx.Status(http.StatusOK) } +// MergePullRequest cancel an auto merge scheduled for a given PullRequest by index +func CancelScheduledAutoMerge(ctx *context.APIContext) { + // swagger:operation DELETE /repos/{owner}/{repo}/pulls/{index}/merge repository repoCancelScheduledAutoMerge + // --- + // summary: Cancel the scheduled auto merge for the given pull request + // produces: + // - application/json + // parameters: + // - name: owner + // in: path + // description: owner of the repo + // type: string + // required: true + // - name: repo + // in: path + // description: name of the repo + // type: string + // required: true + // - name: index + // in: path + // description: index of the pull request to merge + // 
type: integer + // format: int64 + // required: true + // responses: + // "204": + // "$ref": "#/responses/empty" + // "403": + // "$ref": "#/responses/forbidden" + // "404": + // "$ref": "#/responses/notFound" + + pullIndex := ctx.ParamsInt64(":index") + pull, err := models.GetPullRequestByIndex(ctx.Repo.Repository.ID, pullIndex) + if err != nil { + if models.IsErrPullRequestNotExist(err) { + ctx.NotFound() + return + } + ctx.InternalServerError(err) + return + } + + exist, autoMerge, err := pull_model.GetScheduledMergeByPullID(ctx, pull.ID) + if err != nil { + ctx.InternalServerError(err) + return + } + if !exist { + ctx.NotFound() + return + } + + if ctx.Doer.ID != autoMerge.DoerID { + allowed, err := models.IsUserRepoAdminCtx(ctx, ctx.Repo.Repository, ctx.Doer) + if err != nil { + ctx.InternalServerError(err) + return + } + if !allowed { + ctx.Error(http.StatusForbidden, "No permission to cancel", "user has no permission to cancel the scheduled auto merge") + return + } + } + + if err := automerge.RemoveScheduledAutoMerge(ctx, ctx.Doer, pull); err != nil { + ctx.InternalServerError(err) + } else { + ctx.Status(http.StatusNoContent) + } +} + // GetPullRequestCommits gets all commits associated with a given PR func GetPullRequestCommits(ctx *context.APIContext) { // swagger:operation GET /repos/{owner}/{repo}/pulls/{index}/commits repository repoGetPullRequestCommits @@ -1197,7 +1263,7 @@ func GetPullRequestCommits(ctx *context.APIContext) { return } - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { ctx.InternalServerError(err) return } diff --git a/routers/api/v1/repo/pull_review.go b/routers/api/v1/repo/pull_review.go index 9eb63bafad..b3ebe49bf5 100644 --- a/routers/api/v1/repo/pull_review.go +++ b/routers/api/v1/repo/pull_review.go @@ -10,6 +10,7 @@ import ( "strings" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" @@ -74,7 +75,7 @@ func ListPullReviews(ctx *context.APIContext) { return } - if err = pr.Issue.LoadRepo(); err != nil { + if err = pr.Issue.LoadRepo(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "LoadRepo", err) return } @@ -97,7 +98,7 @@ func ListPullReviews(ctx *context.APIContext) { return } - apiReviews, err := convert.ToPullReviewList(ctx, allReviews, ctx.User) + apiReviews, err := convert.ToPullReviewList(ctx, allReviews, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "convertToPullReviewList", err) return @@ -148,7 +149,7 @@ func GetPullReview(ctx *context.APIContext) { return } - apiReview, err := convert.ToPullReview(ctx, review, ctx.User) + apiReview, err := convert.ToPullReview(ctx, review, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "convertToPullReview", err) return @@ -198,7 +199,7 @@ func GetPullReviewComments(ctx *context.APIContext) { return } - apiComments, err := convert.ToPullReviewCommentList(ctx, review, ctx.User) + apiComments, err := convert.ToPullReviewCommentList(ctx, review, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "convertToPullReviewCommentList", err) return @@ -250,11 +251,11 @@ func DeletePullReview(ctx *context.APIContext) { return } - if ctx.User == nil { + if ctx.Doer == nil { ctx.NotFound() return } - if !ctx.User.IsAdmin && ctx.User.ID != review.ReviewerID { + if !ctx.Doer.IsAdmin && ctx.Doer.ID != review.ReviewerID { ctx.Error(http.StatusForbidden, "only admin and 
user itself can delete a review", nil) return } @@ -321,7 +322,7 @@ func CreatePullReview(ctx *context.APIContext) { return } - if err := pr.Issue.LoadRepo(); err != nil { + if err := pr.Issue.LoadRepo(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "pr.Issue.LoadRepo", err) return } @@ -353,7 +354,7 @@ func CreatePullReview(ctx *context.APIContext) { } if _, err := pull_service.CreateCodeComment(ctx, - ctx.User, + ctx.Doer, ctx.Repo.GitRepo, pr.Issue, line, @@ -369,14 +370,14 @@ func CreatePullReview(ctx *context.APIContext) { } // create review and associate all pending review comments - review, _, err := pull_service.SubmitReview(ctx, ctx.User, ctx.Repo.GitRepo, pr.Issue, reviewType, opts.Body, opts.CommitID, nil) + review, _, err := pull_service.SubmitReview(ctx, ctx.Doer, ctx.Repo.GitRepo, pr.Issue, reviewType, opts.Body, opts.CommitID, nil) if err != nil { ctx.Error(http.StatusInternalServerError, "SubmitReview", err) return } // convert response - apiReview, err := convert.ToPullReview(ctx, review, ctx.User) + apiReview, err := convert.ToPullReview(ctx, review, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "convertToPullReview", err) return @@ -457,14 +458,14 @@ func SubmitPullReview(ctx *context.APIContext) { } // create review and associate all pending review comments - review, _, err = pull_service.SubmitReview(ctx, ctx.User, ctx.Repo.GitRepo, pr.Issue, reviewType, opts.Body, headCommitID, nil) + review, _, err = pull_service.SubmitReview(ctx, ctx.Doer, ctx.Repo.GitRepo, pr.Issue, reviewType, opts.Body, headCommitID, nil) if err != nil { ctx.Error(http.StatusInternalServerError, "SubmitReview", err) return } // convert response - apiReview, err := convert.ToPullReview(ctx, review, ctx.User) + apiReview, err := convert.ToPullReview(ctx, review, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "convertToPullReview", err) return @@ -486,7 +487,7 @@ func preparePullReviewType(ctx *context.APIContext, pr *models.PullRequest, even switch event { case api.ReviewStateApproved: // can not approve your own PR - if pr.Issue.IsPoster(ctx.User.ID) { + if pr.Issue.IsPoster(ctx.Doer.ID) { ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("approve your own pull is not allowed")) return -1, true } @@ -495,7 +496,7 @@ func preparePullReviewType(ctx *context.APIContext, pr *models.PullRequest, even case api.ReviewStateRequestChanges: // can not reject your own PR - if pr.Issue.IsPoster(ctx.User.ID) { + if pr.Issue.IsPoster(ctx.Doer.ID) { ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("reject your own pull is not allowed")) return -1, true } @@ -551,7 +552,7 @@ func prepareSingleReview(ctx *context.APIContext) (*models.Review, *models.PullR } // make sure that the user has access to this review if it is pending - if review.Type == models.ReviewTypePending && review.ReviewerID != ctx.User.ID && !ctx.User.IsAdmin { + if review.Type == models.ReviewTypePending && review.ReviewerID != ctx.Doer.ID && !ctx.Doer.IsAdmin { ctx.NotFound("GetReviewByID") return nil, nil, true } @@ -656,14 +657,14 @@ func apiReviewRequest(ctx *context.APIContext, opts api.PullReviewRequestOptions return } - if err := pr.Issue.LoadRepo(); err != nil { + if err := pr.Issue.LoadRepo(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "pr.Issue.LoadRepo", err) return } reviewers := make([]*user_model.User, 0, len(opts.Reviewers)) - permDoer, err := models.GetUserRepoPermission(pr.Issue.Repo, ctx.User) + permDoer, err := 
models.GetUserRepoPermission(ctx, pr.Issue.Repo, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err) return @@ -686,7 +687,7 @@ func apiReviewRequest(ctx *context.APIContext, opts api.PullReviewRequestOptions return } - err = issue_service.IsValidReviewRequest(reviewer, ctx.User, isAdd, pr.Issue, &permDoer) + err = issue_service.IsValidReviewRequest(ctx, reviewer, ctx.Doer, isAdd, pr.Issue, &permDoer) if err != nil { if models.IsErrNotValidReviewRequest(err) { ctx.Error(http.StatusUnprocessableEntity, "NotValidReviewRequest", err) @@ -705,7 +706,7 @@ func apiReviewRequest(ctx *context.APIContext, opts api.PullReviewRequestOptions } for _, reviewer := range reviewers { - comment, err := issue_service.ReviewRequest(pr.Issue, ctx.User, reviewer, isAdd) + comment, err := issue_service.ReviewRequest(pr.Issue, ctx.Doer, reviewer, isAdd) if err != nil { ctx.Error(http.StatusInternalServerError, "ReviewRequest", err) return @@ -722,12 +723,12 @@ func apiReviewRequest(ctx *context.APIContext, opts api.PullReviewRequestOptions if ctx.Repo.Repository.Owner.IsOrganization() && len(opts.TeamReviewers) > 0 { - teamReviewers := make([]*models.Team, 0, len(opts.TeamReviewers)) + teamReviewers := make([]*organization.Team, 0, len(opts.TeamReviewers)) for _, t := range opts.TeamReviewers { - var teamReviewer *models.Team - teamReviewer, err = models.GetTeam(ctx.Repo.Owner.ID, t) + var teamReviewer *organization.Team + teamReviewer, err = organization.GetTeam(ctx.Repo.Owner.ID, t) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.NotFound("TeamNotExist", fmt.Sprintf("Team '%s' not exist", t)) return } @@ -735,7 +736,7 @@ func apiReviewRequest(ctx *context.APIContext, opts api.PullReviewRequestOptions return } - err = issue_service.IsValidTeamReviewRequest(teamReviewer, ctx.User, isAdd, pr.Issue) + err = issue_service.IsValidTeamReviewRequest(ctx, teamReviewer, ctx.Doer, isAdd, pr.Issue) if err != nil { if models.IsErrNotValidReviewRequest(err) { ctx.Error(http.StatusUnprocessableEntity, "NotValidReviewRequest", err) @@ -749,7 +750,7 @@ func apiReviewRequest(ctx *context.APIContext, opts api.PullReviewRequestOptions } for _, teamReviewer := range teamReviewers { - comment, err := issue_service.TeamReviewRequest(pr.Issue, ctx.User, teamReviewer, isAdd) + comment, err := issue_service.TeamReviewRequest(pr.Issue, ctx.Doer, teamReviewer, isAdd) if err != nil { ctx.ServerError("TeamReviewRequest", err) return @@ -766,7 +767,7 @@ func apiReviewRequest(ctx *context.APIContext, opts api.PullReviewRequestOptions } if isAdd { - apiReviews, err := convert.ToPullReviewList(ctx, reviews, ctx.User) + apiReviews, err := convert.ToPullReviewList(ctx, reviews, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "convertToPullReviewList", err) return @@ -884,7 +885,7 @@ func dismissReview(ctx *context.APIContext, msg string, isDismiss bool) { return } - _, err := pull_service.DismissReview(ctx, review.ID, msg, ctx.User, isDismiss) + _, err := pull_service.DismissReview(ctx, review.ID, msg, ctx.Doer, isDismiss) if err != nil { ctx.Error(http.StatusInternalServerError, "pull_service.DismissReview", err) return @@ -896,7 +897,7 @@ func dismissReview(ctx *context.APIContext, msg string, isDismiss bool) { } // convert response - apiReview, err := convert.ToPullReview(ctx, review, ctx.User) + apiReview, err := convert.ToPullReview(ctx, review, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, 
"convertToPullReview", err) return diff --git a/routers/api/v1/repo/release.go b/routers/api/v1/repo/release.go index 3cdd798151..7d23a38add 100644 --- a/routers/api/v1/repo/release.go +++ b/routers/api/v1/repo/release.go @@ -191,8 +191,8 @@ func CreateRelease(ctx *context.APIContext) { } rel = &models.Release{ RepoID: ctx.Repo.Repository.ID, - PublisherID: ctx.User.ID, - Publisher: ctx.User, + PublisherID: ctx.Doer.ID, + Publisher: ctx.Doer, TagName: form.TagName, Target: form.Target, Title: form.Title, @@ -220,12 +220,12 @@ func CreateRelease(ctx *context.APIContext) { rel.Note = form.Note rel.IsDraft = form.IsDraft rel.IsPrerelease = form.IsPrerelease - rel.PublisherID = ctx.User.ID + rel.PublisherID = ctx.Doer.ID rel.IsTag = false rel.Repo = ctx.Repo.Repository - rel.Publisher = ctx.User + rel.Publisher = ctx.Doer - if err = releaseservice.UpdateRelease(ctx.User, ctx.Repo.GitRepo, rel, nil, nil, nil); err != nil { + if err = releaseservice.UpdateRelease(ctx.Doer, ctx.Repo.GitRepo, rel, nil, nil, nil); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateRelease", err) return } @@ -300,7 +300,7 @@ func EditRelease(ctx *context.APIContext) { if form.IsPrerelease != nil { rel.IsPrerelease = *form.IsPrerelease } - if err := releaseservice.UpdateRelease(ctx.User, ctx.Repo.GitRepo, rel, nil, nil, nil); err != nil { + if err := releaseservice.UpdateRelease(ctx.Doer, ctx.Repo.GitRepo, rel, nil, nil, nil); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateRelease", err) return } @@ -356,7 +356,7 @@ func DeleteRelease(ctx *context.APIContext) { ctx.NotFound() return } - if err := releaseservice.DeleteReleaseByID(ctx, id, ctx.User, false); err != nil { + if err := releaseservice.DeleteReleaseByID(ctx, id, ctx.Doer, false); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteReleaseByID", err) return } diff --git a/routers/api/v1/repo/release_attachment.go b/routers/api/v1/repo/release_attachment.go index b1bc48d30f..c172b66127 100644 --- a/routers/api/v1/repo/release_attachment.go +++ b/routers/api/v1/repo/release_attachment.go @@ -184,7 +184,7 @@ func CreateReleaseAttachment(ctx *context.APIContext) { } // Create a new attachment and save the file - attach, err := attachment.UploadAttachment(file, ctx.User.ID, release.RepoID, releaseID, filename, setting.Repository.Release.AllowedTypes) + attach, err := attachment.UploadAttachment(file, ctx.Doer.ID, release.RepoID, releaseID, filename, setting.Repository.Release.AllowedTypes) if err != nil { if upload.IsErrFileTypeForbidden(err) { ctx.Error(http.StatusBadRequest, "DetectContentType", err) diff --git a/routers/api/v1/repo/release_tags.go b/routers/api/v1/repo/release_tags.go index d77bdf0331..a737bcf1c8 100644 --- a/routers/api/v1/repo/release_tags.go +++ b/routers/api/v1/repo/release_tags.go @@ -110,7 +110,7 @@ func DeleteReleaseByTag(ctx *context.APIContext) { return } - if err = releaseservice.DeleteReleaseByID(ctx, release.ID, ctx.User, false); err != nil { + if err = releaseservice.DeleteReleaseByID(ctx, release.ID, ctx.Doer, false); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteReleaseByID", err) } diff --git a/routers/api/v1/repo/repo.go b/routers/api/v1/repo/repo.go index 560139c457..29e8352142 100644 --- a/routers/api/v1/repo/repo.go +++ b/routers/api/v1/repo/repo.go @@ -13,6 +13,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" unit_model 
"code.gitea.io/gitea/models/unit" @@ -21,6 +22,7 @@ import ( "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" @@ -30,23 +32,6 @@ import ( repo_service "code.gitea.io/gitea/services/repository" ) -var searchOrderByMap = map[string]map[string]db.SearchOrderBy{ - "asc": { - "alpha": db.SearchOrderByAlphabetically, - "created": db.SearchOrderByOldest, - "updated": db.SearchOrderByLeastUpdated, - "size": db.SearchOrderBySize, - "id": db.SearchOrderByID, - }, - "desc": { - "alpha": db.SearchOrderByAlphabeticallyReverse, - "created": db.SearchOrderByNewest, - "updated": db.SearchOrderByRecentUpdated, - "size": db.SearchOrderBySizeReverse, - "id": db.SearchOrderByIDReverse, - }, -} - // Search repositories via options func Search(ctx *context.APIContext) { // swagger:operation GET /repos/search repository repoSearch @@ -139,7 +124,7 @@ func Search(ctx *context.APIContext) { opts := &models.SearchRepoOptions{ ListOptions: utils.GetListOptions(ctx), - Actor: ctx.User, + Actor: ctx.Doer, Keyword: ctx.FormTrim("q"), OwnerID: ctx.FormInt64("uid"), PriorityOwnerID: ctx.FormInt64("priority_owner_id"), @@ -192,7 +177,7 @@ func Search(ctx *context.APIContext) { if len(sortOrder) == 0 { sortOrder = "asc" } - if searchModeMap, ok := searchOrderByMap[sortOrder]; ok { + if searchModeMap, ok := context.SearchOrderByMap[sortOrder]; ok { if orderBy, ok := searchModeMap[sortMode]; ok { opts.OrderBy = orderBy } else { @@ -217,14 +202,14 @@ func Search(ctx *context.APIContext) { results := make([]*api.Repository, len(repos)) for i, repo := range repos { - if err = repo.GetOwner(db.DefaultContext); err != nil { + if err = repo.GetOwner(ctx); err != nil { ctx.JSON(http.StatusInternalServerError, api.SearchError{ OK: false, Error: err.Error(), }) return } - accessMode, err := models.AccessLevel(ctx.User, repo) + accessMode, err := models.AccessLevel(ctx.Doer, repo) if err != nil { ctx.JSON(http.StatusInternalServerError, api.SearchError{ OK: false, @@ -233,7 +218,6 @@ func Search(ctx *context.APIContext) { } results[i] = convert.ToRepo(repo, accessMode) } - ctx.SetLinkHeader(int(count), opts.PageSize) ctx.SetTotalCountHeader(count) ctx.JSON(http.StatusOK, api.SearchResults{ @@ -247,7 +231,7 @@ func CreateUserRepo(ctx *context.APIContext, owner *user_model.User, opt api.Cre if opt.AutoInit && opt.Readme == "" { opt.Readme = "Default" } - repo, err := repo_service.CreateRepository(ctx.User, owner, models.CreateRepoOptions{ + repo, err := repo_service.CreateRepository(ctx.Doer, owner, models.CreateRepoOptions{ Name: opt.Name, Description: opt.Description, IssueLabels: opt.IssueLabels, @@ -264,7 +248,8 @@ func CreateUserRepo(ctx *context.APIContext, owner *user_model.User, opt api.Cre if repo_model.IsErrRepoAlreadyExist(err) { ctx.Error(http.StatusConflict, "", "The repository with the same name already exists.") } else if db.IsErrNameReserved(err) || - db.IsErrNamePatternNotAllowed(err) { + db.IsErrNamePatternNotAllowed(err) || + repo_module.IsErrIssueLabelTemplateLoad(err) { ctx.Error(http.StatusUnprocessableEntity, "", err) } else { ctx.Error(http.StatusInternalServerError, "CreateRepository", err) @@ -303,12 +288,12 @@ func Create(ctx *context.APIContext) { // "422": // "$ref": "#/responses/validationError" opt := web.GetForm(ctx).(*api.CreateRepoOption) - if ctx.User.IsOrganization() { + if 
ctx.Doer.IsOrganization() { // Shouldn't reach this condition, but just in case. ctx.Error(http.StatusUnprocessableEntity, "", "not allowed creating repository for organization") return } - CreateUserRepo(ctx, ctx.User, *opt) + CreateUserRepo(ctx, ctx.Doer, *opt) } // Generate Create a repository using a template @@ -353,21 +338,22 @@ func Generate(ctx *context.APIContext) { return } - if ctx.User.IsOrganization() { + if ctx.Doer.IsOrganization() { ctx.Error(http.StatusUnprocessableEntity, "", "not allowed creating repository for organization") return } opts := models.GenerateRepoOptions{ - Name: form.Name, - Description: form.Description, - Private: form.Private, - GitContent: form.GitContent, - Topics: form.Topics, - GitHooks: form.GitHooks, - Webhooks: form.Webhooks, - Avatar: form.Avatar, - IssueLabels: form.Labels, + Name: form.Name, + DefaultBranch: form.DefaultBranch, + Description: form.Description, + Private: form.Private, + GitContent: form.GitContent, + Topics: form.Topics, + GitHooks: form.GitHooks, + Webhooks: form.Webhooks, + Avatar: form.Avatar, + IssueLabels: form.Labels, } if !opts.IsValid() { @@ -375,7 +361,7 @@ func Generate(ctx *context.APIContext) { return } - ctxUser := ctx.User + ctxUser := ctx.Doer var err error if form.Owner != ctxUser.Name { ctxUser, err = user_model.GetUserByName(form.Owner) @@ -391,13 +377,13 @@ func Generate(ctx *context.APIContext) { return } - if !ctx.User.IsAdmin && !ctxUser.IsOrganization() { + if !ctx.Doer.IsAdmin && !ctxUser.IsOrganization() { ctx.Error(http.StatusForbidden, "", "Only admin can generate repository for other user.") return } - if !ctx.User.IsAdmin { - canCreate, err := models.OrgFromUser(ctxUser).CanCreateOrgRepo(ctx.User.ID) + if !ctx.Doer.IsAdmin { + canCreate, err := organization.OrgFromUser(ctxUser).CanCreateOrgRepo(ctx.Doer.ID) if err != nil { ctx.ServerError("CanCreateOrgRepo", err) return @@ -408,7 +394,7 @@ func Generate(ctx *context.APIContext) { } } - repo, err := repo_service.GenerateRepository(ctx.User, ctxUser, ctx.Repo.Repository, opts) + repo, err := repo_service.GenerateRepository(ctx.Doer, ctxUser, ctx.Repo.Repository, opts) if err != nil { if repo_model.IsErrRepoAlreadyExist(err) { ctx.Error(http.StatusConflict, "", "The repository with the same name already exists.") @@ -483,9 +469,9 @@ func CreateOrgRepo(ctx *context.APIContext) { // "403": // "$ref": "#/responses/forbidden" opt := web.GetForm(ctx).(*api.CreateRepoOption) - org, err := models.GetOrgByName(ctx.Params(":org")) + org, err := organization.GetOrgByName(ctx.Params(":org")) if err != nil { - if models.IsErrOrgNotExist(err) { + if organization.IsErrOrgNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "", err) } else { ctx.Error(http.StatusInternalServerError, "GetOrgByName", err) @@ -493,13 +479,13 @@ func CreateOrgRepo(ctx *context.APIContext) { return } - if !models.HasOrgOrUserVisible(org.AsUser(), ctx.User) { + if !organization.HasOrgOrUserVisible(ctx, org.AsUser(), ctx.Doer) { ctx.NotFound("HasOrgOrUserVisible", nil) return } - if !ctx.User.IsAdmin { - canCreate, err := org.CanCreateOrgRepo(ctx.User.ID) + if !ctx.Doer.IsAdmin { + canCreate, err := org.CanCreateOrgRepo(ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "CanCreateOrgRepo", err) return @@ -569,7 +555,7 @@ func GetByID(ctx *context.APIContext) { return } - perm, err := models.GetUserRepoPermission(repo, ctx.User) + perm, err := models.GetUserRepoPermission(ctx, repo, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "AccessLevel", 
err) return @@ -629,7 +615,7 @@ func Edit(ctx *context.APIContext) { } if opts.MirrorInterval != nil { - if err := updateMirrorInterval(ctx, opts); err != nil { + if err := updateMirror(ctx, opts); err != nil { return } } @@ -653,7 +639,7 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err } // Check if repository name has been changed and not just a case change if repo.LowerName != strings.ToLower(newRepoName) { - if err := repo_service.ChangeRepositoryName(ctx.User, repo, newRepoName); err != nil { + if err := repo_service.ChangeRepositoryName(ctx.Doer, repo, newRepoName); err != nil { switch { case repo_model.IsErrRepoAlreadyExist(err): ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("repo name is already taken [name: %s]", newRepoName), err) @@ -694,7 +680,7 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err visibilityChanged = repo.IsPrivate != *opts.Private // when ForcePrivate enabled, you could change public repo to private, but only admin users can change private to public - if visibilityChanged && setting.Repository.ForcePrivate && !*opts.Private && !ctx.User.IsAdmin { + if visibilityChanged && setting.Repository.ForcePrivate && !*opts.Private && !ctx.Doer.IsAdmin { err := fmt.Errorf("cannot change private repository to public") ctx.Error(http.StatusUnprocessableEntity, "Force Private enabled", err) return err @@ -709,7 +695,7 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err if ctx.Repo.GitRepo == nil && !repo.IsEmpty { var err error - ctx.Repo.GitRepo, err = git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.RepoPath()) + ctx.Repo.GitRepo, err = git.OpenRepository(ctx, ctx.Repo.Repository.RepoPath()) if err != nil { ctx.Error(http.StatusInternalServerError, "Unable to OpenRepository", err) return err @@ -958,37 +944,67 @@ func updateRepoArchivedState(ctx *context.APIContext, opts api.EditRepoOption) e return nil } -// updateMirrorInterval updates the repo's mirror Interval -func updateMirrorInterval(ctx *context.APIContext, opts api.EditRepoOption) error { +// updateMirror updates a repo's mirror Interval and EnablePrune +func updateMirror(ctx *context.APIContext, opts api.EditRepoOption) error { repo := ctx.Repo.Repository + // only update mirror if interval or enable prune are provided + if opts.MirrorInterval == nil && opts.EnablePrune == nil { + return nil + } + + // these values only make sense if the repo is a mirror + if !repo.IsMirror { + err := fmt.Errorf("repo is not a mirror, can not change mirror interval") + ctx.Error(http.StatusUnprocessableEntity, err.Error(), err) + return err + } + + // get the mirror from the repo + mirror, err := repo_model.GetMirrorByRepoID(repo.ID) + if err != nil { + log.Error("Failed to get mirror: %s", err) + ctx.Error(http.StatusInternalServerError, "MirrorInterval", err) + return err + } + + // update MirrorInterval if opts.MirrorInterval != nil { - if !repo.IsMirror { - err := fmt.Errorf("repo is not a mirror, can not change mirror interval") - ctx.Error(http.StatusUnprocessableEntity, err.Error(), err) - return err - } - mirror, err := repo_model.GetMirrorByRepoID(repo.ID) + + // MirrorInterval should be a duration + interval, err := time.ParseDuration(*opts.MirrorInterval) if err != nil { - log.Error("Failed to get mirror: %s", err) - ctx.Error(http.StatusInternalServerError, "MirrorInterval", err) - return err - } - if interval, err := time.ParseDuration(*opts.MirrorInterval); err == nil { - mirror.Interval = interval - mirror.Repo 
= repo - if err := repo_model.UpdateMirror(mirror); err != nil { - log.Error("Failed to Set Mirror Interval: %s", err) - ctx.Error(http.StatusUnprocessableEntity, "MirrorInterval", err) - return err - } - log.Trace("Repository %s/%s Mirror Interval was Updated to %s", ctx.Repo.Owner.Name, repo.Name, interval) - } else { log.Error("Wrong format for MirrorInternal Sent: %s", err) ctx.Error(http.StatusUnprocessableEntity, "MirrorInterval", err) return err } + + // Ensure the provided duration is not too short + if interval != 0 && interval < setting.Mirror.MinInterval { + err := fmt.Errorf("invalid mirror interval: %s is below minimum interval: %s", interval, setting.Mirror.MinInterval) + ctx.Error(http.StatusUnprocessableEntity, "MirrorInterval", err) + return err + } + + mirror.Interval = interval + mirror.Repo = repo + mirror.ScheduleNextUpdate() + log.Trace("Repository %s Mirror[%d] Set Interval: %s NextUpdateUnix: %s", repo.FullName(), mirror.ID, interval, mirror.NextUpdateUnix) } + + // update EnablePrune + if opts.EnablePrune != nil { + mirror.EnablePrune = *opts.EnablePrune + log.Trace("Repository %s Mirror[%d] Set EnablePrune: %t", repo.FullName(), mirror.ID, mirror.EnablePrune) + } + + // finally update the mirror in the DB + if err := repo_model.UpdateMirror(mirror); err != nil { + log.Error("Failed to Set Mirror Interval: %s", err) + ctx.Error(http.StatusUnprocessableEntity, "MirrorInterval", err) + return err + } + return nil } @@ -1019,7 +1035,7 @@ func Delete(ctx *context.APIContext) { owner := ctx.Repo.Owner repo := ctx.Repo.Repository - canDelete, err := models.CanUserDelete(repo, ctx.User) + canDelete, err := models.CanUserDelete(repo, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "CanUserDelete", err) return @@ -1032,7 +1048,7 @@ func Delete(ctx *context.APIContext) { ctx.Repo.GitRepo.Close() } - if err := repo_service.DeleteRepository(ctx, ctx.User, repo, true); err != nil { + if err := repo_service.DeleteRepository(ctx, ctx.Doer, repo, true); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteRepository", err) return } diff --git a/routers/api/v1/repo/repo_test.go b/routers/api/v1/repo/repo_test.go index 652fecefbc..9acc0ee7d2 100644 --- a/routers/api/v1/repo/repo_test.go +++ b/routers/api/v1/repo/repo_test.go @@ -24,7 +24,7 @@ func TestRepoEdit(t *testing.T) { ctx := test.MockContext(t, "user2/repo1") test.LoadRepo(t, ctx, 1) test.LoadUser(t, ctx, 2) - ctx.Repo.Owner = ctx.User + ctx.Repo.Owner = ctx.Doer description := "new description" website := "http://wwww.newwebsite.com" private := true @@ -71,7 +71,7 @@ func TestRepoEditNameChange(t *testing.T) { ctx := test.MockContext(t, "user2/repo1") test.LoadRepo(t, ctx, 1) test.LoadUser(t, ctx, 2) - ctx.Repo.Owner = ctx.User + ctx.Repo.Owner = ctx.Doer name := "newname" opts := api.EditRepoOption{ Name: &name, diff --git a/routers/api/v1/repo/star.go b/routers/api/v1/repo/star.go index a53ed7136f..c78c3cc512 100644 --- a/routers/api/v1/repo/star.go +++ b/routers/api/v1/repo/star.go @@ -51,7 +51,7 @@ func ListStargazers(ctx *context.APIContext) { } users := make([]*api.User, len(stargazers)) for i, stargazer := range stargazers { - users[i] = convert.ToUser(stargazer, ctx.User) + users[i] = convert.ToUser(stargazer, ctx.Doer) } ctx.SetTotalCountHeader(int64(ctx.Repo.Repository.NumStars)) diff --git a/routers/api/v1/repo/status.go b/routers/api/v1/repo/status.go index 01faf5ad25..f4c0ebd38c 100644 --- a/routers/api/v1/repo/status.go +++ b/routers/api/v1/repo/status.go @@ -62,7 +62,7 @@ func 
NewCommitStatus(ctx *context.APIContext) { Description: form.Description, Context: form.Context, } - if err := files_service.CreateCommitStatus(ctx, ctx.Repo.Repository, ctx.User, sha, status); err != nil { + if err := files_service.CreateCommitStatus(ctx, ctx.Repo.Repository, ctx.Doer, sha, status); err != nil { ctx.Error(http.StatusInternalServerError, "CreateCommitStatus", err) return } diff --git a/routers/api/v1/repo/subscriber.go b/routers/api/v1/repo/subscriber.go index 31126d5695..c1aaa24193 100644 --- a/routers/api/v1/repo/subscriber.go +++ b/routers/api/v1/repo/subscriber.go @@ -51,7 +51,7 @@ func ListSubscribers(ctx *context.APIContext) { } users := make([]*api.User, len(subscribers)) for i, subscriber := range subscribers { - users[i] = convert.ToUser(subscriber, ctx.User) + users[i] = convert.ToUser(subscriber, ctx.Doer) } ctx.SetTotalCountHeader(int64(ctx.Repo.Repository.NumWatches)) diff --git a/routers/api/v1/repo/tag.go b/routers/api/v1/repo/tag.go index a60f4f320f..8942912754 100644 --- a/routers/api/v1/repo/tag.go +++ b/routers/api/v1/repo/tag.go @@ -191,7 +191,7 @@ func CreateTag(ctx *context.APIContext) { return } - if err := releaseservice.CreateNewTag(ctx, ctx.User, ctx.Repo.Repository, commit.ID.String(), form.TagName, form.Message); err != nil { + if err := releaseservice.CreateNewTag(ctx, ctx.Doer, ctx.Repo.Repository, commit.ID.String(), form.TagName, form.Message); err != nil { if models.IsErrTagAlreadyExists(err) { ctx.Error(http.StatusConflict, "tag exist", err) return @@ -255,7 +255,7 @@ func DeleteTag(ctx *context.APIContext) { return } - if err = releaseservice.DeleteReleaseByID(ctx, tag.ID, ctx.User, true); err != nil { + if err = releaseservice.DeleteReleaseByID(ctx, tag.ID, ctx.Doer, true); err != nil { ctx.Error(http.StatusInternalServerError, "DeleteReleaseByID", err) } diff --git a/routers/api/v1/repo/teams.go b/routers/api/v1/repo/teams.go index 024224b5a3..1e3ea326d3 100644 --- a/routers/api/v1/repo/teams.go +++ b/routers/api/v1/repo/teams.go @@ -9,6 +9,7 @@ import ( "net/http" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" api "code.gitea.io/gitea/modules/structs" @@ -41,7 +42,7 @@ func ListTeams(ctx *context.APIContext) { return } - teams, err := models.GetRepoTeams(ctx.Repo.Repository) + teams, err := organization.GetRepoTeams(ctx.Repo.Repository) if err != nil { ctx.InternalServerError(err) return @@ -101,7 +102,7 @@ func IsTeam(ctx *context.APIContext) { return } - if team.HasRepository(ctx.Repo.Repository.ID) { + if models.HasRepository(team, ctx.Repo.Repository.ID) { if err := team.GetUnits(); err != nil { ctx.Error(http.StatusInternalServerError, "GetUnits", err) return @@ -196,20 +197,20 @@ func changeRepoTeam(ctx *context.APIContext, add bool) { return } - repoHasTeam := team.HasRepository(ctx.Repo.Repository.ID) + repoHasTeam := models.HasRepository(team, ctx.Repo.Repository.ID) var err error if add { if repoHasTeam { ctx.Error(http.StatusUnprocessableEntity, "alreadyAdded", fmt.Errorf("team '%s' is already added to repo", team.Name)) return } - err = team.AddRepository(ctx.Repo.Repository) + err = models.AddRepository(team, ctx.Repo.Repository) } else { if !repoHasTeam { ctx.Error(http.StatusUnprocessableEntity, "notAdded", fmt.Errorf("team '%s' was not added to repo", team.Name)) return } - err = team.RemoveRepository(ctx.Repo.Repository.ID) + err = models.RemoveRepository(team, ctx.Repo.Repository.ID) } if err != nil { 
ctx.InternalServerError(err) @@ -219,10 +220,10 @@ func changeRepoTeam(ctx *context.APIContext, add bool) { ctx.Status(http.StatusNoContent) } -func getTeamByParam(ctx *context.APIContext) *models.Team { - team, err := models.GetTeam(ctx.Repo.Owner.ID, ctx.Params(":team")) +func getTeamByParam(ctx *context.APIContext) *organization.Team { + team, err := organization.GetTeam(ctx.Repo.Owner.ID, ctx.Params(":team")) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Error(http.StatusNotFound, "TeamNotExit", err) return nil } diff --git a/routers/api/v1/repo/transfer.go b/routers/api/v1/repo/transfer.go index a997444f49..241c578e60 100644 --- a/routers/api/v1/repo/transfer.go +++ b/routers/api/v1/repo/transfer.go @@ -9,6 +9,7 @@ import ( "net/http" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -67,23 +68,23 @@ func Transfer(ctx *context.APIContext) { } if newOwner.Type == user_model.UserTypeOrganization { - if !ctx.User.IsAdmin && newOwner.Visibility == api.VisibleTypePrivate && !models.OrgFromUser(newOwner).HasMemberWithUserID(ctx.User.ID) { + if !ctx.Doer.IsAdmin && newOwner.Visibility == api.VisibleTypePrivate && !organization.OrgFromUser(newOwner).HasMemberWithUserID(ctx.Doer.ID) { // The user shouldn't know about this organization ctx.Error(http.StatusNotFound, "", "The new owner does not exist or cannot be found") return } } - var teams []*models.Team + var teams []*organization.Team if opts.TeamIDs != nil { if !newOwner.IsOrganization() { ctx.Error(http.StatusUnprocessableEntity, "repoTransfer", "Teams can only be added to organization-owned repositories") return } - org := convert.ToOrganization(models.OrgFromUser(newOwner)) + org := convert.ToOrganization(organization.OrgFromUser(newOwner)) for _, tID := range *opts.TeamIDs { - team, err := models.GetTeamByID(tID) + team, err := organization.GetTeamByID(tID) if err != nil { ctx.Error(http.StatusUnprocessableEntity, "team", fmt.Errorf("team %d not found", tID)) return @@ -103,14 +104,16 @@ func Transfer(ctx *context.APIContext) { ctx.Repo.GitRepo = nil } - if err := repo_service.StartRepositoryTransfer(ctx.User, newOwner, ctx.Repo.Repository, teams); err != nil { + oldFullname := ctx.Repo.Repository.FullName() + + if err := repo_service.StartRepositoryTransfer(ctx.Doer, newOwner, ctx.Repo.Repository, teams); err != nil { if models.IsErrRepoTransferInProgress(err) { - ctx.Error(http.StatusConflict, "CreatePendingRepositoryTransfer", err) + ctx.Error(http.StatusConflict, "StartRepositoryTransfer", err) return } if repo_model.IsErrRepoAlreadyExist(err) { - ctx.Error(http.StatusUnprocessableEntity, "CreatePendingRepositoryTransfer", err) + ctx.Error(http.StatusUnprocessableEntity, "StartRepositoryTransfer", err) return } @@ -119,12 +122,12 @@ func Transfer(ctx *context.APIContext) { } if ctx.Repo.Repository.Status == repo_model.RepositoryPendingTransfer { - log.Trace("Repository transfer initiated: %s -> %s", ctx.Repo.Repository.FullName(), newOwner.Name) + log.Trace("Repository transfer initiated: %s -> %s", oldFullname, ctx.Repo.Repository.FullName()) ctx.JSON(http.StatusCreated, convert.ToRepo(ctx.Repo.Repository, perm.AccessModeAdmin)) return } - log.Trace("Repository transferred: %s -> %s", ctx.Repo.Repository.FullName(), newOwner.Name) + log.Trace("Repository transferred: %s -> %s", oldFullname, ctx.Repo.Repository.FullName()) 
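Editor's note on the updateMirror rewrite a few hunks above: the handler now parses the requested interval up front, rejects any non-zero duration below setting.Mirror.MinInterval, applies EnablePrune when supplied, and writes the mirror back to the database once at the end instead of inside the interval branch. The stand-alone sketch below reproduces just that validation rule with standard-library types; the minimum value and function names are stand-ins for illustration, not Gitea's configuration or implementation.

package main

import (
	"errors"
	"fmt"
	"time"
)

// minInterval stands in for setting.Mirror.MinInterval (hypothetical value).
const minInterval = 10 * time.Minute

// validateMirrorInterval mirrors the rule in the hunk above: the value must
// parse as a Go duration, a zero duration disables periodic sync, and any
// other value must be at least the configured minimum.
func validateMirrorInterval(raw string) (time.Duration, error) {
	interval, err := time.ParseDuration(raw)
	if err != nil {
		return 0, fmt.Errorf("wrong format for MirrorInterval: %w", err)
	}
	if interval != 0 && interval < minInterval {
		return 0, errors.New("interval is below the minimum allowed")
	}
	return interval, nil
}

func main() {
	for _, raw := range []string{"8h", "0", "30s", "not-a-duration"} {
		if d, err := validateMirrorInterval(raw); err != nil {
			fmt.Printf("%-14s -> rejected: %v\n", raw, err)
		} else {
			fmt.Printf("%-14s -> accepted: %v\n", raw, d)
		}
	}
}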
ctx.JSON(http.StatusAccepted, convert.ToRepo(ctx.Repo.Repository, perm.AccessModeAdmin)) } @@ -218,7 +221,7 @@ func acceptOrRejectRepoTransfer(ctx *context.APIContext, accept bool) error { return err } - if !repoTransfer.CanUserAcceptTransfer(ctx.User) { + if !repoTransfer.CanUserAcceptTransfer(ctx.Doer) { ctx.Error(http.StatusForbidden, "CanUserAcceptTransfer", nil) return fmt.Errorf("user does not have permissions to do this") } diff --git a/routers/api/v1/repo/wiki.go b/routers/api/v1/repo/wiki.go index f7054b5067..a3a5904925 100644 --- a/routers/api/v1/repo/wiki.go +++ b/routers/api/v1/repo/wiki.go @@ -71,7 +71,7 @@ func NewWikiPage(ctx *context.APIContext) { } form.ContentBase64 = string(content) - if err := wiki_service.AddWikiPage(ctx, ctx.User, ctx.Repo.Repository, wikiName, form.ContentBase64, form.Message); err != nil { + if err := wiki_service.AddWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName, form.ContentBase64, form.Message); err != nil { if models.IsErrWikiReservedName(err) { ctx.Error(http.StatusBadRequest, "IsErrWikiReservedName", err) } else if models.IsErrWikiAlreadyExist(err) { @@ -144,7 +144,7 @@ func EditWikiPage(ctx *context.APIContext) { } form.ContentBase64 = string(content) - if err := wiki_service.EditWikiPage(ctx, ctx.User, ctx.Repo.Repository, oldWikiName, newWikiName, form.ContentBase64, form.Message); err != nil { + if err := wiki_service.EditWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, oldWikiName, newWikiName, form.ContentBase64, form.Message); err != nil { ctx.Error(http.StatusInternalServerError, "EditWikiPage", err) return } @@ -233,7 +233,7 @@ func DeleteWikiPage(ctx *context.APIContext) { wikiName := wiki_service.NormalizeWikiName(ctx.Params(":pageName")) - if err := wiki_service.DeleteWikiPage(ctx, ctx.User, ctx.Repo.Repository, wikiName); err != nil { + if err := wiki_service.DeleteWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName); err != nil { if err.Error() == "file does not exist" { ctx.NotFound(err) return @@ -458,7 +458,7 @@ func findEntryForFile(commit *git.Commit, target string) (*git.TreeEntry, error) // findWikiRepoCommit opens the wiki repo and returns the latest commit, writing to context on error. // The caller is responsible for closing the returned repo again func findWikiRepoCommit(ctx *context.APIContext) (*git.Repository, *git.Commit) { - wikiRepo, err := git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.WikiPath()) + wikiRepo, err := git.OpenRepository(ctx, ctx.Repo.Repository.WikiPath()) if err != nil { if git.IsErrNotExist(err) || err.Error() == "no such file or directory" { diff --git a/routers/api/v1/swagger/package.go b/routers/api/v1/swagger/package.go new file mode 100644 index 0000000000..2a1f057314 --- /dev/null +++ b/routers/api/v1/swagger/package.go @@ -0,0 +1,30 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package swagger + +import ( + api "code.gitea.io/gitea/modules/structs" +) + +// Package +// swagger:response Package +type swaggerResponsePackage struct { + // in:body + Body api.Package `json:"body"` +} + +// PackageList +// swagger:response PackageList +type swaggerResponsePackageList struct { + // in:body + Body []api.Package `json:"body"` +} + +// PackageFileList +// swagger:response PackageFileList +type swaggerResponsePackageFileList struct { + // in:body + Body []api.PackageFile `json:"body"` +} diff --git a/routers/api/v1/swagger/repo.go b/routers/api/v1/swagger/repo.go index 40aeca677d..ab802db781 100644 --- a/routers/api/v1/swagger/repo.go +++ b/routers/api/v1/swagger/repo.go @@ -344,3 +344,10 @@ type swaggerWikiCommitList struct { // in:body Body api.WikiCommitList `json:"body"` } + +// RepoCollaboratorPermission +// swagger:response RepoCollaboratorPermission +type swaggerRepoCollaboratorPermission struct { + // in:body + Body api.RepoCollaboratorPermission `json:"body"` +} diff --git a/routers/api/v1/user/app.go b/routers/api/v1/user/app.go index 94cfab45bd..165b8f005e 100644 --- a/routers/api/v1/user/app.go +++ b/routers/api/v1/user/app.go @@ -45,7 +45,7 @@ func ListAccessTokens(ctx *context.APIContext) { // "200": // "$ref": "#/responses/AccessTokenList" - opts := models.ListAccessTokensOptions{UserID: ctx.User.ID, ListOptions: utils.GetListOptions(ctx)} + opts := models.ListAccessTokensOptions{UserID: ctx.Doer.ID, ListOptions: utils.GetListOptions(ctx)} count, err := models.CountAccessTokens(opts) if err != nil { @@ -99,7 +99,7 @@ func CreateAccessToken(ctx *context.APIContext) { form := web.GetForm(ctx).(*api.CreateAccessTokenOption) t := &models.AccessToken{ - UID: ctx.User.ID, + UID: ctx.Doer.ID, Name: form.Name, } @@ -157,7 +157,7 @@ func DeleteAccessToken(ctx *context.APIContext) { if tokenID == 0 { tokens, err := models.ListAccessTokens(models.ListAccessTokensOptions{ Name: token, - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, }) if err != nil { ctx.Error(http.StatusInternalServerError, "ListAccessTokens", err) @@ -180,7 +180,7 @@ func DeleteAccessToken(ctx *context.APIContext) { return } - if err := models.DeleteAccessTokenByID(tokenID, ctx.User.ID); err != nil { + if err := models.DeleteAccessTokenByID(tokenID, ctx.Doer.ID); err != nil { if models.IsErrAccessTokenNotExist(err) { ctx.NotFound() } else { @@ -215,7 +215,7 @@ func CreateOauth2Application(ctx *context.APIContext) { app, err := auth.CreateOAuth2Application(auth.CreateOAuth2ApplicationOptions{ Name: data.Name, - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, RedirectURIs: data.RedirectURIs, }) if err != nil { @@ -252,7 +252,7 @@ func ListOauth2Applications(ctx *context.APIContext) { // "200": // "$ref": "#/responses/OAuth2ApplicationList" - apps, total, err := auth.ListOAuth2Applications(ctx.User.ID, utils.GetListOptions(ctx)) + apps, total, err := auth.ListOAuth2Applications(ctx.Doer.ID, utils.GetListOptions(ctx)) if err != nil { ctx.Error(http.StatusInternalServerError, "ListOAuth2Applications", err) return @@ -288,7 +288,7 @@ func DeleteOauth2Application(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" appID := ctx.ParamsInt64(":id") - if err := auth.DeleteOAuth2Application(appID, ctx.User.ID); err != nil { + if err := auth.DeleteOAuth2Application(appID, ctx.Doer.ID); err != nil { if auth.IsErrOAuthApplicationNotFound(err) { ctx.NotFound() } else { @@ -365,7 +365,7 @@ func UpdateOauth2Application(ctx *context.APIContext) { app, err := 
auth.UpdateOAuth2Application(auth.UpdateOAuth2ApplicationOptions{ Name: data.Name, - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, ID: appID, RedirectURIs: data.RedirectURIs, }) diff --git a/routers/api/v1/user/email.go b/routers/api/v1/user/email.go index 6887c306cc..170ffb7736 100644 --- a/routers/api/v1/user/email.go +++ b/routers/api/v1/user/email.go @@ -28,7 +28,7 @@ func ListEmails(ctx *context.APIContext) { // "200": // "$ref": "#/responses/EmailList" - emails, err := user_model.GetEmailAddresses(ctx.User.ID) + emails, err := user_model.GetEmailAddresses(ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "GetEmailAddresses", err) return @@ -71,7 +71,7 @@ func AddEmail(ctx *context.APIContext) { emails := make([]*user_model.EmailAddress, len(form.Emails)) for i := range form.Emails { emails[i] = &user_model.EmailAddress{ - UID: ctx.User.ID, + UID: ctx.Doer.ID, Email: form.Emails[i], IsActivated: !setting.Service.RegisterEmailConfirm, } @@ -80,8 +80,16 @@ func AddEmail(ctx *context.APIContext) { if err := user_model.AddEmailAddresses(emails); err != nil { if user_model.IsErrEmailAlreadyUsed(err) { ctx.Error(http.StatusUnprocessableEntity, "", "Email address has been used: "+err.(user_model.ErrEmailAlreadyUsed).Email) - } else if user_model.IsErrEmailInvalid(err) { - errMsg := fmt.Sprintf("Email address %s invalid", err.(user_model.ErrEmailInvalid).Email) + } else if user_model.IsErrEmailCharIsNotSupported(err) || user_model.IsErrEmailInvalid(err) { + email := "" + if typedError, ok := err.(user_model.ErrEmailInvalid); ok { + email = typedError.Email + } + if typedError, ok := err.(user_model.ErrEmailCharIsNotSupported); ok { + email = typedError.Email + } + + errMsg := fmt.Sprintf("Email address %q invalid", email) ctx.Error(http.StatusUnprocessableEntity, "", errMsg) } else { ctx.Error(http.StatusInternalServerError, "AddEmailAddresses", err) @@ -123,7 +131,7 @@ func DeleteEmail(ctx *context.APIContext) { for i := range form.Emails { emails[i] = &user_model.EmailAddress{ Email: form.Emails[i], - UID: ctx.User.ID, + UID: ctx.Doer.ID, } } diff --git a/routers/api/v1/user/follower.go b/routers/api/v1/user/follower.go index 1eacb89db2..3c81b27f8d 100644 --- a/routers/api/v1/user/follower.go +++ b/routers/api/v1/user/follower.go @@ -18,7 +18,7 @@ import ( func responseAPIUsers(ctx *context.APIContext, users []*user_model.User) { apiUsers := make([]*api.User, len(users)) for i := range users { - apiUsers[i] = convert.ToUser(users[i], ctx.User) + apiUsers[i] = convert.ToUser(users[i], ctx.Doer) } ctx.JSON(http.StatusOK, &apiUsers) } @@ -54,7 +54,7 @@ func ListMyFollowers(ctx *context.APIContext) { // "200": // "$ref": "#/responses/UserList" - listUserFollowers(ctx, ctx.User) + listUserFollowers(ctx, ctx.Doer) } // ListFollowers list the given user's followers @@ -82,11 +82,7 @@ func ListFollowers(ctx *context.APIContext) { // "200": // "$ref": "#/responses/UserList" - u := GetUserByParams(ctx) - if ctx.Written() { - return - } - listUserFollowers(ctx, u) + listUserFollowers(ctx, ctx.ContextUser) } func listUserFollowing(ctx *context.APIContext, u *user_model.User) { @@ -120,7 +116,7 @@ func ListMyFollowing(ctx *context.APIContext) { // "200": // "$ref": "#/responses/UserList" - listUserFollowing(ctx, ctx.User) + listUserFollowing(ctx, ctx.Doer) } // ListFollowing list the users that the given user is following @@ -148,11 +144,7 @@ func ListFollowing(ctx *context.APIContext) { // "200": // "$ref": "#/responses/UserList" - u := GetUserByParams(ctx) - if ctx.Written() { - 
return - } - listUserFollowing(ctx, u) + listUserFollowing(ctx, ctx.ContextUser) } func checkUserFollowing(ctx *context.APIContext, u *user_model.User, followID int64) { @@ -180,25 +172,21 @@ func CheckMyFollowing(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" - target := GetUserByParams(ctx) - if ctx.Written() { - return - } - checkUserFollowing(ctx, ctx.User, target.ID) + checkUserFollowing(ctx, ctx.Doer, ctx.ContextUser.ID) } // CheckFollowing check if one user is following another user func CheckFollowing(ctx *context.APIContext) { - // swagger:operation GET /users/{follower}/following/{followee} user userCheckFollowing + // swagger:operation GET /users/{username}/following/{target} user userCheckFollowing // --- // summary: Check if one user is following another user // parameters: - // - name: follower + // - name: username // in: path // description: username of following user // type: string // required: true - // - name: followee + // - name: target // in: path // description: username of followed user // type: string @@ -209,15 +197,11 @@ func CheckFollowing(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" - u := GetUserByParams(ctx) - if ctx.Written() { - return - } target := GetUserByParamsName(ctx, ":target") if ctx.Written() { return } - checkUserFollowing(ctx, u, target.ID) + checkUserFollowing(ctx, ctx.ContextUser, target.ID) } // Follow follow a user @@ -235,11 +219,7 @@ func Follow(ctx *context.APIContext) { // "204": // "$ref": "#/responses/empty" - target := GetUserByParams(ctx) - if ctx.Written() { - return - } - if err := user_model.FollowUser(ctx.User.ID, target.ID); err != nil { + if err := user_model.FollowUser(ctx.Doer.ID, ctx.ContextUser.ID); err != nil { ctx.Error(http.StatusInternalServerError, "FollowUser", err) return } @@ -261,11 +241,7 @@ func Unfollow(ctx *context.APIContext) { // "204": // "$ref": "#/responses/empty" - target := GetUserByParams(ctx) - if ctx.Written() { - return - } - if err := user_model.UnfollowUser(ctx.User.ID, target.ID); err != nil { + if err := user_model.UnfollowUser(ctx.Doer.ID, ctx.ContextUser.ID); err != nil { ctx.Error(http.StatusInternalServerError, "UnfollowUser", err) return } diff --git a/routers/api/v1/user/gpg_key.go b/routers/api/v1/user/gpg_key.go index 26aeeeabf9..b211a24a0e 100644 --- a/routers/api/v1/user/gpg_key.go +++ b/routers/api/v1/user/gpg_key.go @@ -18,7 +18,7 @@ import ( ) func listGPGKeys(ctx *context.APIContext, uid int64, listOptions db.ListOptions) { - keys, err := asymkey_model.ListGPGKeys(db.DefaultContext, uid, listOptions) + keys, err := asymkey_model.ListGPGKeys(ctx, uid, listOptions) if err != nil { ctx.Error(http.StatusInternalServerError, "ListGPGKeys", err) return @@ -64,11 +64,7 @@ func ListGPGKeys(ctx *context.APIContext) { // "200": // "$ref": "#/responses/GPGKeyList" - user := GetUserByParams(ctx) - if ctx.Written() { - return - } - listGPGKeys(ctx, user.ID, utils.GetListOptions(ctx)) + listGPGKeys(ctx, ctx.ContextUser.ID, utils.GetListOptions(ctx)) } // ListMyGPGKeys get the GPG key list of the authenticated user @@ -91,7 +87,7 @@ func ListMyGPGKeys(ctx *context.APIContext) { // "200": // "$ref": "#/responses/GPGKeyList" - listGPGKeys(ctx, ctx.User.ID, utils.GetListOptions(ctx)) + listGPGKeys(ctx, ctx.Doer.ID, utils.GetListOptions(ctx)) } // GetGPGKey get the GPG key based on a id @@ -128,8 +124,8 @@ func GetGPGKey(ctx *context.APIContext) { // CreateUserGPGKey creates new GPG key to given user by ID. 
func CreateUserGPGKey(ctx *context.APIContext, form api.CreateGPGKeyOption, uid int64) { - token := asymkey_model.VerificationToken(ctx.User, 1) - lastToken := asymkey_model.VerificationToken(ctx.User, 0) + token := asymkey_model.VerificationToken(ctx.Doer, 1) + lastToken := asymkey_model.VerificationToken(ctx.Doer, 0) keys, err := asymkey_model.AddGPGKey(uid, form.ArmoredKey, token, form.Signature) if err != nil && asymkey_model.IsErrGPGInvalidTokenSignature(err) { @@ -156,7 +152,7 @@ func GetVerificationToken(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" - token := asymkey_model.VerificationToken(ctx.User, 1) + token := asymkey_model.VerificationToken(ctx.Doer, 1) ctx.PlainText(http.StatusOK, token) } @@ -178,12 +174,12 @@ func VerifyUserGPGKey(ctx *context.APIContext) { // "$ref": "#/responses/validationError" form := web.GetForm(ctx).(*api.VerifyGPGKeyOption) - token := asymkey_model.VerificationToken(ctx.User, 1) - lastToken := asymkey_model.VerificationToken(ctx.User, 0) + token := asymkey_model.VerificationToken(ctx.Doer, 1) + lastToken := asymkey_model.VerificationToken(ctx.Doer, 0) - _, err := asymkey_model.VerifyGPGKey(ctx.User.ID, form.KeyID, token, form.Signature) + _, err := asymkey_model.VerifyGPGKey(ctx.Doer.ID, form.KeyID, token, form.Signature) if err != nil && asymkey_model.IsErrGPGInvalidTokenSignature(err) { - _, err = asymkey_model.VerifyGPGKey(ctx.User.ID, form.KeyID, lastToken, form.Signature) + _, err = asymkey_model.VerifyGPGKey(ctx.Doer.ID, form.KeyID, lastToken, form.Signature) } if err != nil { @@ -230,7 +226,7 @@ func CreateGPGKey(ctx *context.APIContext) { // "$ref": "#/responses/validationError" form := web.GetForm(ctx).(*api.CreateGPGKeyOption) - CreateUserGPGKey(ctx, *form, ctx.User.ID) + CreateUserGPGKey(ctx, *form, ctx.Doer.ID) } // DeleteGPGKey remove a GPG key belonging to the authenticated user @@ -255,7 +251,7 @@ func DeleteGPGKey(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" - if err := asymkey_model.DeleteGPGKey(ctx.User, ctx.ParamsInt64(":id")); err != nil { + if err := asymkey_model.DeleteGPGKey(ctx.Doer, ctx.ParamsInt64(":id")); err != nil { if asymkey_model.IsErrGPGKeyAccessDenied(err) { ctx.Error(http.StatusForbidden, "", "You do not have access to this key") } else { diff --git a/routers/api/v1/user/key.go b/routers/api/v1/user/key.go index e8cc2035e5..71a2c910a6 100644 --- a/routers/api/v1/user/key.go +++ b/routers/api/v1/user/key.go @@ -86,7 +86,7 @@ func listPublicKeys(ctx *context.APIContext, user *user_model.User) { apiKeys := make([]*api.PublicKey, len(keys)) for i := range keys { apiKeys[i] = convert.ToPublicKey(apiLink, keys[i]) - if ctx.User.IsAdmin || ctx.User.ID == keys[i].OwnerID { + if ctx.Doer.IsAdmin || ctx.Doer.ID == keys[i].OwnerID { apiKeys[i], _ = appendPrivateInformation(apiKeys[i], keys[i], user) } } @@ -119,7 +119,7 @@ func ListMyPublicKeys(ctx *context.APIContext) { // "200": // "$ref": "#/responses/PublicKeyList" - listPublicKeys(ctx, ctx.User) + listPublicKeys(ctx, ctx.Doer) } // ListPublicKeys list the given user's public keys @@ -151,11 +151,7 @@ func ListPublicKeys(ctx *context.APIContext) { // "200": // "$ref": "#/responses/PublicKeyList" - user := GetUserByParams(ctx) - if ctx.Written() { - return - } - listPublicKeys(ctx, user) + listPublicKeys(ctx, ctx.ContextUser) } // GetPublicKey get a public key @@ -190,8 +186,8 @@ func GetPublicKey(ctx *context.APIContext) { apiLink := composePublicKeysAPILink() apiKey := convert.ToPublicKey(apiLink, key) - if 
ctx.User.IsAdmin || ctx.User.ID == key.OwnerID { - apiKey, _ = appendPrivateInformation(apiKey, key, ctx.User) + if ctx.Doer.IsAdmin || ctx.Doer.ID == key.OwnerID { + apiKey, _ = appendPrivateInformation(apiKey, key, ctx.Doer) } ctx.JSON(http.StatusOK, apiKey) } @@ -211,8 +207,8 @@ func CreateUserPublicKey(ctx *context.APIContext, form api.CreateKeyOption, uid } apiLink := composePublicKeysAPILink() apiKey := convert.ToPublicKey(apiLink, key) - if ctx.User.IsAdmin || ctx.User.ID == key.OwnerID { - apiKey, _ = appendPrivateInformation(apiKey, key, ctx.User) + if ctx.Doer.IsAdmin || ctx.Doer.ID == key.OwnerID { + apiKey, _ = appendPrivateInformation(apiKey, key, ctx.Doer) } ctx.JSON(http.StatusCreated, apiKey) } @@ -238,7 +234,7 @@ func CreatePublicKey(ctx *context.APIContext) { // "$ref": "#/responses/validationError" form := web.GetForm(ctx).(*api.CreateKeyOption) - CreateUserPublicKey(ctx, *form, ctx.User.ID) + CreateUserPublicKey(ctx, *form, ctx.Doer.ID) } // DeletePublicKey delete one public key @@ -266,16 +262,21 @@ func DeletePublicKey(ctx *context.APIContext) { id := ctx.ParamsInt64(":id") externallyManaged, err := asymkey_model.PublicKeyIsExternallyManaged(id) if err != nil { - ctx.Error(http.StatusInternalServerError, "PublicKeyIsExternallyManaged", err) - } - if externallyManaged { - ctx.Error(http.StatusForbidden, "", "SSH Key is externally managed for this user") - } - - if err := asymkey_service.DeletePublicKey(ctx.User, id); err != nil { if asymkey_model.IsErrKeyNotExist(err) { ctx.NotFound() - } else if asymkey_model.IsErrKeyAccessDenied(err) { + } else { + ctx.Error(http.StatusInternalServerError, "PublicKeyIsExternallyManaged", err) + } + return + } + + if externallyManaged { + ctx.Error(http.StatusForbidden, "", "SSH Key is externally managed for this user") + return + } + + if err := asymkey_service.DeletePublicKey(ctx.Doer, id); err != nil { + if asymkey_model.IsErrKeyAccessDenied(err) { ctx.Error(http.StatusForbidden, "", "You do not have access to this key") } else { ctx.Error(http.StatusInternalServerError, "DeletePublicKey", err) diff --git a/routers/api/v1/user/repo.go b/routers/api/v1/user/repo.go index 109548ec76..0231c8ccbc 100644 --- a/routers/api/v1/user/repo.go +++ b/routers/api/v1/user/repo.go @@ -8,7 +8,6 @@ import ( "net/http" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/perm" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" @@ -39,12 +38,12 @@ func listUserRepos(ctx *context.APIContext, u *user_model.User, private bool) { apiRepos := make([]*api.Repository, 0, len(repos)) for i := range repos { - access, err := models.AccessLevel(ctx.User, repos[i]) + access, err := models.AccessLevel(ctx.Doer, repos[i]) if err != nil { ctx.Error(http.StatusInternalServerError, "AccessLevel", err) return } - if ctx.IsSigned && ctx.User.IsAdmin || access >= perm.AccessModeRead { + if ctx.IsSigned && ctx.Doer.IsAdmin || access >= perm.AccessModeRead { apiRepos = append(apiRepos, convert.ToRepo(repos[i], access)) } } @@ -79,12 +78,8 @@ func ListUserRepos(ctx *context.APIContext) { // "200": // "$ref": "#/responses/RepositoryList" - user := GetUserByParams(ctx) - if ctx.Written() { - return - } private := ctx.IsSigned - listUserRepos(ctx, user, private) + listUserRepos(ctx, ctx.ContextUser, private) } // ListMyRepos - list the repositories you own or have access to. 
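Editor's note: most of this patch is the same two substitutions repeated across handlers. ctx.User becomes ctx.Doer, the authenticated user performing the request, and per-handler GetUserByParams lookups are replaced by ctx.ContextUser, which middleware resolves from the :username path parameter before the handler runs (as in the key, repo, star and watch listings around this point). The self-contained sketch below only illustrates that split; the types and surrounding plumbing are invented stand-ins, not Gitea's context package.

package main

import (
	"fmt"
	"net/http"
)

// user and apiContext are simplified stand-ins for Gitea's models; only the
// fields needed to show the Doer / ContextUser split are included.
type user struct {
	ID   int64
	Name string
}

type apiContext struct {
	Doer        *user // authenticated user making the request (formerly ctx.User)
	ContextUser *user // user addressed by the :username path parameter
}

// listForUser sketches the new handler shape: the user named in the URL comes
// from ContextUser, while Doer is consulted only for permission-style
// decisions, such as whether private items may be included in the listing.
func listForUser(ctx *apiContext) (int, string) {
	if ctx.ContextUser == nil {
		return http.StatusNotFound, "user not found"
	}
	includePrivate := ctx.Doer != nil && ctx.Doer.ID == ctx.ContextUser.ID
	return http.StatusOK, fmt.Sprintf("listing for %s (private included: %t)",
		ctx.ContextUser.Name, includePrivate)
}

func main() {
	ctx := &apiContext{
		Doer:        &user{ID: 2, Name: "user2"},
		ContextUser: &user{ID: 5, Name: "user5"},
	}
	code, msg := listForUser(ctx)
	fmt.Println(code, msg)
}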
@@ -109,8 +104,8 @@ func ListMyRepos(ctx *context.APIContext) { opts := &models.SearchRepoOptions{ ListOptions: utils.GetListOptions(ctx), - Actor: ctx.User, - OwnerID: ctx.User.ID, + Actor: ctx.Doer, + OwnerID: ctx.Doer.ID, Private: ctx.IsSigned, IncludeDescription: true, } @@ -124,11 +119,11 @@ func ListMyRepos(ctx *context.APIContext) { results := make([]*api.Repository, len(repos)) for i, repo := range repos { - if err = repo.GetOwner(db.DefaultContext); err != nil { + if err = repo.GetOwner(ctx); err != nil { ctx.Error(http.StatusInternalServerError, "GetOwner", err) return } - accessMode, err := models.AccessLevel(ctx.User, repo) + accessMode, err := models.AccessLevel(ctx.Doer, repo) if err != nil { ctx.Error(http.StatusInternalServerError, "AccessLevel", err) } diff --git a/routers/api/v1/user/settings.go b/routers/api/v1/user/settings.go index 5f4d76ed72..dc7e7f1160 100644 --- a/routers/api/v1/user/settings.go +++ b/routers/api/v1/user/settings.go @@ -24,7 +24,7 @@ func GetUserSettings(ctx *context.APIContext) { // responses: // "200": // "$ref": "#/responses/UserSettings" - ctx.JSON(http.StatusOK, convert.User2UserSettings(ctx.User)) + ctx.JSON(http.StatusOK, convert.User2UserSettings(ctx.Doer)) } // UpdateUserSettings returns user settings @@ -46,38 +46,38 @@ func UpdateUserSettings(ctx *context.APIContext) { form := web.GetForm(ctx).(*api.UserSettingsOptions) if form.FullName != nil { - ctx.User.FullName = *form.FullName + ctx.Doer.FullName = *form.FullName } if form.Description != nil { - ctx.User.Description = *form.Description + ctx.Doer.Description = *form.Description } if form.Website != nil { - ctx.User.Website = *form.Website + ctx.Doer.Website = *form.Website } if form.Location != nil { - ctx.User.Location = *form.Location + ctx.Doer.Location = *form.Location } if form.Language != nil { - ctx.User.Language = *form.Language + ctx.Doer.Language = *form.Language } if form.Theme != nil { - ctx.User.Theme = *form.Theme + ctx.Doer.Theme = *form.Theme } if form.DiffViewStyle != nil { - ctx.User.DiffViewStyle = *form.DiffViewStyle + ctx.Doer.DiffViewStyle = *form.DiffViewStyle } if form.HideEmail != nil { - ctx.User.KeepEmailPrivate = *form.HideEmail + ctx.Doer.KeepEmailPrivate = *form.HideEmail } if form.HideActivity != nil { - ctx.User.KeepActivityPrivate = *form.HideActivity + ctx.Doer.KeepActivityPrivate = *form.HideActivity } - if err := user_model.UpdateUser(ctx.User, false); err != nil { + if err := user_model.UpdateUser(ctx.Doer, false); err != nil { ctx.InternalServerError(err) return } - ctx.JSON(http.StatusOK, convert.User2UserSettings(ctx.User)) + ctx.JSON(http.StatusOK, convert.User2UserSettings(ctx.Doer)) } diff --git a/routers/api/v1/user/star.go b/routers/api/v1/user/star.go index cc527d9213..cdbc35471b 100644 --- a/routers/api/v1/user/star.go +++ b/routers/api/v1/user/star.go @@ -21,7 +21,7 @@ import ( // getStarredRepos returns the repos that the user with the specified userID has // starred func getStarredRepos(user *user_model.User, private bool, listOptions db.ListOptions) ([]*api.Repository, error) { - starredRepos, err := models.GetStarredRepos(user.ID, private, listOptions) + starredRepos, err := repo_model.GetStarredRepos(user.ID, private, listOptions) if err != nil { return nil, err } @@ -62,15 +62,14 @@ func GetStarredRepos(ctx *context.APIContext) { // "200": // "$ref": "#/responses/RepositoryList" - user := GetUserByParams(ctx) - private := user.ID == ctx.User.ID - repos, err := getStarredRepos(user, private, utils.GetListOptions(ctx)) + private := 
ctx.ContextUser.ID == ctx.Doer.ID + repos, err := getStarredRepos(ctx.ContextUser, private, utils.GetListOptions(ctx)) if err != nil { ctx.Error(http.StatusInternalServerError, "getStarredRepos", err) return } - ctx.SetTotalCountHeader(int64(user.NumStars)) + ctx.SetTotalCountHeader(int64(ctx.ContextUser.NumStars)) ctx.JSON(http.StatusOK, &repos) } @@ -94,12 +93,12 @@ func GetMyStarredRepos(ctx *context.APIContext) { // "200": // "$ref": "#/responses/RepositoryList" - repos, err := getStarredRepos(ctx.User, true, utils.GetListOptions(ctx)) + repos, err := getStarredRepos(ctx.Doer, true, utils.GetListOptions(ctx)) if err != nil { ctx.Error(http.StatusInternalServerError, "getStarredRepos", err) } - ctx.SetTotalCountHeader(int64(ctx.User.NumStars)) + ctx.SetTotalCountHeader(int64(ctx.Doer.NumStars)) ctx.JSON(http.StatusOK, &repos) } @@ -125,7 +124,7 @@ func IsStarring(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" - if repo_model.IsStaring(ctx.User.ID, ctx.Repo.Repository.ID) { + if repo_model.IsStaring(ctx.Doer.ID, ctx.Repo.Repository.ID) { ctx.Status(http.StatusNoContent) } else { ctx.NotFound() @@ -152,7 +151,7 @@ func Star(ctx *context.APIContext) { // "204": // "$ref": "#/responses/empty" - err := repo_model.StarRepo(ctx.User.ID, ctx.Repo.Repository.ID, true) + err := repo_model.StarRepo(ctx.Doer.ID, ctx.Repo.Repository.ID, true) if err != nil { ctx.Error(http.StatusInternalServerError, "StarRepo", err) return @@ -180,7 +179,7 @@ func Unstar(ctx *context.APIContext) { // "204": // "$ref": "#/responses/empty" - err := repo_model.StarRepo(ctx.User.ID, ctx.Repo.Repository.ID, false) + err := repo_model.StarRepo(ctx.Doer.ID, ctx.Repo.Repository.ID, false) if err != nil { ctx.Error(http.StatusInternalServerError, "StarRepo", err) return diff --git a/routers/api/v1/user/user.go b/routers/api/v1/user/user.go index bba7b7a5d1..018f75762f 100644 --- a/routers/api/v1/user/user.go +++ b/routers/api/v1/user/user.go @@ -56,7 +56,7 @@ func Search(ctx *context.APIContext) { listOptions := utils.GetListOptions(ctx) users, maxResults, err := user_model.SearchUsers(&user_model.SearchUserOptions{ - Actor: ctx.User, + Actor: ctx.Doer, Keyword: ctx.FormTrim("q"), UID: ctx.FormInt64("uid"), Type: user_model.UserTypeIndividual, @@ -75,7 +75,7 @@ func Search(ctx *context.APIContext) { ctx.JSON(http.StatusOK, map[string]interface{}{ "ok": true, - "data": convert.ToUsers(ctx.User, users), + "data": convert.ToUsers(ctx.Doer, users), }) } @@ -98,18 +98,12 @@ func GetInfo(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" - u := GetUserByParams(ctx) - - if ctx.Written() { - return - } - - if !models.IsUserVisibleToViewer(u, ctx.User) { + if !user_model.IsUserVisibleToViewer(ctx.ContextUser, ctx.Doer) { // fake ErrUserNotExist error message to not leak information about existence ctx.NotFound("GetUserByName", user_model.ErrUserNotExist{Name: ctx.Params(":username")}) return } - ctx.JSON(http.StatusOK, convert.ToUser(u, ctx.User)) + ctx.JSON(http.StatusOK, convert.ToUser(ctx.ContextUser, ctx.Doer)) } // GetAuthenticatedUser get current user's information @@ -123,7 +117,7 @@ func GetAuthenticatedUser(ctx *context.APIContext) { // "200": // "$ref": "#/responses/User" - ctx.JSON(http.StatusOK, convert.ToUser(ctx.User, ctx.User)) + ctx.JSON(http.StatusOK, convert.ToUser(ctx.Doer, ctx.Doer)) } // GetUserHeatmapData is the handler to get a users heatmap @@ -145,12 +139,7 @@ func GetUserHeatmapData(ctx *context.APIContext) { // "404": // "$ref": "#/responses/notFound" - user := 
GetUserByParams(ctx) - if ctx.Written() { - return - } - - heatmap, err := models.GetUserHeatmapDataByUser(user, ctx.User) + heatmap, err := models.GetUserHeatmapDataByUser(ctx.ContextUser, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "GetUserHeatmapDataByUser", err) return diff --git a/routers/api/v1/user/watch.go b/routers/api/v1/user/watch.go index 49b1d47d95..e7c6837cb8 100644 --- a/routers/api/v1/user/watch.go +++ b/routers/api/v1/user/watch.go @@ -19,7 +19,7 @@ import ( // getWatchedRepos returns the repos that the user with the specified userID is watching func getWatchedRepos(user *user_model.User, private bool, listOptions db.ListOptions) ([]*api.Repository, int64, error) { - watchedRepos, total, err := models.GetWatchedRepos(user.ID, private, listOptions) + watchedRepos, total, err := repo_model.GetWatchedRepos(user.ID, private, listOptions) if err != nil { return nil, 0, err } @@ -60,9 +60,8 @@ func GetWatchedRepos(ctx *context.APIContext) { // "200": // "$ref": "#/responses/RepositoryList" - user := GetUserByParams(ctx) - private := user.ID == ctx.User.ID - repos, total, err := getWatchedRepos(user, private, utils.GetListOptions(ctx)) + private := ctx.ContextUser.ID == ctx.Doer.ID + repos, total, err := getWatchedRepos(ctx.ContextUser, private, utils.GetListOptions(ctx)) if err != nil { ctx.Error(http.StatusInternalServerError, "getWatchedRepos", err) } @@ -91,7 +90,7 @@ func GetMyWatchedRepos(ctx *context.APIContext) { // "200": // "$ref": "#/responses/RepositoryList" - repos, total, err := getWatchedRepos(ctx.User, true, utils.GetListOptions(ctx)) + repos, total, err := getWatchedRepos(ctx.Doer, true, utils.GetListOptions(ctx)) if err != nil { ctx.Error(http.StatusInternalServerError, "getWatchedRepos", err) } @@ -123,7 +122,7 @@ func IsWatching(ctx *context.APIContext) { // "404": // description: User is not watching this repo or repo do not exist - if repo_model.IsWatching(ctx.User.ID, ctx.Repo.Repository.ID) { + if repo_model.IsWatching(ctx.Doer.ID, ctx.Repo.Repository.ID) { ctx.JSON(http.StatusOK, api.WatchInfo{ Subscribed: true, Ignored: false, @@ -157,7 +156,7 @@ func Watch(ctx *context.APIContext) { // "200": // "$ref": "#/responses/WatchInfo" - err := repo_model.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, true) + err := repo_model.WatchRepo(ctx.Doer.ID, ctx.Repo.Repository.ID, true) if err != nil { ctx.Error(http.StatusInternalServerError, "WatchRepo", err) return @@ -192,7 +191,7 @@ func Unwatch(ctx *context.APIContext) { // "204": // "$ref": "#/responses/empty" - err := repo_model.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, false) + err := repo_model.WatchRepo(ctx.Doer.ID, ctx.Repo.Repository.ID, false) if err != nil { ctx.Error(http.StatusInternalServerError, "UnwatchRepo", err) return diff --git a/routers/api/v1/utils/git.go b/routers/api/v1/utils/git.go index f7a7fe83c9..ac64d5b87b 100644 --- a/routers/api/v1/utils/git.go +++ b/routers/api/v1/utils/git.go @@ -5,6 +5,7 @@ package utils import ( + "fmt" "net/http" "code.gitea.io/gitea/modules/context" @@ -35,12 +36,7 @@ func ResolveRefOrSha(ctx *context.APIContext, ref string) string { // GetGitRefs return git references based on filter func GetGitRefs(ctx *context.APIContext, filter string) ([]*git.Reference, string, error) { if ctx.Repo.GitRepo == nil { - var err error - ctx.Repo.GitRepo, err = git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.RepoPath()) - if err != nil { - return nil, "OpenRepository", err - } - defer ctx.Repo.GitRepo.Close() + return nil, "", fmt.Errorf("no open git 
repo found in context") } if len(filter) > 0 { filter = "refs/" + filter diff --git a/routers/api/v1/utils/hook.go b/routers/api/v1/utils/hook.go index 1f0a35ce22..4c3753231d 100644 --- a/routers/api/v1/utils/hook.go +++ b/routers/api/v1/utils/hook.go @@ -9,7 +9,6 @@ import ( "net/http" "strings" - "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/webhook" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/convert" @@ -164,7 +163,7 @@ func addHook(ctx *context.APIContext, form *api.CreateHookOption, orgID, repoID if err := w.UpdateEvent(); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateEvent", err) return nil, false - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.Error(http.StatusInternalServerError, "CreateWebhook", err) return nil, false } @@ -246,18 +245,29 @@ func editHook(ctx *context.APIContext, form *api.EditHookOption, w *webhook.Webh w.ChooseEvents = true w.Create = util.IsStringInSlice(string(webhook.HookEventCreate), form.Events, true) w.Push = util.IsStringInSlice(string(webhook.HookEventPush), form.Events, true) - w.PullRequest = util.IsStringInSlice(string(webhook.HookEventPullRequest), form.Events, true) w.Create = util.IsStringInSlice(string(webhook.HookEventCreate), form.Events, true) w.Delete = util.IsStringInSlice(string(webhook.HookEventDelete), form.Events, true) w.Fork = util.IsStringInSlice(string(webhook.HookEventFork), form.Events, true) - w.Issues = util.IsStringInSlice(string(webhook.HookEventIssues), form.Events, true) - w.IssueComment = util.IsStringInSlice(string(webhook.HookEventIssueComment), form.Events, true) - w.Push = util.IsStringInSlice(string(webhook.HookEventPush), form.Events, true) - w.PullRequest = util.IsStringInSlice(string(webhook.HookEventPullRequest), form.Events, true) w.Repository = util.IsStringInSlice(string(webhook.HookEventRepository), form.Events, true) w.Release = util.IsStringInSlice(string(webhook.HookEventRelease), form.Events, true) w.BranchFilter = form.BranchFilter + // Issues + w.Issues = issuesHook(form.Events, "issues_only") + w.IssueAssign = issuesHook(form.Events, string(webhook.HookEventIssueAssign)) + w.IssueLabel = issuesHook(form.Events, string(webhook.HookEventIssueLabel)) + w.IssueMilestone = issuesHook(form.Events, string(webhook.HookEventIssueMilestone)) + w.IssueComment = issuesHook(form.Events, string(webhook.HookEventIssueComment)) + + // Pull requests + w.PullRequest = pullHook(form.Events, "pull_request_only") + w.PullRequestAssign = pullHook(form.Events, string(webhook.HookEventPullRequestAssign)) + w.PullRequestLabel = pullHook(form.Events, string(webhook.HookEventPullRequestLabel)) + w.PullRequestMilestone = pullHook(form.Events, string(webhook.HookEventPullRequestMilestone)) + w.PullRequestComment = pullHook(form.Events, string(webhook.HookEventPullRequestComment)) + w.PullRequestReview = pullHook(form.Events, "pull_request_review") + w.PullRequestSync = pullHook(form.Events, string(webhook.HookEventPullRequestSync)) + if err := w.UpdateEvent(); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateEvent", err) return false diff --git a/routers/api/v1/utils/page.go b/routers/api/v1/utils/page.go new file mode 100644 index 0000000000..608bec7395 --- /dev/null +++ b/routers/api/v1/utils/page.go @@ -0,0 +1,19 @@ +// Copyright 2017 The Gitea Authors. All rights reserved. 
+// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package utils + +import ( + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" +) + +// GetListOptions returns list options using the page and limit parameters +func GetListOptions(ctx *context.APIContext) db.ListOptions { + return db.ListOptions{ + Page: ctx.FormInt("page"), + PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), + } +} diff --git a/routers/common/middleware.go b/routers/common/middleware.go index 880700969a..6ea1e1dfbe 100644 --- a/routers/common/middleware.go +++ b/routers/common/middleware.go @@ -27,7 +27,7 @@ func Middlewares() []func(http.Handler) http.Handler { // First of all escape the URL RawPath to ensure that all routing is done using a correctly escaped URL req.URL.RawPath = req.URL.EscapedPath() - ctx, _, finished := process.GetManager().AddContext(req.Context(), fmt.Sprintf("%s: %s", req.Method, req.RequestURI)) + ctx, _, finished := process.GetManager().AddTypedContext(req.Context(), fmt.Sprintf("%s: %s", req.Method, req.RequestURI), process.RequestProcessType, true) defer finished() next.ServeHTTP(context.NewResponse(resp), req.WithContext(ctx)) }) @@ -70,9 +70,9 @@ func Middlewares() []func(http.Handler) http.Handler { combinedErr := fmt.Sprintf("PANIC: %v\n%s", err, log.Stack(2)) log.Error("%v", combinedErr) if setting.IsProd { - http.Error(resp, http.StatusText(500), 500) + http.Error(resp, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) } else { - http.Error(resp, combinedErr, 500) + http.Error(resp, combinedErr, http.StatusInternalServerError) } } }() diff --git a/routers/common/repo.go b/routers/common/repo.go index b0e14b63f5..d037e151f9 100644 --- a/routers/common/repo.go +++ b/routers/common/repo.go @@ -10,6 +10,7 @@ import ( "path" "path/filepath" "strings" + "time" "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/context" @@ -22,8 +23,8 @@ import ( ) // ServeBlob download a git.Blob -func ServeBlob(ctx *context.Context, blob *git.Blob) error { - if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`) { +func ServeBlob(ctx *context.Context, blob *git.Blob, lastModified time.Time) error { + if httpcache.HandleGenericETagTimeCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`, lastModified) { return nil } diff --git a/routers/init.go b/routers/init.go index 804dfd6533..759945ce25 100644 --- a/routers/init.go +++ b/routers/init.go @@ -32,12 +32,14 @@ import ( "code.gitea.io/gitea/modules/svg" "code.gitea.io/gitea/modules/translation" "code.gitea.io/gitea/modules/web" + packages_router "code.gitea.io/gitea/routers/api/packages" apiv1 "code.gitea.io/gitea/routers/api/v1" "code.gitea.io/gitea/routers/common" "code.gitea.io/gitea/routers/private" web_routers "code.gitea.io/gitea/routers/web" "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/auth/source/oauth2" + "code.gitea.io/gitea/services/automerge" "code.gitea.io/gitea/services/cron" "code.gitea.io/gitea/services/mailer" repo_migrations "code.gitea.io/gitea/services/migrations" @@ -47,8 +49,6 @@ import ( "code.gitea.io/gitea/services/repository/archiver" "code.gitea.io/gitea/services/task" "code.gitea.io/gitea/services/webhook" - - "gitea.com/go-chi/session" ) func mustInit(fn func() error) { @@ -73,7 +73,7 @@ func mustInitCtx(ctx context.Context, fn func(ctx context.Context) error) { func InitGitServices() { setting.NewServices() 
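The routers/common/repo.go hunk above adds a lastModified parameter to ServeBlob so that HandleGenericETagTimeCache can presumably combine the ETag check with time-based (If-Modified-Since) caching. Which timestamp callers pass is not visible in this section; the sketch below assumes the committer time of the commit being served and is illustrative only.

package example

import (
	"code.gitea.io/gitea/modules/context"
	"code.gitea.io/gitea/routers/common"
)

// serveRawBlob sketches a caller of the new ServeBlob signature.
func serveRawBlob(ctx *context.Context) {
	blob, err := ctx.Repo.Commit.GetBlobByPath(ctx.Repo.TreePath)
	if err != nil {
		ctx.ServerError("GetBlobByPath", err)
		return
	}
	// Assumption: the committer time of the serving commit is used as the
	// Last-Modified value for the blob.
	lastModified := ctx.Repo.Commit.Committer.When
	if err := common.ServeBlob(ctx, blob, lastModified); err != nil {
		ctx.ServerError("ServeBlob", err)
	}
}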
mustInit(storage.Init) - mustInit(repo_service.NewContext) + mustInit(repo_service.Init) } func syncAppPathForGit(ctx context.Context) error { @@ -116,7 +116,9 @@ func GlobalInitInstalled(ctx context.Context) { // Setup i18n translation.InitLocales() - InitGitServices() + setting.NewServices() + mustInit(storage.Init) + mailer.NewContext() mustInit(cache.NewContext) notification.NewContext() @@ -138,16 +140,18 @@ func GlobalInitInstalled(ctx context.Context) { mustInit(oauth2.Init) models.NewRepoContext() + mustInit(repo_service.Init) // Booting long running goroutines. - cron.NewContext() + cron.NewContext(ctx) issue_indexer.InitIssueIndexer(false) code_indexer.Init() mustInit(stats_indexer.Init) mirror_service.InitSyncMirrors() - webhook.InitDeliverHooks() + mustInit(webhook.Init) mustInit(pull_service.Init) + mustInit(automerge.Init) mustInit(task.Init) mustInit(repo_migrations.Init) eventsource.GetManager().Init() @@ -173,20 +177,12 @@ func NormalRoutes() *web.Route { r.Use(middle) } - sessioner := session.Sessioner(session.Options{ - Provider: setting.SessionConfig.Provider, - ProviderConfig: setting.SessionConfig.ProviderConfig, - CookieName: setting.SessionConfig.CookieName, - CookiePath: setting.SessionConfig.CookiePath, - Gclifetime: setting.SessionConfig.Gclifetime, - Maxlifetime: setting.SessionConfig.Maxlifetime, - Secure: setting.SessionConfig.Secure, - SameSite: setting.SessionConfig.SameSite, - Domain: setting.SessionConfig.Domain, - }) - - r.Mount("/", web_routers.Routes(sessioner)) - r.Mount("/api/v1", apiv1.Routes(sessioner)) + r.Mount("/", web_routers.Routes()) + r.Mount("/api/v1", apiv1.Routes()) r.Mount("/api/internal", private.Routes()) + if setting.Packages.Enabled { + r.Mount("/api/packages", packages_router.Routes()) + r.Mount("/v2", packages_router.ContainerRoutes()) + } return r } diff --git a/routers/install/install.go b/routers/install/install.go index 98eeb5f8a0..41b11aef33 100644 --- a/routers/install/install.go +++ b/routers/install/install.go @@ -42,24 +42,22 @@ const ( tplPostInstall base.TplName = "post-install" ) -var supportedDbTypeNames []map[string]string // use a slice to keep order -func getDbTypeNames() []map[string]string { - if supportedDbTypeNames == nil { - for _, t := range setting.SupportedDatabaseTypes { - supportedDbTypeNames = append(supportedDbTypeNames, map[string]string{"type": t, "name": setting.DatabaseTypeNames[t]}) - } +// getSupportedDbTypeNames returns a slice for supported database types and names. 
The slice is used to keep the order +func getSupportedDbTypeNames() (dbTypeNames []map[string]string) { + for _, t := range setting.SupportedDatabaseTypes { + dbTypeNames = append(dbTypeNames, map[string]string{"type": t, "name": setting.DatabaseTypeNames[t]}) } - return supportedDbTypeNames + return dbTypeNames } // Init prepare for rendering installation page func Init(next http.Handler) http.Handler { rnd := templates.HTMLRenderer() - + dbTypeNames := getSupportedDbTypeNames() return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { if setting.InstallLock { resp.Header().Add("Refresh", "1; url="+setting.AppURL+"user/login") - _ = rnd.HTML(resp, 200, string(tplPostInstall), nil) + _ = rnd.HTML(resp, http.StatusOK, string(tplPostInstall), nil) return } locale := middleware.Locale(resp, req) @@ -74,13 +72,15 @@ func Init(next http.Handler) http.Handler { "i18n": locale, "Title": locale.Tr("install.install"), "PageIsInstall": true, - "DbTypeNames": getDbTypeNames(), + "DbTypeNames": dbTypeNames, "AllLangs": translation.AllLangs(), "PageStartTime": startTime, "PasswordHashAlgorithms": user_model.AvailableHashAlgorithms, }, } + defer ctx.Close() + ctx.Req = context.WithContext(req, &ctx) next.ServeHTTP(resp, ctx.Req) }) @@ -136,6 +136,7 @@ func Install(ctx *context.Context) { form.SMTPHost = setting.MailService.Host form.SMTPFrom = setting.MailService.From form.SMTPUser = setting.MailService.User + form.SMTPPasswd = setting.MailService.Passwd } form.RegisterConfirm = setting.Service.RegisterEmailConfirm form.MailNotify = setting.Service.EnableNotifyMail @@ -501,13 +502,17 @@ func SubmitInstall(ctx *context.Context) { // Create admin account if len(form.AdminName) > 0 { u := &user_model.User{ - Name: form.AdminName, - Email: form.AdminEmail, - Passwd: form.AdminPasswd, - IsAdmin: true, - IsActive: true, + Name: form.AdminName, + Email: form.AdminEmail, + Passwd: form.AdminPasswd, + IsAdmin: true, } - if err = user_model.CreateUser(u); err != nil { + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsRestricted: util.OptionalBoolFalse, + IsActive: util.OptionalBoolTrue, + } + + if err = user_model.CreateUser(u, overwriteDefault); err != nil { if !user_model.IsErrUserAlreadyExist(err) { setting.InstallLock = false ctx.Data["Err_AdminName"] = true diff --git a/routers/install/routes.go b/routers/install/routes.go index f377cd40c9..e77081afe0 100644 --- a/routers/install/routes.go +++ b/routers/install/routes.go @@ -16,6 +16,7 @@ import ( "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/routers/common" + "code.gitea.io/gitea/routers/web/healthcheck" "code.gitea.io/gitea/services/forms" "gitea.com/go-chi/session" @@ -41,9 +42,9 @@ func installRecovery() func(next http.Handler) http.Handler { combinedErr := fmt.Sprintf("PANIC: %v\n%s", err, log.Stack(2)) log.Error("%s", combinedErr) if setting.IsProd { - http.Error(w, http.StatusText(500), 500) + http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError) } else { - http.Error(w, combinedErr, 500) + http.Error(w, combinedErr, http.StatusInternalServerError) } } }() @@ -66,7 +67,7 @@ func installRecovery() func(next http.Handler) http.Handler { if !setting.IsProd { store["ErrorMsg"] = combinedErr } - err = rnd.HTML(w, 500, "status/500", templates.BaseVars().Merge(store)) + err = rnd.HTML(w, http.StatusInternalServerError, "status/500", templates.BaseVars().Merge(store)) if err != nil { log.Error("%v", err) } @@ -106,6 +107,7 @@ func 
Routes() *web.Route { r.Use(Init) r.Get("/", Install) r.Post("/", web.Bind(forms.InstallForm{}), SubmitInstall) + r.Get("/api/healthz", healthcheck.Check) r.NotFound(web.Wrap(installNotFound)) return r diff --git a/routers/install/routes_test.go b/routers/install/routes_test.go index 35a66c1c47..29003c3841 100644 --- a/routers/install/routes_test.go +++ b/routers/install/routes_test.go @@ -13,7 +13,6 @@ import ( func TestRoutes(t *testing.T) { routes := Routes() assert.NotNil(t, routes) - assert.Len(t, routes.R.Routes(), 1) assert.EqualValues(t, "/", routes.R.Routes()[0].Pattern) assert.Nil(t, routes.R.Routes()[0].SubRoutes) assert.Len(t, routes.R.Routes()[0].Handlers, 2) diff --git a/routers/private/hook_pre_receive.go b/routers/private/hook_pre_receive.go index 85464deb29..d2203a1f99 100644 --- a/routers/private/hook_pre_receive.go +++ b/routers/private/hook_pre_receive.go @@ -12,6 +12,8 @@ import ( "strings" "code.gitea.io/gitea/models" + asymkey_model "code.gitea.io/gitea/models/asymkey" + perm_model "code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" gitea_context "code.gitea.io/gitea/modules/context" @@ -24,8 +26,12 @@ import ( type preReceiveContext struct { *gitea_context.PrivateContext - user *user_model.User - perm models.Permission + + // loadedPusher indicates that where the following information are loaded + loadedPusher bool + user *user_model.User // it's the org user if a DeployKey is used + userPerm models.Permission + deployKeyAccessMode perm_model.AccessMode canCreatePullRequest bool checkedCanCreatePullRequest bool @@ -39,64 +45,56 @@ type preReceiveContext struct { env []string opts *private.HookOptions + + branchName string } -// User gets or loads User -func (ctx *preReceiveContext) User() *user_model.User { - if ctx.user == nil { - ctx.user, ctx.perm = loadUserAndPermission(ctx.PrivateContext, ctx.opts.UserID) - } - return ctx.user -} - -// Perm gets or loads Perm -func (ctx *preReceiveContext) Perm() *models.Permission { - if ctx.user == nil { - ctx.user, ctx.perm = loadUserAndPermission(ctx.PrivateContext, ctx.opts.UserID) - } - return &ctx.perm -} - -// CanWriteCode returns true if can write code +// CanWriteCode returns true if pusher can write code func (ctx *preReceiveContext) CanWriteCode() bool { if !ctx.checkedCanWriteCode { - ctx.canWriteCode = ctx.Perm().CanWrite(unit.TypeCode) + if !ctx.loadPusherAndPermission() { + return false + } + ctx.canWriteCode = ctx.userPerm.CanWriteToBranch(ctx.user, ctx.branchName) || ctx.deployKeyAccessMode >= perm_model.AccessModeWrite ctx.checkedCanWriteCode = true } return ctx.canWriteCode } -// AssertCanWriteCode returns true if can write code +// AssertCanWriteCode returns true if pusher can write code func (ctx *preReceiveContext) AssertCanWriteCode() bool { if !ctx.CanWriteCode() { if ctx.Written() { return false } ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": "User permission denied.", + "err": "User permission denied for writing.", }) return false } return true } -// CanCreatePullRequest returns true if can create pull requests +// CanCreatePullRequest returns true if pusher can create pull requests func (ctx *preReceiveContext) CanCreatePullRequest() bool { if !ctx.checkedCanCreatePullRequest { - ctx.canCreatePullRequest = ctx.Perm().CanRead(unit.TypePullRequests) + if !ctx.loadPusherAndPermission() { + return false + } + ctx.canCreatePullRequest = ctx.userPerm.CanRead(unit.TypePullRequests) ctx.checkedCanCreatePullRequest = true } return 
ctx.canCreatePullRequest } -// AssertCanCreatePullRequest returns true if can create pull requests +// AssertCreatePullRequest returns true if can create pull requests func (ctx *preReceiveContext) AssertCreatePullRequest() bool { if !ctx.CanCreatePullRequest() { if ctx.Written() { return false } ctx.JSON(http.StatusForbidden, map[string]interface{}{ - "err": "User permission denied.", + "err": "User permission denied for creating pull-request.", }) return false } @@ -138,13 +136,15 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) { } func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullName string) { + branchName := strings.TrimPrefix(refFullName, git.BranchPrefix) + ctx.branchName = branchName + if !ctx.AssertCanWriteCode() { return } repo := ctx.Repo.Repository gitRepo := ctx.Repo.GitRepo - branchName := strings.TrimPrefix(refFullName, git.BranchPrefix) if branchName == repo.DefaultBranch && newCommitID == git.EmptySHA { log.Warn("Forbidden: Branch: %s is the default branch in %-v and cannot be deleted", branchName, repo) @@ -183,7 +183,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN // 2. Disallow force pushes to protected branches if git.EmptySHA != oldCommitID { - output, err := git.NewCommand(ctx, "rev-list", "--max-count=1", oldCommitID, "^"+newCommitID).RunInDirWithEnv(repo.RepoPath(), ctx.env) + output, _, err := git.NewCommand(ctx, "rev-list", "--max-count=1", oldCommitID, "^"+newCommitID).RunStdString(&git.RunOpts{Dir: repo.RepoPath(), Env: ctx.env}) if err != nil { log.Error("Unable to detect force push between: %s and %s in %-v Error: %v", oldCommitID, newCommitID, repo, err) ctx.JSON(http.StatusInternalServerError, private.Response{ @@ -246,7 +246,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN // 5. Check if the doer is allowed to push canPush := false - if ctx.opts.IsDeployKey { + if ctx.opts.DeployKeyID != 0 { canPush = !changedProtectedfiles && protectBranch.CanPush && (!protectBranch.EnableWhitelist || protectBranch.WhitelistDeployKeys) } else { canPush = !changedProtectedfiles && protectBranch.CanUserPush(ctx.opts.UserID) @@ -294,7 +294,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN // 6b. 
Merge (from UI or API) // Get the PR, user and permissions for the user in the repository - pr, err := models.GetPullRequestByID(ctx.opts.PullRequestID) + pr, err := models.GetPullRequestByID(ctx, ctx.opts.PullRequestID) if err != nil { log.Error("Unable to get PullRequest %d Error: %v", ctx.opts.PullRequestID, err) ctx.JSON(http.StatusInternalServerError, private.Response{ @@ -303,9 +303,15 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN return } + // although we should have called `loadPusherAndPermission` before, here we call it explicitly again because we need to access ctx.user below + if !ctx.loadPusherAndPermission() { + // if error occurs, loadPusherAndPermission had written the error response + return + } + // Now check if the user is allowed to merge PRs for this repository // Note: we can use ctx.perm and ctx.user directly as they will have been loaded above - allowedMerge, err := pull_service.IsUserAllowedToMerge(pr, ctx.perm, ctx.user) + allowedMerge, err := pull_service.IsUserAllowedToMerge(ctx, pr, ctx.userPerm, ctx.user) if err != nil { log.Error("Error calculating if allowed to merge: %v", err) ctx.JSON(http.StatusInternalServerError, private.Response{ @@ -323,7 +329,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN } // If we're an admin for the repository we can ignore status checks, reviews and override protected files - if ctx.perm.IsAdmin() { + if ctx.userPerm.IsAdmin() { return } @@ -337,8 +343,8 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID, refFullN } // Check all status checks and reviews are ok - if err := pull_service.CheckPRReadyToMerge(ctx, pr, true); err != nil { - if models.IsErrNotAllowedToMerge(err) { + if err := pull_service.CheckPullBranchProtections(ctx, pr, true); err != nil { + if models.IsErrDisallowedToMerge(err) { log.Warn("Forbidden: User %d is not allowed push to protected branch %s in %-v and pr #%d is not ready to be merged: %s", ctx.opts.UserID, branchName, repo, pr.Index, err.Error()) ctx.JSON(http.StatusForbidden, private.Response{ Err: fmt.Sprintf("Not allowed to push to protected branch %s and pr #%d is not ready to be merged: %s", branchName, ctx.opts.PullRequestID, err.Error()), @@ -450,24 +456,44 @@ func generateGitEnv(opts *private.HookOptions) (env []string) { return env } -func loadUserAndPermission(ctx *gitea_context.PrivateContext, id int64) (user *user_model.User, perm models.Permission) { - user, err := user_model.GetUserByID(id) - if err != nil { - log.Error("Unable to get User id %d Error: %v", id, err) - ctx.JSON(http.StatusInternalServerError, private.Response{ - Err: fmt.Sprintf("Unable to get User id %d Error: %v", id, err), - }) - return +// loadPusherAndPermission returns false if an error occurs, and it writes the error response +func (ctx *preReceiveContext) loadPusherAndPermission() bool { + if ctx.loadedPusher { + return true } - perm, err = models.GetUserRepoPermission(ctx.Repo.Repository, user) + user, err := user_model.GetUserByID(ctx.opts.UserID) + if err != nil { + log.Error("Unable to get User id %d Error: %v", ctx.opts.UserID, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to get User id %d Error: %v", ctx.opts.UserID, err), + }) + return false + } + ctx.user = user + + userPerm, err := models.GetUserRepoPermission(ctx, ctx.Repo.Repository, user) if err != nil { log.Error("Unable to get Repo permission of repo %s/%s of User %s", ctx.Repo.Repository.OwnerName, 
ctx.Repo.Repository.Name, user.Name, err) ctx.JSON(http.StatusInternalServerError, private.Response{ Err: fmt.Sprintf("Unable to get Repo permission of repo %s/%s of User %s: %v", ctx.Repo.Repository.OwnerName, ctx.Repo.Repository.Name, user.Name, err), }) - return + return false + } + ctx.userPerm = userPerm + + if ctx.opts.DeployKeyID != 0 { + deployKey, err := asymkey_model.GetDeployKeyByID(ctx, ctx.opts.DeployKeyID) + if err != nil { + log.Error("Unable to get DeployKey id %d Error: %v", ctx.opts.DeployKeyID, err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Unable to get DeployKey id %d Error: %v", ctx.opts.DeployKeyID, err), + }) + return false + } + ctx.deployKeyAccessMode = deployKey.Mode } - return + ctx.loadedPusher = true + return true } diff --git a/routers/private/hook_verification.go b/routers/private/hook_verification.go index 683ed8d071..dfa6195b19 100644 --- a/routers/private/hook_verification.go +++ b/routers/private/hook_verification.go @@ -45,11 +45,10 @@ func verifyCommits(oldCommitID, newCommitID string, repo *git.Repository, env [] // This is safe as force pushes are already forbidden err = git.NewCommand(repo.Ctx, "rev-list", oldCommitID+"..."+newCommitID). - RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: repo.Path, - Stdout: stdoutWriter, + Run(&git.RunOpts{ + Env: env, + Dir: repo.Path, + Stdout: stdoutWriter, PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { _ = stdoutWriter.Close() err := readAndVerifyCommitsFromShaReader(stdoutReader, repo, env) @@ -93,11 +92,10 @@ func readAndVerifyCommit(sha string, repo *git.Repository, env []string) error { hash := git.MustIDFromString(sha) return git.NewCommand(repo.Ctx, "cat-file", "commit", sha). - RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: repo.Path, - Stdout: stdoutWriter, + Run(&git.RunOpts{ + Env: env, + Dir: repo.Path, + Stdout: stdoutWriter, PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { _ = stdoutWriter.Close() commit, err := git.CommitFromReader(repo, hash, stdoutReader) diff --git a/routers/private/internal.go b/routers/private/internal.go index 263180bd58..6ba87d67bf 100644 --- a/routers/private/internal.go +++ b/routers/private/internal.go @@ -70,6 +70,7 @@ func Routes() *web.Route { r.Post("/manager/release-and-reopen-logging", ReleaseReopenLogging) r.Post("/manager/add-logger", bind(private.LoggerOptions{}), AddLogger) r.Post("/manager/remove-logger/{group}/{name}", RemoveLogger) + r.Get("/manager/processes", Processes) r.Post("/mail/send", SendEmail) r.Post("/restore_repo", RestoreRepo) diff --git a/routers/private/internal_repo.go b/routers/private/internal_repo.go index ade862c613..c50d2a01a7 100644 --- a/routers/private/internal_repo.go +++ b/routers/private/internal_repo.go @@ -43,7 +43,7 @@ func RepoAssignment(ctx *gitea_context.PrivateContext) context.CancelFunc { return nil } - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { log.Error("Failed to open repository: %s/%s Error: %v", ownerName, repoName, err) ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ diff --git a/routers/private/mail.go b/routers/private/mail.go index 8b69c38093..853b58b09d 100644 --- a/routers/private/mail.go +++ b/routers/private/mail.go @@ -60,7 +60,7 @@ func SendEmail(ctx *context.PrivateContext) { } } else { err := user_model.IterateUser(func(user *user_model.User) error { - if len(user.Email) > 
0 { + if len(user.Email) > 0 && user.IsActive { emails = append(emails, user.Email) } return nil diff --git a/routers/private/manager_process.go b/routers/private/manager_process.go new file mode 100644 index 0000000000..f8932d61fa --- /dev/null +++ b/routers/private/manager_process.go @@ -0,0 +1,161 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package private + +import ( + "bytes" + "fmt" + "io" + "net/http" + "runtime" + "time" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/private" + process_module "code.gitea.io/gitea/modules/process" +) + +// Processes prints out the processes +func Processes(ctx *context.PrivateContext) { + pid := ctx.FormString("cancel-pid") + if pid != "" { + process_module.GetManager().Cancel(process_module.IDType(pid)) + runtime.Gosched() + time.Sleep(100 * time.Millisecond) + } + + flat := ctx.FormBool("flat") + noSystem := ctx.FormBool("no-system") + stacktraces := ctx.FormBool("stacktraces") + json := ctx.FormBool("json") + + var processes []*process_module.Process + goroutineCount := int64(0) + processCount := 0 + var err error + if stacktraces { + processes, processCount, goroutineCount, err = process_module.GetManager().ProcessStacktraces(flat, noSystem) + if err != nil { + log.Error("Unable to get stacktrace: %v", err) + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Failed to get stacktraces: %v", err), + }) + return + } + } else { + processes, processCount = process_module.GetManager().Processes(flat, noSystem) + } + + if json { + ctx.JSON(http.StatusOK, map[string]interface{}{ + "TotalNumberOfGoroutines": goroutineCount, + "TotalNumberOfProcesses": processCount, + "Processes": processes, + }) + return + } + + ctx.Resp.Header().Set("Content-Type", "text/plain;charset=utf-8") + ctx.Resp.WriteHeader(http.StatusOK) + + if err := writeProcesses(ctx.Resp, processes, processCount, goroutineCount, "", flat); err != nil { + log.Error("Unable to write out process stacktrace: %v", err) + if !ctx.Written() { + ctx.JSON(http.StatusInternalServerError, private.Response{ + Err: fmt.Sprintf("Failed to get stacktraces: %v", err), + }) + } + return + } +} + +func writeProcesses(out io.Writer, processes []*process_module.Process, processCount int, goroutineCount int64, indent string, flat bool) error { + if goroutineCount > 0 { + if _, err := fmt.Fprintf(out, "%sTotal Number of Goroutines: %d\n", indent, goroutineCount); err != nil { + return err + } + } + if _, err := fmt.Fprintf(out, "%sTotal Number of Processes: %d\n", indent, processCount); err != nil { + return err + } + if len(processes) > 0 { + if err := writeProcess(out, processes[0], " ", flat); err != nil { + return err + } + } + if len(processes) > 1 { + for _, process := range processes[1:] { + if _, err := fmt.Fprintf(out, "%s | \n", indent); err != nil { + return err + } + if err := writeProcess(out, process, " ", flat); err != nil { + return err + } + } + } + return nil +} + +func writeProcess(out io.Writer, process *process_module.Process, indent string, flat bool) error { + sb := &bytes.Buffer{} + if flat { + if process.ParentPID != "" { + _, _ = fmt.Fprintf(sb, "%s+ PID: %s\t\tType: %s\n", indent, process.PID, process.Type) + } else { + _, _ = fmt.Fprintf(sb, "%s+ PID: %s:%s\tType: %s\n", indent, process.ParentPID, process.PID, process.Type) + } + } else { + _, _ = fmt.Fprintf(sb, "%s+ 
PID: %s\tType: %s\n", indent, process.PID, process.Type) + } + indent += "| " + + _, _ = fmt.Fprintf(sb, "%sDescription: %s\n", indent, process.Description) + _, _ = fmt.Fprintf(sb, "%sStart: %s\n", indent, process.Start) + + if len(process.Stacks) > 0 { + _, _ = fmt.Fprintf(sb, "%sGoroutines:\n", indent) + for _, stack := range process.Stacks { + indent := indent + " " + _, _ = fmt.Fprintf(sb, "%s+ Description: %s", indent, stack.Description) + if stack.Count > 1 { + _, _ = fmt.Fprintf(sb, "* %d", stack.Count) + } + _, _ = fmt.Fprintf(sb, "\n") + indent += "| " + if len(stack.Labels) > 0 { + _, _ = fmt.Fprintf(sb, "%sLabels: %q:%q", indent, stack.Labels[0].Name, stack.Labels[0].Value) + + if len(stack.Labels) > 1 { + for _, label := range stack.Labels[1:] { + _, _ = fmt.Fprintf(sb, ", %q:%q", label.Name, label.Value) + } + } + _, _ = fmt.Fprintf(sb, "\n") + } + _, _ = fmt.Fprintf(sb, "%sStack:\n", indent) + indent += " " + for _, entry := range stack.Entry { + _, _ = fmt.Fprintf(sb, "%s+ %s\n", indent, entry.Function) + _, _ = fmt.Fprintf(sb, "%s| %s:%d\n", indent, entry.File, entry.Line) + } + } + } + if _, err := out.Write(sb.Bytes()); err != nil { + return err + } + sb.Reset() + if len(process.Children) > 0 { + if _, err := fmt.Fprintf(out, "%sChildren:\n", indent); err != nil { + return err + } + for _, child := range process.Children { + if err := writeProcess(out, child, indent+" ", flat); err != nil { + return err + } + } + } + return nil +} diff --git a/routers/private/manager_unix.go b/routers/private/manager_unix.go index 402bade5d4..43cbdec01c 100644 --- a/routers/private/manager_unix.go +++ b/routers/private/manager_unix.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !windows -// +build !windows package private diff --git a/routers/private/manager_windows.go b/routers/private/manager_windows.go index 014018a539..2b72ee952d 100644 --- a/routers/private/manager_windows.go +++ b/routers/private/manager_windows.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
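The routers/private/internal.go hunk above registers the new Processes handler at /manager/processes, and routers/init.go mounts the private routes under /api/internal, so the process tree produced by writeProcesses can be fetched over the internal API. The sketch below is a hedged client: the listener address, the Bearer-token header, and the environment variable name are assumptions; the query parameters match the FormBool/FormString values the handler reads (flat, no-system, stacktraces, json, cancel-pid).

package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Request the stacktrace variant as plain text.
	url := "http://127.0.0.1:3000/api/internal/manager/processes?stacktraces=true&flat=false&json=false"
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	req.Header.Set("Authorization", "Bearer "+os.Getenv("GITEA_INTERNAL_TOKEN")) // assumption: internal-token auth
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Print(string(body)) // the tree rendered by writeProcesses/writeProcess
}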
//go:build windows -// +build windows package private diff --git a/routers/private/serv.go b/routers/private/serv.go index 65989d868b..6ef0079a2b 100644 --- a/routers/private/serv.go +++ b/routers/private/serv.go @@ -229,8 +229,6 @@ func ServCommand(ctx *context.PrivateContext) { var deployKey *asymkey_model.DeployKey var user *user_model.User if key.Type == asymkey_model.KeyTypeDeploy { - results.IsDeployKey = true - var err error deployKey, err = asymkey_model.GetDeployKeyByRepo(key.ID, repo.ID) if err != nil { @@ -248,6 +246,7 @@ func ServCommand(ctx *context.PrivateContext) { }) return } + results.DeployKeyID = deployKey.ID results.KeyName = deployKey.Name // FIXME: Deploy keys aren't really the owner of the repo pushing changes @@ -321,7 +320,7 @@ func ServCommand(ctx *context.PrivateContext) { mode = perm.AccessModeRead } - perm, err := models.GetUserRepoPermission(repo, user) + perm, err := models.GetUserRepoPermission(ctx, repo, user) if err != nil { log.Error("Unable to get permissions for %-v with key %d in %-v Error: %v", user, key.ID, repo, err) ctx.JSON(http.StatusInternalServerError, private.ErrServCommand{ @@ -410,9 +409,9 @@ func ServCommand(ctx *context.PrivateContext) { return } } - log.Debug("Serv Results:\nIsWiki: %t\nIsDeployKey: %t\nKeyID: %d\tKeyName: %s\nUserName: %s\nUserID: %d\nOwnerName: %s\nRepoName: %s\nRepoID: %d", + log.Debug("Serv Results:\nIsWiki: %t\nDeployKeyID: %d\nKeyID: %d\tKeyName: %s\nUserName: %s\nUserID: %d\nOwnerName: %s\nRepoName: %s\nRepoID: %d", results.IsWiki, - results.IsDeployKey, + results.DeployKeyID, results.KeyID, results.KeyName, results.UserName, diff --git a/routers/web/admin/admin.go b/routers/web/admin/admin.go index fac3ef9622..d4093f2049 100644 --- a/routers/web/admin/admin.go +++ b/routers/web/admin/admin.go @@ -35,10 +35,11 @@ import ( ) const ( - tplDashboard base.TplName = "admin/dashboard" - tplConfig base.TplName = "admin/config" - tplMonitor base.TplName = "admin/monitor" - tplQueue base.TplName = "admin/queue" + tplDashboard base.TplName = "admin/dashboard" + tplConfig base.TplName = "admin/config" + tplMonitor base.TplName = "admin/monitor" + tplStacktrace base.TplName = "admin/stacktrace" + tplQueue base.TplName = "admin/queue" ) var sysStatus struct { @@ -149,7 +150,7 @@ func DashboardPost(ctx *context.Context) { if form.Op != "" { task := cron.GetTask(form.Op) if task != nil { - go task.RunWithUser(ctx.User, nil) + go task.RunWithUser(ctx.Doer, nil) ctx.Flash.Success(ctx.Tr("admin.dashboard.task.started", ctx.Tr("admin.dashboard."+form.Op))) } else { ctx.Flash.Error(ctx.Tr("admin.dashboard.task.unknown", form.Op)) @@ -326,12 +327,33 @@ func Monitor(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("admin.monitor") ctx.Data["PageIsAdmin"] = true ctx.Data["PageIsAdminMonitor"] = true - ctx.Data["Processes"] = process.GetManager().Processes(true) + ctx.Data["Processes"], ctx.Data["ProcessCount"] = process.GetManager().Processes(false, true) ctx.Data["Entries"] = cron.ListTasks() ctx.Data["Queues"] = queue.GetManager().ManagedQueues() + ctx.HTML(http.StatusOK, tplMonitor) } +// GoroutineStacktrace show admin monitor goroutines page +func GoroutineStacktrace(ctx *context.Context) { + ctx.Data["Title"] = ctx.Tr("admin.monitor") + ctx.Data["PageIsAdmin"] = true + ctx.Data["PageIsAdminMonitor"] = true + + processStacks, processCount, goroutineCount, err := process.GetManager().ProcessStacktraces(false, false) + if err != nil { + ctx.ServerError("GoroutineStacktrace", err) + return + } + + ctx.Data["ProcessStacks"] = 
processStacks + + ctx.Data["GoroutineCount"] = goroutineCount + ctx.Data["ProcessCount"] = processCount + + ctx.HTML(http.StatusOK, tplStacktrace) +} + // MonitorCancel cancels a process func MonitorCancel(ctx *context.Context) { pid := ctx.Params("pid") @@ -346,7 +368,7 @@ func Queue(ctx *context.Context) { qid := ctx.ParamsInt64("qid") mq := queue.GetManager().GetManagedQueue(qid) if mq == nil { - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } ctx.Data["Title"] = ctx.Tr("admin.monitor.queue", mq.Name) @@ -361,7 +383,7 @@ func WorkerCancel(ctx *context.Context) { qid := ctx.ParamsInt64("qid") mq := queue.GetManager().GetManagedQueue(qid) if mq == nil { - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } pid := ctx.ParamsInt64("pid") @@ -377,7 +399,7 @@ func Flush(ctx *context.Context) { qid := ctx.ParamsInt64("qid") mq := queue.GetManager().GetManagedQueue(qid) if mq == nil { - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } timeout, err := time.ParseDuration(ctx.FormString("timeout")) @@ -423,7 +445,7 @@ func AddWorkers(ctx *context.Context) { qid := ctx.ParamsInt64("qid") mq := queue.GetManager().GetManagedQueue(qid) if mq == nil { - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } number := ctx.FormInt("number") @@ -453,7 +475,7 @@ func SetQueueSettings(ctx *context.Context) { qid := ctx.ParamsInt64("qid") mq := queue.GetManager().GetManagedQueue(qid) if mq == nil { - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } if _, ok := mq.Managed.(queue.ManagedPool); !ok { diff --git a/routers/web/admin/auths.go b/routers/web/admin/auths.go index a8e0cd37b6..1d72a88aa1 100644 --- a/routers/web/admin/auths.go +++ b/routers/web/admin/auths.go @@ -93,7 +93,7 @@ func NewAuthSource(ctx *context.Context) { ctx.Data["PageIsAdmin"] = true ctx.Data["PageIsAdminAuthentications"] = true - ctx.Data["type"] = auth.LDAP + ctx.Data["type"] = auth.LDAP.Int() ctx.Data["CurrentTypeName"] = auth.Names[auth.LDAP] ctx.Data["CurrentSecurityProtocol"] = ldap.SecurityProtocolNames[ldap.SecurityProtocolUnencrypted] ctx.Data["smtp_auth"] = "PLAIN" @@ -112,7 +112,7 @@ func NewAuthSource(ctx *context.Context) { ctx.Data["SSPIDefaultLanguage"] = "" // only the first as default - ctx.Data["oauth2_provider"] = oauth2providers[0] + ctx.Data["oauth2_provider"] = oauth2providers[0].Name ctx.HTML(http.StatusOK, tplAuthNew) } @@ -253,9 +253,6 @@ func NewAuthSourcePost(ctx *context.Context) { ctx.Data["SSPISeparatorReplacement"] = "_" ctx.Data["SSPIDefaultLanguage"] = "" - // FIXME: most error path to render tplAuthNew will fail and result in 500 - // * template: admin/auth/new:17:68: executing "admin/auth/new" at <.type.Int>: can't evaluate field Int in type interface {} - // * template: admin/auth/source/oauth:5:93: executing "admin/auth/source/oauth" at <.oauth2_provider.Name>: can't evaluate field Name in type interface {} hasTLS := false var config convert.Conversion switch auth.Type(form.Type) { @@ -313,7 +310,7 @@ func NewAuthSourcePost(ctx *context.Context) { return } - log.Trace("Authentication created by admin(%s): %s", ctx.User.Name, form.Name) + log.Trace("Authentication created by admin(%s): %s", ctx.Doer.Name, form.Name) ctx.Flash.Success(ctx.Tr("admin.auths.new_success", form.Name)) ctx.Redirect(setting.AppSubURL + "/admin/auths") @@ -416,7 +413,7 @@ func EditAuthSourcePost(ctx *context.Context) { } return } - log.Trace("Authentication changed by admin(%s): %d", ctx.User.Name, source.ID) + log.Trace("Authentication changed by admin(%s): %d", ctx.Doer.Name, 
source.ID) ctx.Flash.Success(ctx.Tr("admin.auths.update_success")) ctx.Redirect(setting.AppSubURL + "/admin/auths/" + strconv.FormatInt(form.ID, 10)) @@ -441,7 +438,7 @@ func DeleteAuthSource(ctx *context.Context) { }) return } - log.Trace("Authentication deleted by admin(%s): %d", ctx.User.Name, source.ID) + log.Trace("Authentication deleted by admin(%s): %d", ctx.Doer.Name, source.ID) ctx.Flash.Success(ctx.Tr("admin.auths.deletion_success")) ctx.JSON(http.StatusOK, map[string]interface{}{ diff --git a/routers/web/admin/emails.go b/routers/web/admin/emails.go index b94f9d72c4..9482ae0123 100644 --- a/routers/web/admin/emails.go +++ b/routers/web/admin/emails.go @@ -87,7 +87,7 @@ func Emails(ctx *context.Context) { emails[i].SearchEmailResult = *baseEmails[i] // Don't let the admin deactivate its own primary email address // We already know the user is admin - emails[i].CanChange = ctx.User.ID != emails[i].UID || !emails[i].IsPrimary + emails[i].CanChange = ctx.Doer.ID != emails[i].UID || !emails[i].IsPrimary } } ctx.Data["Keyword"] = opts.Keyword diff --git a/routers/web/admin/hooks.go b/routers/web/admin/hooks.go index 8cb99e1d1e..1483d0959d 100644 --- a/routers/web/admin/hooks.go +++ b/routers/web/admin/hooks.go @@ -11,6 +11,7 @@ import ( "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" ) const ( @@ -34,7 +35,7 @@ func DefaultOrSystemWebhooks(ctx *context.Context) { sys["Title"] = ctx.Tr("admin.systemhooks") sys["Description"] = ctx.Tr("admin.systemhooks.desc") - sys["Webhooks"], err = webhook.GetSystemWebhooks() + sys["Webhooks"], err = webhook.GetSystemWebhooks(util.OptionalBoolNone) sys["BaseLink"] = setting.AppSubURL + "/admin/hooks" sys["BaseLinkNew"] = setting.AppSubURL + "/admin/system-hooks" if err != nil { diff --git a/routers/web/admin/main_test.go b/routers/web/admin/main_test.go index e41d8fea75..4e6ad4d743 100644 --- a/routers/web/admin/main_test.go +++ b/routers/web/admin/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } diff --git a/routers/web/admin/notice.go b/routers/web/admin/notice.go index 3613f428ed..b50549b804 100644 --- a/routers/web/admin/notice.go +++ b/routers/web/admin/notice.go @@ -59,10 +59,10 @@ func DeleteNotices(ctx *context.Context) { if err := admin_model.DeleteNoticesByIDs(ids); err != nil { ctx.Flash.Error("DeleteNoticesByIDs: " + err.Error()) - ctx.Status(500) + ctx.Status(http.StatusInternalServerError) } else { ctx.Flash.Success(ctx.Tr("admin.notices.delete_success")) - ctx.Status(200) + ctx.Status(http.StatusOK) } } @@ -73,7 +73,7 @@ func EmptyNotices(ctx *context.Context) { return } - log.Trace("System notices deleted by admin (%s): [start: %d]", ctx.User.Name, 0) + log.Trace("System notices deleted by admin (%s): [start: %d]", ctx.Doer.Name, 0) ctx.Flash.Success(ctx.Tr("admin.notices.delete_success")) ctx.Redirect(setting.AppSubURL + "/admin/notices") } diff --git a/routers/web/admin/orgs.go b/routers/web/admin/orgs.go index 3d440d83cb..6081ab9b1c 100644 --- a/routers/web/admin/orgs.go +++ b/routers/web/admin/orgs.go @@ -26,7 +26,7 @@ func Organizations(ctx *context.Context) { ctx.Data["PageIsAdminOrganizations"] = true explore.RenderUserSearch(ctx, &user_model.SearchUserOptions{ - Actor: ctx.User, + Actor: ctx.Doer, Type: user_model.UserTypeOrganization, ListOptions: 
db.ListOptions{ PageSize: setting.UI.Admin.OrgPagingNum, diff --git a/routers/web/admin/packages.go b/routers/web/admin/packages.go new file mode 100644 index 0000000000..79bf025dd2 --- /dev/null +++ b/routers/web/admin/packages.go @@ -0,0 +1,95 @@ +// Copyright 2014 The Gogs Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package admin + +import ( + "net/http" + "net/url" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/setting" + packages_service "code.gitea.io/gitea/services/packages" +) + +const ( + tplPackagesList base.TplName = "admin/packages/list" +) + +// Packages shows all packages +func Packages(ctx *context.Context) { + page := ctx.FormInt("page") + if page <= 1 { + page = 1 + } + query := ctx.FormTrim("q") + packageType := ctx.FormTrim("type") + sort := ctx.FormTrim("sort") + + pvs, total, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + Type: packages_model.Type(packageType), + Name: packages_model.SearchValue{Value: query}, + Sort: sort, + Paginator: &db.ListOptions{ + PageSize: setting.UI.PackagesPagingNum, + Page: page, + }, + }) + if err != nil { + ctx.ServerError("SearchVersions", err) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + ctx.ServerError("GetPackageDescriptors", err) + return + } + + totalBlobSize, err := packages_model.GetTotalBlobSize() + if err != nil { + ctx.ServerError("GetTotalBlobSize", err) + return + } + + ctx.Data["Title"] = ctx.Tr("packages.title") + ctx.Data["PageIsAdmin"] = true + ctx.Data["PageIsAdminPackages"] = true + ctx.Data["Query"] = query + ctx.Data["PackageType"] = packageType + ctx.Data["SortType"] = sort + ctx.Data["PackageDescriptors"] = pds + ctx.Data["Total"] = total + ctx.Data["TotalBlobSize"] = totalBlobSize + + pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5) + pager.AddParamString("q", query) + pager.AddParamString("type", packageType) + pager.AddParamString("sort", sort) + ctx.Data["Page"] = pager + + ctx.HTML(http.StatusOK, tplPackagesList) +} + +// DeletePackageVersion deletes a package version +func DeletePackageVersion(ctx *context.Context) { + pv, err := packages_model.GetVersionByID(db.DefaultContext, ctx.FormInt64("id")) + if err != nil { + ctx.ServerError("GetRepositoryByID", err) + return + } + + if err := packages_service.RemovePackageVersion(ctx.Doer, pv); err != nil { + ctx.ServerError("RemovePackageVersion", err) + return + } + + ctx.Flash.Success(ctx.Tr("packages.settings.delete.success")) + ctx.JSON(http.StatusOK, map[string]interface{}{ + "redirect": setting.AppSubURL + "/admin/packages?page=" + url.QueryEscape(ctx.FormString("page")) + "&q=" + url.QueryEscape(ctx.FormString("q")) + "&type=" + url.QueryEscape(ctx.FormString("type")), + }) +} diff --git a/routers/web/admin/repos.go b/routers/web/admin/repos.go index c4290fc22b..fb7be12c35 100644 --- a/routers/web/admin/repos.go +++ b/routers/web/admin/repos.go @@ -52,7 +52,7 @@ func DeleteRepo(ctx *context.Context) { ctx.Repo.GitRepo.Close() } - if err := repo_service.DeleteRepository(ctx, ctx.User, repo, true); err != nil { + if err := repo_service.DeleteRepository(ctx, ctx.Doer, repo, true); err != nil { ctx.ServerError("DeleteRepository", err) return } @@ -148,7 +148,7 @@ func AdoptOrDeleteRepository(ctx 
*context.Context) { if has || !isDir { // Fallthrough to failure mode } else if action == "adopt" { - if _, err := repo_service.AdoptRepository(ctx.User, ctxUser, models.CreateRepoOptions{ + if _, err := repo_service.AdoptRepository(ctx.Doer, ctxUser, models.CreateRepoOptions{ Name: dirSplit[1], IsPrivate: true, }); err != nil { @@ -157,7 +157,7 @@ func AdoptOrDeleteRepository(ctx *context.Context) { } ctx.Flash.Success(ctx.Tr("repo.adopt_preexisting_success", dir)) } else if action == "delete" { - if err := repo_service.DeleteUnadoptedRepository(ctx.User, ctxUser, dirSplit[1]); err != nil { + if err := repo_service.DeleteUnadoptedRepository(ctx.Doer, ctxUser, dirSplit[1]); err != nil { ctx.ServerError("repository.AdoptRepository", err) return } diff --git a/routers/web/admin/users.go b/routers/web/admin/users.go index 1f304297c0..7841ac569f 100644 --- a/routers/web/admin/users.go +++ b/routers/web/admin/users.go @@ -63,7 +63,7 @@ func Users(ctx *context.Context) { } explore.RenderUserSearch(ctx, &user_model.SearchUserOptions{ - Actor: ctx.User, + Actor: ctx.Doer, Type: user_model.UserTypeIndividual, ListOptions: db.ListOptions{ PageSize: setting.UI.Admin.UserPagingNum, @@ -125,10 +125,14 @@ func NewUserPost(ctx *context.Context) { Name: form.UserName, Email: form.Email, Passwd: form.Password, - IsActive: true, LoginType: auth.Plain, } + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsActive: util.OptionalBoolTrue, + Visibility: &form.Visibility, + } + if len(form.LoginType) > 0 { fields := strings.Split(form.LoginType, "-") if len(fields) == 2 { @@ -163,7 +167,7 @@ func NewUserPost(ctx *context.Context) { u.MustChangePassword = form.MustChangePassword } - if err := user_model.CreateUser(u, &user_model.CreateUserOverwriteOptions{Visibility: form.Visibility}); err != nil { + if err := user_model.CreateUser(u, overwriteDefault); err != nil { switch { case user_model.IsErrUserAlreadyExist(err): ctx.Data["Err_UserName"] = true @@ -171,6 +175,9 @@ func NewUserPost(ctx *context.Context) { case user_model.IsErrEmailAlreadyUsed(err): ctx.Data["Err_Email"] = true ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplUserNew, &form) + case user_model.IsErrEmailCharIsNotSupported(err): + ctx.Data["Err_Email"] = true + ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplUserNew, &form) case user_model.IsErrEmailInvalid(err): ctx.Data["Err_Email"] = true ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplUserNew, &form) @@ -188,7 +195,7 @@ func NewUserPost(ctx *context.Context) { } return } - log.Trace("Account created by admin (%s): %s", ctx.User.Name, u.Name) + log.Trace("Account created by admin (%s): %s", ctx.Doer.Name, u.Name) // Send email notification. 
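The NewUserPost hunk above (like the install.go hunk earlier in this diff) stops setting IsActive directly on the user and instead passes CreateUserOverwriteOptions into CreateUser, so activation, restriction and visibility defaults are applied in one place. A minimal sketch of that calling pattern, using only fields shown in this diff; the package and function names around it are illustrative:

package example

import (
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/util"
)

// createActiveUser creates a user that is active regardless of the service
// defaults, mirroring the overrides used by the installer and the admin panel.
func createActiveUser(name, email, password string) error {
	u := &user_model.User{
		Name:   name,
		Email:  email,
		Passwd: password,
	}
	overwrite := &user_model.CreateUserOverwriteOptions{
		IsActive:     util.OptionalBoolTrue,
		IsRestricted: util.OptionalBoolFalse,
	}
	return user_model.CreateUser(u, overwrite)
}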
if form.SendNotify { @@ -376,7 +383,7 @@ func EditUserPost(ctx *context.Context) { u.Visibility = form.Visibility // skip self Prohibit Login - if ctx.User.ID == u.ID { + if ctx.Doer.ID == u.ID { u.ProhibitLogin = false } else { u.ProhibitLogin = form.ProhibitLogin @@ -386,7 +393,8 @@ func EditUserPost(ctx *context.Context) { if user_model.IsErrEmailAlreadyUsed(err) { ctx.Data["Err_Email"] = true ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplUserEdit, &form) - } else if user_model.IsErrEmailInvalid(err) { + } else if user_model.IsErrEmailCharIsNotSupported(err) || + user_model.IsErrEmailInvalid(err) { ctx.Data["Err_Email"] = true ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplUserEdit, &form) } else { @@ -394,7 +402,7 @@ func EditUserPost(ctx *context.Context) { } return } - log.Trace("Account profile updated by admin (%s): %s", ctx.User.Name, u.Name) + log.Trace("Account profile updated by admin (%s): %s", ctx.Doer.Name, u.Name) ctx.Flash.Success(ctx.Tr("admin.users.update_profile_success")) ctx.Redirect(setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid"))) @@ -408,6 +416,15 @@ func DeleteUser(ctx *context.Context) { return } + // admin should not delete themself + if u.ID == ctx.Doer.ID { + ctx.Flash.Error(ctx.Tr("admin.users.cannot_delete_self")) + ctx.JSON(http.StatusOK, map[string]interface{}{ + "redirect": setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid")), + }) + return + } + if err = user_service.DeleteUser(u); err != nil { switch { case models.IsErrUserOwnRepos(err): @@ -420,12 +437,17 @@ func DeleteUser(ctx *context.Context) { ctx.JSON(http.StatusOK, map[string]interface{}{ "redirect": setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid")), }) + case models.IsErrUserOwnPackages(err): + ctx.Flash.Error(ctx.Tr("admin.users.still_own_packages")) + ctx.JSON(http.StatusOK, map[string]interface{}{ + "redirect": setting.AppSubURL + "/admin/users/" + ctx.Params(":userid"), + }) default: ctx.ServerError("DeleteUser", err) } return } - log.Trace("Account deleted by admin (%s): %s", ctx.User.Name, u.Name) + log.Trace("Account deleted by admin (%s): %s", ctx.Doer.Name, u.Name) ctx.Flash.Success(ctx.Tr("admin.users.deletion_success")) ctx.JSON(http.StatusOK, map[string]interface{}{ diff --git a/routers/web/admin/users_test.go b/routers/web/admin/users_test.go index 46133688a5..9de548685c 100644 --- a/routers/web/admin/users_test.go +++ b/routers/web/admin/users_test.go @@ -27,7 +27,7 @@ func TestNewUserPost_MustChangePassword(t *testing.T) { ID: 2, }).(*user_model.User) - ctx.User = u + ctx.Doer = u username := "gitea" email := "gitea@gitea.io" @@ -64,7 +64,7 @@ func TestNewUserPost_MustChangePasswordFalse(t *testing.T) { ID: 2, }).(*user_model.User) - ctx.User = u + ctx.Doer = u username := "gitea" email := "gitea@gitea.io" @@ -101,7 +101,7 @@ func TestNewUserPost_InvalidEmail(t *testing.T) { ID: 2, }).(*user_model.User) - ctx.User = u + ctx.Doer = u username := "gitea" email := "gitea@gitea.io\r\n" @@ -131,7 +131,7 @@ func TestNewUserPost_VisibilityDefaultPublic(t *testing.T) { ID: 2, }).(*user_model.User) - ctx.User = u + ctx.Doer = u username := "gitea" email := "gitea@gitea.io" @@ -169,7 +169,7 @@ func TestNewUserPost_VisibilityPrivate(t *testing.T) { ID: 2, }).(*user_model.User) - ctx.User = u + ctx.Doer = u username := "gitea" email := "gitea@gitea.io" diff --git a/routers/web/auth.go b/routers/web/auth.go new file mode 100644 index 0000000000..a771643b66 --- /dev/null +++ b/routers/web/auth.go @@ -0,0 
+1,11 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +//go:build !windows + +package web + +import auth_service "code.gitea.io/gitea/services/auth" + +func specialAdd(group *auth_service.Group) {} diff --git a/routers/web/auth/auth.go b/routers/web/auth/auth.go index ce8ec8a1e3..be936d2230 100644 --- a/routers/web/auth/auth.go +++ b/routers/web/auth/auth.go @@ -107,7 +107,7 @@ func resetLocale(ctx *context.Context, u *user_model.User) error { // If the user does not have a locale set, we save the current one. if len(u.Language) == 0 { u.Language = ctx.Locale.Language() - if err := user_model.UpdateUserCols(db.DefaultContext, u, "language"); err != nil { + if err := user_model.UpdateUserCols(ctx, u, "language"); err != nil { return err } } @@ -195,7 +195,7 @@ func SignInPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.SignInForm) u, source, err := auth_service.UserSignIn(form.UserName, form.Password) if err != nil { - if user_model.IsErrUserNotExist(err) { + if user_model.IsErrUserNotExist(err) || user_model.IsErrEmailAddressNotExist(err) { ctx.RenderWithErr(ctx.Tr("form.username_password_incorrect"), tplSignIn, &form) log.Info("Failed authentication attempt for %s from %s: %v", form.UserName, ctx.RemoteAddr(), err) } else if user_model.IsErrEmailAlreadyUsed(err) { @@ -333,7 +333,7 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe // If the user does not have a locale set, we save the current one. if len(u.Language) == 0 { u.Language = ctx.Locale.Language() - if err := user_model.UpdateUserCols(db.DefaultContext, u, "language"); err != nil { + if err := user_model.UpdateUserCols(ctx, u, "language"); err != nil { ctx.ServerError("UpdateUserCols Language", fmt.Errorf("Error updating user language [user: %d, locale: %s]", u.ID, u.Language)) return setting.AppSubURL + "/" } @@ -345,12 +345,12 @@ func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRe ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req) } - // Clear whatever CSRF has right now, force to generate a new one + // Clear whatever CSRF cookie has right now, force to generate a new one middleware.DeleteCSRFCookie(ctx.Resp) // Register last login u.SetLastLogin() - if err := user_model.UpdateUserCols(db.DefaultContext, u, "last_login_unix"); err != nil { + if err := user_model.UpdateUserCols(ctx, u, "last_login_unix"); err != nil { ctx.ServerError("UpdateUserCols", err) return setting.AppSubURL + "/" } @@ -393,8 +393,8 @@ func HandleSignOut(ctx *context.Context) { // SignOut sign out from login status func SignOut(ctx *context.Context) { - if ctx.User != nil { - eventsource.GetManager().SendMessageBlocking(ctx.User.ID, &eventsource.Event{ + if ctx.Doer != nil { + eventsource.GetManager().SendMessageBlocking(ctx.Doer.ID, &eventsource.Event{ Name: "logout", Data: ctx.Session.ID(), }) @@ -507,14 +507,12 @@ func SignUpPost(ctx *context.Context) { } u := &user_model.User{ - Name: form.UserName, - Email: form.Email, - Passwd: form.Password, - IsActive: !(setting.Service.RegisterEmailConfirm || setting.Service.RegisterManualConfirm), - IsRestricted: setting.Service.DefaultUserIsRestricted, + Name: form.UserName, + Email: form.Email, + Passwd: form.Password, } - if !createAndHandleCreatedUser(ctx, tplSignUp, form, u, nil, false) { + if !createAndHandleCreatedUser(ctx, tplSignUp, form, u, nil, nil, false) { // error already handled return } @@ -525,8 +523,8 
@@ func SignUpPost(ctx *context.Context) { // createAndHandleCreatedUser calls createUserInContext and // then handleUserCreated. -func createAndHandleCreatedUser(ctx *context.Context, tpl base.TplName, form interface{}, u *user_model.User, gothUser *goth.User, allowLink bool) bool { - if !createUserInContext(ctx, tpl, form, u, gothUser, allowLink) { +func createAndHandleCreatedUser(ctx *context.Context, tpl base.TplName, form interface{}, u *user_model.User, overwrites *user_model.CreateUserOverwriteOptions, gothUser *goth.User, allowLink bool) bool { + if !createUserInContext(ctx, tpl, form, u, overwrites, gothUser, allowLink) { return false } return handleUserCreated(ctx, u, gothUser) @@ -534,8 +532,8 @@ func createAndHandleCreatedUser(ctx *context.Context, tpl base.TplName, form int // createUserInContext creates a user and handles errors within a given context. // Optionally a template can be specified. -func createUserInContext(ctx *context.Context, tpl base.TplName, form interface{}, u *user_model.User, gothUser *goth.User, allowLink bool) (ok bool) { - if err := user_model.CreateUser(u); err != nil { +func createUserInContext(ctx *context.Context, tpl base.TplName, form interface{}, u *user_model.User, overwrites *user_model.CreateUserOverwriteOptions, gothUser *goth.User, allowLink bool) (ok bool) { + if err := user_model.CreateUser(u, overwrites); err != nil { if allowLink && (user_model.IsErrUserAlreadyExist(err) || user_model.IsErrEmailAlreadyUsed(err)) { if setting.OAuth2Client.AccountLinking == setting.OAuth2AccountLinkingAuto { var user *user_model.User @@ -573,6 +571,9 @@ func createUserInContext(ctx *context.Context, tpl base.TplName, form interface{ case user_model.IsErrEmailAlreadyUsed(err): ctx.Data["Err_Email"] = true ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tpl, form) + case user_model.IsErrEmailCharIsNotSupported(err): + ctx.Data["Err_Email"] = true + ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tpl, form) case user_model.IsErrEmailInvalid(err): ctx.Data["Err_Email"] = true ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tpl, form) @@ -599,11 +600,11 @@ func createUserInContext(ctx *context.Context, tpl base.TplName, form interface{ // sends a confirmation email if required. func handleUserCreated(ctx *context.Context, u *user_model.User, gothUser *goth.User) (ok bool) { // Auto-set admin for the only user. - if user_model.CountUsers() == 1 { + if user_model.CountUsers(nil) == 1 { u.IsAdmin = true u.IsActive = true u.SetLastLogin() - if err := user_model.UpdateUserCols(db.DefaultContext, u, "is_admin", "is_active", "last_login_unix"); err != nil { + if err := user_model.UpdateUserCols(ctx, u, "is_admin", "is_active", "last_login_unix"); err != nil { ctx.ServerError("UpdateUser", err) return } @@ -618,6 +619,12 @@ func handleUserCreated(ctx *context.Context, u *user_model.User, gothUser *goth. // Send confirmation email if !u.IsActive && u.ID > 1 { + if setting.Service.RegisterManualConfirm { + ctx.Data["ManualActivationOnly"] = true + ctx.HTML(http.StatusOK, TplActivate) + return + } + mailer.SendActivateAccountMail(ctx.Locale, u) ctx.Data["IsSendRegisterMail"] = true @@ -640,19 +647,19 @@ func Activate(ctx *context.Context) { if len(code) == 0 { ctx.Data["IsActivatePage"] = true - if ctx.User == nil || ctx.User.IsActive { + if ctx.Doer == nil || ctx.Doer.IsActive { ctx.NotFound("invalid user", nil) return } // Resend confirmation email. 
if setting.Service.RegisterEmailConfirm { - if ctx.Cache.IsExist("MailResendLimit_" + ctx.User.LowerName) { + if ctx.Cache.IsExist("MailResendLimit_" + ctx.Doer.LowerName) { ctx.Data["ResendLimited"] = true } else { ctx.Data["ActiveCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale.Language()) - mailer.SendActivateAccountMail(ctx.Locale, ctx.User) + mailer.SendActivateAccountMail(ctx.Locale, ctx.Doer) - if err := ctx.Cache.Put("MailResendLimit_"+ctx.User.LowerName, ctx.User.LowerName, 180); err != nil { + if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil { log.Error("Set cache(MailResendLimit) fail: %v", err) } } @@ -724,7 +731,7 @@ func handleAccountActivation(ctx *context.Context, user *user_model.User) { ctx.ServerError("UpdateUser", err) return } - if err := user_model.UpdateUserCols(db.DefaultContext, user, "is_active", "rands"); err != nil { + if err := user_model.UpdateUserCols(ctx, user, "is_active", "rands"); err != nil { if user_model.IsErrUserNotExist(err) { ctx.NotFound("UpdateUserCols", err) } else { diff --git a/routers/web/auth/linkaccount.go b/routers/web/auth/linkaccount.go index bf5fb83265..c3e96f077a 100644 --- a/routers/web/auth/linkaccount.go +++ b/routers/web/auth/linkaccount.go @@ -283,13 +283,12 @@ func LinkAccountPostRegister(ctx *context.Context) { Name: form.UserName, Email: form.Email, Passwd: form.Password, - IsActive: !(setting.Service.RegisterEmailConfirm || setting.Service.RegisterManualConfirm), LoginType: auth.OAuth2, LoginSource: authSource.ID, LoginName: gothUser.UserID, } - if !createAndHandleCreatedUser(ctx, tplLinkAccount, form, u, &gothUser, false) { + if !createAndHandleCreatedUser(ctx, tplLinkAccount, form, u, nil, &gothUser, false) { // error already handled return } diff --git a/routers/web/auth/main_test.go b/routers/web/auth/main_test.go index 2b16f3c405..71f522fb07 100644 --- a/routers/web/auth/main_test.go +++ b/routers/web/auth/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } diff --git a/routers/web/auth/oauth.go b/routers/web/auth/oauth.go index 64e9c5c208..4c3e3c3ace 100644 --- a/routers/web/auth/oauth.go +++ b/routers/web/auth/oauth.go @@ -16,7 +16,6 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/auth" - "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" @@ -25,6 +24,7 @@ import ( "code.gitea.io/gitea/modules/session" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" auth_service "code.gitea.io/gitea/services/auth" @@ -267,21 +267,21 @@ type userInfoResponse struct { // InfoOAuth manages request for userinfo endpoint func InfoOAuth(ctx *context.Context) { - if ctx.User == nil || ctx.Data["AuthedMethod"] != (&auth_service.OAuth2{}).Name() { + if ctx.Doer == nil || ctx.Data["AuthedMethod"] != (&auth_service.OAuth2{}).Name() { ctx.Resp.Header().Set("WWW-Authenticate", `Bearer realm=""`) ctx.PlainText(http.StatusUnauthorized, "no valid authorization") return } response := &userInfoResponse{ - Sub: fmt.Sprint(ctx.User.ID), - Name: ctx.User.FullName, - Username: ctx.User.Name, - Email: ctx.User.Email, - Picture: 
ctx.User.AvatarLink(), + Sub: fmt.Sprint(ctx.Doer.ID), + Name: ctx.Doer.FullName, + Username: ctx.Doer.Name, + Email: ctx.Doer.Email, + Picture: ctx.Doer.AvatarLink(), } - groups, err := getOAuthGroupsForUser(ctx.User) + groups, err := getOAuthGroupsForUser(ctx.Doer) if err != nil { ctx.ServerError("Oauth groups for user", err) return @@ -317,7 +317,7 @@ func getOAuthGroupsForUser(user *user_model.User) ([]string, error) { // IntrospectOAuth introspects an oauth token func IntrospectOAuth(ctx *context.Context) { - if ctx.User == nil { + if ctx.Doer == nil { ctx.Resp.Header().Set("WWW-Authenticate", `Bearer realm=""`) ctx.PlainText(http.StatusUnauthorized, "no valid authorization") return @@ -438,7 +438,7 @@ func AuthorizeOAuth(ctx *context.Context) { return } - grant, err := app.GetGrantByUserID(ctx.User.ID) + grant, err := app.GetGrantByUserID(ctx.Doer.ID) if err != nil { handleServerError(ctx, form.State, form.RedirectURI) return @@ -463,7 +463,7 @@ func AuthorizeOAuth(ctx *context.Context) { log.Error("Unable to update nonce: %v", err) } } - ctx.Redirect(redirect.String(), 302) + ctx.Redirect(redirect.String()) return } @@ -515,7 +515,7 @@ func GrantApplicationOAuth(ctx *context.Context) { ctx.ServerError("GetOAuth2ApplicationByClientID", err) return } - grant, err := app.CreateGrant(ctx.User.ID, form.Scope) + grant, err := app.CreateGrant(ctx.Doer.ID, form.Scope) if err != nil { handleAuthorizeError(ctx, AuthorizeError{ State: form.State, @@ -545,7 +545,7 @@ func GrantApplicationOAuth(ctx *context.Context) { handleServerError(ctx, form.State, form.RedirectURI) return } - ctx.Redirect(redirect.String(), 302) + ctx.Redirect(redirect.String(), http.StatusSeeOther) } // OIDCWellKnown generates JSON so OIDC clients know Gitea's capabilities @@ -753,7 +753,7 @@ func handleAuthorizeError(ctx *context.Context, authErr AuthorizeError, redirect if redirectURI == "" { log.Warn("Authorization failed: %v", authErr.ErrorDescription) ctx.Data["Error"] = authErr - ctx.HTML(400, tplGrantError) + ctx.HTML(http.StatusBadRequest, tplGrantError) return } redirect, err := url.Parse(redirectURI) @@ -766,7 +766,7 @@ func handleAuthorizeError(ctx *context.Context, authErr AuthorizeError, redirect q.Set("error_description", authErr.ErrorDescription) q.Set("state", authErr.State) redirect.RawQuery = q.Encode() - ctx.Redirect(redirect.String(), 302) + ctx.Redirect(redirect.String(), http.StatusSeeOther) } // SignInOAuth handles the OAuth2 login buttons @@ -868,19 +868,21 @@ func SignInOAuthCallback(ctx *context.Context) { return } u = &user_model.User{ - Name: getUserName(&gothUser), - FullName: gothUser.Name, - Email: gothUser.Email, - IsActive: !setting.OAuth2Client.RegisterEmailConfirm, - LoginType: auth.OAuth2, - LoginSource: authSource.ID, - LoginName: gothUser.UserID, - IsRestricted: setting.Service.DefaultUserIsRestricted, + Name: getUserName(&gothUser), + FullName: gothUser.Name, + Email: gothUser.Email, + LoginType: auth.OAuth2, + LoginSource: authSource.ID, + LoginName: gothUser.UserID, + } + + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsActive: util.OptionalBoolOf(!setting.OAuth2Client.RegisterEmailConfirm), } setUserGroupClaims(authSource, u, &gothUser) - if !createAndHandleCreatedUser(ctx, base.TplName(""), nil, u, &gothUser, setting.OAuth2Client.AccountLinking != setting.OAuth2AccountLinkingDisabled) { + if !createAndHandleCreatedUser(ctx, base.TplName(""), nil, u, overwriteDefault, &gothUser, setting.OAuth2Client.AccountLinking != setting.OAuth2AccountLinkingDisabled) { // error 
already handled return } @@ -1008,7 +1010,7 @@ func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model log.Error("Error storing session: %v", err) } - // Clear whatever CSRF has right now, force to generate a new one + // Clear whatever CSRF cookie has right now, force to generate a new one middleware.DeleteCSRFCookie(ctx.Resp) // Register last login @@ -1021,7 +1023,7 @@ func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model cols = append(cols, "is_admin", "is_restricted") } - if err := user_model.UpdateUserCols(db.DefaultContext, u, cols...); err != nil { + if err := user_model.UpdateUserCols(ctx, u, cols...); err != nil { ctx.ServerError("UpdateUserCols", err) return } @@ -1048,7 +1050,7 @@ func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model changed := setUserGroupClaims(source, u, &gothUser) if changed { - if err := user_model.UpdateUserCols(db.DefaultContext, u, "is_admin", "is_restricted"); err != nil { + if err := user_model.UpdateUserCols(ctx, u, "is_admin", "is_restricted"); err != nil { ctx.ServerError("UpdateUserCols", err) return } diff --git a/routers/web/auth/openid.go b/routers/web/auth/openid.go index f3189887a5..3012d8c5a5 100644 --- a/routers/web/auth/openid.go +++ b/routers/web/auth/openid.go @@ -423,12 +423,11 @@ func RegisterOpenIDPost(ctx *context.Context) { } u := &user_model.User{ - Name: form.UserName, - Email: form.Email, - Passwd: password, - IsActive: !(setting.Service.RegisterEmailConfirm || setting.Service.RegisterManualConfirm), + Name: form.UserName, + Email: form.Email, + Passwd: password, } - if !createUserInContext(ctx, tplSignUpOID, form, u, nil, false) { + if !createUserInContext(ctx, tplSignUpOID, form, u, nil, nil, false) { // error already handled return } diff --git a/routers/web/auth/password.go b/routers/web/auth/password.go index 65d5c55976..d7bf67cffb 100644 --- a/routers/web/auth/password.go +++ b/routers/web/auth/password.go @@ -9,7 +9,6 @@ import ( "net/http" "code.gitea.io/gitea/models/auth" - "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" @@ -103,7 +102,7 @@ func commonResetPassword(ctx *context.Context) (*user_model.User, *auth.TwoFacto ctx.Data["Title"] = ctx.Tr("auth.reset_password") ctx.Data["Code"] = code - if nil != ctx.User { + if nil != ctx.Doer { ctx.Data["user_signed_in"] = true } @@ -133,8 +132,8 @@ func commonResetPassword(ctx *context.Context) (*user_model.User, *auth.TwoFacto // Show the user that they are affecting the account that they intended to ctx.Data["user_email"] = u.Email - if nil != ctx.User && u.ID != ctx.User.ID { - ctx.Flash.Error(ctx.Tr("auth.reset_password_wrong_user", ctx.User.Email, u.Email)) + if nil != ctx.Doer && u.ID != ctx.Doer.ID { + ctx.Flash.Error(ctx.Tr("auth.reset_password_wrong_user", ctx.Doer.Email, u.Email)) return nil, nil } @@ -232,7 +231,7 @@ func ResetPasswdPost(ctx *context.Context) { return } u.MustChangePassword = false - if err := user_model.UpdateUserCols(db.DefaultContext, u, "must_change_password", "passwd", "passwd_hash_algo", "rands", "salt"); err != nil { + if err := user_model.UpdateUserCols(ctx, u, "must_change_password", "passwd", "passwd_hash_algo", "rands", "salt"); err != nil { ctx.ServerError("UpdateUser", err) return } @@ -283,7 +282,7 @@ func MustChangePasswordPost(ctx *context.Context) { ctx.HTML(http.StatusOK, tplMustChangePassword) return } - u := ctx.User + u := ctx.Doer // Make sure only 
requests for users who are eligible to change their password via // this method passes through if !u.MustChangePassword { @@ -327,7 +326,7 @@ func MustChangePasswordPost(ctx *context.Context) { u.MustChangePassword = false - if err := user_model.UpdateUserCols(db.DefaultContext, u, "must_change_password", "passwd", "passwd_hash_algo", "salt"); err != nil { + if err := user_model.UpdateUserCols(ctx, u, "must_change_password", "passwd", "passwd_hash_algo", "salt"); err != nil { ctx.ServerError("UpdateUser", err) return } diff --git a/routers/web/auth/webauthn.go b/routers/web/auth/webauthn.go index bedbe7ddc3..c0cf58f3d3 100644 --- a/routers/web/auth/webauthn.go +++ b/routers/web/auth/webauthn.go @@ -39,7 +39,7 @@ func WebAuthn(ctx *context.Context) { return } - ctx.HTML(200, tplWebAuthn) + ctx.HTML(http.StatusOK, tplWebAuthn) } // WebAuthnLoginAssertion submits a WebAuthn challenge to the browser @@ -166,5 +166,5 @@ func WebAuthnLoginAssertionPost(ctx *context.Context) { } } - ctx.JSON(200, map[string]string{"redirect": redirect}) + ctx.JSON(http.StatusOK, map[string]string{"redirect": redirect}) } diff --git a/routers/web/auth_windows.go b/routers/web/auth_windows.go new file mode 100644 index 0000000000..f404fd3771 --- /dev/null +++ b/routers/web/auth_windows.go @@ -0,0 +1,20 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package web + +import ( + "code.gitea.io/gitea/models/auth" + auth_service "code.gitea.io/gitea/services/auth" +) + +// specialAdd registers the SSPI auth method as the last method in the list. +// The SSPI plugin is expected to be executed last, as it returns 401 status code if negotiation +// fails (or if negotiation should continue), which would prevent other authentication methods +// to execute at all. +func specialAdd(group *auth_service.Group) { + if auth.IsSSPIEnabled() { + group.Add(&auth_service.SSPI{}) + } +} diff --git a/routers/web/base.go b/routers/web/base.go index f7eb003cc4..938abaef81 100644 --- a/routers/web/base.go +++ b/routers/web/base.go @@ -11,7 +11,6 @@ import ( "net/http" "os" "path" - "path/filepath" "strings" "code.gitea.io/gitea/modules/context" @@ -28,6 +27,7 @@ import ( ) func storageHandler(storageSetting setting.Storage, prefix string, objStore storage.ObjectStorage) func(next http.Handler) http.Handler { + prefix = strings.Trim(prefix, "/") funcInfo := routing.GetFuncInfo(storageHandler, prefix) return func(next http.Handler) http.Handler { if storageSetting.ServeDirect { @@ -37,29 +37,32 @@ func storageHandler(storageSetting setting.Storage, prefix string, objStore stor return } - if !strings.HasPrefix(req.URL.RequestURI(), "/"+prefix) { + if !strings.HasPrefix(req.URL.Path, "/"+prefix+"/") { next.ServeHTTP(w, req) return } routing.UpdateFuncInfo(req.Context(), funcInfo) - rPath := strings.TrimPrefix(req.URL.RequestURI(), "/"+prefix) + rPath := strings.TrimPrefix(req.URL.Path, "/"+prefix+"/") + rPath = path.Clean("/" + strings.ReplaceAll(rPath, "\\", "/"))[1:] + u, err := objStore.URL(rPath, path.Base(rPath)) if err != nil { if os.IsNotExist(err) || errors.Is(err, os.ErrNotExist) { log.Warn("Unable to find %s %s", prefix, rPath) - http.Error(w, "file not found", 404) + http.Error(w, "file not found", http.StatusNotFound) return } log.Error("Error whilst getting URL for %s %s. 
Error: %v", prefix, rPath, err) - http.Error(w, fmt.Sprintf("Error whilst getting URL for %s %s", prefix, rPath), 500) + http.Error(w, fmt.Sprintf("Error whilst getting URL for %s %s", prefix, rPath), http.StatusInternalServerError) return } + http.Redirect( w, req, u.String(), - 301, + http.StatusPermanentRedirect, ) }) } @@ -70,22 +73,18 @@ func storageHandler(storageSetting setting.Storage, prefix string, objStore stor return } - prefix := strings.Trim(prefix, "/") - - if !strings.HasPrefix(req.URL.EscapedPath(), "/"+prefix+"/") { + if !strings.HasPrefix(req.URL.Path, "/"+prefix+"/") { next.ServeHTTP(w, req) return } routing.UpdateFuncInfo(req.Context(), funcInfo) - rPath := strings.TrimPrefix(req.URL.EscapedPath(), "/"+prefix+"/") - rPath = strings.TrimPrefix(rPath, "/") + rPath := strings.TrimPrefix(req.URL.Path, "/"+prefix+"/") + rPath = path.Clean("/" + strings.ReplaceAll(rPath, "\\", "/"))[1:] if rPath == "" { - http.Error(w, "file not found", 404) + http.Error(w, "file not found", http.StatusNotFound) return } - rPath = path.Clean("/" + filepath.ToSlash(rPath)) - rPath = rPath[1:] fi, err := objStore.Stat(rPath) if err == nil && httpcache.HandleTimeCache(req, w, fi) { @@ -97,11 +96,11 @@ func storageHandler(storageSetting setting.Storage, prefix string, objStore stor if err != nil { if os.IsNotExist(err) || errors.Is(err, os.ErrNotExist) { log.Warn("Unable to find %s %s", prefix, rPath) - http.Error(w, "file not found", 404) + http.Error(w, "file not found", http.StatusNotFound) return } log.Error("Error whilst opening %s %s. Error: %v", prefix, rPath, err) - http.Error(w, fmt.Sprintf("Error whilst opening %s %s", prefix, rPath), 500) + http.Error(w, fmt.Sprintf("Error whilst opening %s %s", prefix, rPath), http.StatusInternalServerError) return } defer fr.Close() @@ -109,7 +108,7 @@ func storageHandler(storageSetting setting.Storage, prefix string, objStore stor _, err = io.Copy(w, fr) if err != nil { log.Error("Error whilst rendering %s %s. 
Error: %v", prefix, rPath, err) - http.Error(w, fmt.Sprintf("Error whilst rendering %s %s", prefix, rPath), 500) + http.Error(w, fmt.Sprintf("Error whilst rendering %s %s", prefix, rPath), http.StatusInternalServerError) return } }) @@ -164,7 +163,7 @@ func Recovery() func(next http.Handler) http.Handler { if !setting.IsProd { store["ErrorMsg"] = combinedErr } - err = rnd.HTML(w, 500, "status/500", templates.BaseVars().Merge(store)) + err = rnd.HTML(w, http.StatusInternalServerError, "status/500", templates.BaseVars().Merge(store)) if err != nil { log.Error("%v", err) } diff --git a/routers/web/events/events.go b/routers/web/events/events.go index 41f52375c3..d8c6f38d02 100644 --- a/routers/web/events/events.go +++ b/routers/web/events/events.go @@ -8,15 +8,10 @@ import ( "net/http" "time" - "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/context" - "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/modules/eventsource" "code.gitea.io/gitea/modules/graceful" - "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" - "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/routers/web/auth" ) @@ -48,7 +43,7 @@ func Events(ctx *context.Context) { shutdownCtx := graceful.GetManager().ShutdownContext() - uid := ctx.User.ID + uid := ctx.Doer.ID messageChan := eventsource.GetManager().Register(uid) @@ -71,8 +66,6 @@ func Events(ctx *context.Context) { timer := time.NewTicker(30 * time.Second) - stopwatchTimer := time.NewTicker(setting.UI.Notification.MinTimeout) - loop: for { select { @@ -82,7 +75,7 @@ loop: } _, err := event.WriteTo(ctx.Resp) if err != nil { - log.Error("Unable to write to EventStream for user %s: %v", ctx.User.Name, err) + log.Error("Unable to write to EventStream for user %s: %v", ctx.Doer.Name, err) go unregister() break loop } @@ -93,32 +86,6 @@ loop: case <-shutdownCtx.Done(): go unregister() break loop - case <-stopwatchTimer.C: - sws, err := models.GetUserStopwatches(ctx.User.ID, db.ListOptions{}) - if err != nil { - log.Error("Unable to GetUserStopwatches: %v", err) - continue - } - apiSWs, err := convert.ToStopWatches(sws) - if err != nil { - log.Error("Unable to APIFormat stopwatches: %v", err) - continue - } - dataBs, err := json.Marshal(apiSWs) - if err != nil { - log.Error("Unable to marshal stopwatches: %v", err) - continue - } - _, err = (&eventsource.Event{ - Name: "stopwatches", - Data: string(dataBs), - }).WriteTo(ctx.Resp) - if err != nil { - log.Error("Unable to write to EventStream for user %s: %v", ctx.User.Name, err) - go unregister() - break loop - } - ctx.Resp.Flush() case event, ok := <-messageChan: if !ok { break loop @@ -145,7 +112,7 @@ loop: _, err := event.WriteTo(ctx.Resp) if err != nil { - log.Error("Unable to write to EventStream for user %s: %v", ctx.User.Name, err) + log.Error("Unable to write to EventStream for user %s: %v", ctx.Doer.Name, err) go unregister() break loop } diff --git a/routers/web/explore/code.go b/routers/web/explore/code.go index 640a5a0e4f..41ca27782f 100644 --- a/routers/web/explore/code.go +++ b/routers/web/explore/code.go @@ -24,7 +24,7 @@ const ( // Code render explore code page func Code(ctx *context.Context) { if !setting.Indexer.RepoIndexerEnabled { - ctx.Redirect(setting.AppSubURL+"/explore", 302) + ctx.Redirect(setting.AppSubURL + "/explore") return } @@ -49,13 +49,13 @@ func Code(ctx *context.Context) { err error isAdmin bool ) - if ctx.User != nil { - isAdmin = ctx.User.IsAdmin + if ctx.Doer != nil { + isAdmin = ctx.Doer.IsAdmin } // guest user 
or non-admin user - if ctx.User == nil || !isAdmin { - repoIDs, err = models.FindUserAccessibleRepoIDs(ctx.User) + if ctx.Doer == nil || !isAdmin { + repoIDs, err = models.FindUserAccessibleRepoIDs(ctx.Doer) if err != nil { ctx.ServerError("SearchResults", err) return @@ -69,7 +69,7 @@ func Code(ctx *context.Context) { ) // if non-admin login user, we need check UnitTypeCode at first - if ctx.User != nil && len(repoIDs) > 0 { + if ctx.Doer != nil && len(repoIDs) > 0 { repoMaps, err := repo_model.GetRepositoriesMapByIDs(repoIDs) if err != nil { ctx.ServerError("SearchResults", err) @@ -79,7 +79,7 @@ func Code(ctx *context.Context) { rightRepoMap := make(map[int64]*repo_model.Repository, len(repoMaps)) repoIDs = make([]int64, 0, len(repoMaps)) for id, repo := range repoMaps { - if models.CheckRepoUnitUser(repo, ctx.User, unit.TypeCode) { + if models.CheckRepoUnitUser(repo, ctx.Doer, unit.TypeCode) { rightRepoMap[id] = repo repoIDs = append(repoIDs, id) } @@ -98,7 +98,7 @@ func Code(ctx *context.Context) { ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable() } // if non-login user or isAdmin, no need to check UnitTypeCode - } else if (ctx.User == nil && len(repoIDs) > 0) || isAdmin { + } else if (ctx.Doer == nil && len(repoIDs) > 0) || isAdmin { total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch) if err != nil { if code_indexer.IsAvailable() { @@ -138,7 +138,6 @@ func Code(ctx *context.Context) { ctx.Data["queryType"] = queryType ctx.Data["SearchResults"] = searchResults ctx.Data["SearchResultLanguages"] = searchResultLanguages - ctx.Data["RequireHighlightJS"] = true ctx.Data["PageIsViewCode"] = true pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5) diff --git a/routers/web/explore/org.go b/routers/web/explore/org.go index 41c0a0c83c..eb6972fad3 100644 --- a/routers/web/explore/org.go +++ b/routers/web/explore/org.go @@ -27,12 +27,12 @@ func Organizations(ctx *context.Context) { ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled visibleTypes := []structs.VisibleType{structs.VisibleTypePublic} - if ctx.User != nil { + if ctx.Doer != nil { visibleTypes = append(visibleTypes, structs.VisibleTypeLimited, structs.VisibleTypePrivate) } RenderUserSearch(ctx, &user_model.SearchUserOptions{ - Actor: ctx.User, + Actor: ctx.Doer, Type: user_model.UserTypeOrganization, ListOptions: db.ListOptions{PageSize: setting.UI.ExplorePagingNum}, Visible: visibleTypes, diff --git a/routers/web/explore/repo.go b/routers/web/explore/repo.go index ce3aefe26f..3e8aa2bb0f 100644 --- a/routers/web/explore/repo.go +++ b/routers/web/explore/repo.go @@ -86,7 +86,7 @@ func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) { Page: page, PageSize: opts.PageSize, }, - Actor: ctx.User, + Actor: ctx.Doer, OrderBy: orderBy, Private: opts.Private, Keyword: keyword, @@ -124,14 +124,14 @@ func Repos(ctx *context.Context) { ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled var ownerID int64 - if ctx.User != nil && !ctx.User.IsAdmin { - ownerID = ctx.User.ID + if ctx.Doer != nil && !ctx.Doer.IsAdmin { + ownerID = ctx.Doer.ID } RenderRepoSearch(ctx, &RepoSearchOptions{ PageSize: setting.UI.ExplorePagingNum, OwnerID: ownerID, - Private: ctx.User != nil, + Private: ctx.Doer != nil, TplName: tplExploreRepos, }) } diff --git a/routers/web/explore/topic.go b/routers/web/explore/topic.go new file mode 100644 index 0000000000..39b87f2498 --- 
/dev/null +++ b/routers/web/explore/topic.go @@ -0,0 +1,42 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package explore + +import ( + "net/http" + + "code.gitea.io/gitea/models/db" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" + api "code.gitea.io/gitea/modules/structs" +) + +// TopicSearch search for creating topic +func TopicSearch(ctx *context.Context) { + opts := &repo_model.FindTopicOptions{ + Keyword: ctx.FormString("q"), + ListOptions: db.ListOptions{ + Page: ctx.FormInt("page"), + PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), + }, + } + + topics, total, err := repo_model.FindTopics(opts) + if err != nil { + ctx.Error(http.StatusInternalServerError) + return + } + + topicResponses := make([]*api.TopicResponse, len(topics)) + for i, topic := range topics { + topicResponses[i] = convert.ToTopicResponse(topic) + } + + ctx.SetTotalCountHeader(total) + ctx.JSON(http.StatusOK, map[string]interface{}{ + "topics": topicResponses, + }) +} diff --git a/routers/web/explore/user.go b/routers/web/explore/user.go index 98788f5433..ea0d7d5f9d 100644 --- a/routers/web/explore/user.go +++ b/routers/web/explore/user.go @@ -102,7 +102,7 @@ func Users(ctx *context.Context) { ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled RenderUserSearch(ctx, &user_model.SearchUserOptions{ - Actor: ctx.User, + Actor: ctx.Doer, Type: user_model.UserTypeIndividual, ListOptions: db.ListOptions{PageSize: setting.UI.ExplorePagingNum}, IsActive: util.OptionalBoolTrue, diff --git a/routers/web/feed/convert.go b/routers/web/feed/convert.go index 4dbd9c9d0e..64801a6078 100644 --- a/routers/web/feed/convert.go +++ b/routers/web/feed/convert.go @@ -7,12 +7,15 @@ package feed import ( "fmt" "html" + "net/http" "net/url" "strconv" "strings" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/util" @@ -44,8 +47,27 @@ func toReleaseLink(act *models.Action) string { return act.GetRepoLink() + "/releases/tag/" + util.PathEscapeSegments(act.GetBranch()) } +// renderMarkdown creates a minimal markdown render context from an action. 
+// If rendering fails, the original markdown text is returned +func renderMarkdown(ctx *context.Context, act *models.Action, content string) string { + markdownCtx := &markup.RenderContext{ + Ctx: ctx, + URLPrefix: act.GetRepoLink(), + Type: markdown.MarkupName, + Metas: map[string]string{ + "user": act.GetRepoUserName(), + "repo": act.GetRepoName(), + }, + } + markdown, err := markdown.RenderString(markdownCtx, content) + if err != nil { + return content + } + return markdown +} + // feedActionsToFeedItems convert gitea's Action feed to feeds Item -func feedActionsToFeedItems(ctx *context.Context, actions []*models.Action) (items []*feeds.Item, err error) { +func feedActionsToFeedItems(ctx *context.Context, actions models.ActionList) (items []*feeds.Item, err error) { for _, act := range actions { act.LoadActUser() @@ -192,12 +214,12 @@ func feedActionsToFeedItems(ctx *context.Context, actions []*models.Action) (ite case models.ActionCreateIssue, models.ActionCreatePullRequest: desc = strings.Join(act.GetIssueInfos(), "#") - content = act.GetIssueContent() + content = renderMarkdown(ctx, act, act.GetIssueContent()) case models.ActionCommentIssue, models.ActionApprovePullRequest, models.ActionRejectPullRequest, models.ActionCommentPull: desc = act.GetIssueTitle() comment := act.GetIssueInfos()[1] if len(comment) != 0 { - desc += "\n\n" + comment + desc += "\n\n" + renderMarkdown(ctx, act, comment) } case models.ActionMergePullRequest: desc = act.GetIssueInfos()[1] @@ -226,3 +248,18 @@ func feedActionsToFeedItems(ctx *context.Context, actions []*models.Action) (ite } return } + +// GetFeedType return if it is a feed request and altered name and feed type. +func GetFeedType(name string, req *http.Request) (bool, string, string) { + if strings.HasSuffix(name, ".rss") || + strings.Contains(req.Header.Get("Accept"), "application/rss+xml") { + return true, strings.TrimSuffix(name, ".rss"), "rss" + } + + if strings.HasSuffix(name, ".atom") || + strings.Contains(req.Header.Get("Accept"), "application/atom+xml") { + return true, strings.TrimSuffix(name, ".atom"), "atom" + } + + return false, name, "" +} diff --git a/routers/web/feed/profile.go b/routers/web/feed/profile.go index 1a7f4ad24b..61a39755f5 100644 --- a/routers/web/feed/profile.go +++ b/routers/web/feed/profile.go @@ -9,70 +9,43 @@ import ( "time" "code.gitea.io/gitea/models" - user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" "github.com/gorilla/feeds" ) -// RetrieveFeeds loads feeds for the specified user -func RetrieveFeeds(ctx *context.Context, options models.GetFeedsOptions) []*models.Action { - actions, err := models.GetFeeds(options) - if err != nil { - ctx.ServerError("GetFeeds", err) - return nil - } - - userCache := map[int64]*user_model.User{options.RequestedUser.ID: options.RequestedUser} - if ctx.User != nil { - userCache[ctx.User.ID] = ctx.User - } - for _, act := range actions { - if act.ActUser != nil { - userCache[act.ActUserID] = act.ActUser - } - } - - for _, act := range actions { - repoOwner, ok := userCache[act.Repo.OwnerID] - if !ok { - repoOwner, err = user_model.GetUserByID(act.Repo.OwnerID) - if err != nil { - if user_model.IsErrUserNotExist(err) { - continue - } - ctx.ServerError("GetUserByID", err) - return nil - } - userCache[repoOwner.ID] = repoOwner - } - act.Repo.Owner = repoOwner - } - return actions +// ShowUserFeedRSS show user activity as RSS feed +func ShowUserFeedRSS(ctx *context.Context) { + showUserFeed(ctx, "rss") } -// ShowUserFeed show user activity as RSS / 
Atom feed -func ShowUserFeed(ctx *context.Context, ctxUser *user_model.User, formatType string) { - actions := RetrieveFeeds(ctx, models.GetFeedsOptions{ - RequestedUser: ctxUser, - Actor: ctx.User, +// ShowUserFeedAtom show user activity as Atom feed +func ShowUserFeedAtom(ctx *context.Context) { + showUserFeed(ctx, "atom") +} + +// showUserFeed show user activity as RSS / Atom feed +func showUserFeed(ctx *context.Context, formatType string) { + actions, err := models.GetFeeds(ctx, models.GetFeedsOptions{ + RequestedUser: ctx.ContextUser, + Actor: ctx.Doer, IncludePrivate: false, - OnlyPerformedBy: true, + OnlyPerformedBy: !ctx.ContextUser.IsOrganization(), IncludeDeleted: false, Date: ctx.FormString("date"), }) - if ctx.Written() { + if err != nil { + ctx.ServerError("GetFeeds", err) return } feed := &feeds.Feed{ - Title: ctx.Tr("home.feed_of", ctxUser.DisplayName()), - Link: &feeds.Link{Href: ctxUser.HTMLURL()}, - Description: ctxUser.Description, + Title: ctx.Tr("home.feed_of", ctx.ContextUser.DisplayName()), + Link: &feeds.Link{Href: ctx.ContextUser.HTMLURL()}, + Description: ctx.ContextUser.Description, Created: time.Now(), } - var err error feed.Items, err = feedActionsToFeedItems(ctx, actions) if err != nil { ctx.ServerError("convert feed", err) diff --git a/routers/web/feed/repo.go b/routers/web/feed/repo.go new file mode 100644 index 0000000000..ac856195b9 --- /dev/null +++ b/routers/web/feed/repo.go @@ -0,0 +1,44 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package feed + +import ( + "time" + + "code.gitea.io/gitea/models" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/context" + + "github.com/gorilla/feeds" +) + +// ShowRepoFeed shows user activity on the repo as RSS / Atom feed +func ShowRepoFeed(ctx *context.Context, repo *repo_model.Repository, formatType string) { + actions, err := models.GetFeeds(ctx, models.GetFeedsOptions{ + RequestedRepo: repo, + Actor: ctx.Doer, + IncludePrivate: true, + Date: ctx.FormString("date"), + }) + if err != nil { + ctx.ServerError("GetFeeds", err) + return + } + + feed := &feeds.Feed{ + Title: ctx.Tr("home.feed_of", repo.FullName()), + Link: &feeds.Link{Href: repo.HTMLURL()}, + Description: repo.Description, + Created: time.Now(), + } + + feed.Items, err = feedActionsToFeedItems(ctx, actions) + if err != nil { + ctx.ServerError("convert feed", err) + return + } + + writeFeed(ctx, feed, formatType) +} diff --git a/routers/web/goget.go b/routers/web/goget.go index 2843a96c30..a58739fe42 100644 --- a/routers/web/goget.go +++ b/routers/web/goget.go @@ -5,6 +5,8 @@ package web import ( + "fmt" + "html" "net/http" "net/url" "path" @@ -14,8 +16,6 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - - "github.com/unknwon/com" ) func goGet(ctx *context.Context) { @@ -48,7 +48,7 @@ func goGet(ctx *context.Context) { `)) - ctx.Status(400) + ctx.Status(http.StatusBadRequest) return } branchName := setting.Repository.DefaultBranch @@ -65,23 +65,23 @@ func goGet(ctx *context.Context) { if appURL.Scheme == string(setting.HTTP) { insecure = "--insecure " } - ctx.RespHeader().Set("Content-Type", "text/html") - ctx.Status(http.StatusOK) - _, _ = ctx.Write([]byte(com.Expand(` + + goGetImport := context.ComposeGoGetImport(ownerName, trimmedRepoName) + goImportContent := fmt.Sprintf("%s git %s", goGetImport, 
repo_model.ComposeHTTPSCloneURL(ownerName, repoName) /*CloneLink*/) + goSourceContent := fmt.Sprintf("%s _ %s %s", goGetImport, prefix+"{/dir}" /*GoDocDirectory*/, prefix+"{/dir}/{file}#L{line}" /*GoDocFile*/) + goGetCli := fmt.Sprintf("go get %s%s", insecure, goGetImport) + + res := fmt.Sprintf(` - - + + - go get {Insecure}{GoGetImport} + %s - -`, map[string]string{ - "GoGetImport": context.ComposeGoGetImport(ownerName, trimmedRepoName), - "CloneLink": repo_model.ComposeHTTPSCloneURL(ownerName, repoName), - "GoDocDirectory": prefix + "{/dir}", - "GoDocFile": prefix + "{/dir}/{file}#L{line}", - "Insecure": insecure, - }))) +`, html.EscapeString(goImportContent), html.EscapeString(goSourceContent), html.EscapeString(goGetCli)) + + ctx.RespHeader().Set("Content-Type", "text/html") + _, _ = ctx.Write([]byte(res)) } diff --git a/routers/web/healthcheck/check.go b/routers/web/healthcheck/check.go new file mode 100644 index 0000000000..481f05c0da --- /dev/null +++ b/routers/web/healthcheck/check.go @@ -0,0 +1,143 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package healthcheck + +import ( + "net/http" + "os" + "time" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/cache" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" +) + +type status string + +const ( + // pass healthy (acceptable aliases: "ok" to support Node's Terminus and "up" for Java's SpringBoot) + // fail unhealthy (acceptable aliases: "error" to support Node's Terminus and "down" for Java's SpringBoot), and + // warn healthy, with some concerns. + // + // ref https://datatracker.ietf.org/doc/html/draft-inadarei-api-health-check#section-3.1 + // status: (required) indicates whether the service status is acceptable + // or not. API publishers SHOULD use following values for the field: + // The value of the status field is case-insensitive and is tightly + // related with the HTTP response code returned by the health endpoint. + // For "pass" status, HTTP response code in the 2xx-3xx range MUST be + // used. For "fail" status, HTTP response code in the 4xx-5xx range + // MUST be used. In case of the "warn" status, endpoints MUST return + // HTTP status in the 2xx-3xx range, and additional information SHOULD + // be provided, utilizing optional fields of the response. + pass status = "pass" + fail status = "fail" + warn status = "warn" +) + +func (s status) ToHTTPStatus() int { + if s == pass || s == warn { + return http.StatusOK + } + return http.StatusFailedDependency +} + +type checks map[string][]componentStatus + +// response is the data returned by the health endpoint, which will be marshaled to JSON format +type response struct { + Status status `json:"status"` + Description string `json:"description"` // a human-friendly description of the service + Checks checks `json:"checks,omitempty"` // The Checks Object, should be omitted on installation route +} + +// componentStatus presents one status of a single check object +// an object that provides detailed health statuses of additional downstream systems and endpoints +// which can affect the overall health of the main API. +type componentStatus struct { + Status status `json:"status"` + Time string `json:"time"` // the date-time, in ISO8601 format + Output string `json:"output,omitempty"` // this field SHOULD be omitted for "pass" state. 
+} + +// Check is the health check API handler +func Check(w http.ResponseWriter, r *http.Request) { + rsp := response{ + Status: pass, + Description: setting.AppName, + Checks: make(checks), + } + + statuses := make([]status, 0) + if setting.InstallLock { + statuses = append(statuses, checkDatabase(rsp.Checks)) + statuses = append(statuses, checkCache(rsp.Checks)) + } + for _, s := range statuses { + if s != pass { + rsp.Status = fail + break + } + } + + data, _ := json.MarshalIndent(rsp, "", " ") + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(rsp.Status.ToHTTPStatus()) + _, _ = w.Write(data) +} + +// database checks gitea database status +func checkDatabase(checks checks) status { + st := componentStatus{} + if err := db.GetEngine(db.DefaultContext).Ping(); err != nil { + st.Status = fail + st.Time = getCheckTime() + log.Error("database ping failed with error: %v", err) + } else { + st.Status = pass + st.Time = getCheckTime() + } + + if setting.Database.UseSQLite3 && st.Status == pass { + if !setting.EnableSQLite3 { + st.Status = fail + st.Time = getCheckTime() + log.Error("SQLite3 health check failed with error: %v", "this Gitea binary is built without SQLite3 enabled") + } else { + if _, err := os.Stat(setting.Database.Path); err != nil { + st.Status = fail + st.Time = getCheckTime() + log.Error("SQLite3 file exists check failed with error: %v", err) + } + } + } + + checks["database:ping"] = []componentStatus{st} + return st.Status +} + +// cache checks gitea cache status +func checkCache(checks checks) status { + if !setting.CacheService.Enabled { + return pass + } + + st := componentStatus{} + if err := cache.Ping(); err != nil { + st.Status = fail + st.Time = getCheckTime() + log.Error("cache ping failed with error: %v", err) + } else { + st.Status = pass + st.Time = getCheckTime() + } + checks["cache:ping"] = []componentStatus{st} + return st.Status +} + +func getCheckTime() string { + return time.Now().UTC().Format(time.RFC3339) +} diff --git a/routers/web/home.go b/routers/web/home.go index ed81d84c7f..9036814ddf 100644 --- a/routers/web/home.go +++ b/routers/web/home.go @@ -25,14 +25,14 @@ const ( // Home render home page func Home(ctx *context.Context) { if ctx.IsSigned { - if !ctx.User.IsActive && setting.Service.RegisterEmailConfirm { + if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm { ctx.Data["Title"] = ctx.Tr("auth.active_your_account") ctx.HTML(http.StatusOK, auth.TplActivate) - } else if !ctx.User.IsActive || ctx.User.ProhibitLogin { - log.Info("Failed authentication attempt for %s from %s", ctx.User.Name, ctx.RemoteAddr()) + } else if !ctx.Doer.IsActive || ctx.Doer.ProhibitLogin { + log.Info("Failed authentication attempt for %s from %s", ctx.Doer.Name, ctx.RemoteAddr()) ctx.Data["Title"] = ctx.Tr("auth.prohibit_login") ctx.HTML(http.StatusOK, "user/auth/prohibit_login") - } else if ctx.User.MustChangePassword { + } else if ctx.Doer.MustChangePassword { ctx.Data["Title"] = ctx.Tr("auth.must_change_password") ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/change_password" middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI()) diff --git a/routers/web/metrics.go b/routers/web/metrics.go index 37558ee337..c7e01b8faa 100644 --- a/routers/web/metrics.go +++ b/routers/web/metrics.go @@ -21,13 +21,13 @@ func Metrics(resp http.ResponseWriter, req *http.Request) { } header := req.Header.Get("Authorization") if header == "" { - http.Error(resp, "", 401) + http.Error(resp, "", http.StatusUnauthorized) 
return } got := []byte(header) want := []byte("Bearer " + setting.Metrics.Token) if subtle.ConstantTimeCompare(got, want) != 1 { - http.Error(resp, "", 401) + http.Error(resp, "", http.StatusUnauthorized) return } promhttp.Handler().ServeHTTP(resp, req) diff --git a/routers/web/misc/markdown.go b/routers/web/misc/markdown.go new file mode 100644 index 0000000000..0567cbb30c --- /dev/null +++ b/routers/web/misc/markdown.go @@ -0,0 +1,99 @@ +// Copyright 2014 The Gogs Authors. All rights reserved. +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package misc + +import ( + "net/http" + "strings" + + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/markup" + "code.gitea.io/gitea/modules/markup/markdown" + "code.gitea.io/gitea/modules/setting" + api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/modules/web" + + "mvdan.cc/xurls/v2" +) + +// Markdown render markdown document to HTML +func Markdown(ctx *context.Context) { + // swagger:operation POST /markdown miscellaneous renderMarkdown + // --- + // summary: Render a markdown document as HTML + // parameters: + // - name: body + // in: body + // schema: + // "$ref": "#/definitions/MarkdownOption" + // consumes: + // - application/json + // produces: + // - text/html + // responses: + // "200": + // "$ref": "#/responses/MarkdownRender" + // "422": + // "$ref": "#/responses/validationError" + + form := web.GetForm(ctx).(*api.MarkdownOption) + + if ctx.HasAPIError() { + ctx.Error(http.StatusUnprocessableEntity, "", ctx.GetErrMsg()) + return + } + + if len(form.Text) == 0 { + _, _ = ctx.Write([]byte("")) + return + } + + switch form.Mode { + case "comment": + fallthrough + case "gfm": + urlPrefix := form.Context + meta := map[string]string{} + if !strings.HasPrefix(setting.AppSubURL+"/", urlPrefix) { + // check if urlPrefix is already set to a URL + linkRegex, _ := xurls.StrictMatchingScheme("https?://") + m := linkRegex.FindStringIndex(urlPrefix) + if m == nil { + urlPrefix = util.URLJoin(setting.AppURL, form.Context) + } + } + if ctx.Repo != nil && ctx.Repo.Repository != nil { + // "gfm" = Github Flavored Markdown - set this to render as a document + if form.Mode == "gfm" { + meta = ctx.Repo.Repository.ComposeDocumentMetas() + } else { + meta = ctx.Repo.Repository.ComposeMetas() + } + } + if form.Mode == "gfm" { + meta["mode"] = "document" + } + + if err := markdown.Render(&markup.RenderContext{ + Ctx: ctx, + URLPrefix: urlPrefix, + Metas: meta, + IsWiki: form.Wiki, + }, strings.NewReader(form.Text), ctx.Resp); err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + default: + if err := markdown.RenderRaw(&markup.RenderContext{ + Ctx: ctx, + URLPrefix: form.Context, + }, strings.NewReader(form.Text), ctx.Resp); err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + } +} diff --git a/routers/api/v1/misc/swagger.go b/routers/web/misc/swagger.go similarity index 100% rename from routers/api/v1/misc/swagger.go rename to routers/web/misc/swagger.go diff --git a/routers/web/org/home.go b/routers/web/org/home.go index fc81ceb719..24a0f13b54 100644 --- a/routers/web/org/home.go +++ b/routers/web/org/home.go @@ -10,6 +10,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" 
"code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" @@ -39,7 +40,7 @@ func Home(ctx *context.Context) { org := ctx.Org.Organization - if !models.HasOrgOrUserVisible(org.AsUser(), ctx.User) { + if !organization.HasOrgOrUserVisible(ctx, org.AsUser(), ctx.Doer) { ctx.NotFound("HasOrgOrUserVisible", nil) return } @@ -113,7 +114,7 @@ func Home(ctx *context.Context) { OwnerID: org.ID, OrderBy: orderBy, Private: ctx.IsSigned, - Actor: ctx.User, + Actor: ctx.Doer, Language: language, IncludeDescription: setting.UI.SearchRepoDescription, }) @@ -122,28 +123,28 @@ func Home(ctx *context.Context) { return } - opts := &models.FindOrgMembersOpts{ + opts := &organization.FindOrgMembersOpts{ OrgID: org.ID, PublicOnly: true, ListOptions: db.ListOptions{Page: 1, PageSize: 25}, } - if ctx.User != nil { - isMember, err := org.IsOrgMember(ctx.User.ID) + if ctx.Doer != nil { + isMember, err := org.IsOrgMember(ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrgMember") return } - opts.PublicOnly = !isMember && !ctx.User.IsAdmin + opts.PublicOnly = !isMember && !ctx.Doer.IsAdmin } - members, _, err := models.FindOrgMembers(opts) + members, _, err := organization.FindOrgMembers(opts) if err != nil { ctx.ServerError("FindOrgMembers", err) return } - membersCount, err := models.CountOrgMembers(opts) + membersCount, err := organization.CountOrgMembers(opts) if err != nil { ctx.ServerError("CountOrgMembers", err) return diff --git a/routers/web/org/members.go b/routers/web/org/members.go index b8e7fa1ff5..add8e724bd 100644 --- a/routers/web/org/members.go +++ b/routers/web/org/members.go @@ -9,6 +9,7 @@ import ( "net/http" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" @@ -31,21 +32,21 @@ func Members(ctx *context.Context) { page = 1 } - opts := &models.FindOrgMembersOpts{ + opts := &organization.FindOrgMembersOpts{ OrgID: org.ID, PublicOnly: true, } - if ctx.User != nil { - isMember, err := ctx.Org.Organization.IsOrgMember(ctx.User.ID) + if ctx.Doer != nil { + isMember, err := ctx.Org.Organization.IsOrgMember(ctx.Doer.ID) if err != nil { ctx.Error(http.StatusInternalServerError, "IsOrgMember") return } - opts.PublicOnly = !isMember && !ctx.User.IsAdmin + opts.PublicOnly = !isMember && !ctx.Doer.IsAdmin } - total, err := models.CountOrgMembers(opts) + total, err := organization.CountOrgMembers(opts) if err != nil { ctx.Error(http.StatusInternalServerError, "CountOrgMembers") return @@ -54,7 +55,7 @@ func Members(ctx *context.Context) { pager := context.NewPagination(int(total), setting.UI.MembersPagingNum, page, 5) opts.ListOptions.Page = page opts.ListOptions.PageSize = setting.UI.MembersPagingNum - members, membersIsPublic, err := models.FindOrgMembers(opts) + members, membersIsPublic, err := organization.FindOrgMembers(opts) if err != nil { ctx.ServerError("GetMembers", err) return @@ -80,24 +81,24 @@ func MembersAction(ctx *context.Context) { var err error switch ctx.Params(":action") { case "private": - if ctx.User.ID != uid && !ctx.Org.IsOwner { + if ctx.Doer.ID != uid && !ctx.Org.IsOwner { ctx.Error(http.StatusNotFound) return } - err = models.ChangeOrgUserStatus(org.ID, uid, false) + err = organization.ChangeOrgUserStatus(org.ID, uid, false) case "public": - if ctx.User.ID != uid && !ctx.Org.IsOwner { + if ctx.Doer.ID != uid && !ctx.Org.IsOwner { ctx.Error(http.StatusNotFound) return } - err = models.ChangeOrgUserStatus(org.ID, uid, 
true) + err = organization.ChangeOrgUserStatus(org.ID, uid, true) case "remove": if !ctx.Org.IsOwner { ctx.Error(http.StatusNotFound) return } - err = org.RemoveMember(uid) - if models.IsErrLastOrgOwner(err) { + err = models.RemoveOrgUser(org.ID, uid) + if organization.IsErrLastOrgOwner(err) { ctx.Flash.Error(ctx.Tr("form.last_org_owner")) ctx.JSON(http.StatusOK, map[string]interface{}{ "redirect": ctx.Org.OrgLink + "/members", @@ -105,8 +106,8 @@ func MembersAction(ctx *context.Context) { return } case "leave": - err = org.RemoveMember(ctx.User.ID) - if models.IsErrLastOrgOwner(err) { + err = models.RemoveOrgUser(org.ID, ctx.Doer.ID) + if organization.IsErrLastOrgOwner(err) { ctx.Flash.Error(ctx.Tr("form.last_org_owner")) ctx.JSON(http.StatusOK, map[string]interface{}{ "redirect": ctx.Org.OrgLink + "/members", diff --git a/routers/web/org/org.go b/routers/web/org/org.go index c66a0cd4c8..32d8787995 100644 --- a/routers/web/org/org.go +++ b/routers/web/org/org.go @@ -9,8 +9,8 @@ import ( "errors" "net/http" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" @@ -29,7 +29,7 @@ const ( func Create(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("new_org") ctx.Data["DefaultOrgVisibilityMode"] = setting.Service.DefaultOrgVisibilityMode - if !ctx.User.CanCreateOrganization() { + if !ctx.Doer.CanCreateOrganization() { ctx.ServerError("Not allowed", errors.New(ctx.Tr("org.form.create_org_not_allowed"))) return } @@ -41,7 +41,7 @@ func CreatePost(ctx *context.Context) { form := *web.GetForm(ctx).(*forms.CreateOrgForm) ctx.Data["Title"] = ctx.Tr("new_org") - if !ctx.User.CanCreateOrganization() { + if !ctx.Doer.CanCreateOrganization() { ctx.ServerError("Not allowed", errors.New(ctx.Tr("org.form.create_org_not_allowed"))) return } @@ -51,7 +51,7 @@ func CreatePost(ctx *context.Context) { return } - org := &models.Organization{ + org := &organization.Organization{ Name: form.OrgName, IsActive: true, Type: user_model.UserTypeOrganization, @@ -59,7 +59,7 @@ func CreatePost(ctx *context.Context) { RepoAdminChangeTeamAccess: form.RepoAdminChangeTeamAccess, } - if err := models.CreateOrganization(org, ctx.User); err != nil { + if err := organization.CreateOrganization(org, ctx.Doer); err != nil { ctx.Data["Err_OrgName"] = true switch { case user_model.IsErrUserAlreadyExist(err): @@ -68,7 +68,7 @@ func CreatePost(ctx *context.Context) { ctx.RenderWithErr(ctx.Tr("org.form.name_reserved", err.(db.ErrNameReserved).Name), tplCreateOrg, &form) case db.IsErrNamePatternNotAllowed(err): ctx.RenderWithErr(ctx.Tr("org.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplCreateOrg, &form) - case models.IsErrUserNotAllowedCreateOrg(err): + case organization.IsErrUserNotAllowedCreateOrg(err): ctx.RenderWithErr(ctx.Tr("org.form.create_org_not_allowed"), tplCreateOrg, &form) default: ctx.ServerError("CreateOrganization", err) diff --git a/routers/web/org/org_labels.go b/routers/web/org/org_labels.go index 9cc9a92507..d79ffc597c 100644 --- a/routers/web/org/org_labels.go +++ b/routers/web/org/org_labels.go @@ -10,6 +10,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/context" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/forms" ) @@ -48,7 +49,7 @@ func NewLabel(ctx *context.Context) { Description: 
form.Description, Color: form.Color, } - if err := models.NewLabel(l); err != nil { + if err := models.NewLabel(ctx, l); err != nil { ctx.ServerError("NewLabel", err) return } @@ -100,9 +101,9 @@ func InitializeLabels(ctx *context.Context) { return } - if err := models.InitializeLabels(db.DefaultContext, ctx.Org.Organization.ID, form.TemplateName, true); err != nil { - if models.IsErrIssueLabelTemplateLoad(err) { - originalErr := err.(models.ErrIssueLabelTemplateLoad).OriginalError + if err := repo_module.InitializeLabels(ctx, ctx.Org.Organization.ID, form.TemplateName, true); err != nil { + if repo_module.IsErrIssueLabelTemplateLoad(err) { + originalErr := err.(repo_module.ErrIssueLabelTemplateLoad).OriginalError ctx.Flash.Error(ctx.Tr("repo.issues.label_templates.fail_to_load_file", form.TemplateName, originalErr)) ctx.Redirect(ctx.Org.OrgLink + "/settings/labels") return diff --git a/routers/web/org/setting.go b/routers/web/org/setting.go index 404aac8894..5cd245ef09 100644 --- a/routers/web/org/setting.go +++ b/routers/web/org/setting.go @@ -18,6 +18,7 @@ import ( "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web" user_setting "code.gitea.io/gitea/routers/web/user/setting" @@ -96,7 +97,7 @@ func SettingsPost(ctx *context.Context) { org.Name = form.Name org.LowerName = strings.ToLower(form.Name) - if ctx.User.IsAdmin { + if ctx.Doer.IsAdmin { org.MaxRepoCreation = form.MaxRepoCreation } @@ -181,6 +182,9 @@ func SettingsDelete(ctx *context.Context) { if models.IsErrUserOwnRepos(err) { ctx.Flash.Error(ctx.Tr("form.org_still_own_repo")) ctx.Redirect(ctx.Org.OrgLink + "/settings/delete") + } else if models.IsErrUserOwnPackages(err) { + ctx.Flash.Error(ctx.Tr("form.org_still_own_packages")) + ctx.Redirect(ctx.Org.OrgLink + "/settings/delete") } else { ctx.ServerError("DeleteOrganization", err) } @@ -232,6 +236,6 @@ func Labels(ctx *context.Context) { ctx.Data["PageIsOrgSettings"] = true ctx.Data["PageIsOrgSettingsLabels"] = true ctx.Data["RequireTribute"] = true - ctx.Data["LabelTemplates"] = models.LabelTemplates + ctx.Data["LabelTemplates"] = repo_module.LabelTemplates ctx.HTML(http.StatusOK, tplSettingsLabels) } diff --git a/routers/web/org/teams.go b/routers/web/org/teams.go index f6e09eb4c8..31bfaea92f 100644 --- a/routers/web/org/teams.go +++ b/routers/web/org/teams.go @@ -13,13 +13,17 @@ import ( "strings" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" unit_model "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/modules/log" + api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/routers/utils" "code.gitea.io/gitea/services/forms" @@ -43,7 +47,7 @@ func Teams(ctx *context.Context) { ctx.Data["PageIsOrgTeams"] = true for _, t := range ctx.Org.Teams { - if err := t.GetMembers(&models.SearchMembersOptions{}); err != nil { + if err := t.GetMembersCtx(ctx); err != nil { ctx.ServerError("GetMembers", err) return } @@ -69,11 +73,11 @@ func TeamsAction(ctx *context.Context) { ctx.Error(http.StatusNotFound) return } - err = ctx.Org.Team.AddMember(ctx.User.ID) + err = 
models.AddTeamMember(ctx.Org.Team, ctx.Doer.ID) case "leave": - err = ctx.Org.Team.RemoveMember(ctx.User.ID) + err = models.RemoveTeamMember(ctx.Org.Team, ctx.Doer.ID) if err != nil { - if models.IsErrLastOrgOwner(err) { + if organization.IsErrLastOrgOwner(err) { ctx.Flash.Error(ctx.Tr("form.last_org_owner")) } else { log.Error("Action(%s): %v", ctx.Params(":action"), err) @@ -94,9 +98,9 @@ func TeamsAction(ctx *context.Context) { ctx.Error(http.StatusNotFound) return } - err = ctx.Org.Team.RemoveMember(uid) + err = models.RemoveTeamMember(ctx.Org.Team, uid) if err != nil { - if models.IsErrLastOrgOwner(err) { + if organization.IsErrLastOrgOwner(err) { ctx.Flash.Error(ctx.Tr("form.last_org_owner")) } else { log.Error("Action(%s): %v", ctx.Params(":action"), err) @@ -139,14 +143,14 @@ func TeamsAction(ctx *context.Context) { if ctx.Org.Team.IsMember(u.ID) { ctx.Flash.Error(ctx.Tr("org.teams.add_duplicate_users")) } else { - err = ctx.Org.Team.AddMember(u.ID) + err = models.AddTeamMember(ctx.Org.Team, u.ID) } page = "team" } if err != nil { - if models.IsErrLastOrgOwner(err) { + if organization.IsErrLastOrgOwner(err) { ctx.Flash.Error(ctx.Tr("form.last_org_owner")) } else { log.Error("Action(%s): %v", ctx.Params(":action"), err) @@ -191,13 +195,13 @@ func TeamsRepoAction(ctx *context.Context) { ctx.ServerError("GetRepositoryByName", err) return } - err = ctx.Org.Team.AddRepository(repo) + err = models.AddRepository(ctx.Org.Team, repo) case "remove": - err = ctx.Org.Team.RemoveRepository(ctx.FormInt64("repoid")) + err = models.RemoveRepository(ctx.Org.Team, ctx.FormInt64("repoid")) case "addall": - err = ctx.Org.Team.AddAllRepositories() + err = models.AddAllRepositories(ctx.Org.Team) case "removeall": - err = ctx.Org.Team.RemoveAllRepositories() + err = models.RemoveAllRepositories(ctx.Org.Team) } if err != nil { @@ -220,7 +224,7 @@ func NewTeam(ctx *context.Context) { ctx.Data["Title"] = ctx.Org.Organization.FullName ctx.Data["PageIsOrgTeams"] = true ctx.Data["PageIsOrgTeamsNew"] = true - ctx.Data["Team"] = &models.Team{} + ctx.Data["Team"] = &organization.Team{} ctx.Data["Units"] = unit_model.Units ctx.HTML(http.StatusOK, tplTeamNew) } @@ -251,7 +255,7 @@ func NewTeamPost(ctx *context.Context) { p = unit_model.MinUnitAccessMode(unitPerms) } - t := &models.Team{ + t := &organization.Team{ OrgID: ctx.Org.Organization.ID, Name: form.TeamName, Description: form.Description, @@ -261,9 +265,9 @@ func NewTeamPost(ctx *context.Context) { } if t.AccessMode < perm.AccessModeAdmin { - units := make([]*models.TeamUnit, 0, len(unitPerms)) + units := make([]*organization.TeamUnit, 0, len(unitPerms)) for tp, perm := range unitPerms { - units = append(units, &models.TeamUnit{ + units = append(units, &organization.TeamUnit{ OrgID: ctx.Org.Organization.ID, Type: tp, AccessMode: perm, @@ -291,7 +295,7 @@ func NewTeamPost(ctx *context.Context) { if err := models.NewTeam(t); err != nil { ctx.Data["Err_TeamName"] = true switch { - case models.IsErrTeamAlreadyExist(err): + case organization.IsErrTeamAlreadyExist(err): ctx.RenderWithErr(ctx.Tr("form.team_name_been_taken"), tplTeamNew, &form) default: ctx.ServerError("NewTeam", err) @@ -307,7 +311,7 @@ func TeamMembers(ctx *context.Context) { ctx.Data["Title"] = ctx.Org.Team.Name ctx.Data["PageIsOrgTeams"] = true ctx.Data["PageIsOrgTeamMembers"] = true - if err := ctx.Org.Team.GetMembers(&models.SearchMembersOptions{}); err != nil { + if err := ctx.Org.Team.GetMembersCtx(ctx); err != nil { ctx.ServerError("GetMembers", err) return } @@ -320,7 +324,7 @@ func 
TeamRepositories(ctx *context.Context) { ctx.Data["Title"] = ctx.Org.Team.Name ctx.Data["PageIsOrgTeams"] = true ctx.Data["PageIsOrgTeamRepos"] = true - if err := ctx.Org.Team.GetRepositories(&models.SearchOrgTeamOptions{}); err != nil { + if err := ctx.Org.Team.GetRepositoriesCtx(ctx); err != nil { ctx.ServerError("GetRepositories", err) return } @@ -328,6 +332,51 @@ func TeamRepositories(ctx *context.Context) { ctx.HTML(http.StatusOK, tplTeamRepositories) } +// SearchTeam api for searching teams +func SearchTeam(ctx *context.Context) { + listOptions := db.ListOptions{ + Page: ctx.FormInt("page"), + PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), + } + + opts := &organization.SearchTeamOptions{ + UserID: ctx.Doer.ID, + Keyword: ctx.FormTrim("q"), + OrgID: ctx.Org.Organization.ID, + IncludeDesc: ctx.FormString("include_desc") == "" || ctx.FormBool("include_desc"), + ListOptions: listOptions, + } + + teams, maxResults, err := organization.SearchTeam(opts) + if err != nil { + log.Error("SearchTeam failed: %v", err) + ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ + "ok": false, + "error": "SearchTeam internal failure", + }) + return + } + + apiTeams := make([]*api.Team, len(teams)) + for i := range teams { + if err := teams[i].GetUnits(); err != nil { + log.Error("Team GetUnits failed: %v", err) + ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ + "ok": false, + "error": "SearchTeam failed to get units", + }) + return + } + apiTeams[i] = convert.ToTeam(teams[i]) + } + + ctx.SetTotalCountHeader(maxResults) + ctx.JSON(http.StatusOK, map[string]interface{}{ + "ok": true, + "data": apiTeams, + }) +} + // EditTeam render team edit page func EditTeam(ctx *context.Context) { ctx.Data["Title"] = ctx.Org.Organization.FullName @@ -374,17 +423,17 @@ func EditTeamPost(ctx *context.Context) { } t.Description = form.Description if t.AccessMode < perm.AccessModeAdmin { - units := make([]models.TeamUnit, 0, len(unitPerms)) + units := make([]organization.TeamUnit, 0, len(unitPerms)) for tp, perm := range unitPerms { - units = append(units, models.TeamUnit{ + units = append(units, organization.TeamUnit{ OrgID: t.OrgID, TeamID: t.ID, Type: tp, AccessMode: perm, }) } - if err := models.UpdateTeamUnits(t, units); err != nil { - ctx.Error(http.StatusInternalServerError, "LoadIssue", err.Error()) + if err := organization.UpdateTeamUnits(t, units); err != nil { + ctx.Error(http.StatusInternalServerError, "UpdateTeamUnits", err.Error()) return } } @@ -403,7 +452,7 @@ func EditTeamPost(ctx *context.Context) { if err := models.UpdateTeam(t, isAuthChanged, isIncludeAllChanged); err != nil { ctx.Data["Err_TeamName"] = true switch { - case models.IsErrTeamAlreadyExist(err): + case organization.IsErrTeamAlreadyExist(err): ctx.RenderWithErr(ctx.Tr("form.team_name_been_taken"), tplTeamNew, &form) default: ctx.ServerError("UpdateTeam", err) diff --git a/routers/web/repo/attachment.go b/routers/web/repo/attachment.go index 4101d81ac5..c930311f70 100644 --- a/routers/web/repo/attachment.go +++ b/routers/web/repo/attachment.go @@ -44,7 +44,7 @@ func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) { } defer file.Close() - attach, err := attachment.UploadAttachment(file, ctx.User.ID, repoID, 0, header.Filename, allowedTypes) + attach, err := attachment.UploadAttachment(file, ctx.Doer.ID, repoID, 0, header.Filename, allowedTypes) if err != nil { if upload.IsErrFileTypeForbidden(err) { ctx.Error(http.StatusBadRequest, err.Error()) @@ -68,7 +68,7 @@ func 
DeleteAttachment(ctx *context.Context) { ctx.Error(http.StatusBadRequest, err.Error()) return } - if !ctx.IsSigned || (ctx.User.ID != attach.UploaderID) { + if !ctx.IsSigned || (ctx.Doer.ID != attach.UploaderID) { ctx.Error(http.StatusForbidden) return } @@ -101,12 +101,12 @@ func GetAttachment(ctx *context.Context) { } if repository == nil { // If not linked - if !(ctx.IsSigned && attach.UploaderID == ctx.User.ID) { // We block if not the uploader + if !(ctx.IsSigned && attach.UploaderID == ctx.Doer.ID) { // We block if not the uploader ctx.Error(http.StatusNotFound) return } } else { // If we have the repository we check access - perm, err := models.GetUserRepoPermission(repository, ctx.User) + perm, err := models.GetUserRepoPermission(ctx, repository, ctx.Doer) if err != nil { ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err.Error()) return diff --git a/routers/web/repo/branch.go b/routers/web/repo/branch.go index 489ef9a357..08f388bf3c 100644 --- a/routers/web/repo/branch.go +++ b/routers/web/repo/branch.go @@ -56,7 +56,7 @@ func Branches(ctx *context.Context) { ctx.Data["IsWriter"] = ctx.Repo.CanWrite(unit.TypeCode) ctx.Data["IsMirror"] = ctx.Repo.Repository.IsMirror ctx.Data["CanPull"] = ctx.Repo.CanWrite(unit.TypeCode) || - (ctx.IsSigned && repo_model.HasForkedRepo(ctx.User.ID, ctx.Repo.Repository.ID)) + (ctx.IsSigned && repo_model.HasForkedRepo(ctx.Doer.ID, ctx.Repo.Repository.ID)) ctx.Data["PageIsViewCode"] = true ctx.Data["PageIsBranches"] = true @@ -90,7 +90,7 @@ func DeleteBranchPost(ctx *context.Context) { defer redirect(ctx) branchName := ctx.FormString("name") - if err := repo_service.DeleteBranch(ctx.User, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil { + if err := repo_service.DeleteBranch(ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil { switch { case git.IsErrBranchNotExist(err): log.Debug("DeleteBranch: Can't delete non existing branch '%s'", branchName) @@ -129,7 +129,7 @@ func RestoreBranchPost(ctx *context.Context) { if err := git.Push(ctx, ctx.Repo.Repository.RepoPath(), git.PushOptions{ Remote: ctx.Repo.Repository.RepoPath(), Branch: fmt.Sprintf("%s:%s%s", deletedBranch.Commit, git.BranchPrefix, deletedBranch.Name), - Env: models.PushingEnvironment(ctx.User, ctx.Repo.Repository), + Env: repo_module.PushingEnvironment(ctx.Doer, ctx.Repo.Repository), }); err != nil { if strings.Contains(err.Error(), "already exists") { log.Debug("RestoreBranch: Can't restore branch '%s', since one with same name already exist", deletedBranch.Name) @@ -147,8 +147,8 @@ func RestoreBranchPost(ctx *context.Context) { RefFullName: git.BranchPrefix + deletedBranch.Name, OldCommitID: git.EmptySHA, NewCommitID: deletedBranch.Commit, - PusherID: ctx.User.ID, - PusherName: ctx.User.Name, + PusherID: ctx.Doer.ID, + PusherName: ctx.Doer.Name, RepoUserName: ctx.Repo.Owner.Name, RepoName: ctx.Repo.Repository.Name, }); err != nil { @@ -279,7 +279,7 @@ func loadOneBranch(ctx *context.Context, rawBranch, defaultBranch *git.Branch, p } if repo, ok := repoIDToRepo[pr.BaseRepoID]; ok { pr.BaseRepo = repo - } else if err := pr.LoadBaseRepo(); err != nil { + } else if err := pr.LoadBaseRepoCtx(ctx); err != nil { ctx.ServerError("pr.LoadBaseRepo", err) return nil } else { @@ -290,7 +290,7 @@ func loadOneBranch(ctx *context.Context, rawBranch, defaultBranch *git.Branch, p if pr.HasMerged { baseGitRepo, ok := repoIDToGitRepo[pr.BaseRepoID] if !ok { - baseGitRepo, err = git.OpenRepositoryCtx(ctx, pr.BaseRepo.RepoPath()) + baseGitRepo, err = 
git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) if err != nil { ctx.ServerError("OpenRepository", err) return nil @@ -364,11 +364,11 @@ func CreateBranch(ctx *context.Context) { if ctx.Repo.IsViewBranch { target = ctx.Repo.BranchName } - err = release_service.CreateNewTag(ctx, ctx.User, ctx.Repo.Repository, target, form.NewBranchName, "") + err = release_service.CreateNewTag(ctx, ctx.Doer, ctx.Repo.Repository, target, form.NewBranchName, "") } else if ctx.Repo.IsViewBranch { - err = repo_service.CreateNewBranch(ctx, ctx.User, ctx.Repo.Repository, ctx.Repo.BranchName, form.NewBranchName) + err = repo_service.CreateNewBranch(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.BranchName, form.NewBranchName) } else { - err = repo_service.CreateNewBranchFromCommit(ctx, ctx.User, ctx.Repo.Repository, ctx.Repo.CommitID, form.NewBranchName) + err = repo_service.CreateNewBranchFromCommit(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.CommitID, form.NewBranchName) } if err != nil { if models.IsErrTagAlreadyExists(err) { diff --git a/routers/web/repo/cherry_pick.go b/routers/web/repo/cherry_pick.go index eb22809348..d2a4e19d0e 100644 --- a/routers/web/repo/cherry_pick.go +++ b/routers/web/repo/cherry_pick.go @@ -47,8 +47,6 @@ func CherryPick(ctx *context.Context) { ctx.Data["commit_message"] = splits[1] } - ctx.Data["RequireHighlightJS"] = true - canCommit := renderCommitRights(ctx) ctx.Data["TreePath"] = "" @@ -77,7 +75,6 @@ func CherryPickPost(ctx *context.Context) { ctx.Data["CherryPickType"] = "cherry-pick" } - ctx.Data["RequireHighlightJS"] = true canCommit := renderCommitRights(ctx) branchName := ctx.Repo.BranchName if form.CommitChoice == frmCommitChoiceNewBranch { @@ -127,7 +124,7 @@ func CherryPickPost(ctx *context.Context) { // First lets try the simple plain read-tree -m approach opts.Content = sha - if _, err := files.CherryPick(ctx, ctx.Repo.Repository, ctx.User, form.Revert, opts); err != nil { + if _, err := files.CherryPick(ctx, ctx.Repo.Repository, ctx.Doer, form.Revert, opts); err != nil { if models.IsErrBranchAlreadyExists(err) { // User has specified a branch that already exists branchErr := err.(models.ErrBranchAlreadyExists) @@ -151,7 +148,7 @@ func CherryPickPost(ctx *context.Context) { return } } else { - if err := git.GetRawDiff(ctx, ctx.Repo.Repository.RepoPath(), sha, git.RawDiffType("patch"), buf); err != nil { + if err := git.GetRawDiff(ctx.Repo.GitRepo, sha, git.RawDiffType("patch"), buf); err != nil { if git.IsErrNotExist(err) { ctx.NotFound("GetRawDiff", errors.New("commit "+ctx.Params(":sha")+" does not exist.")) return @@ -164,7 +161,7 @@ func CherryPickPost(ctx *context.Context) { opts.Content = buf.String() ctx.Data["FileContent"] = opts.Content - if _, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.User, opts); err != nil { + if _, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, opts); err != nil { if models.IsErrBranchAlreadyExists(err) { // User has specified a branch that already exists branchErr := err.(models.ErrBranchAlreadyExists) diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go index 36cc005cec..7f68fd3dd1 100644 --- a/routers/web/repo/commit.go +++ b/routers/web/repo/commit.go @@ -7,13 +7,13 @@ package repo import ( "errors" + "fmt" "net/http" "strings" "code.gitea.io/gitea/models" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/db" - repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" 
"code.gitea.io/gitea/modules/charset" @@ -253,7 +253,6 @@ func FileHistory(ctx *context.Context) { // Diff show different from current commit to previous commit func Diff(ctx *context.Context) { ctx.Data["PageIsDiff"] = true - ctx.Data["RequireHighlightJS"] = true ctx.Data["RequireTribute"] = true userName := ctx.Repo.Owner.Name @@ -265,7 +264,7 @@ func Diff(ctx *context.Context) { ) if ctx.Data["PageIsWiki"] != nil { - gitRepo, err = git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.WikiPath()) + gitRepo, err = git.OpenRepository(ctx, ctx.Repo.Repository.WikiPath()) if err != nil { ctx.ServerError("Repo.GitRepo.GetCommit", err) return @@ -381,15 +380,24 @@ func Diff(ctx *context.Context) { // RawDiff dumps diff results of repository in given commit ID to io.Writer func RawDiff(ctx *context.Context) { - var repoPath string + var gitRepo *git.Repository if ctx.Data["PageIsWiki"] != nil { - repoPath = ctx.Repo.Repository.WikiPath() + wikiRepo, err := git.OpenRepository(ctx, ctx.Repo.Repository.WikiPath()) + if err != nil { + ctx.ServerError("OpenRepository", err) + return + } + defer wikiRepo.Close() + gitRepo = wikiRepo } else { - repoPath = repo_model.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name) + gitRepo = ctx.Repo.GitRepo + if gitRepo == nil { + ctx.ServerError("GitRepo not open", fmt.Errorf("no open git repo for '%s'", ctx.Repo.Repository.FullName())) + return + } } if err := git.GetRawDiff( - ctx, - repoPath, + gitRepo, ctx.Params(":sha"), git.RawDiffType(ctx.Params(":ext")), ctx.Resp, diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go index 80d4fee19e..31914c43ab 100644 --- a/routers/web/repo/compare.go +++ b/routers/web/repo/compare.go @@ -18,7 +18,6 @@ import ( "strings" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -144,6 +143,11 @@ func setCsvCompareContext(ctx *context.Context) { if err == errTooLarge { return CsvDiffResult{nil, err.Error()} } + if err != nil { + log.Error("CreateCsvDiff error whilst creating baseReader from file %s in commit %s in %s: %v", diffFile.Name, baseCommit.ID.String(), ctx.Repo.Repository.Name, err) + return CsvDiffResult{nil, "unable to load file from base commit"} + } + headReader, headBlobCloser, err := csvReaderFromCommit(&markup.RenderContext{Ctx: ctx, Filename: diffFile.Name}, headCommit) if headBlobCloser != nil { defer headBlobCloser.Close() @@ -151,13 +155,17 @@ func setCsvCompareContext(ctx *context.Context) { if err == errTooLarge { return CsvDiffResult{nil, err.Error()} } + if err != nil { + log.Error("CreateCsvDiff error whilst creating headReader from file %s in commit %s in %s: %v", diffFile.Name, headCommit.ID.String(), ctx.Repo.Repository.Name, err) + return CsvDiffResult{nil, "unable to load file from head commit"} + } sections, err := gitdiff.CreateCsvDiff(diffFile, baseReader, headReader) if err != nil { errMessage, err := csv_module.FormatError(err, ctx.Locale) if err != nil { - log.Error("RenderCsvDiff failed: %v", err) - return CsvDiffResult{nil, ""} + log.Error("CreateCsvDiff FormatError failed: %v", err) + return CsvDiffResult{nil, "unknown csv diff error"} } return CsvDiffResult{nil, errMessage} } @@ -269,7 +277,7 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { } return nil } - if err := ci.HeadRepo.GetOwner(db.DefaultContext); err != nil { + if err := ci.HeadRepo.GetOwner(ctx); err != nil { if user_model.IsErrUserNotExist(err) { 
ctx.NotFound("GetUserByName", nil) } else { @@ -299,6 +307,13 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { ci.BaseBranch = baseCommit.ID.String() ctx.Data["BaseBranch"] = ci.BaseBranch baseIsCommit = true + } else if ci.BaseBranch == git.EmptySHA { + if isSameRepo { + ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadBranch)) + } else { + ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadRepo.FullName()) + ":" + util.PathEscapeSegments(ci.HeadBranch)) + } + return nil } else { ctx.NotFound("IsRefExist", nil) return nil @@ -338,8 +353,8 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { // check if they have a fork of the base repo and offer that as // "OwnForkRepo" var ownForkRepo *repo_model.Repository - if ctx.User != nil && baseRepo.OwnerID != ctx.User.ID { - repo := repo_model.GetForkedRepo(ctx.User.ID, baseRepo.ID) + if ctx.Doer != nil && baseRepo.OwnerID != ctx.Doer.ID { + repo := repo_model.GetForkedRepo(ctx.Doer.ID, baseRepo.ID) if repo != nil { ownForkRepo = repo ctx.Data["OwnForkRepo"] = ownForkRepo @@ -354,7 +369,7 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { has = true } - // 4. If the ctx.User has their own fork of the baseRepo and the headUser is the ctx.User + // 4. If the ctx.Doer has their own fork of the baseRepo and the headUser is the ctx.Doer // set the headRepo to the ownFork if !has && ownForkRepo != nil && ownForkRepo.OwnerID == ci.HeadUser.ID { ci.HeadRepo = ownForkRepo @@ -383,7 +398,7 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { ci.HeadRepo = ctx.Repo.Repository ci.HeadGitRepo = ctx.Repo.GitRepo } else if has { - ci.HeadGitRepo, err = git.OpenRepositoryCtx(ctx, ci.HeadRepo.RepoPath()) + ci.HeadGitRepo, err = git.OpenRepository(ctx, ci.HeadRepo.RepoPath()) if err != nil { ctx.ServerError("OpenRepository", err) return nil @@ -392,11 +407,12 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { } ctx.Data["HeadRepo"] = ci.HeadRepo + ctx.Data["BaseCompareRepo"] = ctx.Repo.Repository - // Now we need to assert that the ctx.User has permission to read + // Now we need to assert that the ctx.Doer has permission to read // the baseRepo's code and pulls // (NOT headRepo's) - permBase, err := models.GetUserRepoPermission(baseRepo, ctx.User) + permBase, err := models.GetUserRepoPermission(ctx, baseRepo, ctx.Doer) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return nil @@ -404,7 +420,7 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { if !permBase.CanRead(unit.TypeCode) { if log.IsTrace() { log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in baseRepo has Permissions: %-+v", - ctx.User, + ctx.Doer, baseRepo, permBase) } @@ -414,8 +430,8 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { // If we're not merging from the same repo: if !isSameRepo { - // Assert ctx.User has permission to read headRepo's codes - permHead, err := models.GetUserRepoPermission(ci.HeadRepo, ctx.User) + // Assert ctx.Doer has permission to read headRepo's codes + permHead, err := models.GetUserRepoPermission(ctx, ci.HeadRepo, ctx.Doer) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return nil @@ -423,13 +439,14 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { if !permHead.CanRead(unit.TypeCode) { if log.IsTrace() { log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v", - ctx.User, + ctx.Doer, ci.HeadRepo, permHead) } 
ctx.NotFound("ParseCompareInfo", nil) return nil } + ctx.Data["CanWriteToHeadRepo"] = permHead.CanWrite(unit.TypeCode) } // If we have a rootRepo and it's different from: @@ -439,7 +456,7 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { if rootRepo != nil && rootRepo.ID != ci.HeadRepo.ID && rootRepo.ID != baseRepo.ID { - canRead := models.CheckRepoUnitUser(rootRepo, ctx.User, unit.TypeCode) + canRead := models.CheckRepoUnitUser(rootRepo, ctx.Doer, unit.TypeCode) if canRead { ctx.Data["RootRepo"] = rootRepo if !fileOnly { @@ -464,7 +481,7 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { ownForkRepo.ID != ci.HeadRepo.ID && ownForkRepo.ID != baseRepo.ID && (rootRepo == nil || ownForkRepo.ID != rootRepo.ID) { - canRead := models.CheckRepoUnitUser(ownForkRepo, ctx.User, unit.TypeCode) + canRead := models.CheckRepoUnitUser(ownForkRepo, ctx.Doer, unit.TypeCode) if canRead { ctx.Data["OwnForkRepo"] = ownForkRepo if !fileOnly { @@ -506,7 +523,7 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) { if log.IsTrace() { log.Trace("Permission Denied: User: %-v cannot create/read pull requests in Repo: %-v\nUser in baseRepo has Permissions: %-+v", - ctx.User, + ctx.Doer, baseRepo, permBase) } @@ -655,7 +672,7 @@ func PrepareCompareDiff( } func getBranchesAndTagsForRepo(ctx gocontext.Context, repo *repo_model.Repository) (branches, tags []string, err error) { - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { return nil, nil, err } @@ -789,7 +806,7 @@ func ExcerptBlob(ctx *context.Context) { gitRepo := ctx.Repo.GitRepo if ctx.FormBool("wiki") { var err error - gitRepo, err = git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.WikiPath()) + gitRepo, err = git.OpenRepository(ctx, ctx.Repo.Repository.WikiPath()) if err != nil { ctx.ServerError("OpenRepository", err) return @@ -857,7 +874,7 @@ func ExcerptBlob(ctx *context.Context) { } } ctx.Data["section"] = section - ctx.Data["fileName"] = filePath + ctx.Data["FileNameHash"] = base.EncodeSha1(filePath) ctx.Data["AfterCommitID"] = commitID ctx.Data["Anchor"] = anchor ctx.HTML(http.StatusOK, tplBlobExcerpt) diff --git a/routers/web/repo/download.go b/routers/web/repo/download.go index 72d34cb937..eae61bb8e7 100644 --- a/routers/web/repo/download.go +++ b/routers/web/repo/download.go @@ -6,7 +6,11 @@ package repo import ( + "path" + "time" + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/httpcache" @@ -18,8 +22,8 @@ import ( ) // ServeBlobOrLFS download a git.Blob redirecting to LFS if necessary -func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob) error { - if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`) { +func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob, lastModified time.Time) error { + if httpcache.HandleGenericETagTimeCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`, lastModified) { return nil } @@ -45,7 +49,7 @@ func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob) error { log.Error("ServeBlobOrLFS: Close: %v", err) } closed = true - return common.ServeBlob(ctx, blob) + return common.ServeBlob(ctx, blob, lastModified) } if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+pointer.Oid+`"`) { return nil @@ -76,37 +80,65 @@ func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob) error 
{ } closed = true - return common.ServeBlob(ctx, blob) + return common.ServeBlob(ctx, blob, lastModified) +} + +func getBlobForEntry(ctx *context.Context) (blob *git.Blob, lastModified time.Time) { + entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath) + if err != nil { + if git.IsErrNotExist(err) { + ctx.NotFound("GetTreeEntryByPath", err) + } else { + ctx.ServerError("GetTreeEntryByPath", err) + } + return + } + + if entry.IsDir() || entry.IsSubModule() { + ctx.NotFound("getBlobForEntry", nil) + return + } + + var c *git.LastCommitCache + if setting.CacheService.LastCommit.Enabled && ctx.Repo.CommitsCount >= setting.CacheService.LastCommit.CommitsCount { + c = git.NewLastCommitCache(ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, setting.LastCommitCacheTTLSeconds, cache.GetCache()) + } + + info, _, err := git.Entries([]*git.TreeEntry{entry}).GetCommitsInfo(ctx, ctx.Repo.Commit, path.Dir("/" + ctx.Repo.TreePath)[1:], c) + if err != nil { + ctx.ServerError("GetCommitsInfo", err) + return + } + + if len(info) == 1 { + // Not Modified + lastModified = info[0].Commit.Committer.When + } + blob = entry.Blob() + + return } // SingleDownload download a file by repos path func SingleDownload(ctx *context.Context) { - blob, err := ctx.Repo.Commit.GetBlobByPath(ctx.Repo.TreePath) - if err != nil { - if git.IsErrNotExist(err) { - ctx.NotFound("GetBlobByPath", nil) - } else { - ctx.ServerError("GetBlobByPath", err) - } + blob, lastModified := getBlobForEntry(ctx) + if blob == nil { return } - if err = common.ServeBlob(ctx, blob); err != nil { + + if err := common.ServeBlob(ctx, blob, lastModified); err != nil { ctx.ServerError("ServeBlob", err) } } // SingleDownloadOrLFS download a file by repos path redirecting to LFS if necessary func SingleDownloadOrLFS(ctx *context.Context) { - blob, err := ctx.Repo.Commit.GetBlobByPath(ctx.Repo.TreePath) - if err != nil { - if git.IsErrNotExist(err) { - ctx.NotFound("GetBlobByPath", nil) - } else { - ctx.ServerError("GetBlobByPath", err) - } + blob, lastModified := getBlobForEntry(ctx) + if blob == nil { return } - if err = ServeBlobOrLFS(ctx, blob); err != nil { + + if err := ServeBlobOrLFS(ctx, blob, lastModified); err != nil { ctx.ServerError("ServeBlobOrLFS", err) } } @@ -122,7 +154,7 @@ func DownloadByID(ctx *context.Context) { } return } - if err = common.ServeBlob(ctx, blob); err != nil { + if err = common.ServeBlob(ctx, blob, time.Time{}); err != nil { ctx.ServerError("ServeBlob", err) } } @@ -138,7 +170,7 @@ func DownloadByIDOrLFS(ctx *context.Context) { } return } - if err = ServeBlobOrLFS(ctx, blob); err != nil { + if err = ServeBlobOrLFS(ctx, blob, time.Time{}); err != nil { ctx.ServerError("ServeBlob", err) } } diff --git a/routers/web/repo/editor.go b/routers/web/repo/editor.go index c4fff1e421..1253721d34 100644 --- a/routers/web/repo/editor.go +++ b/routers/web/repo/editor.go @@ -40,7 +40,7 @@ const ( ) func renderCommitRights(ctx *context.Context) bool { - canCommitToBranch, err := ctx.Repo.CanCommitToBranch(ctx, ctx.User) + canCommitToBranch, err := ctx.Repo.CanCommitToBranch(ctx, ctx.Doer) if err != nil { log.Error("CanCommitToBranch: %v", err) } @@ -67,7 +67,6 @@ func getParentTreeFields(treePath string) (treeNames, treePaths []string) { func editFile(ctx *context.Context, isNewFile bool) { ctx.Data["PageIsEdit"] = true ctx.Data["IsNewFile"] = isNewFile - ctx.Data["RequireHighlightJS"] = true canCommit := renderCommitRights(ctx) treePath := cleanUploadFileName(ctx.Repo.TreePath) @@ -197,7 +196,6 @@ func editFilePost(ctx 
*context.Context, form forms.EditRepoFileForm, isNewFile b ctx.Data["PageIsEdit"] = true ctx.Data["PageHasPosted"] = true ctx.Data["IsNewFile"] = isNewFile - ctx.Data["RequireHighlightJS"] = true ctx.Data["TreePath"] = form.TreePath ctx.Data["TreeNames"] = treeNames ctx.Data["TreePaths"] = treePaths @@ -241,7 +239,7 @@ func editFilePost(ctx *context.Context, form forms.EditRepoFileForm, isNewFile b message += "\n\n" + form.CommitMessage } - if _, err := files_service.CreateOrUpdateRepoFile(ctx, ctx.Repo.Repository, ctx.User, &files_service.UpdateRepoFileOptions{ + if _, err := files_service.CreateOrUpdateRepoFile(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.UpdateRepoFileOptions{ LastCommitID: form.LastCommit, OldBranch: ctx.Repo.BranchName, NewBranch: branchName, @@ -447,7 +445,7 @@ func DeleteFilePost(ctx *context.Context) { message += "\n\n" + form.CommitMessage } - if _, err := files_service.DeleteRepoFile(ctx, ctx.Repo.Repository, ctx.User, &files_service.DeleteRepoFileOptions{ + if _, err := files_service.DeleteRepoFile(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.DeleteRepoFileOptions{ LastCommitID: form.LastCommit, OldBranch: ctx.Repo.BranchName, NewBranch: branchName, @@ -653,7 +651,7 @@ func UploadFilePost(ctx *context.Context) { message += "\n\n" + form.CommitMessage } - if err := files_service.UploadRepoFiles(ctx, ctx.Repo.Repository, ctx.User, &files_service.UploadRepoFileOptions{ + if err := files_service.UploadRepoFiles(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.UploadRepoFileOptions{ LastCommitID: ctx.Repo.CommitID, OldBranch: oldBranchName, NewBranch: branchName, @@ -780,7 +778,7 @@ func UploadFileToServer(ctx *context.Context) { func RemoveUploadFileFromServer(ctx *context.Context) { form := web.GetForm(ctx).(*forms.RemoveUploadFileForm) if len(form.File) == 0 { - ctx.Status(204) + ctx.Status(http.StatusNoContent) return } @@ -790,7 +788,7 @@ func RemoveUploadFileFromServer(ctx *context.Context) { } log.Trace("Upload file removed: %s", form.File) - ctx.Status(204) + ctx.Status(http.StatusNoContent) } // GetUniquePatchBranchName Gets a unique branch name for a new patch branch @@ -798,7 +796,7 @@ func RemoveUploadFileFromServer(ctx *context.Context) { // that doesn't already exist. 
If we exceed 1000 tries or an error is thrown, we just return "" so the user has to // type in the branch name themselves (will be an empty field) func GetUniquePatchBranchName(ctx *context.Context) string { - prefix := ctx.User.LowerName + "-patch-" + prefix := ctx.Doer.LowerName + "-patch-" for i := 1; i <= 1000; i++ { branchName := fmt.Sprintf("%s%d", prefix, i) if _, err := ctx.Repo.GitRepo.GetBranch(branchName); err != nil { diff --git a/routers/web/repo/editor_test.go b/routers/web/repo/editor_test.go index ab7532ebb5..2bebb6fd52 100644 --- a/routers/web/repo/editor_test.go +++ b/routers/web/repo/editor_test.go @@ -67,7 +67,7 @@ func TestGetClosestParentWithFiles(t *testing.T) { repo := ctx.Repo.Repository branch := repo.DefaultBranch - gitRepo, _ := git.OpenRepository(repo.RepoPath()) + gitRepo, _ := git.OpenRepository(git.DefaultContext, repo.RepoPath()) defer gitRepo.Close() commit, _ := gitRepo.GetBranchCommit(branch) expectedTreePath := "" diff --git a/routers/web/repo/http.go b/routers/web/repo/http.go index d5379b610e..a52d9b76c2 100644 --- a/routers/web/repo/http.go +++ b/routers/web/repo/http.go @@ -21,14 +21,13 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/auth" - "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" - user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/util" @@ -111,19 +110,7 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { reponame = reponame[:len(reponame)-5] } - owner, err := user_model.GetUserByName(username) - if err != nil { - if user_model.IsErrUserNotExist(err) { - if redirectUserID, err := user_model.LookupUserRedirect(username); err == nil { - context.RedirectToUser(ctx, username, redirectUserID) - } else { - ctx.NotFound(fmt.Sprintf("User %s does not exist", username), nil) - } - } else { - ctx.ServerError("GetUserByName", err) - } - return - } + owner := ctx.ContextUser if !owner.IsOrganization() && !owner.IsActive { ctx.PlainText(http.StatusForbidden, "Repository cannot be accessed. You cannot push or open issues/pull-requests.") return @@ -159,7 +146,7 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { // don't allow anonymous pulls if organization is not public if isPublicPull { - if err := repo.GetOwner(db.DefaultContext); err != nil { + if err := repo.GetOwner(ctx); err != nil { ctx.ServerError("GetOwner", err) return } @@ -178,7 +165,7 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { } if ctx.IsBasicAuth && ctx.Data["IsApiToken"] != true { - _, err = auth.GetTwoFactorByUID(ctx.User.ID) + _, err = auth.GetTwoFactorByUID(ctx.Doer.ID) if err == nil { // TODO: This response should be changed to "invalid credentials" for security reasons once the expectation behind it (creating an app token to authenticate) is properly documented ctx.PlainText(http.StatusUnauthorized, "Users with two-factor authentication enabled cannot perform HTTP/HTTPS operations via plain username and password. 
Please create and use a personal access token on the user settings page") @@ -189,13 +176,13 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { } } - if !ctx.User.IsActive || ctx.User.ProhibitLogin { + if !ctx.Doer.IsActive || ctx.Doer.ProhibitLogin { ctx.PlainText(http.StatusForbidden, "Your account is disabled.") return } if repoExist { - p, err := models.GetUserRepoPermission(repo, ctx.User) + p, err := models.GetUserRepoPermission(ctx, repo, ctx.Doer) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return @@ -218,22 +205,21 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { } environ = []string{ - models.EnvRepoUsername + "=" + username, - models.EnvRepoName + "=" + reponame, - models.EnvPusherName + "=" + ctx.User.Name, - models.EnvPusherID + fmt.Sprintf("=%d", ctx.User.ID), - models.EnvIsDeployKey + "=false", - models.EnvAppURL + "=" + setting.AppURL, + repo_module.EnvRepoUsername + "=" + username, + repo_module.EnvRepoName + "=" + reponame, + repo_module.EnvPusherName + "=" + ctx.Doer.Name, + repo_module.EnvPusherID + fmt.Sprintf("=%d", ctx.Doer.ID), + repo_module.EnvAppURL + "=" + setting.AppURL, } - if !ctx.User.KeepEmailPrivate { - environ = append(environ, models.EnvPusherEmail+"="+ctx.User.Email) + if !ctx.Doer.KeepEmailPrivate { + environ = append(environ, repo_module.EnvPusherEmail+"="+ctx.Doer.Email) } if isWiki { - environ = append(environ, models.EnvRepoIsWiki+"=true") + environ = append(environ, repo_module.EnvRepoIsWiki+"=true") } else { - environ = append(environ, models.EnvRepoIsWiki+"=false") + environ = append(environ, repo_module.EnvRepoIsWiki+"=false") } } @@ -263,7 +249,7 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { return } - repo, err = repo_service.PushCreateRepo(ctx.User, owner, reponame) + repo, err = repo_service.PushCreateRepo(ctx.Doer, owner, reponame) if err != nil { log.Error("pushCreateRepo: %v", err) ctx.Status(http.StatusNotFound) @@ -284,7 +270,7 @@ func httpBase(ctx *context.Context) (h *serviceHandler) { } } - environ = append(environ, models.EnvRepoID+fmt.Sprintf("=%d", repo.ID)) + environ = append(environ, repo_module.EnvRepoID+fmt.Sprintf("=%d", repo.ID)) w := ctx.Resp r := ctx.Req @@ -328,7 +314,7 @@ func dummyInfoRefs(ctx *context.Context) { return } - refs, err := git.NewCommand(ctx, "receive-pack", "--stateless-rpc", "--advertise-refs", ".").RunInDirBytes(tmpDir) + refs, _, err := git.NewCommand(ctx, "receive-pack", "--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Dir: tmpDir}) if err != nil { log.Error(fmt.Sprintf("%v - %s", err, string(refs))) } @@ -412,7 +398,7 @@ func (h *serviceHandler) sendFile(contentType, file string) { var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`) func getGitConfig(ctx gocontext.Context, option, dir string) string { - out, err := git.NewCommand(ctx, "config", option).RunInDir(dir) + out, _, err := git.NewCommand(ctx, "config", option).RunStdString(&git.RunOpts{Dir: dir}) if err != nil { log.Error("%v - %s", err, out) } @@ -487,13 +473,12 @@ func serviceRPC(ctx gocontext.Context, h serviceHandler, service string) { var stderr bytes.Buffer cmd := git.NewCommand(h.r.Context(), service, "--stateless-rpc", h.dir) cmd.SetDescription(fmt.Sprintf("%s %s %s [repo_path: %s]", git.GitExecutable, service, "--stateless-rpc", h.dir)) - if err := cmd.RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: h.dir, - Env: append(os.Environ(), h.environ...), - Stdout: h.w, - Stdin: reqBody, - Stderr: 
&stderr, + if err := cmd.Run(&git.RunOpts{ + Dir: h.dir, + Env: append(os.Environ(), h.environ...), + Stdout: h.w, + Stdin: reqBody, + Stderr: &stderr, }); err != nil { if err.Error() != "signal: killed" { log.Error("Fail to serve RPC(%s) in %s: %v - %s", service, h.dir, err, stderr.String()) @@ -527,7 +512,7 @@ func getServiceType(r *http.Request) string { } func updateServerInfo(ctx gocontext.Context, dir string) []byte { - out, err := git.NewCommand(ctx, "update-server-info").RunInDirBytes(dir) + out, _, err := git.NewCommand(ctx, "update-server-info").RunStdBytes(&git.RunOpts{Dir: dir}) if err != nil { log.Error(fmt.Sprintf("%v - %s", err, string(out))) } @@ -557,7 +542,7 @@ func GetInfoRefs(ctx *context.Context) { } h.environ = append(os.Environ(), h.environ...) - refs, err := git.NewCommand(ctx, service, "--stateless-rpc", "--advertise-refs", ".").RunInDirTimeoutEnv(h.environ, -1, h.dir) + refs, _, err := git.NewCommand(ctx, service, "--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.dir}) if err != nil { log.Error(fmt.Sprintf("%v - %s", err, string(refs))) } diff --git a/routers/web/repo/issue.go b/routers/web/repo/issue.go index a81b1f1962..7ddeb05f71 100644 --- a/routers/web/repo/issue.go +++ b/routers/web/repo/issue.go @@ -7,6 +7,7 @@ package repo import ( "bytes" + stdCtx "context" "errors" "fmt" "io" @@ -16,9 +17,14 @@ import ( "path" "strconv" "strings" + "time" "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" + project_model "code.gitea.io/gitea/models/project" + pull_model "code.gitea.io/gitea/models/pull" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -32,6 +38,8 @@ import ( "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/templates/vars" + "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/upload" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" @@ -40,8 +48,6 @@ import ( "code.gitea.io/gitea/services/forms" issue_service "code.gitea.io/gitea/services/issue" pull_service "code.gitea.io/gitea/services/pull" - - "github.com/unknwon/com" ) const ( @@ -77,7 +83,7 @@ func MustAllowUserComment(ctx *context.Context) { return } - if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.User.IsAdmin { + if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.Doer.IsAdmin { ctx.Flash.Error(ctx.Tr("repo.issues.comment_on_locked")) ctx.Redirect(issue.HTMLURL()) return @@ -107,9 +113,9 @@ func MustAllowPulls(ctx *context.Context) { } // User can send pull request if owns a forked repository. 
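Editor's note: the http.go hunks above migrate raw git invocations from the old RunInDir*/RunWithContext helpers to a single git.RunOpts struct. A rough sketch of the new invocation style, assuming RunStdString/RunStdBytes return (output, stderr, error) as the converted call sites suggest; the concrete arguments ("core.bare", "upload-pack", repoPath) and imports are illustrative, not part of this patch.

// Read a config value from a repository directory.
out, _, err := git.NewCommand(ctx, "config", "core.bare").
	RunStdString(&git.RunOpts{Dir: repoPath})
if err != nil {
	log.Error("git config: %v - %s", err, out)
}

// Advertise refs for the smart HTTP protocol with an explicit environment.
refs, _, err := git.NewCommand(ctx, "upload-pack", "--stateless-rpc", "--advertise-refs", ".").
	RunStdBytes(&git.RunOpts{Dir: repoPath, Env: os.Environ()})
if err != nil {
	log.Error("advertise-refs: %v - %s", err, string(refs))
}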
- if ctx.IsSigned && repo_model.HasForkedRepo(ctx.User.ID, ctx.Repo.Repository.ID) { + if ctx.IsSigned && repo_model.HasForkedRepo(ctx.Doer.ID, ctx.Repo.Repository.ID) { ctx.Repo.PullRequest.Allowed = true - ctx.Repo.PullRequest.HeadInfoSubURL = url.PathEscape(ctx.User.Name) + ":" + util.PathEscapeSegments(ctx.Repo.BranchName) + ctx.Repo.PullRequest.HeadInfoSubURL = url.PathEscape(ctx.Doer.Name) + ":" + util.PathEscapeSegments(ctx.Repo.BranchName) } } @@ -133,13 +139,13 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption uti if ctx.IsSigned { switch viewType { case "created_by": - posterID = ctx.User.ID + posterID = ctx.Doer.ID case "mentioned": - mentionedID = ctx.User.ID + mentionedID = ctx.Doer.ID case "assigned": - assigneeID = ctx.User.ID + assigneeID = ctx.Doer.ID case "review_requested": - reviewRequestedID = ctx.User.ID + reviewRequestedID = ctx.Doer.ID } } @@ -228,7 +234,7 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption uti Page: pager.Paginater.Current(), PageSize: setting.UI.IssuePagingNum, }, - RepoIDs: []int64{repo.ID}, + RepoID: repo.ID, AssigneeID: assigneeID, PosterID: posterID, MentionedID: mentionedID, @@ -259,20 +265,21 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption uti // Check read status if !ctx.IsSigned { issues[i].IsRead = true - } else if err = issues[i].GetIsRead(ctx.User.ID); err != nil { + } else if err = issues[i].GetIsRead(ctx.Doer.ID); err != nil { ctx.ServerError("GetIsRead", err) return } } - commitStatus, err := pull_service.GetIssuesLastCommitStatus(ctx, issues) + commitStatuses, lastStatus, err := pull_service.GetIssuesAllCommitStatus(ctx, issues) if err != nil { - ctx.ServerError("GetIssuesLastCommitStatus", err) + ctx.ServerError("GetIssuesAllCommitStatus", err) return } ctx.Data["Issues"] = issues - ctx.Data["CommitStatus"] = commitStatus + ctx.Data["CommitLastStatus"] = lastStatus + ctx.Data["CommitStatuses"] = commitStatuses // Get assignees. 
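Editor's note: the issue list above now filters by a single RepoID field and resolves the view-type filters against ctx.Doer. A hedged sketch of the resulting query; the models.Issues call and the models.IssuesOptions type name are assumptions for illustration, while the fields used are the ones shown in the hunk above.

// Hypothetical listing call: fetch one page of issues created by the doer.
issues, err := models.Issues(&models.IssuesOptions{
	ListOptions: db.ListOptions{
		Page:     1,
		PageSize: setting.UI.IssuePagingNum,
	},
	RepoID:   repo.ID,     // was RepoIDs: []int64{repo.ID} before this change
	PosterID: ctx.Doer.ID, // the "created_by" view
})
if err != nil {
	ctx.ServerError("Issues", err)
	return
}
ctx.Data["Issues"] = issues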
ctx.Data["Assignees"], err = models.GetRepoAssignees(repo) @@ -335,9 +342,9 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption uti } if ctx.Repo.CanWriteIssuesOrPulls(ctx.Params(":type") == "pulls") { - projects, _, err := models.GetProjects(models.ProjectSearchOptions{ + projects, _, err := project_model.GetProjects(project_model.SearchOptions{ RepoID: repo.ID, - Type: models.ProjectTypeRepository, + Type: project_model.TypeRepository, IsClosed: util.OptionalBoolOf(isShowClosed), }) if err != nil { @@ -399,7 +406,7 @@ func Issues(ctx *context.Context) { var err error // Get milestones - ctx.Data["Milestones"], _, err = models.GetMilestones(models.GetMilestonesOption{ + ctx.Data["Milestones"], _, err = issues_model.GetMilestones(issues_model.GetMilestonesOption{ RepoID: ctx.Repo.Repository.ID, State: api.StateType(ctx.FormString("state")), }) @@ -416,7 +423,7 @@ func Issues(ctx *context.Context) { // RetrieveRepoMilestonesAndAssignees find all the milestones and assignees of a repository func RetrieveRepoMilestonesAndAssignees(ctx *context.Context, repo *repo_model.Repository) { var err error - ctx.Data["OpenMilestones"], _, err = models.GetMilestones(models.GetMilestonesOption{ + ctx.Data["OpenMilestones"], _, err = issues_model.GetMilestones(issues_model.GetMilestonesOption{ RepoID: repo.ID, State: api.StateOpen, }) @@ -424,7 +431,7 @@ func RetrieveRepoMilestonesAndAssignees(ctx *context.Context, repo *repo_model.R ctx.ServerError("GetMilestones", err) return } - ctx.Data["ClosedMilestones"], _, err = models.GetMilestones(models.GetMilestonesOption{ + ctx.Data["ClosedMilestones"], _, err = issues_model.GetMilestones(issues_model.GetMilestonesOption{ RepoID: repo.ID, State: api.StateClosed, }) @@ -445,22 +452,22 @@ func RetrieveRepoMilestonesAndAssignees(ctx *context.Context, repo *repo_model.R func retrieveProjects(ctx *context.Context, repo *repo_model.Repository) { var err error - ctx.Data["OpenProjects"], _, err = models.GetProjects(models.ProjectSearchOptions{ + ctx.Data["OpenProjects"], _, err = project_model.GetProjects(project_model.SearchOptions{ RepoID: repo.ID, Page: -1, IsClosed: util.OptionalBoolFalse, - Type: models.ProjectTypeRepository, + Type: project_model.TypeRepository, }) if err != nil { ctx.ServerError("GetProjects", err) return } - ctx.Data["ClosedProjects"], _, err = models.GetProjects(models.ProjectSearchOptions{ + ctx.Data["ClosedProjects"], _, err = project_model.GetProjects(project_model.SearchOptions{ RepoID: repo.ID, Page: -1, IsClosed: util.OptionalBoolTrue, - Type: models.ProjectTypeRepository, + Type: project_model.TypeRepository, }) if err != nil { ctx.ServerError("GetProjects", err) @@ -471,7 +478,7 @@ func retrieveProjects(ctx *context.Context, repo *repo_model.Repository) { // repoReviewerSelection items to bee shown type repoReviewerSelection struct { IsTeam bool - Team *models.Team + Team *organization.Team User *user_model.User Review *models.Review CanChange bool @@ -504,7 +511,7 @@ func RetrieveRepoReviewers(ctx *context.Context, repo *repo_model.Repository, is pullReviews []*repoReviewerSelection reviewersResult []*repoReviewerSelection teamReviewersResult []*repoReviewerSelection - teamReviewers []*models.Team + teamReviewers []*organization.Team reviewers []*user_model.User ) @@ -514,7 +521,7 @@ func RetrieveRepoReviewers(ctx *context.Context, repo *repo_model.Repository, is posterID = 0 } - reviewers, err = models.GetReviewers(repo, ctx.User.ID, posterID) + reviewers, err = models.GetReviewers(repo, ctx.Doer.ID, 
posterID) if err != nil { ctx.ServerError("GetReviewers", err) return @@ -551,11 +558,11 @@ func RetrieveRepoReviewers(ctx *context.Context, repo *repo_model.Repository, is if ctx.Repo.IsAdmin() { // Admin can dismiss or re-request any review requests tmp.CanChange = true - } else if ctx.User != nil && ctx.User.ID == review.ReviewerID && review.Type == models.ReviewTypeRequest { + } else if ctx.Doer != nil && ctx.Doer.ID == review.ReviewerID && review.Type == models.ReviewTypeRequest { // A user can refuse review requests tmp.CanChange = true - } else if (canChooseReviewer || (ctx.User != nil && ctx.User.ID == issue.PosterID)) && review.Type != models.ReviewTypeRequest && - ctx.User.ID != review.ReviewerID { + } else if (canChooseReviewer || (ctx.Doer != nil && ctx.Doer.ID == issue.PosterID)) && review.Type != models.ReviewTypeRequest && + ctx.Doer.ID != review.ReviewerID { // The poster of the PR, a manager, or official reviewers can re-request review from other reviewers tmp.CanChange = true } @@ -586,7 +593,7 @@ func RetrieveRepoReviewers(ctx *context.Context, repo *repo_model.Repository, is item.User = item.Review.Reviewer } else if item.Review.ReviewerTeamID > 0 { if err = item.Review.LoadReviewerTeam(); err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { continue } ctx.ServerError("LoadReviewerTeam", err) @@ -699,14 +706,12 @@ func RetrieveRepoMetas(ctx *context.Context, repo *repo_model.Repository, isPull ctx.Data["Branches"] = brs // Contains true if the user can create issue dependencies - ctx.Data["CanCreateIssueDependencies"] = ctx.Repo.CanCreateIssueDependencies(ctx.User, isPull) + ctx.Data["CanCreateIssueDependencies"] = ctx.Repo.CanCreateIssueDependencies(ctx.Doer, isPull) return labels } func getFileContentFromDefaultBranch(ctx *context.Context, filename string) (string, bool) { - var bytes []byte - if ctx.Repo.Commit == nil { var err error ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch) @@ -727,7 +732,7 @@ func getFileContentFromDefaultBranch(ctx *context.Context, filename string) (str return "", false } defer r.Close() - bytes, err = io.ReadAll(r) + bytes, err := io.ReadAll(r) if err != nil { return "", false } @@ -788,7 +793,6 @@ func NewIssue(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("repo.issues.new") ctx.Data["PageIsIssueList"] = true ctx.Data["NewIssueChooseTemplate"] = len(ctx.IssueTemplatesFromDefaultBranch()) > 0 - ctx.Data["RequireHighlightJS"] = true ctx.Data["RequireTribute"] = true ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes title := ctx.FormString("title") @@ -802,7 +806,7 @@ func NewIssue(ctx *context.Context) { milestoneID := ctx.FormInt64("milestone") if milestoneID > 0 { - milestone, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, milestoneID) + milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID) if err != nil { log.Error("GetMilestoneByID: %d: %v", milestoneID, err) } else { @@ -813,7 +817,7 @@ func NewIssue(ctx *context.Context) { projectID := ctx.FormInt64("project") if projectID > 0 { - project, err := models.GetProjectByID(projectID) + project, err := project_model.GetProjectByID(projectID) if err != nil { log.Error("GetProjectByID: %d: %v", projectID, err) } else if project.RepoID != ctx.Repo.Repository.ID { @@ -843,12 +847,19 @@ func NewIssue(ctx *context.Context) { func NewIssueChooseTemplate(ctx *context.Context) { ctx.Data["Title"] = 
ctx.Tr("repo.issues.new") ctx.Data["PageIsIssueList"] = true - ctx.Data["milestone"] = ctx.FormInt64("milestone") issueTemplates := ctx.IssueTemplatesFromDefaultBranch() - ctx.Data["NewIssueChooseTemplate"] = len(issueTemplates) > 0 ctx.Data["IssueTemplates"] = issueTemplates + if len(issueTemplates) == 0 { + // The "issues/new" and "issues/new/choose" share the same query parameters "project" and "milestone", if no template here, just redirect to the "issues/new" page with these parameters. + ctx.Redirect(fmt.Sprintf("%s/issues/new?%s", ctx.Repo.Repository.HTMLURL(), ctx.Req.URL.RawQuery), http.StatusSeeOther) + return + } + + ctx.Data["milestone"] = ctx.FormInt64("milestone") + ctx.Data["project"] = ctx.FormInt64("project") + ctx.HTML(http.StatusOK, tplIssueChoose) } @@ -859,7 +870,7 @@ func DeleteIssue(ctx *context.Context) { return } - if err := issue_service.DeleteIssue(ctx.User, ctx.Repo.GitRepo, issue); err != nil { + if err := issue_service.DeleteIssue(ctx.Doer, ctx.Repo.GitRepo, issue); err != nil { ctx.ServerError("DeleteIssueByID", err) return } @@ -904,7 +915,7 @@ func ValidateRepoMetas(ctx *context.Context, form forms.CreateIssueForm, isPull // Check milestone. milestoneID := form.MilestoneID if milestoneID > 0 { - milestone, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, milestoneID) + milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID) if err != nil { ctx.ServerError("GetMilestoneByID", err) return nil, nil, 0, 0 @@ -918,7 +929,7 @@ func ValidateRepoMetas(ctx *context.Context, form forms.CreateIssueForm, isPull } if form.ProjectID > 0 { - p, err := models.GetProjectByID(form.ProjectID) + p, err := project_model.GetProjectByID(form.ProjectID) if err != nil { ctx.ServerError("GetProjectByID", err) return nil, nil, 0, 0 @@ -975,7 +986,6 @@ func NewIssuePost(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("repo.issues.new") ctx.Data["PageIsIssueList"] = true ctx.Data["NewIssueChooseTemplate"] = len(ctx.IssueTemplatesFromDefaultBranch()) > 0 - ctx.Data["RequireHighlightJS"] = true ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled upload.AddUploadContext(ctx, "comment") @@ -1008,8 +1018,8 @@ func NewIssuePost(ctx *context.Context) { RepoID: repo.ID, Repo: repo, Title: form.Title, - PosterID: ctx.User.ID, - Poster: ctx.User, + PosterID: ctx.Doer.ID, + Poster: ctx.Doer, MilestoneID: milestoneID, Content: form.Content, Ref: form.Ref, @@ -1025,7 +1035,7 @@ func NewIssuePost(ctx *context.Context) { } if projectID > 0 { - if err := models.ChangeProjectAssign(issue, ctx.User, projectID); err != nil { + if err := models.ChangeProjectAssign(issue, ctx.Doer, projectID); err != nil { ctx.ServerError("ChangeProjectAssign", err) return } @@ -1040,8 +1050,8 @@ func NewIssuePost(ctx *context.Context) { } // roleDescriptor returns the Role Descriptor for a comment in/with the given repo, poster and issue -func roleDescriptor(repo *repo_model.Repository, poster *user_model.User, issue *models.Issue) (models.RoleDescriptor, error) { - perm, err := models.GetUserRepoPermission(repo, poster) +func roleDescriptor(ctx stdCtx.Context, repo *repo_model.Repository, poster *user_model.User, issue *models.Issue) (models.RoleDescriptor, error) { + perm, err := models.GetUserRepoPermission(ctx, repo, poster) if err != nil { return models.RoleDescriptorNone, err } @@ -1103,7 +1113,13 @@ func ViewIssue(ctx *context.Context) { if 
extIssueUnit.ExternalTrackerConfig().ExternalTrackerStyle == markup.IssueNameStyleNumeric || extIssueUnit.ExternalTrackerConfig().ExternalTrackerStyle == "" { metas := ctx.Repo.Repository.ComposeMetas() metas["index"] = ctx.Params(":index") - ctx.Redirect(com.Expand(extIssueUnit.ExternalTrackerConfig().ExternalTrackerFormat, metas)) + res, err := vars.Expand(extIssueUnit.ExternalTrackerConfig().ExternalTrackerFormat, metas) + if err != nil { + log.Error("unable to expand template vars for issue url. issue: %s, err: %v", metas["index"], err) + ctx.ServerError("Expand", err) + return + } + ctx.Redirect(res) return } } else if err != nil && !repo_model.IsErrUnitTypeNotExist(err) { @@ -1158,7 +1174,6 @@ func ViewIssue(ctx *context.Context) { ctx.Data["IssueType"] = "all" } - ctx.Data["RequireHighlightJS"] = true ctx.Data["RequireTribute"] = true ctx.Data["IsProjectsEnabled"] = ctx.Repo.CanRead(unit.TypeProjects) ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled @@ -1177,10 +1192,10 @@ func ViewIssue(ctx *context.Context) { ctx.Data["Title"] = fmt.Sprintf("#%d - %s", issue.Index, issue.Title) iw := new(models.IssueWatch) - if ctx.User != nil { - iw.UserID = ctx.User.ID + if ctx.Doer != nil { + iw.UserID = ctx.Doer.ID iw.IssueID = issue.ID - iw.IsWatching, err = models.CheckIssueWatch(ctx.User, issue) + iw.IsWatching, err = models.CheckIssueWatch(ctx.Doer, issue) if err != nil { ctx.ServerError("CheckIssueWatch", err) return @@ -1260,8 +1275,8 @@ func ViewIssue(ctx *context.Context) { if issue.IsPull { canChooseReviewer := ctx.Repo.CanWrite(unit.TypePullRequests) - if !canChooseReviewer && ctx.User != nil && ctx.IsSigned { - canChooseReviewer, err = models.IsOfficialReviewer(issue, ctx.User) + if !canChooseReviewer && ctx.Doer != nil && ctx.IsSigned { + canChooseReviewer, err = models.IsOfficialReviewer(issue, ctx.Doer) if err != nil { ctx.ServerError("IsOfficialReviewer", err) return @@ -1276,7 +1291,7 @@ func ViewIssue(ctx *context.Context) { if ctx.IsSigned { // Update issue-user. 
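Editor's note: ViewIssue above swaps the unmaintained com.Expand for the in-tree vars.Expand, which reports malformed templates instead of silently producing a bad redirect URL. A small sketch of the substitution; the example tracker URL and the placeholder keys other than "index" are assumptions, not taken from this patch.

// Expand an external tracker format such as
// "https://tracker.example.com/{user}/{repo}/issues/{index}".
metas := map[string]string{
	"user":  "someorg",
	"repo":  "somerepo",
	"index": "42",
}
link, err := vars.Expand("https://tracker.example.com/{user}/{repo}/issues/{index}", metas)
if err != nil {
	// Malformed templates now surface as an error instead of a broken redirect.
	log.Error("unable to expand external tracker format: %v", err)
	return
}
ctx.Redirect(link)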
- if err = issue.ReadBy(ctx.User.ID); err != nil { + if err = issue.ReadBy(ctx, ctx.Doer.ID); err != nil { ctx.ServerError("ReadBy", err) return } @@ -1292,11 +1307,11 @@ func ViewIssue(ctx *context.Context) { if ctx.Repo.Repository.IsTimetrackerEnabled() { if ctx.IsSigned { // Deal with the stopwatch - ctx.Data["IsStopwatchRunning"] = models.StopwatchExists(ctx.User.ID, issue.ID) + ctx.Data["IsStopwatchRunning"] = models.StopwatchExists(ctx.Doer.ID, issue.ID) if !ctx.Data["IsStopwatchRunning"].(bool) { var exists bool var sw *models.Stopwatch - if exists, sw, err = models.HasUserStopwatch(ctx.User.ID); err != nil { + if exists, sw, err = models.HasUserStopwatch(ctx.Doer.ID); err != nil { ctx.ServerError("HasUserStopwatch", err) return } @@ -1308,7 +1323,7 @@ func ViewIssue(ctx *context.Context) { ctx.ServerError("GetIssueByID", err) return } - if err = otherIssue.LoadRepo(); err != nil { + if err = otherIssue.LoadRepo(ctx); err != nil { ctx.ServerError("LoadRepo", err) return } @@ -1316,7 +1331,7 @@ func ViewIssue(ctx *context.Context) { ctx.Data["OtherStopwatchURL"] = otherIssue.HTMLURL() } } - ctx.Data["CanUseTimetracker"] = ctx.Repo.CanUseTimetracker(issue, ctx.User) + ctx.Data["CanUseTimetracker"] = ctx.Repo.CanUseTimetracker(issue, ctx.Doer) } else { ctx.Data["CanUseTimetracker"] = false } @@ -1327,12 +1342,12 @@ func ViewIssue(ctx *context.Context) { } // Check if the user can use the dependencies - ctx.Data["CanCreateIssueDependencies"] = ctx.Repo.CanCreateIssueDependencies(ctx.User, issue.IsPull) + ctx.Data["CanCreateIssueDependencies"] = ctx.Repo.CanCreateIssueDependencies(ctx.Doer, issue.IsPull) // check if dependencies can be created across repositories ctx.Data["AllowCrossRepositoryDependencies"] = setting.Service.AllowCrossRepositoryDependencies - if issue.ShowRole, err = roleDescriptor(repo, issue.Poster, issue); err != nil { + if issue.ShowRole, err = roleDescriptor(ctx, repo, issue.Poster, issue); err != nil { ctx.ServerError("roleDescriptor", err) return } @@ -1371,7 +1386,7 @@ func ViewIssue(ctx *context.Context) { continue } - comment.ShowRole, err = roleDescriptor(repo, comment.Poster, issue) + comment.ShowRole, err = roleDescriptor(ctx, repo, comment.Poster, issue) if err != nil { ctx.ServerError("roleDescriptor", err) return @@ -1388,7 +1403,7 @@ func ViewIssue(ctx *context.Context) { ctx.ServerError("LoadMilestone", err) return } - ghostMilestone := &models.Milestone{ + ghostMilestone := &issues_model.Milestone{ ID: -1, Name: ctx.Tr("repo.issues.deleted_milestone"), } @@ -1405,7 +1420,7 @@ func ViewIssue(ctx *context.Context) { return } - ghostProject := &models.Project{ + ghostProject := &project_model.Project{ ID: -1, Title: ctx.Tr("repo.issues.deleted_project"), } @@ -1470,7 +1485,7 @@ func ViewIssue(ctx *context.Context) { continue } - c.ShowRole, err = roleDescriptor(repo, c.Poster, issue) + c.ShowRole, err = roleDescriptor(ctx, repo, c.Poster, issue) if err != nil { ctx.ServerError("roleDescriptor", err) return @@ -1508,40 +1523,43 @@ func ViewIssue(ctx *context.Context) { ctx.Data["AllowMerge"] = false if ctx.IsSigned { - if err := pull.LoadHeadRepo(); err != nil { + if err := pull.LoadHeadRepoCtx(ctx); err != nil { log.Error("LoadHeadRepo: %v", err) - } else if pull.HeadRepo != nil && pull.HeadBranch != pull.HeadRepo.DefaultBranch { - perm, err := models.GetUserRepoPermission(pull.HeadRepo, ctx.User) + } else if pull.HeadRepo != nil { + perm, err := models.GetUserRepoPermission(ctx, pull.HeadRepo, ctx.Doer) if err != nil { 
ctx.ServerError("GetUserRepoPermission", err) return } if perm.CanWrite(unit.TypeCode) { // Check if branch is not protected - if protected, err := models.IsProtectedBranch(pull.HeadRepo.ID, pull.HeadBranch); err != nil { - log.Error("IsProtectedBranch: %v", err) - } else if !protected { - canDelete = true - ctx.Data["DeleteBranchLink"] = issue.Link() + "/cleanup" + if pull.HeadBranch != pull.HeadRepo.DefaultBranch { + if protected, err := models.IsProtectedBranch(pull.HeadRepo.ID, pull.HeadBranch); err != nil { + log.Error("IsProtectedBranch: %v", err) + } else if !protected { + canDelete = true + ctx.Data["DeleteBranchLink"] = issue.Link() + "/cleanup" + } } + ctx.Data["CanWriteToHeadRepo"] = true } } - if err := pull.LoadBaseRepo(); err != nil { + if err := pull.LoadBaseRepoCtx(ctx); err != nil { log.Error("LoadBaseRepo: %v", err) } - perm, err := models.GetUserRepoPermission(pull.BaseRepo, ctx.User) + perm, err := models.GetUserRepoPermission(ctx, pull.BaseRepo, ctx.Doer) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return } - ctx.Data["AllowMerge"], err = pull_service.IsUserAllowedToMerge(pull, perm, ctx.User) + ctx.Data["AllowMerge"], err = pull_service.IsUserAllowedToMerge(ctx, pull, perm, ctx.Doer) if err != nil { ctx.ServerError("IsUserAllowedToMerge", err) return } - if ctx.Data["CanMarkConversation"], err = models.CanMarkConversation(issue, ctx.User); err != nil { + if ctx.Data["CanMarkConversation"], err = models.CanMarkConversation(issue, ctx.Doer); err != nil { ctx.ServerError("CanMarkConversation", err) return } @@ -1554,26 +1572,42 @@ func ViewIssue(ctx *context.Context) { } prConfig := prUnit.PullRequestsConfig() + var mergeStyle repo_model.MergeStyle // Check correct values and select default if ms, ok := ctx.Data["MergeStyle"].(repo_model.MergeStyle); !ok || !prConfig.IsMergeStyleAllowed(ms) { defaultMergeStyle := prConfig.GetDefaultMergeStyle() if prConfig.IsMergeStyleAllowed(defaultMergeStyle) && !ok { - ctx.Data["MergeStyle"] = defaultMergeStyle + mergeStyle = defaultMergeStyle } else if prConfig.AllowMerge { - ctx.Data["MergeStyle"] = repo_model.MergeStyleMerge + mergeStyle = repo_model.MergeStyleMerge } else if prConfig.AllowRebase { - ctx.Data["MergeStyle"] = repo_model.MergeStyleRebase + mergeStyle = repo_model.MergeStyleRebase } else if prConfig.AllowRebaseMerge { - ctx.Data["MergeStyle"] = repo_model.MergeStyleRebaseMerge + mergeStyle = repo_model.MergeStyleRebaseMerge } else if prConfig.AllowSquash { - ctx.Data["MergeStyle"] = repo_model.MergeStyleSquash + mergeStyle = repo_model.MergeStyleSquash } else if prConfig.AllowManualMerge { - ctx.Data["MergeStyle"] = repo_model.MergeStyleManuallyMerged - } else { - ctx.Data["MergeStyle"] = "" + mergeStyle = repo_model.MergeStyleManuallyMerged } } + + ctx.Data["MergeStyle"] = mergeStyle + + defaultMergeMessage, err := pull_service.GetDefaultMergeMessage(ctx.Repo.GitRepo, pull, mergeStyle) + if err != nil { + ctx.ServerError("GetDefaultMergeMessage", err) + return + } + ctx.Data["DefaultMergeMessage"] = defaultMergeMessage + + defaultSquashMergeMessage, err := pull_service.GetDefaultMergeMessage(ctx.Repo.GitRepo, pull, repo_model.MergeStyleSquash) + if err != nil { + ctx.ServerError("GetDefaultSquashMergeMessage", err) + return + } + ctx.Data["DefaultSquashMergeMessage"] = defaultSquashMergeMessage + if err = pull.LoadProtectedBranch(); err != nil { ctx.ServerError("LoadProtectedBranch", err) return @@ -1581,15 +1615,14 @@ func ViewIssue(ctx *context.Context) { ctx.Data["ShowMergeInstructions"] = true 
if pull.ProtectedBranch != nil { var showMergeInstructions bool - if ctx.User != nil { - showMergeInstructions = pull.ProtectedBranch.CanUserPush(ctx.User.ID) + if ctx.Doer != nil { + showMergeInstructions = pull.ProtectedBranch.CanUserPush(ctx.Doer.ID) } - cnt := pull.ProtectedBranch.GetGrantedApprovalsCount(pull) - ctx.Data["IsBlockedByApprovals"] = !pull.ProtectedBranch.HasEnoughApprovals(pull) - ctx.Data["IsBlockedByRejection"] = pull.ProtectedBranch.MergeBlockedByRejectedReview(pull) - ctx.Data["IsBlockedByOfficialReviewRequests"] = pull.ProtectedBranch.MergeBlockedByOfficialReviewRequests(pull) + ctx.Data["IsBlockedByApprovals"] = !pull.ProtectedBranch.HasEnoughApprovals(ctx, pull) + ctx.Data["IsBlockedByRejection"] = pull.ProtectedBranch.MergeBlockedByRejectedReview(ctx, pull) + ctx.Data["IsBlockedByOfficialReviewRequests"] = pull.ProtectedBranch.MergeBlockedByOfficialReviewRequests(ctx, pull) ctx.Data["IsBlockedByOutdatedBranch"] = pull.ProtectedBranch.MergeBlockedByOutdatedBranch(pull) - ctx.Data["GrantedApprovals"] = cnt + ctx.Data["GrantedApprovals"] = pull.ProtectedBranch.GetGrantedApprovalsCount(ctx, pull) ctx.Data["RequireSigned"] = pull.ProtectedBranch.RequireSignedCommits ctx.Data["ChangedProtectedFiles"] = pull.ChangedProtectedFiles ctx.Data["IsBlockedByChangedProtectedFiles"] = len(pull.ChangedProtectedFiles) != 0 @@ -1597,8 +1630,8 @@ func ViewIssue(ctx *context.Context) { ctx.Data["ShowMergeInstructions"] = showMergeInstructions } ctx.Data["WillSign"] = false - if ctx.User != nil { - sign, key, _, err := asymkey_service.SignMerge(ctx, pull, ctx.User, pull.BaseRepo.RepoPath(), pull.BaseBranch, pull.GetGitRefName()) + if ctx.Doer != nil { + sign, key, _, err := asymkey_service.SignMerge(ctx, pull, ctx.Doer, pull.BaseRepo.RepoPath(), pull.BaseBranch, pull.GetGitRefName()) ctx.Data["WillSign"] = sign ctx.Data["SigningKey"] = key if err != nil { @@ -1619,7 +1652,7 @@ func ViewIssue(ctx *context.Context) { (!pull.HasMerged || ctx.Data["HeadBranchCommitID"] == ctx.Data["PullHeadCommitID"]) if isPullBranchDeletable && pull.HasMerged { - exist, err := models.HasUnmergedPullRequestsByHeadInfo(pull.HeadRepoID, pull.HeadBranch) + exist, err := models.HasUnmergedPullRequestsByHeadInfo(ctx, pull.HeadRepoID, pull.HeadBranch) if err != nil { ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err) return @@ -1636,7 +1669,7 @@ func ViewIssue(ctx *context.Context) { if pull.CanAutoMerge() || pull.IsWorkInProgress() || pull.IsChecking() { return false } - if (ctx.User.IsAdmin || ctx.Repo.IsAdmin()) && prConfig.AllowManualMerge { + if (ctx.Doer.IsAdmin || ctx.Repo.IsAdmin()) && prConfig.AllowManualMerge { return true } @@ -1644,6 +1677,13 @@ func ViewIssue(ctx *context.Context) { } ctx.Data["StillCanManualMerge"] = stillCanManualMerge() + + // Check if there is a pending pr merge + ctx.Data["HasPendingPullRequestMerge"], ctx.Data["PendingPullRequestMerge"], err = pull_model.GetScheduledMergeByPullID(ctx, pull.ID) + if err != nil { + ctx.ServerError("GetScheduledMergeByPullID", err) + return + } } // Get Dependencies @@ -1663,16 +1703,16 @@ func ViewIssue(ctx *context.Context) { ctx.Data["Issue"] = issue ctx.Data["Reference"] = issue.Ref ctx.Data["SignInLink"] = setting.AppSubURL + "/user/login?redirect_to=" + url.QueryEscape(ctx.Data["Link"].(string)) - ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.User.ID) + ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID) ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) 
ctx.Data["HasProjectsWritePermission"] = ctx.Repo.CanWrite(unit.TypeProjects) - ctx.Data["IsRepoAdmin"] = ctx.IsSigned && (ctx.Repo.IsAdmin() || ctx.User.IsAdmin) + ctx.Data["IsRepoAdmin"] = ctx.IsSigned && (ctx.Repo.IsAdmin() || ctx.Doer.IsAdmin) ctx.Data["LockReasons"] = setting.Repository.Issue.LockReasons ctx.Data["RefEndName"] = git.RefEndName(issue.Ref) var hiddenCommentTypes *big.Int if ctx.IsSigned { - val, err := user_model.GetUserSetting(ctx.User.ID, user_model.SettingsKeyHiddenCommentTypes) + val, err := user_model.GetUserSetting(ctx.Doer.ID, user_model.SettingsKeyHiddenCommentTypes) if err != nil { ctx.ServerError("GetUserSetting", err) return @@ -1747,6 +1787,20 @@ func getActionIssues(ctx *context.Context) []*models.Issue { return issues } +// GetIssueInfo get an issue of a repository +func GetIssueInfo(ctx *context.Context) { + issue, err := models.GetIssueWithAttrsByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) + if err != nil { + if models.IsErrIssueNotExist(err) { + ctx.Error(http.StatusNotFound) + } else { + ctx.Error(http.StatusInternalServerError, "GetIssueByIndex", err.Error()) + } + return + } + ctx.JSON(http.StatusOK, convert.ToAPIIssue(issue)) +} + // UpdateIssueTitle change issue's title func UpdateIssueTitle(ctx *context.Context) { issue := GetActionIssue(ctx) @@ -1754,7 +1808,7 @@ func UpdateIssueTitle(ctx *context.Context) { return } - if !ctx.IsSigned || (!issue.IsPoster(ctx.User.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { + if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { ctx.Error(http.StatusForbidden) return } @@ -1765,7 +1819,7 @@ func UpdateIssueTitle(ctx *context.Context) { return } - if err := issue_service.ChangeTitle(issue, ctx.User, title); err != nil { + if err := issue_service.ChangeTitle(issue, ctx.Doer, title); err != nil { ctx.ServerError("ChangeTitle", err) return } @@ -1782,14 +1836,14 @@ func UpdateIssueRef(ctx *context.Context) { return } - if !ctx.IsSigned || (!issue.IsPoster(ctx.User.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) || issue.IsPull { + if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) || issue.IsPull { ctx.Error(http.StatusForbidden) return } ref := ctx.FormTrim("ref") - if err := issue_service.ChangeIssueRef(issue, ctx.User, ref); err != nil { + if err := issue_service.ChangeIssueRef(issue, ctx.Doer, ref); err != nil { ctx.ServerError("ChangeRef", err) return } @@ -1806,12 +1860,12 @@ func UpdateIssueContent(ctx *context.Context) { return } - if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { + if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { ctx.Error(http.StatusForbidden) return } - if err := issue_service.ChangeContent(issue, ctx.User, ctx.Req.FormValue("content")); err != nil { + if err := issue_service.ChangeContent(issue, ctx.Doer, ctx.Req.FormValue("content")); err != nil { ctx.ServerError("ChangeContent", err) return } @@ -1841,6 +1895,40 @@ func UpdateIssueContent(ctx *context.Context) { }) } +// UpdateIssueDeadline updates an issue deadline +func UpdateIssueDeadline(ctx *context.Context) { + form := web.GetForm(ctx).(*api.EditDeadlineOption) + issue, err := models.GetIssueByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) + if err != nil { + if models.IsErrIssueNotExist(err) { + ctx.NotFound("GetIssueByIndex", err) + } else { + ctx.Error(http.StatusInternalServerError, 
"GetIssueByIndex", err.Error()) + } + return + } + + if !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) { + ctx.Error(http.StatusForbidden, "", "Not repo writer") + return + } + + var deadlineUnix timeutil.TimeStamp + var deadline time.Time + if form.Deadline != nil && !form.Deadline.IsZero() { + deadline = time.Date(form.Deadline.Year(), form.Deadline.Month(), form.Deadline.Day(), + 23, 59, 59, 0, time.Local) + deadlineUnix = timeutil.TimeStamp(deadline.Unix()) + } + + if err := models.UpdateIssueDeadline(issue, deadlineUnix, ctx.Doer); err != nil { + ctx.Error(http.StatusInternalServerError, "UpdateIssueDeadline", err.Error()) + return + } + + ctx.JSON(http.StatusCreated, api.IssueDeadline{Deadline: &deadline}) +} + // UpdateIssueMilestone change issue's milestone func UpdateIssueMilestone(ctx *context.Context) { issues := getActionIssues(ctx) @@ -1855,7 +1943,7 @@ func UpdateIssueMilestone(ctx *context.Context) { continue } issue.MilestoneID = milestoneID - if err := issue_service.ChangeMilestoneAssign(issue, ctx.User, oldMilestoneID); err != nil { + if err := issue_service.ChangeMilestoneAssign(issue, ctx.Doer, oldMilestoneID); err != nil { ctx.ServerError("ChangeMilestoneAssign", err) return } @@ -1879,7 +1967,7 @@ func UpdateIssueAssignee(ctx *context.Context) { for _, issue := range issues { switch action { case "clear": - if err := issue_service.DeleteNotPassedAssignee(issue, ctx.User, []*user_model.User{}); err != nil { + if err := issue_service.DeleteNotPassedAssignee(issue, ctx.Doer, []*user_model.User{}); err != nil { ctx.ServerError("ClearAssignees", err) return } @@ -1900,7 +1988,7 @@ func UpdateIssueAssignee(ctx *context.Context) { return } - _, _, err = issue_service.ToggleAssignee(issue, ctx.User, assigneeID) + _, _, err = issue_service.ToggleAssignee(issue, ctx.Doer, assigneeID) if err != nil { ctx.ServerError("ToggleAssignee", err) return @@ -1924,12 +2012,12 @@ func UpdatePullReviewRequest(ctx *context.Context) { // TODO: Not support 'clear' now if action != "attach" && action != "detach" { - ctx.Status(403) + ctx.Status(http.StatusForbidden) return } for _, issue := range issues { - if err := issue.LoadRepo(); err != nil { + if err := issue.LoadRepo(ctx); err != nil { ctx.ServerError("issue.LoadRepo", err) return } @@ -1939,12 +2027,12 @@ func UpdatePullReviewRequest(ctx *context.Context) { "UpdatePullReviewRequest: refusing to add review request for non-PR issue %-v#%d", issue.Repo, issue.Index, ) - ctx.Status(403) + ctx.Status(http.StatusForbidden) return } if reviewID < 0 { // negative reviewIDs represent team requests - if err := issue.Repo.GetOwner(db.DefaultContext); err != nil { + if err := issue.Repo.GetOwner(ctx); err != nil { ctx.ServerError("issue.Repo.GetOwner", err) return } @@ -1954,13 +2042,13 @@ func UpdatePullReviewRequest(ctx *context.Context) { "UpdatePullReviewRequest: refusing to add team review request for %s#%d owned by non organization UID[%d]", issue.Repo.FullName(), issue.Index, issue.Repo.ID, ) - ctx.Status(403) + ctx.Status(http.StatusForbidden) return } - team, err := models.GetTeamByID(-reviewID) + team, err := organization.GetTeamByID(-reviewID) if err != nil { - ctx.ServerError("models.GetTeamByID", err) + ctx.ServerError("GetTeamByID", err) return } @@ -1968,11 +2056,11 @@ func UpdatePullReviewRequest(ctx *context.Context) { log.Warn( "UpdatePullReviewRequest: refusing to add team review request for UID[%d] team %s to %s#%d owned by UID[%d]", team.OrgID, team.Name, issue.Repo.FullName(), issue.Index, issue.Repo.ID) - ctx.Status(403) + 
ctx.Status(http.StatusForbidden) return } - err = issue_service.IsValidTeamReviewRequest(team, ctx.User, action == "attach", issue) + err = issue_service.IsValidTeamReviewRequest(ctx, team, ctx.Doer, action == "attach", issue) if err != nil { if models.IsErrNotValidReviewRequest(err) { log.Warn( @@ -1980,14 +2068,14 @@ func UpdatePullReviewRequest(ctx *context.Context) { team.OrgID, team.Name, issue.Repo.FullName(), issue.Index, issue.Repo.ID, err, ) - ctx.Status(403) + ctx.Status(http.StatusForbidden) return } ctx.ServerError("IsValidTeamReviewRequest", err) return } - _, err = issue_service.TeamReviewRequest(issue, ctx.User, team, action == "attach") + _, err = issue_service.TeamReviewRequest(issue, ctx.Doer, team, action == "attach") if err != nil { ctx.ServerError("TeamReviewRequest", err) return @@ -2003,14 +2091,14 @@ func UpdatePullReviewRequest(ctx *context.Context) { reviewID, issue.Repo, issue.Index, err, ) - ctx.Status(403) + ctx.Status(http.StatusForbidden) return } ctx.ServerError("GetUserByID", err) return } - err = issue_service.IsValidReviewRequest(reviewer, ctx.User, action == "attach", issue, nil) + err = issue_service.IsValidReviewRequest(ctx, reviewer, ctx.Doer, action == "attach", issue, nil) if err != nil { if models.IsErrNotValidReviewRequest(err) { log.Warn( @@ -2018,14 +2106,14 @@ func UpdatePullReviewRequest(ctx *context.Context) { reviewer, issue.Repo, issue.Index, err, ) - ctx.Status(403) + ctx.Status(http.StatusForbidden) return } ctx.ServerError("isValidReviewRequest", err) return } - _, err = issue_service.ReviewRequest(issue, ctx.User, reviewer, action == "attach") + _, err = issue_service.ReviewRequest(issue, ctx.Doer, reviewer, action == "attach") if err != nil { ctx.ServerError("ReviewRequest", err) return @@ -2037,6 +2125,339 @@ func UpdatePullReviewRequest(ctx *context.Context) { }) } +// SearchIssues searches for issues across the repositories that the user has access to +func SearchIssues(ctx *context.Context) { + before, since, err := context.GetQueryBeforeSince(ctx) + if err != nil { + ctx.Error(http.StatusUnprocessableEntity, err.Error()) + return + } + + var isClosed util.OptionalBool + switch ctx.FormString("state") { + case "closed": + isClosed = util.OptionalBoolTrue + case "all": + isClosed = util.OptionalBoolNone + default: + isClosed = util.OptionalBoolFalse + } + + // find repos user can access (for issue search) + opts := &models.SearchRepoOptions{ + Private: false, + AllPublic: true, + TopicOnly: false, + Collaborate: util.OptionalBoolNone, + // This needs to be a column that is not nil in fixtures or + // MySQL will return different results when sorting by null in some cases + OrderBy: db.SearchOrderByAlphabetically, + Actor: ctx.Doer, + } + if ctx.IsSigned { + opts.Private = true + opts.AllLimited = true + } + if ctx.FormString("owner") != "" { + owner, err := user_model.GetUserByName(ctx.FormString("owner")) + if err != nil { + if user_model.IsErrUserNotExist(err) { + ctx.Error(http.StatusBadRequest, "Owner not found", err.Error()) + } else { + ctx.Error(http.StatusInternalServerError, "GetUserByName", err.Error()) + } + return + } + opts.OwnerID = owner.ID + opts.AllLimited = false + opts.AllPublic = false + opts.Collaborate = util.OptionalBoolFalse + } + if ctx.FormString("team") != "" { + if ctx.FormString("owner") == "" { + ctx.Error(http.StatusBadRequest, "", "Owner organisation is required for filtering on team") + return + } + team, err := organization.GetTeam(opts.OwnerID, ctx.FormString("team")) + if err != nil { + if 
organization.IsErrTeamNotExist(err) { + ctx.Error(http.StatusBadRequest, "Team not found", err.Error()) + } else { + ctx.Error(http.StatusInternalServerError, "GetUserByName", err.Error()) + } + return + } + opts.TeamID = team.ID + } + + repoCond := models.SearchRepositoryCondition(opts) + repoIDs, _, err := models.SearchRepositoryIDs(opts) + if err != nil { + ctx.Error(http.StatusInternalServerError, "SearchRepositoryByName", err.Error()) + return + } + + var issues []*models.Issue + var filteredCount int64 + + keyword := ctx.FormTrim("q") + if strings.IndexByte(keyword, 0) >= 0 { + keyword = "" + } + var issueIDs []int64 + if len(keyword) > 0 && len(repoIDs) > 0 { + if issueIDs, err = issue_indexer.SearchIssuesByKeyword(ctx, repoIDs, keyword); err != nil { + ctx.Error(http.StatusInternalServerError, "SearchIssuesByKeyword", err.Error()) + return + } + } + + var isPull util.OptionalBool + switch ctx.FormString("type") { + case "pulls": + isPull = util.OptionalBoolTrue + case "issues": + isPull = util.OptionalBoolFalse + default: + isPull = util.OptionalBoolNone + } + + labels := ctx.FormTrim("labels") + var includedLabelNames []string + if len(labels) > 0 { + includedLabelNames = strings.Split(labels, ",") + } + + milestones := ctx.FormTrim("milestones") + var includedMilestones []string + if len(milestones) > 0 { + includedMilestones = strings.Split(milestones, ",") + } + + // this api is also used in UI, + // so the default limit is set to fit UI needs + limit := ctx.FormInt("limit") + if limit == 0 { + limit = setting.UI.IssuePagingNum + } else if limit > setting.API.MaxResponseItems { + limit = setting.API.MaxResponseItems + } + + // Only fetch the issues if we either don't have a keyword or the search returned issues + // This would otherwise return all issues if no issues were found by the search. 
+ if len(keyword) == 0 || len(issueIDs) > 0 || len(includedLabelNames) > 0 || len(includedMilestones) > 0 { + issuesOpt := &models.IssuesOptions{ + ListOptions: db.ListOptions{ + Page: ctx.FormInt("page"), + PageSize: limit, + }, + RepoCond: repoCond, + IsClosed: isClosed, + IssueIDs: issueIDs, + IncludedLabelNames: includedLabelNames, + IncludeMilestones: includedMilestones, + SortType: "priorityrepo", + PriorityRepoID: ctx.FormInt64("priority_repo_id"), + IsPull: isPull, + UpdatedBeforeUnix: before, + UpdatedAfterUnix: since, + } + + ctxUserID := int64(0) + if ctx.IsSigned { + ctxUserID = ctx.Doer.ID + } + + // Filter for: Created by User, Assigned to User, Mentioning User, Review of User Requested + if ctx.FormBool("created") { + issuesOpt.PosterID = ctxUserID + } + if ctx.FormBool("assigned") { + issuesOpt.AssigneeID = ctxUserID + } + if ctx.FormBool("mentioned") { + issuesOpt.MentionedID = ctxUserID + } + if ctx.FormBool("review_requested") { + issuesOpt.ReviewRequestedID = ctxUserID + } + + if issues, err = models.Issues(issuesOpt); err != nil { + ctx.Error(http.StatusInternalServerError, "Issues", err.Error()) + return + } + + issuesOpt.ListOptions = db.ListOptions{ + Page: -1, + } + if filteredCount, err = models.CountIssues(issuesOpt); err != nil { + ctx.Error(http.StatusInternalServerError, "CountIssues", err.Error()) + return + } + } + + ctx.SetTotalCountHeader(filteredCount) + ctx.JSON(http.StatusOK, convert.ToAPIIssueList(issues)) +} + +func getUserIDForFilter(ctx *context.Context, queryName string) int64 { + userName := ctx.FormString(queryName) + if len(userName) == 0 { + return 0 + } + + user, err := user_model.GetUserByName(userName) + if user_model.IsErrUserNotExist(err) { + ctx.NotFound("", err) + return 0 + } + + if err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return 0 + } + + return user.ID +} + +// ListIssues list the issues of a repository +func ListIssues(ctx *context.Context) { + before, since, err := context.GetQueryBeforeSince(ctx) + if err != nil { + ctx.Error(http.StatusUnprocessableEntity, err.Error()) + return + } + + var isClosed util.OptionalBool + switch ctx.FormString("state") { + case "closed": + isClosed = util.OptionalBoolTrue + case "all": + isClosed = util.OptionalBoolNone + default: + isClosed = util.OptionalBoolFalse + } + + var issues []*models.Issue + var filteredCount int64 + + keyword := ctx.FormTrim("q") + if strings.IndexByte(keyword, 0) >= 0 { + keyword = "" + } + var issueIDs []int64 + var labelIDs []int64 + if len(keyword) > 0 { + issueIDs, err = issue_indexer.SearchIssuesByKeyword(ctx, []int64{ctx.Repo.Repository.ID}, keyword) + if err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + } + + if splitted := strings.Split(ctx.FormString("labels"), ","); len(splitted) > 0 { + labelIDs, err = models.GetLabelIDsInRepoByNames(ctx.Repo.Repository.ID, splitted) + if err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + } + + var mileIDs []int64 + if part := strings.Split(ctx.FormString("milestones"), ","); len(part) > 0 { + for i := range part { + // uses names and fall back to ids + // non existent milestones are discarded + mile, err := issues_model.GetMilestoneByRepoIDANDName(ctx.Repo.Repository.ID, part[i]) + if err == nil { + mileIDs = append(mileIDs, mile.ID) + continue + } + if !issues_model.IsErrMilestoneNotExist(err) { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + id, err := strconv.ParseInt(part[i], 10, 64) + if err != nil { 
+ continue + } + mile, err = issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, id) + if err == nil { + mileIDs = append(mileIDs, mile.ID) + continue + } + if issues_model.IsErrMilestoneNotExist(err) { + continue + } + ctx.Error(http.StatusInternalServerError, err.Error()) + } + } + + listOptions := db.ListOptions{ + Page: ctx.FormInt("page"), + PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), + } + + var isPull util.OptionalBool + switch ctx.FormString("type") { + case "pulls": + isPull = util.OptionalBoolTrue + case "issues": + isPull = util.OptionalBoolFalse + default: + isPull = util.OptionalBoolNone + } + + // FIXME: we should be more efficient here + createdByID := getUserIDForFilter(ctx, "created_by") + if ctx.Written() { + return + } + assignedByID := getUserIDForFilter(ctx, "assigned_by") + if ctx.Written() { + return + } + mentionedByID := getUserIDForFilter(ctx, "mentioned_by") + if ctx.Written() { + return + } + + // Only fetch the issues if we either don't have a keyword or the search returned issues + // This would otherwise return all issues if no issues were found by the search. + if len(keyword) == 0 || len(issueIDs) > 0 || len(labelIDs) > 0 { + issuesOpt := &models.IssuesOptions{ + ListOptions: listOptions, + RepoID: ctx.Repo.Repository.ID, + IsClosed: isClosed, + IssueIDs: issueIDs, + LabelIDs: labelIDs, + MilestoneIDs: mileIDs, + IsPull: isPull, + UpdatedBeforeUnix: before, + UpdatedAfterUnix: since, + PosterID: createdByID, + AssigneeID: assignedByID, + MentionedID: mentionedByID, + } + + if issues, err = models.Issues(issuesOpt); err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + + issuesOpt.ListOptions = db.ListOptions{ + Page: -1, + } + if filteredCount, err = models.CountIssues(issuesOpt); err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + } + + ctx.SetTotalCountHeader(filteredCount) + ctx.JSON(http.StatusOK, convert.ToAPIIssueList(issues)) +} + // UpdateIssueStatus change issue's status func UpdateIssueStatus(ctx *context.Context) { issues := getActionIssues(ctx) @@ -2060,7 +2481,7 @@ func UpdateIssueStatus(ctx *context.Context) { } for _, issue := range issues { if issue.IsClosed != isClosed { - if err := issue_service.ChangeStatus(issue, ctx.User, isClosed); err != nil { + if err := issue_service.ChangeStatus(issue, ctx.Doer, isClosed); err != nil { if models.IsErrDependenciesLeft(err) { ctx.JSON(http.StatusPreconditionFailed, map[string]interface{}{ "error": "cannot close this issue because it still has open dependencies", @@ -2085,7 +2506,7 @@ func NewComment(ctx *context.Context) { return } - if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) { + if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) { if log.IsTrace() { if ctx.IsSigned { issueType := "issues" @@ -2094,7 +2515,7 @@ func NewComment(ctx *context.Context) { } log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+ "User in Repo has Permissions: %-+v", - ctx.User, + ctx.Doer, log.NewColoredIDValue(issue.PosterID), issueType, ctx.Repo.Repository, @@ -2108,9 +2529,9 @@ func NewComment(ctx *context.Context) { return } - if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.User.IsAdmin { + if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.Doer.IsAdmin { ctx.Flash.Error(ctx.Tr("repo.issues.comment_on_locked")) - 
ctx.Redirect(issue.HTMLURL(), http.StatusSeeOther) + ctx.Redirect(issue.HTMLURL()) return } @@ -2128,7 +2549,7 @@ func NewComment(ctx *context.Context) { var comment *models.Comment defer func() { // Check if issue admin/poster changes the status of issue. - if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.User.ID))) && + if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) && (form.Status == "reopen" || form.Status == "close") && !(issue.IsPull && issue.PullRequest.HasMerged) { @@ -2157,21 +2578,21 @@ func NewComment(ctx *context.Context) { ctx.Flash.Info(ctx.Tr("repo.pulls.open_unmerged_pull_exists", pr.Index)) } else { isClosed := form.Status == "close" - if err := issue_service.ChangeStatus(issue, ctx.User, isClosed); err != nil { + if err := issue_service.ChangeStatus(issue, ctx.Doer, isClosed); err != nil { log.Error("ChangeStatus: %v", err) if models.IsErrDependenciesLeft(err) { if issue.IsPull { ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked")) - ctx.Redirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, issue.Index), http.StatusSeeOther) + ctx.Redirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, issue.Index)) } else { ctx.Flash.Error(ctx.Tr("repo.issues.dependency.issue_close_blocked")) - ctx.Redirect(fmt.Sprintf("%s/issues/%d", ctx.Repo.RepoLink, issue.Index), http.StatusSeeOther) + ctx.Redirect(fmt.Sprintf("%s/issues/%d", ctx.Repo.RepoLink, issue.Index)) } return } } else { - if err := stopTimerIfAvailable(ctx.User, issue); err != nil { + if err := stopTimerIfAvailable(ctx.Doer, issue); err != nil { ctx.ServerError("CreateOrStopIssueStopwatch", err) return } @@ -2198,7 +2619,7 @@ func NewComment(ctx *context.Context) { return } - comment, err := comment_service.CreateIssueComment(ctx.User, ctx.Repo.Repository, issue, form.Content, attachments) + comment, err := comment_service.CreateIssueComment(ctx.Doer, ctx.Repo.Repository, issue, form.Content, attachments) if err != nil { ctx.ServerError("CreateIssueComment", err) return @@ -2220,7 +2641,7 @@ func UpdateCommentContent(ctx *context.Context) { return } - if !ctx.IsSigned || (ctx.User.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) { + if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) { ctx.Error(http.StatusForbidden) return } @@ -2238,7 +2659,7 @@ func UpdateCommentContent(ctx *context.Context) { }) return } - if err = comment_service.UpdateComment(comment, ctx.User, oldContent); err != nil { + if err = comment_service.UpdateComment(comment, ctx.Doer, oldContent); err != nil { ctx.ServerError("UpdateComment", err) return } @@ -2286,7 +2707,7 @@ func DeleteComment(ctx *context.Context) { return } - if !ctx.IsSigned || (ctx.User.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) { + if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) { ctx.Error(http.StatusForbidden) return } else if comment.Type != models.CommentTypeComment && comment.Type != models.CommentTypeCode { @@ -2294,12 +2715,12 @@ func DeleteComment(ctx *context.Context) { return } - if err = comment_service.DeleteComment(ctx.User, comment); err != nil { + if err = comment_service.DeleteComment(ctx.Doer, comment); err != nil { ctx.ServerError("DeleteCommentByID", err) return } - ctx.Status(200) + ctx.Status(http.StatusOK) } // ChangeIssueReaction create a reaction for issue @@ -2310,7 
+2731,7 @@ func ChangeIssueReaction(ctx *context.Context) { return } - if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) { + if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) { if log.IsTrace() { if ctx.IsSigned { issueType := "issues" @@ -2319,7 +2740,7 @@ func ChangeIssueReaction(ctx *context.Context) { } log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+ "User in Repo has Permissions: %-+v", - ctx.User, + ctx.Doer, log.NewColoredIDValue(issue.PosterID), issueType, ctx.Repo.Repository, @@ -2340,9 +2761,9 @@ func ChangeIssueReaction(ctx *context.Context) { switch ctx.Params(":action") { case "react": - reaction, err := models.CreateIssueReaction(ctx.User, issue, form.Content) + reaction, err := issues_model.CreateIssueReaction(ctx.Doer.ID, issue.ID, form.Content) if err != nil { - if models.IsErrForbiddenIssueReaction(err) { + if issues_model.IsErrForbiddenIssueReaction(err) { ctx.ServerError("ChangeIssueReaction", err) return } @@ -2358,7 +2779,7 @@ func ChangeIssueReaction(ctx *context.Context) { log.Trace("Reaction for issue created: %d/%d/%d", ctx.Repo.Repository.ID, issue.ID, reaction.ID) case "unreact": - if err := models.DeleteIssueReaction(ctx.User, issue, form.Content); err != nil { + if err := issues_model.DeleteIssueReaction(ctx.Doer.ID, issue.ID, form.Content); err != nil { ctx.ServerError("DeleteIssueReaction", err) return } @@ -2412,7 +2833,7 @@ func ChangeCommentReaction(ctx *context.Context) { return } - if !ctx.IsSigned || (ctx.User.ID != comment.PosterID && !ctx.Repo.CanReadIssuesOrPulls(comment.Issue.IsPull)) { + if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanReadIssuesOrPulls(comment.Issue.IsPull)) { if log.IsTrace() { if ctx.IsSigned { issueType := "issues" @@ -2421,7 +2842,7 @@ func ChangeCommentReaction(ctx *context.Context) { } log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+ "User in Repo has Permissions: %-+v", - ctx.User, + ctx.Doer, log.NewColoredIDValue(comment.Issue.PosterID), issueType, ctx.Repo.Repository, @@ -2442,9 +2863,9 @@ func ChangeCommentReaction(ctx *context.Context) { switch ctx.Params(":action") { case "react": - reaction, err := models.CreateCommentReaction(ctx.User, comment.Issue, comment, form.Content) + reaction, err := issues_model.CreateCommentReaction(ctx.Doer.ID, comment.Issue.ID, comment.ID, form.Content) if err != nil { - if models.IsErrForbiddenIssueReaction(err) { + if issues_model.IsErrForbiddenIssueReaction(err) { ctx.ServerError("ChangeIssueReaction", err) return } @@ -2460,7 +2881,7 @@ func ChangeCommentReaction(ctx *context.Context) { log.Trace("Reaction for comment created: %d/%d/%d/%d", ctx.Repo.Repository.ID, comment.Issue.ID, comment.ID, reaction.ID) case "unreact": - if err := models.DeleteCommentReaction(ctx.User, comment.Issue, comment, form.Content); err != nil { + if err := issues_model.DeleteCommentReaction(ctx.Doer.ID, comment.Issue.ID, comment.ID, form.Content); err != nil { ctx.ServerError("DeleteCommentReaction", err) return } @@ -2520,7 +2941,7 @@ func filterXRefComments(ctx *context.Context, issue *models.Issue) error { if err != nil { return err } - perm, err := models.GetUserRepoPermission(c.RefRepo, ctx.User) + perm, err := models.GetUserRepoPermission(ctx, c.RefRepo, ctx.Doer) if err != nil { return err } @@ -2586,7 +3007,7 @@ func updateAttachments(item interface{}, files []string) error { if 
len(files) > 0 { switch content := item.(type) { case *models.Issue: - err = content.UpdateAttachments(files) + err = models.UpdateIssueAttachments(content.ID, files) case *models.Comment: err = content.UpdateAttachments(files) default: @@ -2689,19 +3110,19 @@ func combineLabelComments(issue *models.Issue) { // get all teams that current user can mention func handleTeamMentions(ctx *context.Context) { - if ctx.User == nil || !ctx.Repo.Owner.IsOrganization() { + if ctx.Doer == nil || !ctx.Repo.Owner.IsOrganization() { return } var isAdmin bool var err error - var teams []*models.Team - org := models.OrgFromUser(ctx.Repo.Owner) + var teams []*organization.Team + org := organization.OrgFromUser(ctx.Repo.Owner) // Admin has super access. - if ctx.User.IsAdmin { + if ctx.Doer.IsAdmin { isAdmin = true } else { - isAdmin, err = org.IsOwnedBy(ctx.User.ID) + isAdmin, err = org.IsOwnedBy(ctx.Doer.ID) if err != nil { ctx.ServerError("IsOwnedBy", err) return @@ -2715,7 +3136,7 @@ func handleTeamMentions(ctx *context.Context) { return } } else { - teams, err = org.GetUserTeams(ctx.User.ID) + teams, err = org.GetUserTeams(ctx.Doer.ID) if err != nil { ctx.ServerError("GetUserTeams", err) return diff --git a/routers/web/repo/issue_content_history.go b/routers/web/repo/issue_content_history.go index ed9468b913..11cc8a2a6f 100644 --- a/routers/web/repo/issue_content_history.go +++ b/routers/web/repo/issue_content_history.go @@ -12,16 +12,15 @@ import ( "strings" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" issuesModel "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" + "code.gitea.io/gitea/modules/translation/i18n" "github.com/sergi/go-diff/diffmatchpatch" - "github.com/unknwon/i18n" ) // GetContentHistoryOverview get overview @@ -32,7 +31,7 @@ func GetContentHistoryOverview(ctx *context.Context) { } lang := ctx.Locale.Language() - editedHistoryCountMap, _ := issuesModel.QueryIssueContentHistoryEditedCountMap(db.DefaultContext, issue.ID) + editedHistoryCountMap, _ := issuesModel.QueryIssueContentHistoryEditedCountMap(ctx, issue.ID) ctx.JSON(http.StatusOK, map[string]interface{}{ "i18n": map[string]interface{}{ "textEdited": i18n.Tr(lang, "repo.issues.content_history.edited"), @@ -52,7 +51,7 @@ func GetContentHistoryList(ctx *context.Context) { return } - items, _ := issuesModel.FetchIssueContentHistoryList(db.DefaultContext, issue.ID, commentID) + items, _ := issuesModel.FetchIssueContentHistoryList(ctx, issue.ID, commentID) // render history list to HTML for frontend dropdown items: (name, value) // name is HTML of "avatar + userName + userAction + timeSince" @@ -99,11 +98,11 @@ func canSoftDeleteContentHistory(ctx *context.Context, issue *models.Issue, comm } else if ctx.Repo.CanWrite(unit.TypeIssues) { if comment == nil { // the issue poster or the history poster can soft-delete - canSoftDelete = ctx.User.ID == issue.PosterID || ctx.User.ID == history.PosterID + canSoftDelete = ctx.Doer.ID == issue.PosterID || ctx.Doer.ID == history.PosterID canSoftDelete = canSoftDelete && (history.IssueID == issue.ID) } else { // the comment poster or the history poster can soft-delete - canSoftDelete = ctx.User.ID == comment.PosterID || ctx.User.ID == history.PosterID + canSoftDelete = ctx.Doer.ID == comment.PosterID || ctx.Doer.ID == history.PosterID canSoftDelete = canSoftDelete && (history.IssueID == issue.ID) canSoftDelete = 
canSoftDelete && (history.CommentID == comment.ID) } @@ -119,7 +118,7 @@ func GetContentHistoryDetail(ctx *context.Context) { } historyID := ctx.FormInt64("history_id") - history, prevHistory, err := issuesModel.GetIssueContentHistoryAndPrev(db.DefaultContext, historyID) + history, prevHistory, err := issuesModel.GetIssueContentHistoryAndPrev(ctx, historyID) if err != nil { ctx.JSON(http.StatusNotFound, map[string]interface{}{ "message": "Can not find the content history", @@ -196,7 +195,7 @@ func SoftDeleteContentHistory(ctx *context.Context) { return } } - if history, err = issuesModel.GetIssueContentHistoryByID(db.DefaultContext, historyID); err != nil { + if history, err = issuesModel.GetIssueContentHistoryByID(ctx, historyID); err != nil { log.Error("can not get issue content history %v. err=%v", historyID, err) return } @@ -209,7 +208,7 @@ func SoftDeleteContentHistory(ctx *context.Context) { return } - err = issuesModel.SoftDeleteIssueContentHistory(db.DefaultContext, historyID) + err = issuesModel.SoftDeleteIssueContentHistory(ctx, historyID) log.Debug("soft delete issue content history. issue=%d, comment=%d, history=%d", issue.ID, commentID, historyID) ctx.JSON(http.StatusOK, map[string]interface{}{ "ok": err == nil, diff --git a/routers/web/repo/issue_dependency.go b/routers/web/repo/issue_dependency.go index 015f31d830..ec713238c6 100644 --- a/routers/web/repo/issue_dependency.go +++ b/routers/web/repo/issue_dependency.go @@ -22,20 +22,20 @@ func AddDependency(ctx *context.Context) { } // Check if the Repo is allowed to have dependencies - if !ctx.Repo.CanCreateIssueDependencies(ctx.User, issue.IsPull) { + if !ctx.Repo.CanCreateIssueDependencies(ctx.Doer, issue.IsPull) { ctx.Error(http.StatusForbidden, "CanCreateIssueDependencies") return } depID := ctx.FormInt64("newDependency") - if err = issue.LoadRepo(); err != nil { + if err = issue.LoadRepo(ctx); err != nil { ctx.ServerError("LoadRepo", err) return } // Redirect - defer ctx.Redirect(issue.HTMLURL(), http.StatusSeeOther) + defer ctx.Redirect(issue.HTMLURL()) // Dependency dep, err := models.GetIssueByID(depID) @@ -56,7 +56,7 @@ func AddDependency(ctx *context.Context) { return } - err = models.CreateIssueDependency(ctx.User, issue, dep) + err = models.CreateIssueDependency(ctx.Doer, issue, dep) if err != nil { if models.IsErrDependencyExists(err) { ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_exists")) @@ -81,14 +81,14 @@ func RemoveDependency(ctx *context.Context) { } // Check if the Repo is allowed to have dependencies - if !ctx.Repo.CanCreateIssueDependencies(ctx.User, issue.IsPull) { + if !ctx.Repo.CanCreateIssueDependencies(ctx.Doer, issue.IsPull) { ctx.Error(http.StatusForbidden, "CanCreateIssueDependencies") return } depID := ctx.FormInt64("removeDependencyID") - if err = issue.LoadRepo(); err != nil { + if err = issue.LoadRepo(ctx); err != nil { ctx.ServerError("LoadRepo", err) return } @@ -115,7 +115,7 @@ func RemoveDependency(ctx *context.Context) { return } - if err = models.RemoveIssueDependency(ctx.User, issue, dep, depType); err != nil { + if err = models.RemoveIssueDependency(ctx.Doer, issue, dep, depType); err != nil { if models.IsErrDependencyNotExists(err) { ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_not_exist")) return @@ -125,5 +125,5 @@ func RemoveDependency(ctx *context.Context) { } // Redirect - ctx.Redirect(issue.HTMLURL(), http.StatusSeeOther) + ctx.Redirect(issue.HTMLURL()) } diff --git a/routers/web/repo/issue_label.go b/routers/web/repo/issue_label.go index 
c6fa8c4c8c..887bbc115f 100644 --- a/routers/web/repo/issue_label.go +++ b/routers/web/repo/issue_label.go @@ -9,9 +9,11 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/forms" issue_service "code.gitea.io/gitea/services/issue" @@ -27,7 +29,7 @@ func Labels(ctx *context.Context) { ctx.Data["PageIsIssueList"] = true ctx.Data["PageIsLabels"] = true ctx.Data["RequireTribute"] = true - ctx.Data["LabelTemplates"] = models.LabelTemplates + ctx.Data["LabelTemplates"] = repo_module.LabelTemplates ctx.HTML(http.StatusOK, tplLabels) } @@ -39,9 +41,9 @@ func InitializeLabels(ctx *context.Context) { return } - if err := models.InitializeLabels(db.DefaultContext, ctx.Repo.Repository.ID, form.TemplateName, false); err != nil { - if models.IsErrIssueLabelTemplateLoad(err) { - originalErr := err.(models.ErrIssueLabelTemplateLoad).OriginalError + if err := repo_module.InitializeLabels(ctx, ctx.Repo.Repository.ID, form.TemplateName, false); err != nil { + if repo_module.IsErrIssueLabelTemplateLoad(err) { + originalErr := err.(repo_module.ErrIssueLabelTemplateLoad).OriginalError ctx.Flash.Error(ctx.Tr("repo.issues.label_templates.fail_to_load_file", form.TemplateName, originalErr)) ctx.Redirect(ctx.Repo.RepoLink + "/labels") return @@ -77,13 +79,13 @@ func RetrieveLabels(ctx *context.Context) { } ctx.Data["OrgLabels"] = orgLabels - org, err := models.GetOrgByName(ctx.Repo.Owner.LowerName) + org, err := organization.GetOrgByName(ctx.Repo.Owner.LowerName) if err != nil { ctx.ServerError("GetOrgByName", err) return } - if ctx.User != nil { - ctx.Org.IsOwner, err = org.IsOwnedBy(ctx.User.ID) + if ctx.Doer != nil { + ctx.Org.IsOwner, err = org.IsOwnedBy(ctx.Doer.ID) if err != nil { ctx.ServerError("org.IsOwnedBy", err) return @@ -115,7 +117,7 @@ func NewLabel(ctx *context.Context) { Description: form.Description, Color: form.Color, } - if err := models.NewLabel(l); err != nil { + if err := models.NewLabel(ctx, l); err != nil { ctx.ServerError("NewLabel", err) return } @@ -169,7 +171,7 @@ func UpdateIssueLabel(ctx *context.Context) { switch action := ctx.FormString("action"); action { case "clear": for _, issue := range issues { - if err := issue_service.ClearLabels(issue, ctx.User); err != nil { + if err := issue_service.ClearLabels(issue, ctx.Doer); err != nil { ctx.ServerError("ClearLabels", err) return } @@ -189,7 +191,7 @@ func UpdateIssueLabel(ctx *context.Context) { // detach if any issues already have label, otherwise attach action = "attach" for _, issue := range issues { - if issue.HasLabel(label.ID) { + if models.HasIssueLabel(issue.ID, label.ID) { action = "detach" break } @@ -198,14 +200,14 @@ func UpdateIssueLabel(ctx *context.Context) { if action == "attach" { for _, issue := range issues { - if err = issue_service.AddLabel(issue, ctx.User, label); err != nil { + if err = issue_service.AddLabel(issue, ctx.Doer, label); err != nil { ctx.ServerError("AddLabel", err) return } } } else { for _, issue := range issues { - if err = issue_service.RemoveLabel(issue, ctx.User, label); err != nil { + if err = issue_service.RemoveLabel(issue, ctx.Doer, label); err != nil { ctx.ServerError("RemoveLabel", err) return } diff --git a/routers/web/repo/issue_label_test.go b/routers/web/repo/issue_label_test.go index 
baa34530fa..5d7a29ee93 100644 --- a/routers/web/repo/issue_label_test.go +++ b/routers/web/repo/issue_label_test.go @@ -36,7 +36,7 @@ func TestInitializeLabels(t *testing.T) { test.LoadRepo(t, ctx, 2) web.SetForm(ctx, &forms.InitializeLabelsForm{TemplateName: "Default"}) InitializeLabels(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) unittest.AssertExistsAndLoadBean(t, &models.Label{ RepoID: 2, Name: "enhancement", @@ -82,7 +82,7 @@ func TestNewLabel(t *testing.T) { Color: "#abcdef", }) NewLabel(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) unittest.AssertExistsAndLoadBean(t, &models.Label{ Name: "newlabel", Color: "#abcdef", @@ -101,7 +101,7 @@ func TestUpdateLabel(t *testing.T) { Color: "#abcdef", }) UpdateLabel(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) unittest.AssertExistsAndLoadBean(t, &models.Label{ ID: 2, Name: "newnameforlabel", diff --git a/routers/web/repo/issue_lock.go b/routers/web/repo/issue_lock.go index 103b60c65d..5ac5cac52e 100644 --- a/routers/web/repo/issue_lock.go +++ b/routers/web/repo/issue_lock.go @@ -5,8 +5,6 @@ package repo import ( - "net/http" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/web" @@ -35,7 +33,7 @@ func LockIssue(ctx *context.Context) { } if err := models.LockIssue(&models.IssueLockOptions{ - Doer: ctx.User, + Doer: ctx.Doer, Issue: issue, Reason: form.Reason, }); err != nil { @@ -43,7 +41,7 @@ func LockIssue(ctx *context.Context) { return } - ctx.Redirect(issue.HTMLURL(), http.StatusSeeOther) + ctx.Redirect(issue.HTMLURL()) } // UnlockIssue unlocks a previously locked issue. @@ -60,12 +58,12 @@ func UnlockIssue(ctx *context.Context) { } if err := models.UnlockIssue(&models.IssueLockOptions{ - Doer: ctx.User, + Doer: ctx.Doer, Issue: issue, }); err != nil { ctx.ServerError("UnlockIssue", err) return } - ctx.Redirect(issue.HTMLURL(), http.StatusSeeOther) + ctx.Redirect(issue.HTMLURL()) } diff --git a/routers/web/repo/issue_stopwatch.go b/routers/web/repo/issue_stopwatch.go index 0e9405fde4..83e4ecedbf 100644 --- a/routers/web/repo/issue_stopwatch.go +++ b/routers/web/repo/issue_stopwatch.go @@ -9,7 +9,9 @@ import ( "strings" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/eventsource" ) // IssueStopwatch creates or stops a stopwatch for the given issue. 
@@ -21,16 +23,16 @@ func IssueStopwatch(c *context.Context) { var showSuccessMessage bool - if !models.StopwatchExists(c.User.ID, issue.ID) { + if !models.StopwatchExists(c.Doer.ID, issue.ID) { showSuccessMessage = true } - if !c.Repo.CanUseTimetracker(issue, c.User) { + if !c.Repo.CanUseTimetracker(issue, c.Doer) { c.NotFound("CanUseTimetracker", nil) return } - if err := models.CreateOrStopIssueStopwatch(c.User, issue); err != nil { + if err := models.CreateOrStopIssueStopwatch(c.Doer, issue); err != nil { c.ServerError("CreateOrStopIssueStopwatch", err) return } @@ -49,33 +51,45 @@ func CancelStopwatch(c *context.Context) { if c.Written() { return } - if !c.Repo.CanUseTimetracker(issue, c.User) { + if !c.Repo.CanUseTimetracker(issue, c.Doer) { c.NotFound("CanUseTimetracker", nil) return } - if err := models.CancelStopwatch(c.User, issue); err != nil { + if err := models.CancelStopwatch(c.Doer, issue); err != nil { c.ServerError("CancelStopwatch", err) return } + stopwatches, err := models.GetUserStopwatches(c.Doer.ID, db.ListOptions{}) + if err != nil { + c.ServerError("GetUserStopwatches", err) + return + } + if len(stopwatches) == 0 { + eventsource.GetManager().SendMessage(c.Doer.ID, &eventsource.Event{ + Name: "stopwatches", + Data: "{}", + }) + } + url := issue.HTMLURL() c.Redirect(url, http.StatusSeeOther) } // GetActiveStopwatch is the middleware that sets .ActiveStopwatch on context -func GetActiveStopwatch(c *context.Context) { - if strings.HasPrefix(c.Req.URL.Path, "/api") { +func GetActiveStopwatch(ctx *context.Context) { + if strings.HasPrefix(ctx.Req.URL.Path, "/api") { return } - if !c.IsSigned { + if !ctx.IsSigned { return } - _, sw, err := models.HasUserStopwatch(c.User.ID) + _, sw, err := models.HasUserStopwatch(ctx.Doer.ID) if err != nil { - c.ServerError("HasUserStopwatch", err) + ctx.ServerError("HasUserStopwatch", err) return } @@ -85,15 +99,15 @@ func GetActiveStopwatch(c *context.Context) { issue, err := models.GetIssueByID(sw.IssueID) if err != nil || issue == nil { - c.ServerError("GetIssueByID", err) + ctx.ServerError("GetIssueByID", err) return } - if err = issue.LoadRepo(); err != nil { - c.ServerError("LoadRepo", err) + if err = issue.LoadRepo(ctx); err != nil { + ctx.ServerError("LoadRepo", err) return } - c.Data["ActiveStopwatch"] = StopwatchTmplInfo{ + ctx.Data["ActiveStopwatch"] = StopwatchTmplInfo{ issue.Link(), issue.Repo.FullName(), issue.Index, diff --git a/routers/web/repo/issue_timetrack.go b/routers/web/repo/issue_timetrack.go index ec6bb6142d..28274a7f7b 100644 --- a/routers/web/repo/issue_timetrack.go +++ b/routers/web/repo/issue_timetrack.go @@ -9,6 +9,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" @@ -22,7 +23,7 @@ func AddTimeManually(c *context.Context) { if c.Written() { return } - if !c.Repo.CanUseTimetracker(issue, c.User) { + if !c.Repo.CanUseTimetracker(issue, c.Doer) { c.NotFound("CanUseTimetracker", nil) return } @@ -42,7 +43,7 @@ func AddTimeManually(c *context.Context) { return } - if _, err := models.AddTime(c.User, issue, int64(total.Seconds()), time.Now()); err != nil { + if _, err := models.AddTime(c.Doer, issue, int64(total.Seconds()), time.Now()); err != nil { c.ServerError("AddTime", err) return } @@ -56,14 +57,14 @@ func DeleteTime(c *context.Context) { if c.Written() { return } - if !c.Repo.CanUseTimetracker(issue, c.User) { + if !c.Repo.CanUseTimetracker(issue, c.Doer) { 
c.NotFound("CanUseTimetracker", nil) return } t, err := models.GetTrackedTimeByID(c.ParamsInt64(":timeid")) if err != nil { - if models.IsErrNotExist(err) { + if db.IsErrNotExist(err) { c.NotFound("time not found", err) return } @@ -72,7 +73,7 @@ func DeleteTime(c *context.Context) { } // only OP or admin may delete - if !c.IsSigned || (!c.IsUserSiteAdmin() && c.User.ID != t.UserID) { + if !c.IsSigned || (!c.IsUserSiteAdmin() && c.Doer.ID != t.UserID) { c.Error(http.StatusForbidden, "not allowed") return } diff --git a/routers/web/repo/issue_watch.go b/routers/web/repo/issue_watch.go index dabbff842b..53fec11cdc 100644 --- a/routers/web/repo/issue_watch.go +++ b/routers/web/repo/issue_watch.go @@ -20,7 +20,7 @@ func IssueWatch(ctx *context.Context) { return } - if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) { + if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) { if log.IsTrace() { if ctx.IsSigned { issueType := "issues" @@ -29,7 +29,7 @@ func IssueWatch(ctx *context.Context) { } log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+ "User in Repo has Permissions: %-+v", - ctx.User, + ctx.Doer, log.NewColoredIDValue(issue.PosterID), issueType, ctx.Repo.Repository, @@ -48,10 +48,10 @@ func IssueWatch(ctx *context.Context) { return } - if err := models.CreateOrUpdateIssueWatch(ctx.User.ID, issue.ID, watch); err != nil { + if err := models.CreateOrUpdateIssueWatch(ctx.Doer.ID, issue.ID, watch); err != nil { ctx.ServerError("CreateOrUpdateIssueWatch", err) return } - ctx.Redirect(issue.HTMLURL(), http.StatusSeeOther) + ctx.Redirect(issue.HTMLURL()) } diff --git a/routers/web/repo/lfs.go b/routers/web/repo/lfs.go index d2d62786fe..7c2ff1cfae 100644 --- a/routers/web/repo/lfs.go +++ b/routers/web/repo/lfs.go @@ -23,6 +23,7 @@ import ( "code.gitea.io/gitea/modules/git/pipeline" "code.gitea.io/gitea/modules/lfs" "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/typesniffer" @@ -103,14 +104,14 @@ func LFSLocks(ctx *context.Context) { } // Clone base repo. 
- tmpBasePath, err := models.CreateTemporaryPath("locks") + tmpBasePath, err := repo_module.CreateTemporaryPath("locks") if err != nil { log.Error("Failed to create temporary path: %v", err) ctx.ServerError("LFSLocks", err) return } defer func() { - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("LFSLocks: RemoveTemporaryPath: %v", err) } }() @@ -124,7 +125,7 @@ func LFSLocks(ctx *context.Context) { return } - gitRepo, err := git.OpenRepositoryCtx(ctx, tmpBasePath) + gitRepo, err := git.OpenRepository(ctx, tmpBasePath) if err != nil { log.Error("Unable to open temporary repository: %s (%v)", tmpBasePath, err) ctx.ServerError("LFSLocks", fmt.Errorf("failed to open new temporary repository in: %s %v", tmpBasePath, err)) @@ -217,7 +218,7 @@ func LFSLockFile(ctx *context.Context) { _, err := models.CreateLFSLock(ctx.Repo.Repository, &models.LFSLock{ Path: lockPath, - OwnerID: ctx.User.ID, + OwnerID: ctx.Doer.ID, }) if err != nil { if models.IsErrLFSLockAlreadyExist(err) { @@ -237,7 +238,7 @@ func LFSUnlock(ctx *context.Context) { ctx.NotFound("LFSUnlock", nil) return } - _, err := models.DeleteLFSLockByID(ctx.ParamsInt64("lid"), ctx.Repo.Repository, ctx.User, true) + _, err := models.DeleteLFSLockByID(ctx.ParamsInt64("lid"), ctx.Repo.Repository, ctx.Doer, true) if err != nil { ctx.ServerError("LFSUnlock", err) return @@ -253,6 +254,13 @@ func LFSFileGet(ctx *context.Context) { } ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs" oid := ctx.Params("oid") + + p := lfs.Pointer{Oid: oid} + if !p.IsValid() { + ctx.NotFound("LFSFileGet", nil) + return + } + ctx.Data["Title"] = oid ctx.Data["PageIsSettingsLFS"] = true meta, err := models.GetLFSMetaObjectByOid(ctx.Repo.Repository.ID, oid) @@ -343,6 +351,12 @@ func LFSDelete(ctx *context.Context) { return } oid := ctx.Params("oid") + p := lfs.Pointer{Oid: oid} + if !p.IsValid() { + ctx.NotFound("LFSDelete", nil) + return + } + count, err := models.RemoveLFSMetaObjectByOid(ctx.Repo.Repository.ID, oid) if err != nil { ctx.ServerError("LFSDelete", err) @@ -463,7 +477,7 @@ func LFSPointerFiles(ctx *context.Context) { // Can we fix? 
// OK well that's "simple" // - we need to check whether current user has access to a repo that has access to the file - result.Associatable, err = models.LFSObjectAccessible(ctx.User, pointerBlob.Oid) + result.Associatable, err = models.LFSObjectAccessible(ctx.Doer, pointerBlob.Oid) if err != nil { return err } @@ -538,7 +552,7 @@ func LFSAutoAssociate(ctx *context.Context) { metas[i].Oid = oid[:idx] // metas[i].RepositoryID = ctx.Repo.Repository.ID } - if err := models.LFSAutoAssociate(metas, ctx.User, ctx.Repo.Repository.ID); err != nil { + if err := models.LFSAutoAssociate(metas, ctx.Doer, ctx.Repo.Repository.ID); err != nil { ctx.ServerError("LFSAutoAssociate", err) return } diff --git a/routers/web/repo/main_test.go b/routers/web/repo/main_test.go index 81e3a8e281..a1ca3c3bc7 100644 --- a/routers/web/repo/main_test.go +++ b/routers/web/repo/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } diff --git a/routers/web/repo/middlewares.go b/routers/web/repo/middlewares.go index c99f0ee8f9..ae4177cf1e 100644 --- a/routers/web/repo/middlewares.go +++ b/routers/web/repo/middlewares.go @@ -43,7 +43,7 @@ func SetDiffViewStyle(ctx *context.Context) { } var ( - userStyle = ctx.User.DiffViewStyle + userStyle = ctx.Doer.DiffViewStyle style string ) @@ -56,7 +56,7 @@ func SetDiffViewStyle(ctx *context.Context) { } ctx.Data["IsSplitStyle"] = style == "split" - if err := user_model.UpdateUserDiffViewStyle(ctx.User, style); err != nil { + if err := user_model.UpdateUserDiffViewStyle(ctx.Doer, style); err != nil { ctx.ServerError("ErrUpdateDiffViewStyle", err) } } @@ -72,12 +72,12 @@ func SetWhitespaceBehavior(ctx *context.Context) { whitespaceBehavior = defaultWhitespaceBehavior } if ctx.IsSigned { - userWhitespaceBehavior, err := user_model.GetUserSetting(ctx.User.ID, user_model.SettingsKeyDiffWhitespaceBehavior, defaultWhitespaceBehavior) + userWhitespaceBehavior, err := user_model.GetUserSetting(ctx.Doer.ID, user_model.SettingsKeyDiffWhitespaceBehavior, defaultWhitespaceBehavior) if err == nil { if whitespaceBehavior == "" { whitespaceBehavior = userWhitespaceBehavior } else if whitespaceBehavior != userWhitespaceBehavior { - _ = user_model.SetUserSetting(ctx.User.ID, user_model.SettingsKeyDiffWhitespaceBehavior, whitespaceBehavior) + _ = user_model.SetUserSetting(ctx.Doer.ID, user_model.SettingsKeyDiffWhitespaceBehavior, whitespaceBehavior) } } // else: we can ignore the error safely } diff --git a/routers/web/repo/migrate.go b/routers/web/repo/migrate.go index 9a31d809d4..38cdbd4973 100644 --- a/routers/web/repo/migrate.go +++ b/routers/web/repo/migrate.go @@ -106,8 +106,7 @@ func handleMigrateError(ctx *context.Context, owner *user_model.User, err error, ctx.Data["Err_RepoName"] = true ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tpl, form) default: - remoteAddr, _ := forms.ParseRemoteAddr(form.CloneAddr, form.AuthUsername, form.AuthPassword) - err = util.NewStringURLSanitizedError(err, remoteAddr, true) + err = util.SanitizeErrorCredentialURLs(err) if strings.Contains(err.Error(), "Authentication failed") || strings.Contains(err.Error(), "Bad credentials") || strings.Contains(err.Error(), "could not read Username") { @@ -178,7 +177,7 @@ func MigratePost(ctx *context.Context) { remoteAddr, err := forms.ParseRemoteAddr(form.CloneAddr, form.AuthUsername, 
form.AuthPassword) if err == nil { - err = migrations.IsMigrateURLAllowed(remoteAddr, ctx.User) + err = migrations.IsMigrateURLAllowed(remoteAddr, ctx.Doer) } if err != nil { ctx.Data["Err_CloneAddr"] = true @@ -195,7 +194,7 @@ func MigratePost(ctx *context.Context) { ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_lfs_endpoint"), tpl, &form) return } - err = migrations.IsMigrateURLAllowed(ep.String(), ctx.User) + err = migrations.IsMigrateURLAllowed(ep.String(), ctx.Doer) if err != nil { ctx.Data["Err_LFSEndpoint"] = true handleMigrateRemoteAddrError(ctx, err, tpl, form) @@ -233,13 +232,13 @@ func MigratePost(ctx *context.Context) { opts.Releases = false } - err = repo_model.CheckCreateRepository(ctx.User, ctxUser, opts.RepoName, false) + err = repo_model.CheckCreateRepository(ctx.Doer, ctxUser, opts.RepoName, false) if err != nil { handleMigrateError(ctx, ctxUser, err, "MigratePost", tpl, form) return } - err = task.MigrateRepository(ctx.User, ctxUser, opts) + err = task.MigrateRepository(ctx.Doer, ctxUser, opts) if err == nil { ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(opts.RepoName)) return diff --git a/routers/web/repo/milestone.go b/routers/web/repo/milestone.go index df5fd411b4..1e75bd79fb 100644 --- a/routers/web/repo/milestone.go +++ b/routers/web/repo/milestone.go @@ -9,8 +9,8 @@ import ( "net/url" "time" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/markup" @@ -38,7 +38,7 @@ func Milestones(ctx *context.Context) { ctx.Data["PageIsMilestones"] = true isShowClosed := ctx.FormString("state") == "closed" - stats, err := models.GetMilestonesStatsByRepoCond(builder.And(builder.Eq{"id": ctx.Repo.Repository.ID})) + stats, err := issues_model.GetMilestonesStatsByRepoCond(builder.And(builder.Eq{"id": ctx.Repo.Repository.ID})) if err != nil { ctx.ServerError("MilestoneStats", err) return @@ -60,7 +60,7 @@ func Milestones(ctx *context.Context) { state = structs.StateClosed } - miles, total, err := models.GetMilestones(models.GetMilestonesOption{ + miles, total, err := issues_model.GetMilestones(issues_model.GetMilestonesOption{ ListOptions: db.ListOptions{ Page: page, PageSize: setting.UI.IssuePagingNum, @@ -143,7 +143,7 @@ func NewMilestonePost(ctx *context.Context) { } deadline = time.Date(deadline.Year(), deadline.Month(), deadline.Day(), 23, 59, 59, 0, deadline.Location()) - if err = models.NewMilestone(&models.Milestone{ + if err = issues_model.NewMilestone(&issues_model.Milestone{ RepoID: ctx.Repo.Repository.ID, Name: form.Title, Content: form.Content, @@ -163,9 +163,9 @@ func EditMilestone(ctx *context.Context) { ctx.Data["PageIsMilestones"] = true ctx.Data["PageIsEditMilestone"] = true - m, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, ctx.ParamsInt64(":id")) + m, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrMilestoneNotExist(err) { + if issues_model.IsErrMilestoneNotExist(err) { ctx.NotFound("", nil) } else { ctx.ServerError("GetMilestoneByRepoID", err) @@ -203,9 +203,9 @@ func EditMilestonePost(ctx *context.Context) { } deadline = time.Date(deadline.Year(), deadline.Month(), deadline.Day(), 23, 59, 59, 0, deadline.Location()) - m, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, ctx.ParamsInt64(":id")) + m, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":id")) 
if err != nil { - if models.IsErrMilestoneNotExist(err) { + if issues_model.IsErrMilestoneNotExist(err) { ctx.NotFound("", nil) } else { ctx.ServerError("GetMilestoneByRepoID", err) @@ -215,7 +215,7 @@ func EditMilestonePost(ctx *context.Context) { m.Name = form.Title m.Content = form.Content m.DeadlineUnix = timeutil.TimeStamp(deadline.Unix()) - if err = models.UpdateMilestone(m, m.IsClosed); err != nil { + if err = issues_model.UpdateMilestone(m, m.IsClosed); err != nil { ctx.ServerError("UpdateMilestone", err) return } @@ -237,8 +237,8 @@ func ChangeMilestoneStatus(ctx *context.Context) { } id := ctx.ParamsInt64(":id") - if err := models.ChangeMilestoneStatusByRepoIDAndID(ctx.Repo.Repository.ID, id, toClose); err != nil { - if models.IsErrMilestoneNotExist(err) { + if err := issues_model.ChangeMilestoneStatusByRepoIDAndID(ctx.Repo.Repository.ID, id, toClose); err != nil { + if issues_model.IsErrMilestoneNotExist(err) { ctx.NotFound("", err) } else { ctx.ServerError("ChangeMilestoneStatusByIDAndRepoID", err) @@ -250,7 +250,7 @@ func ChangeMilestoneStatus(ctx *context.Context) { // DeleteMilestone delete a milestone func DeleteMilestone(ctx *context.Context) { - if err := models.DeleteMilestoneByRepoID(ctx.Repo.Repository.ID, ctx.FormInt64("id")); err != nil { + if err := issues_model.DeleteMilestoneByRepoID(ctx.Repo.Repository.ID, ctx.FormInt64("id")); err != nil { ctx.Flash.Error("DeleteMilestoneByRepoID: " + err.Error()) } else { ctx.Flash.Success(ctx.Tr("repo.milestones.deletion_success")) @@ -264,9 +264,9 @@ func DeleteMilestone(ctx *context.Context) { // MilestoneIssuesAndPulls lists all the issues and pull requests of the milestone func MilestoneIssuesAndPulls(ctx *context.Context) { milestoneID := ctx.ParamsInt64(":id") - milestone, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, milestoneID) + milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID) if err != nil { - if models.IsErrMilestoneNotExist(err) { + if issues_model.IsErrMilestoneNotExist(err) { ctx.NotFound("GetMilestoneByID", err) return } diff --git a/routers/web/repo/packages.go b/routers/web/repo/packages.go new file mode 100644 index 0000000000..03ea4fc5f4 --- /dev/null +++ b/routers/web/repo/packages.go @@ -0,0 +1,73 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package repo + +import ( + "net/http" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/setting" +) + +const ( + tplPackagesList base.TplName = "repo/packages" +) + +// Packages displays a list of all packages in the repository +func Packages(ctx *context.Context) { + page := ctx.FormInt("page") + if page <= 1 { + page = 1 + } + query := ctx.FormTrim("q") + packageType := ctx.FormTrim("type") + + pvs, total, err := packages.SearchLatestVersions(ctx, &packages.PackageSearchOptions{ + Paginator: &db.ListOptions{ + PageSize: setting.UI.PackagesPagingNum, + Page: page, + }, + OwnerID: ctx.ContextUser.ID, + RepoID: ctx.Repo.Repository.ID, + Type: packages.Type(packageType), + Name: packages.SearchValue{Value: query}, + }) + if err != nil { + ctx.ServerError("SearchLatestVersions", err) + return + } + + pds, err := packages.GetPackageDescriptors(ctx, pvs) + if err != nil { + ctx.ServerError("GetPackageDescriptors", err) + return + } + + hasPackages, err := packages.HasRepositoryPackages(ctx, ctx.Repo.Repository.ID) + if err != nil { + ctx.ServerError("HasRepositoryPackages", err) + return + } + + ctx.Data["Title"] = ctx.Tr("packages.title") + ctx.Data["IsPackagesPage"] = true + ctx.Data["ContextUser"] = ctx.ContextUser + ctx.Data["Query"] = query + ctx.Data["PackageType"] = packageType + ctx.Data["HasPackages"] = hasPackages + ctx.Data["PackageDescriptors"] = pds + ctx.Data["Total"] = total + ctx.Data["RepositoryAccessMap"] = map[int64]bool{ctx.Repo.Repository.ID: true} // There is only the current repository + + pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5) + pager.AddParam(ctx, "q", "Query") + pager.AddParam(ctx, "type", "PackageType") + ctx.Data["Page"] = pager + + ctx.HTML(http.StatusOK, tplPackagesList) +} diff --git a/routers/web/repo/patch.go b/routers/web/repo/patch.go index e08cde89f2..cd731337ba 100644 --- a/routers/web/repo/patch.go +++ b/routers/web/repo/patch.go @@ -24,8 +24,6 @@ const ( // NewDiffPatch render create patch page func NewDiffPatch(ctx *context.Context) { - ctx.Data["RequireHighlightJS"] = true - canCommit := renderCommitRights(ctx) ctx.Data["TreePath"] = "" @@ -54,7 +52,6 @@ func NewDiffPatchPost(ctx *context.Context) { if form.CommitChoice == frmCommitChoiceNewBranch { branchName = form.NewBranchName } - ctx.Data["RequireHighlightJS"] = true ctx.Data["TreePath"] = "" ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL() ctx.Data["FileContent"] = form.Content @@ -90,7 +87,7 @@ func NewDiffPatchPost(ctx *context.Context) { message += "\n\n" + form.CommitMessage } - if _, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.User, &files.ApplyDiffPatchOptions{ + if _, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, &files.ApplyDiffPatchOptions{ LastCommitID: form.LastCommit, OldBranch: ctx.Repo.BranchName, NewBranch: branchName, diff --git a/routers/web/repo/projects.go b/routers/web/repo/projects.go index 3836a00d04..a6f843d848 100644 --- a/routers/web/repo/projects.go +++ b/routers/web/repo/projects.go @@ -12,6 +12,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/perm" + project_model "code.gitea.io/gitea/models/project" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" @@ -69,12 +70,12 @@ func Projects(ctx *context.Context) { total = repo.NumClosedProjects } - projects, 
count, err := models.GetProjects(models.ProjectSearchOptions{ + projects, count, err := project_model.GetProjects(project_model.SearchOptions{ RepoID: repo.ID, Page: page, IsClosed: util.OptionalBoolOf(isShowClosed), SortType: sortType, - Type: models.ProjectTypeRepository, + Type: project_model.TypeRepository, }) if err != nil { ctx.ServerError("GetProjects", err) @@ -122,7 +123,7 @@ func Projects(ctx *context.Context) { // NewProject render creating a project page func NewProject(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("repo.projects.new") - ctx.Data["ProjectTypes"] = models.GetProjectsConfig() + ctx.Data["ProjectTypes"] = project_model.GetProjectsConfig() ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects) ctx.HTML(http.StatusOK, tplProjectsNew) } @@ -134,18 +135,18 @@ func NewProjectPost(ctx *context.Context) { if ctx.HasError() { ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects) - ctx.Data["ProjectTypes"] = models.GetProjectsConfig() + ctx.Data["ProjectTypes"] = project_model.GetProjectsConfig() ctx.HTML(http.StatusOK, tplProjectsNew) return } - if err := models.NewProject(&models.Project{ + if err := project_model.NewProject(&project_model.Project{ RepoID: ctx.Repo.Repository.ID, Title: form.Title, Description: form.Content, - CreatorID: ctx.User.ID, + CreatorID: ctx.Doer.ID, BoardType: form.BoardType, - Type: models.ProjectTypeRepository, + Type: project_model.TypeRepository, }); err != nil { ctx.ServerError("NewProject", err) return @@ -168,8 +169,8 @@ func ChangeProjectStatus(ctx *context.Context) { } id := ctx.ParamsInt64(":id") - if err := models.ChangeProjectStatusByRepoIDAndID(ctx.Repo.Repository.ID, id, toClose); err != nil { - if models.IsErrProjectNotExist(err) { + if err := project_model.ChangeProjectStatusByRepoIDAndID(ctx.Repo.Repository.ID, id, toClose); err != nil { + if project_model.IsErrProjectNotExist(err) { ctx.NotFound("", err) } else { ctx.ServerError("ChangeProjectStatusByIDAndRepoID", err) @@ -181,9 +182,9 @@ func ChangeProjectStatus(ctx *context.Context) { // DeleteProject delete a project func DeleteProject(ctx *context.Context) { - p, err := models.GetProjectByID(ctx.ParamsInt64(":id")) + p, err := project_model.GetProjectByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrProjectNotExist(err) { + if project_model.IsErrProjectNotExist(err) { ctx.NotFound("", nil) } else { ctx.ServerError("GetProjectByID", err) @@ -195,7 +196,7 @@ func DeleteProject(ctx *context.Context) { return } - if err := models.DeleteProjectByID(p.ID); err != nil { + if err := project_model.DeleteProjectByID(p.ID); err != nil { ctx.Flash.Error("DeleteProjectByID: " + err.Error()) } else { ctx.Flash.Success(ctx.Tr("repo.projects.deletion_success")) @@ -212,9 +213,9 @@ func EditProject(ctx *context.Context) { ctx.Data["PageIsEditProjects"] = true ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects) - p, err := models.GetProjectByID(ctx.ParamsInt64(":id")) + p, err := project_model.GetProjectByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrProjectNotExist(err) { + if project_model.IsErrProjectNotExist(err) { ctx.NotFound("", nil) } else { ctx.ServerError("GetProjectByID", err) @@ -244,9 +245,9 @@ func EditProjectPost(ctx *context.Context) { return } - p, err := models.GetProjectByID(ctx.ParamsInt64(":id")) + p, err := project_model.GetProjectByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrProjectNotExist(err) { + if project_model.IsErrProjectNotExist(err) { 
ctx.NotFound("", nil) } else { ctx.ServerError("GetProjectByID", err) @@ -260,7 +261,7 @@ func EditProjectPost(ctx *context.Context) { p.Title = form.Title p.Description = form.Content - if err = models.UpdateProject(p); err != nil { + if err = project_model.UpdateProject(p); err != nil { ctx.ServerError("UpdateProjects", err) return } @@ -271,9 +272,9 @@ func EditProjectPost(ctx *context.Context) { // ViewProject renders the project board for a project func ViewProject(ctx *context.Context) { - project, err := models.GetProjectByID(ctx.ParamsInt64(":id")) + project, err := project_model.GetProjectByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrProjectNotExist(err) { + if project_model.IsErrProjectNotExist(err) { ctx.NotFound("", nil) } else { ctx.ServerError("GetProjectByID", err) @@ -285,7 +286,7 @@ func ViewProject(ctx *context.Context) { return } - boards, err := models.GetProjectBoards(project.ID) + boards, err := project_model.GetBoards(project.ID) if err != nil { ctx.ServerError("GetProjectBoards", err) return @@ -295,27 +296,29 @@ func ViewProject(ctx *context.Context) { boards[0].Title = ctx.Tr("repo.projects.type.uncategorized") } - issueList, err := boards.LoadIssues() + issuesMap, err := models.LoadIssuesFromBoardList(boards) if err != nil { ctx.ServerError("LoadIssuesOfBoards", err) return } linkedPrsMap := make(map[int64][]*models.Issue) - for _, issue := range issueList { - var referencedIds []int64 - for _, comment := range issue.Comments { - if comment.RefIssueID != 0 && comment.RefIsPull { - referencedIds = append(referencedIds, comment.RefIssueID) + for _, issuesList := range issuesMap { + for _, issue := range issuesList { + var referencedIds []int64 + for _, comment := range issue.Comments { + if comment.RefIssueID != 0 && comment.RefIsPull { + referencedIds = append(referencedIds, comment.RefIssueID) + } } - } - if len(referencedIds) > 0 { - if linkedPrs, err := models.Issues(&models.IssuesOptions{ - IssueIDs: referencedIds, - IsPull: util.OptionalBoolTrue, - }); err == nil { - linkedPrsMap[issue.ID] = linkedPrs + if len(referencedIds) > 0 { + if linkedPrs, err := models.Issues(&models.IssuesOptions{ + IssueIDs: referencedIds, + IsPull: util.OptionalBoolTrue, + }); err == nil { + linkedPrsMap[issue.ID] = linkedPrs + } } } } @@ -335,6 +338,7 @@ func ViewProject(ctx *context.Context) { ctx.Data["IsProjectsPage"] = true ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects) ctx.Data["Project"] = project + ctx.Data["IssuesMap"] = issuesMap ctx.Data["Boards"] = boards ctx.HTML(http.StatusOK, tplProjectsView) @@ -354,7 +358,7 @@ func UpdateIssueProject(ctx *context.Context) { continue } - if err := models.ChangeProjectAssign(issue, ctx.User, projectID); err != nil { + if err := models.ChangeProjectAssign(issue, ctx.Doer, projectID); err != nil { ctx.ServerError("ChangeProjectAssign", err) return } @@ -367,7 +371,7 @@ func UpdateIssueProject(ctx *context.Context) { // DeleteProjectBoard allows for the deletion of a project board func DeleteProjectBoard(ctx *context.Context) { - if ctx.User == nil { + if ctx.Doer == nil { ctx.JSON(http.StatusForbidden, map[string]string{ "message": "Only signed in users are allowed to perform this action.", }) @@ -381,9 +385,9 @@ func DeleteProjectBoard(ctx *context.Context) { return } - project, err := models.GetProjectByID(ctx.ParamsInt64(":id")) + project, err := project_model.GetProjectByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrProjectNotExist(err) { + if 
project_model.IsErrProjectNotExist(err) { ctx.NotFound("", nil) } else { ctx.ServerError("GetProjectByID", err) @@ -391,7 +395,7 @@ func DeleteProjectBoard(ctx *context.Context) { return } - pb, err := models.GetProjectBoard(ctx.ParamsInt64(":boardID")) + pb, err := project_model.GetBoard(ctx.ParamsInt64(":boardID")) if err != nil { ctx.ServerError("GetProjectBoard", err) return @@ -410,7 +414,7 @@ func DeleteProjectBoard(ctx *context.Context) { return } - if err := models.DeleteProjectBoardByID(ctx.ParamsInt64(":boardID")); err != nil { + if err := project_model.DeleteBoardByID(ctx.ParamsInt64(":boardID")); err != nil { ctx.ServerError("DeleteProjectBoardByID", err) return } @@ -430,9 +434,9 @@ func AddBoardToProjectPost(ctx *context.Context) { return } - project, err := models.GetProjectByID(ctx.ParamsInt64(":id")) + project, err := project_model.GetProjectByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrProjectNotExist(err) { + if project_model.IsErrProjectNotExist(err) { ctx.NotFound("", nil) } else { ctx.ServerError("GetProjectByID", err) @@ -440,11 +444,11 @@ func AddBoardToProjectPost(ctx *context.Context) { return } - if err := models.NewProjectBoard(&models.ProjectBoard{ + if err := project_model.NewBoard(&project_model.Board{ ProjectID: project.ID, Title: form.Title, Color: form.Color, - CreatorID: ctx.User.ID, + CreatorID: ctx.Doer.ID, }); err != nil { ctx.ServerError("NewProjectBoard", err) return @@ -455,8 +459,8 @@ func AddBoardToProjectPost(ctx *context.Context) { }) } -func checkProjectBoardChangePermissions(ctx *context.Context) (*models.Project, *models.ProjectBoard) { - if ctx.User == nil { +func checkProjectBoardChangePermissions(ctx *context.Context) (*project_model.Project, *project_model.Board) { + if ctx.Doer == nil { ctx.JSON(http.StatusForbidden, map[string]string{ "message": "Only signed in users are allowed to perform this action.", }) @@ -470,9 +474,9 @@ func checkProjectBoardChangePermissions(ctx *context.Context) (*models.Project, return nil, nil } - project, err := models.GetProjectByID(ctx.ParamsInt64(":id")) + project, err := project_model.GetProjectByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrProjectNotExist(err) { + if project_model.IsErrProjectNotExist(err) { ctx.NotFound("", nil) } else { ctx.ServerError("GetProjectByID", err) @@ -480,7 +484,7 @@ func checkProjectBoardChangePermissions(ctx *context.Context) (*models.Project, return nil, nil } - board, err := models.GetProjectBoard(ctx.ParamsInt64(":boardID")) + board, err := project_model.GetBoard(ctx.ParamsInt64(":boardID")) if err != nil { ctx.ServerError("GetProjectBoard", err) return nil, nil @@ -519,7 +523,7 @@ func EditProjectBoard(ctx *context.Context) { board.Sorting = form.Sorting } - if err := models.UpdateProjectBoard(board); err != nil { + if err := project_model.UpdateBoard(board); err != nil { ctx.ServerError("UpdateProjectBoard", err) return } @@ -536,7 +540,7 @@ func SetDefaultProjectBoard(ctx *context.Context) { return } - if err := models.SetDefaultBoard(project.ID, board.ID); err != nil { + if err := project_model.SetDefaultBoard(project.ID, board.ID); err != nil { ctx.ServerError("SetDefaultBoard", err) return } @@ -548,7 +552,7 @@ func SetDefaultProjectBoard(ctx *context.Context) { // MoveIssues moves or keeps issues in a column and sorts them inside that column func MoveIssues(ctx *context.Context) { - if ctx.User == nil { + if ctx.Doer == nil { ctx.JSON(http.StatusForbidden, map[string]string{ "message": "Only signed in users are allowed to perform 
this action.", }) @@ -562,9 +566,9 @@ func MoveIssues(ctx *context.Context) { return } - project, err := models.GetProjectByID(ctx.ParamsInt64(":id")) + project, err := project_model.GetProjectByID(ctx.ParamsInt64(":id")) if err != nil { - if models.IsErrProjectNotExist(err) { + if project_model.IsErrProjectNotExist(err) { ctx.NotFound("ProjectNotExist", nil) } else { ctx.ServerError("GetProjectByID", err) @@ -576,19 +580,18 @@ func MoveIssues(ctx *context.Context) { return } - var board *models.ProjectBoard + var board *project_model.Board if ctx.ParamsInt64(":boardID") == 0 { - board = &models.ProjectBoard{ + board = &project_model.Board{ ID: 0, ProjectID: project.ID, Title: ctx.Tr("repo.projects.type.uncategorized"), } } else { - // column - board, err = models.GetProjectBoard(ctx.ParamsInt64(":boardID")) + board, err = project_model.GetBoard(ctx.ParamsInt64(":boardID")) if err != nil { - if models.IsErrProjectBoardNotExist(err) { + if project_model.IsErrProjectBoardNotExist(err) { ctx.NotFound("ProjectBoardNotExist", nil) } else { ctx.ServerError("GetProjectBoard", err) @@ -634,7 +637,7 @@ func MoveIssues(ctx *context.Context) { return } - if err = models.MoveIssuesOnProjectBoard(board, sortedIssueIDs); err != nil { + if err = project_model.MoveIssuesOnProjectBoard(board, sortedIssueIDs); err != nil { ctx.ServerError("MoveIssuesOnProjectBoard", err) return } @@ -647,8 +650,43 @@ func MoveIssues(ctx *context.Context) { // CreateProject renders the generic project creation page func CreateProject(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("repo.projects.new") - ctx.Data["ProjectTypes"] = models.GetProjectsConfig() + ctx.Data["ProjectTypes"] = project_model.GetProjectsConfig() ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects) ctx.HTML(http.StatusOK, tplGenericProjectsNew) } + +// CreateProjectPost creates an individual and/or organization project +func CreateProjectPost(ctx *context.Context, form forms.UserCreateProjectForm) { + user := checkContextUser(ctx, form.UID) + if ctx.Written() { + return + } + + ctx.Data["ContextUser"] = user + + if ctx.HasError() { + ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects) + ctx.HTML(http.StatusOK, tplGenericProjectsNew) + return + } + + projectType := project_model.TypeIndividual + if user.IsOrganization() { + projectType = project_model.TypeOrganization + } + + if err := project_model.NewProject(&project_model.Project{ + Title: form.Title, + Description: form.Content, + CreatorID: user.ID, + BoardType: form.BoardType, + Type: projectType, + }); err != nil { + ctx.ServerError("NewProject", err) + return + } + + ctx.Flash.Success(ctx.Tr("repo.projects.create_success", form.Title)) + ctx.Redirect(setting.AppSubURL + "/") +} diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go index 1ac3e51e41..27b61309a5 100644 --- a/routers/web/repo/pull.go +++ b/routers/web/repo/pull.go @@ -18,6 +18,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -33,6 +34,7 @@ import ( "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/routers/utils" + asymkey_service "code.gitea.io/gitea/services/asymkey" "code.gitea.io/gitea/services/forms" "code.gitea.io/gitea/services/gitdiff" pull_service "code.gitea.io/gitea/services/pull" @@ -68,7 +70,7 @@ func 
getRepository(ctx *context.Context, repoID int64) *repo_model.Repository { return nil } - perm, err := models.GetUserRepoPermission(repo, ctx.User) + perm, err := models.GetUserRepoPermission(ctx, repo, ctx.Doer) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return nil @@ -77,7 +79,7 @@ func getRepository(ctx *context.Context, repoID int64) *repo_model.Repository { if !perm.CanRead(unit.TypeCode) { log.Trace("Permission Denied: User %-v cannot read %-v of repo %-v\n"+ "User in repo has Permissions: %-+v", - ctx.User, + ctx.Doer, unit.TypeCode, ctx.Repo, perm) @@ -99,7 +101,7 @@ func getForkRepository(ctx *context.Context) *repo_model.Repository { return nil } - if err := forkRepo.GetOwner(db.DefaultContext); err != nil { + if err := forkRepo.GetOwner(ctx); err != nil { ctx.ServerError("GetOwner", err) return nil } @@ -107,16 +109,16 @@ func getForkRepository(ctx *context.Context) *repo_model.Repository { ctx.Data["repo_name"] = forkRepo.Name ctx.Data["description"] = forkRepo.Description ctx.Data["IsPrivate"] = forkRepo.IsPrivate || forkRepo.Owner.Visibility == structs.VisibleTypePrivate - canForkToUser := forkRepo.OwnerID != ctx.User.ID && !repo_model.HasForkedRepo(ctx.User.ID, forkRepo.ID) + canForkToUser := forkRepo.OwnerID != ctx.Doer.ID && !repo_model.HasForkedRepo(ctx.Doer.ID, forkRepo.ID) ctx.Data["ForkRepo"] = forkRepo - ownedOrgs, err := models.GetOrgsCanCreateRepoByUserID(ctx.User.ID) + ownedOrgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx.Doer.ID) if err != nil { ctx.ServerError("GetOrgsCanCreateRepoByUserID", err) return nil } - var orgs []*models.Organization + var orgs []*organization.Organization for _, org := range ownedOrgs { if forkRepo.OwnerID != org.ID && !repo_model.HasForkedRepo(org.ID, forkRepo.ID) { orgs = append(orgs, org) @@ -125,7 +127,7 @@ func getForkRepository(ctx *context.Context) *repo_model.Repository { traverseParentRepo := forkRepo for { - if ctx.User.ID == traverseParentRepo.OwnerID { + if ctx.Doer.ID == traverseParentRepo.OwnerID { canForkToUser = false } else { for i, org := range orgs { @@ -150,7 +152,7 @@ func getForkRepository(ctx *context.Context) *repo_model.Repository { ctx.Data["Orgs"] = orgs if canForkToUser { - ctx.Data["ContextUser"] = ctx.User + ctx.Data["ContextUser"] = ctx.Doer } else if len(orgs) > 0 { ctx.Data["ContextUser"] = orgs[0] } @@ -216,7 +218,7 @@ func ForkPost(ctx *context.Context) { // Check if user is allowed to create repo's on the organization. 
if ctxUser.IsOrganization() { - isAllowedToFork, err := models.OrgFromUser(ctxUser).CanCreateOrgRepo(ctx.User.ID) + isAllowedToFork, err := organization.OrgFromUser(ctxUser).CanCreateOrgRepo(ctx.Doer.ID) if err != nil { ctx.ServerError("CanCreateOrgRepo", err) return @@ -226,7 +228,7 @@ func ForkPost(ctx *context.Context) { } } - repo, err := repo_service.ForkRepository(ctx.User, ctxUser, repo_service.ForkRepoOptions{ + repo, err := repo_service.ForkRepository(ctx, ctx.Doer, ctxUser, repo_service.ForkRepoOptions{ BaseRepo: forkRepo, Name: form.RepoName, Description: form.Description, @@ -264,7 +266,7 @@ func checkPullInfo(ctx *context.Context) *models.Issue { ctx.ServerError("LoadPoster", err) return nil } - if err := issue.LoadRepo(); err != nil { + if err := issue.LoadRepo(ctx); err != nil { ctx.ServerError("LoadRepo", err) return nil } @@ -281,14 +283,14 @@ func checkPullInfo(ctx *context.Context) *models.Issue { return nil } - if err = issue.PullRequest.LoadHeadRepo(); err != nil { + if err = issue.PullRequest.LoadHeadRepoCtx(ctx); err != nil { ctx.ServerError("LoadHeadRepo", err) return nil } if ctx.IsSigned { // Update issue-user. - if err = issue.ReadBy(ctx.User.ID); err != nil { + if err = issue.ReadBy(ctx, ctx.Doer.ID); err != nil { ctx.ServerError("ReadBy", err) return nil } @@ -338,7 +340,7 @@ func PrepareMergedViewPullInfo(ctx *context.Context, issue *models.Issue) *git.C } if commitSHA != "" { // Get immediate parent of the first commit in the patch, grab history back - parentCommit, err = git.NewCommand(ctx, "rev-list", "-1", "--skip=1", commitSHA).RunInDir(ctx.Repo.GitRepo.Path) + parentCommit, _, err = git.NewCommand(ctx, "rev-list", "-1", "--skip=1", commitSHA).RunStdString(&git.RunOpts{Dir: ctx.Repo.GitRepo.Path}) if err == nil { parentCommit = strings.TrimSpace(parentCommit) } @@ -395,12 +397,12 @@ func PrepareViewPullInfo(ctx *context.Context, issue *models.Issue) *git.Compare repo := ctx.Repo.Repository pull := issue.PullRequest - if err := pull.LoadHeadRepo(); err != nil { + if err := pull.LoadHeadRepoCtx(ctx); err != nil { ctx.ServerError("LoadHeadRepo", err) return nil } - if err := pull.LoadBaseRepo(); err != nil { + if err := pull.LoadBaseRepoCtx(ctx); err != nil { ctx.ServerError("LoadBaseRepo", err) return nil } @@ -417,7 +419,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *models.Issue) *git.Compare if pull.BaseRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil { baseGitRepo = ctx.Repo.GitRepo } else { - baseGitRepo, err := git.OpenRepositoryCtx(ctx, pull.BaseRepo.RepoPath()) + baseGitRepo, err := git.OpenRepository(ctx, pull.BaseRepo.RepoPath()) if err != nil { ctx.ServerError("OpenRepository", err) return nil @@ -469,7 +471,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *models.Issue) *git.Compare var headBranchSha string // HeadRepo may be missing if pull.HeadRepo != nil { - headGitRepo, err := git.OpenRepositoryCtx(ctx, pull.HeadRepo.RepoPath()) + headGitRepo, err := git.OpenRepository(ctx, pull.HeadRepo.RepoPath()) if err != nil { ctx.ServerError("OpenRepository", err) return nil @@ -497,7 +499,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *models.Issue) *git.Compare if headBranchExist { var err error - ctx.Data["UpdateAllowed"], ctx.Data["UpdateByRebaseAllowed"], err = pull_service.IsUserAllowedToUpdate(pull, ctx.User) + ctx.Data["UpdateAllowed"], ctx.Data["UpdateByRebaseAllowed"], err = pull_service.IsUserAllowedToUpdate(ctx, pull, ctx.Doer) if err != nil { ctx.ServerError("IsUserAllowedToUpdate", err) return 
nil @@ -683,23 +685,36 @@ func ViewPullFiles(ctx *context.Context) { if fileOnly && (len(files) == 2 || len(files) == 1) { maxLines, maxFiles = -1, -1 } + diffOptions := &gitdiff.DiffOptions{ + BeforeCommitID: startCommitID, + AfterCommitID: endCommitID, + SkipTo: ctx.FormString("skip-to"), + MaxLines: maxLines, + MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters, + MaxFiles: maxFiles, + WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)), + } - diff, err := gitdiff.GetDiff(gitRepo, - &gitdiff.DiffOptions{ - BeforeCommitID: startCommitID, - AfterCommitID: endCommitID, - SkipTo: ctx.FormString("skip-to"), - MaxLines: maxLines, - MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters, - MaxFiles: maxFiles, - WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)), - }, ctx.FormStrings("files")...) + var methodWithError string + var diff *gitdiff.Diff + if !ctx.IsSigned { + diff, err = gitdiff.GetDiff(gitRepo, diffOptions, files...) + methodWithError = "GetDiff" + } else { + diff, err = gitdiff.SyncAndGetUserSpecificDiff(ctx, ctx.Doer.ID, pull, gitRepo, diffOptions, files...) + methodWithError = "SyncAndGetUserSpecificDiff" + } if err != nil { - ctx.ServerError("GetDiffRangeWithWhitespaceBehavior", err) + ctx.ServerError(methodWithError, err) return } - if err = diff.LoadComments(ctx, issue, ctx.User); err != nil { + ctx.PageData["prReview"] = map[string]interface{}{ + "numberOfFiles": diff.NumFiles, + "numberOfViewedFiles": diff.NumViewedFiles, + } + + if err = diff.LoadComments(ctx, issue, ctx.Doer); err != nil { ctx.ServerError("LoadComments", err) return } @@ -732,8 +747,8 @@ func ViewPullFiles(ctx *context.Context) { return } - if ctx.IsSigned && ctx.User != nil { - if ctx.Data["CanMarkConversation"], err = models.CanMarkConversation(issue, ctx.User); err != nil { + if ctx.IsSigned && ctx.Doer != nil { + if ctx.Data["CanMarkConversation"], err = models.CanMarkConversation(issue, ctx.Doer); err != nil { ctx.ServerError("CanMarkConversation", err) return } @@ -741,7 +756,6 @@ func ViewPullFiles(ctx *context.Context) { setCompareContext(ctx, baseCommit, commit, ctx.Repo.Owner.Name, ctx.Repo.Repository.Name) - ctx.Data["RequireHighlightJS"] = true ctx.Data["RequireTribute"] = true if ctx.Data["Assignees"], err = models.GetRepoAssignees(ctx.Repo.Repository); err != nil { ctx.ServerError("GetAssignees", err) @@ -751,13 +765,29 @@ func ViewPullFiles(ctx *context.Context) { if ctx.Written() { return } - ctx.Data["CurrentReview"], err = models.GetCurrentReview(ctx.User, issue) + + currentReview, err := models.GetCurrentReview(ctx.Doer, issue) if err != nil && !models.IsErrReviewNotExist(err) { ctx.ServerError("GetCurrentReview", err) return } + numPendingCodeComments := int64(0) + if currentReview != nil { + numPendingCodeComments, err = models.CountComments(&models.FindCommentsOptions{ + Type: models.CommentTypeCode, + ReviewID: currentReview.ID, + IssueID: issue.ID, + }) + if err != nil { + ctx.ServerError("CountComments", err) + return + } + } + ctx.Data["CurrentReview"] = currentReview + ctx.Data["PendingCodeCommentNumber"] = numPendingCodeComments + getBranchData(ctx, issue) - ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.User.ID) + ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID) ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled @@ -783,16 +813,16 @@ func UpdatePullRequest(ctx 
*context.Context) { rebase := ctx.FormString("style") == "rebase" - if err := issue.PullRequest.LoadBaseRepo(); err != nil { + if err := issue.PullRequest.LoadBaseRepoCtx(ctx); err != nil { ctx.ServerError("LoadBaseRepo", err) return } - if err := issue.PullRequest.LoadHeadRepo(); err != nil { + if err := issue.PullRequest.LoadHeadRepoCtx(ctx); err != nil { ctx.ServerError("LoadHeadRepo", err) return } - allowedUpdateByMerge, allowedUpdateByRebase, err := pull_service.IsUserAllowedToUpdate(issue.PullRequest, ctx.User) + allowedUpdateByMerge, allowedUpdateByRebase, err := pull_service.IsUserAllowedToUpdate(ctx, issue.PullRequest, ctx.Doer) if err != nil { ctx.ServerError("IsUserAllowedToMerge", err) return @@ -808,7 +838,7 @@ func UpdatePullRequest(ctx *context.Context) { // default merge commit message message := fmt.Sprintf("Merge branch '%s' into %s", issue.PullRequest.BaseBranch, issue.PullRequest.HeadBranch) - if err = pull_service.Update(ctx, issue.PullRequest, ctx.User, message, rebase); err != nil { + if err = pull_service.Update(ctx, issue.PullRequest, ctx.Doer, message, rebase); err != nil { if models.IsErrMergeConflicts(err) { conflictError := err.(models.ErrMergeConflicts) flashError, err := ctx.RenderToString(tplAlertDetails, map[string]interface{}{ @@ -857,50 +887,62 @@ func MergePullRequest(ctx *context.Context) { if ctx.Written() { return } - if issue.IsClosed { - if issue.IsPull { - ctx.Flash.Error(ctx.Tr("repo.pulls.is_closed")) - ctx.Redirect(issue.Link()) - return - } - ctx.Flash.Error(ctx.Tr("repo.issues.closed_title")) - ctx.Redirect(issue.Link()) - return - } pr := issue.PullRequest + pr.Issue = issue + pr.Issue.Repo = ctx.Repo.Repository + manuallMerge := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged + forceMerge := form.ForceMerge != nil && *form.ForceMerge - allowedMerge, err := pull_service.IsUserAllowedToMerge(pr, ctx.Repo.Permission, ctx.User) - if err != nil { - ctx.ServerError("IsUserAllowedToMerge", err) - return - } - if !allowedMerge { - ctx.Flash.Error(ctx.Tr("repo.pulls.update_not_allowed")) - ctx.Redirect(issue.Link()) - return - } - - if pr.HasMerged { - ctx.Flash.Error(ctx.Tr("repo.pulls.has_merged")) - ctx.Redirect(issue.Link()) + // start with merging by checking + if err := pull_service.CheckPullMergable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, manuallMerge, forceMerge); err != nil { + if errors.Is(err, pull_service.ErrIsClosed) { + if issue.IsPull { + ctx.Flash.Error(ctx.Tr("repo.pulls.is_closed")) + ctx.Redirect(issue.Link()) + } else { + ctx.Flash.Error(ctx.Tr("repo.issues.closed_title")) + ctx.Redirect(issue.Link()) + } + } else if errors.Is(err, pull_service.ErrUserNotAllowedToMerge) { + ctx.Flash.Error(ctx.Tr("repo.pulls.update_not_allowed")) + ctx.Redirect(issue.Link()) + } else if errors.Is(err, pull_service.ErrHasMerged) { + ctx.Flash.Error(ctx.Tr("repo.pulls.has_merged")) + ctx.Redirect(issue.Link()) + } else if errors.Is(err, pull_service.ErrIsWorkInProgress) { + ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_wip")) + ctx.Redirect(issue.Link()) + } else if errors.Is(err, pull_service.ErrNotMergableState) { + ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_not_ready")) + ctx.Redirect(issue.Link()) + } else if models.IsErrDisallowedToMerge(err) { + ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_not_ready")) + ctx.Redirect(issue.Link()) + } else if asymkey_service.IsErrWontSign(err) { + ctx.Flash.Error(err.Error()) // has no translation ...
+ ctx.Redirect(issue.Link()) + } else if errors.Is(err, pull_service.ErrDependenciesLeft) { + ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked")) + ctx.Redirect(issue.Link()) + } else { + ctx.ServerError("WebCheck", err) + } return } // handle manually-merged mark - if repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged { - if err = pull_service.MergedManually(pr, ctx.User, ctx.Repo.GitRepo, form.MergeCommitID); err != nil { + if manuallMerge { + if err := pull_service.MergedManually(pr, ctx.Doer, ctx.Repo.GitRepo, form.MergeCommitID); err != nil { if models.IsErrInvalidMergeStyle(err) { ctx.Flash.Error(ctx.Tr("repo.pulls.invalid_merge_option")) ctx.Redirect(issue.Link()) - return } else if strings.Contains(err.Error(), "Wrong commit ID") { ctx.Flash.Error(ctx.Tr("repo.pulls.wrong_commit_id")) ctx.Redirect(issue.Link()) - return + } else { + ctx.ServerError("MergedManually", err) } - - ctx.ServerError("MergedManually", err) return } @@ -908,49 +950,13 @@ func MergePullRequest(ctx *context.Context) { return } - if !pr.CanAutoMerge() { - ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_not_ready")) - ctx.Redirect(issue.Link()) - return - } - - if pr.IsWorkInProgress() { - ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_wip")) - ctx.Redirect(issue.Link()) - return - } - - if err := pull_service.CheckPRReadyToMerge(ctx, pr, false); err != nil { - if !models.IsErrNotAllowedToMerge(err) { - ctx.ServerError("Merge PR status", err) - return - } - if isRepoAdmin, err := models.IsUserRepoAdmin(pr.BaseRepo, ctx.User); err != nil { - ctx.ServerError("IsUserRepoAdmin", err) - return - } else if !isRepoAdmin { - ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_not_ready")) - ctx.Redirect(issue.Link()) - return - } - } - - if ctx.HasError() { - ctx.Flash.Error(ctx.Data["ErrorMsg"].(string)) - ctx.Redirect(issue.Link()) - return - } - message := strings.TrimSpace(form.MergeTitleField) if len(message) == 0 { - if repo_model.MergeStyle(form.Do) == repo_model.MergeStyleMerge { - message = pr.GetDefaultMergeMessage() - } - if repo_model.MergeStyle(form.Do) == repo_model.MergeStyleRebaseMerge { - message = pr.GetDefaultMergeMessage() - } - if repo_model.MergeStyle(form.Do) == repo_model.MergeStyleSquash { - message = pr.GetDefaultSquashMessage() + var err error + message, err = pull_service.GetDefaultMergeMessage(ctx.Repo.GitRepo, pr, repo_model.MergeStyle(form.Do)) + if err != nil { + ctx.ServerError("GetDefaultMergeMessage", err) + return } } @@ -959,25 +965,10 @@ func MergePullRequest(ctx *context.Context) { message += "\n\n" + form.MergeMessageField } - pr.Issue = issue - pr.Issue.Repo = ctx.Repo.Repository - - noDeps, err := models.IssueNoDependenciesLeft(issue) - if err != nil { - return - } - - if !noDeps { - ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked")) - ctx.Redirect(issue.Link()) - return - } - - if err = pull_service.Merge(ctx, pr, ctx.User, ctx.Repo.GitRepo, repo_model.MergeStyle(form.Do), form.HeadCommitID, message); err != nil { + if err := pull_service.Merge(ctx, pr, ctx.Doer, ctx.Repo.GitRepo, repo_model.MergeStyle(form.Do), form.HeadCommitID, message); err != nil { if models.IsErrInvalidMergeStyle(err) { ctx.Flash.Error(ctx.Tr("repo.pulls.invalid_merge_option")) ctx.Redirect(issue.Link()) - return } else if models.IsErrMergeConflicts(err) { conflictError := err.(models.ErrMergeConflicts) flashError, err := ctx.RenderToString(tplAlertDetails, map[string]interface{}{ @@ -991,7 +982,6 @@ func MergePullRequest(ctx *context.Context) { } 
ctx.Flash.Error(flashError) ctx.Redirect(issue.Link()) - return } else if models.IsErrRebaseConflicts(err) { conflictError := err.(models.ErrRebaseConflicts) flashError, err := ctx.RenderToString(tplAlertDetails, map[string]interface{}{ @@ -1005,22 +995,18 @@ func MergePullRequest(ctx *context.Context) { } ctx.Flash.Error(flashError) ctx.Redirect(issue.Link()) - return } else if models.IsErrMergeUnrelatedHistories(err) { log.Debug("MergeUnrelatedHistories error: %v", err) ctx.Flash.Error(ctx.Tr("repo.pulls.unrelated_histories")) ctx.Redirect(issue.Link()) - return } else if git.IsErrPushOutOfDate(err) { log.Debug("MergePushOutOfDate error: %v", err) ctx.Flash.Error(ctx.Tr("repo.pulls.merge_out_of_date")) ctx.Redirect(issue.Link()) - return } else if models.IsErrSHADoesNotMatch(err) { log.Debug("MergeHeadOutOfDate error: %v", err) ctx.Flash.Error(ctx.Tr("repo.pulls.head_out_of_date")) ctx.Redirect(issue.Link()) - return } else if git.IsErrPushRejected(err) { log.Debug("MergePushRejected error: %v", err) pushrejErr := err.(*git.ErrPushRejected) @@ -1040,13 +1026,14 @@ func MergePullRequest(ctx *context.Context) { ctx.Flash.Error(flashError) } ctx.Redirect(issue.Link()) - return + } else { + ctx.ServerError("Merge", err) } - ctx.ServerError("Merge", err) return } + log.Trace("Pull request merged: %d", pr.ID) - if err := stopTimerIfAvailable(ctx.User, issue); err != nil { + if err := stopTimerIfAvailable(ctx.Doer, issue); err != nil { ctx.ServerError("CreateOrStopIssueStopwatch", err) return } @@ -1055,7 +1042,7 @@ func MergePullRequest(ctx *context.Context) { if form.DeleteBranchAfterMerge { // Don't cleanup when other pr use this branch as head branch - exist, err := models.HasUnmergedPullRequestsByHeadInfo(pr.HeadRepoID, pr.HeadBranch) + exist, err := models.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch) if err != nil { ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err) return @@ -1069,7 +1056,7 @@ func MergePullRequest(ctx *context.Context) { if ctx.Repo != nil && ctx.Repo.Repository != nil && pr.HeadRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil { headRepo = ctx.Repo.GitRepo } else { - headRepo, err = git.OpenRepositoryCtx(ctx, pr.HeadRepo.RepoPath()) + headRepo, err = git.OpenRepository(ctx, pr.HeadRepo.RepoPath()) if err != nil { ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.RepoPath()), err) return @@ -1100,7 +1087,6 @@ func CompareAndPullRequestPost(ctx *context.Context) { ctx.Data["IsDiffCompare"] = true ctx.Data["IsRepoToolbarCommits"] = true ctx.Data["RequireTribute"] = true - ctx.Data["RequireHighlightJS"] = true ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled upload.AddUploadContext(ctx, "comment") @@ -1168,21 +1154,22 @@ func CompareAndPullRequestPost(ctx *context.Context) { RepoID: repo.ID, Repo: repo, Title: form.Title, - PosterID: ctx.User.ID, - Poster: ctx.User, + PosterID: ctx.Doer.ID, + Poster: ctx.Doer, MilestoneID: milestoneID, IsPull: true, Content: form.Content, } pullRequest := &models.PullRequest{ - HeadRepoID: ci.HeadRepo.ID, - BaseRepoID: repo.ID, - HeadBranch: ci.HeadBranch, - BaseBranch: ci.BaseBranch, - HeadRepo: ci.HeadRepo, - BaseRepo: repo, - MergeBase: ci.CompareInfo.MergeBase, - Type: models.PullRequestGitea, + HeadRepoID: ci.HeadRepo.ID, + BaseRepoID: repo.ID, + HeadBranch: ci.HeadBranch, + BaseBranch: ci.BaseBranch, + HeadRepo: ci.HeadRepo, + BaseRepo: repo, + MergeBase: 
ci.CompareInfo.MergeBase, + Type: models.PullRequestGitea, + AllowMaintainerEdit: form.AllowMaintainerEdit, } // FIXME: check error in the case two people send pull request at almost same time, give nice error prompt // instead of 500. @@ -1235,7 +1222,7 @@ func CleanUpPullRequest(ctx *context.Context) { } // Don't cleanup when there are other PR's that use this branch as head branch. - exist, err := models.HasUnmergedPullRequestsByHeadInfo(pr.HeadRepoID, pr.HeadBranch) + exist, err := models.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch) if err != nil { ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err) return @@ -1245,22 +1232,22 @@ func CleanUpPullRequest(ctx *context.Context) { return } - if err := pr.LoadHeadRepo(); err != nil { + if err := pr.LoadHeadRepoCtx(ctx); err != nil { ctx.ServerError("LoadHeadRepo", err) return } else if pr.HeadRepo == nil { // Forked repository has already been deleted ctx.NotFound("CleanUpPullRequest", nil) return - } else if err = pr.LoadBaseRepo(); err != nil { + } else if err = pr.LoadBaseRepoCtx(ctx); err != nil { ctx.ServerError("LoadBaseRepo", err) return - } else if err = pr.HeadRepo.GetOwner(db.DefaultContext); err != nil { + } else if err = pr.HeadRepo.GetOwner(ctx); err != nil { ctx.ServerError("HeadRepo.GetOwner", err) return } - perm, err := models.GetUserRepoPermission(pr.HeadRepo, ctx.User) + perm, err := models.GetUserRepoPermission(ctx, pr.HeadRepo, ctx.Doer) if err != nil { ctx.ServerError("GetUserRepoPermission", err) return @@ -1279,7 +1266,7 @@ func CleanUpPullRequest(ctx *context.Context) { gitBaseRepo = ctx.Repo.GitRepo } else { // If not just open it - gitBaseRepo, err = git.OpenRepositoryCtx(ctx, pr.BaseRepo.RepoPath()) + gitBaseRepo, err = git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) if err != nil { ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.BaseRepo.RepoPath()), err) return @@ -1294,7 +1281,7 @@ func CleanUpPullRequest(ctx *context.Context) { gitRepo = ctx.Repo.GitRepo } else if pr.BaseRepoID != pr.HeadRepoID { // Otherwise just load it up - gitRepo, err = git.OpenRepositoryCtx(ctx, pr.HeadRepo.RepoPath()) + gitRepo, err = git.OpenRepository(ctx, pr.HeadRepo.RepoPath()) if err != nil { ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.RepoPath()), err) return @@ -1331,7 +1318,7 @@ func CleanUpPullRequest(ctx *context.Context) { func deleteBranch(ctx *context.Context, pr *models.PullRequest, gitRepo *git.Repository) { fullBranchName := pr.HeadRepo.Owner.Name + "/" + pr.HeadBranch - if err := repo_service.DeleteBranch(ctx.User, pr.HeadRepo, gitRepo, pr.HeadBranch); err != nil { + if err := repo_service.DeleteBranch(ctx.Doer, pr.HeadRepo, gitRepo, pr.HeadBranch); err != nil { switch { case git.IsErrBranchNotExist(err): ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName)) @@ -1346,7 +1333,7 @@ func deleteBranch(ctx *context.Context, pr *models.PullRequest, gitRepo *git.Rep return } - if err := models.AddDeletePRBranchComment(ctx.User, pr.BaseRepo, pr.IssueID, pr.HeadBranch); err != nil { + if err := models.AddDeletePRBranchComment(ctx, ctx.Doer, pr.BaseRepo, pr.IssueID, pr.HeadBranch); err != nil { // Do not fail here as branch has already been deleted log.Error("DeleteBranch: %v", err) } @@ -1396,7 +1383,7 @@ func UpdatePullRequestTarget(ctx *context.Context) { return } - if !ctx.IsSigned || (!issue.IsPoster(ctx.User.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { + if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && 
!ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { ctx.Error(http.StatusForbidden) return } @@ -1407,7 +1394,7 @@ func UpdatePullRequestTarget(ctx *context.Context) { return } - if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.User, targetBranch); err != nil { + if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.Doer, targetBranch); err != nil { if models.IsErrPullRequestAlreadyExists(err) { err := err.(models.ErrPullRequestAlreadyExists) @@ -1448,9 +1435,37 @@ func UpdatePullRequestTarget(ctx *context.Context) { } return } - notification.NotifyPullRequestChangeTargetBranch(ctx.User, pr, targetBranch) + notification.NotifyPullRequestChangeTargetBranch(ctx.Doer, pr, targetBranch) ctx.JSON(http.StatusOK, map[string]interface{}{ "base_branch": pr.BaseBranch, }) } + +// SetAllowEdits allow edits from maintainers to PRs +func SetAllowEdits(ctx *context.Context) { + form := web.GetForm(ctx).(*forms.UpdateAllowEditsForm) + + pr, err := models.GetPullRequestByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) + if err != nil { + if models.IsErrPullRequestNotExist(err) { + ctx.NotFound("GetPullRequestByIndex", err) + } else { + ctx.ServerError("GetPullRequestByIndex", err) + } + return + } + + if err := pull_service.SetAllowEdits(ctx, ctx.Doer, pr, form.AllowMaintainerEdit); err != nil { + if errors.Is(pull_service.ErrUserHasNoPermissionForAction, err) { + ctx.Error(http.StatusForbidden) + return + } + ctx.ServerError("SetAllowEdits", err) + return + } + + ctx.JSON(http.StatusOK, map[string]interface{}{ + "allow_maintainer_edit": pr.AllowMaintainerEdit, + }) +} diff --git a/routers/web/repo/pull_review.go b/routers/web/repo/pull_review.go index c92f08a88a..98272ed48d 100644 --- a/routers/web/repo/pull_review.go +++ b/routers/web/repo/pull_review.go @@ -9,8 +9,10 @@ import ( "net/http" "code.gitea.io/gitea/models" + pull_model "code.gitea.io/gitea/models/pull" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/json" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web" @@ -29,7 +31,7 @@ func RenderNewCodeCommentForm(ctx *context.Context) { if !issue.IsPull { return } - currentReview, err := models.GetCurrentReview(ctx.User, issue) + currentReview, err := models.GetCurrentReview(ctx.Doer, issue) if err != nil && !models.IsErrReviewNotExist(err) { ctx.ServerError("GetCurrentReview", err) return @@ -69,7 +71,7 @@ func CreateCodeComment(ctx *context.Context) { } comment, err := pull_service.CreateCodeComment(ctx, - ctx.User, + ctx.Doer, ctx.Repo.GitRepo, issue, signedLine, @@ -117,7 +119,7 @@ func UpdateResolveConversation(ctx *context.Context) { } var permResult bool - if permResult, err = models.CanMarkConversation(comment.Issue, ctx.User); err != nil { + if permResult, err = models.CanMarkConversation(comment.Issue, ctx.Doer); err != nil { ctx.ServerError("CanMarkConversation", err) return } @@ -132,7 +134,7 @@ func UpdateResolveConversation(ctx *context.Context) { } if action == "Resolve" || action == "UnResolve" { - err = models.MarkConversation(comment, ctx.User, action == "Resolve") + err = models.MarkConversation(comment, ctx.Doer, action == "Resolve") if err != nil { ctx.ServerError("MarkConversation", err) return @@ -152,7 +154,7 @@ func UpdateResolveConversation(ctx *context.Context) { } func renderConversation(ctx *context.Context, comment *models.Comment) { - comments, err := models.FetchCodeCommentsByLine(ctx, comment.Issue, ctx.User, comment.TreePath, comment.Line) + 
comments, err := models.FetchCodeCommentsByLine(ctx, comment.Issue, ctx.Doer, comment.TreePath, comment.Line) if err != nil { ctx.ServerError("FetchCodeCommentsByLine", err) return @@ -198,7 +200,7 @@ func SubmitReview(ctx *context.Context) { // can not approve/reject your own PR case models.ReviewTypeApprove, models.ReviewTypeReject: - if issue.IsPoster(ctx.User.ID) { + if issue.IsPoster(ctx.Doer.ID) { var translated string if reviewType == models.ReviewTypeApprove { translated = ctx.Tr("repo.issues.review.self.approval") @@ -217,7 +219,7 @@ func SubmitReview(ctx *context.Context) { attachments = form.Files } - _, comm, err := pull_service.SubmitReview(ctx, ctx.User, ctx.Repo.GitRepo, issue, reviewType, form.Content, form.CommitID, attachments) + _, comm, err := pull_service.SubmitReview(ctx, ctx.Doer, ctx.Repo.GitRepo, issue, reviewType, form.Content, form.CommitID, attachments) if err != nil { if models.IsContentEmptyErr(err) { ctx.Flash.Error(ctx.Tr("repo.issues.review.content.empty")) @@ -234,7 +236,7 @@ func SubmitReview(ctx *context.Context) { // DismissReview dismissing stale review by repo admin func DismissReview(ctx *context.Context) { form := web.GetForm(ctx).(*forms.DismissReviewForm) - comm, err := pull_service.DismissReview(ctx, form.ReviewID, form.Message, ctx.User, true) + comm, err := pull_service.DismissReview(ctx, form.ReviewID, form.Message, ctx.Doer, true) if err != nil { ctx.ServerError("pull_service.DismissReview", err) return @@ -242,3 +244,47 @@ func DismissReview(ctx *context.Context) { ctx.Redirect(fmt.Sprintf("%s/pulls/%d#%s", ctx.Repo.RepoLink, comm.Issue.Index, comm.HashTag())) } + +// viewedFilesUpdate Struct to parse the body of a request to update the reviewed files of a PR +// If you want to implement an API to update the review, simply move this struct into modules. +type viewedFilesUpdate struct { + Files map[string]bool `json:"files"` + HeadCommitSHA string `json:"headCommitSHA"` +} + +func UpdateViewedFiles(ctx *context.Context) { + // Find corresponding PR + issue := checkPullInfo(ctx) + if ctx.Written() { + return + } + pull := issue.PullRequest + + var data *viewedFilesUpdate + err := json.NewDecoder(ctx.Req.Body).Decode(&data) + if err != nil { + log.Warn("Attempted to update a review but could not parse request body: %v", err) + ctx.Resp.WriteHeader(http.StatusBadRequest) + return + } + + // Expect the review to have been now if no head commit was supplied + if data.HeadCommitSHA == "" { + data.HeadCommitSHA = pull.HeadCommitID + } + + updatedFiles := make(map[string]pull_model.ViewedState, len(data.Files)) + for file, viewed := range data.Files { + + // Only unviewed and viewed are possible, has-changed can not be set from the outside + state := pull_model.Unviewed + if viewed { + state = pull_model.Viewed + } + updatedFiles[file] = state + } + + if err := pull_model.UpdateReviewState(ctx, ctx.Doer.ID, pull.ID, data.HeadCommitSHA, updatedFiles); err != nil { + ctx.ServerError("UpdateReview", err) + } +} diff --git a/routers/web/repo/release.go b/routers/web/repo/release.go index 4f58ac9546..5f894ae501 100644 --- a/routers/web/repo/release.go +++ b/routers/web/repo/release.go @@ -134,8 +134,8 @@ func releasesOrTags(ctx *context.Context, isTagList bool) { // Temporary cache commits count of used branches to speed up. 
countCache := make(map[string]int64) cacheUsers := make(map[int64]*user_model.User) - if ctx.User != nil { - cacheUsers[ctx.User.ID] = ctx.User + if ctx.Doer != nil { + cacheUsers[ctx.Doer.ID] = ctx.Doer } var ok bool @@ -325,7 +325,7 @@ func NewReleasePost(ctx *context.Context) { } if len(form.TagOnly) > 0 { - if err = releaseservice.CreateNewTag(ctx, ctx.User, ctx.Repo.Repository, form.Target, form.TagName, msg); err != nil { + if err = releaseservice.CreateNewTag(ctx, ctx.Doer, ctx.Repo.Repository, form.Target, form.TagName, msg); err != nil { if models.IsErrTagAlreadyExists(err) { e := err.(models.ErrTagAlreadyExists) ctx.Flash.Error(ctx.Tr("repo.branch.tag_collision", e.TagName)) @@ -357,8 +357,8 @@ func NewReleasePost(ctx *context.Context) { rel = &models.Release{ RepoID: ctx.Repo.Repository.ID, Repo: ctx.Repo.Repository, - PublisherID: ctx.User.ID, - Publisher: ctx.User, + PublisherID: ctx.Doer.ID, + Publisher: ctx.Doer, Title: form.Title, TagName: form.TagName, Target: form.Target, @@ -394,16 +394,16 @@ func NewReleasePost(ctx *context.Context) { rel.Target = form.Target rel.IsDraft = len(form.Draft) > 0 rel.IsPrerelease = form.Prerelease - rel.PublisherID = ctx.User.ID + rel.PublisherID = ctx.Doer.ID rel.IsTag = false - if err = releaseservice.UpdateRelease(ctx.User, ctx.Repo.GitRepo, rel, attachmentUUIDs, nil, nil); err != nil { + if err = releaseservice.UpdateRelease(ctx.Doer, ctx.Repo.GitRepo, rel, attachmentUUIDs, nil, nil); err != nil { ctx.Data["Err_TagName"] = true ctx.ServerError("UpdateRelease", err) return } } - log.Trace("Release created: %s/%s:%s", ctx.User.LowerName, ctx.Repo.Repository.Name, form.TagName) + log.Trace("Release created: %s/%s:%s", ctx.Doer.LowerName, ctx.Repo.Repository.Name, form.TagName) ctx.Redirect(ctx.Repo.RepoLink + "/releases") } @@ -497,7 +497,7 @@ func EditReleasePost(ctx *context.Context) { rel.Note = form.Content rel.IsDraft = len(form.Draft) > 0 rel.IsPrerelease = form.Prerelease - if err = releaseservice.UpdateRelease(ctx.User, ctx.Repo.GitRepo, + if err = releaseservice.UpdateRelease(ctx.Doer, ctx.Repo.GitRepo, rel, addAttachmentUUIDs, delAttachmentUUIDs, editAttachments); err != nil { ctx.ServerError("UpdateRelease", err) return @@ -516,7 +516,7 @@ func DeleteTag(ctx *context.Context) { } func deleteReleaseOrTag(ctx *context.Context, isDelTag bool) { - if err := releaseservice.DeleteReleaseByID(ctx, ctx.FormInt64("id"), ctx.User, isDelTag); err != nil { + if err := releaseservice.DeleteReleaseByID(ctx, ctx.FormInt64("id"), ctx.Doer, isDelTag); err != nil { ctx.Flash.Error("DeleteReleaseByID: " + err.Error()) } else { if isDelTag { diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go index 89ebef3a59..199651b2f1 100644 --- a/routers/web/repo/repo.go +++ b/routers/web/repo/repo.go @@ -14,15 +14,20 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/forms" repo_service 
"code.gitea.io/gitea/services/repository" @@ -57,14 +62,14 @@ func MustBeAbleToUpload(ctx *context.Context) { } func checkContextUser(ctx *context.Context, uid int64) *user_model.User { - orgs, err := models.GetOrgsCanCreateRepoByUserID(ctx.User.ID) + orgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx.Doer.ID) if err != nil { ctx.ServerError("GetOrgsCanCreateRepoByUserID", err) return nil } - if !ctx.User.IsAdmin { - orgsAvailable := []*models.Organization{} + if !ctx.Doer.IsAdmin { + orgsAvailable := []*organization.Organization{} for i := 0; i < len(orgs); i++ { if orgs[i].CanCreateRepo() { orgsAvailable = append(orgsAvailable, orgs[i]) @@ -76,13 +81,13 @@ func checkContextUser(ctx *context.Context, uid int64) *user_model.User { } // Not equal means current user is an organization. - if uid == ctx.User.ID || uid == 0 { - return ctx.User + if uid == ctx.Doer.ID || uid == 0 { + return ctx.Doer } org, err := user_model.GetUserByID(uid) if user_model.IsErrUserNotExist(err) { - return ctx.User + return ctx.Doer } if err != nil { @@ -95,8 +100,8 @@ func checkContextUser(ctx *context.Context, uid int64) *user_model.User { ctx.Error(http.StatusForbidden) return nil } - if !ctx.User.IsAdmin { - canCreate, err := models.OrgFromUser(org).CanCreateOrgRepo(ctx.User.ID) + if !ctx.Doer.IsAdmin { + canCreate, err := organization.OrgFromUser(org).CanCreateOrgRepo(ctx.Doer.ID) if err != nil { ctx.ServerError("CanCreateOrgRepo", err) return nil @@ -113,13 +118,13 @@ func checkContextUser(ctx *context.Context, uid int64) *user_model.User { func getRepoPrivate(ctx *context.Context) bool { switch strings.ToLower(setting.Repository.DefaultPrivate) { case setting.RepoCreatingLastUserVisibility: - return ctx.User.LastRepoVisibility + return ctx.Doer.LastRepoVisibility case setting.RepoCreatingPrivate: return true case setting.RepoCreatingPublic: return false default: - return ctx.User.LastRepoVisibility + return ctx.Doer.LastRepoVisibility } } @@ -128,10 +133,10 @@ func Create(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("new_repo") // Give default value for template to render. 
- ctx.Data["Gitignores"] = models.Gitignores - ctx.Data["LabelTemplates"] = models.LabelTemplates - ctx.Data["Licenses"] = models.Licenses - ctx.Data["Readmes"] = models.Readmes + ctx.Data["Gitignores"] = repo_module.Gitignores + ctx.Data["LabelTemplates"] = repo_module.LabelTemplates + ctx.Data["Licenses"] = repo_module.Licenses + ctx.Data["Readmes"] = repo_module.Readmes ctx.Data["readme"] = "Default" ctx.Data["private"] = getRepoPrivate(ctx) ctx.Data["IsForcedPrivate"] = setting.Repository.ForcePrivate @@ -153,8 +158,8 @@ func Create(ctx *context.Context) { } } - ctx.Data["CanCreateRepo"] = ctx.User.CanCreateRepo() - ctx.Data["MaxCreationLimit"] = ctx.User.MaxCreationLimit() + ctx.Data["CanCreateRepo"] = ctx.Doer.CanCreateRepo() + ctx.Data["MaxCreationLimit"] = ctx.Doer.MaxCreationLimit() ctx.HTML(http.StatusOK, tplCreate) } @@ -196,13 +201,13 @@ func CreatePost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.CreateRepoForm) ctx.Data["Title"] = ctx.Tr("new_repo") - ctx.Data["Gitignores"] = models.Gitignores - ctx.Data["LabelTemplates"] = models.LabelTemplates - ctx.Data["Licenses"] = models.Licenses - ctx.Data["Readmes"] = models.Readmes + ctx.Data["Gitignores"] = repo_module.Gitignores + ctx.Data["LabelTemplates"] = repo_module.LabelTemplates + ctx.Data["Licenses"] = repo_module.Licenses + ctx.Data["Readmes"] = repo_module.Readmes - ctx.Data["CanCreateRepo"] = ctx.User.CanCreateRepo() - ctx.Data["MaxCreationLimit"] = ctx.User.MaxCreationLimit() + ctx.Data["CanCreateRepo"] = ctx.Doer.CanCreateRepo() + ctx.Data["MaxCreationLimit"] = ctx.Doer.MaxCreationLimit() ctxUser := checkContextUser(ctx, form.UID) if ctx.Written() { @@ -245,14 +250,14 @@ func CreatePost(ctx *context.Context) { return } - repo, err = repo_service.GenerateRepository(ctx.User, ctxUser, templateRepo, opts) + repo, err = repo_service.GenerateRepository(ctx.Doer, ctxUser, templateRepo, opts) if err == nil { log.Trace("Repository generated [%d]: %s/%s", repo.ID, ctxUser.Name, repo.Name) ctx.Redirect(repo.Link()) return } } else { - repo, err = repo_service.CreateRepository(ctx.User, ctxUser, models.CreateRepoOptions{ + repo, err = repo_service.CreateRepository(ctx.Doer, ctxUser, models.CreateRepoOptions{ Name: form.RepoName, Description: form.Description, Gitignores: form.Gitignores, @@ -280,13 +285,13 @@ func Action(ctx *context.Context) { var err error switch ctx.Params(":action") { case "watch": - err = repo_model.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, true) + err = repo_model.WatchRepo(ctx.Doer.ID, ctx.Repo.Repository.ID, true) case "unwatch": - err = repo_model.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, false) + err = repo_model.WatchRepo(ctx.Doer.ID, ctx.Repo.Repository.ID, false) case "star": - err = repo_model.StarRepo(ctx.User.ID, ctx.Repo.Repository.ID, true) + err = repo_model.StarRepo(ctx.Doer.ID, ctx.Repo.Repository.ID, true) case "unstar": - err = repo_model.StarRepo(ctx.User.ID, ctx.Repo.Repository.ID, false) + err = repo_model.StarRepo(ctx.Doer.ID, ctx.Repo.Repository.ID, false) case "accept_transfer": err = acceptOrRejectRepoTransfer(ctx, true) case "reject_transfer": @@ -320,7 +325,7 @@ func acceptOrRejectRepoTransfer(ctx *context.Context, accept bool) error { return err } - if !repoTransfer.CanUserAcceptTransfer(ctx.User) { + if !repoTransfer.CanUserAcceptTransfer(ctx.Doer) { return errors.New("user does not have enough permissions") } @@ -353,7 +358,7 @@ func RedirectDownload(ctx *context.Context) { ) tagNames := []string{vTag} curRepo := ctx.Repo.Repository - releases, err := 
models.GetReleasesByRepoIDAndNames(db.DefaultContext, curRepo.ID, tagNames) + releases, err := models.GetReleasesByRepoIDAndNames(ctx, curRepo.ID, tagNames) if err != nil { if repo_model.IsErrAttachmentNotExist(err) { ctx.Error(http.StatusNotFound) @@ -394,7 +399,7 @@ func Download(ctx *context.Context) { return } - archiver, err := repo_model.GetRepoArchiver(db.DefaultContext, aReq.RepoID, aReq.Type, aReq.CommitID) + archiver, err := repo_model.GetRepoArchiver(ctx, aReq.RepoID, aReq.Type, aReq.CommitID) if err != nil { ctx.ServerError("models.GetRepoArchiver", err) return @@ -424,7 +429,7 @@ func Download(ctx *context.Context) { return } times++ - archiver, err = repo_model.GetRepoArchiver(db.DefaultContext, aReq.RepoID, aReq.Type, aReq.CommitID) + archiver, err = repo_model.GetRepoArchiver(ctx, aReq.RepoID, aReq.Type, aReq.CommitID) if err != nil { ctx.ServerError("archiver_service.StartArchive", err) return @@ -480,7 +485,7 @@ func InitiateDownload(ctx *context.Context) { return } - archiver, err := repo_model.GetRepoArchiver(db.DefaultContext, aReq.RepoID, aReq.Type, aReq.CommitID) + archiver, err := repo_model.GetRepoArchiver(ctx, aReq.RepoID, aReq.Type, aReq.CommitID) if err != nil { ctx.ServerError("archiver_service.StartArchive", err) return @@ -501,3 +506,114 @@ func InitiateDownload(ctx *context.Context) { "complete": completed, }) } + +// SearchRepo repositories via options +func SearchRepo(ctx *context.Context) { + opts := &models.SearchRepoOptions{ + ListOptions: db.ListOptions{ + Page: ctx.FormInt("page"), + PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), + }, + Actor: ctx.Doer, + Keyword: ctx.FormTrim("q"), + OwnerID: ctx.FormInt64("uid"), + PriorityOwnerID: ctx.FormInt64("priority_owner_id"), + TeamID: ctx.FormInt64("team_id"), + TopicOnly: ctx.FormBool("topic"), + Collaborate: util.OptionalBoolNone, + Private: ctx.IsSigned && (ctx.FormString("private") == "" || ctx.FormBool("private")), + Template: util.OptionalBoolNone, + StarredByID: ctx.FormInt64("starredBy"), + IncludeDescription: ctx.FormBool("includeDesc"), + } + + if ctx.FormString("template") != "" { + opts.Template = util.OptionalBoolOf(ctx.FormBool("template")) + } + + if ctx.FormBool("exclusive") { + opts.Collaborate = util.OptionalBoolFalse + } + + mode := ctx.FormString("mode") + switch mode { + case "source": + opts.Fork = util.OptionalBoolFalse + opts.Mirror = util.OptionalBoolFalse + case "fork": + opts.Fork = util.OptionalBoolTrue + case "mirror": + opts.Mirror = util.OptionalBoolTrue + case "collaborative": + opts.Mirror = util.OptionalBoolFalse + opts.Collaborate = util.OptionalBoolTrue + case "": + default: + ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid search mode: \"%s\"", mode)) + return + } + + if ctx.FormString("archived") != "" { + opts.Archived = util.OptionalBoolOf(ctx.FormBool("archived")) + } + + if ctx.FormString("is_private") != "" { + opts.IsPrivate = util.OptionalBoolOf(ctx.FormBool("is_private")) + } + + sortMode := ctx.FormString("sort") + if len(sortMode) > 0 { + sortOrder := ctx.FormString("order") + if len(sortOrder) == 0 { + sortOrder = "asc" + } + if searchModeMap, ok := context.SearchOrderByMap[sortOrder]; ok { + if orderBy, ok := searchModeMap[sortMode]; ok { + opts.OrderBy = orderBy + } else { + ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid sort mode: \"%s\"", sortMode)) + return + } + } else { + ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid sort order: \"%s\"", sortOrder)) + return + } + } + + var err error + repos, 
count, err := models.SearchRepository(opts) + if err != nil { + ctx.JSON(http.StatusInternalServerError, api.SearchError{ + OK: false, + Error: err.Error(), + }) + return + } + + ctx.SetTotalCountHeader(count) + + // To improve performance when only the count is requested + if ctx.FormBool("count_only") { + return + } + + results := make([]*api.Repository, len(repos)) + for i, repo := range repos { + results[i] = &api.Repository{ + ID: repo.ID, + FullName: repo.FullName(), + Fork: repo.IsFork, + Private: repo.IsPrivate, + Template: repo.IsTemplate, + Mirror: repo.IsMirror, + Stars: repo.NumStars, + HTMLURL: repo.HTMLURL(), + Internal: !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePrivate, + } + } + + ctx.JSON(http.StatusOK, api.SearchResults{ + OK: true, + Data: results, + }) +} diff --git a/routers/web/repo/search.go b/routers/web/repo/search.go index e33fe38dea..8f141cb149 100644 --- a/routers/web/repo/search.go +++ b/routers/web/repo/search.go @@ -18,7 +18,7 @@ const tplSearch base.TplName = "repo/search" // Search render repository search page func Search(ctx *context.Context) { if !setting.Indexer.RepoIndexerEnabled { - ctx.Redirect(ctx.Repo.RepoLink, 302) + ctx.Redirect(ctx.Repo.RepoLink) return } language := ctx.FormTrim("l") @@ -47,7 +47,6 @@ func Search(ctx *context.Context) { ctx.Data["SourcePath"] = ctx.Repo.Repository.HTMLURL() ctx.Data["SearchResults"] = searchResults ctx.Data["SearchResultLanguages"] = searchResultLanguages - ctx.Data["RequireHighlightJS"] = true ctx.Data["PageIsViewCode"] = true pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5) diff --git a/routers/web/repo/setting.go b/routers/web/repo/setting.go index 5f7b948b8c..ccb603833c 100644 --- a/routers/web/repo/setting.go +++ b/routers/web/repo/setting.go @@ -17,6 +17,7 @@ import ( "code.gitea.io/gitea/models" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" unit_model "code.gitea.io/gitea/models/unit" @@ -31,7 +32,6 @@ import ( "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/typesniffer" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/validation" @@ -70,7 +70,7 @@ func Settings(ctx *context.Context) { ctx.Data["SigningKeyAvailable"] = len(signing) > 0 ctx.Data["SigningSettings"] = setting.Repository.Signing ctx.Data["CodeIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled - if ctx.User.IsAdmin { + if ctx.Doer.IsAdmin { if setting.Indexer.RepoIndexerEnabled { status, err := repo_model.GetIndexerStatus(ctx.Repo.Repository, repo_model.RepoIndexerTypeCode) if err != nil { @@ -119,7 +119,7 @@ func SettingsPost(ctx *context.Context) { ctx.Repo.GitRepo.Close() ctx.Repo.GitRepo = nil } - if err := repo_service.ChangeRepositoryName(ctx.User, repo, newRepoName); err != nil { + if err := repo_service.ChangeRepositoryName(ctx.Doer, repo, newRepoName); err != nil { ctx.Data["Err_RepoName"] = true switch { case repo_model.IsErrRepoAlreadyExist(err): @@ -162,7 +162,7 @@ func SettingsPost(ctx *context.Context) { visibilityChanged := repo.IsPrivate != form.Private // when ForcePrivate enabled, you could change public repo to private, but only admin users can change private to public - if visibilityChanged && setting.Repository.ForcePrivate && !form.Private && !ctx.User.IsAdmin { + 
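For context on the new web SearchRepo handler above: it reads its filters from form values such as q, uid, team_id, mode and sort, and answers with the same envelope as Gitea's API repository search, an "ok" flag plus a "data" array, filling only the subset of repository fields listed in the result loop. A small client-side decoding sketch; the JSON key names are assumptions based on that envelope:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // searchResults mirrors the api.SearchResults envelope used above; the exact
    // JSON keys ("ok", "data") are assumed from Gitea's search responses.
    type searchResults struct {
        OK   bool              `json:"ok"`
        Data []json.RawMessage `json:"data"`
    }

    func main() {
        // Trimmed example response; real entries carry the fields set in the handler
        // (id, full name, fork/private/template/mirror flags, star count, HTML URL, internal).
        payload := []byte(`{"ok":true,"data":[{"id":1,"full_name":"org/repo"}]}`)

        var res searchResults
        if err := json.Unmarshal(payload, &res); err != nil {
            panic(err)
        }
        fmt.Printf("ok=%v, %d repositories\n", res.OK, len(res.Data))
    }

Passing count_only=true makes the handler return right after setting the total-count header, skipping result serialization entirely.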
if visibilityChanged && setting.Repository.ForcePrivate && !form.Private && !ctx.Doer.IsAdmin { ctx.RenderWithErr(ctx.Tr("form.repository_force_private"), tplSettingsOptions, form) return } @@ -194,11 +194,7 @@ func SettingsPost(ctx *context.Context) { } else { ctx.Repo.Mirror.EnablePrune = form.EnablePrune ctx.Repo.Mirror.Interval = interval - if interval != 0 { - ctx.Repo.Mirror.NextUpdateUnix = timeutil.TimeStampNow().AddDuration(interval) - } else { - ctx.Repo.Mirror.NextUpdateUnix = 0 - } + ctx.Repo.Mirror.ScheduleNextUpdate() if err := repo_model.UpdateMirror(ctx.Repo.Mirror); err != nil { ctx.Data["Err_Interval"] = true ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &form) @@ -213,7 +209,7 @@ func SettingsPost(ctx *context.Context) { address, err := forms.ParseRemoteAddr(form.MirrorAddress, form.MirrorUsername, form.MirrorPassword) if err == nil { - err = migrations.IsMigrateURLAllowed(address, ctx.User) + err = migrations.IsMigrateURLAllowed(address, ctx.Doer) } if err != nil { ctx.Data["Err_MirrorAddress"] = true @@ -235,7 +231,7 @@ func SettingsPost(ctx *context.Context) { ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_lfs_endpoint"), tplSettingsOptions, &form) return } - err = migrations.IsMigrateURLAllowed(ep.String(), ctx.User) + err = migrations.IsMigrateURLAllowed(ep.String(), ctx.Doer) if err != nil { ctx.Data["Err_LFSEndpoint"] = true handleSettingRemoteAddrError(ctx, err, form) @@ -329,7 +325,7 @@ func SettingsPost(ctx *context.Context) { address, err := forms.ParseRemoteAddr(form.PushMirrorAddress, form.PushMirrorUsername, form.PushMirrorPassword) if err == nil { - err = migrations.IsMigrateURLAllowed(address, ctx.User) + err = migrations.IsMigrateURLAllowed(address, ctx.Doer) } if err != nil { ctx.Data["Err_PushMirrorAddress"] = true @@ -460,6 +456,15 @@ func SettingsPost(ctx *context.Context) { deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeProjects) } + if form.EnablePackages && !unit_model.TypeProjects.UnitGlobalDisabled() { + units = append(units, repo_model.RepoUnit{ + RepoID: repo.ID, + Type: unit_model.TypePackages, + }) + } else if !unit_model.TypePackages.UnitGlobalDisabled() { + deleteUnitTypes = append(deleteUnitTypes, unit_model.TypePackages) + } + if form.EnablePulls && !unit_model.TypePullRequests.UnitGlobalDisabled() { units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, @@ -516,7 +521,7 @@ func SettingsPost(ctx *context.Context) { ctx.Redirect(ctx.Repo.RepoLink + "/settings") case "admin": - if !ctx.User.IsAdmin { + if !ctx.Doer.IsAdmin { ctx.Error(http.StatusForbidden) return } @@ -536,7 +541,7 @@ func SettingsPost(ctx *context.Context) { ctx.Redirect(ctx.Repo.RepoLink + "/settings") case "admin_index": - if !ctx.User.IsAdmin { + if !ctx.Doer.IsAdmin { ctx.Error(http.StatusForbidden) return } @@ -595,7 +600,7 @@ func SettingsPost(ctx *context.Context) { ctx.Error(http.StatusNotFound) return } - if err := repo.GetOwner(db.DefaultContext); err != nil { + if err := repo.GetOwner(ctx); err != nil { ctx.ServerError("Convert Fork", err) return } @@ -648,7 +653,7 @@ func SettingsPost(ctx *context.Context) { } if newOwner.Type == user_model.UserTypeOrganization { - if !ctx.User.IsAdmin && newOwner.Visibility == structs.VisibleTypePrivate && !models.OrgFromUser(newOwner).HasMemberWithUserID(ctx.User.ID) { + if !ctx.Doer.IsAdmin && newOwner.Visibility == structs.VisibleTypePrivate && !organization.OrgFromUser(newOwner).HasMemberWithUserID(ctx.Doer.ID) { // The user shouldn't know about this organization 
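The mirror-settings hunk above swaps the inline NextUpdateUnix arithmetic for a ScheduleNextUpdate() call on the mirror. Judging only from the removed lines, that method presumably wraps the same rule (next update = now + interval, or cleared when the interval is zero); a hedged sketch of that logic with plain time values instead of Gitea's timeutil and Mirror types:

    package main

    import (
        "fmt"
        "time"
    )

    // mirror is a stand-in for the real repo_model.Mirror; only the fields
    // relevant to scheduling are modelled here.
    type mirror struct {
        Interval   time.Duration
        NextUpdate time.Time
    }

    // scheduleNextUpdate mimics what the removed inline code did: schedule the
    // next sync Interval from now, or clear the schedule when Interval is zero.
    func (m *mirror) scheduleNextUpdate() {
        if m.Interval != 0 {
            m.NextUpdate = time.Now().Add(m.Interval)
        } else {
            m.NextUpdate = time.Time{}
        }
    }

    func main() {
        m := &mirror{Interval: 8 * time.Hour}
        m.scheduleNextUpdate()
        fmt.Println("next update:", m.NextUpdate)
    }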
ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_owner_name"), tplSettingsOptions, nil) return @@ -661,7 +666,7 @@ func SettingsPost(ctx *context.Context) { ctx.Repo.GitRepo = nil } - if err := repo_service.StartRepositoryTransfer(ctx.User, newOwner, repo, nil); err != nil { + if err := repo_service.StartRepositoryTransfer(ctx.Doer, newOwner, repo, nil); err != nil { if repo_model.IsErrRepoAlreadyExist(err) { ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplSettingsOptions, nil) } else if models.IsErrRepoTransferInProgress(err) { @@ -724,7 +729,7 @@ func SettingsPost(ctx *context.Context) { ctx.Repo.GitRepo.Close() } - if err := repo_service.DeleteRepository(ctx, ctx.User, ctx.Repo.Repository, true); err != nil { + if err := repo_service.DeleteRepository(ctx, ctx.Doer, ctx.Repo.Repository, true); err != nil { ctx.ServerError("DeleteRepository", err) return } @@ -835,7 +840,7 @@ func Collaboration(ctx *context.Context) { } ctx.Data["Collaborators"] = users - teams, err := models.GetRepoTeams(ctx.Repo.Repository) + teams, err := organization.GetRepoTeams(ctx.Repo.Repository) if err != nil { ctx.ServerError("GetRepoTeams", err) return @@ -894,7 +899,7 @@ func CollaborationPost(ctx *context.Context) { } if setting.Service.EnableNotifyMail { - mailer.SendCollaboratorMail(u, ctx.User, ctx.Repo.Repository) + mailer.SendCollaboratorMail(u, ctx.Doer, ctx.Repo.Repository) } ctx.Flash.Success(ctx.Tr("repo.settings.add_collaborator_success")) @@ -938,9 +943,9 @@ func AddTeamPost(ctx *context.Context) { return } - team, err := models.OrgFromUser(ctx.Repo.Owner).GetTeam(name) + team, err := organization.OrgFromUser(ctx.Repo.Owner).GetTeam(name) if err != nil { - if models.IsErrTeamNotExist(err) { + if organization.IsErrTeamNotExist(err) { ctx.Flash.Error(ctx.Tr("form.team_not_exist")) ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration") } else { @@ -955,13 +960,13 @@ func AddTeamPost(ctx *context.Context) { return } - if models.HasTeamRepo(ctx.Repo.Repository.OwnerID, team.ID, ctx.Repo.Repository.ID) { + if organization.HasTeamRepo(ctx, ctx.Repo.Repository.OwnerID, team.ID, ctx.Repo.Repository.ID) { ctx.Flash.Error(ctx.Tr("repo.settings.add_team_duplicate")) ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration") return } - if err = team.AddRepository(ctx.Repo.Repository); err != nil { + if err = models.AddRepository(team, ctx.Repo.Repository); err != nil { ctx.ServerError("team.AddRepository", err) return } @@ -978,13 +983,13 @@ func DeleteTeam(ctx *context.Context) { return } - team, err := models.GetTeamByID(ctx.FormInt64("id")) + team, err := organization.GetTeamByID(ctx.FormInt64("id")) if err != nil { ctx.ServerError("GetTeamByID", err) return } - if err = team.RemoveRepository(ctx.Repo.Repository.ID); err != nil { + if err = models.RemoveRepository(team, ctx.Repo.Repository.ID); err != nil { ctx.ServerError("team.RemoveRepositorys", err) return } @@ -1055,7 +1060,7 @@ func DeployKeys(ctx *context.Context) { ctx.Data["PageIsSettingsKeys"] = true ctx.Data["DisableSSH"] = setting.SSH.Disabled - keys, err := asymkey_model.ListDeployKeys(db.DefaultContext, &asymkey_model.ListDeployKeysOptions{RepoID: ctx.Repo.Repository.ID}) + keys, err := asymkey_model.ListDeployKeys(ctx, &asymkey_model.ListDeployKeysOptions{RepoID: ctx.Repo.Repository.ID}) if err != nil { ctx.ServerError("ListDeployKeys", err) return @@ -1070,8 +1075,9 @@ func DeployKeysPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.AddKeyForm) ctx.Data["Title"] = ctx.Tr("repo.settings.deploy_keys") 
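A pattern worth calling out in the hunks above (ListDeployKeys, GetRepoArchiver, GetOwner, and the webhook changes later in this patch) is threading the request's ctx into model calls instead of db.DefaultContext, so cancellation and any per-request database session reach the storage layer. A generic illustration of the idea using database/sql rather than Gitea's own db package:

    package main

    import (
        "context"
        "database/sql"
        "fmt"
        "time"
    )

    // listDeployKeys shows the pattern: accept the caller's context and hand it to
    // every query, instead of reaching for a package-level default context.
    func listDeployKeys(ctx context.Context, db *sql.DB, repoID int64) ([]string, error) {
        rows, err := db.QueryContext(ctx, "SELECT name FROM deploy_key WHERE repo_id = ?", repoID)
        if err != nil {
            return nil, err
        }
        defer rows.Close()

        var names []string
        for rows.Next() {
            var name string
            if err := rows.Scan(&name); err != nil {
                return nil, err
            }
            names = append(names, name)
        }
        return names, rows.Err()
    }

    func main() {
        // In a handler this would be the request's context; cancelling it aborts the query.
        ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
        defer cancel()
        _ = ctx // a real caller would run: keys, err := listDeployKeys(ctx, db, repoID)
        fmt.Println("sketch only: no database is opened here")
    }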
ctx.Data["PageIsSettingsKeys"] = true + ctx.Data["DisableSSH"] = setting.SSH.Disabled - keys, err := asymkey_model.ListDeployKeys(db.DefaultContext, &asymkey_model.ListDeployKeysOptions{RepoID: ctx.Repo.Repository.ID}) + keys, err := asymkey_model.ListDeployKeys(ctx, &asymkey_model.ListDeployKeysOptions{RepoID: ctx.Repo.Repository.ID}) if err != nil { ctx.ServerError("ListDeployKeys", err) return @@ -1127,7 +1133,7 @@ func DeployKeysPost(ctx *context.Context) { // DeleteDeployKey response for deleting a deploy key func DeleteDeployKey(ctx *context.Context) { - if err := asymkey_service.DeleteDeployKey(ctx.User, ctx.FormInt64("id")); err != nil { + if err := asymkey_service.DeleteDeployKey(ctx.Doer, ctx.FormInt64("id")); err != nil { ctx.Flash.Error("DeleteDeployKey: " + err.Error()) } else { ctx.Flash.Success(ctx.Tr("repo.settings.deploy_key_deletion_success")) diff --git a/routers/web/repo/setting_protected_branch.go b/routers/web/repo/setting_protected_branch.go index 1435b820ad..1f6e2316e7 100644 --- a/routers/web/repo/setting_protected_branch.go +++ b/routers/web/repo/setting_protected_branch.go @@ -11,6 +11,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/base" @@ -73,7 +74,7 @@ func ProtectedBranchPost(ctx *context.Context) { branch := ctx.FormString("branch") if !ctx.Repo.GitRepo.IsBranchExist(branch) { - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } else if repo.DefaultBranch != branch { repo.DefaultBranch = branch @@ -158,7 +159,7 @@ func SettingsProtectedBranch(c *context.Context) { } if c.Repo.Owner.IsOrganization() { - teams, err := models.OrgFromUser(c.Repo.Owner).TeamsWithAccessToRepo(c.Repo.Repository.ID, perm.AccessModeRead) + teams, err := organization.OrgFromUser(c.Repo.Owner).TeamsWithAccessToRepo(c.Repo.Repository.ID, perm.AccessModeRead) if err != nil { c.ServerError("Repo.Owner.TeamsWithAccessToRepo", err) return @@ -260,7 +261,7 @@ func SettingsProtectedBranchPost(ctx *context.Context) { protectBranch.UnprotectedFilePatterns = f.UnprotectedFilePatterns protectBranch.BlockOnOutdatedBranch = f.BlockOnOutdatedBranch - err = models.UpdateProtectBranch(ctx.Repo.Repository, protectBranch, models.WhitelistOptions{ + err = models.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, models.WhitelistOptions{ UserIDs: whitelistUsers, TeamIDs: whitelistTeams, MergeUserIDs: mergeWhitelistUsers, @@ -305,7 +306,7 @@ func RenameBranchPost(ctx *context.Context) { return } - msg, err := repository.RenameBranch(ctx.Repo.Repository, ctx.User, ctx.Repo.GitRepo, form.From, form.To) + msg, err := repository.RenameBranch(ctx.Repo.Repository, ctx.Doer, ctx.Repo.GitRepo, form.From, form.To) if err != nil { ctx.ServerError("RenameBranch", err) return diff --git a/routers/web/repo/settings_test.go b/routers/web/repo/settings_test.go index bd29eca195..36d02de273 100644 --- a/routers/web/repo/settings_test.go +++ b/routers/web/repo/settings_test.go @@ -11,6 +11,7 @@ import ( "code.gitea.io/gitea/models" asymkey_model "code.gitea.io/gitea/models/asymkey" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" @@ -60,7 +61,7 @@ func TestAddReadOnlyDeployKey(t *testing.T) { } web.SetForm(ctx, &addKeyForm) DeployKeysPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, 
http.StatusSeeOther, ctx.Resp.Status()) unittest.AssertExistsAndLoadBean(t, &asymkey_model.DeployKey{ Name: addKeyForm.Title, @@ -90,7 +91,7 @@ func TestAddReadWriteOnlyDeployKey(t *testing.T) { } web.SetForm(ctx, &addKeyForm) DeployKeysPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) unittest.AssertExistsAndLoadBean(t, &asymkey_model.DeployKey{ Name: addKeyForm.Title, @@ -127,7 +128,7 @@ func TestCollaborationPost(t *testing.T) { CollaborationPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) exists, err := models.IsCollaborator(re.ID, 4) assert.NoError(t, err) @@ -153,7 +154,7 @@ func TestCollaborationPost_InactiveUser(t *testing.T) { CollaborationPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assert.NotEmpty(t, ctx.Flash.ErrorMsg) } @@ -185,7 +186,7 @@ func TestCollaborationPost_AddCollaboratorTwice(t *testing.T) { CollaborationPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) exists, err := models.IsCollaborator(re.ID, 4) assert.NoError(t, err) @@ -194,7 +195,7 @@ func TestCollaborationPost_AddCollaboratorTwice(t *testing.T) { // Try adding the same collaborator again CollaborationPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assert.NotEmpty(t, ctx.Flash.ErrorMsg) } @@ -216,7 +217,7 @@ func TestCollaborationPost_NonExistentUser(t *testing.T) { CollaborationPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assert.NotEmpty(t, ctx.Flash.ErrorMsg) } @@ -231,7 +232,7 @@ func TestAddTeamPost(t *testing.T) { Type: user_model.UserTypeOrganization, } - team := &models.Team{ + team := &organization.Team{ ID: 11, OrgID: 26, } @@ -255,8 +256,8 @@ func TestAddTeamPost(t *testing.T) { AddTeamPost(ctx) - assert.True(t, team.HasRepository(re.ID)) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.True(t, models.HasRepository(team, re.ID)) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assert.Empty(t, ctx.Flash.ErrorMsg) } @@ -271,7 +272,7 @@ func TestAddTeamPost_NotAllowed(t *testing.T) { Type: user_model.UserTypeOrganization, } - team := &models.Team{ + team := &organization.Team{ ID: 11, OrgID: 26, } @@ -295,8 +296,8 @@ func TestAddTeamPost_NotAllowed(t *testing.T) { AddTeamPost(ctx) - assert.False(t, team.HasRepository(re.ID)) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.False(t, models.HasRepository(team, re.ID)) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assert.NotEmpty(t, ctx.Flash.ErrorMsg) } @@ -311,7 +312,7 @@ func TestAddTeamPost_AddTeamTwice(t *testing.T) { Type: user_model.UserTypeOrganization, } - team := &models.Team{ + team := &organization.Team{ ID: 11, OrgID: 26, } @@ -336,8 +337,8 @@ func TestAddTeamPost_AddTeamTwice(t *testing.T) { AddTeamPost(ctx) AddTeamPost(ctx) - assert.True(t, team.HasRepository(re.ID)) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.True(t, models.HasRepository(team, re.ID)) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assert.NotEmpty(t, ctx.Flash.ErrorMsg) } @@ -370,7 +371,7 @@ func TestAddTeamPost_NonExistentTeam(t *testing.T) { 
ctx.Repo = repo AddTeamPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assert.NotEmpty(t, ctx.Flash.ErrorMsg) } @@ -385,7 +386,7 @@ func TestDeleteTeam(t *testing.T) { Type: user_model.UserTypeOrganization, } - team := &models.Team{ + team := &organization.Team{ ID: 2, OrgID: 3, } @@ -409,5 +410,5 @@ func TestDeleteTeam(t *testing.T) { DeleteTeam(ctx) - assert.False(t, team.HasRepository(re.ID)) + assert.False(t, models.HasRepository(team, re.ID)) } diff --git a/routers/web/repo/tag.go b/routers/web/repo/tag.go index 0114704f82..7da1e36c81 100644 --- a/routers/web/repo/tag.go +++ b/routers/web/repo/tag.go @@ -10,6 +10,7 @@ import ( "strings" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" @@ -150,7 +151,7 @@ func setTagsContext(ctx *context.Context) error { ctx.Data["Users"] = users if ctx.Repo.Owner.IsOrganization() { - teams, err := models.OrgFromUser(ctx.Repo.Owner).TeamsWithAccessToRepo(ctx.Repo.Repository.ID, perm.AccessModeRead) + teams, err := organization.OrgFromUser(ctx.Repo.Owner).TeamsWithAccessToRepo(ctx.Repo.Repository.ID, perm.AccessModeRead) if err != nil { ctx.ServerError("Repo.Owner.TeamsWithAccessToRepo", err) return err diff --git a/routers/web/repo/topic.go b/routers/web/repo/topic.go index a6a7ac6c8e..efbfc62d56 100644 --- a/routers/web/repo/topic.go +++ b/routers/web/repo/topic.go @@ -15,7 +15,7 @@ import ( // TopicsPost response for creating repository func TopicsPost(ctx *context.Context) { - if ctx.User == nil { + if ctx.Doer == nil { ctx.JSON(http.StatusForbidden, map[string]interface{}{ "message": "Only owners could change the topics.", }) diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go index 9ff72b2102..86fc36fad7 100644 --- a/routers/web/repo/view.go +++ b/routers/web/repo/view.go @@ -38,6 +38,7 @@ import ( "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/typesniffer" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/web/feed" ) const ( @@ -145,6 +146,21 @@ func renderDirectory(ctx *context.Context, treeLink string) { ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+path.Base(ctx.Repo.TreePath), ctx.Repo.RefName) } + // Check permission to add or upload new file. 
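The test expectations above change from 302 Found to 303 See Other because the web form handlers now redirect with 303, which obliges the client to follow up with a GET after a POST. A minimal net/http example of the behaviour being asserted (standard library only, not Gitea's test context):

    package main

    import (
        "fmt"
        "net/http"
        "net/http/httptest"
    )

    func main() {
        // A form handler that redirects after a successful POST.
        h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            // 303 guarantees the follow-up request is a GET, even though this one was a POST.
            http.Redirect(w, r, "/settings/keys", http.StatusSeeOther)
        })

        req := httptest.NewRequest(http.MethodPost, "/settings/keys", nil)
        rec := httptest.NewRecorder()
        h.ServeHTTP(rec, req)

        fmt.Println(rec.Code == http.StatusSeeOther) // true: 303, no longer 302
    }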
+ if ctx.Repo.CanWrite(unit_model.TypeCode) && ctx.Repo.IsViewBranch { + ctx.Data["CanAddFile"] = !ctx.Repo.Repository.IsArchived + ctx.Data["CanUploadFile"] = setting.Repository.Upload.Enabled && !ctx.Repo.Repository.IsArchived + } + + readmeFile, readmeTreelink := findReadmeFile(ctx, entries, treeLink) + if ctx.Written() || readmeFile == nil { + return + } + + renderReadmeFile(ctx, readmeFile, readmeTreelink) +} + +func findReadmeFile(ctx *context.Context, entries git.Entries, treeLink string) (*namedBlob, string) { // 3 for the extensions in exts[] in order // the last one is for a readme that doesn't // strictly match an extension @@ -182,7 +198,7 @@ func renderDirectory(ctx *context.Context, treeLink string) { target, err = entry.FollowLinks() if err != nil && !git.IsErrBadLink(err) { ctx.ServerError("FollowLinks", err) - return + return nil, "" } } log.Debug("%t", target == nil) @@ -204,7 +220,7 @@ func renderDirectory(ctx *context.Context, treeLink string) { entry, err = entry.FollowLinks() if err != nil && !git.IsErrBadLink(err) { ctx.ServerError("FollowLinks", err) - return + return nil, "" } } if entry != nil && (entry.IsExecutable() || entry.IsRegular()) { @@ -235,7 +251,7 @@ func renderDirectory(ctx *context.Context, treeLink string) { readmeFile, err = getReadmeFileFromPath(ctx.Repo.Commit, entry.GetSubJumpablePathName()) if err != nil { ctx.ServerError("getReadmeFileFromPath", err) - return + return nil, "" } if readmeFile != nil { readmeFile.name = entry.Name() + "/" + readmeFile.name @@ -244,129 +260,127 @@ func renderDirectory(ctx *context.Context, treeLink string) { } } } + return readmeFile, readmeTreelink +} - if readmeFile != nil { - ctx.Data["RawFileLink"] = "" - ctx.Data["ReadmeInList"] = true - ctx.Data["ReadmeExist"] = true - ctx.Data["FileIsSymlink"] = readmeFile.isSymlink +func renderReadmeFile(ctx *context.Context, readmeFile *namedBlob, readmeTreelink string) { + ctx.Data["RawFileLink"] = "" + ctx.Data["ReadmeInList"] = true + ctx.Data["ReadmeExist"] = true + ctx.Data["FileIsSymlink"] = readmeFile.isSymlink - dataRc, err := readmeFile.blob.DataAsync() - if err != nil { - ctx.ServerError("Data", err) - return - } - defer dataRc.Close() + dataRc, err := readmeFile.blob.DataAsync() + if err != nil { + ctx.ServerError("Data", err) + return + } + defer dataRc.Close() - buf := make([]byte, 1024) - n, _ := util.ReadAtMost(dataRc, buf) - buf = buf[:n] + buf := make([]byte, 1024) + n, _ := util.ReadAtMost(dataRc, buf) + buf = buf[:n] - st := typesniffer.DetectContentType(buf) - isTextFile := st.IsText() + st := typesniffer.DetectContentType(buf) + isTextFile := st.IsText() - ctx.Data["FileIsText"] = isTextFile - ctx.Data["FileName"] = readmeFile.name - fileSize := int64(0) - isLFSFile := false - ctx.Data["IsLFSFile"] = false + ctx.Data["FileIsText"] = isTextFile + ctx.Data["FileName"] = readmeFile.name + fileSize := int64(0) + isLFSFile := false + ctx.Data["IsLFSFile"] = false - // FIXME: what happens when README file is an image? - if isTextFile && setting.LFS.StartServer { - pointer, _ := lfs.ReadPointerFromBuffer(buf) - if pointer.IsValid() { - meta, err := models.GetLFSMetaObjectByOid(ctx.Repo.Repository.ID, pointer.Oid) - if err != nil && err != models.ErrLFSObjectNotExist { - ctx.ServerError("GetLFSMetaObject", err) + // FIXME: what happens when README file is an image? 
+ if isTextFile && setting.LFS.StartServer { + pointer, _ := lfs.ReadPointerFromBuffer(buf) + if pointer.IsValid() { + meta, err := models.GetLFSMetaObjectByOid(ctx.Repo.Repository.ID, pointer.Oid) + if err != nil && err != models.ErrLFSObjectNotExist { + ctx.ServerError("GetLFSMetaObject", err) + return + } + if meta != nil { + ctx.Data["IsLFSFile"] = true + isLFSFile = true + + // OK read the lfs object + var err error + dataRc, err = lfs.ReadMetaObject(pointer) + if err != nil { + ctx.ServerError("ReadMetaObject", err) return } - if meta != nil { - ctx.Data["IsLFSFile"] = true - isLFSFile = true + defer dataRc.Close() - // OK read the lfs object - var err error - dataRc, err = lfs.ReadMetaObject(pointer) - if err != nil { - ctx.ServerError("ReadMetaObject", err) - return - } - defer dataRc.Close() - - buf = make([]byte, 1024) - n, err = util.ReadAtMost(dataRc, buf) - if err != nil { - ctx.ServerError("Data", err) - return - } - buf = buf[:n] - - st = typesniffer.DetectContentType(buf) - isTextFile = st.IsText() - ctx.Data["IsTextFile"] = isTextFile - - fileSize = meta.Size - ctx.Data["FileSize"] = meta.Size - filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.name)) - ctx.Data["RawFileLink"] = fmt.Sprintf("%s.git/info/lfs/objects/%s/%s", ctx.Repo.Repository.HTMLURL(), url.PathEscape(meta.Oid), url.PathEscape(filenameBase64)) + buf = make([]byte, 1024) + n, err = util.ReadAtMost(dataRc, buf) + if err != nil { + ctx.ServerError("Data", err) + return } - } - } + buf = buf[:n] - if !isLFSFile { - fileSize = readmeFile.blob.Size() - } + st = typesniffer.DetectContentType(buf) + isTextFile = st.IsText() + ctx.Data["IsTextFile"] = isTextFile - if isTextFile { - if fileSize >= setting.UI.MaxDisplayFileSize { - // Pretend that this is a normal text file to display 'This file is too large to be shown' - ctx.Data["IsFileTooLarge"] = true - ctx.Data["IsTextFile"] = true - ctx.Data["FileSize"] = fileSize - } else { - rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc)) - - if markupType := markup.Type(readmeFile.name); markupType != "" { - ctx.Data["IsMarkup"] = true - ctx.Data["MarkupType"] = string(markupType) - var result strings.Builder - err := markup.Render(&markup.RenderContext{ - Ctx: ctx, - Filename: readmeFile.name, - URLPrefix: readmeTreelink, - Metas: ctx.Repo.Repository.ComposeDocumentMetas(), - GitRepo: ctx.Repo.GitRepo, - }, rd, &result) - if err != nil { - log.Error("Render failed: %v then fallback", err) - buf := &bytes.Buffer{} - ctx.Data["EscapeStatus"], _ = charset.EscapeControlReader(rd, buf) - ctx.Data["FileContent"] = strings.ReplaceAll( - gotemplate.HTMLEscapeString(buf.String()), "\n", `
`, - ) - } else { - ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlString(result.String()) - } - } else { - ctx.Data["IsRenderedHTML"] = true - buf := &bytes.Buffer{} - ctx.Data["EscapeStatus"], err = charset.EscapeControlReader(rd, buf) - if err != nil { - log.Error("Read failed: %v", err) - } - - ctx.Data["FileContent"] = strings.ReplaceAll( - gotemplate.HTMLEscapeString(buf.String()), "\n", `
`, - ) - } + fileSize = meta.Size + ctx.Data["FileSize"] = meta.Size + filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.name)) + ctx.Data["RawFileLink"] = fmt.Sprintf("%s.git/info/lfs/objects/%s/%s", ctx.Repo.Repository.HTMLURL(), url.PathEscape(meta.Oid), url.PathEscape(filenameBase64)) } } } - // Check permission to add or upload new file. - if ctx.Repo.CanWrite(unit_model.TypeCode) && ctx.Repo.IsViewBranch { - ctx.Data["CanAddFile"] = !ctx.Repo.Repository.IsArchived - ctx.Data["CanUploadFile"] = setting.Repository.Upload.Enabled && !ctx.Repo.Repository.IsArchived + if !isTextFile { + return + } + + if !isLFSFile { + fileSize = readmeFile.blob.Size() + } + + if fileSize >= setting.UI.MaxDisplayFileSize { + // Pretend that this is a normal text file to display 'This file is too large to be shown' + ctx.Data["IsFileTooLarge"] = true + ctx.Data["IsTextFile"] = true + ctx.Data["FileSize"] = fileSize + return + } + + rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc)) + + if markupType := markup.Type(readmeFile.name); markupType != "" { + ctx.Data["IsMarkup"] = true + ctx.Data["MarkupType"] = string(markupType) + var result strings.Builder + err := markup.Render(&markup.RenderContext{ + Ctx: ctx, + Filename: readmeFile.name, + URLPrefix: readmeTreelink, + Metas: ctx.Repo.Repository.ComposeDocumentMetas(), + GitRepo: ctx.Repo.GitRepo, + }, rd, &result) + if err != nil { + log.Error("Render failed: %v then fallback", err) + buf := &bytes.Buffer{} + ctx.Data["EscapeStatus"], _ = charset.EscapeControlReader(rd, buf) + ctx.Data["FileContent"] = strings.ReplaceAll( + gotemplate.HTMLEscapeString(buf.String()), "\n", `
`, + ) + } else { + ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlString(result.String()) + } + } else { + ctx.Data["IsRenderedHTML"] = true + buf := &bytes.Buffer{} + ctx.Data["EscapeStatus"], err = charset.EscapeControlReader(rd, buf) + if err != nil { + log.Error("Read failed: %v", err) + } + + ctx.Data["FileContent"] = strings.ReplaceAll( + gotemplate.HTMLEscapeString(buf.String()), "\n", `
`, + ) } } @@ -488,9 +502,17 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st } rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc)) + + shouldRenderSource := ctx.FormString("display") == "source" readmeExist := markup.IsReadmeFile(blob.Name()) ctx.Data["ReadmeExist"] = readmeExist - if markupType := markup.Type(blob.Name()); markupType != "" { + + markupType := markup.Type(blob.Name()) + if markupType != "" { + ctx.Data["HasSourceRenderedToggle"] = true + } + + if markupType != "" && !shouldRenderSource { ctx.Data["IsMarkup"] = true ctx.Data["MarkupType"] = markupType var result strings.Builder @@ -506,7 +528,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st return } ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlString(result.String()) - } else if readmeExist { + } else if readmeExist && !shouldRenderSource { buf := &bytes.Buffer{} ctx.Data["IsRenderedHTML"] = true @@ -557,8 +579,8 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st ctx.Data["LineEscapeStatus"] = statuses } if !isLFSFile { - if ctx.Repo.CanEnableEditor() { - if lfsLock != nil && lfsLock.OwnerID != ctx.User.ID { + if ctx.Repo.CanEnableEditor(ctx.Doer) { + if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID { ctx.Data["CanEditFile"] = false ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.this_file_locked") } else { @@ -567,7 +589,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st } } else if !ctx.Repo.IsViewBranch { ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch") - } else if !ctx.Repo.CanWrite(unit_model.TypeCode) { + } else if !ctx.Repo.CanWriteToBranch(ctx.Doer, ctx.Repo.BranchName) { ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.fork_before_edit") } } @@ -607,8 +629,8 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st } } - if ctx.Repo.CanEnableEditor() { - if lfsLock != nil && lfsLock.OwnerID != ctx.User.ID { + if ctx.Repo.CanEnableEditor(ctx.Doer) { + if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID { ctx.Data["CanDeleteFile"] = false ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.this_file_locked") } else { @@ -617,7 +639,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st } } else if !ctx.Repo.IsViewBranch { ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch") - } else if !ctx.Repo.CanWrite(unit_model.TypeCode) { + } else if !ctx.Repo.CanWriteToBranch(ctx.Doer, ctx.Repo.BranchName) { ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_have_write_access") } } @@ -662,7 +684,7 @@ func checkHomeCodeViewable(ctx *context.Context) { if ctx.IsSigned { // Set repo notification-status read if unread - if err := models.SetRepoReadBy(ctx.Repo.Repository.ID, ctx.User.ID); err != nil { + if err := models.SetRepoReadBy(ctx.Repo.Repository.ID, ctx.Doer.ID); err != nil { ctx.ServerError("ReadBy", err) return } @@ -691,6 +713,14 @@ func checkHomeCodeViewable(ctx *context.Context) { // Home render repository home page func Home(ctx *context.Context) { + isFeed, _, showFeedType := feed.GetFeedType(ctx.Params(":reponame"), ctx.Req) + if isFeed { + feed.ShowRepoFeed(ctx, ctx.Repo.Repository, showFeedType) + return + } + + ctx.Data["FeedURL"] = ctx.Repo.Repository.HTMLURL() + checkHomeCodeViewable(ctx) if ctx.Written() { return @@ -875,7 +905,7 @@ func renderCode(ctx *context.Context) { 
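In the readme fallback above (and again in the plain-text branch), the raw content is HTML-escaped and each newline is replaced with an HTML line-break element; the quoted literal is mangled in this copy of the patch, so the sketch below assumes a plain break tag is the intended replacement. Standalone version of that transformation:

    package main

    import (
        "fmt"
        "html/template"
        "strings"
    )

    // renderPlain mirrors the fallback: escape everything, then turn newlines into
    // <br> so the text still wraps line by line when injected into the page.
    func renderPlain(raw string) string {
        escaped := template.HTMLEscapeString(raw)
        return strings.ReplaceAll(escaped, "\n", "<br>")
    }

    func main() {
        fmt.Println(renderPlain("a < b\nsecond line"))
        // prints: a &lt; b<br>second line
    }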
ctx.ServerError("UpdateRepositoryCols", err) return } - if err = models.UpdateRepoSize(db.DefaultContext, ctx.Repo.Repository); err != nil { + if err = models.UpdateRepoSize(ctx, ctx.Repo.Repository); err != nil { ctx.ServerError("UpdateRepoSize", err) return } @@ -1008,7 +1038,7 @@ func Forks(ctx *context.Context) { } for _, fork := range forks { - if err = fork.GetOwner(db.DefaultContext); err != nil { + if err = fork.GetOwner(ctx); err != nil { ctx.ServerError("GetOwner", err) return } diff --git a/routers/web/repo/webhook.go b/routers/web/repo/webhook.go index 76ebd1c485..d2e2461189 100644 --- a/routers/web/repo/webhook.go +++ b/routers/web/repo/webhook.go @@ -13,7 +13,6 @@ import ( "path" "strings" - "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/perm" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/models/webhook" @@ -85,7 +84,7 @@ func getOrgRepoCtx(ctx *context.Context) (*orgRepoCtx, error) { }, nil } - if ctx.User.IsAdmin { + if ctx.Doer.IsAdmin { // Are we looking at default webhooks? if ctx.Params(":configType") == "default-hooks" { return &orgRepoCtx{ @@ -148,7 +147,6 @@ func WebhooksNew(ctx *context.Context) { if hookType == "discord" { ctx.Data["DiscordHook"] = map[string]interface{}{ "Username": "Gitea", - "IconURL": setting.AppURL + "img/favicon.png", } } ctx.Data["BaseLink"] = orCtx.LinkNew @@ -181,6 +179,7 @@ func ParseHookEvent(form forms.WebhookForm) *webhook.HookEvent { PullRequestReview: form.PullRequestReview, PullRequestSync: form.PullRequestSync, Repository: form.Repository, + Package: form.Package, }, BranchFilter: form.BranchFilter, } @@ -227,7 +226,7 @@ func GiteaHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -281,7 +280,7 @@ func newGogsWebhookPost(ctx *context.Context, form forms.NewGogshookForm, kind w if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -333,7 +332,7 @@ func DiscordHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -376,7 +375,7 @@ func DingtalkHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -428,7 +427,7 @@ func TelegramHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -483,7 +482,7 @@ func MatrixHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := 
webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -526,7 +525,7 @@ func MSTeamsHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -586,7 +585,7 @@ func SlackHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -629,7 +628,7 @@ func FeishuHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -673,7 +672,7 @@ func WechatworkHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -726,7 +725,7 @@ func PackagistHooksNewPost(ctx *context.Context) { if err := w.UpdateEvent(); err != nil { ctx.ServerError("UpdateEvent", err) return - } else if err := webhook.CreateWebhook(db.DefaultContext, w); err != nil { + } else if err := webhook.CreateWebhook(ctx, w); err != nil { ctx.ServerError("CreateWebhook", err) return } @@ -736,8 +735,6 @@ func PackagistHooksNewPost(ctx *context.Context) { } func checkWebhook(ctx *context.Context) (*orgRepoCtx, *webhook.Webhook) { - ctx.Data["RequireHighlightJS"] = true - orCtx, err := getOrgRepoCtx(ctx) if err != nil { ctx.ServerError("getOrgRepoCtx", err) @@ -1242,7 +1239,7 @@ func TestWebhook(ctx *context.Context) { w, err := webhook.GetWebhookByRepoID(ctx.Repo.Repository.ID, hookID) if err != nil { ctx.Flash.Error("GetWebhookByID: " + err.Error()) - ctx.Status(500) + ctx.Status(http.StatusInternalServerError) return } @@ -1258,7 +1255,7 @@ func TestWebhook(ctx *context.Context) { } } - apiUser := convert.ToUserWithAccessMode(ctx.User, perm.AccessModeNone) + apiUser := convert.ToUserWithAccessMode(ctx.Doer, perm.AccessModeNone) apiCommit := &api.PayloadCommit{ ID: commit.ID.String(), @@ -1286,10 +1283,10 @@ func TestWebhook(ctx *context.Context) { } if err := webhook_service.PrepareWebhook(w, ctx.Repo.Repository, webhook.HookEventPush, p); err != nil { ctx.Flash.Error("PrepareWebhook: " + err.Error()) - ctx.Status(500) + ctx.Status(http.StatusInternalServerError) } else { ctx.Flash.Info(ctx.Tr("repo.settings.webhook.delivery.success")) - ctx.Status(200) + ctx.Status(http.StatusOK) } } diff --git a/routers/web/repo/wiki.go b/routers/web/repo/wiki.go index 633458081f..77f60a1dfa 100644 --- a/routers/web/repo/wiki.go +++ b/routers/web/repo/wiki.go @@ -13,6 +13,7 @@ import ( "net/url" "path/filepath" "strings" + "time" "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/unit" @@ -47,7 +48,7 @@ func MustEnableWiki(ctx *context.Context) { if log.IsTrace() { log.Trace("Permission Denied: User %-v cannot read %-v or %-v of repo %-v\n"+ "User 
in repo has Permissions: %-+v", - ctx.User, + ctx.Doer, unit.TypeWiki, unit.TypeExternalWiki, ctx.Repo.Repository, @@ -90,7 +91,7 @@ func findEntryForFile(commit *git.Commit, target string) (*git.TreeEntry, error) } func findWikiRepoCommit(ctx *context.Context) (*git.Repository, *git.Commit, error) { - wikiRepo, err := git.OpenRepositoryCtx(ctx, ctx.Repo.Repository.WikiPath()) + wikiRepo, err := git.OpenRepository(ctx, ctx.Repo.Repository.WikiPath()) if err != nil { ctx.ServerError("OpenRepository", err) return nil, nil, err @@ -189,7 +190,9 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) { ctx.Data["old_title"] = pageName ctx.Data["Title"] = pageName ctx.Data["title"] = pageName - ctx.Data["RequireHighlightJS"] = true + + isSideBar := pageName == "_Sidebar" + isFooter := pageName == "_Footer" // lookup filename in wiki - get filecontent, gitTree entry , real filename data, entry, pageFilename, noEntry := wikiContentsByName(ctx, commit, pageName) @@ -203,20 +206,30 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) { return nil, nil } - sidebarContent, _, _, _ := wikiContentsByName(ctx, commit, "_Sidebar") - if ctx.Written() { - if wikiRepo != nil { - wikiRepo.Close() + var sidebarContent []byte + if !isSideBar { + sidebarContent, _, _, _ = wikiContentsByName(ctx, commit, "_Sidebar") + if ctx.Written() { + if wikiRepo != nil { + wikiRepo.Close() + } + return nil, nil } - return nil, nil + } else { + sidebarContent = data } - footerContent, _, _, _ := wikiContentsByName(ctx, commit, "_Footer") - if ctx.Written() { - if wikiRepo != nil { - wikiRepo.Close() + var footerContent []byte + if !isFooter { + footerContent, _, _, _ = wikiContentsByName(ctx, commit, "_Footer") + if ctx.Written() { + if wikiRepo != nil { + wikiRepo.Close() + } + return nil, nil } - return nil, nil + } else { + footerContent = data } rctx := &markup.RenderContext{ @@ -237,27 +250,35 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) { ctx.Data["EscapeStatus"], ctx.Data["content"] = charset.EscapeControlString(buf.String()) - buf.Reset() - if err := markdown.Render(rctx, bytes.NewReader(sidebarContent), &buf); err != nil { - if wikiRepo != nil { - wikiRepo.Close() + if !isSideBar { + buf.Reset() + if err := markdown.Render(rctx, bytes.NewReader(sidebarContent), &buf); err != nil { + if wikiRepo != nil { + wikiRepo.Close() + } + ctx.ServerError("Render", err) + return nil, nil } - ctx.ServerError("Render", err) - return nil, nil + ctx.Data["sidebarPresent"] = sidebarContent != nil + ctx.Data["sidebarEscapeStatus"], ctx.Data["sidebarContent"] = charset.EscapeControlString(buf.String()) + } else { + ctx.Data["sidebarPresent"] = false } - ctx.Data["sidebarPresent"] = sidebarContent != nil - ctx.Data["sidebarEscapeStatus"], ctx.Data["sidebarContent"] = charset.EscapeControlString(buf.String()) - buf.Reset() - if err := markdown.Render(rctx, bytes.NewReader(footerContent), &buf); err != nil { - if wikiRepo != nil { - wikiRepo.Close() + if !isFooter { + buf.Reset() + if err := markdown.Render(rctx, bytes.NewReader(footerContent), &buf); err != nil { + if wikiRepo != nil { + wikiRepo.Close() + } + ctx.ServerError("Render", err) + return nil, nil } - ctx.ServerError("Render", err) - return nil, nil + ctx.Data["footerPresent"] = footerContent != nil + ctx.Data["footerEscapeStatus"], ctx.Data["footerContent"] = charset.EscapeControlString(buf.String()) + } else { + ctx.Data["footerPresent"] = false } - ctx.Data["footerPresent"] = footerContent 
!= nil - ctx.Data["footerEscapeStatus"], ctx.Data["footerContent"] = charset.EscapeControlString(buf.String()) // get commit count - wiki revisions commitsCount, _ := wikiRepo.FileCommitsCount("master", pageFilename) @@ -287,7 +308,6 @@ func renderRevisionPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) ctx.Data["old_title"] = pageName ctx.Data["Title"] = pageName ctx.Data["title"] = pageName - ctx.Data["RequireHighlightJS"] = true ctx.Data["Username"] = ctx.Repo.Owner.Name ctx.Data["Reponame"] = ctx.Repo.Repository.Name @@ -363,7 +383,6 @@ func renderEditPage(ctx *context.Context) { ctx.Data["old_title"] = pageName ctx.Data["Title"] = pageName ctx.Data["title"] = pageName - ctx.Data["RequireHighlightJS"] = true // lookup filename in wiki - get filecontent, gitTree entry , real filename data, entry, _, noEntry := wikiContentsByName(ctx, commit, pageName) @@ -409,7 +428,6 @@ func WikiPost(ctx *context.Context) { // Wiki renders single wiki page func Wiki(ctx *context.Context) { - ctx.Data["PageIsWiki"] = true ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived switch ctx.FormString("action") { @@ -474,7 +492,6 @@ func Wiki(ctx *context.Context) { // WikiRevision renders file revision list of wiki page func WikiRevision(ctx *context.Context) { - ctx.Data["PageIsWiki"] = true ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived if !ctx.Repo.Repository.HasWiki() { @@ -519,7 +536,6 @@ func WikiPages(ctx *context.Context) { } ctx.Data["Title"] = ctx.Tr("repo.wiki.pages") - ctx.Data["PageIsWiki"] = true ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived wikiRepo, commit, err := findWikiRepoCommit(ctx) @@ -612,7 +628,7 @@ func WikiRaw(ctx *context.Context) { } if entry != nil { - if err = common.ServeBlob(ctx, entry.Blob()); err != nil { + if err = common.ServeBlob(ctx, entry.Blob(), time.Time{}); err != nil { ctx.ServerError("ServeBlob", err) } return @@ -624,7 +640,6 @@ func WikiRaw(ctx *context.Context) { // NewWiki render wiki create page func NewWiki(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page") - ctx.Data["PageIsWiki"] = true if !ctx.Repo.Repository.HasWiki() { ctx.Data["title"] = "Home" @@ -640,7 +655,6 @@ func NewWiki(ctx *context.Context) { func NewWikiPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.NewWikiForm) ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page") - ctx.Data["PageIsWiki"] = true if ctx.HasError() { ctx.HTML(http.StatusOK, tplWikiNew) @@ -658,7 +672,7 @@ func NewWikiPost(ctx *context.Context) { form.Message = ctx.Tr("repo.editor.add", form.Title) } - if err := wiki_service.AddWikiPage(ctx, ctx.User, ctx.Repo.Repository, wikiName, form.Content, form.Message); err != nil { + if err := wiki_service.AddWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName, form.Content, form.Message); err != nil { if models.IsErrWikiReservedName(err) { ctx.Data["Err_Title"] = true ctx.RenderWithErr(ctx.Tr("repo.wiki.reserved_page", wikiName), tplWikiNew, &form) @@ -676,7 +690,6 @@ func NewWikiPost(ctx *context.Context) { // EditWiki render wiki modify page func EditWiki(ctx *context.Context) { - ctx.Data["PageIsWiki"] = true ctx.Data["PageIsWikiEdit"] = true if !ctx.Repo.Repository.HasWiki() { @@ -696,7 +709,6 @@ func EditWiki(ctx *context.Context) { func EditWikiPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.NewWikiForm) ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page") - ctx.Data["PageIsWiki"] = true if 
ctx.HasError() { ctx.HTML(http.StatusOK, tplWikiNew) @@ -710,7 +722,7 @@ func EditWikiPost(ctx *context.Context) { form.Message = ctx.Tr("repo.editor.update", form.Title) } - if err := wiki_service.EditWikiPage(ctx, ctx.User, ctx.Repo.Repository, oldWikiName, newWikiName, form.Content, form.Message); err != nil { + if err := wiki_service.EditWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, oldWikiName, newWikiName, form.Content, form.Message); err != nil { ctx.ServerError("EditWikiPage", err) return } @@ -725,7 +737,7 @@ func DeleteWikiPagePost(ctx *context.Context) { wikiName = "Home" } - if err := wiki_service.DeleteWikiPage(ctx, ctx.User, ctx.Repo.Repository, wikiName); err != nil { + if err := wiki_service.DeleteWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName); err != nil { ctx.ServerError("DeleteWikiPage", err) return } diff --git a/routers/web/repo/wiki_test.go b/routers/web/repo/wiki_test.go index b19c628a9f..34f466854f 100644 --- a/routers/web/repo/wiki_test.go +++ b/routers/web/repo/wiki_test.go @@ -26,7 +26,7 @@ const ( ) func wikiEntry(t *testing.T, repo *repo_model.Repository, wikiName string) *git.TreeEntry { - wikiRepo, err := git.OpenRepository(repo.WikiPath()) + wikiRepo, err := git.OpenRepository(git.DefaultContext, repo.WikiPath()) assert.NoError(t, err) defer wikiRepo.Close() commit, err := wikiRepo.GetBranchCommit("master") @@ -124,7 +124,7 @@ func TestNewWikiPost(t *testing.T) { Message: message, }) NewWikiPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assertWikiExists(t, ctx.Repo.Repository, title) assert.Equal(t, wikiContent(t, ctx.Repo.Repository, title), content) } @@ -176,7 +176,7 @@ func TestEditWikiPost(t *testing.T) { Message: message, }) EditWikiPost(ctx) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) assertWikiExists(t, ctx.Repo.Repository, title) assert.Equal(t, wikiContent(t, ctx.Repo.Repository, title), content) if title != "Home" { diff --git a/routers/web/user/home.go b/routers/web/user/home.go index 379e1f8e20..2e7b382de6 100644 --- a/routers/web/user/home.go +++ b/routers/web/user/home.go @@ -17,6 +17,8 @@ import ( "code.gitea.io/gitea/models" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -29,7 +31,6 @@ import ( "code.gitea.io/gitea/modules/markup/markdown" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - "code.gitea.io/gitea/routers/web/feed" issue_service "code.gitea.io/gitea/services/issue" pull_service "code.gitea.io/gitea/services/pull" @@ -47,7 +48,7 @@ const ( // getDashboardContextUser finds out which context user dashboard is being viewed as . 
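Regarding the wiki view changes earlier in this file: when the page being rendered is itself _Sidebar or _Footer, its content is reused directly and the matching panel is no longer loaded and rendered a second time. A compact sketch of just that selection, detached from Gitea's wiki plumbing:

    package main

    import "fmt"

    // showPanels mirrors the isSideBar/isFooter checks above: when the page being
    // viewed is itself _Sidebar or _Footer, that panel is not rendered again.
    func showPanels(pageName string) (renderSidebar, renderFooter bool) {
        return pageName != "_Sidebar", pageName != "_Footer"
    }

    func main() {
        s, f := showPanels("_Sidebar")
        fmt.Printf("sidebar panel: %v, footer panel: %v\n", s, f) // false, true
    }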
func getDashboardContextUser(ctx *context.Context) *user_model.User { - ctxUser := ctx.User + ctxUser := ctx.Doer orgName := ctx.Params(":org") if len(orgName) > 0 { ctxUser = ctx.Org.Organization.AsUser() @@ -55,7 +56,7 @@ func getDashboardContextUser(ctx *context.Context) *user_model.User { } ctx.Data["ContextUser"] = ctxUser - orgs, err := models.GetUserOrgsList(ctx.User) + orgs, err := models.GetUserOrgsList(ctx.Doer) if err != nil { ctx.ServerError("GetUserOrgsList", err) return nil @@ -75,7 +76,7 @@ func Dashboard(ctx *context.Context) { ctx.Data["Title"] = ctxUser.DisplayName() + " - " + ctx.Tr("dashboard") ctx.Data["PageIsDashboard"] = true ctx.Data["PageIsNews"] = true - cnt, _ := models.GetOrganizationCount(db.DefaultContext, ctxUser) + cnt, _ := organization.GetOrganizationCount(ctx, ctxUser) ctx.Data["UserOrgsCount"] = cnt var uid int64 @@ -89,7 +90,7 @@ func Dashboard(ctx *context.Context) { } if setting.Service.EnableUserHeatmap { - data, err := models.GetUserHeatmapDataByUserTeam(ctxUser, ctx.Org.Team, ctx.User) + data, err := models.GetUserHeatmapDataByUserTeam(ctxUser, ctx.Org.Team, ctx.Doer) if err != nil { ctx.ServerError("GetUserHeatmapDataByUserTeam", err) return @@ -100,11 +101,11 @@ func Dashboard(ctx *context.Context) { var err error var mirrors []*repo_model.Repository if ctxUser.IsOrganization() { - var env models.AccessibleReposEnvironment + var env organization.AccessibleReposEnvironment if ctx.Org.Team != nil { - env = models.OrgFromUser(ctxUser).AccessibleTeamReposEnv(ctx.Org.Team) + env = organization.OrgFromUser(ctxUser).AccessibleTeamReposEnv(ctx.Org.Team) } else { - env, err = models.OrgFromUser(ctxUser).AccessibleReposEnv(ctx.User.ID) + env, err = organization.AccessibleReposEnv(ctx, organization.OrgFromUser(ctxUser), ctx.Doer.ID) if err != nil { ctx.ServerError("AccessibleReposEnv", err) return @@ -131,17 +132,17 @@ func Dashboard(ctx *context.Context) { ctx.Data["MirrorCount"] = len(mirrors) ctx.Data["Mirrors"] = mirrors - ctx.Data["Feeds"] = feed.RetrieveFeeds(ctx, models.GetFeedsOptions{ + ctx.Data["Feeds"], err = models.GetFeeds(ctx, models.GetFeedsOptions{ RequestedUser: ctxUser, RequestedTeam: ctx.Org.Team, - Actor: ctx.User, + Actor: ctx.Doer, IncludePrivate: true, OnlyPerformedBy: false, IncludeDeleted: false, Date: ctx.FormString("date"), }) - - if ctx.Written() { + if err != nil { + ctx.ServerError("GetFeeds", err) return } @@ -152,7 +153,7 @@ func Dashboard(ctx *context.Context) { func Milestones(ctx *context.Context) { if unit.TypeIssues.UnitGlobalDisabled() && unit.TypePullRequests.UnitGlobalDisabled() { log.Debug("Milestones overview page not available as both issues and pull requests are globally disabled") - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } @@ -219,13 +220,13 @@ func Milestones(ctx *context.Context) { } } - counts, err := models.CountMilestonesByRepoCondAndKw(userRepoCond, keyword, isShowClosed) + counts, err := issues_model.CountMilestonesByRepoCondAndKw(userRepoCond, keyword, isShowClosed) if err != nil { ctx.ServerError("CountMilestonesByRepoIDs", err) return } - milestones, err := models.SearchMilestones(repoCond, page, isShowClosed, sortType, keyword) + milestones, err := issues_model.SearchMilestones(repoCond, page, isShowClosed, sortType, keyword) if err != nil { ctx.ServerError("SearchMilestones", err) return @@ -271,17 +272,17 @@ func Milestones(ctx *context.Context) { i++ } - milestoneStats, err := models.GetMilestonesStatsByRepoCondAndKw(repoCond, keyword) + milestoneStats, err := 
issues_model.GetMilestonesStatsByRepoCondAndKw(repoCond, keyword) if err != nil { ctx.ServerError("GetMilestoneStats", err) return } - var totalMilestoneStats *models.MilestonesStats + var totalMilestoneStats *issues_model.MilestonesStats if len(repoIDs) == 0 { totalMilestoneStats = milestoneStats } else { - totalMilestoneStats, err = models.GetMilestonesStatsByRepoCondAndKw(userRepoCond, keyword) + totalMilestoneStats, err = issues_model.GetMilestonesStatsByRepoCondAndKw(userRepoCond, keyword) if err != nil { ctx.ServerError("GetMilestoneStats", err) return @@ -324,7 +325,7 @@ func Milestones(ctx *context.Context) { func Pulls(ctx *context.Context) { if unit.TypePullRequests.UnitGlobalDisabled() { log.Debug("Pull request overview page not available as it is globally disabled.") - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } @@ -337,7 +338,7 @@ func Pulls(ctx *context.Context) { func Issues(ctx *context.Context) { if unit.TypeIssues.UnitGlobalDisabled() { log.Debug("Issues overview page not available as it is globally disabled.") - ctx.Status(404) + ctx.Status(http.StatusNotFound) return } @@ -363,7 +364,7 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { var ( viewType string sortType = ctx.FormString("sort") - filterMode = models.FilterModeAll + filterMode int ) // -------------------------------------------------------------------------------- @@ -389,8 +390,10 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { filterMode = models.FilterModeMention case "review_requested": filterMode = models.FilterModeReviewRequested - case "your_repositories": // filterMode already set to All + case "your_repositories": + fallthrough default: + filterMode = models.FilterModeYourRepositories viewType = "your_repositories" } @@ -403,8 +406,8 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { // -------------------------------------------------------------------------- // Get repository IDs where User/Org/Team has access. - var team *models.Team - var org *models.Organization + var team *organization.Team + var org *organization.Organization if ctx.Org != nil { org = ctx.Org.Organization team = ctx.Org.Team @@ -417,19 +420,50 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { IsArchived: util.OptionalBoolFalse, Org: org, Team: team, - User: ctx.User, + User: ctx.Doer, + } + + // Search all repositories which + // + // As user: + // - Owns the repository. + // - Has collaborator permissions in the repository. + // + // As org: + // - Owns the repository. + // + // As team: + // - Team's org owns the repository. + // - Team has read permission to the repository. 
+ repoOpts := &models.SearchRepoOptions{ + Actor: ctx.Doer, + OwnerID: ctx.Doer.ID, + Private: true, + AllPublic: false, + AllLimited: false, + } + + if ctxUser.IsOrganization() && ctx.Org.Team != nil { + repoOpts.TeamID = ctx.Org.Team.ID } switch filterMode { case models.FilterModeAll: case models.FilterModeAssign: - opts.AssigneeID = ctx.User.ID + opts.AssigneeID = ctx.Doer.ID case models.FilterModeCreate: - opts.PosterID = ctx.User.ID + opts.PosterID = ctx.Doer.ID case models.FilterModeMention: - opts.MentionedID = ctx.User.ID + opts.MentionedID = ctx.Doer.ID case models.FilterModeReviewRequested: - opts.ReviewRequestedID = ctx.User.ID + opts.ReviewRequestedID = ctx.Doer.ID + case models.FilterModeYourRepositories: + if ctxUser.IsOrganization() && ctx.Org.Team != nil { + // Fixes an issue whereby the user's ID would be used + // to check if it's in the team (which possibly isn't the case). + opts.User = nil + } + opts.RepoCond = models.SearchRepositoryCondition(repoOpts) } // keyword holds the search term entered into the search field. @@ -493,7 +527,7 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { // Gets set when clicking filters on the issues overview page. repoIDs := getRepoIDs(ctx.FormString("repos")) if len(repoIDs) > 0 { - opts.RepoIDs = repoIDs + opts.RepoCond = builder.In("issue.repo_id", repoIDs) } // ------------------------------ @@ -539,7 +573,7 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { } } - commitStatus, err := pull_service.GetIssuesLastCommitStatus(ctx, issues) + commitStatuses, lastStatus, err := pull_service.GetIssuesAllCommitStatus(ctx, issues) if err != nil { ctx.ServerError("GetIssuesLastCommitStatus", err) return @@ -551,7 +585,7 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { var issueStats *models.IssueStats if !forceEmpty { statsOpts := models.UserIssueStatsOptions{ - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, FilterMode: filterMode, IsPull: isPullList, IsClosed: isShowClosed, @@ -561,8 +595,12 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { Org: org, Team: team, } - if len(repoIDs) > 0 { - statsOpts.RepoIDs = repoIDs + if filterMode == models.FilterModeYourRepositories { + statsOpts.RepoCond = models.SearchRepositoryCondition(repoOpts) + } + // Detect when we should only search by team. 
+ if opts.User == nil { + statsOpts.UserID = 0 } issueStats, err = models.GetUserIssueStats(statsOpts) if err != nil { @@ -612,7 +650,8 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) { } return 0 } - ctx.Data["CommitStatus"] = commitStatus + ctx.Data["CommitLastStatus"] = lastStatus + ctx.Data["CommitStatuses"] = commitStatuses ctx.Data["Repos"] = showRepos ctx.Data["Counts"] = issueCountByRepo ctx.Data["IssueStats"] = issueStats @@ -714,8 +753,8 @@ func loadRepoByIDs(ctxUser *user_model.User, issueCountByRepo map[int64]int64, u } // ShowSSHKeys output all the ssh keys of user by uid -func ShowSSHKeys(ctx *context.Context, uid int64) { - keys, err := asymkey_model.ListPublicKeys(uid, db.ListOptions{}) +func ShowSSHKeys(ctx *context.Context) { + keys, err := asymkey_model.ListPublicKeys(ctx.ContextUser.ID, db.ListOptions{}) if err != nil { ctx.ServerError("ListPublicKeys", err) return @@ -730,8 +769,8 @@ func ShowSSHKeys(ctx *context.Context, uid int64) { } // ShowGPGKeys output all the public GPG keys of user by uid -func ShowGPGKeys(ctx *context.Context, uid int64) { - keys, err := asymkey_model.ListGPGKeys(db.DefaultContext, uid, db.ListOptions{}) +func ShowGPGKeys(ctx *context.Context) { + keys, err := asymkey_model.ListGPGKeys(ctx, ctx.ContextUser.ID, db.ListOptions{}) if err != nil { ctx.ServerError("ListGPGKeys", err) return diff --git a/routers/web/user/home_test.go b/routers/web/user/home_test.go index cd599abd04..bf78e00ada 100644 --- a/routers/web/user/home_test.go +++ b/routers/web/user/home_test.go @@ -26,7 +26,7 @@ func TestArchivedIssues(t *testing.T) { ctx.Req.Form.Set("state", "open") // Assume: User 30 has access to two Repos with Issues, one of the Repos being archived. - repos, _, _ := models.GetUserRepositories(&models.SearchRepoOptions{Actor: ctx.User}) + repos, _, _ := models.GetUserRepositories(&models.SearchRepoOptions{Actor: ctx.Doer}) assert.Len(t, repos, 2) IsArchived := make(map[int64]bool) NumIssues := make(map[int64]int) diff --git a/routers/web/user/main_test.go b/routers/web/user/main_test.go index 77b48d89fb..517957a85c 100644 --- a/routers/web/user/main_test.go +++ b/routers/web/user/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } diff --git a/routers/web/user/notification.go b/routers/web/user/notification.go index 08cd1b8b31..05421cf555 100644 --- a/routers/web/user/notification.go +++ b/routers/web/user/notification.go @@ -15,6 +15,7 @@ import ( "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/structs" ) const ( @@ -33,7 +34,7 @@ func GetNotificationCount(c *context.Context) { } c.Data["NotificationUnreadCount"] = func() int64 { - count, err := models.GetNotificationCount(c.User, models.NotificationStatusUnread) + count, err := models.GetNotificationCount(c.Doer, models.NotificationStatusUnread) if err != nil { c.ServerError("GetNotificationCount", err) return -1 @@ -78,7 +79,7 @@ func getNotifications(c *context.Context) { status = models.NotificationStatusUnread } - total, err := models.GetNotificationCount(c.User, status) + total, err := models.GetNotificationCount(c.Doer, status) if err != nil { c.ServerError("ErrGetNotificationCount", err) return @@ -92,7 +93,7 @@ func getNotifications(c *context.Context) { } statuses := []models.NotificationStatus{status, 
models.NotificationStatusPinned} - notifications, err := models.NotificationsForUser(c.User, statuses, page, perPage) + notifications, err := models.NotificationsForUser(c.Doer, statuses, page, perPage) if err != nil { c.ServerError("ErrNotificationsForUser", err) return @@ -161,7 +162,7 @@ func NotificationStatusPost(c *context.Context) { return } - if _, err := models.SetNotificationStatus(notificationID, c.User, status); err != nil { + if _, err := models.SetNotificationStatus(notificationID, c.Doer, status); err != nil { c.ServerError("SetNotificationStatus", err) return } @@ -183,7 +184,7 @@ func NotificationStatusPost(c *context.Context) { // NotificationPurgePost is a route for 'purging' the list of notifications - marking all unread as read func NotificationPurgePost(c *context.Context) { - err := models.UpdateNotificationStatuses(c.User, models.NotificationStatusUnread, models.NotificationStatusRead) + err := models.UpdateNotificationStatuses(c.Doer, models.NotificationStatusUnread, models.NotificationStatusRead) if err != nil { c.ServerError("ErrUpdateNotificationStatuses", err) return @@ -191,3 +192,8 @@ func NotificationPurgePost(c *context.Context) { c.Redirect(setting.AppSubURL+"/notifications", http.StatusSeeOther) } + +// NewAvailable returns the notification counts +func NewAvailable(ctx *context.Context) { + ctx.JSON(http.StatusOK, structs.NotificationCount{New: models.CountUnread(ctx.Doer)}) +} diff --git a/routers/web/user/package.go b/routers/web/user/package.go new file mode 100644 index 0000000000..1c33998db9 --- /dev/null +++ b/routers/web/user/package.go @@ -0,0 +1,376 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package user + +import ( + "net/http" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + container_model "code.gitea.io/gitea/models/packages/container" + "code.gitea.io/gitea/models/perm" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/modules/base" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/web" + "code.gitea.io/gitea/services/forms" + packages_service "code.gitea.io/gitea/services/packages" +) + +const ( + tplPackagesList base.TplName = "user/overview/packages" + tplPackagesView base.TplName = "package/view" + tplPackageVersionList base.TplName = "user/overview/package_versions" + tplPackagesSettings base.TplName = "package/settings" +) + +// ListPackages displays a list of all packages of the context user +func ListPackages(ctx *context.Context) { + page := ctx.FormInt("page") + if page <= 1 { + page = 1 + } + query := ctx.FormTrim("q") + packageType := ctx.FormTrim("type") + + pvs, total, err := packages_model.SearchLatestVersions(ctx, &packages_model.PackageSearchOptions{ + Paginator: &db.ListOptions{ + PageSize: setting.UI.PackagesPagingNum, + Page: page, + }, + OwnerID: ctx.ContextUser.ID, + Type: packages_model.Type(packageType), + Name: packages_model.SearchValue{Value: query}, + }) + if err != nil { + ctx.ServerError("SearchLatestVersions", err) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + ctx.ServerError("GetPackageDescriptors", err) + return + } + + repositoryAccessMap := make(map[int64]bool) + for _, pd := range pds { + if pd.Repository == nil { + continue + } + if _, has := repositoryAccessMap[pd.Repository.ID]; has { + continue + } + + permission, err := models.GetUserRepoPermission(ctx, pd.Repository, ctx.Doer) + if err != nil { + ctx.ServerError("GetUserRepoPermission", err) + return + } + repositoryAccessMap[pd.Repository.ID] = permission.HasAccess() + } + + hasPackages, err := packages_model.HasOwnerPackages(ctx, ctx.ContextUser.ID) + if err != nil { + ctx.ServerError("HasOwnerPackages", err) + return + } + + ctx.Data["Title"] = ctx.Tr("packages.title") + ctx.Data["IsPackagesPage"] = true + ctx.Data["ContextUser"] = ctx.ContextUser + ctx.Data["Query"] = query + ctx.Data["PackageType"] = packageType + ctx.Data["HasPackages"] = hasPackages + ctx.Data["PackageDescriptors"] = pds + ctx.Data["Total"] = total + ctx.Data["RepositoryAccessMap"] = repositoryAccessMap + + pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5) + pager.AddParam(ctx, "q", "Query") + pager.AddParam(ctx, "type", "PackageType") + ctx.Data["Page"] = pager + + ctx.HTML(http.StatusOK, tplPackagesList) +} + +// RedirectToLastVersion redirects to the latest package version +func RedirectToLastVersion(ctx *context.Context) { + p, err := packages_model.GetPackageByName(ctx, ctx.Package.Owner.ID, packages_model.Type(ctx.Params("type")), ctx.Params("name")) + if err != nil { + if err == packages_model.ErrPackageNotExist { + ctx.NotFound("GetPackageByName", err) + } else { + ctx.ServerError("GetPackageByName", err) + } + return + } + + pvs, _, err := packages_model.SearchLatestVersions(ctx, &packages_model.PackageSearchOptions{ + PackageID: p.ID, + }) + if err != nil { + ctx.ServerError("GetPackageByName", err) + return + } + if len(pvs) == 0 { + ctx.NotFound("", err) + return + } + + pd, err := packages_model.GetPackageDescriptor(ctx, 
pvs[0]) + if err != nil { + ctx.ServerError("GetPackageDescriptor", err) + return + } + + ctx.Redirect(pd.FullWebLink()) +} + +// ViewPackageVersion displays a single package version +func ViewPackageVersion(ctx *context.Context) { + pd := ctx.Package.Descriptor + + ctx.Data["Title"] = pd.Package.Name + ctx.Data["IsPackagesPage"] = true + ctx.Data["ContextUser"] = ctx.ContextUser + ctx.Data["PackageDescriptor"] = pd + + var ( + total int64 + pvs []*packages_model.PackageVersion + err error + ) + switch pd.Package.Type { + case packages_model.TypeContainer: + ctx.Data["RegistryHost"] = setting.Packages.RegistryHost + + pvs, total, err = container_model.SearchImageTags(ctx, &container_model.ImageTagsSearchOptions{ + Paginator: db.NewAbsoluteListOptions(0, 5), + PackageID: pd.Package.ID, + IsTagged: true, + }) + default: + pvs, total, err = packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + Paginator: db.NewAbsoluteListOptions(0, 5), + PackageID: pd.Package.ID, + }) + if err != nil { + ctx.ServerError("SearchVersions", err) + return + } + } + if err != nil { + ctx.ServerError("", err) + return + } + + ctx.Data["LatestVersions"] = pvs + ctx.Data["TotalVersionCount"] = total + + ctx.Data["CanWritePackages"] = ctx.Package.AccessMode >= perm.AccessModeWrite || ctx.IsUserSiteAdmin() + + hasRepositoryAccess := false + if pd.Repository != nil { + permission, err := models.GetUserRepoPermission(ctx, pd.Repository, ctx.Doer) + if err != nil { + ctx.ServerError("GetUserRepoPermission", err) + return + } + hasRepositoryAccess = permission.HasAccess() + } + ctx.Data["HasRepositoryAccess"] = hasRepositoryAccess + + ctx.HTML(http.StatusOK, tplPackagesView) +} + +// ListPackageVersions lists all versions of a package +func ListPackageVersions(ctx *context.Context) { + p, err := packages_model.GetPackageByName(ctx, ctx.Package.Owner.ID, packages_model.Type(ctx.Params("type")), ctx.Params("name")) + if err != nil { + if err == packages_model.ErrPackageNotExist { + ctx.NotFound("GetPackageByName", err) + } else { + ctx.ServerError("GetPackageByName", err) + } + return + } + + page := ctx.FormInt("page") + if page <= 1 { + page = 1 + } + pagination := &db.ListOptions{ + PageSize: setting.UI.PackagesPagingNum, + Page: page, + } + + query := ctx.FormTrim("q") + + ctx.Data["Title"] = ctx.Tr("packages.title") + ctx.Data["IsPackagesPage"] = true + ctx.Data["ContextUser"] = ctx.ContextUser + ctx.Data["PackageDescriptor"] = &packages_model.PackageDescriptor{ + Package: p, + Owner: ctx.Package.Owner, + } + ctx.Data["Query"] = query + + pagerParams := map[string]string{ + "q": query, + } + + var ( + total int64 + pvs []*packages_model.PackageVersion + ) + switch p.Type { + case packages_model.TypeContainer: + tagged := ctx.FormTrim("tagged") + + pagerParams["tagged"] = tagged + ctx.Data["Tagged"] = tagged + + pvs, total, err = container_model.SearchImageTags(ctx, &container_model.ImageTagsSearchOptions{ + Paginator: pagination, + PackageID: p.ID, + Query: query, + IsTagged: tagged == "" || tagged == "tagged", + }) + if err != nil { + ctx.ServerError("SearchImageTags", err) + return + } + default: + pvs, total, err = packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + Paginator: pagination, + PackageID: p.ID, + Version: packages_model.SearchValue{ + ExactMatch: false, + Value: query, + }, + }) + if err != nil { + ctx.ServerError("SearchVersions", err) + return + } + } + + ctx.Data["PackageDescriptors"], err = packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + 
ctx.ServerError("GetPackageDescriptors", err) + return + } + + ctx.Data["Total"] = total + + pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5) + for k, v := range pagerParams { + pager.AddParamString(k, v) + } + ctx.Data["Page"] = pager + + ctx.HTML(http.StatusOK, tplPackageVersionList) +} + +// PackageSettings displays the package settings page +func PackageSettings(ctx *context.Context) { + pd := ctx.Package.Descriptor + + ctx.Data["Title"] = pd.Package.Name + ctx.Data["IsPackagesPage"] = true + ctx.Data["ContextUser"] = ctx.ContextUser + ctx.Data["PackageDescriptor"] = pd + + repos, _, _ := models.GetUserRepositories(&models.SearchRepoOptions{ + Actor: pd.Owner, + Private: true, + }) + ctx.Data["Repos"] = repos + ctx.Data["CanWritePackages"] = ctx.Package.AccessMode >= perm.AccessModeWrite || ctx.IsUserSiteAdmin() + + ctx.HTML(http.StatusOK, tplPackagesSettings) +} + +// PackageSettingsPost updates the package settings +func PackageSettingsPost(ctx *context.Context) { + pd := ctx.Package.Descriptor + + form := web.GetForm(ctx).(*forms.PackageSettingForm) + switch form.Action { + case "link": + success := func() bool { + repoID := int64(0) + if form.RepoID != 0 { + repo, err := repo_model.GetRepositoryByID(form.RepoID) + if err != nil { + log.Error("Error getting repository: %v", err) + return false + } + + if repo.OwnerID != pd.Owner.ID { + return false + } + + repoID = repo.ID + } + + if err := packages_model.SetRepositoryLink(ctx, pd.Package.ID, repoID); err != nil { + log.Error("Error updating package: %v", err) + return false + } + + return true + }() + + if success { + ctx.Flash.Success(ctx.Tr("packages.settings.link.success")) + } else { + ctx.Flash.Error(ctx.Tr("packages.settings.link.error")) + } + + ctx.Redirect(ctx.Link) + return + case "delete": + err := packages_service.RemovePackageVersion(ctx.Doer, ctx.Package.Descriptor.Version) + if err != nil { + log.Error("Error deleting package: %v", err) + ctx.Flash.Error(ctx.Tr("packages.settings.delete.error")) + } else { + ctx.Flash.Success(ctx.Tr("packages.settings.delete.success")) + } + + ctx.Redirect(ctx.Package.Owner.HTMLURL() + "/-/packages") + return + } +} + +// DownloadPackageFile serves the content of a package file +func DownloadPackageFile(ctx *context.Context) { + pf, err := packages_model.GetFileForVersionByID(ctx, ctx.Package.Descriptor.Version.ID, ctx.ParamsInt64(":fileid")) + if err != nil { + if err == packages_model.ErrPackageFileNotExist { + ctx.NotFound("", err) + } else { + ctx.ServerError("GetFileForVersionByID", err) + } + return + } + + s, _, err := packages_service.GetPackageFileStream( + ctx, + pf, + ) + if err != nil { + ctx.ServerError("GetPackageFileStream", err) + return + } + defer s.Close() + + ctx.ServeStream(s, pf.Name) +} diff --git a/routers/web/user/profile.go b/routers/web/user/profile.go index 9c0ce10dae..85870eddf5 100644 --- a/routers/web/user/profile.go +++ b/routers/web/user/profile.go @@ -8,11 +8,12 @@ package user import ( "fmt" "net/http" - "path" "strings" "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" + project_model "code.gitea.io/gitea/models/project" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/context" @@ -24,133 +25,51 @@ import ( "code.gitea.io/gitea/routers/web/org" ) -// GetUserByName get user by name -func GetUserByName(ctx *context.Context, name string) *user_model.User { - user, err := 
user_model.GetUserByName(name) - if err != nil { - if user_model.IsErrUserNotExist(err) { - if redirectUserID, err := user_model.LookupUserRedirect(name); err == nil { - context.RedirectToUser(ctx, name, redirectUserID) - } else { - ctx.NotFound("GetUserByName", err) - } - } else { - ctx.ServerError("GetUserByName", err) - } - return nil - } - return user -} - -// GetUserByParams returns user whose name is presented in URL paramenter. -func GetUserByParams(ctx *context.Context) *user_model.User { - return GetUserByName(ctx, ctx.Params(":username")) -} - // Profile render user's profile page func Profile(ctx *context.Context) { - uname := ctx.Params(":username") - - // Special handle for FireFox requests favicon.ico. - if uname == "favicon.ico" { - ctx.ServeFile(path.Join(setting.StaticRootPath, "public/img/favicon.png")) + if strings.Contains(ctx.Req.Header.Get("Accept"), "application/rss+xml") { + feed.ShowUserFeedRSS(ctx) + return + } + if strings.Contains(ctx.Req.Header.Get("Accept"), "application/atom+xml") { + feed.ShowUserFeedAtom(ctx) return } - if strings.HasSuffix(uname, ".png") { - ctx.Error(http.StatusNotFound) - return - } - - isShowKeys := false - if strings.HasSuffix(uname, ".keys") { - isShowKeys = true - uname = strings.TrimSuffix(uname, ".keys") - } - - isShowGPG := false - if strings.HasSuffix(uname, ".gpg") { - isShowGPG = true - uname = strings.TrimSuffix(uname, ".gpg") - } - - showFeedType := "" - if strings.HasSuffix(uname, ".rss") { - showFeedType = "rss" - uname = strings.TrimSuffix(uname, ".rss") - } else if strings.Contains(ctx.Req.Header.Get("Accept"), "application/rss+xml") { - showFeedType = "rss" - } - if strings.HasSuffix(uname, ".atom") { - showFeedType = "atom" - uname = strings.TrimSuffix(uname, ".atom") - } else if strings.Contains(ctx.Req.Header.Get("Accept"), "application/atom+xml") { - showFeedType = "atom" - } - - ctxUser := GetUserByName(ctx, uname) - if ctx.Written() { - return - } - - if ctxUser.IsOrganization() { - /* - // TODO: enable after rss.RetrieveFeeds() do handle org correctly - // Show Org RSS feed - if len(showFeedType) != 0 { - rss.ShowUserFeed(ctx, ctxUser, showFeedType) - return - } - */ - + if ctx.ContextUser.IsOrganization() { org.Home(ctx) return } // check view permissions - if !models.IsUserVisibleToViewer(ctxUser, ctx.User) { - ctx.NotFound("user", fmt.Errorf(uname)) + if !user_model.IsUserVisibleToViewer(ctx.ContextUser, ctx.Doer) { + ctx.NotFound("user", fmt.Errorf(ctx.ContextUser.Name)) return } - // Show SSH keys. - if isShowKeys { - ShowSSHKeys(ctx, ctxUser.ID) - return - } - - // Show GPG keys. 
- if isShowGPG { - ShowGPGKeys(ctx, ctxUser.ID) - return - } - - // Show User RSS feed - if len(showFeedType) != 0 { - feed.ShowUserFeed(ctx, ctxUser, showFeedType) - return - } + // advertise feed via meta tag + ctx.Data["FeedURL"] = ctx.ContextUser.HTMLURL() // Show OpenID URIs - openIDs, err := user_model.GetUserOpenIDs(ctxUser.ID) + openIDs, err := user_model.GetUserOpenIDs(ctx.ContextUser.ID) if err != nil { ctx.ServerError("GetUserOpenIDs", err) return } var isFollowing bool - if ctx.User != nil && ctxUser != nil { - isFollowing = user_model.IsFollowing(ctx.User.ID, ctxUser.ID) + if ctx.Doer != nil { + isFollowing = user_model.IsFollowing(ctx.Doer.ID, ctx.ContextUser.ID) } - ctx.Data["Title"] = ctxUser.DisplayName() + ctx.Data["Title"] = ctx.ContextUser.DisplayName() ctx.Data["PageIsUserProfile"] = true - ctx.Data["Owner"] = ctxUser + ctx.Data["Owner"] = ctx.ContextUser ctx.Data["OpenIDs"] = openIDs ctx.Data["IsFollowing"] = isFollowing if setting.Service.EnableUserHeatmap { - data, err := models.GetUserHeatmapDataByUser(ctxUser, ctx.User) + data, err := models.GetUserHeatmapDataByUser(ctx.ContextUser, ctx.Doer) if err != nil { ctx.ServerError("GetUserHeatmapDataByUser", err) return @@ -158,13 +77,13 @@ func Profile(ctx *context.Context) { ctx.Data["HeatmapData"] = data } - if len(ctxUser.Description) != 0 { + if len(ctx.ContextUser.Description) != 0 { content, err := markdown.RenderString(&markup.RenderContext{ URLPrefix: ctx.Repo.RepoLink, Metas: map[string]string{"mode": "document"}, GitRepo: ctx.Repo.GitRepo, Ctx: ctx, - }, ctxUser.Description) + }, ctx.ContextUser.Description) if err != nil { ctx.ServerError("RenderString", err) return @@ -172,10 +91,10 @@ func Profile(ctx *context.Context) { ctx.Data["RenderedDescription"] = content } - showPrivate := ctx.IsSigned && (ctx.User.IsAdmin || ctx.User.ID == ctxUser.ID) + showPrivate := ctx.IsSigned && (ctx.Doer.IsAdmin || ctx.Doer.ID == ctx.ContextUser.ID) - orgs, err := models.FindOrgs(models.FindOrgOptions{ - UserID: ctxUser.ID, + orgs, err := organization.FindOrgs(organization.FindOrgOptions{ + UserID: ctx.ContextUser.ID, IncludePrivate: showPrivate, }) if err != nil { @@ -184,7 +103,7 @@ func Profile(ctx *context.Context) { } ctx.Data["Orgs"] = orgs - ctx.Data["HasOrgsVisible"] = models.HasOrgsVisible(orgs, ctx.User) + ctx.Data["HasOrgsVisible"] = organization.HasOrgsVisible(orgs, ctx.Doer) tab := ctx.FormString("tab") ctx.Data["TabName"] = tab @@ -238,7 +157,7 @@ func Profile(ctx *context.Context) { switch tab { case "followers": - items, err := user_model.GetUserFollowers(ctxUser, db.ListOptions{ + items, err := user_model.GetUserFollowers(ctx.ContextUser, db.ListOptions{ PageSize: setting.UI.User.RepoPagingNum, Page: page, }) @@ -248,9 +167,9 @@ func Profile(ctx *context.Context) { } ctx.Data["Cards"] = items - total = ctxUser.NumFollowers + total = ctx.ContextUser.NumFollowers case "following": - items, err := user_model.GetUserFollowing(ctxUser, db.ListOptions{ + items, err := user_model.GetUserFollowing(ctx.ContextUser, db.ListOptions{ PageSize: setting.UI.User.RepoPagingNum, Page: page, }) @@ -260,17 +179,18 @@ func Profile(ctx *context.Context) { } ctx.Data["Cards"] = items - total = ctxUser.NumFollowing + total = ctx.ContextUser.NumFollowing case "activity": - ctx.Data["Feeds"] = feed.RetrieveFeeds(ctx, models.GetFeedsOptions{ - RequestedUser: ctxUser, - Actor: ctx.User, + ctx.Data["Feeds"], err = models.GetFeeds(ctx, models.GetFeedsOptions{ + RequestedUser: ctx.ContextUser, + Actor: ctx.Doer, IncludePrivate: 
showPrivate, OnlyPerformedBy: true, IncludeDeleted: false, Date: ctx.FormString("date"), }) - if ctx.Written() { + if err != nil { + ctx.ServerError("GetFeeds", err) return } case "stars": @@ -280,11 +200,11 @@ func Profile(ctx *context.Context) { PageSize: setting.UI.User.RepoPagingNum, Page: page, }, - Actor: ctx.User, + Actor: ctx.Doer, Keyword: keyword, OrderBy: orderBy, Private: ctx.IsSigned, - StarredByID: ctxUser.ID, + StarredByID: ctx.ContextUser.ID, Collaborate: util.OptionalBoolFalse, TopicOnly: topicOnly, Language: language, @@ -297,10 +217,10 @@ func Profile(ctx *context.Context) { total = int(count) case "projects": - ctx.Data["OpenProjects"], _, err = models.GetProjects(models.ProjectSearchOptions{ + ctx.Data["OpenProjects"], _, err = project_model.GetProjects(project_model.SearchOptions{ Page: -1, IsClosed: util.OptionalBoolFalse, - Type: models.ProjectTypeIndividual, + Type: project_model.TypeIndividual, }) if err != nil { ctx.ServerError("GetProjects", err) @@ -312,11 +232,11 @@ func Profile(ctx *context.Context) { PageSize: setting.UI.User.RepoPagingNum, Page: page, }, - Actor: ctx.User, + Actor: ctx.Doer, Keyword: keyword, OrderBy: orderBy, Private: ctx.IsSigned, - WatchedByID: ctxUser.ID, + WatchedByID: ctx.ContextUser.ID, Collaborate: util.OptionalBoolFalse, TopicOnly: topicOnly, Language: language, @@ -334,9 +254,9 @@ func Profile(ctx *context.Context) { PageSize: setting.UI.User.RepoPagingNum, Page: page, }, - Actor: ctx.User, + Actor: ctx.Doer, Keyword: keyword, - OwnerID: ctxUser.ID, + OwnerID: ctx.ContextUser.ID, OrderBy: orderBy, Private: ctx.IsSigned, Collaborate: util.OptionalBoolFalse, @@ -360,25 +280,21 @@ func Profile(ctx *context.Context) { pager.AddParam(ctx, "language", "Language") } ctx.Data["Page"] = pager + ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled - ctx.Data["ShowUserEmail"] = len(ctxUser.Email) > 0 && ctx.IsSigned && (!ctxUser.KeepEmailPrivate || ctxUser.ID == ctx.User.ID) + ctx.Data["ShowUserEmail"] = len(ctx.ContextUser.Email) > 0 && ctx.IsSigned && (!ctx.ContextUser.KeepEmailPrivate || ctx.ContextUser.ID == ctx.Doer.ID) ctx.HTML(http.StatusOK, tplProfile) } // Action response for follow/unfollow user request func Action(ctx *context.Context) { - u := GetUserByParams(ctx) - if ctx.Written() { - return - } - var err error switch ctx.FormString("action") { case "follow": - err = user_model.FollowUser(ctx.User.ID, u.ID) + err = user_model.FollowUser(ctx.Doer.ID, ctx.ContextUser.ID) case "unfollow": - err = user_model.UnfollowUser(ctx.User.ID, u.ID) + err = user_model.UnfollowUser(ctx.Doer.ID, ctx.ContextUser.ID) } if err != nil { @@ -386,5 +302,5 @@ func Action(ctx *context.Context) { return } // FIXME: We should check this URL and make sure that it's a valid Gitea URL - ctx.RedirectToFirst(ctx.FormString("redirect_to"), u.HomeLink()) + ctx.RedirectToFirst(ctx.FormString("redirect_to"), ctx.ContextUser.HomeLink()) } diff --git a/routers/web/user/search.go b/routers/web/user/search.go new file mode 100644 index 0000000000..328c7bade4 --- /dev/null +++ b/routers/web/user/search.go @@ -0,0 +1,44 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package user + +import ( + "net/http" + + "code.gitea.io/gitea/models/db" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" +) + +// Search search users +func Search(ctx *context.Context) { + listOptions := db.ListOptions{ + Page: ctx.FormInt("page"), + PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), + } + + users, maxResults, err := user_model.SearchUsers(&user_model.SearchUserOptions{ + Actor: ctx.Doer, + Keyword: ctx.FormTrim("q"), + UID: ctx.FormInt64("uid"), + Type: user_model.UserTypeIndividual, + ListOptions: listOptions, + }) + if err != nil { + ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ + "ok": false, + "error": err.Error(), + }) + return + } + + ctx.SetTotalCountHeader(maxResults) + + ctx.JSON(http.StatusOK, map[string]interface{}{ + "ok": true, + "data": convert.ToUsers(ctx.Doer, users), + }) +} diff --git a/routers/web/user/setting/account.go b/routers/web/user/setting/account.go index b73122fa12..b2476dff94 100644 --- a/routers/web/user/setting/account.go +++ b/routers/web/user/setting/account.go @@ -11,7 +11,6 @@ import ( "time" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" @@ -34,7 +33,7 @@ const ( func Account(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("settings") ctx.Data["PageIsSettingsAccount"] = true - ctx.Data["Email"] = ctx.User.Email + ctx.Data["Email"] = ctx.Doer.Email loadAccountData(ctx) @@ -56,7 +55,7 @@ func AccountPost(ctx *context.Context) { if len(form.Password) < setting.MinPasswordLength { ctx.Flash.Error(ctx.Tr("auth.password_too_short", setting.MinPasswordLength)) - } else if ctx.User.IsPasswordSet() && !ctx.User.ValidatePassword(form.OldPassword) { + } else if ctx.Doer.IsPasswordSet() && !ctx.Doer.ValidatePassword(form.OldPassword) { ctx.Flash.Error(ctx.Tr("settings.password_incorrect")) } else if form.Password != form.Retype { ctx.Flash.Error(ctx.Tr("form.password_not_match")) @@ -71,15 +70,15 @@ func AccountPost(ctx *context.Context) { ctx.Flash.Error(errMsg) } else { var err error - if err = ctx.User.SetPassword(form.Password); err != nil { + if err = ctx.Doer.SetPassword(form.Password); err != nil { ctx.ServerError("UpdateUser", err) return } - if err := user_model.UpdateUserCols(db.DefaultContext, ctx.User, "salt", "passwd_hash_algo", "passwd"); err != nil { + if err := user_model.UpdateUserCols(ctx, ctx.Doer, "salt", "passwd_hash_algo", "passwd"); err != nil { ctx.ServerError("UpdateUser", err) return } - log.Trace("User password updated: %s", ctx.User.Name) + log.Trace("User password updated: %s", ctx.Doer.Name) ctx.Flash.Success(ctx.Tr("settings.change_password_success")) } @@ -99,50 +98,50 @@ func EmailPost(ctx *context.Context) { return } - log.Trace("Email made primary: %s", ctx.User.Name) + log.Trace("Email made primary: %s", ctx.Doer.Name) ctx.Redirect(setting.AppSubURL + "/user/settings/account") return } // Send activation Email if ctx.FormString("_method") == "SENDACTIVATION" { var address string - if ctx.Cache.IsExist("MailResendLimit_" + ctx.User.LowerName) { + if ctx.Cache.IsExist("MailResendLimit_" + ctx.Doer.LowerName) { log.Error("Send activation: activation still pending") ctx.Redirect(setting.AppSubURL + "/user/settings/account") return } id := ctx.FormInt64("id") - email, err := user_model.GetEmailAddressByID(ctx.User.ID, id) + email, err := 
user_model.GetEmailAddressByID(ctx.Doer.ID, id) if err != nil { - log.Error("GetEmailAddressByID(%d,%d) error: %v", ctx.User.ID, id, err) + log.Error("GetEmailAddressByID(%d,%d) error: %v", ctx.Doer.ID, id, err) ctx.Redirect(setting.AppSubURL + "/user/settings/account") return } if email == nil { - log.Warn("Send activation failed: EmailAddress[%d] not found for user: %-v", id, ctx.User) + log.Warn("Send activation failed: EmailAddress[%d] not found for user: %-v", id, ctx.Doer) ctx.Redirect(setting.AppSubURL + "/user/settings/account") return } if email.IsActivated { - log.Debug("Send activation failed: email %s is already activated for user: %-v", email.Email, ctx.User) + log.Debug("Send activation failed: email %s is already activated for user: %-v", email.Email, ctx.Doer) ctx.Redirect(setting.AppSubURL + "/user/settings/account") return } if email.IsPrimary { - if ctx.User.IsActive && !setting.Service.RegisterEmailConfirm { - log.Debug("Send activation failed: email %s is already activated for user: %-v", email.Email, ctx.User) + if ctx.Doer.IsActive && !setting.Service.RegisterEmailConfirm { + log.Debug("Send activation failed: email %s is already activated for user: %-v", email.Email, ctx.Doer) ctx.Redirect(setting.AppSubURL + "/user/settings/account") return } // Only fired when the primary email is inactive (Wrong state) - mailer.SendActivateAccountMail(ctx.Locale, ctx.User) + mailer.SendActivateAccountMail(ctx.Locale, ctx.Doer) } else { - mailer.SendActivateEmailMail(ctx.User, email) + mailer.SendActivateEmailMail(ctx.Doer, email) } address = email.Email - if err := ctx.Cache.Put("MailResendLimit_"+ctx.User.LowerName, ctx.User.LowerName, 180); err != nil { + if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil { log.Error("Set cache(MailResendLimit) fail: %v", err) } ctx.Flash.Info(ctx.Tr("settings.add_email_confirmation_sent", address, timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale.Language()))) @@ -155,16 +154,16 @@ func EmailPost(ctx *context.Context) { if !(preference == user_model.EmailNotificationsEnabled || preference == user_model.EmailNotificationsOnMention || preference == user_model.EmailNotificationsDisabled) { - log.Error("Email notifications preference change returned unrecognized option %s: %s", preference, ctx.User.Name) + log.Error("Email notifications preference change returned unrecognized option %s: %s", preference, ctx.Doer.Name) ctx.ServerError("SetEmailPreference", errors.New("option unrecognized")) return } - if err := user_model.SetEmailNotifications(ctx.User, preference); err != nil { + if err := user_model.SetEmailNotifications(ctx.Doer, preference); err != nil { log.Error("Set Email Notifications failed: %v", err) ctx.ServerError("SetEmailNotifications", err) return } - log.Trace("Email notifications preference made %s: %s", preference, ctx.User.Name) + log.Trace("Email notifications preference made %s: %s", preference, ctx.Doer.Name) ctx.Flash.Success(ctx.Tr("settings.email_preference_set_success")) ctx.Redirect(setting.AppSubURL + "/user/settings/account") return @@ -178,7 +177,7 @@ func EmailPost(ctx *context.Context) { } email := &user_model.EmailAddress{ - UID: ctx.User.ID, + UID: ctx.Doer.ID, Email: form.Email, IsActivated: !setting.Service.RegisterEmailConfirm, } @@ -188,7 +187,8 @@ func EmailPost(ctx *context.Context) { ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplSettingsAccount, &form) return - } else if user_model.IsErrEmailInvalid(err) { + } else if 
user_model.IsErrEmailCharIsNotSupported(err) || + user_model.IsErrEmailInvalid(err) { loadAccountData(ctx) ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplSettingsAccount, &form) @@ -200,8 +200,8 @@ func EmailPost(ctx *context.Context) { // Send confirmation email if setting.Service.RegisterEmailConfirm { - mailer.SendActivateEmailMail(ctx.User, email) - if err := ctx.Cache.Put("MailResendLimit_"+ctx.User.LowerName, ctx.User.LowerName, 180); err != nil { + mailer.SendActivateEmailMail(ctx.Doer, email) + if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil { log.Error("Set cache(MailResendLimit) fail: %v", err) } ctx.Flash.Info(ctx.Tr("settings.add_email_confirmation_sent", email.Email, timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale.Language()))) @@ -215,11 +215,11 @@ func EmailPost(ctx *context.Context) { // DeleteEmail response for delete user's email func DeleteEmail(ctx *context.Context) { - if err := user_model.DeleteEmailAddress(&user_model.EmailAddress{ID: ctx.FormInt64("id"), UID: ctx.User.ID}); err != nil { + if err := user_model.DeleteEmailAddress(&user_model.EmailAddress{ID: ctx.FormInt64("id"), UID: ctx.Doer.ID}); err != nil { ctx.ServerError("DeleteEmail", err) return } - log.Trace("Email address deleted: %s", ctx.User.Name) + log.Trace("Email address deleted: %s", ctx.Doer.Name) ctx.Flash.Success(ctx.Tr("settings.email_deletion_success")) ctx.JSON(http.StatusOK, map[string]interface{}{ @@ -232,7 +232,7 @@ func DeleteAccount(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("settings") ctx.Data["PageIsSettingsAccount"] = true - if _, _, err := auth.UserSignIn(ctx.User.Name, ctx.FormString("password")); err != nil { + if _, _, err := auth.UserSignIn(ctx.Doer.Name, ctx.FormString("password")); err != nil { if user_model.IsErrUserNotExist(err) { loadAccountData(ctx) @@ -243,7 +243,7 @@ func DeleteAccount(ctx *context.Context) { return } - if err := user.DeleteUser(ctx.User); err != nil { + if err := user.DeleteUser(ctx.Doer); err != nil { switch { case models.IsErrUserOwnRepos(err): ctx.Flash.Error(ctx.Tr("form.still_own_repo")) @@ -251,17 +251,20 @@ func DeleteAccount(ctx *context.Context) { case models.IsErrUserHasOrgs(err): ctx.Flash.Error(ctx.Tr("form.still_has_org")) ctx.Redirect(setting.AppSubURL + "/user/settings/account") + case models.IsErrUserOwnPackages(err): + ctx.Flash.Error(ctx.Tr("form.still_own_packages")) + ctx.Redirect(setting.AppSubURL + "/user/settings/account") default: ctx.ServerError("DeleteUser", err) } } else { - log.Trace("Account deleted: %s", ctx.User.Name) + log.Trace("Account deleted: %s", ctx.Doer.Name) ctx.Redirect(setting.AppSubURL + "/") } } func loadAccountData(ctx *context.Context) { - emlist, err := user_model.GetEmailAddresses(ctx.User.ID) + emlist, err := user_model.GetEmailAddresses(ctx.Doer.ID) if err != nil { ctx.ServerError("GetEmailAddresses", err) return @@ -270,7 +273,7 @@ func loadAccountData(ctx *context.Context) { user_model.EmailAddress CanBePrimary bool } - pendingActivation := ctx.Cache.IsExist("MailResendLimit_" + ctx.User.LowerName) + pendingActivation := ctx.Cache.IsExist("MailResendLimit_" + ctx.Doer.LowerName) emails := make([]*UserEmail, len(emlist)) for i, em := range emlist { var email UserEmail @@ -279,12 +282,12 @@ func loadAccountData(ctx *context.Context) { emails[i] = &email } ctx.Data["Emails"] = emails - ctx.Data["EmailNotificationsPreference"] = ctx.User.EmailNotifications() + ctx.Data["EmailNotificationsPreference"] = 
ctx.Doer.EmailNotifications() ctx.Data["ActivationsPending"] = pendingActivation ctx.Data["CanAddEmails"] = !pendingActivation || !setting.Service.RegisterEmailConfirm if setting.Service.UserDeleteWithCommentsMaxTime != 0 { ctx.Data["UserDeleteWithCommentsMaxTime"] = setting.Service.UserDeleteWithCommentsMaxTime.String() - ctx.Data["UserDeleteWithComments"] = ctx.User.CreatedUnix.AsTime().Add(setting.Service.UserDeleteWithCommentsMaxTime).After(time.Now()) + ctx.Data["UserDeleteWithComments"] = ctx.Doer.CreatedUnix.AsTime().Add(setting.Service.UserDeleteWithCommentsMaxTime).After(time.Now()) } } diff --git a/routers/web/user/setting/account_test.go b/routers/web/user/setting/account_test.go index a67d09e9ed..005603e7ac 100644 --- a/routers/web/user/setting/account_test.go +++ b/routers/web/user/setting/account_test.go @@ -94,6 +94,6 @@ func TestChangePassword(t *testing.T) { AccountPost(ctx) assert.Contains(t, ctx.Flash.ErrorMsg, req.Message) - assert.EqualValues(t, http.StatusFound, ctx.Resp.Status()) + assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status()) } } diff --git a/routers/web/user/setting/adopt.go b/routers/web/user/setting/adopt.go index 348705b743..ce2377a997 100644 --- a/routers/web/user/setting/adopt.go +++ b/routers/web/user/setting/adopt.go @@ -28,7 +28,7 @@ func AdoptOrDeleteRepository(ctx *context.Context) { dir := ctx.FormString("id") action := ctx.FormString("action") - ctxUser := ctx.User + ctxUser := ctx.Doer root := user_model.UserPath(ctxUser.LowerName) // check not a repo diff --git a/routers/web/user/setting/applications.go b/routers/web/user/setting/applications.go index 20ffdfaf84..b0f599fc45 100644 --- a/routers/web/user/setting/applications.go +++ b/routers/web/user/setting/applications.go @@ -45,7 +45,7 @@ func ApplicationsPost(ctx *context.Context) { } t := &models.AccessToken{ - UID: ctx.User.ID, + UID: ctx.Doer.ID, Name: form.Name, } @@ -73,7 +73,7 @@ func ApplicationsPost(ctx *context.Context) { // DeleteApplication response for delete user access token func DeleteApplication(ctx *context.Context) { - if err := models.DeleteAccessTokenByID(ctx.FormInt64("id"), ctx.User.ID); err != nil { + if err := models.DeleteAccessTokenByID(ctx.FormInt64("id"), ctx.Doer.ID); err != nil { ctx.Flash.Error("DeleteAccessTokenByID: " + err.Error()) } else { ctx.Flash.Success(ctx.Tr("settings.delete_token_success")) @@ -85,7 +85,7 @@ func DeleteApplication(ctx *context.Context) { } func loadApplicationsData(ctx *context.Context) { - tokens, err := models.ListAccessTokens(models.ListAccessTokensOptions{UserID: ctx.User.ID}) + tokens, err := models.ListAccessTokens(models.ListAccessTokensOptions{UserID: ctx.Doer.ID}) if err != nil { ctx.ServerError("ListAccessTokens", err) return @@ -93,12 +93,12 @@ func loadApplicationsData(ctx *context.Context) { ctx.Data["Tokens"] = tokens ctx.Data["EnableOAuth2"] = setting.OAuth2.Enable if setting.OAuth2.Enable { - ctx.Data["Applications"], err = auth.GetOAuth2ApplicationsByUserID(ctx.User.ID) + ctx.Data["Applications"], err = auth.GetOAuth2ApplicationsByUserID(ctx.Doer.ID) if err != nil { ctx.ServerError("GetOAuth2ApplicationsByUserID", err) return } - ctx.Data["Grants"], err = auth.GetOAuth2GrantsByUserID(ctx.User.ID) + ctx.Data["Grants"], err = auth.GetOAuth2GrantsByUserID(ctx.Doer.ID) if err != nil { ctx.ServerError("GetOAuth2GrantsByUserID", err) return diff --git a/routers/web/user/setting/keys.go b/routers/web/user/setting/keys.go index f926c1f311..a8d07ea47a 100644 --- a/routers/web/user/setting/keys.go +++ 
b/routers/web/user/setting/keys.go @@ -52,7 +52,7 @@ func KeysPost(ctx *context.Context) { } switch form.Type { case "principal": - content, err := asymkey_model.CheckPrincipalKeyString(ctx.User, form.Content) + content, err := asymkey_model.CheckPrincipalKeyString(ctx.Doer, form.Content) if err != nil { if db.IsErrSSHDisabled(err) { ctx.Flash.Info(ctx.Tr("settings.ssh_disabled")) @@ -62,7 +62,7 @@ func KeysPost(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + "/user/settings/keys") return } - if _, err = asymkey_model.AddPrincipalKey(ctx.User.ID, content, 0); err != nil { + if _, err = asymkey_model.AddPrincipalKey(ctx.Doer.ID, content, 0); err != nil { ctx.Data["HasPrincipalError"] = true switch { case asymkey_model.IsErrKeyAlreadyExist(err), asymkey_model.IsErrKeyNameAlreadyUsed(err): @@ -78,12 +78,12 @@ func KeysPost(ctx *context.Context) { ctx.Flash.Success(ctx.Tr("settings.add_principal_success", form.Content)) ctx.Redirect(setting.AppSubURL + "/user/settings/keys") case "gpg": - token := asymkey_model.VerificationToken(ctx.User, 1) - lastToken := asymkey_model.VerificationToken(ctx.User, 0) + token := asymkey_model.VerificationToken(ctx.Doer, 1) + lastToken := asymkey_model.VerificationToken(ctx.Doer, 0) - keys, err := asymkey_model.AddGPGKey(ctx.User.ID, form.Content, token, form.Signature) + keys, err := asymkey_model.AddGPGKey(ctx.Doer.ID, form.Content, token, form.Signature) if err != nil && asymkey_model.IsErrGPGInvalidTokenSignature(err) { - keys, err = asymkey_model.AddGPGKey(ctx.User.ID, form.Content, lastToken, form.Signature) + keys, err = asymkey_model.AddGPGKey(ctx.Doer.ID, form.Content, lastToken, form.Signature) } if err != nil { ctx.Data["HasGPGError"] = true @@ -125,12 +125,12 @@ func KeysPost(ctx *context.Context) { ctx.Flash.Success(ctx.Tr("settings.add_gpg_key_success", keyIDs)) ctx.Redirect(setting.AppSubURL + "/user/settings/keys") case "verify_gpg": - token := asymkey_model.VerificationToken(ctx.User, 1) - lastToken := asymkey_model.VerificationToken(ctx.User, 0) + token := asymkey_model.VerificationToken(ctx.Doer, 1) + lastToken := asymkey_model.VerificationToken(ctx.Doer, 0) - keyID, err := asymkey_model.VerifyGPGKey(ctx.User.ID, form.KeyID, token, form.Signature) + keyID, err := asymkey_model.VerifyGPGKey(ctx.Doer.ID, form.KeyID, token, form.Signature) if err != nil && asymkey_model.IsErrGPGInvalidTokenSignature(err) { - keyID, err = asymkey_model.VerifyGPGKey(ctx.User.ID, form.KeyID, lastToken, form.Signature) + keyID, err = asymkey_model.VerifyGPGKey(ctx.Doer.ID, form.KeyID, lastToken, form.Signature) } if err != nil { ctx.Data["HasGPGVerifyError"] = true @@ -161,7 +161,7 @@ func KeysPost(ctx *context.Context) { return } - if _, err = asymkey_model.AddPublicKey(ctx.User.ID, form.Title, content, 0); err != nil { + if _, err = asymkey_model.AddPublicKey(ctx.Doer.ID, form.Title, content, 0); err != nil { ctx.Data["HasSSHError"] = true switch { case asymkey_model.IsErrKeyAlreadyExist(err): @@ -185,12 +185,12 @@ func KeysPost(ctx *context.Context) { ctx.Flash.Success(ctx.Tr("settings.add_key_success", form.Title)) ctx.Redirect(setting.AppSubURL + "/user/settings/keys") case "verify_ssh": - token := asymkey_model.VerificationToken(ctx.User, 1) - lastToken := asymkey_model.VerificationToken(ctx.User, 0) + token := asymkey_model.VerificationToken(ctx.Doer, 1) + lastToken := asymkey_model.VerificationToken(ctx.Doer, 0) - fingerprint, err := asymkey_model.VerifySSHKey(ctx.User.ID, form.Fingerprint, token, form.Signature) + fingerprint, err := 
asymkey_model.VerifySSHKey(ctx.Doer.ID, form.Fingerprint, token, form.Signature) if err != nil && asymkey_model.IsErrSSHInvalidTokenSignature(err) { - fingerprint, err = asymkey_model.VerifySSHKey(ctx.User.ID, form.Fingerprint, lastToken, form.Signature) + fingerprint, err = asymkey_model.VerifySSHKey(ctx.Doer.ID, form.Fingerprint, lastToken, form.Signature) } if err != nil { ctx.Data["HasSSHVerifyError"] = true @@ -217,7 +217,7 @@ func KeysPost(ctx *context.Context) { func DeleteKey(ctx *context.Context) { switch ctx.FormString("type") { case "gpg": - if err := asymkey_model.DeleteGPGKey(ctx.User, ctx.FormInt64("id")); err != nil { + if err := asymkey_model.DeleteGPGKey(ctx.Doer, ctx.FormInt64("id")); err != nil { ctx.Flash.Error("DeleteGPGKey: " + err.Error()) } else { ctx.Flash.Success(ctx.Tr("settings.gpg_key_deletion_success")) @@ -234,13 +234,13 @@ func DeleteKey(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + "/user/settings/keys") return } - if err := asymkey_service.DeletePublicKey(ctx.User, keyID); err != nil { + if err := asymkey_service.DeletePublicKey(ctx.Doer, keyID); err != nil { ctx.Flash.Error("DeletePublicKey: " + err.Error()) } else { ctx.Flash.Success(ctx.Tr("settings.ssh_key_deletion_success")) } case "principal": - if err := asymkey_service.DeletePublicKey(ctx.User, ctx.FormInt64("id")); err != nil { + if err := asymkey_service.DeletePublicKey(ctx.Doer, ctx.FormInt64("id")); err != nil { ctx.Flash.Error("DeletePublicKey: " + err.Error()) } else { ctx.Flash.Success(ctx.Tr("settings.ssh_principal_deletion_success")) @@ -255,7 +255,7 @@ func DeleteKey(ctx *context.Context) { } func loadKeysData(ctx *context.Context) { - keys, err := asymkey_model.ListPublicKeys(ctx.User.ID, db.ListOptions{}) + keys, err := asymkey_model.ListPublicKeys(ctx.Doer.ID, db.ListOptions{}) if err != nil { ctx.ServerError("ListPublicKeys", err) return @@ -269,18 +269,18 @@ func loadKeysData(ctx *context.Context) { } ctx.Data["ExternalKeys"] = externalKeys - gpgkeys, err := asymkey_model.ListGPGKeys(db.DefaultContext, ctx.User.ID, db.ListOptions{}) + gpgkeys, err := asymkey_model.ListGPGKeys(ctx, ctx.Doer.ID, db.ListOptions{}) if err != nil { ctx.ServerError("ListGPGKeys", err) return } ctx.Data["GPGKeys"] = gpgkeys - tokenToSign := asymkey_model.VerificationToken(ctx.User, 1) + tokenToSign := asymkey_model.VerificationToken(ctx.Doer, 1) // generate a new aes cipher using the csrfToken ctx.Data["TokenToSign"] = tokenToSign - principals, err := asymkey_model.ListPrincipalKeys(ctx.User.ID, db.ListOptions{}) + principals, err := asymkey_model.ListPrincipalKeys(ctx.Doer.ID, db.ListOptions{}) if err != nil { ctx.ServerError("ListPrincipalKeys", err) return diff --git a/routers/web/user/setting/main_test.go b/routers/web/user/setting/main_test.go index b6ed7f5b18..d4df464abd 100644 --- a/routers/web/user/setting/main_test.go +++ b/routers/web/user/setting/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", "..", ".."), + }) } diff --git a/routers/web/user/setting/oauth2.go b/routers/web/user/setting/oauth2.go index 65d0924da0..76c50852a0 100644 --- a/routers/web/user/setting/oauth2.go +++ b/routers/web/user/setting/oauth2.go @@ -37,7 +37,7 @@ func OAuthApplicationsPost(ctx *context.Context) { app, err := auth.CreateOAuth2Application(auth.CreateOAuth2ApplicationOptions{ Name: form.Name, RedirectURIs: []string{form.RedirectURI}, 
- UserID: ctx.User.ID, + UserID: ctx.Doer.ID, }) if err != nil { ctx.ServerError("CreateOAuth2Application", err) @@ -71,7 +71,7 @@ func OAuthApplicationsEdit(ctx *context.Context) { ID: ctx.ParamsInt64("id"), Name: form.Name, RedirectURIs: []string{form.RedirectURI}, - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, }); err != nil { ctx.ServerError("UpdateOAuth2Application", err) return @@ -94,7 +94,7 @@ func OAuthApplicationsRegenerateSecret(ctx *context.Context) { ctx.ServerError("GetOAuth2ApplicationByID", err) return } - if app.UID != ctx.User.ID { + if app.UID != ctx.Doer.ID { ctx.NotFound("Application not found", nil) return } @@ -119,7 +119,7 @@ func OAuth2ApplicationShow(ctx *context.Context) { ctx.ServerError("GetOAuth2ApplicationByID", err) return } - if app.UID != ctx.User.ID { + if app.UID != ctx.Doer.ID { ctx.NotFound("Application not found", nil) return } @@ -129,11 +129,11 @@ func OAuth2ApplicationShow(ctx *context.Context) { // DeleteOAuth2Application deletes the given oauth2 application func DeleteOAuth2Application(ctx *context.Context) { - if err := auth.DeleteOAuth2Application(ctx.FormInt64("id"), ctx.User.ID); err != nil { + if err := auth.DeleteOAuth2Application(ctx.FormInt64("id"), ctx.Doer.ID); err != nil { ctx.ServerError("DeleteOAuth2Application", err) return } - log.Trace("OAuth2 Application deleted: %s", ctx.User.Name) + log.Trace("OAuth2 Application deleted: %s", ctx.Doer.Name) ctx.Flash.Success(ctx.Tr("settings.remove_oauth2_application_success")) ctx.JSON(http.StatusOK, map[string]interface{}{ @@ -143,11 +143,11 @@ func DeleteOAuth2Application(ctx *context.Context) { // RevokeOAuth2Grant revokes the grant with the given id func RevokeOAuth2Grant(ctx *context.Context) { - if ctx.User.ID == 0 || ctx.FormInt64("id") == 0 { + if ctx.Doer.ID == 0 || ctx.FormInt64("id") == 0 { ctx.ServerError("RevokeOAuth2Grant", fmt.Errorf("user id or grant id is zero")) return } - if err := auth.RevokeOAuth2Grant(ctx.FormInt64("id"), ctx.User.ID); err != nil { + if err := auth.RevokeOAuth2Grant(ctx.FormInt64("id"), ctx.Doer.ID); err != nil { ctx.ServerError("RevokeOAuth2Grant", err) return } diff --git a/routers/web/user/setting/profile.go b/routers/web/user/setting/profile.go index e77e02348c..0123b9b523 100644 --- a/routers/web/user/setting/profile.go +++ b/routers/web/user/setting/profile.go @@ -17,12 +17,14 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/translation/i18n" "code.gitea.io/gitea/modules/typesniffer" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" @@ -30,8 +32,6 @@ import ( "code.gitea.io/gitea/services/agit" "code.gitea.io/gitea/services/forms" user_service "code.gitea.io/gitea/services/user" - - "github.com/unknwon/i18n" ) const ( @@ -106,24 +106,24 @@ func ProfilePost(ctx *context.Context) { return } - if len(form.Name) != 0 && ctx.User.Name != form.Name { - log.Debug("Changing name for %s to %s", ctx.User.Name, form.Name) - if err := HandleUsernameChange(ctx, ctx.User, form.Name); err != nil { + if len(form.Name) != 0 && ctx.Doer.Name != form.Name { + log.Debug("Changing name for %s to %s", ctx.Doer.Name, form.Name) + if err := HandleUsernameChange(ctx, ctx.Doer, form.Name); err != nil { 
ctx.Redirect(setting.AppSubURL + "/user/settings") return } - ctx.User.Name = form.Name - ctx.User.LowerName = strings.ToLower(form.Name) + ctx.Doer.Name = form.Name + ctx.Doer.LowerName = strings.ToLower(form.Name) } - ctx.User.FullName = form.FullName - ctx.User.KeepEmailPrivate = form.KeepEmailPrivate - ctx.User.Website = form.Website - ctx.User.Location = form.Location - ctx.User.Description = form.Description - ctx.User.KeepActivityPrivate = form.KeepActivityPrivate - ctx.User.Visibility = form.Visibility - if err := user_model.UpdateUserSetting(ctx.User); err != nil { + ctx.Doer.FullName = form.FullName + ctx.Doer.KeepEmailPrivate = form.KeepEmailPrivate + ctx.Doer.Website = form.Website + ctx.Doer.Location = form.Location + ctx.Doer.Description = form.Description + ctx.Doer.KeepActivityPrivate = form.KeepActivityPrivate + ctx.Doer.Visibility = form.Visibility + if err := user_model.UpdateUserSetting(ctx.Doer); err != nil { if _, ok := err.(user_model.ErrEmailAlreadyUsed); ok { ctx.Flash.Error(ctx.Tr("form.email_been_used")) ctx.Redirect(setting.AppSubURL + "/user/settings") @@ -134,10 +134,10 @@ func ProfilePost(ctx *context.Context) { } // Update the language to the one we just set - middleware.SetLocaleCookie(ctx.Resp, ctx.User.Language, 0) + middleware.SetLocaleCookie(ctx.Resp, ctx.Doer.Language, 0) - log.Trace("User settings updated: %s", ctx.User.Name) - ctx.Flash.Success(i18n.Tr(ctx.User.Language, "settings.update_profile_success")) + log.Trace("User settings updated: %s", ctx.Doer.Name) + ctx.Flash.Success(i18n.Tr(ctx.Doer.Language, "settings.update_profile_success")) ctx.Redirect(setting.AppSubURL + "/user/settings") } @@ -185,7 +185,7 @@ func UpdateAvatarSetting(ctx *context.Context, form *forms.AvatarForm, ctxUser * } } - if err := user_model.UpdateUserCols(db.DefaultContext, ctxUser, "avatar", "avatar_email", "use_custom_avatar"); err != nil { + if err := user_model.UpdateUserCols(ctx, ctxUser, "avatar", "avatar_email", "use_custom_avatar"); err != nil { return fmt.Errorf("UpdateUser: %v", err) } @@ -195,7 +195,7 @@ func UpdateAvatarSetting(ctx *context.Context, form *forms.AvatarForm, ctxUser * // AvatarPost response for change user's avatar request func AvatarPost(ctx *context.Context) { form := web.GetForm(ctx).(*forms.AvatarForm) - if err := UpdateAvatarSetting(ctx, form, ctx.User); err != nil { + if err := UpdateAvatarSetting(ctx, form, ctx.Doer); err != nil { ctx.Flash.Error(err.Error()) } else { ctx.Flash.Success(ctx.Tr("settings.update_avatar_success")) @@ -206,7 +206,7 @@ func AvatarPost(ctx *context.Context) { // DeleteAvatar render delete avatar page func DeleteAvatar(ctx *context.Context) { - if err := user_service.DeleteAvatar(ctx.User); err != nil { + if err := user_service.DeleteAvatar(ctx.Doer); err != nil { ctx.Flash.Error(err.Error()) } @@ -218,12 +218,12 @@ func Organization(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("settings") ctx.Data["PageIsSettingsOrganization"] = true - opts := models.FindOrgOptions{ + opts := organization.FindOrgOptions{ ListOptions: db.ListOptions{ PageSize: setting.UI.Admin.UserPagingNum, Page: ctx.FormInt("page"), }, - UserID: ctx.User.ID, + UserID: ctx.Doer.ID, IncludePrivate: ctx.IsSigned, } @@ -231,12 +231,12 @@ func Organization(ctx *context.Context) { opts.Page = 1 } - orgs, err := models.FindOrgs(opts) + orgs, err := organization.FindOrgs(opts) if err != nil { ctx.ServerError("FindOrgs", err) return } - total, err := models.CountOrgs(opts) + total, err := organization.CountOrgs(opts) if err != nil { 
ctx.ServerError("CountOrgs", err) return @@ -268,7 +268,7 @@ func Repos(ctx *context.Context) { adoptOrDelete := ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories) - ctxUser := ctx.User + ctxUser := ctx.Doer count := 0 if adoptOrDelete { @@ -360,7 +360,7 @@ func Appearance(ctx *context.Context) { ctx.Data["PageIsSettingsAppearance"] = true var hiddenCommentTypes *big.Int - val, err := user_model.GetUserSetting(ctx.User.ID, user_model.SettingsKeyHiddenCommentTypes) + val, err := user_model.GetUserSetting(ctx.Doer.ID, user_model.SettingsKeyHiddenCommentTypes) if err != nil { ctx.ServerError("GetUserSetting", err) return @@ -391,13 +391,13 @@ func UpdateUIThemePost(ctx *context.Context) { return } - if err := user_model.UpdateUserTheme(ctx.User, form.Theme); err != nil { + if err := user_model.UpdateUserTheme(ctx.Doer, form.Theme); err != nil { ctx.Flash.Error(ctx.Tr("settings.theme_update_error")) ctx.Redirect(setting.AppSubURL + "/user/settings/appearance") return } - log.Trace("Update user theme: %s", ctx.User.Name) + log.Trace("Update user theme: %s", ctx.Doer.Name) ctx.Flash.Success(ctx.Tr("settings.theme_update_success")) ctx.Redirect(setting.AppSubURL + "/user/settings/appearance") } @@ -414,31 +414,31 @@ func UpdateUserLang(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + "/user/settings/appearance") return } - ctx.User.Language = form.Language + ctx.Doer.Language = form.Language } - if err := user_model.UpdateUserSetting(ctx.User); err != nil { + if err := user_model.UpdateUserSetting(ctx.Doer); err != nil { ctx.ServerError("UpdateUserSetting", err) return } // Update the language to the one we just set - middleware.SetLocaleCookie(ctx.Resp, ctx.User.Language, 0) + middleware.SetLocaleCookie(ctx.Resp, ctx.Doer.Language, 0) - log.Trace("User settings updated: %s", ctx.User.Name) - ctx.Flash.Success(i18n.Tr(ctx.User.Language, "settings.update_language_success")) + log.Trace("User settings updated: %s", ctx.Doer.Name) + ctx.Flash.Success(i18n.Tr(ctx.Doer.Language, "settings.update_language_success")) ctx.Redirect(setting.AppSubURL + "/user/settings/appearance") } // UpdateUserHiddenComments update a user's shown comment types func UpdateUserHiddenComments(ctx *context.Context) { - err := user_model.SetUserSetting(ctx.User.ID, user_model.SettingsKeyHiddenCommentTypes, forms.UserHiddenCommentTypesFromRequest(ctx).String()) + err := user_model.SetUserSetting(ctx.Doer.ID, user_model.SettingsKeyHiddenCommentTypes, forms.UserHiddenCommentTypesFromRequest(ctx).String()) if err != nil { ctx.ServerError("SetUserSetting", err) return } - log.Trace("User settings updated: %s", ctx.User.Name) + log.Trace("User settings updated: %s", ctx.Doer.Name) ctx.Flash.Success(ctx.Tr("settings.saved_successfully")) ctx.Redirect(setting.AppSubURL + "/user/settings/appearance") } diff --git a/routers/web/user/setting/security/2fa.go b/routers/web/user/setting/security/2fa.go index 98e1f9b083..5fd81bae41 100644 --- a/routers/web/user/setting/security/2fa.go +++ b/routers/web/user/setting/security/2fa.go @@ -29,7 +29,7 @@ func RegenerateScratchTwoFactor(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("settings") ctx.Data["PageIsSettingsSecurity"] = true - t, err := auth.GetTwoFactorByUID(ctx.User.ID) + t, err := auth.GetTwoFactorByUID(ctx.Doer.ID) if err != nil { if auth.IsErrTwoFactorNotEnrolled(err) { ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled")) @@ -59,7 +59,7 @@ func DisableTwoFactor(ctx 
*context.Context) { ctx.Data["Title"] = ctx.Tr("settings") ctx.Data["PageIsSettingsSecurity"] = true - t, err := auth.GetTwoFactorByUID(ctx.User.ID) + t, err := auth.GetTwoFactorByUID(ctx.Doer.ID) if err != nil { if auth.IsErrTwoFactorNotEnrolled(err) { ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled")) @@ -69,7 +69,7 @@ func DisableTwoFactor(ctx *context.Context) { return } - if err = auth.DeleteTwoFactorByID(t.ID, ctx.User.ID); err != nil { + if err = auth.DeleteTwoFactorByID(t.ID, ctx.Doer.ID); err != nil { if auth.IsErrTwoFactorNotEnrolled(err) { // There is a potential DB race here - we must have been disabled by another request in the intervening period ctx.Flash.Success(ctx.Tr("settings.twofa_disabled")) @@ -100,7 +100,7 @@ func twofaGenerateSecretAndQr(ctx *context.Context) bool { otpKey, err = totp.Generate(totp.GenerateOpts{ SecretSize: 40, Issuer: issuer, - AccountName: ctx.User.Name, + AccountName: ctx.Doer.Name, }) if err != nil { ctx.ServerError("SettingsTwoFactor: totpGenerate Failed", err) @@ -146,10 +146,10 @@ func EnrollTwoFactor(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("settings") ctx.Data["PageIsSettingsSecurity"] = true - t, err := auth.GetTwoFactorByUID(ctx.User.ID) + t, err := auth.GetTwoFactorByUID(ctx.Doer.ID) if t != nil { // already enrolled - we should redirect back! - log.Warn("Trying to re-enroll %-v in twofa when already enrolled", ctx.User) + log.Warn("Trying to re-enroll %-v in twofa when already enrolled", ctx.Doer) ctx.Flash.Error(ctx.Tr("settings.twofa_is_enrolled")) ctx.Redirect(setting.AppSubURL + "/user/settings/security") return @@ -172,7 +172,7 @@ func EnrollTwoFactorPost(ctx *context.Context) { ctx.Data["Title"] = ctx.Tr("settings") ctx.Data["PageIsSettingsSecurity"] = true - t, err := auth.GetTwoFactorByUID(ctx.User.ID) + t, err := auth.GetTwoFactorByUID(ctx.Doer.ID) if t != nil { // already enrolled ctx.Flash.Error(ctx.Tr("settings.twofa_is_enrolled")) @@ -210,7 +210,7 @@ func EnrollTwoFactorPost(ctx *context.Context) { } t = &auth.TwoFactor{ - UID: ctx.User.ID, + UID: ctx.Doer.ID, } err = t.SetSecret(secret) if err != nil { diff --git a/routers/web/user/setting/security/openid.go b/routers/web/user/setting/security/openid.go index 1a15a2414c..2ecc9b0533 100644 --- a/routers/web/user/setting/security/openid.go +++ b/routers/web/user/setting/security/openid.go @@ -45,7 +45,7 @@ func OpenIDPost(ctx *context.Context) { form.Openid = id log.Trace("Normalized id: " + id) - oids, err := user_model.GetUserOpenIDs(ctx.User.ID) + oids, err := user_model.GetUserOpenIDs(ctx.Doer.ID) if err != nil { ctx.ServerError("GetUserOpenIDs", err) return @@ -89,7 +89,7 @@ func settingsOpenIDVerify(ctx *context.Context) { log.Trace("Verified ID: " + id) - oid := &user_model.UserOpenID{UID: ctx.User.ID, URI: id} + oid := &user_model.UserOpenID{UID: ctx.Doer.ID, URI: id} if err = user_model.AddUserOpenID(oid); err != nil { if user_model.IsErrOpenIDAlreadyUsed(err) { ctx.RenderWithErr(ctx.Tr("form.openid_been_used", id), tplSettingsSecurity, &forms.AddOpenIDForm{Openid: id}) @@ -98,7 +98,7 @@ func settingsOpenIDVerify(ctx *context.Context) { ctx.ServerError("AddUserOpenID", err) return } - log.Trace("Associated OpenID %s to user %s", id, ctx.User.Name) + log.Trace("Associated OpenID %s to user %s", id, ctx.Doer.Name) ctx.Flash.Success(ctx.Tr("settings.add_openid_success")) ctx.Redirect(setting.AppSubURL + "/user/settings/security") @@ -106,11 +106,11 @@ func settingsOpenIDVerify(ctx *context.Context) { // DeleteOpenID response for delete user's openid func 
DeleteOpenID(ctx *context.Context) { - if err := user_model.DeleteUserOpenID(&user_model.UserOpenID{ID: ctx.FormInt64("id"), UID: ctx.User.ID}); err != nil { + if err := user_model.DeleteUserOpenID(&user_model.UserOpenID{ID: ctx.FormInt64("id"), UID: ctx.Doer.ID}); err != nil { ctx.ServerError("DeleteUserOpenID", err) return } - log.Trace("OpenID address deleted: %s", ctx.User.Name) + log.Trace("OpenID address deleted: %s", ctx.Doer.Name) ctx.Flash.Success(ctx.Tr("settings.openid_deletion_success")) ctx.JSON(http.StatusOK, map[string]interface{}{ diff --git a/routers/web/user/setting/security/security.go b/routers/web/user/setting/security/security.go index 67bbbf8b31..a87012c480 100644 --- a/routers/web/user/setting/security/security.go +++ b/routers/web/user/setting/security/security.go @@ -43,7 +43,7 @@ func DeleteAccountLink(ctx *context.Context) { if id <= 0 { ctx.Flash.Error("Account link id is not given") } else { - if _, err := user_model.RemoveAccountLink(ctx.User, id); err != nil { + if _, err := user_model.RemoveAccountLink(ctx.Doer, id); err != nil { ctx.Flash.Error("RemoveAccountLink: " + err.Error()) } else { ctx.Flash.Success(ctx.Tr("settings.remove_account_link_success")) @@ -56,28 +56,28 @@ func DeleteAccountLink(ctx *context.Context) { } func loadSecurityData(ctx *context.Context) { - enrolled, err := auth.HasTwoFactorByUID(ctx.User.ID) + enrolled, err := auth.HasTwoFactorByUID(ctx.Doer.ID) if err != nil { ctx.ServerError("SettingsTwoFactor", err) return } ctx.Data["TOTPEnrolled"] = enrolled - credentials, err := auth.GetWebAuthnCredentialsByUID(ctx.User.ID) + credentials, err := auth.GetWebAuthnCredentialsByUID(ctx.Doer.ID) if err != nil { ctx.ServerError("GetWebAuthnCredentialsByUID", err) return } ctx.Data["WebAuthnCredentials"] = credentials - tokens, err := models.ListAccessTokens(models.ListAccessTokensOptions{UserID: ctx.User.ID}) + tokens, err := models.ListAccessTokens(models.ListAccessTokensOptions{UserID: ctx.Doer.ID}) if err != nil { ctx.ServerError("ListAccessTokens", err) return } ctx.Data["Tokens"] = tokens - accountLinks, err := user_model.ListAccountLinks(ctx.User) + accountLinks, err := user_model.ListAccountLinks(ctx.Doer) if err != nil { ctx.ServerError("ListAccountLinks", err) return @@ -109,7 +109,7 @@ func loadSecurityData(ctx *context.Context) { } ctx.Data["AccountLinks"] = sources - openid, err := user_model.GetUserOpenIDs(ctx.User.ID) + openid, err := user_model.GetUserOpenIDs(ctx.Doer.ID) if err != nil { ctx.ServerError("GetUserOpenIDs", err) return diff --git a/routers/web/user/setting/security/webauthn.go b/routers/web/user/setting/security/webauthn.go index 7e2fc7283b..bb2d1f733e 100644 --- a/routers/web/user/setting/security/webauthn.go +++ b/routers/web/user/setting/security/webauthn.go @@ -28,7 +28,7 @@ func WebAuthnRegister(ctx *context.Context) { return } - cred, err := auth.GetWebAuthnCredentialByName(ctx.User.ID, form.Name) + cred, err := auth.GetWebAuthnCredentialByName(ctx.Doer.ID, form.Name) if err != nil && !auth.IsErrWebAuthnCredentialNotExist(err) { ctx.ServerError("GetWebAuthnCredentialsByUID", err) return @@ -44,7 +44,7 @@ func WebAuthnRegister(ctx *context.Context) { return } - credentialOptions, sessionData, err := wa.WebAuthn.BeginRegistration((*wa.User)(ctx.User)) + credentialOptions, sessionData, err := wa.WebAuthn.BeginRegistration((*wa.User)(ctx.Doer)) if err != nil { ctx.ServerError("Unable to BeginRegistration", err) return @@ -78,7 +78,7 @@ func WebauthnRegisterPost(ctx *context.Context) { }() // Verify that the 
challenge succeeded - cred, err := wa.WebAuthn.FinishRegistration((*wa.User)(ctx.User), *sessionData, ctx.Req) + cred, err := wa.WebAuthn.FinishRegistration((*wa.User)(ctx.Doer), *sessionData, ctx.Req) if err != nil { if pErr, ok := err.(*protocol.Error); ok { log.Error("Unable to finish registration due to error: %v\nDevInfo: %s", pErr, pErr.DevInfo) @@ -87,7 +87,7 @@ func WebauthnRegisterPost(ctx *context.Context) { return } - dbCred, err := auth.GetWebAuthnCredentialByName(ctx.User.ID, name) + dbCred, err := auth.GetWebAuthnCredentialByName(ctx.Doer.ID, name) if err != nil && !auth.IsErrWebAuthnCredentialNotExist(err) { ctx.ServerError("GetWebAuthnCredentialsByUID", err) return @@ -98,7 +98,7 @@ func WebauthnRegisterPost(ctx *context.Context) { } // Create the credential - _, err = auth.CreateCredential(ctx.User.ID, name, cred) + _, err = auth.CreateCredential(ctx.Doer.ID, name, cred) if err != nil { ctx.ServerError("CreateCredential", err) return @@ -111,7 +111,7 @@ func WebauthnRegisterPost(ctx *context.Context) { // WebauthnDelete deletes an security key by id func WebauthnDelete(ctx *context.Context) { form := web.GetForm(ctx).(*forms.WebauthnDeleteForm) - if _, err := auth.DeleteCredential(form.ID, ctx.User.ID); err != nil { + if _, err := auth.DeleteCredential(form.ID, ctx.Doer.ID); err != nil { ctx.ServerError("GetWebAuthnCredentialByID", err) return } diff --git a/routers/web/user/stop_watch.go b/routers/web/user/stop_watch.go new file mode 100644 index 0000000000..4b16c9aeda --- /dev/null +++ b/routers/web/user/stop_watch.go @@ -0,0 +1,41 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package user + +import ( + "net/http" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/convert" +) + +// GetStopwatches get all stopwatches +func GetStopwatches(ctx *context.Context) { + sws, err := models.GetUserStopwatches(ctx.Doer.ID, db.ListOptions{ + Page: ctx.FormInt("page"), + PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")), + }) + if err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + + count, err := models.CountUserStopwatches(ctx.Doer.ID) + if err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + + apiSWs, err := convert.ToStopWatches(sws) + if err != nil { + ctx.Error(http.StatusInternalServerError, err.Error()) + return + } + + ctx.SetTotalCountHeader(count) + ctx.JSON(http.StatusOK, apiSWs) +} diff --git a/routers/web/user/task.go b/routers/web/user/task.go index 4dbd1b8537..fd561cdd4c 100644 --- a/routers/web/user/task.go +++ b/routers/web/user/task.go @@ -15,7 +15,7 @@ import ( // TaskStatus returns task's status func TaskStatus(ctx *context.Context) { - task, opts, err := models.GetMigratingTaskByID(ctx.ParamsInt64("task"), ctx.User.ID) + task, opts, err := models.GetMigratingTaskByID(ctx.ParamsInt64("task"), ctx.Doer.ID) if err != nil { if models.IsErrTaskDoesNotExist(err) { ctx.JSON(http.StatusNotFound, map[string]interface{}{ diff --git a/routers/web/web.go b/routers/web/web.go index 53304dac90..97ea1e9035 100644 --- a/routers/web/web.go +++ b/routers/web/web.go @@ -10,6 +10,7 @@ import ( "os" "path" + "code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/models/unit" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/git" @@ -19,22 +20,26 @@ import ( 
"code.gitea.io/gitea/modules/public" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/templates" "code.gitea.io/gitea/modules/validation" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/modules/web/routing" - "code.gitea.io/gitea/routers/api/v1/misc" "code.gitea.io/gitea/routers/web/admin" "code.gitea.io/gitea/routers/web/auth" "code.gitea.io/gitea/routers/web/dev" "code.gitea.io/gitea/routers/web/events" "code.gitea.io/gitea/routers/web/explore" + "code.gitea.io/gitea/routers/web/feed" + "code.gitea.io/gitea/routers/web/healthcheck" + "code.gitea.io/gitea/routers/web/misc" "code.gitea.io/gitea/routers/web/org" "code.gitea.io/gitea/routers/web/repo" "code.gitea.io/gitea/routers/web/user" user_setting "code.gitea.io/gitea/routers/web/user/setting" "code.gitea.io/gitea/routers/web/user/setting/security" auth_service "code.gitea.io/gitea/services/auth" + context_service "code.gitea.io/gitea/services/context" "code.gitea.io/gitea/services/forms" "code.gitea.io/gitea/services/lfs" "code.gitea.io/gitea/services/mailer" @@ -42,6 +47,7 @@ import ( _ "code.gitea.io/gitea/modules/session" // to registers all internal adapters "gitea.com/go-chi/captcha" + "gitea.com/go-chi/session" "github.com/NYTimes/gziphandler" "github.com/go-chi/chi/v5/middleware" "github.com/go-chi/cors" @@ -71,8 +77,28 @@ func CorsHandler() func(next http.Handler) http.Handler { } } +// The OAuth2 plugin is expected to be executed first, as it must ignore the user id stored +// in the session (if there is a user id stored in session other plugins might return the user +// object for that id). +// +// The Session plugin is expected to be executed second, in order to skip authentication +// for users that have already signed in. 
+func buildAuthGroup() *auth_service.Group { + group := auth_service.NewGroup( + &auth_service.OAuth2{}, // FIXME: this should be removed and only applied in download and oauth realted routers + &auth_service.Basic{}, // FIXME: this should be removed and only applied in download and git/lfs routers + &auth_service.Session{}, + ) + if setting.Service.EnableReverseProxyAuth { + group.Add(&auth_service.ReverseProxy{}) + } + specialAdd(group) + + return group +} + // Routes returns all web routes -func Routes(sessioner func(http.Handler) http.Handler) *web.Route { +func Routes() *web.Route { routes := web.NewRoute() routes.Use(web.WrapWithPrefix(public.AssetsURLPathPrefix, public.AssetsHandlerFunc(&public.Options{ @@ -81,6 +107,17 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { CorsHandler: CorsHandler(), }), "AssetsHandler")) + sessioner := session.Sessioner(session.Options{ + Provider: setting.SessionConfig.Provider, + ProviderConfig: setting.SessionConfig.ProviderConfig, + CookieName: setting.SessionConfig.CookieName, + CookiePath: setting.SessionConfig.CookiePath, + Gclifetime: setting.SessionConfig.Gclifetime, + Maxlifetime: setting.SessionConfig.Maxlifetime, + Secure: setting.SessionConfig.Secure, + SameSite: setting.SessionConfig.SameSite, + Domain: setting.SessionConfig.Domain, + }) routes.Use(sessioner) routes.Use(Recovery()) @@ -96,7 +133,12 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { // this png is very likely to always be below the limit for gzip so it doesn't need to pass through gzip routes.Get("/apple-touch-icon.png", func(w http.ResponseWriter, req *http.Request) { - http.Redirect(w, req, path.Join(setting.StaticURLPrefix, "/assets/img/apple-touch-icon.png"), 301) + http.Redirect(w, req, path.Join(setting.StaticURLPrefix, "/assets/img/apple-touch-icon.png"), http.StatusPermanentRedirect) + }) + + // redirect default favicon to the path of the custom favicon with a default as a fallback + routes.Get("/favicon.ico", func(w http.ResponseWriter, req *http.Request) { + http.Redirect(w, req, path.Join(setting.StaticURLPrefix, "/assets/img/favicon.png"), 301) }) common := []interface{}{} @@ -137,24 +179,31 @@ func Routes(sessioner func(http.Handler) http.Handler) *web.Route { routes.Get("/ssh_info", func(rw http.ResponseWriter, req *http.Request) { if !git.SupportProcReceive { - rw.WriteHeader(404) + rw.WriteHeader(http.StatusNotFound) return } rw.Header().Set("content-type", "text/json;charset=UTF-8") _, err := rw.Write([]byte(`{"type":"gitea","version":1}`)) if err != nil { log.Error("fail to write result: err: %v", err) - rw.WriteHeader(500) + rw.WriteHeader(http.StatusInternalServerError) return } - rw.WriteHeader(200) + rw.WriteHeader(http.StatusOK) }) + routes.Get("/api/healthz", healthcheck.Check) + // Removed: toolbox.Toolboxer middleware will provide debug information which seems unnecessary common = append(common, context.Contexter()) + group := buildAuthGroup() + if err := group.Init(); err != nil { + log.Error("Could not initialize '%s' auth method, error: %s", group.Name(), err) + } + // Get user from session if logged in. 
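// [Editor's aside, not part of the patch: the ordering described in the comment above matters
// because the first method that returns a user wins. This is a conceptual sketch only, under
// the assumption that auth_service.Method exposes the Verify signature shown for Basic later
// in this patch; it is not the actual Group implementation.]
package example

import (
	"net/http"

	user_model "code.gitea.io/gitea/models/user"
	auth_service "code.gitea.io/gitea/services/auth"
)

// firstVerifiedUser walks the methods in registration order and stops at the first hit,
// which is why OAuth2 (token-based) has to be consulted before Session.
func firstVerifiedUser(methods []auth_service.Method, req *http.Request, w http.ResponseWriter, store auth_service.DataStore, sess auth_service.SessionStore) *user_model.User {
	for _, m := range methods {
		if user := m.Verify(req, w, store, sess); user != nil {
			return user
		}
	}
	return nil
}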
- common = append(common, context.Auth(auth_service.NewGroup(auth_service.Methods()...))) + common = append(common, context.Auth(group)) // GetHead allows a HEAD request redirect to GET if HEAD method is not defined for that route common = append(common, middleware.GetHead) @@ -233,6 +282,13 @@ func RegisterRoutes(m *web.Route) { } } + federationEnabled := func(ctx *context.Context) { + if !setting.Federation.Enabled { + ctx.Error(http.StatusNotFound) + return + } + } + // FIXME: not all routes need go through same middleware. // Especially some AJAX requests, we can reduce middleware number to improve performance. // Routers. @@ -240,10 +296,10 @@ func RegisterRoutes(m *web.Route) { m.Get("/", Home) m.Group("/.well-known", func() { m.Get("/openid-configuration", auth.OIDCWellKnown) - if setting.Federation.Enabled { + m.Group("", func() { m.Get("/nodeinfo", NodeInfoLinks) m.Get("/webfinger", WebfingerQuery) - } + }, federationEnabled) m.Get("/change-password", func(w http.ResponseWriter, req *http.Request) { http.Redirect(w, req, "/user/settings/account", http.StatusTemporaryRedirect) }) @@ -257,8 +313,13 @@ func RegisterRoutes(m *web.Route) { m.Get("/users", explore.Users) m.Get("/organizations", explore.Organizations) m.Get("/code", explore.Code) + m.Get("/topics/search", explore.TopicSearch) }, ignExploreSignIn) - m.Get("/issues", reqSignIn, user.Issues) + m.Group("/issues", func() { + m.Get("", user.Issues) + m.Get("/search", repo.SearchIssues) + }, reqSignIn) + m.Get("/pulls", reqSignIn, user.Pulls) m.Get("/milestones", reqSignIn, reqMilestonesDashboardPageEnabled, user.Milestones) @@ -379,8 +440,8 @@ func RegisterRoutes(m *web.Route) { m.Group("/user", func() { // r.Get("/feeds", binding.Bind(auth.FeedsForm{}), user.Feeds) - m.Get("/activate", auth.Activate, reqSignIn) - m.Post("/activate", auth.ActivatePost, reqSignIn) + m.Get("/activate", auth.Activate) + m.Post("/activate", auth.ActivatePost) m.Any("/activate_email", auth.ActivateEmail) m.Get("/avatar/{username}/{size}", user.AvatarByUserName) m.Get("/recover_account", auth.ResetPasswd) @@ -388,7 +449,9 @@ func RegisterRoutes(m *web.Route) { m.Get("/forgot_password", auth.ForgotPasswd) m.Post("/forgot_password", auth.ForgotPasswdPost) m.Post("/logout", auth.SignOut) - m.Get("/task/{task}", user.TaskStatus) + m.Get("/task/{task}", reqSignIn, user.TaskStatus) + m.Get("/stopwatches", reqSignIn, user.GetStopwatches) + m.Get("/search", ignExploreSignIn, user.Search) }) // ***** END: User ***** @@ -404,6 +467,7 @@ func RegisterRoutes(m *web.Route) { m.Post("/config/test_mail", admin.SendTestMail) m.Group("/monitor", func() { m.Get("", admin.Monitor) + m.Get("/stacktrace", admin.GoroutineStacktrace) m.Post("/cancel/{pid}", admin.MonitorCancel) m.Group("/queue/{qid}", func() { m.Get("", admin.Queue) @@ -440,6 +504,13 @@ func RegisterRoutes(m *web.Route) { m.Post("/delete", admin.DeleteRepo) }) + if setting.Packages.Enabled { + m.Group("/packages", func() { + m.Get("", admin.Packages) + m.Post("/delete", admin.DeletePackageVersion) + }) + } + m.Group("/hooks", func() { m.Get("", admin.DefaultOrSystemWebhooks) m.Post("/delete", admin.DeleteDefaultOrSystemWebhook) @@ -492,11 +563,21 @@ func RegisterRoutes(m *web.Route) { // ***** END: Admin ***** m.Group("", func() { - m.Get("/{username}", user.Profile) + m.Get("/favicon.ico", func(ctx *context.Context) { + ctx.ServeFile(path.Join(setting.StaticRootPath, "public/img/favicon.png")) + }) + m.Group("/{username}", func() { + m.Get(".png", func(ctx *context.Context) { 
ctx.Error(http.StatusNotFound) }) + m.Get(".keys", user.ShowSSHKeys) + m.Get(".gpg", user.ShowGPGKeys) + m.Get(".rss", feed.ShowUserFeedRSS) + m.Get(".atom", feed.ShowUserFeedAtom) + m.Get("", user.Profile) + }, context_service.UserAssignmentWeb()) m.Get("/attachments/{uuid}", repo.GetAttachment) }, ignSignIn) - m.Post("/{username}", reqSignIn, user.Action) + m.Post("/{username}", reqSignIn, context_service.UserAssignmentWeb(), user.Action) if !setting.IsProd { m.Get("/template/*", dev.TemplatePreview) @@ -504,6 +585,7 @@ func RegisterRoutes(m *web.Route) { reqRepoAdmin := context.RequireRepoAdmin() reqRepoCodeWriter := context.RequireRepoWriter(unit.TypeCode) + canEnableEditor := context.CanEnableEditor() reqRepoCodeReader := context.RequireRepoReader(unit.TypeCode) reqRepoReleaseWriter := context.RequireRepoWriter(unit.TypeReleases) reqRepoReleaseReader := context.RequireRepoReader(unit.TypeReleases) @@ -516,6 +598,14 @@ func RegisterRoutes(m *web.Route) { reqRepoProjectsReader := context.RequireRepoReader(unit.TypeProjects) reqRepoProjectsWriter := context.RequireRepoWriter(unit.TypeProjects) + reqPackageAccess := func(accessMode perm.AccessMode) func(ctx *context.Context) { + return func(ctx *context.Context) { + if ctx.Package.AccessMode < accessMode && !ctx.IsUserSiteAdmin() { + ctx.NotFound("", nil) + } + } + } + // ***** START: Organization ***** m.Group("/org", func() { m.Group("", func() { @@ -547,6 +637,7 @@ func RegisterRoutes(m *web.Route) { m.Group("/{org}", func() { m.Get("/teams/new", org.NewTeam) m.Post("/teams/new", bindIgnErr(forms.CreateTeamForm{}), org.NewTeamPost) + m.Get("/teams/-/search", org.SearchTeam) m.Get("/teams/{team}/edit", org.EditTeam) m.Post("/teams/{team}/edit", bindIgnErr(forms.CreateTeamForm{}), org.EditTeamPost) m.Post("/teams/{team}/delete", org.DeleteTeam) @@ -611,8 +702,29 @@ func RegisterRoutes(m *web.Route) { m.Combo("/{repoid}").Get(repo.Fork). Post(bindIgnErr(forms.CreateRepoForm{}), repo.ForkPost) }, context.RepoIDAssignment(), context.UnitTypes(), reqRepoCodeReader) + m.Get("/search", repo.SearchRepo) }, reqSignIn) + m.Group("/{username}/-", func() { + if setting.Packages.Enabled { + m.Group("/packages", func() { + m.Get("", user.ListPackages) + m.Group("/{type}/{name}", func() { + m.Get("", user.RedirectToLastVersion) + m.Get("/versions", user.ListPackageVersions) + m.Group("/{version}", func() { + m.Get("", user.ViewPackageVersion) + m.Get("/files/{fileid}", user.DownloadPackageFile) + m.Group("/settings", func() { + m.Get("", user.PackageSettings) + m.Post("", bindIgnErr(forms.PackageSettingForm{}), user.PackageSettingsPost) + }, reqPackageAccess(perm.AccessModeWrite)) + }) + }) + }, context.PackageAssignment(), reqPackageAccess(perm.AccessModeRead)) + } + }, context_service.UserAssignmentWeb()) + // ***** Release Attachment Download without Signin m.Get("/{username}/{reponame}/releases/download/{vTag}/{fileName}", ignSignIn, context.RepoAssignment, repo.MustBeNotEmpty, repo.RedirectDownload) @@ -733,15 +845,19 @@ func RegisterRoutes(m *web.Route) { Post(bindIgnErr(forms.CreateIssueForm{}), repo.NewIssuePost) m.Get("/choose", context.RepoRef(), repo.NewIssueChooseTemplate) }) + m.Get("/search", repo.ListIssues) }, context.RepoMustNotBeArchived(), reqRepoIssueReader) // FIXME: should use different URLs but mostly same logic for comments of issue and pull request. // So they can apply their own enable/disable logic on routers. 
m.Group("/{type:issues|pulls}", func() { m.Group("/{index}", func() { + m.Get("/info", repo.GetIssueInfo) m.Post("/title", repo.UpdateIssueTitle) m.Post("/content", repo.UpdateIssueContent) + m.Post("/deadline", bindIgnErr(structs.EditDeadlineOption{}), repo.UpdateIssueDeadline) m.Post("/watch", repo.IssueWatch) m.Post("/ref", repo.UpdateIssueRef) + m.Post("/viewed-files", repo.UpdateViewedFiles) m.Group("/dependency", func() { m.Post("/add", repo.AddDependency) m.Post("/delete", repo.RemoveDependency) @@ -787,6 +903,7 @@ func RegisterRoutes(m *web.Route) { m.Group("/comments/{id}", func() { m.Get("/attachments", repo.GetCommentAttachments) }) + m.Post("/markdown", bindIgnErr(structs.MarkdownOption{}), misc.Markdown) m.Group("/labels", func() { m.Post("/new", bindIgnErr(forms.CreateLabelForm{}), repo.NewLabel) m.Post("/edit", bindIgnErr(forms.CreateLabelForm{}), repo.UpdateLabel) @@ -821,12 +938,12 @@ func RegisterRoutes(m *web.Route) { Post(bindIgnErr(forms.EditRepoFileForm{}), repo.NewDiffPatchPost) m.Combo("/_cherrypick/{sha:([a-f0-9]{7,40})}/*").Get(repo.CherryPick). Post(bindIgnErr(forms.CherryPickForm{}), repo.CherryPickPost) - }, context.RepoRefByType(context.RepoRefBranch), repo.MustBeEditable) + }, repo.MustBeEditable) m.Group("", func() { m.Post("/upload-file", repo.UploadFileToServer) m.Post("/upload-remove", bindIgnErr(forms.RemoveUploadFileForm{}), repo.RemoveUploadFileFromServer) - }, context.RepoRef(), repo.MustBeEditable, repo.MustBeAbleToUpload) - }, context.RepoMustNotBeArchived(), reqRepoCodeWriter, repo.MustBeNotEmpty) + }, repo.MustBeEditable, repo.MustBeAbleToUpload) + }, context.RepoRef(), canEnableEditor, context.RepoMustNotBeArchived(), repo.MustBeNotEmpty) m.Group("/branches", func() { m.Group("/_new", func() { @@ -899,6 +1016,10 @@ func RegisterRoutes(m *web.Route) { m.Get("/milestones", reqRepoIssuesOrPullsReader, repo.Milestones) }, context.RepoRef()) + if setting.Packages.Enabled { + m.Get("/packages", repo.Packages) + } + m.Group("/projects", func() { m.Get("", repo.Projects) m.Get("/{id}", repo.ViewProject) @@ -943,6 +1064,7 @@ func RegisterRoutes(m *web.Route) { m.Get("/commit/{sha:[a-f0-9]{7,40}}.{ext:patch|diff}", repo.RawDiff) }, repo.MustEnableWiki, func(ctx *context.Context) { ctx.Data["PageIsWiki"] = true + ctx.Data["CloneButtonOriginLink"] = ctx.Repo.Repository.WikiCloneLink() }) m.Group("/wiki", func() { @@ -996,6 +1118,7 @@ func RegisterRoutes(m *web.Route) { m.Get("/commits", context.RepoRef(), repo.ViewPullCommits) m.Post("/merge", context.RepoMustNotBeArchived(), bindIgnErr(forms.MergePullRequestForm{}), repo.MergePullRequest) m.Post("/update", repo.UpdatePullRequest) + m.Post("/set_allow_maintainer_edit", bindIgnErr(forms.UpdateAllowEditsForm{}), repo.SetAllowEdits) m.Post("/cleanup", context.RepoMustNotBeArchived(), context.RepoRef(), repo.CleanUpPullRequest) m.Group("/files", func() { m.Get("", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.ViewPullFiles) @@ -1103,7 +1226,7 @@ func RegisterRoutes(m *web.Route) { m.GetOptions("/objects/{head:[0-9a-f]{2}}/{hash:[0-9a-f]{38}}", repo.GetLooseObject) m.GetOptions("/objects/pack/pack-{file:[0-9a-f]{40}}.pack", repo.GetPackFile) m.GetOptions("/objects/pack/pack-{file:[0-9a-f]{40}}.idx", repo.GetIdxFile) - }, ignSignInAndCsrf) + }, ignSignInAndCsrf, context_service.UserAssignmentWeb()) }) }) // ***** END: Repository ***** @@ -1112,6 +1235,7 @@ func RegisterRoutes(m *web.Route) { m.Get("", user.Notifications) m.Post("/status", 
user.NotificationStatusPost) m.Post("/purge", user.NotificationPurgePost) + m.Get("/new", user.NewAvailable) }, reqSignIn) if setting.API.EnableSwagger { diff --git a/routers/web/webfinger.go b/routers/web/webfinger.go index 02cbe1af21..27d0351b81 100644 --- a/routers/web/webfinger.go +++ b/routers/web/webfinger.go @@ -8,7 +8,6 @@ import ( "fmt" "net/http" "net/url" - "regexp" "strings" user_model "code.gitea.io/gitea/models/user" @@ -17,8 +16,6 @@ import ( "code.gitea.io/gitea/modules/setting" ) -var webfingerRessourcePattern = regexp.MustCompile(`(?i)\A([a-z^:]+):(.*)\z`) - // https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-webfinger-14#section-4.4 type webfingerJRD struct { @@ -39,26 +36,20 @@ type webfingerLink struct { // WebfingerQuery returns informations about a resource // https://datatracker.ietf.org/doc/html/rfc7565 func WebfingerQuery(ctx *context.Context) { - resource := ctx.FormTrim("resource") - - scheme := "acct" - uri := resource - - match := webfingerRessourcePattern.FindStringSubmatch(resource) - if match != nil { - scheme = match[1] - uri = match[2] - } - appURL, _ := url.Parse(setting.AppURL) - var u *user_model.User - var err error + resource, err := url.Parse(ctx.FormTrim("resource")) + if err != nil { + ctx.Error(http.StatusBadRequest) + return + } - switch scheme { + var u *user_model.User + + switch resource.Scheme { case "acct": // allow only the current host - parts := strings.SplitN(uri, "@", 2) + parts := strings.SplitN(resource.Opaque, "@", 2) if len(parts) != 2 { ctx.Error(http.StatusBadRequest) return @@ -70,7 +61,10 @@ func WebfingerQuery(ctx *context.Context) { u, err = user_model.GetUserByNameCtx(ctx, parts[0]) case "mailto": - u, err = user_model.GetUserByEmailContext(ctx, uri) + u, err = user_model.GetUserByEmailContext(ctx, resource.Opaque) + if u != nil && u.KeepEmailPrivate { + err = user_model.ErrUserNotExist{} + } default: ctx.Error(http.StatusBadRequest) return @@ -79,7 +73,7 @@ func WebfingerQuery(ctx *context.Context) { if user_model.IsErrUserNotExist(err) { ctx.Error(http.StatusNotFound) } else { - log.Error("Error getting user: %v", err) + log.Error("Error getting user: %s Error: %v", resource.Opaque, err) ctx.Error(http.StatusInternalServerError) } return @@ -92,7 +86,6 @@ func WebfingerQuery(ctx *context.Context) { aliases := []string{ u.HTMLURL(), - appURL.String() + "api/v1/activitypub/user/" + strings.ToLower(u.Name), } if !u.KeepEmailPrivate { aliases = append(aliases, fmt.Sprintf("mailto:%s", u.Email)) @@ -108,15 +101,6 @@ func WebfingerQuery(ctx *context.Context) { Rel: "http://webfinger.net/rel/avatar", Href: u.AvatarLink(), }, - { - Rel: "self", - Type: "application/activity+json", - Href: appURL.String() + "api/v1/activitypub/user/" + strings.ToLower(u.Name), - }, - { - Rel: "http://ostatus.org/schema/1.0/subscribe", - Href: appURL.String() + "api/v1/authorize_interaction?uri={uri}", - }, } ctx.JSON(http.StatusOK, &webfingerJRD{ diff --git a/services/agit/agit.go b/services/agit/agit.go index b2859c8a5b..2889236181 100644 --- a/services/agit/agit.go +++ b/services/agit/agit.go @@ -177,7 +177,7 @@ func ProcRecive(ctx *context.PrivateContext, opts *private.HookOptions) []privat } // update exist pull request - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err) ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ "Err": fmt.Sprintf("Unable to load base repository for PR[%d] Error: %v", pr.ID, err), 
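// [Editor's aside, not part of the patch: the rewritten WebfingerQuery above replaces the
// hand-rolled scheme regexp with url.Parse; for opaque URIs such as acct: and mailto: the
// scheme selects the lookup and url.URL.Opaque carries the rest. Standalone demonstration
// with placeholder addresses.]
package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	for _, resource := range []string{"acct:jane@git.example.com", "mailto:jane@example.com"} {
		u, err := url.Parse(resource)
		if err != nil {
			fmt.Println("bad resource:", err)
			continue
		}
		switch u.Scheme {
		case "acct":
			// same split as the handler above: local part + host (the handler checks the host against AppURL)
			parts := strings.SplitN(u.Opaque, "@", 2)
			fmt.Printf("acct lookup: user %q on host %q\n", parts[0], parts[1])
		case "mailto":
			fmt.Printf("email lookup: %q\n", u.Opaque)
		default:
			fmt.Println("unsupported scheme:", u.Scheme)
		}
	}
}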
@@ -205,7 +205,7 @@ func ProcRecive(ctx *context.PrivateContext, opts *private.HookOptions) []privat } if !forcePush { - output, err := git.NewCommand(ctx, "rev-list", "--max-count=1", oldCommitID, "^"+opts.NewCommitIDs[i]).RunInDirWithEnv(repo.RepoPath(), os.Environ()) + output, _, err := git.NewCommand(ctx, "rev-list", "--max-count=1", oldCommitID, "^"+opts.NewCommitIDs[i]).RunStdString(&git.RunOpts{Dir: repo.RepoPath(), Env: os.Environ()}) if err != nil { log.Error("Unable to detect force push between: %s and %s in %-v Error: %v", oldCommitID, opts.NewCommitIDs[i], repo, err) ctx.JSON(http.StatusInternalServerError, private.Response{ diff --git a/services/asymkey/main_test.go b/services/asymkey/main_test.go index a891a10cf6..cb3d059456 100644 --- a/services/asymkey/main_test.go +++ b/services/asymkey/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } diff --git a/services/asymkey/sign.go b/services/asymkey/sign.go index c2c6829d61..6b17c017fc 100644 --- a/services/asymkey/sign.go +++ b/services/asymkey/sign.go @@ -90,15 +90,15 @@ func SigningKey(ctx context.Context, repoPath string) (string, *git.Signature) { if setting.Repository.Signing.SigningKey == "default" || setting.Repository.Signing.SigningKey == "" { // Can ignore the error here as it means that commit.gpgsign is not set - value, _ := git.NewCommand(ctx, "config", "--get", "commit.gpgsign").RunInDir(repoPath) + value, _, _ := git.NewCommand(ctx, "config", "--get", "commit.gpgsign").RunStdString(&git.RunOpts{Dir: repoPath}) sign, valid := git.ParseBool(strings.TrimSpace(value)) if !sign || !valid { return "", nil } - signingKey, _ := git.NewCommand(ctx, "config", "--get", "user.signingkey").RunInDir(repoPath) - signingName, _ := git.NewCommand(ctx, "config", "--get", "user.name").RunInDir(repoPath) - signingEmail, _ := git.NewCommand(ctx, "config", "--get", "user.email").RunInDir(repoPath) + signingKey, _, _ := git.NewCommand(ctx, "config", "--get", "user.signingkey").RunStdString(&git.RunOpts{Dir: repoPath}) + signingName, _, _ := git.NewCommand(ctx, "config", "--get", "user.name").RunStdString(&git.RunOpts{Dir: repoPath}) + signingEmail, _, _ := git.NewCommand(ctx, "config", "--get", "user.email").RunStdString(&git.RunOpts{Dir: repoPath}) return strings.TrimSpace(signingKey), &git.Signature{ Name: strings.TrimSpace(signingName), Email: strings.TrimSpace(signingEmail), @@ -143,7 +143,7 @@ Loop: case always: break Loop case pubkey: - keys, err := asymkey_model.ListGPGKeys(db.DefaultContext, u.ID, db.ListOptions{}) + keys, err := asymkey_model.ListGPGKeys(ctx, u.ID, db.ListOptions{}) if err != nil { return false, "", nil, err } @@ -179,7 +179,7 @@ Loop: case always: break Loop case pubkey: - keys, err := asymkey_model.ListGPGKeys(db.DefaultContext, u.ID, db.ListOptions{}) + keys, err := asymkey_model.ListGPGKeys(ctx, u.ID, db.ListOptions{}) if err != nil { return false, "", nil, err } @@ -195,7 +195,7 @@ Loop: return false, "", nil, &ErrWontSign{twofa} } case parentSigned: - gitRepo, err := git.OpenRepositoryCtx(ctx, repoWikiPath) + gitRepo, err := git.OpenRepository(ctx, repoWikiPath) if err != nil { return false, "", nil, err } @@ -232,7 +232,7 @@ Loop: case always: break Loop case pubkey: - keys, err := asymkey_model.ListGPGKeys(db.DefaultContext, u.ID, db.ListOptions{}) + keys, err := asymkey_model.ListGPGKeys(ctx, u.ID, db.ListOptions{}) if err != nil { 
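// [Editor's aside, not part of the patch: the signing code above migrates from
// RunInDir/RunInDirWithEnv to RunStdString(&git.RunOpts{...}), which also surfaces stderr as a
// second return value. A minimal sketch of reading a repo-local git config value in the new
// style; the helper name is an assumption.]
package example

import (
	"context"
	"strings"

	"code.gitea.io/gitea/modules/git"
)

// repoWantsGPGSign reports whether commit.gpgsign is set to a truthy value in repoPath.
func repoWantsGPGSign(ctx context.Context, repoPath string) bool {
	// stderr is discarded here, just like the signing-key lookups above
	value, _, err := git.NewCommand(ctx, "config", "--get", "commit.gpgsign").
		RunStdString(&git.RunOpts{Dir: repoPath})
	if err != nil {
		return false
	}
	sign, valid := git.ParseBool(strings.TrimSpace(value))
	return sign && valid
}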
return false, "", nil, err } @@ -248,7 +248,7 @@ Loop: return false, "", nil, &ErrWontSign{twofa} } case parentSigned: - gitRepo, err := git.OpenRepositoryCtx(ctx, tmpBasePath) + gitRepo, err := git.OpenRepository(ctx, tmpBasePath) if err != nil { return false, "", nil, err } @@ -271,7 +271,7 @@ Loop: // SignMerge determines if we should sign a PR merge commit to the base repository func SignMerge(ctx context.Context, pr *models.PullRequest, u *user_model.User, tmpBasePath, baseCommit, headCommit string) (bool, string, *git.Signature, error) { - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("Unable to get Base Repo for pull request") return false, "", nil, err } @@ -294,7 +294,7 @@ Loop: case always: break Loop case pubkey: - keys, err := asymkey_model.ListGPGKeys(db.DefaultContext, u.ID, db.ListOptions{}) + keys, err := asymkey_model.ListGPGKeys(ctx, u.ID, db.ListOptions{}) if err != nil { return false, "", nil, err } @@ -317,12 +317,12 @@ Loop: if protectedBranch == nil { return false, "", nil, &ErrWontSign{approved} } - if protectedBranch.GetGrantedApprovalsCount(pr) < 1 { + if protectedBranch.GetGrantedApprovalsCount(ctx, pr) < 1 { return false, "", nil, &ErrWontSign{approved} } case baseSigned: if gitRepo == nil { - gitRepo, err = git.OpenRepositoryCtx(ctx, tmpBasePath) + gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { return false, "", nil, err } @@ -338,7 +338,7 @@ Loop: } case headSigned: if gitRepo == nil { - gitRepo, err = git.OpenRepositoryCtx(ctx, tmpBasePath) + gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { return false, "", nil, err } @@ -354,7 +354,7 @@ Loop: } case commitsSigned: if gitRepo == nil { - gitRepo, err = git.OpenRepositoryCtx(ctx, tmpBasePath) + gitRepo, err = git.OpenRepository(ctx, tmpBasePath) if err != nil { return false, "", nil, err } diff --git a/services/attachment/attachment_test.go b/services/attachment/attachment_test.go index 9ebd0d29ab..ffce5943e5 100644 --- a/services/attachment/attachment_test.go +++ b/services/attachment/attachment_test.go @@ -17,7 +17,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } func TestUploadAttachment(t *testing.T) { diff --git a/services/auth/auth.go b/services/auth/auth.go index bdff777f50..3a5bb9d27e 100644 --- a/services/auth/auth.go +++ b/services/auth/auth.go @@ -8,7 +8,6 @@ package auth import ( "fmt" "net/http" - "reflect" "regexp" "strings" @@ -21,80 +20,22 @@ import ( "code.gitea.io/gitea/modules/web/middleware" ) -// authMethods contains the list of authentication plugins in the order they are expected to be -// executed. -// -// The OAuth2 plugin is expected to be executed first, as it must ignore the user id stored -// in the session (if there is a user id stored in session other plugins might return the user -// object for that id). -// -// The Session plugin is expected to be executed second, in order to skip authentication -// for users that have already signed in. 
-var authMethods = []Method{ - &OAuth2{}, - &Basic{}, - &Session{}, -} - -// The purpose of the following three function variables is to let the linter know that -// those functions are not dead code and are actually being used -var ( - _ = handleSignIn -) - -// Methods returns the instances of all registered methods -func Methods() []Method { - return authMethods -} - -// Register adds the specified instance to the list of available methods -func Register(method Method) { - authMethods = append(authMethods, method) -} - // Init should be called exactly once when the application starts to allow plugins // to allocate necessary resources func Init() { - if setting.Service.EnableReverseProxyAuth { - Register(&ReverseProxy{}) - } - specialInit() - for _, method := range Methods() { - initializable, ok := method.(Initializable) - if !ok { - continue - } - - err := initializable.Init() - if err != nil { - log.Error("Could not initialize '%s' auth method, error: %s", reflect.TypeOf(method).String(), err) - } - } - webauthn.Init() } -// Free should be called exactly once when the application is terminating to allow Auth plugins -// to release necessary resources -func Free() { - for _, method := range Methods() { - freeable, ok := method.(Freeable) - if !ok { - continue - } - - err := freeable.Free() - if err != nil { - log.Error("Could not free '%s' auth method, error: %s", reflect.TypeOf(method).String(), err) - } - } -} - // isAttachmentDownload check if request is a file download (GET) with URL to an attachment func isAttachmentDownload(req *http.Request) bool { return strings.HasPrefix(req.URL.Path, "/attachments/") && req.Method == "GET" } +// isContainerPath checks if the request targets the container endpoint +func isContainerPath(req *http.Request) bool { + return strings.HasPrefix(req.URL.Path, "/v2/") +} + var ( gitRawReleasePathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/(?:(?:git-(?:(?:upload)|(?:receive))-pack$)|(?:info/refs$)|(?:HEAD$)|(?:objects/)|(?:raw/)|(?:releases/download/))`) lfsPathRe = regexp.MustCompile(`^/[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+/info/lfs/`) diff --git a/services/auth/basic.go b/services/auth/basic.go index d8667c65d5..1869662e92 100644 --- a/services/auth/basic.go +++ b/services/auth/basic.go @@ -43,7 +43,7 @@ func (b *Basic) Name() string { // Returns nil if header is empty or validation fails. 
func (b *Basic) Verify(req *http.Request, w http.ResponseWriter, store DataStore, sess SessionStore) *user_model.User { // Basic authentication should only fire on API, Download or on Git or LFSPaths - if !middleware.IsAPIPath(req) && !isAttachmentDownload(req) && !isGitRawReleaseOrLFSPath(req) { + if !middleware.IsAPIPath(req) && !isContainerPath(req) && !isAttachmentDownload(req) && !isGitRawReleaseOrLFSPath(req) { return nil } diff --git a/services/auth/group.go b/services/auth/group.go index bf047338bb..0f40e1a76c 100644 --- a/services/auth/group.go +++ b/services/auth/group.go @@ -6,6 +6,8 @@ package auth import ( "net/http" + "reflect" + "strings" "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" @@ -30,6 +32,24 @@ func NewGroup(methods ...Method) *Group { } } +// Add adds a new method to group +func (b *Group) Add(method Method) { + b.methods = append(b.methods, method) +} + +// Name returns group's methods name +func (b *Group) Name() string { + names := make([]string, 0, len(b.methods)) + for _, m := range b.methods { + if n, ok := m.(Named); ok { + names = append(names, n.Name()) + } else { + names = append(names, reflect.TypeOf(m).Elem().Name()) + } + } + return strings.Join(names, ",") +} + // Init does nothing as the Basic implementation does not need to allocate any resources func (b *Group) Init() error { for _, method := range b.methods { diff --git a/services/auth/placeholder.go b/services/auth/placeholder.go deleted file mode 100644 index d9a0ceae7c..0000000000 --- a/services/auth/placeholder.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2021 The Gitea Authors. All rights reserved. -// Use of this source code is governed by a MIT-style -// license that can be found in the LICENSE file. - -//go:build !windows -// +build !windows - -package auth - -func specialInit() {} diff --git a/services/auth/reverseproxy.go b/services/auth/reverseproxy.go index 1b151f6504..299d7abd34 100644 --- a/services/auth/reverseproxy.go +++ b/services/auth/reverseproxy.go @@ -12,6 +12,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/services/mailer" @@ -105,11 +106,15 @@ func (r *ReverseProxy) newUser(req *http.Request) *user_model.User { } user := &user_model.User{ - Name: username, - Email: email, - IsActive: true, + Name: username, + Email: email, } - if err := user_model.CreateUser(user); err != nil { + + overwriteDefault := user_model.CreateUserOverwriteOptions{ + IsActive: util.OptionalBoolTrue, + } + + if err := user_model.CreateUser(user, &overwriteDefault); err != nil { // FIXME: should I create a system notice? log.Error("CreateUser: %v", err) return nil diff --git a/services/auth/signin.go b/services/auth/signin.go index aa9a9660c0..3ccf68c3a7 100644 --- a/services/auth/signin.go +++ b/services/auth/signin.go @@ -23,19 +23,23 @@ import ( // UserSignIn validates user name and password. 
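// [Editor's aside, not part of the patch: both the reverse-proxy source above and the LDAP
// source below now create users through CreateUserOverwriteOptions instead of setting
// IsActive / IsRestricted on the User struct directly, so "force this value" is distinguished
// from "keep the instance default". Sketch of that call shape with placeholder values; the
// helper name is an assumption.]
package example

import (
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/util"
)

// newActiveUser creates a user that is forced active and explicitly not restricted.
func newActiveUser(username, email string) (*user_model.User, error) {
	u := &user_model.User{
		Name:  username,
		Email: email,
	}
	overwrite := &user_model.CreateUserOverwriteOptions{
		IsActive:     util.OptionalBoolTrue,
		IsRestricted: util.OptionalBoolOf(false),
	}
	if err := user_model.CreateUser(u, overwrite); err != nil {
		return nil, err
	}
	return u, nil
}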
func UserSignIn(username, password string) (*user_model.User, *auth.Source, error) { var user *user_model.User + isEmail := false if strings.Contains(username, "@") { + isEmail = true emailAddress := user_model.EmailAddress{LowerEmail: strings.ToLower(strings.TrimSpace(username))} // check same email - has, err := db.GetEngine(db.DefaultContext).Where("is_activated=?", true).Get(&emailAddress) + has, err := db.GetEngine(db.DefaultContext).Get(&emailAddress) if err != nil { return nil, nil, err } - if !has { - return nil, nil, user_model.ErrEmailAddressNotExist{ - Email: username, + if has { + if !emailAddress.IsActivated { + return nil, nil, user_model.ErrEmailAddressNotExist{ + Email: username, + } } + user = &user_model.User{ID: emailAddress.UID} } - user = &user_model.User{ID: emailAddress.UID} } else { trimmedUsername := strings.TrimSpace(username) if len(trimmedUsername) == 0 { @@ -45,38 +49,40 @@ func UserSignIn(username, password string) (*user_model.User, *auth.Source, erro user = &user_model.User{LowerName: strings.ToLower(trimmedUsername)} } - hasUser, err := user_model.GetUser(user) - if err != nil { - return nil, nil, err - } - - if hasUser { - source, err := auth.GetSourceByID(user.LoginSource) + if user != nil { + hasUser, err := user_model.GetUser(user) if err != nil { return nil, nil, err } - if !source.IsActive { - return nil, nil, oauth2.ErrAuthSourceNotActived - } + if hasUser { + source, err := auth.GetSourceByID(user.LoginSource) + if err != nil { + return nil, nil, err + } - authenticator, ok := source.Cfg.(PasswordAuthenticator) - if !ok { - return nil, nil, smtp.ErrUnsupportedLoginType - } + if !source.IsActive { + return nil, nil, oauth2.ErrAuthSourceNotActived + } - user, err := authenticator.Authenticate(user, user.LoginName, password) - if err != nil { - return nil, nil, err - } + authenticator, ok := source.Cfg.(PasswordAuthenticator) + if !ok { + return nil, nil, smtp.ErrUnsupportedLoginType + } - // WARN: DON'T check user.IsActive, that will be checked on reqSign so that - // user could be hint to resend confirm email. - if user.ProhibitLogin { - return nil, nil, user_model.ErrUserProhibitLogin{UID: user.ID, Name: user.Name} - } + user, err := authenticator.Authenticate(user, user.LoginName, password) + if err != nil { + return nil, nil, err + } - return user, source, nil + // WARN: DON'T check user.IsActive, that will be checked on reqSign so that + // user could be hint to resend confirm email. 
+ if user.ProhibitLogin { + return nil, nil, user_model.ErrUserProhibitLogin{UID: user.ID, Name: user.Name} + } + + return user, source, nil + } } sources, err := auth.AllActiveSources() @@ -111,5 +117,9 @@ func UserSignIn(username, password string) (*user_model.User, *auth.Source, erro } } + if isEmail { + return nil, nil, user_model.ErrEmailAddressNotExist{Email: username} + } + return nil, nil, user_model.ErrUserNotExist{Name: username} } diff --git a/services/auth/source/ldap/source_authenticate.go b/services/auth/source/ldap/source_authenticate.go index 2c1bcc29cc..d8d11f18e1 100644 --- a/services/auth/source/ldap/source_authenticate.go +++ b/services/auth/source/ldap/source_authenticate.go @@ -8,11 +8,12 @@ import ( "fmt" "strings" - "code.gitea.io/gitea/models" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/auth" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/mailer" user_service "code.gitea.io/gitea/services/user" ) @@ -65,8 +66,8 @@ func (source *Source) Authenticate(user *user_model.User, userName, password str if user != nil { if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) { - orgCache := make(map[string]*models.Organization) - teamCache := make(map[string]*models.Team) + orgCache := make(map[string]*organization.Organization) + teamCache := make(map[string]*organization.Team) source.SyncLdapGroupsToTeams(user, sr.LdapTeamAdd, sr.LdapTeamRemove, orgCache, teamCache) } if isAttributeSSHPublicKeySet && asymkey_model.SynchronizePublicKeys(user, source.authSource, sr.SSHPublicKey) { @@ -85,19 +86,21 @@ func (source *Source) Authenticate(user *user_model.User, userName, password str } user = &user_model.User{ - LowerName: strings.ToLower(sr.Username), - Name: sr.Username, - FullName: composeFullName(sr.Name, sr.Surname, sr.Username), - Email: sr.Mail, - LoginType: source.authSource.Type, - LoginSource: source.authSource.ID, - LoginName: userName, - IsActive: true, - IsAdmin: sr.IsAdmin, - IsRestricted: sr.IsRestricted, + LowerName: strings.ToLower(sr.Username), + Name: sr.Username, + FullName: composeFullName(sr.Name, sr.Surname, sr.Username), + Email: sr.Mail, + LoginType: source.authSource.Type, + LoginSource: source.authSource.ID, + LoginName: userName, + IsAdmin: sr.IsAdmin, + } + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsRestricted: util.OptionalBoolOf(sr.IsRestricted), + IsActive: util.OptionalBoolTrue, } - err := user_model.CreateUser(user) + err := user_model.CreateUser(user, overwriteDefault) if err != nil { return user, err } @@ -111,8 +114,8 @@ func (source *Source) Authenticate(user *user_model.User, userName, password str _ = user_service.UploadAvatar(user, sr.Avatar) } if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) { - orgCache := make(map[string]*models.Organization) - teamCache := make(map[string]*models.Team) + orgCache := make(map[string]*organization.Organization) + teamCache := make(map[string]*organization.Team) source.SyncLdapGroupsToTeams(user, sr.LdapTeamAdd, sr.LdapTeamRemove, orgCache, teamCache) } diff --git a/services/auth/source/ldap/source_group_sync.go b/services/auth/source/ldap/source_group_sync.go index 7c62af705e..e797e015b2 100644 --- a/services/auth/source/ldap/source_group_sync.go +++ b/services/auth/source/ldap/source_group_sync.go @@ -6,12 +6,14 @@ package ldap import ( 
"code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" ) // SyncLdapGroupsToTeams maps LDAP groups to organization and team memberships -func (source *Source) SyncLdapGroupsToTeams(user *user_model.User, ldapTeamAdd, ldapTeamRemove map[string][]string, orgCache map[string]*models.Organization, teamCache map[string]*models.Team) { +func (source *Source) SyncLdapGroupsToTeams(user *user_model.User, ldapTeamAdd, ldapTeamRemove map[string][]string, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) { var err error if source.GroupsEnabled && source.GroupTeamMapRemoval { // when the user is not a member of configs LDAP group, remove mapped organizations/teams memberships @@ -20,7 +22,7 @@ func (source *Source) SyncLdapGroupsToTeams(user *user_model.User, ldapTeamAdd, for orgName, teamNames := range ldapTeamAdd { org, ok := orgCache[orgName] if !ok { - org, err = models.GetOrgByName(orgName) + org, err = organization.GetOrgByName(orgName) if err != nil { // organization must be created before LDAP group sync log.Warn("LDAP group sync: Could not find organisation %s: %v", orgName, err) @@ -28,14 +30,7 @@ func (source *Source) SyncLdapGroupsToTeams(user *user_model.User, ldapTeamAdd, } orgCache[orgName] = org } - if isMember, err := models.IsOrganizationMember(org.ID, user.ID); !isMember && err == nil { - log.Trace("LDAP group sync: adding user [%s] to organization [%s]", user.Name, org.Name) - err = org.AddMember(user.ID) - if err != nil { - log.Error("LDAP group sync: Could not add user to organization: %v", err) - continue - } - } + for _, teamName := range teamNames { team, ok := teamCache[orgName+teamName] if !ok { @@ -47,12 +42,12 @@ func (source *Source) SyncLdapGroupsToTeams(user *user_model.User, ldapTeamAdd, } teamCache[orgName+teamName] = team } - if isMember, err := models.IsTeamMember(org.ID, team.ID, user.ID); !isMember && err == nil { + if isMember, err := organization.IsTeamMember(db.DefaultContext, org.ID, team.ID, user.ID); !isMember && err == nil { log.Trace("LDAP group sync: adding user [%s] to team [%s]", user.Name, org.Name) } else { continue } - err := team.AddMember(user.ID) + err := models.AddTeamMember(team, user.ID) if err != nil { log.Error("LDAP group sync: Could not add user to team: %v", err) } @@ -63,12 +58,12 @@ func (source *Source) SyncLdapGroupsToTeams(user *user_model.User, ldapTeamAdd, // remove membership to organizations/teams if user is not member of corresponding LDAP group // e.g. 
lets assume user is member of LDAP group "x", but LDAP group team map contains LDAP groups "x" and "y" // then users membership gets removed for all organizations/teams mapped by LDAP group "y" -func removeMappedMemberships(user *user_model.User, ldapTeamRemove map[string][]string, orgCache map[string]*models.Organization, teamCache map[string]*models.Team) { +func removeMappedMemberships(user *user_model.User, ldapTeamRemove map[string][]string, orgCache map[string]*organization.Organization, teamCache map[string]*organization.Team) { var err error for orgName, teamNames := range ldapTeamRemove { org, ok := orgCache[orgName] if !ok { - org, err = models.GetOrgByName(orgName) + org, err = organization.GetOrgByName(orgName) if err != nil { // organization must be created before LDAP group sync log.Warn("LDAP group sync: Could not find organisation %s: %v", orgName, err) @@ -86,12 +81,12 @@ func removeMappedMemberships(user *user_model.User, ldapTeamRemove map[string][] continue } } - if isMember, err := models.IsTeamMember(org.ID, team.ID, user.ID); isMember && err == nil { + if isMember, err := organization.IsTeamMember(db.DefaultContext, org.ID, team.ID, user.ID); isMember && err == nil { log.Trace("LDAP group sync: removing user [%s] from team [%s]", user.Name, org.Name) } else { continue } - err = team.RemoveMember(user.ID) + err = models.RemoveTeamMember(team, user.ID) if err != nil { log.Error("LDAP group sync: Could not remove user from team: %v", err) } diff --git a/services/auth/source/ldap/source_search.go b/services/auth/source/ldap/source_search.go index f2b940cabe..d01fd14c8b 100644 --- a/services/auth/source/ldap/source_search.go +++ b/services/auth/source/ldap/source_search.go @@ -433,14 +433,6 @@ func (ls *Source) SearchEntry(name, passwd string, directBind bool) *SearchResul isRestricted = checkRestricted(l, ls, userDN) } - if !directBind && ls.AttributesInBind { - // binds user (checking password) after looking-up attributes in BindDN context - err = bindUser(l, userDN, passwd) - if err != nil { - return nil - } - } - if isAtributeAvatarSet { Avatar = sr.Entries[0].GetRawAttributeValue(ls.AttributeAvatar) } @@ -451,6 +443,14 @@ func (ls *Source) SearchEntry(name, passwd string, directBind bool) *SearchResul teamsToAdd, teamsToRemove = ls.getMappedMemberships(l, uid) } + if !directBind && ls.AttributesInBind { + // binds user (checking password) after looking-up attributes in BindDN context + err = bindUser(l, userDN, passwd) + if err != nil { + return nil + } + } + return &SearchResult{ LowerName: strings.ToLower(username), Username: username, diff --git a/services/auth/source/ldap/source_sync.go b/services/auth/source/ldap/source_sync.go index 0d4d94a3ad..a245f4c6ff 100644 --- a/services/auth/source/ldap/source_sync.go +++ b/services/auth/source/ldap/source_sync.go @@ -10,11 +10,12 @@ import ( "sort" "strings" - "code.gitea.io/gitea/models" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" user_service "code.gitea.io/gitea/services/user" ) @@ -62,8 +63,8 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { }) userPos := 0 - orgCache := make(map[string]*models.Organization) - teamCache := make(map[string]*models.Team) + orgCache := make(map[string]*organization.Organization) + teamCache := make(map[string]*organization.Team) for _, su := range sr { select { 
@@ -102,19 +103,21 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { log.Trace("SyncExternalUsers[%s]: Creating user %s", source.authSource.Name, su.Username) usr = &user_model.User{ - LowerName: su.LowerName, - Name: su.Username, - FullName: fullName, - LoginType: source.authSource.Type, - LoginSource: source.authSource.ID, - LoginName: su.Username, - Email: su.Mail, - IsAdmin: su.IsAdmin, - IsRestricted: su.IsRestricted, - IsActive: true, + LowerName: su.LowerName, + Name: su.Username, + FullName: fullName, + LoginType: source.authSource.Type, + LoginSource: source.authSource.ID, + LoginName: su.Username, + Email: su.Mail, + IsAdmin: su.IsAdmin, + } + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsRestricted: util.OptionalBoolOf(su.IsRestricted), + IsActive: util.OptionalBoolTrue, } - err = user_model.CreateUser(usr) + err = user_model.CreateUser(usr, overwriteDefault) if err != nil { log.Error("SyncExternalUsers[%s]: Error creating user %s: %v", source.authSource.Name, su.Username, err) @@ -202,7 +205,7 @@ func (source *Source) Sync(ctx context.Context, updateExisting bool) error { log.Trace("SyncExternalUsers[%s]: Deactivating user %s", source.authSource.Name, usr.Name) usr.IsActive = false - err = user_model.UpdateUserCols(db.DefaultContext, usr, "is_active") + err = user_model.UpdateUserCols(ctx, usr, "is_active") if err != nil { log.Error("SyncExternalUsers[%s]: Error deactivating user %s: %v", source.authSource.Name, usr.Name, err) } diff --git a/services/auth/source/pam/source_authenticate.go b/services/auth/source/pam/source_authenticate.go index d5bd940996..16ddc0598e 100644 --- a/services/auth/source/pam/source_authenticate.go +++ b/services/auth/source/pam/source_authenticate.go @@ -12,6 +12,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/auth/pam" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/services/mailer" "github.com/google/uuid" @@ -58,10 +59,12 @@ func (source *Source) Authenticate(user *user_model.User, userName, password str LoginType: auth.PAM, LoginSource: source.authSource.ID, LoginName: userName, // This is what the user typed in - IsActive: true, + } + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsActive: util.OptionalBoolTrue, } - if err := user_model.CreateUser(user); err != nil { + if err := user_model.CreateUser(user, overwriteDefault); err != nil { return user, err } diff --git a/services/auth/source/smtp/source_authenticate.go b/services/auth/source/smtp/source_authenticate.go index 3be2f1128d..dff24d494e 100644 --- a/services/auth/source/smtp/source_authenticate.go +++ b/services/auth/source/smtp/source_authenticate.go @@ -74,10 +74,12 @@ func (source *Source) Authenticate(user *user_model.User, userName, password str LoginType: auth_model.SMTP, LoginSource: source.authSource.ID, LoginName: userName, - IsActive: true, + } + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsActive: util.OptionalBoolTrue, } - if err := user_model.CreateUser(user); err != nil { + if err := user_model.CreateUser(user, overwriteDefault); err != nil { return user, err } diff --git a/services/auth/sspi_windows.go b/services/auth/sspi_windows.go index cadf721796..9bc4041a74 100644 --- a/services/auth/sspi_windows.go +++ b/services/auth/sspi_windows.go @@ -16,6 +16,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/templates" + 
"code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web/middleware" "code.gitea.io/gitea/services/auth/source/sspi" "code.gitea.io/gitea/services/mailer" @@ -109,7 +110,7 @@ func (s *SSPI) Verify(req *http.Request, w http.ResponseWriter, store DataStore, store.GetData()["EnableOpenIDSignIn"] = setting.Service.EnableOpenIDSignIn store.GetData()["EnableSSPI"] = true - err := s.rnd.HTML(w, 401, string(tplSignIn), templates.BaseVars().Merge(store.GetData())) + err := s.rnd.HTML(w, http.StatusUnauthorized, string(tplSignIn), templates.BaseVars().Merge(store.GetData())) if err != nil { log.Error("%v", err) } @@ -187,17 +188,20 @@ func (s *SSPI) shouldAuthenticate(req *http.Request) (shouldAuth bool) { func (s *SSPI) newUser(username string, cfg *sspi.Source) (*user_model.User, error) { email := gouuid.New().String() + "@localhost.localdomain" user := &user_model.User{ - Name: username, - Email: email, - KeepEmailPrivate: true, - Passwd: gouuid.New().String(), - IsActive: cfg.AutoActivateUsers, - Language: cfg.DefaultLanguage, - UseCustomAvatar: true, - Avatar: avatars.DefaultAvatarLink(), - EmailNotificationsPreference: user_model.EmailNotificationsDisabled, + Name: username, + Email: email, + Passwd: gouuid.New().String(), + Language: cfg.DefaultLanguage, + UseCustomAvatar: true, + Avatar: avatars.DefaultAvatarLink(), } - if err := user_model.CreateUser(user); err != nil { + emailNotificationPreference := user_model.EmailNotificationsDisabled + overwriteDefault := &user_model.CreateUserOverwriteOptions{ + IsActive: util.OptionalBoolOf(cfg.AutoActivateUsers), + KeepEmailPrivate: util.OptionalBoolTrue, + EmailNotificationsPreference: &emailNotificationPreference, + } + if err := user_model.CreateUser(user, overwriteDefault); err != nil { return nil, err } @@ -244,13 +248,3 @@ func sanitizeUsername(username string, cfg *sspi.Source) string { username = replaceSeparators(username, cfg) return username } - -// specialInit registers the SSPI auth method as the last method in the list. -// The SSPI plugin is expected to be executed last, as it returns 401 status code if negotiation -// fails (or if negotiation should continue), which would prevent other authentication methods -// to execute at all. -func specialInit() { - if auth.IsSSPIEnabled() { - Register(&SSPI{}) - } -} diff --git a/services/automerge/automerge.go b/services/automerge/automerge.go new file mode 100644 index 0000000000..85af2659c6 --- /dev/null +++ b/services/automerge/automerge.go @@ -0,0 +1,263 @@ +// Copyright 2021 Gitea. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package automerge + +import ( + "context" + "errors" + "fmt" + "strconv" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + pull_model "code.gitea.io/gitea/models/pull" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" + "code.gitea.io/gitea/modules/queue" + pull_service "code.gitea.io/gitea/services/pull" +) + +// prAutoMergeQueue represents a queue to handle update pull request tests +var prAutoMergeQueue queue.UniqueQueue + +// Init runs the task queue to that handles auto merges +func Init() error { + prAutoMergeQueue = queue.CreateUniqueQueue("pr_auto_merge", handle, "") + if prAutoMergeQueue == nil { + return fmt.Errorf("Unable to create pr_auto_merge Queue") + } + go graceful.GetManager().RunWithShutdownFns(prAutoMergeQueue.Run) + return nil +} + +// handle passed PR IDs and test the PRs +func handle(data ...queue.Data) []queue.Data { + for _, d := range data { + var id int64 + var sha string + if _, err := fmt.Sscanf(d.(string), "%d_%s", &id, &sha); err != nil { + log.Error("could not parse data from pr_auto_merge queue (%v): %v", d, err) + continue + } + handlePull(id, sha) + } + return nil +} + +func addToQueue(pr *models.PullRequest, sha string) { + if err := prAutoMergeQueue.PushFunc(fmt.Sprintf("%d_%s", pr.ID, sha), func() error { + log.Trace("Adding pullID: %d to the pull requests patch checking queue with sha %s", pr.ID, sha) + return nil + }); err != nil { + log.Error("Error adding pullID: %d to the pull requests patch checking queue %v", pr.ID, err) + } +} + +// ScheduleAutoMerge if schedule is false and no error, pull can be merged directly +func ScheduleAutoMerge(ctx context.Context, doer *user_model.User, pull *models.PullRequest, style repo_model.MergeStyle, message string) (scheduled bool, err error) { + err = db.WithTx(func(ctx context.Context) error { + lastCommitStatus, err := pull_service.GetPullRequestCommitStatusState(ctx, pull) + if err != nil { + return err + } + + // we don't need to schedule + if lastCommitStatus.IsSuccess() { + return nil + } + + if err := pull_model.ScheduleAutoMerge(ctx, doer, pull.ID, style, message); err != nil { + return err + } + scheduled = true + + _, err = models.CreateAutoMergeComment(ctx, models.CommentTypePRScheduledToAutoMerge, pull, doer) + return err + }, ctx) + return +} + +// RemoveScheduledAutoMerge cancels a previously scheduled pull request +func RemoveScheduledAutoMerge(ctx context.Context, doer *user_model.User, pull *models.PullRequest) error { + return db.WithTx(func(ctx context.Context) error { + if err := pull_model.DeleteScheduledAutoMerge(ctx, pull.ID); err != nil { + return err + } + + _, err := models.CreateAutoMergeComment(ctx, models.CommentTypePRUnScheduledToAutoMerge, pull, doer) + return err + }, ctx) +} + +// MergeScheduledPullRequest merges a previously scheduled pull request when all checks succeeded +func MergeScheduledPullRequest(ctx context.Context, sha string, repo *repo_model.Repository) error { + pulls, err := getPullRequestsByHeadSHA(ctx, sha, repo, func(pr *models.PullRequest) bool { + return !pr.HasMerged && pr.CanAutoMerge() + }) + if err != nil { + return err + } + + for _, pr := range pulls { + addToQueue(pr, sha) + } + + return nil +} + +func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.Repository, filter func(*models.PullRequest) 
bool) (map[int64]*models.PullRequest, error) { + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) + if err != nil { + return nil, err + } + defer gitRepo.Close() + + refs, err := gitRepo.GetRefsBySha(sha, "") + if err != nil { + return nil, err + } + + pulls := make(map[int64]*models.PullRequest) + + for _, ref := range refs { + // Each pull branch starts with refs/pull/ we then go from there to find the index of the pr and then + // use that to get the pr. + if strings.HasPrefix(ref, git.PullPrefix) { + parts := strings.Split(ref[len(git.PullPrefix):], "/") + + // e.g. 'refs/pull/1/head' would be []string{"1", "head"} + if len(parts) != 2 { + log.Error("getPullRequestsByHeadSHA found broken pull ref [%s] on repo [%-v]", ref, repo) + continue + } + + prIndex, err := strconv.ParseInt(parts[0], 10, 64) + if err != nil { + log.Error("getPullRequestsByHeadSHA found broken pull ref [%s] on repo [%-v]", ref, repo) + continue + } + + p, err := models.GetPullRequestByIndexCtx(ctx, repo.ID, prIndex) + if err != nil { + // If there is no pull request for this branch, we don't try to merge it. + if models.IsErrPullRequestNotExist(err) { + continue + } + return nil, err + } + + if filter(p) { + pulls[p.ID] = p + } + } + } + + return pulls, nil +} + +func handlePull(pullID int64, sha string) { + ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), + fmt.Sprintf("Handle AutoMerge of pull[%d] with sha[%s]", pullID, sha)) + defer finished() + + pr, err := models.GetPullRequestByID(ctx, pullID) + if err != nil { + log.Error("GetPullRequestByID[%d]: %v", pullID, err) + return + } + + // Check if there is a scheduled pr in the db + exists, scheduledPRM, err := pull_model.GetScheduledMergeByPullID(ctx, pr.ID) + if err != nil { + log.Error("pull[%d] GetScheduledMergeByPullID: %v", pr.ID, err) + return + } + if !exists { + return + } + + // Get all checks for this pr + // We get the latest sha commit hash again to handle the case where the check of a previous push + // did not succeed or was not finished yet. 
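For readers skimming the new automerge service: addToQueue and handle, earlier in this file's diff, agree on a small "<pullID>_<sha>" payload format for the pr_auto_merge unique queue. A standalone, stdlib-only illustration of that round trip (the sample values are made up):

// Toy round trip of the "<pullID>_<sha>" queue payload used by addToQueue/handle above.
package main

import "fmt"

func main() {
	// What addToQueue pushes onto the pr_auto_merge queue:
	payload := fmt.Sprintf("%d_%s", int64(1234), "0a1b2c3d")

	// What handle recovers from the queue item:
	var id int64
	var sha string
	if _, err := fmt.Sscanf(payload, "%d_%s", &id, &sha); err != nil {
		fmt.Println("could not parse payload:", err)
		return
	}
	fmt.Println(id, sha) // 1234 0a1b2c3d
}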
+ + if err = pr.LoadHeadRepoCtx(ctx); err != nil { + log.Error("pull[%d] LoadHeadRepoCtx: %v", pr.ID, err) + return + } + + headGitRepo, err := git.OpenRepository(ctx, pr.HeadRepo.RepoPath()) + if err != nil { + log.Error("OpenRepository: %v", err) + return + } + defer headGitRepo.Close() + + headBranchExist := headGitRepo.IsBranchExist(pr.HeadBranch) + + if pr.HeadRepo == nil || !headBranchExist { + log.Warn("Head branch of auto merge pr does not exist [HeadRepoID: %d, Branch: %s, PR ID: %d]", pr.HeadRepoID, pr.HeadBranch, pr.ID) + return + } + + // Check if all checks succeeded + pass, err := pull_service.IsPullCommitStatusPass(ctx, pr) + if err != nil { + log.Error("IsPullCommitStatusPass: %v", err) + return + } + if !pass { + log.Info("Scheduled auto merge pr has unsuccessful status checks [PullID: %d]", pr.ID) + return + } + + // Merge if all checks succeeded + doer, err := user_model.GetUserByIDCtx(ctx, scheduledPRM.DoerID) + if err != nil { + log.Error("GetUserByIDCtx: %v", err) + return + } + + perm, err := models.GetUserRepoPermission(ctx, pr.HeadRepo, doer) + if err != nil { + log.Error("GetUserRepoPermission: %v", err) + return + } + + if err := pull_service.CheckPullMergable(ctx, doer, &perm, pr, false, false); err != nil { + if errors.Is(pull_service.ErrUserNotAllowedToMerge, err) { + log.Info("PR %d was scheduled to automerge by an unauthorized user", pr.ID) + return + } + log.Error("pull[%d] CheckPullMergable: %v", pr.ID, err) + return + } + + var baseGitRepo *git.Repository + if pr.BaseRepoID == pr.HeadRepoID { + baseGitRepo = headGitRepo + } else { + if err = pr.LoadBaseRepoCtx(ctx); err != nil { + log.Error("LoadBaseRepoCtx: %v", err) + return + } + + baseGitRepo, err = git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) + if err != nil { + log.Error("OpenRepository: %v", err) + return + } + defer baseGitRepo.Close() + } + + if err := pull_service.Merge(ctx, pr, doer, baseGitRepo, scheduledPRM.MergeStyle, "", scheduledPRM.Message); err != nil { + log.Error("pull_service.Merge: %v", err) + return + } +} diff --git a/services/comments/comments.go b/services/comments/comments.go index 6f63060c45..c1b3ab73c9 100644 --- a/services/comments/comments.go +++ b/services/comments/comments.go @@ -28,7 +28,7 @@ func CreateIssueComment(doer *user_model.User, repo *repo_model.Repository, issu return nil, err } - mentions, err := issue.FindAndUpdateIssueMentions(db.DefaultContext, doer, comment.Content) + mentions, err := models.FindAndUpdateIssueMentions(db.DefaultContext, issue, doer, comment.Content) if err != nil { return nil, err } diff --git a/services/context/user.go b/services/context/user.go new file mode 100644 index 0000000000..c5efd43782 --- /dev/null +++ b/services/context/user.go @@ -0,0 +1,62 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package context + +import ( + "fmt" + "net/http" + "strings" + + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/context" +) + +// UserAssignmentWeb returns a middleware to handle context-user assignment for web routes +func UserAssignmentWeb() func(ctx *context.Context) { + return func(ctx *context.Context) { + userAssignment(ctx, func(status int, title string, obj interface{}) { + err, ok := obj.(error) + if !ok { + err = fmt.Errorf("%s", obj) + } + if status == http.StatusNotFound { + ctx.NotFound(title, err) + } else { + ctx.ServerError(title, err) + } + }) + } +} + +// UserAssignmentAPI returns a middleware to handle context-user assignment for api routes +func UserAssignmentAPI() func(ctx *context.APIContext) { + return func(ctx *context.APIContext) { + userAssignment(ctx.Context, ctx.Error) + } +} + +func userAssignment(ctx *context.Context, errCb func(int, string, interface{})) { + username := ctx.Params(":username") + + if ctx.IsSigned && ctx.Doer.LowerName == strings.ToLower(username) { + ctx.ContextUser = ctx.Doer + } else { + var err error + ctx.ContextUser, err = user_model.GetUserByName(username) + if err != nil { + if user_model.IsErrUserNotExist(err) { + if redirectUserID, err := user_model.LookupUserRedirect(username); err == nil { + context.RedirectToUser(ctx, username, redirectUserID) + } else if user_model.IsErrUserRedirectNotExist(err) { + errCb(http.StatusNotFound, "GetUserByName", err) + } else { + errCb(http.StatusInternalServerError, "LookupUserRedirect", err) + } + } else { + errCb(http.StatusInternalServerError, "GetUserByName", err) + } + } + } +} diff --git a/services/cron/cron.go b/services/cron/cron.go index 19f703caf1..ebbcd75b6d 100644 --- a/services/cron/cron.go +++ b/services/cron/cron.go @@ -7,9 +7,11 @@ package cron import ( "context" + "runtime/pprof" "time" "code.gitea.io/gitea/modules/graceful" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/sync" "github.com/gogs/cron" @@ -23,7 +25,9 @@ var taskStatusTable = sync.NewStatusTable() // NewContext begins cron tasks // Each cron task is run within the shutdown context as a running server // AtShutdown the cron server is stopped -func NewContext() { +func NewContext(original context.Context) { + defer pprof.SetGoroutineLabels(original) + _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().ShutdownContext(), "Service: Cron", process.SystemProcessType, true) initBasicTasks() initExtendedTasks() @@ -42,16 +46,29 @@ func NewContext() { lock.Lock() started = false lock.Unlock() + finished() }) } // TaskTableRow represents a task row in the tasks table type TaskTableRow struct { - Name string - Spec string - Next time.Time - Prev time.Time - ExecTimes int64 + Name string + Spec string + Next time.Time + Prev time.Time + Status string + LastMessage string + LastDoer string + ExecTimes int64 + task *Task +} + +func (t *TaskTableRow) FormatLastMessage(locale string) string { + if t.Status == "finished" { + return t.task.GetConfig().FormatMessage(locale, t.Name, t.Status, t.LastDoer) + } + + return t.task.GetConfig().FormatMessage(locale, t.Name, t.Status, t.LastDoer, t.LastMessage) } // TaskTable represents a table of tasks @@ -80,11 +97,15 @@ func ListTasks() TaskTable { } task.lock.Lock() tTable = append(tTable, &TaskTableRow{ - Name: task.Name, - Spec: spec, - Next: next, - Prev: prev, - ExecTimes: task.ExecTimes, + Name: task.Name, + Spec: spec, + Next: next, + Prev: prev, + ExecTimes: task.ExecTimes, + LastMessage: task.LastMessage, 
+ Status: task.Status, + LastDoer: task.LastDoer, + task: task, }) task.lock.Unlock() } diff --git a/services/cron/setting.go b/services/cron/setting.go index 3d9495764d..9b59a562f7 100644 --- a/services/cron/setting.go +++ b/services/cron/setting.go @@ -7,9 +7,7 @@ package cron import ( "time" - user_model "code.gitea.io/gitea/models/user" - - "github.com/unknwon/i18n" + "code.gitea.io/gitea/modules/translation/i18n" ) // Config represents a basic configuration interface that cron task @@ -17,7 +15,7 @@ type Config interface { IsEnabled() bool DoRunAtStart() bool GetSchedule() string - FormatMessage(name, status string, doer *user_model.User, args ...interface{}) string + FormatMessage(locale, name, status, doer string, args ...interface{}) string DoNoticeOnSuccess() bool } @@ -26,7 +24,7 @@ type BaseConfig struct { Enabled bool RunAtStart bool Schedule string - NoSuccessNotice bool + NoticeOnSuccess bool } // OlderThanConfig represents a cron task with OlderThan setting @@ -66,23 +64,24 @@ func (b *BaseConfig) DoRunAtStart() bool { // DoNoticeOnSuccess returns whether a success notice should be posted func (b *BaseConfig) DoNoticeOnSuccess() bool { - return !b.NoSuccessNotice + return b.NoticeOnSuccess } // FormatMessage returns a message for the task -func (b *BaseConfig) FormatMessage(name, status string, doer *user_model.User, args ...interface{}) string { +// Please note the `status` string will be concatenated with `admin.dashboard.cron.` and `admin.dashboard.task.` to provide locale messages. Similarly `name` will be composed with `admin.dashboard.` to provide the locale name for the task. +func (b *BaseConfig) FormatMessage(locale, name, status, doer string, args ...interface{}) string { realArgs := make([]interface{}, 0, len(args)+2) - realArgs = append(realArgs, i18n.Tr("en-US", "admin.dashboard."+name)) - if doer == nil { + realArgs = append(realArgs, i18n.Tr(locale, "admin.dashboard."+name)) + if doer == "" { realArgs = append(realArgs, "(Cron)") } else { - realArgs = append(realArgs, doer.Name) + realArgs = append(realArgs, doer) } if len(args) > 0 { realArgs = append(realArgs, args...) } - if doer == nil || (doer.ID == -1 && doer.Name == "(Cron)") { - return i18n.Tr("en-US", "admin.dashboard.cron."+status, realArgs...) + if doer == "" { + return i18n.Tr(locale, "admin.dashboard.cron."+status, realArgs...) } - return i18n.Tr("en-US", "admin.dashboard.task."+status, realArgs...) + return i18n.Tr(locale, "admin.dashboard.task."+status, realArgs...) 
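The comment added above spells out how FormatMessage now composes its translation keys from the locale, task name, status and doer. A toy, self-contained mirror of that composition (the tr stub stands in for i18n.Tr and is not the real API) makes the resulting keys easier to see:

// Toy mirror of the key composition in FormatMessage above; tr is a stand-in, not i18n.Tr.
package main

import "fmt"

func tr(locale, key string, args ...interface{}) string {
	return fmt.Sprintf("[%s] %s %v", locale, key, args)
}

func formatMessage(locale, name, status, doer string, args ...interface{}) string {
	realArgs := make([]interface{}, 0, len(args)+2)
	realArgs = append(realArgs, tr(locale, "admin.dashboard."+name))
	if doer == "" {
		realArgs = append(realArgs, "(Cron)")
	} else {
		realArgs = append(realArgs, doer)
	}
	realArgs = append(realArgs, args...)
	if doer == "" {
		return tr(locale, "admin.dashboard.cron."+status, realArgs...)
	}
	return tr(locale, "admin.dashboard.task."+status, realArgs...)
}

func main() {
	fmt.Println(formatMessage("en-US", "update_mirrors", "finished", ""))
	// keys used: "admin.dashboard.update_mirrors" and "admin.dashboard.cron.finished"
}

So a cron-triggered run resolves the admin.dashboard.cron.* variant of the status key, while a run started by a named doer resolves admin.dashboard.task.*, with the localized task name as the first argument in both cases.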
} diff --git a/services/cron/tasks.go b/services/cron/tasks.go index 070fb6e9e1..2252ad21e2 100644 --- a/services/cron/tasks.go +++ b/services/cron/tasks.go @@ -29,11 +29,14 @@ var ( // Task represents a Cron task type Task struct { - lock sync.Mutex - Name string - config Config - fun func(context.Context, *user_model.User, Config) error - ExecTimes int64 + lock sync.Mutex + Name string + config Config + fun func(context.Context, *user_model.User, Config) error + Status string + LastMessage string + LastDoer string + ExecTimes int64 } // DoRunAtStart returns if this task should run at the start @@ -86,24 +89,45 @@ func (t *Task) RunWithUser(doer *user_model.User, config Config) { }() graceful.GetManager().RunWithShutdownContext(func(baseCtx context.Context) { pm := process.GetManager() - ctx, _, finished := pm.AddContext(baseCtx, config.FormatMessage(t.Name, "process", doer)) + doerName := "" + if doer != nil && doer.ID != -1 { + doerName = doer.Name + } + + ctx, _, finished := pm.AddContext(baseCtx, config.FormatMessage("en-US", t.Name, "process", doerName)) defer finished() if err := t.fun(ctx, doer, config); err != nil { + var message string + var status string if db.IsErrCancelled(err) { - message := err.(db.ErrCancelled).Message - if err := admin_model.CreateNotice(ctx, admin_model.NoticeTask, config.FormatMessage(t.Name, "aborted", doer, message)); err != nil { - log.Error("CreateNotice: %v", err) - } - return + status = "cancelled" + message = err.(db.ErrCancelled).Message + } else { + status = "error" + message = err.Error() } - if err := admin_model.CreateNotice(ctx, admin_model.NoticeTask, config.FormatMessage(t.Name, "error", doer, err)); err != nil { + + t.lock.Lock() + t.LastMessage = message + t.Status = status + t.LastDoer = doerName + t.lock.Unlock() + + if err := admin_model.CreateNotice(ctx, admin_model.NoticeTask, config.FormatMessage("en-US", t.Name, "cancelled", doerName, message)); err != nil { log.Error("CreateNotice: %v", err) } return } + + t.lock.Lock() + t.Status = "finished" + t.LastMessage = "" + t.LastDoer = doerName + t.lock.Unlock() + if config.DoNoticeOnSuccess() { - if err := admin_model.CreateNotice(ctx, admin_model.NoticeTask, config.FormatMessage(t.Name, "finished", doer)); err != nil { + if err := admin_model.CreateNotice(ctx, admin_model.NoticeTask, config.FormatMessage("en-US", t.Name, "finished", doerName)); err != nil { log.Error("CreateNotice: %v", err) } } diff --git a/services/cron/tasks_basic.go b/services/cron/tasks_basic.go index fdf8550c31..6f3fcb42c3 100644 --- a/services/cron/tasks_basic.go +++ b/services/cron/tasks_basic.go @@ -15,6 +15,7 @@ import ( "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/migrations" mirror_service "code.gitea.io/gitea/services/mirror" + packages_service "code.gitea.io/gitea/services/packages" repo_service "code.gitea.io/gitea/services/repository" archiver_service "code.gitea.io/gitea/services/repository/archiver" ) @@ -28,10 +29,9 @@ func registerUpdateMirrorTask() { RegisterTaskFatal("update_mirrors", &UpdateMirrorTaskConfig{ BaseConfig: BaseConfig{ - Enabled: true, - RunAtStart: false, - Schedule: "@every 10m", - NoSuccessNotice: true, + Enabled: true, + RunAtStart: false, + Schedule: "@every 10m", }, PullLimit: 50, PushLimit: 50, @@ -140,6 +140,20 @@ func registerCleanupHookTaskTable() { }) } +func registerCleanupPackages() { + RegisterTaskFatal("cleanup_packages", &OlderThanConfig{ + BaseConfig: BaseConfig{ + Enabled: true, + RunAtStart: true, + Schedule: "@midnight", + }, + OlderThan: 24 
* time.Hour, + }, func(ctx context.Context, _ *user_model.User, config Config) error { + realConfig := config.(*OlderThanConfig) + return packages_service.Cleanup(ctx, realConfig.OlderThan) + }) +} + func initBasicTasks() { registerUpdateMirrorTask() registerRepoHealthCheck() @@ -151,4 +165,7 @@ func initBasicTasks() { registerUpdateMigrationPosterID() } registerCleanupHookTaskTable() + if setting.Packages.Enabled { + registerCleanupPackages() + } } diff --git a/services/cron/tasks_extended.go b/services/cron/tasks_extended.go index ded819a71e..2d1bf53234 100644 --- a/services/cron/tasks_extended.go +++ b/services/cron/tasks_extended.go @@ -9,6 +9,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/admin" asymkey_model "code.gitea.io/gitea/models/asymkey" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" @@ -154,6 +155,20 @@ func registerUpdateGiteaChecker() { }) } +func registerDeleteOldSystemNotices() { + RegisterTaskFatal("delete_old_system_notices", &OlderThanConfig{ + BaseConfig: BaseConfig{ + Enabled: false, + RunAtStart: false, + Schedule: "@every 168h", + }, + OlderThan: 365 * 24 * time.Hour, + }, func(ctx context.Context, _ *user_model.User, config Config) error { + olderThanConfig := config.(*OlderThanConfig) + return admin.DeleteOldSystemNotices(olderThanConfig.OlderThan) + }) +} + func initExtendedTasks() { registerDeleteInactiveUsers() registerDeleteRepositoryArchives() @@ -166,4 +181,5 @@ func initExtendedTasks() { registerRemoveRandomAvatars() registerDeleteOldActions() registerUpdateGiteaChecker() + registerDeleteOldSystemNotices() } diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go index 3760c71f2a..18cbac751c 100644 --- a/services/forms/repo_form.go +++ b/services/forms/repo_form.go @@ -11,6 +11,7 @@ import ( "strings" "code.gitea.io/gitea/models" + project_model "code.gitea.io/gitea/models/project" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" @@ -142,6 +143,7 @@ type RepoSettingForm struct { TrackerIssueStyle string EnableCloseIssuesViaCommitInAnyBranch bool EnableProjects bool + EnablePackages bool EnablePulls bool PullsIgnoreWhitespace bool PullsAllowMerge bool @@ -238,6 +240,7 @@ type WebhookForm struct { PullRequestReview bool PullRequestSync bool Repository bool + Package bool Active bool BranchFilter string `binding:"GlobPattern"` } @@ -420,15 +423,16 @@ func (f *NewPackagistHookForm) Validate(req *http.Request, errs binding.Errors) // CreateIssueForm form for creating issue type CreateIssueForm struct { - Title string `binding:"Required;MaxSize(255)"` - LabelIDs string `form:"label_ids"` - AssigneeIDs string `form:"assignee_ids"` - Ref string `form:"ref"` - MilestoneID int64 - ProjectID int64 - AssigneeID int64 - Content string - Files []string + Title string `binding:"Required;MaxSize(255)"` + LabelIDs string `form:"label_ids"` + AssigneeIDs string `form:"assignee_ids"` + Ref string `form:"ref"` + MilestoneID int64 + ProjectID int64 + AssigneeID int64 + Content string + Files []string + AllowMaintainerEdit bool } // Validate validates the fields @@ -499,7 +503,7 @@ func (i IssueLockForm) HasValidReason() bool { type CreateProjectForm struct { Title string `binding:"Required;MaxSize(100)"` Content string - BoardType models.ProjectBoardType + BoardType project_model.BoardType } // UserCreateProjectForm is a from for creating an individual or organization @@ -507,7 +511,7 @@ type CreateProjectForm struct { type 
UserCreateProjectForm struct { Title string `binding:"Required;MaxSize(100)"` Content string - BoardType models.ProjectBoardType + BoardType project_model.BoardType UID int64 `binding:"Required"` } @@ -588,6 +592,7 @@ type MergePullRequestForm struct { MergeCommitID string // only used for manually-merged HeadCommitID string `json:"head_commit_id,omitempty"` ForceMerge *bool `json:"force_merge,omitempty"` + MergeWhenChecksSucceed bool `json:"merge_when_checks_succeed,omitempty"` DeleteBranchAfterMerge bool `json:"delete_branch_after_merge,omitempty"` } @@ -657,6 +662,11 @@ type DismissReviewForm struct { Message string } +// UpdateAllowEditsForm form for changing if PR allows edits from maintainers +type UpdateAllowEditsForm struct { + AllowMaintainerEdit bool +} + // __________ .__ // \______ \ ____ | | ____ _____ ______ ____ // | _// __ \| | _/ __ \\__ \ / ___// __ \ diff --git a/services/forms/user_form.go b/services/forms/user_form.go index a886e89f87..405b4a9a49 100644 --- a/services/forms/user_form.go +++ b/services/forms/user_form.go @@ -430,3 +430,15 @@ func (f *WebauthnDeleteForm) Validate(req *http.Request, errs binding.Errors) bi ctx := context.GetContext(req) return middleware.Validate(errs, ctx.Data, f, ctx.Locale) } + +// PackageSettingForm form for package settings +type PackageSettingForm struct { + Action string + RepoID int64 `form:"repo_id"` +} + +// Validate validates the fields +func (f *PackageSettingForm) Validate(req *http.Request, errs binding.Errors) binding.Errors { + ctx := context.GetContext(req) + return middleware.Validate(errs, ctx.Data, f, ctx.Locale) +} diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go index 58c25ff98f..92ff92a6c0 100644 --- a/services/gitdiff/gitdiff.go +++ b/services/gitdiff/gitdiff.go @@ -22,8 +22,10 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + pull_model "code.gitea.io/gitea/models/pull" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/analyze" + "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/charset" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/highlight" @@ -602,25 +604,28 @@ func (diffSection *DiffSection) GetComputedInlineDiffFor(diffLine *DiffLine) Dif // DiffFile represents a file diff. type DiffFile struct { - Name string - OldName string - Index int - Addition, Deletion int - Type DiffFileType - IsCreated bool - IsDeleted bool - IsBin bool - IsLFSFile bool - IsRenamed bool - IsAmbiguous bool - IsSubmodule bool - Sections []*DiffSection - IsIncomplete bool - IsIncompleteLineTooLong bool - IsProtected bool - IsGenerated bool - IsVendored bool - Language string + Name string + NameHash string + OldName string + Index int + Addition, Deletion int + Type DiffFileType + IsCreated bool + IsDeleted bool + IsBin bool + IsLFSFile bool + IsRenamed bool + IsAmbiguous bool + IsSubmodule bool + Sections []*DiffSection + IsIncomplete bool + IsIncompleteLineTooLong bool + IsProtected bool + IsGenerated bool + IsVendored bool + IsViewed bool // User specific + HasChangedSinceLastReview bool // User specific + Language string } // GetType returns type of diff file. 
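The new DiffFile.NameHash field introduced above is filled a couple of hunks below via base.EncodeSha1(f.Name). Assuming EncodeSha1 produces a hex-encoded SHA-1, as its name suggests, the stored value is equivalent to this standalone sketch, presumably giving templates a stable, path-safe identifier for each file in the diff:

// Standalone sketch of the value NameHash likely holds (hex SHA-1 of the file name).
package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
)

func nameHash(name string) string {
	sum := sha1.Sum([]byte(name))
	return hex.EncodeToString(sum[:])
}

func main() {
	fmt.Println(nameHash("services/gitdiff/gitdiff.go"))
}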
@@ -663,6 +668,18 @@ func (diffFile *DiffFile) GetTailSection(gitRepo *git.Repository, leftCommitID, return tailSection } +// GetDiffFileName returns the name of the diff file, or its old name in case it was deleted +func (diffFile *DiffFile) GetDiffFileName() string { + if diffFile.Name == "" { + return diffFile.OldName + } + return diffFile.Name +} + +func (diffFile *DiffFile) ShouldBeHidden() bool { + return diffFile.IsGenerated || diffFile.IsViewed +} + func getCommitFileLineCount(commit *git.Commit, filePath string) int { blob, err := commit.GetBlobByPath(filePath) if err != nil { @@ -677,10 +694,12 @@ func getCommitFileLineCount(commit *git.Commit, filePath string) int { // Diff represents a difference between two git trees. type Diff struct { - Start, End string - NumFiles, TotalAddition, TotalDeletion int - Files []*DiffFile - IsIncomplete bool + Start, End string + NumFiles int + TotalAddition, TotalDeletion int + Files []*DiffFile + IsIncomplete bool + NumViewedFiles int // user-specific } // LoadComments loads comments into each line @@ -935,7 +954,6 @@ parsingLoop: break curFileLoop } } - } // TODO: There are numerous issues with this: @@ -947,6 +965,8 @@ parsingLoop: diffLineTypeBuffers[DiffLineAdd] = new(bytes.Buffer) diffLineTypeBuffers[DiffLineDel] = new(bytes.Buffer) for _, f := range diff.Files { + f.NameHash = base.EncodeSha1(f.Name) + for _, buffer := range diffLineTypeBuffers { buffer.Reset() } @@ -1378,7 +1398,7 @@ func GetDiff(gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff go func(ctx context.Context, diffArgs []string, repoPath string, writer *io.PipeWriter) { cmd := git.NewCommand(ctx, diffArgs...) cmd.SetDescription(fmt.Sprintf("GetDiffRange [repo_path: %s]", repoPath)) - if err := cmd.RunWithContext(&git.RunContext{ + if err := cmd.Run(&git.RunOpts{ Timeout: time.Duration(setting.Git.Timeout.Default) * time.Second, Dir: repoPath, Stderr: os.Stderr, @@ -1497,6 +1517,70 @@ func GetDiff(gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff return diff, nil } +// SyncAndGetUserSpecificDiff is like GetDiff, except that user specific data such as which files the given user has already viewed on the given PR will also be set +// Additionally, the database asynchronously is updated if files have changed since the last review +func SyncAndGetUserSpecificDiff(ctx context.Context, userID int64, pull *models.PullRequest, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) { + diff, err := GetDiff(gitRepo, opts, files...) 
+ if err != nil { + return nil, err + } + review, err := pull_model.GetNewestReviewState(ctx, userID, pull.ID) + if err != nil || review == nil || review.UpdatedFiles == nil { + return diff, err + } + + latestCommit := opts.AfterCommitID + if latestCommit == "" { + latestCommit = pull.HeadBranch // opts.AfterCommitID is preferred because it handles PRs from forks correctly and the branch name doesn't + } + + changedFiles, err := gitRepo.GetFilesChangedBetween(review.CommitSHA, latestCommit) + if err != nil { + return diff, err + } + + filesChangedSinceLastDiff := make(map[string]pull_model.ViewedState) +outer: + for _, diffFile := range diff.Files { + fileViewedState := review.UpdatedFiles[diffFile.GetDiffFileName()] + + // Check whether it was previously detected that the file has changed since the last review + if fileViewedState == pull_model.HasChanged { + diffFile.HasChangedSinceLastReview = true + continue + } + + filename := diffFile.GetDiffFileName() + + // Check explicitly whether the file has changed since the last review + for _, changedFile := range changedFiles { + diffFile.HasChangedSinceLastReview = filename == changedFile + if diffFile.HasChangedSinceLastReview { + filesChangedSinceLastDiff[filename] = pull_model.HasChanged + continue outer // We don't want to check if the file is viewed here as that would fold the file, which is in this case unwanted + } + } + // Check whether the file has already been viewed + if fileViewedState == pull_model.Viewed { + diffFile.IsViewed = true + diff.NumViewedFiles++ + } + } + + // Explicitly store files that have changed in the database, if any is present at all. + // This has the benefit that the "Has Changed" attribute will be present as long as the user does not explicitly mark this file as viewed, so it will even survive a page reload after marking another file as viewed. + // On the other hand, this means that even if a commit reverting an unseen change is committed, the file will still be seen as changed. 
+ if len(filesChangedSinceLastDiff) > 0 { + err := pull_model.UpdateReviewState(ctx, review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff) + if err != nil { + log.Warn("Could not update review for user %d, pull %d, commit %s and the changed files %v: %v", review.UserID, review.PullID, review.CommitSHA, filesChangedSinceLastDiff, err) + return nil, err + } + } + + return diff, err +} + // CommentAsDiff returns c.Patch as *Diff func CommentAsDiff(c *models.Comment) (*Diff, error) { diff, err := ParsePatch(setting.Git.MaxGitDiffLines, diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go index d57a3e2aba..3457785e5d 100644 --- a/services/gitdiff/gitdiff_test.go +++ b/services/gitdiff/gitdiff_test.go @@ -690,7 +690,7 @@ func TestDiffLine_GetCommentSide(t *testing.T) { } func TestGetDiffRangeWithWhitespaceBehavior(t *testing.T) { - gitRepo, err := git.OpenRepository("./testdata/academic-module") + gitRepo, err := git.OpenRepository(git.DefaultContext, "./testdata/academic-module") if !assert.NoError(t, err) { return } diff --git a/services/gitdiff/main_test.go b/services/gitdiff/main_test.go index 8c76e7e153..d4d9364ebf 100644 --- a/services/gitdiff/main_test.go +++ b/services/gitdiff/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } diff --git a/services/gitdiff/testdata/academic-module/hooks/applypatch-msg.sample b/services/gitdiff/testdata/academic-module/hooks/applypatch-msg.sample deleted file mode 100755 index a5d7b84a67..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/applypatch-msg.sample +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message taken by -# applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. The hook is -# allowed to edit the commit message file. -# -# To enable this hook, rename this file to "applypatch-msg". - -. git-sh-setup -commitmsg="$(git rev-parse --git-path hooks/commit-msg)" -test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} -: diff --git a/services/gitdiff/testdata/academic-module/hooks/commit-msg.sample b/services/gitdiff/testdata/academic-module/hooks/commit-msg.sample deleted file mode 100755 index b58d1184a9..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/commit-msg.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to check the commit log message. -# Called by "git commit" with one argument, the name of the file -# that has the commit message. The hook should exit with non-zero -# status after issuing an appropriate message if it wants to stop the -# commit. The hook is allowed to edit the commit message file. -# -# To enable this hook, rename this file to "commit-msg". - -# Uncomment the below to add a Signed-off-by line to the message. -# Doing this in a hook is a bad idea in general, but the prepare-commit-msg -# hook is more suited to it. -# -# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" - -# This example catches duplicate Signed-off-by lines. - -test "" = "$(grep '^Signed-off-by: ' "$1" | - sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { - echo >&2 Duplicate Signed-off-by lines. 
- exit 1 -} diff --git a/services/gitdiff/testdata/academic-module/hooks/fsmonitor-watchman.sample b/services/gitdiff/testdata/academic-module/hooks/fsmonitor-watchman.sample deleted file mode 100755 index e673bb3980..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/fsmonitor-watchman.sample +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/perl - -use strict; -use warnings; -use IPC::Open2; - -# An example hook script to integrate Watchman -# (https://facebook.github.io/watchman/) with git to speed up detecting -# new and modified files. -# -# The hook is passed a version (currently 1) and a time in nanoseconds -# formatted as a string and outputs to stdout all files that have been -# modified since the given time. Paths must be relative to the root of -# the working tree and separated by a single NUL. -# -# To enable this hook, rename this file to "query-watchman" and set -# 'git config core.fsmonitor .git/hooks/query-watchman' -# -my ($version, $time) = @ARGV; - -# Check the hook interface version - -if ($version == 1) { - # convert nanoseconds to seconds - $time = int $time / 1000000000; -} else { - die "Unsupported query-fsmonitor hook version '$version'.\n" . - "Falling back to scanning...\n"; -} - -my $git_work_tree; -if ($^O =~ 'msys' || $^O =~ 'cygwin') { - $git_work_tree = Win32::GetCwd(); - $git_work_tree =~ tr/\\/\//; -} else { - require Cwd; - $git_work_tree = Cwd::cwd(); -} - -my $retry = 1; - -launch_watchman(); - -sub launch_watchman { - - my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty') - or die "open2() failed: $!\n" . - "Falling back to scanning...\n"; - - # In the query expression below we're asking for names of files that - # changed since $time but were not transient (ie created after - # $time but no longer exist). - # - # To accomplish this, we're using the "since" generator to use the - # recency index to select candidate nodes and "fields" to limit the - # output to file names only. Then we're using the "expression" term to - # further constrain the results. - # - # The category of transient files that we want to ignore will have a - # creation clock (cclock) newer than $time_t value and will also not - # currently exist. - - my $query = <<" END"; - ["query", "$git_work_tree", { - "since": $time, - "fields": ["name"], - "expression": ["not", ["allof", ["since", $time, "cclock"], ["not", "exists"]]] - }] - END - - print CHLD_IN $query; - close CHLD_IN; - my $response = do {local $/; }; - - die "Watchman: command returned no output.\n" . - "Falling back to scanning...\n" if $response eq ""; - die "Watchman: command returned invalid output: $response\n" . - "Falling back to scanning...\n" unless $response =~ /^\{/; - - my $json_pkg; - eval { - require JSON::XS; - $json_pkg = "JSON::XS"; - 1; - } or do { - require JSON::PP; - $json_pkg = "JSON::PP"; - }; - - my $o = $json_pkg->new->utf8->decode($response); - - if ($retry > 0 and $o->{error} and $o->{error} =~ m/unable to resolve root .* directory (.*) is not watched/) { - print STDERR "Adding '$git_work_tree' to watchman's watch list.\n"; - $retry--; - qx/watchman watch "$git_work_tree"/; - die "Failed to make watchman watch '$git_work_tree'.\n" . - "Falling back to scanning...\n" if $? != 0; - - # Watchman will always return all files on the first query so - # return the fast "everything is dirty" flag to git and do the - # Watchman query just to get it over with now so we won't pay - # the cost in git to look up each individual file. 
- print "/\0"; - eval { launch_watchman() }; - exit 0; - } - - die "Watchman: $o->{error}.\n" . - "Falling back to scanning...\n" if $o->{error}; - - binmode STDOUT, ":utf8"; - local $, = "\0"; - print @{$o->{files}}; -} diff --git a/services/gitdiff/testdata/academic-module/hooks/post-update.sample b/services/gitdiff/testdata/academic-module/hooks/post-update.sample deleted file mode 100755 index ec17ec1939..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/post-update.sample +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare a packed repository for use over -# dumb transports. -# -# To enable this hook, rename this file to "post-update". - -exec git update-server-info diff --git a/services/gitdiff/testdata/academic-module/hooks/pre-applypatch.sample b/services/gitdiff/testdata/academic-module/hooks/pre-applypatch.sample deleted file mode 100755 index 4142082bcb..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/pre-applypatch.sample +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed -# by applypatch from an e-mail message. -# -# The hook should exit with non-zero status after issuing an -# appropriate message if it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-applypatch". - -. git-sh-setup -precommit="$(git rev-parse --git-path hooks/pre-commit)" -test -x "$precommit" && exec "$precommit" ${1+"$@"} -: diff --git a/services/gitdiff/testdata/academic-module/hooks/pre-commit.sample b/services/gitdiff/testdata/academic-module/hooks/pre-commit.sample deleted file mode 100755 index 6a75641638..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/pre-commit.sample +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -if git rev-parse --verify HEAD >/dev/null 2>&1 -then - against=HEAD -else - # Initial commit: diff against an empty tree object - against=$(git hash-object -t tree /dev/null) -fi - -# If you want to allow non-ASCII filenames set this variable to true. -allownonascii=$(git config --bool hooks.allownonascii) - -# Redirect output to stderr. -exec 1>&2 - -# Cross platform projects tend to avoid non-ASCII filenames; prevent -# them from being added to the repository. We exploit the fact that the -# printable range starts at the space character and ends with tilde. -if [ "$allownonascii" != "true" ] && - # Note that the use of brackets around a tr range is ok here, (it's - # even required, for portability to Solaris 10's /usr/bin/tr), since - # the square bracket bytes happen to fall in the designated range. - test $(git diff --cached --name-only --diff-filter=A -z $against | - LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 -then - cat <<\EOF -Error: Attempt to add a non-ASCII file name. - -This can cause problems if you want to work with people on other platforms. - -To be portable it is advisable to rename the file. - -If you know what you are doing you can disable this check using: - - git config hooks.allownonascii true -EOF - exit 1 -fi - -# If there are whitespace errors, print the offending file names and fail. 
-exec git diff-index --check --cached $against -- diff --git a/services/gitdiff/testdata/academic-module/hooks/pre-push.sample b/services/gitdiff/testdata/academic-module/hooks/pre-push.sample deleted file mode 100755 index 6187dbf439..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/pre-push.sample +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# An example hook script to verify what is about to be pushed. Called by "git -# push" after it has checked the remote status, but before anything has been -# pushed. If this script exits with a non-zero status nothing will be pushed. -# -# This hook is called with the following parameters: -# -# $1 -- Name of the remote to which the push is being done -# $2 -- URL to which the push is being done -# -# If pushing without using a named remote those arguments will be equal. -# -# Information about the commits which are being pushed is supplied as lines to -# the standard input in the form: -# -# -# -# This sample shows how to prevent push of commits where the log message starts -# with "WIP" (work in progress). - -remote="$1" -url="$2" - -z40=0000000000000000000000000000000000000000 - -while read local_ref local_sha remote_ref remote_sha -do - if [ "$local_sha" = $z40 ] - then - # Handle delete - : - else - if [ "$remote_sha" = $z40 ] - then - # New branch, examine all commits - range="$local_sha" - else - # Update to existing branch, examine new commits - range="$remote_sha..$local_sha" - fi - - # Check for WIP commit - commit=`git rev-list -n 1 --grep '^WIP' "$range"` - if [ -n "$commit" ] - then - echo >&2 "Found WIP commit in $local_ref, not pushing" - exit 1 - fi - fi -done - -exit 0 diff --git a/services/gitdiff/testdata/academic-module/hooks/pre-rebase.sample b/services/gitdiff/testdata/academic-module/hooks/pre-rebase.sample deleted file mode 100755 index 6cbef5c370..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/pre-rebase.sample +++ /dev/null @@ -1,169 +0,0 @@ -#!/bin/sh -# -# Copyright (c) 2006, 2008 Junio C Hamano -# -# The "pre-rebase" hook is run just before "git rebase" starts doing -# its job, and can prevent the command from running by exiting with -# non-zero status. -# -# The hook is called with the following parameters: -# -# $1 -- the upstream the series was forked from. -# $2 -- the branch being rebased (or empty when rebasing the current branch). -# -# This sample shows how to prevent topic branches that are already -# merged to 'next' branch from getting rebased, because allowing it -# would result in rebasing already published history. - -publish=next -basebranch="$1" -if test "$#" = 2 -then - topic="refs/heads/$2" -else - topic=`git symbolic-ref HEAD` || - exit 0 ;# we do not interrupt rebasing detached HEAD -fi - -case "$topic" in -refs/heads/??/*) - ;; -*) - exit 0 ;# we do not interrupt others. - ;; -esac - -# Now we are dealing with a topic branch being rebased -# on top of master. Is it OK to rebase it? - -# Does the topic really exist? -git show-ref -q "$topic" || { - echo >&2 "No such branch $topic" - exit 1 -} - -# Is topic fully merged to master? -not_in_master=`git rev-list --pretty=oneline ^master "$topic"` -if test -z "$not_in_master" -then - echo >&2 "$topic is fully merged to master; better remove it." - exit 1 ;# we could allow it, but there is no point. -fi - -# Is topic ever merged to next? If so you should not be rebasing it. 
-only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` -only_next_2=`git rev-list ^master ${publish} | sort` -if test "$only_next_1" = "$only_next_2" -then - not_in_topic=`git rev-list "^$topic" master` - if test -z "$not_in_topic" - then - echo >&2 "$topic is already up to date with master" - exit 1 ;# we could allow it, but there is no point. - else - exit 0 - fi -else - not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` - /usr/bin/perl -e ' - my $topic = $ARGV[0]; - my $msg = "* $topic has commits already merged to public branch:\n"; - my (%not_in_next) = map { - /^([0-9a-f]+) /; - ($1 => 1); - } split(/\n/, $ARGV[1]); - for my $elem (map { - /^([0-9a-f]+) (.*)$/; - [$1 => $2]; - } split(/\n/, $ARGV[2])) { - if (!exists $not_in_next{$elem->[0]}) { - if ($msg) { - print STDERR $msg; - undef $msg; - } - print STDERR " $elem->[1]\n"; - } - } - ' "$topic" "$not_in_next" "$not_in_master" - exit 1 -fi - -<<\DOC_END - -This sample hook safeguards topic branches that have been -published from being rewound. - -The workflow assumed here is: - - * Once a topic branch forks from "master", "master" is never - merged into it again (either directly or indirectly). - - * Once a topic branch is fully cooked and merged into "master", - it is deleted. If you need to build on top of it to correct - earlier mistakes, a new topic branch is created by forking at - the tip of the "master". This is not strictly necessary, but - it makes it easier to keep your history simple. - - * Whenever you need to test or publish your changes to topic - branches, merge them into "next" branch. - -The script, being an example, hardcodes the publish branch name -to be "next", but it is trivial to make it configurable via -$GIT_DIR/config mechanism. - -With this workflow, you would want to know: - -(1) ... if a topic branch has ever been merged to "next". Young - topic branches can have stupid mistakes you would rather - clean up before publishing, and things that have not been - merged into other branches can be easily rebased without - affecting other people. But once it is published, you would - not want to rewind it. - -(2) ... if a topic branch has been fully merged to "master". - Then you can delete it. More importantly, you should not - build on top of it -- other people may already want to - change things related to the topic as patches against your - "master", so if you need further changes, it is better to - fork the topic (perhaps with the same name) afresh from the - tip of "master". - -Let's look at this example: - - o---o---o---o---o---o---o---o---o---o "next" - / / / / - / a---a---b A / / - / / / / - / / c---c---c---c B / - / / / \ / - / / / b---b C \ / - / / / / \ / - ---o---o---o---o---o---o---o---o---o---o---o "master" - - -A, B and C are topic branches. - - * A has one fix since it was merged up to "next". - - * B has finished. It has been fully merged up to "master" and "next", - and is ready to be deleted. - - * C has not merged to "next" at all. - -We would want to allow C to be rebased, refuse A, and encourage -B to be deleted. - -To compute (1): - - git rev-list ^master ^topic next - git rev-list ^master next - - if these match, topic has not merged in next at all. - -To compute (2): - - git rev-list master..topic - - if this is empty, it is fully merged to "master". 
- -DOC_END diff --git a/services/gitdiff/testdata/academic-module/hooks/pre-receive.sample b/services/gitdiff/testdata/academic-module/hooks/pre-receive.sample deleted file mode 100755 index a1fd29ec14..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/pre-receive.sample +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# -# An example hook script to make use of push options. -# The example simply echoes all push options that start with 'echoback=' -# and rejects all pushes when the "reject" push option is used. -# -# To enable this hook, rename this file to "pre-receive". - -if test -n "$GIT_PUSH_OPTION_COUNT" -then - i=0 - while test "$i" -lt "$GIT_PUSH_OPTION_COUNT" - do - eval "value=\$GIT_PUSH_OPTION_$i" - case "$value" in - echoback=*) - echo "echo from the pre-receive-hook: ${value#*=}" >&2 - ;; - reject) - exit 1 - esac - i=$((i + 1)) - done -fi diff --git a/services/gitdiff/testdata/academic-module/hooks/prepare-commit-msg.sample b/services/gitdiff/testdata/academic-module/hooks/prepare-commit-msg.sample deleted file mode 100755 index 10fa14c5ab..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/prepare-commit-msg.sample +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -# -# An example hook script to prepare the commit log message. -# Called by "git commit" with the name of the file that has the -# commit message, followed by the description of the commit -# message's source. The hook's purpose is to edit the commit -# message file. If the hook fails with a non-zero status, -# the commit is aborted. -# -# To enable this hook, rename this file to "prepare-commit-msg". - -# This hook includes three examples. The first one removes the -# "# Please enter the commit message..." help message. -# -# The second includes the output of "git diff --name-status -r" -# into the message, just before the "git status" output. It is -# commented because it doesn't cope with --amend or with squashed -# commits. -# -# The third example adds a Signed-off-by line to the message, that can -# still be edited. This is rarely a good idea. - -COMMIT_MSG_FILE=$1 -COMMIT_SOURCE=$2 -SHA1=$3 - -/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" - -# case "$COMMIT_SOURCE,$SHA1" in -# ,|template,) -# /usr/bin/perl -i.bak -pe ' -# print "\n" . `git diff --cached --name-status -r` -# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; -# *) ;; -# esac - -# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') -# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" -# if test -z "$COMMIT_SOURCE" -# then -# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" -# fi diff --git a/services/gitdiff/testdata/academic-module/hooks/update.sample b/services/gitdiff/testdata/academic-module/hooks/update.sample deleted file mode 100755 index 80ba94135c..0000000000 --- a/services/gitdiff/testdata/academic-module/hooks/update.sample +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh -# -# An example hook script to block unannotated tags from entering. -# Called by "git receive-pack" with arguments: refname sha1-old sha1-new -# -# To enable this hook, rename this file to "update". -# -# Config -# ------ -# hooks.allowunannotated -# This boolean sets whether unannotated tags will be allowed into the -# repository. By default they won't be. -# hooks.allowdeletetag -# This boolean sets whether deleting tags will be allowed in the -# repository. By default they won't be. 
-# hooks.allowmodifytag -# This boolean sets whether a tag may be modified after creation. By default -# it won't be. -# hooks.allowdeletebranch -# This boolean sets whether deleting branches will be allowed in the -# repository. By default they won't be. -# hooks.denycreatebranch -# This boolean sets whether remotely creating branches will be denied -# in the repository. By default this is allowed. -# - -# --- Command line -refname="$1" -oldrev="$2" -newrev="$3" - -# --- Safety check -if [ -z "$GIT_DIR" ]; then - echo "Don't run this script from the command line." >&2 - echo " (if you want, you could supply GIT_DIR then run" >&2 - echo " $0 )" >&2 - exit 1 -fi - -if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then - echo "usage: $0 " >&2 - exit 1 -fi - -# --- Config -allowunannotated=$(git config --bool hooks.allowunannotated) -allowdeletebranch=$(git config --bool hooks.allowdeletebranch) -denycreatebranch=$(git config --bool hooks.denycreatebranch) -allowdeletetag=$(git config --bool hooks.allowdeletetag) -allowmodifytag=$(git config --bool hooks.allowmodifytag) - -# check for no description -projectdesc=$(sed -e '1q' "$GIT_DIR/description") -case "$projectdesc" in -"Unnamed repository"* | "") - echo "*** Project description file hasn't been set" >&2 - exit 1 - ;; -esac - -# --- Check types -# if $newrev is 0000...0000, it's a commit to delete a ref. -zero="0000000000000000000000000000000000000000" -if [ "$newrev" = "$zero" ]; then - newrev_type=delete -else - newrev_type=$(git cat-file -t $newrev) -fi - -case "$refname","$newrev_type" in - refs/tags/*,commit) - # un-annotated tag - short_refname=${refname##refs/tags/} - if [ "$allowunannotated" != "true" ]; then - echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 - echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 - exit 1 - fi - ;; - refs/tags/*,delete) - # delete tag - if [ "$allowdeletetag" != "true" ]; then - echo "*** Deleting a tag is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/tags/*,tag) - # annotated tag - if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 - then - echo "*** Tag '$refname' already exists." >&2 - echo "*** Modifying a tag is not allowed in this repository." >&2 - exit 1 - fi - ;; - refs/heads/*,commit) - # branch - if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then - echo "*** Creating a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/heads/*,delete) - # delete branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - refs/remotes/*,commit) - # tracking branch - ;; - refs/remotes/*,delete) - # delete tracking branch - if [ "$allowdeletebranch" != "true" ]; then - echo "*** Deleting a tracking branch is not allowed in this repository" >&2 - exit 1 - fi - ;; - *) - # Anything else (is there anything else?) 
- echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 - exit 1 - ;; -esac - -# --- Finished -exit 0 diff --git a/services/issue/assignee.go b/services/issue/assignee.go index f09c51293b..e6169b9c7e 100644 --- a/services/issue/assignee.go +++ b/services/issue/assignee.go @@ -5,7 +5,11 @@ package issue import ( + "context" + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -40,7 +44,7 @@ func DeleteNotPassedAssignee(issue *models.Issue, doer *user_model.User, assigne // ToggleAssignee changes a user between assigned and not assigned for this issue, and make issue comment for it. func ToggleAssignee(issue *models.Issue, doer *user_model.User, assigneeID int64) (removed bool, comment *models.Comment, err error) { - removed, comment, err = issue.ToggleAssignee(doer, assigneeID) + removed, comment, err = models.ToggleIssueAssignee(issue, doer, assigneeID) if err != nil { return } @@ -76,7 +80,7 @@ func ReviewRequest(issue *models.Issue, doer, reviewer *user_model.User, isAdd b } // IsValidReviewRequest Check permission for ReviewRequest -func IsValidReviewRequest(reviewer, doer *user_model.User, isAdd bool, issue *models.Issue, permDoer *models.Permission) error { +func IsValidReviewRequest(ctx context.Context, reviewer, doer *user_model.User, isAdd bool, issue *models.Issue, permDoer *models.Permission) error { if reviewer.IsOrganization() { return models.ErrNotValidReviewRequest{ Reason: "Organization can't be added as reviewer", @@ -92,14 +96,14 @@ func IsValidReviewRequest(reviewer, doer *user_model.User, isAdd bool, issue *mo } } - permReviewer, err := models.GetUserRepoPermission(issue.Repo, reviewer) + permReviewer, err := models.GetUserRepoPermission(ctx, issue.Repo, reviewer) if err != nil { return err } if permDoer == nil { permDoer = new(models.Permission) - *permDoer, err = models.GetUserRepoPermission(issue.Repo, doer) + *permDoer, err = models.GetUserRepoPermission(ctx, issue.Repo, doer) if err != nil { return err } @@ -166,7 +170,7 @@ func IsValidReviewRequest(reviewer, doer *user_model.User, isAdd bool, issue *mo } // IsValidTeamReviewRequest Check permission for ReviewRequest Team -func IsValidTeamReviewRequest(reviewer *models.Team, doer *user_model.User, isAdd bool, issue *models.Issue) error { +func IsValidTeamReviewRequest(ctx context.Context, reviewer *organization.Team, doer *user_model.User, isAdd bool, issue *models.Issue) error { if doer.IsOrganization() { return models.ErrNotValidReviewRequest{ Reason: "Organization can't be doer to add reviewer", @@ -175,7 +179,7 @@ func IsValidTeamReviewRequest(reviewer *models.Team, doer *user_model.User, isAd } } - permission, err := models.GetUserRepoPermission(issue.Repo, doer) + permission, err := models.GetUserRepoPermission(ctx, issue.Repo, doer) if err != nil { log.Error("Unable to GetUserRepoPermission for %-v in %-v#%d", doer, issue.Repo, issue.Index) return err @@ -183,7 +187,7 @@ func IsValidTeamReviewRequest(reviewer *models.Team, doer *user_model.User, isAd if isAdd { if issue.Repo.IsPrivate { - hasTeam := models.HasTeamRepo(reviewer.OrgID, reviewer.ID, issue.RepoID) + hasTeam := organization.HasTeamRepo(ctx, reviewer.OrgID, reviewer.ID, issue.RepoID) if !hasTeam { return models.ErrNotValidReviewRequest{ @@ -221,7 +225,7 @@ func IsValidTeamReviewRequest(reviewer *models.Team, doer *user_model.User, isAd } // 
TeamReviewRequest add or remove a review request from a team for this PR, and make comment for it. -func TeamReviewRequest(issue *models.Issue, doer *user_model.User, reviewer *models.Team, isAdd bool) (comment *models.Comment, err error) { +func TeamReviewRequest(issue *models.Issue, doer *user_model.User, reviewer *organization.Team, isAdd bool) (comment *models.Comment, err error) { if isAdd { comment, err = models.AddTeamReviewRequest(issue, reviewer, doer) } else { @@ -241,11 +245,14 @@ func TeamReviewRequest(issue *models.Issue, doer *user_model.User, reviewer *mod return } - if err = reviewer.GetMembers(&models.SearchMembersOptions{}); err != nil { + members, err := organization.GetTeamMembers(db.DefaultContext, &organization.SearchMembersOptions{ + TeamID: reviewer.ID, + }) + if err != nil { return } - for _, member := range reviewer.Members { + for _, member := range members { if member.ID == comment.Issue.PosterID { continue } diff --git a/services/issue/commit.go b/services/issue/commit.go index 0dda5f202f..b5d97e12a8 100644 --- a/services/issue/commit.go +++ b/services/issue/commit.go @@ -14,6 +14,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/references" @@ -130,7 +131,7 @@ func UpdateIssuesCommit(doer *user_model.User, repo *repo_model.Repository, comm continue } - perm, err := models.GetUserRepoPermission(refRepo, doer) + perm, err := models.GetUserRepoPermission(db.DefaultContext, refRepo, doer) if err != nil { return err } diff --git a/services/issue/content.go b/services/issue/content.go index 6d7883090b..a60878479b 100644 --- a/services/issue/content.go +++ b/services/issue/content.go @@ -14,7 +14,7 @@ import ( func ChangeContent(issue *models.Issue, doer *user_model.User, content string) (err error) { oldContent := issue.Content - if err := issue.ChangeContent(doer, content); err != nil { + if err := models.ChangeIssueContent(issue, doer, content); err != nil { return err } diff --git a/services/issue/issue.go b/services/issue/issue.go index 6e5e4bfd37..6bc3959979 100644 --- a/services/issue/issue.go +++ b/services/issue/issue.go @@ -28,7 +28,7 @@ func NewIssue(repo *repo_model.Repository, issue *models.Issue, labelIDs []int64 } } - mentions, err := issue.FindAndUpdateIssueMentions(db.DefaultContext, issue.Poster, issue.Content) + mentions, err := models.FindAndUpdateIssueMentions(db.DefaultContext, issue, issue.Poster, issue.Content) if err != nil { return err } @@ -49,7 +49,7 @@ func ChangeTitle(issue *models.Issue, doer *user_model.User, title string) (err oldTitle := issue.Title issue.Title = title - if err = issue.ChangeTitle(doer, oldTitle); err != nil { + if err = models.ChangeIssueTitle(issue, doer, oldTitle); err != nil { return } @@ -63,7 +63,7 @@ func ChangeIssueRef(issue *models.Issue, doer *user_model.User, ref string) erro oldRef := issue.Ref issue.Ref = ref - if err := issue.ChangeRef(doer, oldRef); err != nil { + if err := models.ChangeIssueRef(issue, doer, oldRef); err != nil { return err } diff --git a/services/issue/label.go b/services/issue/label.go index 96df7ed345..62ccc0ad65 100644 --- a/services/issue/label.go +++ b/services/issue/label.go @@ -6,13 +6,14 @@ package issue import ( "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/notification" ) // ClearLabels clears all of an issue's labels func 
ClearLabels(issue *models.Issue, doer *user_model.User) (err error) { - if err = issue.ClearLabels(doer); err != nil { + if err = models.ClearIssueLabels(issue, doer); err != nil { return } @@ -43,11 +44,17 @@ func AddLabels(issue *models.Issue, doer *user_model.User, labels []*models.Labe // RemoveLabel removes a label from issue by given ID. func RemoveLabel(issue *models.Issue, doer *user_model.User, label *models.Label) error { - if err := issue.LoadRepo(); err != nil { + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + if err := issue.LoadRepo(ctx); err != nil { return err } - perm, err := models.GetUserRepoPermission(issue.Repo, doer) + perm, err := models.GetUserRepoPermission(ctx, issue.Repo, doer) if err != nil { return err } @@ -58,7 +65,11 @@ func RemoveLabel(issue *models.Issue, doer *user_model.User, label *models.Label return models.ErrRepoLabelNotExist{} } - if err := models.DeleteIssueLabel(issue, label, doer); err != nil { + if err := models.DeleteIssueLabel(ctx, issue, label, doer); err != nil { + return err + } + + if err := committer.Commit(); err != nil { return err } @@ -73,7 +84,7 @@ func ReplaceLabels(issue *models.Issue, doer *user_model.User, labels []*models. return err } - if err := issue.ReplaceLabels(labels, doer); err != nil { + if err := models.ReplaceIssueLabels(issue, labels, doer); err != nil { return err } diff --git a/services/issue/main_test.go b/services/issue/main_test.go index cbcfd08a49..689ae744f6 100644 --- a/services/issue/main_test.go +++ b/services/issue/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } diff --git a/services/issue/milestone.go b/services/issue/milestone.go index 999da50844..287f8ae285 100644 --- a/services/issue/milestone.go +++ b/services/issue/milestone.go @@ -5,15 +5,68 @@ package issue import ( + "context" + "fmt" + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/notification" ) +func changeMilestoneAssign(ctx context.Context, doer *user_model.User, issue *models.Issue, oldMilestoneID int64) error { + if err := models.UpdateIssueCols(ctx, issue, "milestone_id"); err != nil { + return err + } + + if oldMilestoneID > 0 { + if err := issues_model.UpdateMilestoneCounters(ctx, oldMilestoneID); err != nil { + return err + } + } + + if issue.MilestoneID > 0 { + if err := issues_model.UpdateMilestoneCounters(ctx, issue.MilestoneID); err != nil { + return err + } + } + + if oldMilestoneID > 0 || issue.MilestoneID > 0 { + if err := issue.LoadRepo(ctx); err != nil { + return err + } + + opts := &models.CreateCommentOptions{ + Type: models.CommentTypeMilestone, + Doer: doer, + Repo: issue.Repo, + Issue: issue, + OldMilestoneID: oldMilestoneID, + MilestoneID: issue.MilestoneID, + } + if _, err := models.CreateCommentCtx(ctx, opts); err != nil { + return err + } + } + + return nil +} + // ChangeMilestoneAssign changes assignment of milestone for issue. 
func ChangeMilestoneAssign(issue *models.Issue, doer *user_model.User, oldMilestoneID int64) (err error) { - if err = models.ChangeMilestoneAssign(issue, doer, oldMilestoneID); err != nil { - return + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + if err = changeMilestoneAssign(ctx, doer, issue, oldMilestoneID); err != nil { + return err + } + + if err = committer.Commit(); err != nil { + return fmt.Errorf("Commit: %v", err) } notification.NotifyIssueChangeMilestone(doer, issue, oldMilestoneID) diff --git a/services/issue/milestone_test.go b/services/issue/milestone_test.go new file mode 100644 index 0000000000..80e37a8acd --- /dev/null +++ b/services/issue/milestone_test.go @@ -0,0 +1,35 @@ +// Copyright 2019 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package issue + +import ( + "testing" + + "code.gitea.io/gitea/models" + issues_model "code.gitea.io/gitea/models/issues" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + + "github.com/stretchr/testify/assert" +) + +func TestChangeMilestoneAssign(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + issue := unittest.AssertExistsAndLoadBean(t, &models.Issue{RepoID: 1}).(*models.Issue) + doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) + assert.NotNil(t, issue) + assert.NotNil(t, doer) + + oldMilestoneID := issue.MilestoneID + issue.MilestoneID = 2 + assert.NoError(t, ChangeMilestoneAssign(issue, doer, oldMilestoneID)) + unittest.AssertExistsAndLoadBean(t, &models.Comment{ + IssueID: issue.ID, + Type: models.CommentTypeMilestone, + MilestoneID: issue.MilestoneID, + OldMilestoneID: oldMilestoneID, + }) + unittest.CheckConsistencyFor(t, &issues_model.Milestone{}, &models.Issue{}) +} diff --git a/services/issue/status.go b/services/issue/status.go index 64fbccd26a..d2b4fc303e 100644 --- a/services/issue/status.go +++ b/services/issue/status.go @@ -5,6 +5,8 @@ package issue import ( + "context" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" user_model "code.gitea.io/gitea/models/user" @@ -14,10 +16,16 @@ import ( // ChangeStatus changes issue status to open or closed. func ChangeStatus(issue *models.Issue, doer *user_model.User, closed bool) error { - comment, err := issue.ChangeStatus(doer, closed) + return changeStatusCtx(db.DefaultContext, issue, doer, closed) +} + +// changeStatusCtx changes issue status to open or closed. +// TODO: if context is not db.DefaultContext we get a deadlock!!! 
+func changeStatusCtx(ctx context.Context, issue *models.Issue, doer *user_model.User, closed bool) error { + comment, err := models.ChangeIssueStatus(ctx, issue, doer, closed) if err != nil { if models.IsErrDependenciesLeft(err) && closed { - if err := models.FinishIssueStopwatchIfPossible(db.DefaultContext, doer, issue); err != nil { + if err := models.FinishIssueStopwatchIfPossible(ctx, doer, issue); err != nil { log.Error("Unable to stop stopwatch for issue[%d]#%d: %v", issue.ID, issue.Index, err) } } @@ -25,7 +33,7 @@ func ChangeStatus(issue *models.Issue, doer *user_model.User, closed bool) error } if closed { - if err := models.FinishIssueStopwatchIfPossible(db.DefaultContext, doer, issue); err != nil { + if err := models.FinishIssueStopwatchIfPossible(ctx, doer, issue); err != nil { return err } } diff --git a/services/lfs/locks.go b/services/lfs/locks.go index 3839119b27..0299452205 100644 --- a/services/lfs/locks.go +++ b/services/lfs/locks.go @@ -52,7 +52,7 @@ func GetListLockHandler(ctx *context.Context) { if err != nil { log.Debug("Could not find repository: %s/%s - %s", rv.User, rv.Repo, err) ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") - ctx.JSON(401, api.LFSLockError{ + ctx.JSON(http.StatusUnauthorized, api.LFSLockError{ Message: "You must have pull access to list locks", }) return @@ -88,7 +88,7 @@ func GetListLockHandler(ctx *context.Context) { }) return } - lock, err := models.GetLFSLockByID(v) + lock, err := models.GetLFSLockByID(ctx, v) if err != nil && !models.IsErrLFSLockNotExist(err) { log.Error("Unable to get lock with ID[%s]: Error: %v", v, err) } @@ -98,7 +98,7 @@ func GetListLockHandler(ctx *context.Context) { path := ctx.FormString("path") if path != "" { // Case where we request a specific id - lock, err := models.GetLFSLock(repository, path) + lock, err := models.GetLFSLock(ctx, repository, path) if err != nil && !models.IsErrLFSLockNotExist(err) { log.Error("Unable to get lock for repository %-v with path %s: Error: %v", repository, path, err) } @@ -139,7 +139,7 @@ func PostLockHandler(ctx *context.Context) { if err != nil { log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err) ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") - ctx.JSON(401, api.LFSLockError{ + ctx.JSON(http.StatusUnauthorized, api.LFSLockError{ Message: "You must have push access to create locks", }) return @@ -164,13 +164,13 @@ func PostLockHandler(ctx *context.Context) { dec := json.NewDecoder(bodyReader) if err := dec.Decode(&req); err != nil { log.Warn("Failed to decode lock request as json. 
Error: %v", err) - writeStatus(ctx, 400) + writeStatus(ctx, http.StatusBadRequest) return } lock, err := models.CreateLFSLock(repository, &models.LFSLock{ Path: req.Path, - OwnerID: ctx.User.ID, + OwnerID: ctx.Doer.ID, }) if err != nil { if models.IsErrLFSLockAlreadyExist(err) { @@ -187,7 +187,7 @@ func PostLockHandler(ctx *context.Context) { }) return } - log.Error("Unable to CreateLFSLock in repository %-v at %s for user %-v: Error: %v", repository, req.Path, ctx.User, err) + log.Error("Unable to CreateLFSLock in repository %-v at %s for user %-v: Error: %v", repository, req.Path, ctx.Doer, err) ctx.JSON(http.StatusInternalServerError, api.LFSLockError{ Message: "internal server error : Internal Server Error", }) @@ -206,7 +206,7 @@ func VerifyLockHandler(ctx *context.Context) { if err != nil { log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err) ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") - ctx.JSON(401, api.LFSLockError{ + ctx.JSON(http.StatusUnauthorized, api.LFSLockError{ Message: "You must have push access to verify locks", }) return @@ -249,7 +249,7 @@ func VerifyLockHandler(ctx *context.Context) { lockOursListAPI := make([]*api.LFSLock, 0, len(lockList)) lockTheirsListAPI := make([]*api.LFSLock, 0, len(lockList)) for _, l := range lockList { - if l.OwnerID == ctx.User.ID { + if l.OwnerID == ctx.Doer.ID { lockOursListAPI = append(lockOursListAPI, convert.ToLFSLock(l)) } else { lockTheirsListAPI = append(lockTheirsListAPI, convert.ToLFSLock(l)) @@ -272,7 +272,7 @@ func UnLockHandler(ctx *context.Context) { if err != nil { log.Error("Unable to get repository: %s/%s Error: %v", userName, repoName, err) ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") - ctx.JSON(401, api.LFSLockError{ + ctx.JSON(http.StatusUnauthorized, api.LFSLockError{ Message: "You must have push access to delete locks", }) return @@ -297,11 +297,11 @@ func UnLockHandler(ctx *context.Context) { dec := json.NewDecoder(bodyReader) if err := dec.Decode(&req); err != nil { log.Warn("Failed to decode lock request as json. Error: %v", err) - writeStatus(ctx, 400) + writeStatus(ctx, http.StatusBadRequest) return } - lock, err := models.DeleteLFSLockByID(ctx.ParamsInt64("lid"), repository, ctx.User, req.Force) + lock, err := models.DeleteLFSLockByID(ctx.ParamsInt64("lid"), repository, ctx.Doer, req.Force) if err != nil { if models.IsErrLFSUnauthorizedAction(err) { ctx.Resp.Header().Set("WWW-Authenticate", "Basic realm=gitea-lfs") @@ -310,7 +310,7 @@ func UnLockHandler(ctx *context.Context) { }) return } - log.Error("Unable to DeleteLFSLockByID[%d] by user %-v with force %t: Error: %v", ctx.ParamsInt64("lid"), ctx.User, req.Force, err) + log.Error("Unable to DeleteLFSLockByID[%d] by user %-v with force %t: Error: %v", ctx.ParamsInt64("lid"), ctx.Doer, req.Force, err) ctx.JSON(http.StatusInternalServerError, api.LFSLockError{ Message: "unable to delete lock : Internal Server Error", }) diff --git a/services/lfs/server.go b/services/lfs/server.go index 7c3f88e57b..c095bbfab4 100644 --- a/services/lfs/server.go +++ b/services/lfs/server.go @@ -222,7 +222,7 @@ func BatchHandler(ctx *context.Context) { } if exists && meta == nil { - accessible, err := models.LFSObjectAccessible(ctx.User, p.Oid) + accessible, err := models.LFSObjectAccessible(ctx.Doer, p.Oid) if err != nil { log.Error("Unable to check if LFS MetaObject [%s] is accessible. 
Error: %v", p.Oid, err) writeStatus(ctx, http.StatusInternalServerError) @@ -296,7 +296,7 @@ func UploadHandler(ctx *context.Context) { uploadOrVerify := func() error { if exists { - accessible, err := models.LFSObjectAccessible(ctx.User, p.Oid) + accessible, err := models.LFSObjectAccessible(ctx.Doer, p.Oid) if err != nil { log.Error("Unable to check if LFS MetaObject [%s] is accessible. Error: %v", p.Oid, err) return err @@ -488,9 +488,9 @@ func authenticate(ctx *context.Context, repository *repo_model.Repository, autho } // ctx.IsSigned is unnecessary here, this will be checked in perm.CanAccess - perm, err := models.GetUserRepoPermission(repository, ctx.User) + perm, err := models.GetUserRepoPermission(ctx, repository, ctx.Doer) if err != nil { - log.Error("Unable to GetUserRepoPermission for user %-v in repo %-v Error: %v", ctx.User, repository) + log.Error("Unable to GetUserRepoPermission for user %-v in repo %-v Error: %v", ctx.Doer, repository) return false } @@ -505,7 +505,7 @@ func authenticate(ctx *context.Context, repository *repo_model.Repository, autho log.Warn("Authentication failure for provided token with Error: %v", err) return false } - ctx.User = user + ctx.Doer = user return true } diff --git a/services/mailer/mail.go b/services/mailer/mail.go index 3983237fc5..bdd7e25cab 100644 --- a/services/mailer/mail.go +++ b/services/mailer/mail.go @@ -7,6 +7,7 @@ package mailer import ( "bytes" + "context" "fmt" "html/template" "mime" @@ -17,6 +18,7 @@ import ( "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" @@ -77,8 +79,9 @@ func sendUserMail(language string, u *user_model.User, tpl base.TplName, code, s "Code": code, "Language": locale.Language(), // helper - "i18n": locale, - "Str2html": templates.Str2html, + "i18n": locale, + "Str2html": templates.Str2html, + "DotEscape": templates.DotEscape, } var content bytes.Buffer @@ -127,8 +130,9 @@ func SendActivateEmailMail(u *user_model.User, email *user_model.EmailAddress) { "Email": email.Email, "Language": locale.Language(), // helper - "i18n": locale, - "Str2html": templates.Str2html, + "i18n": locale, + "Str2html": templates.Str2html, + "DotEscape": templates.DotEscape, } var content bytes.Buffer @@ -146,8 +150,8 @@ func SendActivateEmailMail(u *user_model.User, email *user_model.EmailAddress) { // SendRegisterNotifyMail triggers a notify e-mail by admin created a account. func SendRegisterNotifyMail(u *user_model.User) { - if setting.MailService == nil { - // No mail service configured + if setting.MailService == nil || !u.IsActive { + // No mail service configured OR user is inactive return } locale := translation.NewLocale(u.Language) @@ -157,8 +161,9 @@ func SendRegisterNotifyMail(u *user_model.User) { "Username": u.Name, "Language": locale.Language(), // helper - "i18n": locale, - "Str2html": templates.Str2html, + "i18n": locale, + "Str2html": templates.Str2html, + "DotEscape": templates.DotEscape, } var content bytes.Buffer @@ -176,8 +181,8 @@ func SendRegisterNotifyMail(u *user_model.User) { // SendCollaboratorMail sends mail notification to new collaborator. 
func SendCollaboratorMail(u, doer *user_model.User, repo *repo_model.Repository) { - if setting.MailService == nil { - // No mail service configured + if setting.MailService == nil || !u.IsActive { + // No mail service configured OR the user is inactive return } locale := translation.NewLocale(u.Language) @@ -190,8 +195,9 @@ func SendCollaboratorMail(u, doer *user_model.User, repo *repo_model.Repository) "Link": repo.HTMLURL(), "Language": locale.Language(), // helper - "i18n": locale, - "Str2html": templates.Str2html, + "i18n": locale, + "Str2html": templates.Str2html, + "DotEscape": templates.DotEscape, } var content bytes.Buffer @@ -274,8 +280,9 @@ func composeIssueCommentMessages(ctx *mailCommentContext, lang string, recipient "ReviewComments": reviewComments, "Language": locale.Language(), // helper - "i18n": locale, - "Str2html": templates.Str2html, + "i18n": locale, + "Str2html": templates.Str2html, + "DotEscape": templates.DotEscape, } var mailSubject bytes.Buffer @@ -360,6 +367,7 @@ func generateAdditionalHeaders(ctx *mailCommentContext, reason string, recipient //"List-Post": https://github.com/go-gitea/gitea/pull/13585 "List-Unsubscribe": ctx.Issue.HTMLURL(), + "X-Mailer": "Gitea", "X-Gitea-Reason": reason, "X-Gitea-Sender": ctx.Doer.DisplayName(), "X-Gitea-Recipient": recipient.DisplayName(), @@ -398,18 +406,23 @@ func SendIssueAssignedMail(issue *models.Issue, doer *user_model.User, content s return nil } - if err := issue.LoadRepo(); err != nil { + if err := issue.LoadRepo(db.DefaultContext); err != nil { log.Error("Unable to load repo [%d] for issue #%d [%d]. Error: %v", issue.RepoID, issue.Index, issue.ID, err) return err } langMap := make(map[string][]*user_model.User) for _, user := range recipients { + if !user.IsActive { + // don't send emails to inactive users + continue + } langMap[user.Language] = append(langMap[user.Language], user) } for lang, tos := range langMap { msgs, err := composeIssueCommentMessages(&mailCommentContext{ + Context: context.TODO(), // TODO: use a correct context Issue: issue, Doer: doer, ActionType: models.ActionType(0), diff --git a/services/mailer/mail_issue.go b/services/mailer/mail_issue.go index bd5008f076..c24edf50c9 100644 --- a/services/mailer/mail_issue.go +++ b/services/mailer/mail_issue.go @@ -9,7 +9,6 @@ import ( "fmt" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -41,7 +40,7 @@ const ( // 2. Users who are not in 1. but get mentioned in current issue/comment. 
func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*user_model.User) error { // Required by the mail composer; make sure to load these before calling the async function - if err := ctx.Issue.LoadRepo(); err != nil { + if err := ctx.Issue.LoadRepo(ctx); err != nil { return fmt.Errorf("LoadRepo(): %v", err) } if err := ctx.Issue.LoadPoster(); err != nil { @@ -81,7 +80,7 @@ func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*user_mo // =========== Repo watchers =========== // Make repo watchers last, since it's likely the list with the most users if !(ctx.Issue.IsPull && ctx.Issue.PullRequest.IsWorkInProgress() && ctx.ActionType != models.ActionCreatePullRequest) { - ids, err = repo_model.GetRepoWatchersIDs(db.DefaultContext, ctx.Issue.RepoID) + ids, err = repo_model.GetRepoWatchersIDs(ctx, ctx.Issue.RepoID) if err != nil { return fmt.Errorf("GetRepoWatchersIDs(%d): %v", ctx.Issue.RepoID, err) } @@ -126,6 +125,10 @@ func mailIssueCommentBatch(ctx *mailCommentContext, users []*user_model.User, vi langMap := make(map[string][]*user_model.User) for _, user := range users { + if !user.IsActive { + // Exclude deactivated users + continue + } // At this point we exclude: // user that don't have all mails enabled or users only get mail on mention and this is one ... if !(user.EmailNotificationsPreference == user_model.EmailNotificationsEnabled || @@ -182,6 +185,7 @@ func MailParticipants(issue *models.Issue, doer *user_model.User, opType models. } if err := mailIssueCommentToParticipants( &mailCommentContext{ + Context: context.TODO(), // TODO: use a correct context Issue: issue, Doer: doer, ActionType: opType, diff --git a/services/mailer/mail_release.go b/services/mailer/mail_release.go index 1ca9ad02d7..b6bddeac04 100644 --- a/services/mailer/mail_release.go +++ b/services/mailer/mail_release.go @@ -9,7 +9,6 @@ import ( "context" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" @@ -32,7 +31,7 @@ func MailNewRelease(ctx context.Context, rel *models.Release) { return } - watcherIDList, err := repo_model.GetRepoWatchersIDs(db.DefaultContext, rel.RepoID) + watcherIDList, err := repo_model.GetRepoWatchersIDs(ctx, rel.RepoID) if err != nil { log.Error("GetRepoWatchersIDs(%d): %v", rel.RepoID, err) return @@ -76,8 +75,9 @@ func mailNewRelease(ctx context.Context, lang string, tos []string, rel *models. 
"Subject": subject, "Language": locale.Language(), // helper - "i18n": locale, - "Str2html": templates.Str2html, + "i18n": locale, + "Str2html": templates.Str2html, + "DotEscape": templates.DotEscape, } var mailBody bytes.Buffer diff --git a/services/mailer/mail_repo.go b/services/mailer/mail_repo.go index a5343f8128..7f856f2d40 100644 --- a/services/mailer/mail_repo.go +++ b/services/mailer/mail_repo.go @@ -8,7 +8,8 @@ import ( "bytes" "fmt" - "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/setting" @@ -24,13 +25,17 @@ func SendRepoTransferNotifyMail(doer, newOwner *user_model.User, repo *repo_mode } if newOwner.IsOrganization() { - users, err := models.GetUsersWhoCanCreateOrgRepo(newOwner.ID) + users, err := organization.GetUsersWhoCanCreateOrgRepo(db.DefaultContext, newOwner.ID) if err != nil { return err } langMap := make(map[string][]string) for _, user := range users { + if !user.IsActive { + // don't send emails to inactive users + continue + } langMap[user.Language] = append(langMap[user.Language], user.Email) } @@ -69,8 +74,9 @@ func sendRepoTransferNotifyMailPerLang(lang string, newOwner, doer *user_model.U "Language": locale.Language(), "Destination": destination, // helper - "i18n": locale, - "Str2html": templates.Str2html, + "i18n": locale, + "Str2html": templates.Str2html, + "DotEscape": templates.DotEscape, } if err := bodyTemplates.ExecuteTemplate(&content, string(mailRepoTransferNotify), data); err != nil { diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go index ba82fc6ff8..baf426146a 100644 --- a/services/mailer/mail_test.go +++ b/services/mailer/mail_test.go @@ -6,6 +6,7 @@ package mailer import ( "bytes" + "context" "fmt" "html/template" "strings" @@ -13,6 +14,7 @@ import ( texttmpl "text/template" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -56,7 +58,7 @@ func prepareMailerTest(t *testing.T) (doer *user_model.User, repo *repo_model.Re doer = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}).(*user_model.User) repo = unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1, Owner: doer}).(*repo_model.Repository) issue = unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 1, Repo: repo, Poster: doer}).(*models.Issue) - assert.NoError(t, issue.LoadRepo()) + assert.NoError(t, issue.LoadRepo(db.DefaultContext)) comment = unittest.AssertExistsAndLoadBean(t, &models.Comment{ID: 2, Issue: issue}).(*models.Comment) return } @@ -70,7 +72,8 @@ func TestComposeIssueCommentMessage(t *testing.T) { recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}} msgs, err := composeIssueCommentMessages(&mailCommentContext{ - Issue: issue, Doer: doer, ActionType: models.ActionCommentIssue, + Context: context.TODO(), // TODO: use a correct context + Issue: issue, Doer: doer, ActionType: models.ActionCommentIssue, Content: "test body", Comment: comment, }, "en-US", recipients, false, "issue comment") assert.NoError(t, err) @@ -99,7 +102,8 @@ func TestComposeIssueMessage(t *testing.T) { recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}, {Name: "Test2", Email: "test2@gitea.com"}} msgs, err := composeIssueCommentMessages(&mailCommentContext{ - Issue: 
issue, Doer: doer, ActionType: models.ActionCreateIssue, + Context: context.TODO(), // TODO: use a correct context + Issue: issue, Doer: doer, ActionType: models.ActionCreateIssue, Content: "test body", }, "en-US", recipients, false, "issue create") assert.NoError(t, err) @@ -145,13 +149,15 @@ func TestTemplateSelection(t *testing.T) { } msg := testComposeIssueCommentMessage(t, &mailCommentContext{ - Issue: issue, Doer: doer, ActionType: models.ActionCreateIssue, + Context: context.TODO(), // TODO: use a correct context + Issue: issue, Doer: doer, ActionType: models.ActionCreateIssue, Content: "test body", }, recipients, false, "TestTemplateSelection") expect(t, msg, "issue/new/subject", "issue/new/body") msg = testComposeIssueCommentMessage(t, &mailCommentContext{ - Issue: issue, Doer: doer, ActionType: models.ActionCommentIssue, + Context: context.TODO(), // TODO: use a correct context + Issue: issue, Doer: doer, ActionType: models.ActionCommentIssue, Content: "test body", Comment: comment, }, recipients, false, "TestTemplateSelection") expect(t, msg, "issue/default/subject", "issue/default/body") @@ -159,13 +165,15 @@ func TestTemplateSelection(t *testing.T) { pull := unittest.AssertExistsAndLoadBean(t, &models.Issue{ID: 2, Repo: repo, Poster: doer}).(*models.Issue) comment = unittest.AssertExistsAndLoadBean(t, &models.Comment{ID: 4, Issue: pull}).(*models.Comment) msg = testComposeIssueCommentMessage(t, &mailCommentContext{ - Issue: pull, Doer: doer, ActionType: models.ActionCommentPull, + Context: context.TODO(), // TODO: use a correct context + Issue: pull, Doer: doer, ActionType: models.ActionCommentPull, Content: "test body", Comment: comment, }, recipients, false, "TestTemplateSelection") expect(t, msg, "pull/comment/subject", "pull/comment/body") msg = testComposeIssueCommentMessage(t, &mailCommentContext{ - Issue: issue, Doer: doer, ActionType: models.ActionCloseIssue, + Context: context.TODO(), // TODO: use a correct context + Issue: issue, Doer: doer, ActionType: models.ActionCloseIssue, Content: "test body", Comment: comment, }, recipients, false, "TestTemplateSelection") expect(t, msg, "Re: [user2/repo1] issue1 (#1)", "issue/close/body") @@ -173,7 +181,7 @@ func TestTemplateSelection(t *testing.T) { func TestTemplateServices(t *testing.T) { doer, _, issue, comment := prepareMailerTest(t) - assert.NoError(t, issue.LoadRepo()) + assert.NoError(t, issue.LoadRepo(db.DefaultContext)) expect := func(t *testing.T, issue *models.Issue, comment *models.Comment, doer *user_model.User, actionType models.ActionType, fromMention bool, tplSubject, tplBody, expSubject, expBody string, @@ -184,7 +192,8 @@ func TestTemplateServices(t *testing.T) { recipients := []*user_model.User{{Name: "Test", Email: "test@gitea.com"}} msg := testComposeIssueCommentMessage(t, &mailCommentContext{ - Issue: issue, Doer: doer, ActionType: actionType, + Context: context.TODO(), // TODO: use a correct context + Issue: issue, Doer: doer, ActionType: actionType, Content: "test body", Comment: comment, }, recipients, fromMention, "TestTemplateServices") @@ -226,7 +235,7 @@ func testComposeIssueCommentMessage(t *testing.T, ctx *mailCommentContext, recip func TestGenerateAdditionalHeaders(t *testing.T) { doer, _, issue, _ := prepareMailerTest(t) - ctx := &mailCommentContext{Issue: issue, Doer: doer} + ctx := &mailCommentContext{Context: context.TODO() /* TODO: use a correct context */, Issue: issue, Doer: doer} recipient := &user_model.User{Name: "Test", Email: "test@gitea.com"} headers := 
generateAdditionalHeaders(ctx, "dummy-reason", recipient) diff --git a/services/mailer/main_test.go b/services/mailer/main_test.go index ae3b2c12b4..0bd154113f 100644 --- a/services/mailer/main_test.go +++ b/services/mailer/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } diff --git a/services/migrations/codebase.go b/services/migrations/codebase.go index be0b5d4004..bb74c0a49d 100644 --- a/services/migrations/codebase.go +++ b/services/migrations/codebase.go @@ -266,17 +266,7 @@ func (d *CodebaseDownloader) GetLabels() ([]*base.Label, error) { } type codebaseIssueContext struct { - foreignID int64 - localID int64 - Comments []*base.Comment -} - -func (c codebaseIssueContext) LocalID() int64 { - return c.localID -} - -func (c codebaseIssueContext) ForeignID() int64 { - return c.foreignID + Comments []*base.Comment } // GetIssues returns issues, limits are not supported @@ -402,10 +392,9 @@ func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, Labels: []*base.Label{ {Name: issue.Type.Name}, }, + ForeignIndex: issue.TicketID.Value, Context: codebaseIssueContext{ - foreignID: issue.TicketID.Value, - localID: issue.TicketID.Value, - Comments: comments[1:], + Comments: comments[1:], }, }) @@ -418,10 +407,10 @@ func (d *CodebaseDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, } // GetComments returns comments -func (d *CodebaseDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { - context, ok := opts.Context.(codebaseIssueContext) +func (d *CodebaseDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { + context, ok := commentable.GetContext().(codebaseIssueContext) if !ok { - return nil, false, fmt.Errorf("unexpected comment context: %+v", opts.Context) + return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext()) } return context.Comments, true, nil @@ -570,10 +559,9 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq SHA: d.getHeadCommit(rawMergeRequest.TargetRef), RepoName: d.repoName, }, + ForeignIndex: rawMergeRequest.ID.Value, Context: codebaseIssueContext{ - foreignID: rawMergeRequest.ID.Value, - localID: number, - Comments: comments[1:], + Comments: comments[1:], }, }) } @@ -581,16 +569,6 @@ func (d *CodebaseDownloader) GetPullRequests(page, perPage int) ([]*base.PullReq return pullRequests, true, nil } -// GetReviews returns pull requests reviews -func (d *CodebaseDownloader) GetReviews(context base.IssueContext) ([]*base.Review, error) { - return []*base.Review{}, nil -} - -// GetTopics return repository topics -func (d *CodebaseDownloader) GetTopics() ([]string, error) { - return []string{}, nil -} - func (d *CodebaseDownloader) tryGetUser(userID int64) *codebaseUser { if len(d.userMap) == 0 { var rawUsers struct { diff --git a/services/migrations/codebase_test.go b/services/migrations/codebase_test.go index ef39b9f146..cb70a2bf75 100644 --- a/services/migrations/codebase_test.go +++ b/services/migrations/codebase_test.go @@ -108,9 +108,7 @@ func TestCodebaseDownloadRepo(t *testing.T) { }, }, issues) - comments, _, err := downloader.GetComments(base.GetCommentOptions{ - Context: issues[0].Context, - }) + comments, _, err := downloader.GetComments(issues[0]) assert.NoError(t, err) assertCommentsEqual(t, []*base.Comment{ { @@ -148,7 +146,7 @@ func 
TestCodebaseDownloadRepo(t *testing.T) { }, }, prs) - rvs, err := downloader.GetReviews(prs[0].Context) + rvs, err := downloader.GetReviews(prs[0]) assert.NoError(t, err) assert.Empty(t, rvs) } diff --git a/services/migrations/dump.go b/services/migrations/dump.go index 9a093ef298..6410aa1ee0 100644 --- a/services/migrations/dump.go +++ b/services/migrations/dump.go @@ -22,6 +22,7 @@ import ( "code.gitea.io/gitea/modules/log" base "code.gitea.io/gitea/modules/migration" "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/structs" "gopkg.in/yaml.v2" @@ -149,9 +150,10 @@ func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOp } err = git.Clone(g.ctx, remoteAddr, repoPath, git.CloneRepoOptions{ - Mirror: true, - Quiet: true, - Timeout: migrateTimeout, + Mirror: true, + Quiet: true, + Timeout: migrateTimeout, + SkipTLSVerify: setting.Migrations.SkipTLSVerify, }) if err != nil { return fmt.Errorf("Clone: %v", err) @@ -166,10 +168,11 @@ func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOp } if err := git.Clone(g.ctx, wikiRemotePath, wikiPath, git.CloneRepoOptions{ - Mirror: true, - Quiet: true, - Timeout: migrateTimeout, - Branch: "master", + Mirror: true, + Quiet: true, + Timeout: migrateTimeout, + Branch: "master", + SkipTLSVerify: setting.Migrations.SkipTLSVerify, }); err != nil { log.Warn("Clone wiki: %v", err) if err := os.RemoveAll(wikiPath); err != nil { @@ -179,7 +182,7 @@ func (g *RepositoryDumper) CreateRepo(repo *base.Repository, opts base.MigrateOp } } - g.gitRepo, err = git.OpenRepositoryCtx(g.ctx, g.gitPath()) + g.gitRepo, err = git.OpenRepository(g.ctx, g.gitPath()) return err } @@ -476,7 +479,7 @@ func (g *RepositoryDumper) CreatePullRequests(prs ...*base.PullRequest) error { } if ok { - _, err = git.NewCommand(g.ctx, "fetch", remote, pr.Head.Ref).RunInDir(g.gitPath()) + _, _, err = git.NewCommand(g.ctx, "fetch", remote, pr.Head.Ref).RunStdString(&git.RunOpts{Dir: g.gitPath()}) if err != nil { log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err) } else { diff --git a/services/migrations/gitbucket.go b/services/migrations/gitbucket.go index c4fb0df93a..92b6cac738 100644 --- a/services/migrations/gitbucket.go +++ b/services/migrations/gitbucket.go @@ -64,8 +64,3 @@ func NewGitBucketDownloader(ctx context.Context, baseURL, userName, password, to func (g *GitBucketDownloader) SupportGetRepoComments() bool { return false } - -// GetReviews is not supported -func (g *GitBucketDownloader) GetReviews(context base.IssueContext) ([]*base.Review, error) { - return nil, &base.ErrNotSupported{Entity: "Reviews"} -} diff --git a/services/migrations/gitea_downloader.go b/services/migrations/gitea_downloader.go index be3c6c1202..3c02e112ca 100644 --- a/services/migrations/gitea_downloader.go +++ b/services/migrations/gitea_downloader.go @@ -415,22 +415,22 @@ func (g *GiteaDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, err } allIssues = append(allIssues, &base.Issue{ - Title: issue.Title, - Number: issue.Index, - PosterID: issue.Poster.ID, - PosterName: issue.Poster.UserName, - PosterEmail: issue.Poster.Email, - Content: issue.Body, - Milestone: milestone, - State: string(issue.State), - Created: issue.Created, - Updated: issue.Updated, - Closed: issue.Closed, - Reactions: reactions, - Labels: labels, - Assignees: assignees, - IsLocked: issue.IsLocked, - Context: base.BasicIssueContext(issue.Index), + Title: issue.Title, + Number: issue.Index, + PosterID: 
issue.Poster.ID, + PosterName: issue.Poster.UserName, + PosterEmail: issue.Poster.Email, + Content: issue.Body, + Milestone: milestone, + State: string(issue.State), + Created: issue.Created, + Updated: issue.Updated, + Closed: issue.Closed, + Reactions: reactions, + Labels: labels, + Assignees: assignees, + IsLocked: issue.IsLocked, + ForeignIndex: issue.Index, }) } @@ -442,7 +442,7 @@ func (g *GiteaDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, err } // GetComments returns comments according issueNumber -func (g *GiteaDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { +func (g *GiteaDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { allComments := make([]*base.Comment, 0, g.maxPerPage) for i := 1; ; i++ { @@ -453,26 +453,26 @@ func (g *GiteaDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comm default: } - comments, _, err := g.client.ListIssueComments(g.repoOwner, g.repoName, opts.Context.ForeignID(), gitea_sdk.ListIssueCommentOptions{ListOptions: gitea_sdk.ListOptions{ + comments, _, err := g.client.ListIssueComments(g.repoOwner, g.repoName, commentable.GetForeignIndex(), gitea_sdk.ListIssueCommentOptions{ListOptions: gitea_sdk.ListOptions{ PageSize: g.maxPerPage, Page: i, }}) if err != nil { - return nil, false, fmt.Errorf("error while listing comments for issue #%d. Error: %v", opts.Context.ForeignID(), err) + return nil, false, fmt.Errorf("error while listing comments for issue #%d. Error: %v", commentable.GetForeignIndex(), err) } for _, comment := range comments { reactions, err := g.getCommentReactions(comment.ID) if err != nil { - log.Warn("Unable to load comment reactions during migrating issue #%d for comment %d to %s/%s. Error: %v", opts.Context.ForeignID(), comment.ID, g.repoOwner, g.repoName, err) + log.Warn("Unable to load comment reactions during migrating issue #%d for comment %d to %s/%s. Error: %v", commentable.GetForeignIndex(), comment.ID, g.repoOwner, g.repoName, err) if err2 := admin_model.CreateRepositoryNotice( - fmt.Sprintf("Unable to load reactions during migrating issue #%d for comment %d to %s/%s. Error: %v", opts.Context.ForeignID(), comment.ID, g.repoOwner, g.repoName, err)); err2 != nil { + fmt.Sprintf("Unable to load reactions during migrating issue #%d for comment %d to %s/%s. 
Error: %v", commentable.GetForeignIndex(), comment.ID, g.repoOwner, g.repoName, err)); err2 != nil { log.Error("create repository notice failed: ", err2) } } allComments = append(allComments, &base.Comment{ - IssueIndex: opts.Context.LocalID(), + IssueIndex: commentable.GetLocalIndex(), Index: comment.ID, PosterID: comment.Poster.ID, PosterName: comment.Poster.UserName, @@ -602,7 +602,7 @@ func (g *GiteaDownloader) GetPullRequests(page, perPage int) ([]*base.PullReques RepoName: g.repoName, OwnerName: g.repoOwner, }, - Context: base.BasicIssueContext(pr.Index), + ForeignIndex: pr.Index, }) } @@ -614,7 +614,7 @@ func (g *GiteaDownloader) GetPullRequests(page, perPage int) ([]*base.PullReques } // GetReviews returns pull requests review -func (g *GiteaDownloader) GetReviews(context base.IssueContext) ([]*base.Review, error) { +func (g *GiteaDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) { if err := g.client.CheckServerVersionConstraint(">=1.12"); err != nil { log.Info("GiteaDownloader: instance to old, skip GetReviews") return nil, nil @@ -630,7 +630,7 @@ func (g *GiteaDownloader) GetReviews(context base.IssueContext) ([]*base.Review, default: } - prl, _, err := g.client.ListPullReviews(g.repoOwner, g.repoName, context.ForeignID(), gitea_sdk.ListPullReviewsOptions{ListOptions: gitea_sdk.ListOptions{ + prl, _, err := g.client.ListPullReviews(g.repoOwner, g.repoName, reviewable.GetForeignIndex(), gitea_sdk.ListPullReviewsOptions{ListOptions: gitea_sdk.ListOptions{ Page: i, PageSize: g.maxPerPage, }}) @@ -640,7 +640,7 @@ func (g *GiteaDownloader) GetReviews(context base.IssueContext) ([]*base.Review, for _, pr := range prl { - rcl, _, err := g.client.ListPullReviewComments(g.repoOwner, g.repoName, context.ForeignID(), pr.ID) + rcl, _, err := g.client.ListPullReviewComments(g.repoOwner, g.repoName, reviewable.GetForeignIndex(), pr.ID) if err != nil { return nil, err } @@ -666,7 +666,7 @@ func (g *GiteaDownloader) GetReviews(context base.IssueContext) ([]*base.Review, allReviews = append(allReviews, &base.Review{ ID: pr.ID, - IssueIndex: context.LocalID(), + IssueIndex: reviewable.GetLocalIndex(), ReviewerID: pr.Reviewer.ID, ReviewerName: pr.Reviewer.UserName, Official: pr.Official, diff --git a/services/migrations/gitea_downloader_test.go b/services/migrations/gitea_downloader_test.go index 2c70dc4213..601b0a7c79 100644 --- a/services/migrations/gitea_downloader_test.go +++ b/services/migrations/gitea_downloader_test.go @@ -25,7 +25,7 @@ func TestGiteaDownloadRepo(t *testing.T) { } resp, err := http.Get("https://gitea.com/gitea") - if err != nil || resp.StatusCode != 200 { + if err != nil || resp.StatusCode != http.StatusOK { t.Skipf("Can't reach https://gitea.com, skipping %s", t.Name()) } @@ -198,9 +198,7 @@ func TestGiteaDownloadRepo(t *testing.T) { }, }, issues) - comments, _, err := downloader.GetComments(base.GetCommentOptions{ - Context: base.BasicIssueContext(4), - }) + comments, _, err := downloader.GetComments(&base.Issue{Number: 4, ForeignIndex: 4}) assert.NoError(t, err) assertCommentsEqual(t, []*base.Comment{ { @@ -265,7 +263,7 @@ func TestGiteaDownloadRepo(t *testing.T) { PatchURL: "https://gitea.com/gitea/test_repo/pulls/12.patch", }, prs[1]) - reviews, err := downloader.GetReviews(base.BasicIssueContext(7)) + reviews, err := downloader.GetReviews(&base.Issue{Number: 7, ForeignIndex: 7}) assert.NoError(t, err) assertReviewsEqual(t, []*base.Review{ { diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go index 
21c2dc8f8e..34dd59d7fc 100644 --- a/services/migrations/gitea_uploader.go +++ b/services/migrations/gitea_uploader.go @@ -7,15 +7,19 @@ package migrations import ( "context" + "errors" "fmt" "io" "os" "path/filepath" + "strconv" "strings" "time" "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/foreignreference" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" @@ -76,7 +80,7 @@ func (g *GiteaLocalUploader) MaxBatchInsertSize(tp string) int { case "comment": return db.MaxBatchInsertSize(new(models.Comment)) case "milestone": - return db.MaxBatchInsertSize(new(models.Milestone)) + return db.MaxBatchInsertSize(new(issues_model.Milestone)) case "label": return db.MaxBatchInsertSize(new(models.Label)) case "release": @@ -134,7 +138,7 @@ func (g *GiteaLocalUploader) CreateRepo(repo *base.Repository, opts base.Migrate if err != nil { return err } - g.gitRepo, err = git.OpenRepositoryCtx(g.ctx, r.RepoPath()) + g.gitRepo, err = git.OpenRepository(g.ctx, r.RepoPath()) return err } @@ -161,7 +165,7 @@ func (g *GiteaLocalUploader) CreateTopics(topics ...string) error { // CreateMilestones creates milestones func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) error { - mss := make([]*models.Milestone, 0, len(milestones)) + mss := make([]*issues_model.Milestone, 0, len(milestones)) for _, milestone := range milestones { var deadline timeutil.TimeStamp if milestone.Deadline != nil { @@ -184,7 +188,7 @@ func (g *GiteaLocalUploader) CreateMilestones(milestones ...*base.Milestone) err milestone.Updated = &milestone.Created } - ms := models.Milestone{ + ms := issues_model.Milestone{ RepoID: g.repo.ID, Name: milestone.Title, Content: milestone.Description, @@ -250,7 +254,6 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error { LowerTagName: strings.ToLower(release.TagName), Target: release.TargetCommitish, Title: release.Name, - Sha1: release.TargetCommitish, Note: release.Body, IsDraft: release.Draft, IsPrerelease: release.Prerelease, @@ -262,15 +265,18 @@ func (g *GiteaLocalUploader) CreateReleases(releases ...*base.Release) error { return err } - // calc NumCommits if no draft - if !release.Draft { + // calc NumCommits if possible + if rel.TagName != "" { commit, err := g.gitRepo.GetTagCommit(rel.TagName) - if err != nil { - return fmt.Errorf("GetTagCommit[%v]: %v", rel.TagName, err) - } - rel.NumCommits, err = commit.CommitsCount() - if err != nil { - return fmt.Errorf("CommitsCount: %v", err) + if !errors.Is(err, git.ErrNotExist{}) { + if err != nil { + return fmt.Errorf("GetTagCommit[%v]: %v", rel.TagName, err) + } + rel.Sha1 = commit.ID.String() + rel.NumCommits, err = commit.CommitsCount() + if err != nil { + return fmt.Errorf("CommitsCount: %v", err) + } } } @@ -373,6 +379,12 @@ func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error { Labels: labels, CreatedUnix: timeutil.TimeStamp(issue.Created.Unix()), UpdatedUnix: timeutil.TimeStamp(issue.Updated.Unix()), + ForeignReference: &foreignreference.ForeignReference{ + LocalIndex: issue.GetLocalIndex(), + ForeignIndex: strconv.FormatInt(issue.GetForeignIndex(), 10), + RepoID: g.repo.ID, + Type: foreignreference.TypeIssue, + }, } if err := g.remapUser(issue, &is); err != nil { @@ -384,7 +396,7 @@ func (g *GiteaLocalUploader) CreateIssues(issues ...*base.Issue) error { } // add reactions for _, reaction := range 
issue.Reactions { - res := models.Reaction{ + res := issues_model.Reaction{ Type: reaction.Content, CreatedUnix: timeutil.TimeStampNow(), } @@ -440,7 +452,7 @@ func (g *GiteaLocalUploader) CreateComments(comments ...*base.Comment) error { // add reactions for _, reaction := range comment.Reactions { - res := models.Reaction{ + res := issues_model.Reaction{ Type: reaction.Content, CreatedUnix: timeutil.TimeStampNow(), } @@ -544,7 +556,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head } if ok { - _, err = git.NewCommand(g.ctx, "fetch", remote, pr.Head.Ref).RunInDir(g.repo.RepoPath()) + _, _, err = git.NewCommand(g.ctx, "fetch", "--no-tags", "--", remote, pr.Head.Ref).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()}) if err != nil { log.Error("Fetch branch from %s failed: %v", pr.Head.CloneURL, err) } else { @@ -568,7 +580,7 @@ func (g *GiteaLocalUploader) updateGitForPullRequest(pr *base.PullRequest) (head } else { head = pr.Head.Ref // Ensure the closed PR SHA still points to an existing ref - _, err = git.NewCommand(g.ctx, "rev-list", "--quiet", "-1", pr.Head.SHA).RunInDir(g.repo.RepoPath()) + _, _, err = git.NewCommand(g.ctx, "rev-list", "--quiet", "-1", pr.Head.SHA).RunStdString(&git.RunOpts{Dir: g.repo.RepoPath()}) if err != nil { if pr.Head.SHA != "" { // Git update-ref remove bad references with a relative path @@ -638,7 +650,7 @@ func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*models.PullR // add reactions for _, reaction := range pr.Reactions { - res := models.Reaction{ + res := issues_model.Reaction{ Type: reaction.Content, CreatedUnix: timeutil.TimeStampNow(), } @@ -745,13 +757,13 @@ func (g *GiteaLocalUploader) CreateReviews(reviews ...*base.Review) error { _ = reader.Close() _ = writer.Close() }() - go func() { + go func(comment *base.ReviewComment) { if err := git.GetRepoRawDiffForFile(g.gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, comment.TreePath, writer); err != nil { // We should ignore the error since the commit maybe removed when force push to the pull request log.Warn("GetRepoRawDiffForFile failed when migrating [%s, %s, %s, %s]: %v", g.gitRepo.Path, pr.MergeBase, headCommitID, comment.TreePath, err) } _ = writer.Close() - }() + }(comment) patch, _ = git.CutDiffAroundLine(reader, int64((&models.Comment{Line: int64(line + comment.Position - 1)}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines) @@ -809,7 +821,7 @@ func (g *GiteaLocalUploader) Finish() error { return err } - if err := models.UpdateRepoStats(db.DefaultContext, g.repo.ID); err != nil { + if err := models.UpdateRepoStats(g.ctx, g.repo.ID); err != nil { return err } diff --git a/services/migrations/gitea_uploader_test.go b/services/migrations/gitea_uploader_test.go index 34107b7f6a..f57c8e2333 100644 --- a/services/migrations/gitea_uploader_test.go +++ b/services/migrations/gitea_uploader_test.go @@ -17,6 +17,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + issues_model "code.gitea.io/gitea/models/issues" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -65,14 +66,14 @@ func TestGiteaUploadRepo(t *testing.T) { assert.True(t, repo.HasWiki()) assert.EqualValues(t, repo_model.RepositoryReady, repo.Status) - milestones, _, err := models.GetMilestones(models.GetMilestonesOption{ + milestones, _, err := issues_model.GetMilestones(issues_model.GetMilestonesOption{ RepoID: repo.ID, State: structs.StateOpen, }) assert.NoError(t, err) 
assert.Len(t, milestones, 1) - milestones, _, err = models.GetMilestones(models.GetMilestonesOption{ + milestones, _, err = issues_model.GetMilestones(issues_model.GetMilestonesOption{ RepoID: repo.ID, State: structs.StateClosed, }) @@ -104,7 +105,7 @@ func TestGiteaUploadRepo(t *testing.T) { assert.Len(t, releases, 1) issues, err := models.Issues(&models.IssuesOptions{ - RepoIDs: []int64{repo.ID}, + RepoID: repo.ID, IsPull: util.OptionalBoolFalse, SortType: "oldest", }) @@ -233,7 +234,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { fromRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) baseRef := "master" assert.NoError(t, git.InitRepository(git.DefaultContext, fromRepo.RepoPath(), false)) - _, err := git.NewCommand(git.DefaultContext, "symbolic-ref", "HEAD", git.BranchPrefix+baseRef).RunInDir(fromRepo.RepoPath()) + err := git.NewCommand(git.DefaultContext, "symbolic-ref", "HEAD", git.BranchPrefix+baseRef).Run(&git.RunOpts{Dir: fromRepo.RepoPath()}) assert.NoError(t, err) assert.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# Testing Repository\n\nOriginally created in: %s", fromRepo.RepoPath())), 0o644)) assert.NoError(t, git.AddChanges(fromRepo.RepoPath(), true)) @@ -247,7 +248,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { Author: &signature, Message: "Initial Commit", })) - fromGitRepo, err := git.OpenRepositoryCtx(git.DefaultContext, fromRepo.RepoPath()) + fromGitRepo, err := git.OpenRepository(git.DefaultContext, fromRepo.RepoPath()) assert.NoError(t, err) defer fromGitRepo.Close() baseSHA, err := fromGitRepo.GetBranchCommitID(baseRef) @@ -257,7 +258,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { // fromRepo branch1 // headRef := "branch1" - _, err = git.NewCommand(git.DefaultContext, "checkout", "-b", headRef).RunInDir(fromRepo.RepoPath()) + _, _, err = git.NewCommand(git.DefaultContext, "checkout", "-b", headRef).RunStdString(&git.RunOpts{Dir: fromRepo.RepoPath()}) assert.NoError(t, err) assert.NoError(t, os.WriteFile(filepath.Join(fromRepo.RepoPath(), "README.md"), []byte("SOMETHING"), 0o644)) assert.NoError(t, git.AddChanges(fromRepo.RepoPath(), true)) @@ -281,7 +282,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { assert.NoError(t, git.CloneWithArgs(git.DefaultContext, fromRepo.RepoPath(), forkRepo.RepoPath(), []string{}, git.CloneRepoOptions{ Branch: headRef, })) - _, err = git.NewCommand(git.DefaultContext, "checkout", "-b", forkHeadRef).RunInDir(forkRepo.RepoPath()) + _, _, err = git.NewCommand(git.DefaultContext, "checkout", "-b", forkHeadRef).RunStdString(&git.RunOpts{Dir: forkRepo.RepoPath()}) assert.NoError(t, err) assert.NoError(t, os.WriteFile(filepath.Join(forkRepo.RepoPath(), "README.md"), []byte(fmt.Sprintf("# branch2 %s", forkRepo.RepoPath())), 0o644)) assert.NoError(t, git.AddChanges(forkRepo.RepoPath(), true)) @@ -290,7 +291,7 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) { Author: &signature, Message: "branch2 commit", })) - forkGitRepo, err := git.OpenRepositoryCtx(git.DefaultContext, forkRepo.RepoPath()) + forkGitRepo, err := git.OpenRepository(git.DefaultContext, forkRepo.RepoPath()) assert.NoError(t, err) defer forkGitRepo.Close() forkHeadSHA, err := forkGitRepo.GetBranchCommitID(forkHeadRef) diff --git a/services/migrations/github.go b/services/migrations/github.go index f86ba94393..faf0cf0794 100644 --- a/services/migrations/github.go +++ b/services/migrations/github.go @@ 
-446,22 +446,22 @@ func (g *GithubDownloaderV3) GetIssues(page, perPage int) ([]*base.Issue, bool, } allIssues = append(allIssues, &base.Issue{ - Title: *issue.Title, - Number: int64(*issue.Number), - PosterID: issue.GetUser().GetID(), - PosterName: issue.GetUser().GetLogin(), - PosterEmail: issue.GetUser().GetEmail(), - Content: issue.GetBody(), - Milestone: issue.GetMilestone().GetTitle(), - State: issue.GetState(), - Created: issue.GetCreatedAt(), - Updated: issue.GetUpdatedAt(), - Labels: labels, - Reactions: reactions, - Closed: issue.ClosedAt, - IsLocked: issue.GetLocked(), - Assignees: assignees, - Context: base.BasicIssueContext(*issue.Number), + Title: *issue.Title, + Number: int64(*issue.Number), + PosterID: issue.GetUser().GetID(), + PosterName: issue.GetUser().GetLogin(), + PosterEmail: issue.GetUser().GetEmail(), + Content: issue.GetBody(), + Milestone: issue.GetMilestone().GetTitle(), + State: issue.GetState(), + Created: issue.GetCreatedAt(), + Updated: issue.GetUpdatedAt(), + Labels: labels, + Reactions: reactions, + Closed: issue.ClosedAt, + IsLocked: issue.GetLocked(), + Assignees: assignees, + ForeignIndex: int64(*issue.Number), }) } @@ -474,16 +474,12 @@ func (g *GithubDownloaderV3) SupportGetRepoComments() bool { } // GetComments returns comments according issueNumber -func (g *GithubDownloaderV3) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { - if opts.Context != nil { - comments, err := g.getComments(opts.Context) - return comments, false, err - } - - return g.GetAllComments(opts.Page, opts.PageSize) +func (g *GithubDownloaderV3) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { + comments, err := g.getComments(commentable) + return comments, false, err } -func (g *GithubDownloaderV3) getComments(issueContext base.IssueContext) ([]*base.Comment, error) { +func (g *GithubDownloaderV3) getComments(commentable base.Commentable) ([]*base.Comment, error) { var ( allComments = make([]*base.Comment, 0, g.maxPerPage) created = "created" @@ -498,7 +494,7 @@ func (g *GithubDownloaderV3) getComments(issueContext base.IssueContext) ([]*bas } for { g.waitAndPickClient() - comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, int(issueContext.ForeignID()), opt) + comments, resp, err := g.getClient().Issues.ListComments(g.ctx, g.repoOwner, g.repoName, int(commentable.GetForeignIndex()), opt) if err != nil { return nil, fmt.Errorf("error while listing repos: %v", err) } @@ -531,7 +527,7 @@ func (g *GithubDownloaderV3) getComments(issueContext base.IssueContext) ([]*bas } allComments = append(allComments, &base.Comment{ - IssueIndex: issueContext.LocalID(), + IssueIndex: commentable.GetLocalIndex(), Index: comment.GetID(), PosterID: comment.GetUser().GetID(), PosterName: comment.GetUser().GetLogin(), @@ -709,9 +705,9 @@ func (g *GithubDownloaderV3) GetPullRequests(page, perPage int) ([]*base.PullReq RepoName: pr.GetBase().GetRepo().GetName(), OwnerName: pr.GetBase().GetUser().GetLogin(), }, - PatchURL: pr.GetPatchURL(), - Reactions: reactions, - Context: base.BasicIssueContext(*pr.Number), + PatchURL: pr.GetPatchURL(), + Reactions: reactions, + ForeignIndex: int64(*pr.Number), }) } @@ -777,28 +773,28 @@ func (g *GithubDownloaderV3) convertGithubReviewComments(cs []*github.PullReques } // GetReviews returns pull requests review -func (g *GithubDownloaderV3) GetReviews(context base.IssueContext) ([]*base.Review, error) { +func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) 
([]*base.Review, error) { allReviews := make([]*base.Review, 0, g.maxPerPage) opt := &github.ListOptions{ PerPage: g.maxPerPage, } for { g.waitAndPickClient() - reviews, resp, err := g.getClient().PullRequests.ListReviews(g.ctx, g.repoOwner, g.repoName, int(context.ForeignID()), opt) + reviews, resp, err := g.getClient().PullRequests.ListReviews(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), opt) if err != nil { return nil, fmt.Errorf("error while listing repos: %v", err) } g.setRate(&resp.Rate) for _, review := range reviews { r := convertGithubReview(review) - r.IssueIndex = context.LocalID() + r.IssueIndex = reviewable.GetLocalIndex() // retrieve all review comments opt2 := &github.ListOptions{ PerPage: g.maxPerPage, } for { g.waitAndPickClient() - reviewComments, resp, err := g.getClient().PullRequests.ListReviewComments(g.ctx, g.repoOwner, g.repoName, int(context.ForeignID()), review.GetID(), opt2) + reviewComments, resp, err := g.getClient().PullRequests.ListReviewComments(g.ctx, g.repoOwner, g.repoName, int(reviewable.GetForeignIndex()), review.GetID(), opt2) if err != nil { return nil, fmt.Errorf("error while listing repos: %v", err) } diff --git a/services/migrations/github_test.go b/services/migrations/github_test.go index 7540037d92..90c1fcaef5 100644 --- a/services/migrations/github_test.go +++ b/services/migrations/github_test.go @@ -215,9 +215,7 @@ func TestGitHubDownloadRepo(t *testing.T) { }, issues) // downloader.GetComments() - comments, _, err := downloader.GetComments(base.GetCommentOptions{ - Context: base.BasicIssueContext(2), - }) + comments, _, err := downloader.GetComments(&base.Issue{Number: 2, ForeignIndex: 2}) assert.NoError(t, err) assertCommentsEqual(t, []*base.Comment{ { @@ -286,7 +284,7 @@ func TestGitHubDownloadRepo(t *testing.T) { Merged: true, MergedTime: timePtr(time.Date(2019, 11, 12, 21, 39, 27, 0, time.UTC)), MergeCommitSHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2", - Context: base.BasicIssueContext(3), + ForeignIndex: 3, }, { Number: 4, @@ -333,11 +331,11 @@ func TestGitHubDownloadRepo(t *testing.T) { Content: "+1", }, }, - Context: base.BasicIssueContext(4), + ForeignIndex: 4, }, }, prs) - reviews, err := downloader.GetReviews(base.BasicIssueContext(3)) + reviews, err := downloader.GetReviews(&base.PullRequest{Number: 3, ForeignIndex: 3}) assert.NoError(t, err) assertReviewsEqual(t, []*base.Review{ { @@ -369,7 +367,7 @@ func TestGitHubDownloadRepo(t *testing.T) { }, }, reviews) - reviews, err = downloader.GetReviews(base.BasicIssueContext(4)) + reviews, err = downloader.GetReviews(&base.PullRequest{Number: 4, ForeignIndex: 4}) assert.NoError(t, err) assertReviewsEqual(t, []*base.Review{ { diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go index c05d081e9a..549e3cb659 100644 --- a/services/migrations/gitlab.go +++ b/services/migrations/gitlab.go @@ -91,7 +91,7 @@ func NewGitlabDownloader(ctx context.Context, baseURL, repoPath, username, passw u, _ := url.Parse(baseURL) for len(pathParts) >= 2 { _, resp, err = gitlabClient.Version.GetVersion() - if err == nil || resp != nil && resp.StatusCode == 401 { + if err == nil || resp != nil && resp.StatusCode == http.StatusUnauthorized { err = nil // if no authentication given, this still should work break } @@ -349,19 +349,9 @@ func (g *GitlabDownloader) GetReleases() ([]*base.Release, error) { } type gitlabIssueContext struct { - foreignID int64 - localID int64 IsMergeRequest bool } -func (c gitlabIssueContext) LocalID() int64 { - return c.localID -} - -func (c 
gitlabIssueContext) ForeignID() int64 { - return c.foreignID -} - // GetIssues returns issues according start and limit // Note: issue label description and colors are not supported by the go-gitlab library at this time func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) { @@ -421,24 +411,21 @@ func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er } allIssues = append(allIssues, &base.Issue{ - Title: issue.Title, - Number: int64(issue.IID), - PosterID: int64(issue.Author.ID), - PosterName: issue.Author.Username, - Content: issue.Description, - Milestone: milestone, - State: issue.State, - Created: *issue.CreatedAt, - Labels: labels, - Reactions: reactions, - Closed: issue.ClosedAt, - IsLocked: issue.DiscussionLocked, - Updated: *issue.UpdatedAt, - Context: gitlabIssueContext{ - foreignID: int64(issue.IID), - localID: int64(issue.IID), - IsMergeRequest: false, - }, + Title: issue.Title, + Number: int64(issue.IID), + PosterID: int64(issue.Author.ID), + PosterName: issue.Author.Username, + Content: issue.Description, + Milestone: milestone, + State: issue.State, + Created: *issue.CreatedAt, + Labels: labels, + Reactions: reactions, + Closed: issue.ClosedAt, + IsLocked: issue.DiscussionLocked, + Updated: *issue.UpdatedAt, + ForeignIndex: int64(issue.IID), + Context: gitlabIssueContext{IsMergeRequest: false}, }) // increment issueCount, to be used in GetPullRequests() @@ -450,10 +437,10 @@ func (g *GitlabDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er // GetComments returns comments according issueNumber // TODO: figure out how to transfer comment reactions -func (g *GitlabDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { - context, ok := opts.Context.(gitlabIssueContext) +func (g *GitlabDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { + context, ok := commentable.GetContext().(gitlabIssueContext) if !ok { - return nil, false, fmt.Errorf("unexpected context: %+v", opts.Context) + return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext()) } allComments := make([]*base.Comment, 0, g.maxPerPage) @@ -465,12 +452,12 @@ func (g *GitlabDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Com var resp *gitlab.Response var err error if !context.IsMergeRequest { - comments, resp, err = g.client.Discussions.ListIssueDiscussions(g.repoID, int(context.ForeignID()), &gitlab.ListIssueDiscussionsOptions{ + comments, resp, err = g.client.Discussions.ListIssueDiscussions(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListIssueDiscussionsOptions{ Page: page, PerPage: g.maxPerPage, }, nil, gitlab.WithContext(g.ctx)) } else { - comments, resp, err = g.client.Discussions.ListMergeRequestDiscussions(g.repoID, int(context.ForeignID()), &gitlab.ListMergeRequestDiscussionsOptions{ + comments, resp, err = g.client.Discussions.ListMergeRequestDiscussions(g.repoID, int(commentable.GetForeignIndex()), &gitlab.ListMergeRequestDiscussionsOptions{ Page: page, PerPage: g.maxPerPage, }, nil, gitlab.WithContext(g.ctx)) @@ -484,7 +471,7 @@ func (g *GitlabDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Com if !comment.IndividualNote { for _, note := range comment.Notes { allComments = append(allComments, &base.Comment{ - IssueIndex: context.LocalID(), + IssueIndex: commentable.GetLocalIndex(), Index: int64(note.ID), PosterID: int64(note.Author.ID), PosterName: note.Author.Username, @@ -496,7 +483,7 @@ func (g *GitlabDownloader) 
GetComments(opts base.GetCommentOptions) ([]*base.Com } else { c := comment.Notes[0] allComments = append(allComments, &base.Comment{ - IssueIndex: context.LocalID(), + IssueIndex: commentable.GetLocalIndex(), Index: int64(c.ID), PosterID: int64(c.Author.ID), PosterName: c.Author.Username, @@ -619,12 +606,9 @@ func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque RepoName: g.repoName, OwnerName: pr.Author.Username, }, - PatchURL: pr.WebURL + ".patch", - Context: gitlabIssueContext{ - foreignID: int64(pr.IID), - localID: newPRNumber, - IsMergeRequest: true, - }, + PatchURL: pr.WebURL + ".patch", + ForeignIndex: int64(pr.IID), + Context: gitlabIssueContext{IsMergeRequest: true}, }) } @@ -632,10 +616,10 @@ func (g *GitlabDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque } // GetReviews returns pull requests review -func (g *GitlabDownloader) GetReviews(context base.IssueContext) ([]*base.Review, error) { - approvals, resp, err := g.client.MergeRequestApprovals.GetConfiguration(g.repoID, int(context.ForeignID()), gitlab.WithContext(g.ctx)) +func (g *GitlabDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) { + approvals, resp, err := g.client.MergeRequestApprovals.GetConfiguration(g.repoID, int(reviewable.GetForeignIndex()), gitlab.WithContext(g.ctx)) if err != nil { - if resp != nil && resp.StatusCode == 404 { + if resp != nil && resp.StatusCode == http.StatusNotFound { log.Error(fmt.Sprintf("GitlabDownloader: while migrating a error occurred: '%s'", err.Error())) return []*base.Review{}, nil } @@ -654,7 +638,7 @@ func (g *GitlabDownloader) GetReviews(context base.IssueContext) ([]*base.Review reviews := make([]*base.Review, 0, len(approvals.ApprovedBy)) for _, user := range approvals.ApprovedBy { reviews = append(reviews, &base.Review{ - IssueIndex: context.LocalID(), + IssueIndex: reviewable.GetLocalIndex(), ReviewerID: int64(user.User.ID), ReviewerName: user.User.Username, CreatedAt: createdAt, diff --git a/services/migrations/gitlab_test.go b/services/migrations/gitlab_test.go index ad61577653..e63d674186 100644 --- a/services/migrations/gitlab_test.go +++ b/services/migrations/gitlab_test.go @@ -28,7 +28,7 @@ func TestGitlabDownloadRepo(t *testing.T) { } resp, err := http.Get("https://gitlab.com/gitea/test_repo") - if err != nil || resp.StatusCode != 200 { + if err != nil || resp.StatusCode != http.StatusOK { t.Skipf("Can't access test repo, skipping %s", t.Name()) } @@ -214,12 +214,10 @@ func TestGitlabDownloadRepo(t *testing.T) { }, }, issues) - comments, _, err := downloader.GetComments(base.GetCommentOptions{ - Context: gitlabIssueContext{ - foreignID: 2, - localID: 2, - IsMergeRequest: false, - }, + comments, _, err := downloader.GetComments(&base.Issue{ + Number: 2, + ForeignIndex: 2, + Context: gitlabIssueContext{IsMergeRequest: false}, }) assert.NoError(t, err) assertCommentsEqual(t, []*base.Comment{ @@ -301,15 +299,12 @@ func TestGitlabDownloadRepo(t *testing.T) { Merged: false, MergedTime: nil, MergeCommitSHA: "", - Context: gitlabIssueContext{ - foreignID: 2, - localID: 4, - IsMergeRequest: true, - }, + ForeignIndex: 2, + Context: gitlabIssueContext{IsMergeRequest: true}, }, }, prs) - rvs, err := downloader.GetReviews(base.BasicIssueContext(1)) + rvs, err := downloader.GetReviews(&base.PullRequest{Number: 1, ForeignIndex: 1}) assert.NoError(t, err) assertReviewsEqual(t, []*base.Review{ { @@ -328,7 +323,7 @@ func TestGitlabDownloadRepo(t *testing.T) { }, }, rvs) - rvs, err = 
downloader.GetReviews(base.BasicIssueContext(2)) + rvs, err = downloader.GetReviews(&base.PullRequest{Number: 2, ForeignIndex: 2}) assert.NoError(t, err) assertReviewsEqual(t, []*base.Review{ { @@ -469,7 +464,8 @@ func TestGitlabGetReviews(t *testing.T) { mock, review := convertTestCase(testCase) mux.HandleFunc(fmt.Sprintf("/api/v4/projects/%d/merge_requests/%d/approvals", testCase.repoID, testCase.prID), mock) - rvs, err := downloader.GetReviews(base.BasicIssueContext(testCase.prID)) + id := int64(testCase.prID) + rvs, err := downloader.GetReviews(&base.Issue{Number: id, ForeignIndex: id}) assert.NoError(t, err) assertReviewsEqual(t, []*base.Review{&review}, rvs) } diff --git a/services/migrations/gogs.go b/services/migrations/gogs.go index 0ef39484b7..a28033218e 100644 --- a/services/migrations/gogs.go +++ b/services/migrations/gogs.go @@ -223,10 +223,10 @@ func (g *GogsDownloader) getIssues(page int, state string) ([]*base.Issue, bool, } // GetComments returns comments according issueNumber -func (g *GogsDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { +func (g *GogsDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { allComments := make([]*base.Comment, 0, 100) - comments, err := g.client.ListIssueComments(g.repoOwner, g.repoName, opts.Context.ForeignID()) + comments, err := g.client.ListIssueComments(g.repoOwner, g.repoName, commentable.GetForeignIndex()) if err != nil { return nil, false, fmt.Errorf("error while listing repos: %v", err) } @@ -235,7 +235,7 @@ func (g *GogsDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comme continue } allComments = append(allComments, &base.Comment{ - IssueIndex: opts.Context.LocalID(), + IssueIndex: commentable.GetLocalIndex(), Index: comment.ID, PosterID: comment.Poster.ID, PosterName: comment.Poster.Login, @@ -288,19 +288,19 @@ func convertGogsIssue(issue *gogs.Issue) *base.Issue { } return &base.Issue{ - Title: issue.Title, - Number: issue.Index, - PosterID: issue.Poster.ID, - PosterName: issue.Poster.Login, - PosterEmail: issue.Poster.Email, - Content: issue.Body, - Milestone: milestone, - State: string(issue.State), - Created: issue.Created, - Updated: issue.Updated, - Labels: labels, - Closed: closed, - Context: base.BasicIssueContext(issue.Index), + Title: issue.Title, + Number: issue.Index, + PosterID: issue.Poster.ID, + PosterName: issue.Poster.Login, + PosterEmail: issue.Poster.Email, + Content: issue.Body, + Milestone: milestone, + State: string(issue.State), + Created: issue.Created, + Updated: issue.Updated, + Labels: labels, + Closed: closed, + ForeignIndex: issue.Index, } } diff --git a/services/migrations/gogs_test.go b/services/migrations/gogs_test.go index f9d74d3be3..501161b610 100644 --- a/services/migrations/gogs_test.go +++ b/services/migrations/gogs_test.go @@ -111,9 +111,7 @@ func TestGogsDownloadRepo(t *testing.T) { }, issues) // downloader.GetComments() - comments, _, err := downloader.GetComments(base.GetCommentOptions{ - Context: base.BasicIssueContext(1), - }) + comments, _, err := downloader.GetComments(&base.Issue{Number: 1, ForeignIndex: 1}) assert.NoError(t, err) assertCommentsEqual(t, []*base.Comment{ { diff --git a/services/migrations/main_test.go b/services/migrations/main_test.go index b040df83d1..ad9bc9c731 100644 --- a/services/migrations/main_test.go +++ b/services/migrations/main_test.go @@ -17,7 +17,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, 
&unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } func timePtr(t time.Time) *time.Time { diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go index 7bca128ac5..700f06af35 100644 --- a/services/migrations/migrate.go +++ b/services/migrations/migrate.go @@ -81,10 +81,9 @@ func IsMigrateURLAllowed(remoteURL string, doer *user_model.User) error { err = nil //nolint hostName = u.Host } - addrList, err := net.LookupIP(hostName) - if err != nil { - return &models.ErrInvalidCloneAddr{Host: u.Host, NotResolvedIP: true} - } + + // some users only use proxy, there is no DNS resolver. it's safe to ignore the LookupIP error + addrList, _ := net.LookupIP(hostName) var ipAllowed bool var ipBlocked bool @@ -325,9 +324,7 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts allComments := make([]*base.Comment, 0, commentBatchSize) for _, issue := range issues { log.Trace("migrating issue %d's comments", issue.Number) - comments, _, err := downloader.GetComments(base.GetCommentOptions{ - Context: issue.Context, - }) + comments, _, err := downloader.GetComments(issue) if err != nil { if !base.IsErrNotSupported(err) { return err @@ -383,9 +380,7 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts allComments := make([]*base.Comment, 0, commentBatchSize) for _, pr := range prs { log.Trace("migrating pull request %d's comments", pr.Number) - comments, _, err := downloader.GetComments(base.GetCommentOptions{ - Context: pr.Context, - }) + comments, _, err := downloader.GetComments(pr) if err != nil { if !base.IsErrNotSupported(err) { return err @@ -412,7 +407,7 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts // migrate reviews allReviews := make([]*base.Review, 0, reviewBatchSize) for _, pr := range prs { - reviews, err := downloader.GetReviews(pr.Context) + reviews, err := downloader.GetReviews(pr) if err != nil { if !base.IsErrNotSupported(err) { return err @@ -446,10 +441,7 @@ func migrateRepository(downloader base.Downloader, uploader base.Uploader, opts if opts.Comments && supportAllComments { log.Trace("migrating comments") for i := 1; ; i++ { - comments, isEnd, err := downloader.GetComments(base.GetCommentOptions{ - Page: i, - PageSize: commentBatchSize, - }) + comments, isEnd, err := downloader.GetAllComments(i, commentBatchSize) if err != nil { return err } diff --git a/services/migrations/onedev.go b/services/migrations/onedev.go index d27cbbed4f..d4b30939ce 100644 --- a/services/migrations/onedev.go +++ b/services/migrations/onedev.go @@ -262,19 +262,9 @@ func (d *OneDevDownloader) GetLabels() ([]*base.Label, error) { } type onedevIssueContext struct { - foreignID int64 - localID int64 IsPullRequest bool } -func (c onedevIssueContext) LocalID() int64 { - return c.localID -} - -func (c onedevIssueContext) ForeignID() int64 { - return c.foreignID -} - // GetIssues returns issues func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, error) { rawIssues := make([]struct { @@ -346,21 +336,18 @@ func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er } poster := d.tryGetUser(issue.SubmitterID) issues = append(issues, &base.Issue{ - Title: issue.Title, - Number: issue.Number, - PosterName: poster.Name, - PosterEmail: poster.Email, - Content: issue.Description, - Milestone: d.milestoneMap[milestoneID], - State: state, - Created: issue.SubmitDate, - Updated: issue.SubmitDate, - Labels: []*base.Label{label}, - Context: 
onedevIssueContext{ - foreignID: issue.ID, - localID: issue.Number, - IsPullRequest: false, - }, + Title: issue.Title, + Number: issue.Number, + PosterName: poster.Name, + PosterEmail: poster.Email, + Content: issue.Description, + Milestone: d.milestoneMap[milestoneID], + State: state, + Created: issue.SubmitDate, + Updated: issue.SubmitDate, + Labels: []*base.Label{label}, + ForeignIndex: issue.ID, + Context: onedevIssueContext{IsPullRequest: false}, }) if d.maxIssueIndex < issue.Number { @@ -372,10 +359,10 @@ func (d *OneDevDownloader) GetIssues(page, perPage int) ([]*base.Issue, bool, er } // GetComments returns comments -func (d *OneDevDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { - context, ok := opts.Context.(onedevIssueContext) +func (d *OneDevDownloader) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { + context, ok := commentable.GetContext().(onedevIssueContext) if !ok { - return nil, false, fmt.Errorf("unexpected comment context: %+v", opts.Context) + return nil, false, fmt.Errorf("unexpected context: %+v", commentable.GetContext()) } rawComments := make([]struct { @@ -387,9 +374,9 @@ func (d *OneDevDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Com var endpoint string if context.IsPullRequest { - endpoint = fmt.Sprintf("/api/pull-requests/%d/comments", context.ForeignID()) + endpoint = fmt.Sprintf("/api/pull-requests/%d/comments", commentable.GetForeignIndex()) } else { - endpoint = fmt.Sprintf("/api/issues/%d/comments", context.ForeignID()) + endpoint = fmt.Sprintf("/api/issues/%d/comments", commentable.GetForeignIndex()) } err := d.callAPI( @@ -408,9 +395,9 @@ func (d *OneDevDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Com }, 0, 100) if context.IsPullRequest { - endpoint = fmt.Sprintf("/api/pull-requests/%d/changes", context.ForeignID()) + endpoint = fmt.Sprintf("/api/pull-requests/%d/changes", commentable.GetForeignIndex()) } else { - endpoint = fmt.Sprintf("/api/issues/%d/changes", context.ForeignID()) + endpoint = fmt.Sprintf("/api/issues/%d/changes", commentable.GetForeignIndex()) } err = d.callAPI( @@ -429,7 +416,7 @@ func (d *OneDevDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Com } poster := d.tryGetUser(comment.UserID) comments = append(comments, &base.Comment{ - IssueIndex: context.LocalID(), + IssueIndex: commentable.GetLocalIndex(), Index: comment.ID, PosterID: poster.ID, PosterName: poster.Name, @@ -454,7 +441,7 @@ func (d *OneDevDownloader) GetComments(opts base.GetCommentOptions) ([]*base.Com poster := d.tryGetUser(change.UserID) comments = append(comments, &base.Comment{ - IssueIndex: context.LocalID(), + IssueIndex: commentable.GetLocalIndex(), PosterID: poster.ID, PosterName: poster.Name, PosterEmail: poster.Email, @@ -552,11 +539,8 @@ func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque SHA: mergePreview.TargetHeadCommitHash, RepoName: d.repoName, }, - Context: onedevIssueContext{ - foreignID: pr.ID, - localID: number, - IsPullRequest: true, - }, + ForeignIndex: pr.ID, + Context: onedevIssueContext{IsPullRequest: true}, }) } @@ -564,7 +548,7 @@ func (d *OneDevDownloader) GetPullRequests(page, perPage int) ([]*base.PullReque } // GetReviews returns pull requests reviews -func (d *OneDevDownloader) GetReviews(context base.IssueContext) ([]*base.Review, error) { +func (d *OneDevDownloader) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) { rawReviews := make([]struct { ID int64 `json:"id"` UserID int64 
`json:"userId"` @@ -576,7 +560,7 @@ func (d *OneDevDownloader) GetReviews(context base.IssueContext) ([]*base.Review }, 0, 100) err := d.callAPI( - fmt.Sprintf("/api/pull-requests/%d/reviews", context.ForeignID()), + fmt.Sprintf("/api/pull-requests/%d/reviews", reviewable.GetForeignIndex()), nil, &rawReviews, ) @@ -600,7 +584,7 @@ func (d *OneDevDownloader) GetReviews(context base.IssueContext) ([]*base.Review poster := d.tryGetUser(review.UserID) reviews = append(reviews, &base.Review{ - IssueIndex: context.LocalID(), + IssueIndex: reviewable.GetLocalIndex(), ReviewerID: poster.ID, ReviewerName: poster.Name, Content: content, diff --git a/services/migrations/onedev_test.go b/services/migrations/onedev_test.go index 59b7cae5fe..0cf1ab852c 100644 --- a/services/migrations/onedev_test.go +++ b/services/migrations/onedev_test.go @@ -19,7 +19,7 @@ import ( func TestOneDevDownloadRepo(t *testing.T) { resp, err := http.Get("https://code.onedev.io/projects/go-gitea-test_repo") - if err != nil || resp.StatusCode != 200 { + if err != nil || resp.StatusCode != http.StatusOK { t.Skipf("Can't access test repo, skipping %s", t.Name()) } @@ -74,11 +74,8 @@ func TestOneDevDownloadRepo(t *testing.T) { Name: "Improvement", }, }, - Context: onedevIssueContext{ - foreignID: 398, - localID: 4, - IsPullRequest: false, - }, + ForeignIndex: 398, + Context: onedevIssueContext{IsPullRequest: false}, }, { Number: 3, @@ -94,20 +91,15 @@ func TestOneDevDownloadRepo(t *testing.T) { Name: "New Feature", }, }, - Context: onedevIssueContext{ - foreignID: 397, - localID: 3, - IsPullRequest: false, - }, + ForeignIndex: 397, + Context: onedevIssueContext{IsPullRequest: false}, }, }, issues) - comments, _, err := downloader.GetComments(base.GetCommentOptions{ - Context: onedevIssueContext{ - foreignID: 398, - localID: 4, - IsPullRequest: false, - }, + comments, _, err := downloader.GetComments(&base.Issue{ + Number: 4, + ForeignIndex: 398, + Context: onedevIssueContext{IsPullRequest: false}, }) assert.NoError(t, err) assertCommentsEqual(t, []*base.Comment{ @@ -141,18 +133,12 @@ func TestOneDevDownloadRepo(t *testing.T) { SHA: "f32b0a9dfd09a60f616f29158f772cedd89942d2", RepoName: "go-gitea-test_repo", }, - Context: onedevIssueContext{ - foreignID: 186, - localID: 5, - IsPullRequest: true, - }, + ForeignIndex: 186, + Context: onedevIssueContext{IsPullRequest: true}, }, }, prs) - rvs, err := downloader.GetReviews(onedevIssueContext{ - foreignID: 186, - localID: 5, - }) + rvs, err := downloader.GetReviews(&base.PullRequest{Number: 5, ForeignIndex: 186}) assert.NoError(t, err) assertReviewsEqual(t, []*base.Review{ { diff --git a/services/migrations/restore.go b/services/migrations/restore.go index d30d90a6c4..8c9654a7e3 100644 --- a/services/migrations/restore.go +++ b/services/migrations/restore.go @@ -193,17 +193,13 @@ func (r *RepositoryRestorer) GetIssues(page, perPage int) ([]*base.Issue, bool, } return nil, false, err } - - for _, issue := range issues { - issue.Context = base.BasicIssueContext(issue.Number) - } return issues, true, nil } // GetComments returns comments according issueNumber -func (r *RepositoryRestorer) GetComments(opts base.GetCommentOptions) ([]*base.Comment, bool, error) { +func (r *RepositoryRestorer) GetComments(commentable base.Commentable) ([]*base.Comment, bool, error) { comments := make([]*base.Comment, 0, 10) - p := filepath.Join(r.commentDir(), fmt.Sprintf("%d.yml", opts.Context.ForeignID())) + p := filepath.Join(r.commentDir(), fmt.Sprintf("%d.yml", commentable.GetForeignIndex())) _, err := 
os.Stat(p) if err != nil { if os.IsNotExist(err) { @@ -247,15 +243,14 @@ func (r *RepositoryRestorer) GetPullRequests(page, perPage int) ([]*base.PullReq } for _, pr := range pulls { pr.PatchURL = "file://" + filepath.Join(r.baseDir, pr.PatchURL) - pr.Context = base.BasicIssueContext(pr.Number) } return pulls, true, nil } // GetReviews returns pull requests review -func (r *RepositoryRestorer) GetReviews(context base.IssueContext) ([]*base.Review, error) { +func (r *RepositoryRestorer) GetReviews(reviewable base.Reviewable) ([]*base.Review, error) { reviews := make([]*base.Review, 0, 10) - p := filepath.Join(r.reviewDir(), fmt.Sprintf("%d.yml", context.ForeignID())) + p := filepath.Join(r.reviewDir(), fmt.Sprintf("%d.yml", reviewable.GetForeignIndex())) _, err := os.Stat(p) if err != nil { if os.IsNotExist(err) { diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go index 6c9c4a0483..ecd031b387 100644 --- a/services/mirror/mirror_pull.go +++ b/services/mirror/mirror_pull.go @@ -12,7 +12,6 @@ import ( "code.gitea.io/gitea/models" admin_model "code.gitea.io/gitea/models/admin" - "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/cache" "code.gitea.io/gitea/modules/git" @@ -34,12 +33,18 @@ func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error remoteName := m.GetRemoteName() repoPath := m.Repo.RepoPath() // Remove old remote - _, err := git.NewCommand(ctx, "remote", "rm", remoteName).RunInDir(repoPath) + _, _, err := git.NewCommand(ctx, "remote", "rm", remoteName).RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { return err } - _, err = git.NewCommand(ctx, "remote", "add", remoteName, "--mirror=fetch", addr).RunInDir(repoPath) + cmd := git.NewCommand(ctx, "remote", "add", remoteName, "--mirror=fetch", addr) + if strings.Contains(addr, "://") && strings.Contains(addr, "@") { + cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=fetch %s [repo_path: %s]", remoteName, util.SanitizeCredentialURLs(addr), repoPath)) + } else { + cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=fetch %s [repo_path: %s]", remoteName, addr, repoPath)) + } + _, _, err = cmd.RunStdString(&git.RunOpts{Dir: repoPath}) if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { return err } @@ -48,12 +53,18 @@ func UpdateAddress(ctx context.Context, m *repo_model.Mirror, addr string) error wikiPath := m.Repo.WikiPath() wikiRemotePath := repo_module.WikiRemoteURL(ctx, addr) // Remove old remote of wiki - _, err := git.NewCommand(ctx, "remote", "rm", remoteName).RunInDir(wikiPath) + _, _, err = git.NewCommand(ctx, "remote", "rm", remoteName).RunStdString(&git.RunOpts{Dir: wikiPath}) if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { return err } - _, err = git.NewCommand(ctx, "remote", "add", remoteName, "--mirror=fetch", wikiRemotePath).RunInDir(wikiPath) + cmd = git.NewCommand(ctx, "remote", "add", remoteName, "--mirror=fetch", wikiRemotePath) + if strings.Contains(wikiRemotePath, "://") && strings.Contains(wikiRemotePath, "@") { + cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=fetch %s [repo_path: %s]", remoteName, util.SanitizeCredentialURLs(wikiRemotePath), wikiPath)) + } else { + cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=fetch %s [repo_path: %s]", remoteName, wikiRemotePath, wikiPath)) + } + _, _, err = 
cmd.RunStdString(&git.RunOpts{Dir: wikiPath}) if err != nil && !strings.HasPrefix(err.Error(), "exit status 128 - fatal: No such remote ") { return err } @@ -149,7 +160,6 @@ func pruneBrokenReferences(ctx context.Context, repoPath string, timeout time.Duration, stdoutBuilder, stderrBuilder *strings.Builder, - sanitizer *strings.Replacer, isWiki bool, ) error { wiki := "" @@ -161,7 +171,7 @@ func pruneBrokenReferences(ctx context.Context, stdoutBuilder.Reset() pruneErr := git.NewCommand(ctx, "remote", "prune", m.GetRemoteName()). SetDescription(fmt.Sprintf("Mirror.runSync %ssPrune references: %s ", wiki, m.Repo.FullName())). - RunWithContext(&git.RunContext{ + Run(&git.RunOpts{ Timeout: timeout, Dir: repoPath, Stdout: stdoutBuilder, @@ -173,8 +183,8 @@ func pruneBrokenReferences(ctx context.Context, // sanitize the output, since it may contain the remote address, which may // contain a password - stderrMessage := sanitizer.Replace(stderr) - stdoutMessage := sanitizer.Replace(stdout) + stderrMessage := util.SanitizeCredentialURLs(stderr) + stdoutMessage := util.SanitizeCredentialURLs(stdout) log.Error("Failed to prune mirror repository %s%-v references:\nStdout: %s\nStderr: %s\nErr: %v", wiki, m.Repo, stdoutMessage, stderrMessage, pruneErr) desc := fmt.Sprintf("Failed to prune mirror repository %s'%s' references: %s", wiki, repoPath, stderrMessage) @@ -193,6 +203,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo timeout := time.Duration(setting.Git.Timeout.Mirror) * time.Second log.Trace("SyncMirrors [repo: %-v]: running git remote update...", m.Repo) + gitArgs := []string{"remote", "update"} if m.EnablePrune { gitArgs = append(gitArgs, "--prune") @@ -208,7 +219,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stderrBuilder := strings.Builder{} if err := git.NewCommand(ctx, gitArgs...). SetDescription(fmt.Sprintf("Mirror.runSync: %s", m.Repo.FullName())). - RunWithContext(&git.RunContext{ + Run(&git.RunOpts{ Timeout: timeout, Dir: repoPath, Stdout: &stdoutBuilder, @@ -217,11 +228,9 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stdout := stdoutBuilder.String() stderr := stderrBuilder.String() - // sanitize the output, since it may contain the remote address, which may - // contain a password - sanitizer := util.NewURLSanitizer(remoteAddr, true) - stderrMessage := sanitizer.Replace(stderr) - stdoutMessage := sanitizer.Replace(stdout) + // sanitize the output, since it may contain the remote address, which may contain a password + stderrMessage := util.SanitizeCredentialURLs(stderr) + stdoutMessage := util.SanitizeCredentialURLs(stdout) // Now check if the error is a resolve reference due to broken reference if strings.Contains(stderr, "unable to resolve reference") && strings.Contains(stderr, "reference broken") { @@ -229,14 +238,14 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo err = nil // Attempt prune - pruneErr := pruneBrokenReferences(ctx, m, repoPath, timeout, &stdoutBuilder, &stderrBuilder, sanitizer, false) + pruneErr := pruneBrokenReferences(ctx, m, repoPath, timeout, &stdoutBuilder, &stderrBuilder, false) if pruneErr == nil { // Successful prune - reattempt mirror stderrBuilder.Reset() stdoutBuilder.Reset() if err = git.NewCommand(ctx, gitArgs...). SetDescription(fmt.Sprintf("Mirror.runSync: %s", m.Repo.FullName())). 
- RunWithContext(&git.RunContext{ + Run(&git.RunOpts{ Timeout: timeout, Dir: repoPath, Stdout: &stdoutBuilder, @@ -247,8 +256,8 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo // sanitize the output, since it may contain the remote address, which may // contain a password - stderrMessage = sanitizer.Replace(stderr) - stdoutMessage = sanitizer.Replace(stdout) + stderrMessage = util.SanitizeCredentialURLs(stderr) + stdoutMessage = util.SanitizeCredentialURLs(stdout) } } } @@ -265,7 +274,11 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo } output := stderrBuilder.String() - gitRepo, err := git.OpenRepositoryCtx(ctx, repoPath) + if err := git.WriteCommitGraph(ctx, repoPath); err != nil { + log.Error("SyncMirrors [repo: %-v]: %v", m.Repo, err) + } + + gitRepo, err := git.OpenRepository(ctx, repoPath) if err != nil { log.Error("SyncMirrors [repo: %-v]: failed to OpenRepository: %v", m.Repo, err) return nil, false @@ -287,7 +300,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo gitRepo.Close() log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo) - if err := models.UpdateRepoSize(db.DefaultContext, m.Repo); err != nil { + if err := models.UpdateRepoSize(ctx, m.Repo); err != nil { log.Error("SyncMirrors [repo: %-v]: failed to update size for mirror repository: %v", m.Repo, err) } @@ -297,7 +310,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stdoutBuilder.Reset() if err := git.NewCommand(ctx, "remote", "update", "--prune", m.GetRemoteName()). SetDescription(fmt.Sprintf("Mirror.runSync Wiki: %s ", m.Repo.FullName())). - RunWithContext(&git.RunContext{ + Run(&git.RunOpts{ Timeout: timeout, Dir: wikiPath, Stdout: &stdoutBuilder, @@ -306,19 +319,9 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stdout := stdoutBuilder.String() stderr := stderrBuilder.String() - // sanitize the output, since it may contain the remote address, which may - // contain a password - - remoteAddr, remoteErr := git.GetRemoteAddress(ctx, wikiPath, m.GetRemoteName()) - if remoteErr != nil { - log.Error("SyncMirrors [repo: %-v Wiki]: unable to get GetRemoteAddress Error %v", m.Repo, remoteErr) - } - - // sanitize the output, since it may contain the remote address, which may - // contain a password - sanitizer := util.NewURLSanitizer(remoteAddr, true) - stderrMessage := sanitizer.Replace(stderr) - stdoutMessage := sanitizer.Replace(stdout) + // sanitize the output, since it may contain the remote address, which may contain a password + stderrMessage := util.SanitizeCredentialURLs(stderr) + stdoutMessage := util.SanitizeCredentialURLs(stdout) // Now check if the error is a resolve reference due to broken reference if strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken") { @@ -326,7 +329,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo err = nil // Attempt prune - pruneErr := pruneBrokenReferences(ctx, m, repoPath, timeout, &stdoutBuilder, &stderrBuilder, sanitizer, true) + pruneErr := pruneBrokenReferences(ctx, m, repoPath, timeout, &stdoutBuilder, &stderrBuilder, true) if pruneErr == nil { // Successful prune - reattempt mirror stderrBuilder.Reset() @@ -334,7 +337,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo if err = git.NewCommand(ctx, "remote", "update", "--prune", m.GetRemoteName()). 
SetDescription(fmt.Sprintf("Mirror.runSync Wiki: %s ", m.Repo.FullName())). - RunWithContext(&git.RunContext{ + Run(&git.RunOpts{ Timeout: timeout, Dir: wikiPath, Stdout: &stdoutBuilder, @@ -342,8 +345,8 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo }); err != nil { stdout := stdoutBuilder.String() stderr := stderrBuilder.String() - stderrMessage = sanitizer.Replace(stderr) - stdoutMessage = sanitizer.Replace(stdout) + stderrMessage = util.SanitizeCredentialURLs(stderr) + stdoutMessage = util.SanitizeCredentialURLs(stdout) } } } @@ -357,6 +360,10 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo } return nil, false } + + if err := git.WriteCommitGraph(ctx, wikiPath); err != nil { + log.Error("SyncMirrors [repo: %-v]: %v", m.Repo, err) + } } log.Trace("SyncMirrors [repo: %-v Wiki]: git remote update complete", m.Repo) } @@ -400,6 +407,9 @@ func SyncPullMirror(ctx context.Context, repoID int64) bool { log.Trace("SyncMirrors [repo: %-v]: Running Sync", m.Repo) results, ok := runSync(ctx, m) if !ok { + if err = repo_model.TouchMirror(ctx, m); err != nil { + log.Error("SyncMirrors [repo: %-v]: failed to TouchMirror: %v", m.Repo, err) + } return false } @@ -415,7 +425,7 @@ func SyncPullMirror(ctx context.Context, repoID int64) bool { log.Trace("SyncMirrors [repo: %-v]: no branches updated", m.Repo) } else { log.Trace("SyncMirrors [repo: %-v]: %d branches updated", m.Repo, len(results)) - gitRepo, err = git.OpenRepositoryCtx(ctx, m.Repo.RepoPath()) + gitRepo, err = git.OpenRepository(ctx, m.Repo.RepoPath()) if err != nil { log.Error("SyncMirrors [repo: %-v]: unable to OpenRepository: %v", m.Repo, err) return false diff --git a/services/mirror/mirror_push.go b/services/mirror/mirror_push.go index cff53ba8d0..5c0c14c627 100644 --- a/services/mirror/mirror_push.go +++ b/services/mirror/mirror_push.go @@ -10,6 +10,7 @@ import ( "fmt" "io" "regexp" + "strings" "time" repo_model "code.gitea.io/gitea/models/repo" @@ -28,13 +29,19 @@ var stripExitStatus = regexp.MustCompile(`exit status \d+ - `) // AddPushMirrorRemote registers the push mirror remote. 
func AddPushMirrorRemote(ctx context.Context, m *repo_model.PushMirror, addr string) error { addRemoteAndConfig := func(addr, path string) error { - if _, err := git.NewCommand(ctx, "remote", "add", "--mirror=push", m.RemoteName, addr).RunInDir(path); err != nil { + cmd := git.NewCommand(ctx, "remote", "add", "--mirror=push", m.RemoteName, addr) + if strings.Contains(addr, "://") && strings.Contains(addr, "@") { + cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=push %s [repo_path: %s]", m.RemoteName, util.SanitizeCredentialURLs(addr), path)) + } else { + cmd.SetDescription(fmt.Sprintf("remote add %s --mirror=push %s [repo_path: %s]", m.RemoteName, addr, path)) + } + if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: path}); err != nil { return err } - if _, err := git.NewCommand(ctx, "config", "--add", "remote."+m.RemoteName+".push", "+refs/heads/*:refs/heads/*").RunInDir(path); err != nil { + if _, _, err := git.NewCommand(ctx, "config", "--add", "remote."+m.RemoteName+".push", "+refs/heads/*:refs/heads/*").RunStdString(&git.RunOpts{Dir: path}); err != nil { return err } - if _, err := git.NewCommand(ctx, "config", "--add", "remote."+m.RemoteName+".push", "+refs/tags/*:refs/tags/*").RunInDir(path); err != nil { + if _, _, err := git.NewCommand(ctx, "config", "--add", "remote."+m.RemoteName+".push", "+refs/tags/*:refs/tags/*").RunStdString(&git.RunOpts{Dir: path}); err != nil { return err } return nil @@ -60,12 +67,12 @@ func AddPushMirrorRemote(ctx context.Context, m *repo_model.PushMirror, addr str func RemovePushMirrorRemote(ctx context.Context, m *repo_model.PushMirror) error { cmd := git.NewCommand(ctx, "remote", "rm", m.RemoteName) - if _, err := cmd.RunInDir(m.Repo.RepoPath()); err != nil { + if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: m.Repo.RepoPath()}); err != nil { return err } if m.Repo.HasWiki() { - if _, err := cmd.RunInDir(m.Repo.WikiPath()); err != nil { + if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: m.Repo.WikiPath()}); err != nil { // The wiki remote may not exist log.Warn("Wiki Remote[%d] could not be removed: %v", m.ID, err) } @@ -130,7 +137,7 @@ func runPushSync(ctx context.Context, m *repo_model.PushMirror) error { if setting.LFS.StartServer { log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo) - gitRepo, err := git.OpenRepositoryCtx(ctx, path) + gitRepo, err := git.OpenRepository(ctx, path) if err != nil { log.Error("OpenRepository: %v", err) return errors.New("Unexpected error") @@ -140,7 +147,7 @@ func runPushSync(ctx context.Context, m *repo_model.PushMirror) error { endpoint := lfs.DetermineEndpoint(remoteAddr.String(), "") lfsClient := lfs.NewClient(endpoint, nil) if err := pushAllLFSObjects(ctx, gitRepo, lfsClient); err != nil { - return util.NewURLSanitizedError(err, remoteAddr, true) + return util.SanitizeErrorCredentialURLs(err) } } @@ -154,7 +161,7 @@ func runPushSync(ctx context.Context, m *repo_model.PushMirror) error { }); err != nil { log.Error("Error pushing %s mirror[%d] remote %s: %v", path, m.ID, m.RemoteName, err) - return util.NewURLSanitizedError(err, remoteAddr, true) + return util.SanitizeErrorCredentialURLs(err) } return nil diff --git a/services/org/org.go b/services/org/org.go index 4c0cfe87fc..d7b3019e74 100644 --- a/services/org/org.go +++ b/services/org/org.go @@ -9,6 +9,8 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" + packages_model "code.gitea.io/gitea/models/packages" repo_model "code.gitea.io/gitea/models/repo" user_model 
"code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/storage" @@ -16,7 +18,7 @@ import ( ) // DeleteOrganization completely and permanently deletes everything of organization. -func DeleteOrganization(org *models.Organization) error { +func DeleteOrganization(org *organization.Organization) error { ctx, commiter, err := db.TxContext() if err != nil { return err @@ -31,7 +33,14 @@ func DeleteOrganization(org *models.Organization) error { return models.ErrUserOwnRepos{UID: org.ID} } - if err := models.DeleteOrganization(ctx, org); err != nil { + // Check ownership of packages. + if ownsPackages, err := packages_model.HasOwnerPackages(ctx, org.ID); err != nil { + return fmt.Errorf("HasOwnerPackages: %v", err) + } else if ownsPackages { + return models.ErrUserOwnPackages{UID: org.ID} + } + + if err := organization.DeleteOrganization(ctx, org); err != nil { return fmt.Errorf("DeleteOrganization: %v", err) } diff --git a/services/org/org_test.go b/services/org/org_test.go index aaa2756bb5..7f90d85807 100644 --- a/services/org/org_test.go +++ b/services/org/org_test.go @@ -9,6 +9,7 @@ import ( "testing" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -16,23 +17,25 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } func TestDeleteOrganization(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) - org := unittest.AssertExistsAndLoadBean(t, &models.Organization{ID: 6}).(*models.Organization) + org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 6}).(*organization.Organization) assert.NoError(t, DeleteOrganization(org)) - unittest.AssertNotExistsBean(t, &models.Organization{ID: 6}) - unittest.AssertNotExistsBean(t, &models.OrgUser{OrgID: 6}) - unittest.AssertNotExistsBean(t, &models.Team{OrgID: 6}) + unittest.AssertNotExistsBean(t, &organization.Organization{ID: 6}) + unittest.AssertNotExistsBean(t, &organization.OrgUser{OrgID: 6}) + unittest.AssertNotExistsBean(t, &organization.Team{OrgID: 6}) - org = unittest.AssertExistsAndLoadBean(t, &models.Organization{ID: 3}).(*models.Organization) + org = unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3}).(*organization.Organization) err := DeleteOrganization(org) assert.Error(t, err) assert.True(t, models.IsErrUserOwnRepos(err)) - user := unittest.AssertExistsAndLoadBean(t, &models.Organization{ID: 5}).(*models.Organization) + user := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 5}).(*organization.Organization) assert.Error(t, DeleteOrganization(user)) - unittest.CheckConsistencyFor(t, &user_model.User{}, &models.Team{}) + unittest.CheckConsistencyFor(t, &user_model.User{}, &organization.Team{}) } diff --git a/services/packages/auth.go b/services/packages/auth.go new file mode 100644 index 0000000000..50212fccfd --- /dev/null +++ b/services/packages/auth.go @@ -0,0 +1,66 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
+ +package packages + +import ( + "fmt" + "net/http" + "strings" + "time" + + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/setting" + + "github.com/golang-jwt/jwt/v4" +) + +type packageClaims struct { + jwt.RegisteredClaims + UserID int64 +} + +func CreateAuthorizationToken(u *user_model.User) (string, error) { + now := time.Now() + + claims := packageClaims{ + RegisteredClaims: jwt.RegisteredClaims{ + ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)), + NotBefore: jwt.NewNumericDate(now), + }, + UserID: u.ID, + } + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + + tokenString, err := token.SignedString([]byte(setting.SecretKey)) + if err != nil { + return "", err + } + + return tokenString, nil +} + +func ParseAuthorizationToken(req *http.Request) (int64, error) { + parts := strings.SplitN(req.Header.Get("Authorization"), " ", 2) + if len(parts) != 2 { + return 0, fmt.Errorf("no token") + } + + token, err := jwt.ParseWithClaims(parts[1], &packageClaims{}, func(t *jwt.Token) (interface{}, error) { + if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"]) + } + return []byte(setting.SecretKey), nil + }) + if err != nil { + return 0, err + } + + c, ok := token.Claims.(*packageClaims) + if !token.Valid || !ok { + return 0, fmt.Errorf("invalid token claim") + } + + return c.UserID, nil +} diff --git a/services/packages/container/blob_uploader.go b/services/packages/container/blob_uploader.go new file mode 100644 index 0000000000..762f9e5259 --- /dev/null +++ b/services/packages/container/blob_uploader.go @@ -0,0 +1,136 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
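
The chunked blob uploader defined next persists the hash state between requests (HashStateBytes), so hashing can resume exactly where the previous chunk left off instead of re-reading the whole file. The MultiHasher in modules/packages presumably builds on the standard library's binary-marshalable hash digests; a self-contained sketch of that mechanism with a single SHA-256 hash:

```go
package main

import (
	"crypto/sha256"
	"encoding"
	"encoding/hex"
	"fmt"
)

func main() {
	chunk1 := []byte("first chunk of a blob, ")
	chunk2 := []byte("second chunk of the same blob")

	// Hash the first chunk, then snapshot the internal state,
	// analogous to what the uploader stores after every Append.
	h := sha256.New()
	h.Write(chunk1)
	state, err := h.(encoding.BinaryMarshaler).MarshalBinary()
	if err != nil {
		panic(err)
	}

	// A later request restores the state and continues with the next chunk.
	resumed := sha256.New()
	if err := resumed.(encoding.BinaryUnmarshaler).UnmarshalBinary(state); err != nil {
		panic(err)
	}
	resumed.Write(chunk2)

	// The resumed digest matches hashing the full content in one pass.
	full := sha256.Sum256(append(chunk1, chunk2...))
	fmt.Println(hex.EncodeToString(resumed.Sum(nil)))
	fmt.Println(hex.EncodeToString(full[:]))
}
```
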
+ +package container + +import ( + "context" + "errors" + "io" + "os" + "path" + "path/filepath" + "strings" + + packages_model "code.gitea.io/gitea/models/packages" + packages_module "code.gitea.io/gitea/modules/packages" + "code.gitea.io/gitea/modules/setting" +) + +var ( + // errWriteAfterRead occurs if Write is called after a read operation + errWriteAfterRead = errors.New("write is unsupported after a read operation") + // errOffsetMissmatch occurs if the file offset is different than the model + errOffsetMissmatch = errors.New("offset mismatch between file and model") +) + +// BlobUploader handles chunked blob uploads +type BlobUploader struct { + *packages_model.PackageBlobUpload + *packages_module.MultiHasher + file *os.File + reading bool +} + +func buildFilePath(id string) string { + return filepath.Join(setting.Packages.ChunkedUploadPath, path.Clean("/" + strings.ReplaceAll(id, "\\", "/"))[1:]) +} + +// NewBlobUploader creates a new blob uploader for the given id +func NewBlobUploader(ctx context.Context, id string) (*BlobUploader, error) { + model, err := packages_model.GetBlobUploadByID(ctx, id) + if err != nil { + return nil, err + } + + hash := packages_module.NewMultiHasher() + if len(model.HashStateBytes) != 0 { + if err := hash.UnmarshalBinary(model.HashStateBytes); err != nil { + return nil, err + } + } + + f, err := os.OpenFile(buildFilePath(model.ID), os.O_RDWR|os.O_CREATE, 0o666) + if err != nil { + return nil, err + } + + return &BlobUploader{ + model, + hash, + f, + false, + }, nil +} + +// Close implements io.Closer +func (u *BlobUploader) Close() error { + return u.file.Close() +} + +// Append appends a chunk of data and updates the model +func (u *BlobUploader) Append(ctx context.Context, r io.Reader) error { + if u.reading { + return errWriteAfterRead + } + + offset, err := u.file.Seek(0, io.SeekEnd) + if err != nil { + return err + } + if offset != u.BytesReceived { + return errOffsetMissmatch + } + + n, err := io.Copy(io.MultiWriter(u.file, u.MultiHasher), r) + if err != nil { + return err + } + + // fast path if nothing was written + if n == 0 { + return nil + } + + u.BytesReceived += n + + u.HashStateBytes, err = u.MultiHasher.MarshalBinary() + if err != nil { + return err + } + + return packages_model.UpdateBlobUpload(ctx, u.PackageBlobUpload) +} + +func (u *BlobUploader) Size() int64 { + return u.BytesReceived +} + +// Read implements io.Reader +func (u *BlobUploader) Read(p []byte) (int, error) { + if !u.reading { + _, err := u.file.Seek(0, io.SeekStart) + if err != nil { + return 0, err + } + + u.reading = true + } + + return u.file.Read(p) +} + +// Remove deletes the data and the model of a blob upload +func RemoveBlobUploadByID(ctx context.Context, id string) error { + if err := packages_model.DeleteBlobUploadByID(ctx, id); err != nil { + return err + } + + err := os.Remove(buildFilePath(id)) + if err != nil && !os.IsNotExist(err) { + return err + } + + return nil +} diff --git a/services/packages/container/cleanup.go b/services/packages/container/cleanup.go new file mode 100644 index 0000000000..390a0b7b05 --- /dev/null +++ b/services/packages/container/cleanup.go @@ -0,0 +1,80 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
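
buildFilePath in the uploader above roots the untrusted upload id at "/", normalizes backslashes, and runs path.Clean before joining under the chunked-upload directory, which (as far as I can tell) neutralizes path-traversal attempts in the id. A standalone demonstration of the idiom, with a made-up base directory and ids:

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
	"strings"
)

// buildPath mirrors the idiom used by buildFilePath in the patch.
func buildPath(baseDir, id string) string {
	cleaned := path.Clean("/" + strings.ReplaceAll(id, "\\", "/"))[1:]
	return filepath.Join(baseDir, cleaned)
}

func main() {
	base := "/var/lib/gitea/packages/chunked-uploads" // illustrative directory

	// A well-formed id stays below the base directory.
	fmt.Println(buildPath(base, "01234567-89ab-cdef"))
	// /var/lib/gitea/packages/chunked-uploads/01234567-89ab-cdef

	// Traversal attempts are neutralized: "/../../etc/passwd" cleans to
	// "/etc/passwd", so only "etc/passwd" is joined under the base.
	fmt.Println(buildPath(base, "../../etc/passwd"))
	// /var/lib/gitea/packages/chunked-uploads/etc/passwd

	// Backslash variants are normalized first, so they get the same treatment.
	fmt.Println(buildPath(base, `..\..\etc\passwd`))
	// /var/lib/gitea/packages/chunked-uploads/etc/passwd
}
```
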
+ +package container + +import ( + "context" + "time" + + packages_model "code.gitea.io/gitea/models/packages" + container_model "code.gitea.io/gitea/models/packages/container" + "code.gitea.io/gitea/modules/util" +) + +// Cleanup removes expired container data +func Cleanup(ctx context.Context, olderThan time.Duration) error { + if err := cleanupExpiredBlobUploads(ctx, olderThan); err != nil { + return err + } + return cleanupExpiredUploadedBlobs(ctx, olderThan) +} + +// cleanupExpiredBlobUploads removes expired blob uploads +func cleanupExpiredBlobUploads(ctx context.Context, olderThan time.Duration) error { + pbus, err := packages_model.FindExpiredBlobUploads(ctx, olderThan) + if err != nil { + return err + } + + for _, pbu := range pbus { + if err := RemoveBlobUploadByID(ctx, pbu.ID); err != nil { + return err + } + } + + return nil +} + +// cleanupExpiredUploadedBlobs removes expired uploaded blobs not referenced by a manifest +func cleanupExpiredUploadedBlobs(ctx context.Context, olderThan time.Duration) error { + pfs, err := container_model.SearchExpiredUploadedBlobs(ctx, olderThan) + if err != nil { + return err + } + + for _, pf := range pfs { + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeFile, pf.ID); err != nil { + return err + } + if err := packages_model.DeleteFileByID(ctx, pf.ID); err != nil { + return err + } + } + + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + Type: packages_model.TypeContainer, + Version: packages_model.SearchValue{ + ExactMatch: true, + Value: container_model.UploadVersion, + }, + IsInternal: true, + HasFiles: util.OptionalBoolFalse, + }) + if err != nil { + return err + } + + for _, pv := range pvs { + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeVersion, pv.ID); err != nil { + return err + } + + if err := packages_model.DeleteVersionByID(ctx, pv.ID); err != nil { + return err + } + } + + return nil +} diff --git a/services/packages/packages.go b/services/packages/packages.go new file mode 100644 index 0000000000..7f25fce5b8 --- /dev/null +++ b/services/packages/packages.go @@ -0,0 +1,453 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
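
services/packages/packages.go, which follows, wraps package creation in a database transaction while the blob is written to the content store along the way; when the transaction does not commit, a deferred function flagged by removeBlob deletes the just-stored blob so the store and the database stay consistent. A reduced sketch of that flag-guarded defer pattern (the file-based store and the commit callback are stand-ins, not Gitea's API):

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
)

// storeThenCommit writes content to the store first, then runs the database
// step; the deferred cleanup only fires when the database step did not succeed.
func storeThenCommit(dir, name string, content []byte, commit func() error) error {
	p := filepath.Join(dir, name)
	if err := os.WriteFile(p, content, 0o600); err != nil {
		return err
	}

	removeBlob := false
	defer func() {
		if removeBlob {
			// Roll back the content-store write because the metadata
			// never made it into the database.
			_ = os.Remove(p)
		}
	}()

	if err := commit(); err != nil {
		removeBlob = true
		return err
	}
	return nil
}

func main() {
	dir, _ := os.MkdirTemp("", "blobs")
	defer os.RemoveAll(dir)

	// Successful commit: the blob stays on disk.
	_ = storeThenCommit(dir, "ok.bin", []byte("data"), func() error { return nil })

	// Failed commit: the blob written above is removed again.
	_ = storeThenCommit(dir, "broken.bin", []byte("data"), func() error {
		return errors.New("commit failed")
	})

	entries, _ := os.ReadDir(dir)
	for _, e := range entries {
		fmt.Println("kept:", e.Name()) // only ok.bin remains
	}
}
```
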
+ +package packages + +import ( + "context" + "fmt" + "io" + "strings" + "time" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/notification" + packages_module "code.gitea.io/gitea/modules/packages" + container_service "code.gitea.io/gitea/services/packages/container" +) + +// PackageInfo describes a package +type PackageInfo struct { + Owner *user_model.User + PackageType packages_model.Type + Name string + Version string +} + +// PackageCreationInfo describes a package to create +type PackageCreationInfo struct { + PackageInfo + SemverCompatible bool + Creator *user_model.User + Metadata interface{} + Properties map[string]string +} + +// PackageFileInfo describes a package file +type PackageFileInfo struct { + Filename string + CompositeKey string +} + +// PackageFileCreationInfo describes a package file to create +type PackageFileCreationInfo struct { + PackageFileInfo + Data packages_module.HashedSizeReader + IsLead bool + Properties map[string]string + OverwriteExisting bool +} + +// CreatePackageAndAddFile creates a package with a file. If the same package exists already, ErrDuplicatePackageVersion is returned +func CreatePackageAndAddFile(pvci *PackageCreationInfo, pfci *PackageFileCreationInfo) (*packages_model.PackageVersion, *packages_model.PackageFile, error) { + return createPackageAndAddFile(pvci, pfci, false) +} + +// CreatePackageOrAddFileToExisting creates a package with a file or adds the file if the package exists already +func CreatePackageOrAddFileToExisting(pvci *PackageCreationInfo, pfci *PackageFileCreationInfo) (*packages_model.PackageVersion, *packages_model.PackageFile, error) { + return createPackageAndAddFile(pvci, pfci, true) +} + +func createPackageAndAddFile(pvci *PackageCreationInfo, pfci *PackageFileCreationInfo, allowDuplicate bool) (*packages_model.PackageVersion, *packages_model.PackageFile, error) { + ctx, committer, err := db.TxContext() + if err != nil { + return nil, nil, err + } + defer committer.Close() + + pv, created, err := createPackageAndVersion(ctx, pvci, allowDuplicate) + if err != nil { + return nil, nil, err + } + + pf, pb, blobCreated, err := addFileToPackageVersion(ctx, pv, pfci) + removeBlob := false + defer func() { + if blobCreated && removeBlob { + contentStore := packages_module.NewContentStore() + if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { + log.Error("Error deleting package blob from content store: %v", err) + } + } + }() + if err != nil { + removeBlob = true + return nil, nil, err + } + + if err := committer.Commit(); err != nil { + removeBlob = true + return nil, nil, err + } + + if created { + pd, err := packages_model.GetPackageDescriptor(ctx, pv) + if err != nil { + return nil, nil, err + } + + notification.NotifyPackageCreate(pvci.Creator, pd) + } + + return pv, pf, nil +} + +func createPackageAndVersion(ctx context.Context, pvci *PackageCreationInfo, allowDuplicate bool) (*packages_model.PackageVersion, bool, error) { + log.Trace("Creating package: %v, %v, %v, %s, %s, %+v, %v", pvci.Creator.ID, pvci.Owner.ID, pvci.PackageType, pvci.Name, pvci.Version, pvci.Properties, allowDuplicate) + + p := &packages_model.Package{ + OwnerID: pvci.Owner.ID, + Type: pvci.PackageType, + Name: pvci.Name, + LowerName: strings.ToLower(pvci.Name), + SemverCompatible: pvci.SemverCompatible, + } + var err 
error + if p, err = packages_model.TryInsertPackage(ctx, p); err != nil { + if err != packages_model.ErrDuplicatePackage { + log.Error("Error inserting package: %v", err) + return nil, false, err + } + } + + metadataJSON, err := json.Marshal(pvci.Metadata) + if err != nil { + return nil, false, err + } + + created := true + pv := &packages_model.PackageVersion{ + PackageID: p.ID, + CreatorID: pvci.Creator.ID, + Version: pvci.Version, + LowerVersion: strings.ToLower(pvci.Version), + MetadataJSON: string(metadataJSON), + } + if pv, err = packages_model.GetOrInsertVersion(ctx, pv); err != nil { + if err == packages_model.ErrDuplicatePackageVersion { + created = false + } + if err != packages_model.ErrDuplicatePackageVersion || !allowDuplicate { + log.Error("Error inserting package: %v", err) + return nil, false, err + } + } + + if created { + for name, value := range pvci.Properties { + if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeVersion, pv.ID, name, value); err != nil { + log.Error("Error setting package version property: %v", err) + return nil, false, err + } + } + } + + return pv, created, nil +} + +// AddFileToExistingPackage adds a file to an existing package. If the package does not exist, ErrPackageNotExist is returned +func AddFileToExistingPackage(pvi *PackageInfo, pfci *PackageFileCreationInfo) (*packages_model.PackageVersion, *packages_model.PackageFile, error) { + ctx, committer, err := db.TxContext() + if err != nil { + return nil, nil, err + } + defer committer.Close() + + pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version) + if err != nil { + return nil, nil, err + } + + pf, pb, blobCreated, err := addFileToPackageVersion(ctx, pv, pfci) + removeBlob := false + defer func() { + if removeBlob { + contentStore := packages_module.NewContentStore() + if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { + log.Error("Error deleting package blob from content store: %v", err) + } + } + }() + if err != nil { + removeBlob = blobCreated + return nil, nil, err + } + + if err := committer.Commit(); err != nil { + removeBlob = blobCreated + return nil, nil, err + } + + return pv, pf, nil +} + +// NewPackageBlob creates a package blob instance +func NewPackageBlob(hsr packages_module.HashedSizeReader) *packages_model.PackageBlob { + hashMD5, hashSHA1, hashSHA256, hashSHA512 := hsr.Sums() + + return &packages_model.PackageBlob{ + Size: hsr.Size(), + HashMD5: fmt.Sprintf("%x", hashMD5), + HashSHA1: fmt.Sprintf("%x", hashSHA1), + HashSHA256: fmt.Sprintf("%x", hashSHA256), + HashSHA512: fmt.Sprintf("%x", hashSHA512), + } +} + +func addFileToPackageVersion(ctx context.Context, pv *packages_model.PackageVersion, pfci *PackageFileCreationInfo) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error) { + log.Trace("Adding package file: %v, %s", pv.ID, pfci.Filename) + + pb, exists, err := packages_model.GetOrInsertBlob(ctx, NewPackageBlob(pfci.Data)) + if err != nil { + log.Error("Error inserting package blob: %v", err) + return nil, nil, false, err + } + if !exists { + contentStore := packages_module.NewContentStore() + if err := contentStore.Save(packages_module.BlobHash256Key(pb.HashSHA256), pfci.Data, pfci.Data.Size()); err != nil { + log.Error("Error saving package blob in content store: %v", err) + return nil, nil, false, err + } + } + + if pfci.OverwriteExisting { + pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, pfci.Filename, 
pfci.CompositeKey) + if err != nil && err != packages_model.ErrPackageFileNotExist { + return nil, pb, !exists, err + } + if pf != nil { + // Short circuit if blob is the same + if pf.BlobID == pb.ID { + return pf, pb, !exists, nil + } + + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeFile, pf.ID); err != nil { + return nil, pb, !exists, err + } + if err := packages_model.DeleteFileByID(ctx, pf.ID); err != nil { + return nil, pb, !exists, err + } + } + } + + pf := &packages_model.PackageFile{ + VersionID: pv.ID, + BlobID: pb.ID, + Name: pfci.Filename, + LowerName: strings.ToLower(pfci.Filename), + CompositeKey: pfci.CompositeKey, + IsLead: pfci.IsLead, + } + if pf, err = packages_model.TryInsertFile(ctx, pf); err != nil { + if err != packages_model.ErrDuplicatePackageFile { + log.Error("Error inserting package file: %v", err) + } + return nil, pb, !exists, err + } + + for name, value := range pfci.Properties { + if _, err := packages_model.InsertProperty(ctx, packages_model.PropertyTypeFile, pf.ID, name, value); err != nil { + log.Error("Error setting package file property: %v", err) + return pf, pb, !exists, err + } + } + + return pf, pb, !exists, nil +} + +// RemovePackageVersionByNameAndVersion deletes a package version and all associated files +func RemovePackageVersionByNameAndVersion(doer *user_model.User, pvi *PackageInfo) error { + pv, err := packages_model.GetVersionByNameAndVersion(db.DefaultContext, pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version) + if err != nil { + return err + } + + return RemovePackageVersion(doer, pv) +} + +// RemovePackageVersion deletes the package version and all associated files +func RemovePackageVersion(doer *user_model.User, pv *packages_model.PackageVersion) error { + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + pd, err := packages_model.GetPackageDescriptor(ctx, pv) + if err != nil { + return err + } + + log.Trace("Deleting package: %v", pv.ID) + + if err := DeletePackageVersionAndReferences(ctx, pv); err != nil { + return err + } + + if err := committer.Commit(); err != nil { + return err + } + + notification.NotifyPackageDelete(doer, pd) + + return nil +} + +// DeletePackageVersionAndReferences deletes the package version and its properties and files +func DeletePackageVersionAndReferences(ctx context.Context, pv *packages_model.PackageVersion) error { + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeVersion, pv.ID); err != nil { + return err + } + + pfs, err := packages_model.GetFilesByVersionID(ctx, pv.ID) + if err != nil { + return err + } + + for _, pf := range pfs { + if err := DeletePackageFile(ctx, pf); err != nil { + return err + } + } + + return packages_model.DeleteVersionByID(ctx, pv.ID) +} + +// DeletePackageFile deletes the package file and its properties +func DeletePackageFile(ctx context.Context, pf *packages_model.PackageFile) error { + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypeFile, pf.ID); err != nil { + return err + } + return packages_model.DeleteFileByID(ctx, pf.ID) +} + +// Cleanup removes expired package data +func Cleanup(unused context.Context, olderThan time.Duration) error { + ctx, committer, err := db.TxContext() + if err != nil { + return err + } + defer committer.Close() + + if err := container_service.Cleanup(ctx, olderThan); err != nil { + return err + } + + if err := packages_model.DeletePackagesIfUnreferenced(ctx); err != nil { + return err + } + + pbs, 
err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan) + if err != nil { + return err + } + + for _, pb := range pbs { + if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil { + return err + } + } + + if err := committer.Commit(); err != nil { + return err + } + + contentStore := packages_module.NewContentStore() + for _, pb := range pbs { + if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { + log.Error("Error deleting package blob [%v]: %v", pb.ID, err) + } + } + + return nil +} + +// GetFileStreamByPackageNameAndVersion returns the content of the specific package file +func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo) (io.ReadCloser, *packages_model.PackageFile, error) { + log.Trace("Getting package file stream: %v, %v, %s, %s, %s, %s", pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version, pfi.Filename, pfi.CompositeKey) + + pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version) + if err != nil { + if err == packages_model.ErrPackageNotExist { + return nil, nil, err + } + log.Error("Error getting package: %v", err) + return nil, nil, err + } + + return GetFileStreamByPackageVersion(ctx, pv, pfi) +} + +// GetFileStreamByPackageVersionAndFileID returns the content of the specific package file +func GetFileStreamByPackageVersionAndFileID(ctx context.Context, owner *user_model.User, versionID, fileID int64) (io.ReadCloser, *packages_model.PackageFile, error) { + log.Trace("Getting package file stream: %v, %v, %v", owner.ID, versionID, fileID) + + pv, err := packages_model.GetVersionByID(ctx, versionID) + if err != nil { + if err != packages_model.ErrPackageNotExist { + log.Error("Error getting package version: %v", err) + } + return nil, nil, err + } + + p, err := packages_model.GetPackageByID(ctx, pv.PackageID) + if err != nil { + log.Error("Error getting package: %v", err) + return nil, nil, err + } + + if p.OwnerID != owner.ID { + return nil, nil, packages_model.ErrPackageNotExist + } + + pf, err := packages_model.GetFileForVersionByID(ctx, versionID, fileID) + if err != nil { + log.Error("Error getting file: %v", err) + return nil, nil, err + } + + return GetPackageFileStream(ctx, pf) +} + +// GetFileStreamByPackageVersion returns the content of the specific package file +func GetFileStreamByPackageVersion(ctx context.Context, pv *packages_model.PackageVersion, pfi *PackageFileInfo) (io.ReadCloser, *packages_model.PackageFile, error) { + pf, err := packages_model.GetFileForVersionByName(db.DefaultContext, pv.ID, pfi.Filename, pfi.CompositeKey) + if err != nil { + return nil, nil, err + } + + return GetPackageFileStream(ctx, pf) +} + +// GetPackageFileStream returns the content of the specific package file +func GetPackageFileStream(ctx context.Context, pf *packages_model.PackageFile) (io.ReadCloser, *packages_model.PackageFile, error) { + pb, err := packages_model.GetBlobByID(ctx, pf.BlobID) + if err != nil { + return nil, nil, err + } + + s, err := packages_module.NewContentStore().Get(packages_module.BlobHash256Key(pb.HashSHA256)) + if err == nil { + if pf.IsLead { + if err := packages_model.IncrementDownloadCounter(ctx, pf.VersionID); err != nil { + log.Error("Error incrementing download counter: %v", err) + } + } + } + return s, pf, err +} diff --git a/services/pull/check.go b/services/pull/check.go index b1e9237d11..6852940b22 100644 --- a/services/pull/check.go +++ b/services/pull/check.go @@ -7,6 +7,7 @@ 
package pull import ( "context" + "errors" "fmt" "os" "strconv" @@ -25,14 +26,25 @@ import ( "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/modules/util" + asymkey_service "code.gitea.io/gitea/services/asymkey" ) -// prQueue represents a queue to handle update pull request tests -var prQueue queue.UniqueQueue +// prPatchCheckerQueue represents a queue to handle update pull request tests +var prPatchCheckerQueue queue.UniqueQueue + +var ( + ErrIsClosed = errors.New("pull is closed") + ErrUserNotAllowedToMerge = models.ErrDisallowedToMerge{} + ErrHasMerged = errors.New("has already been merged") + ErrIsWorkInProgress = errors.New("work in progress PRs cannot be merged") + ErrIsChecking = errors.New("cannot merge while conflict checking is in progress") + ErrNotMergableState = errors.New("not in mergeable state") + ErrDependenciesLeft = errors.New("is blocked by an open dependency") +) // AddToTaskQueue adds itself to pull request test task queue. func AddToTaskQueue(pr *models.PullRequest) { - err := prQueue.PushFunc(strconv.FormatInt(pr.ID, 10), func() error { + err := prPatchCheckerQueue.PushFunc(strconv.FormatInt(pr.ID, 10), func() error { pr.Status = models.PullRequestStatusChecking err := pr.UpdateColsIfNotMerged("status") if err != nil { @@ -47,6 +59,85 @@ func AddToTaskQueue(pr *models.PullRequest) { } } +// CheckPullMergable check if the pull mergable based on all conditions (branch protection, merge options, ...) +func CheckPullMergable(stdCtx context.Context, doer *user_model.User, perm *models.Permission, pr *models.PullRequest, manuallMerge, force bool) error { + return db.WithTx(func(ctx context.Context) error { + if pr.HasMerged { + return ErrHasMerged + } + + if err := pr.LoadIssueCtx(ctx); err != nil { + return err + } else if pr.Issue.IsClosed { + return ErrIsClosed + } + + if allowedMerge, err := IsUserAllowedToMerge(ctx, pr, *perm, doer); err != nil { + return err + } else if !allowedMerge { + return ErrUserNotAllowedToMerge + } + + if manuallMerge { + // don't check rules to "auto merge", doer is going to mark this pull as merged manually + return nil + } + + if pr.IsWorkInProgress() { + return ErrIsWorkInProgress + } + + if !pr.CanAutoMerge() { + return ErrNotMergableState + } + + if pr.IsChecking() { + return ErrIsChecking + } + + if err := CheckPullBranchProtections(ctx, pr, false); err != nil { + if models.IsErrDisallowedToMerge(err) { + if force { + if isRepoAdmin, err2 := models.IsUserRepoAdminCtx(ctx, pr.BaseRepo, doer); err2 != nil { + return err2 + } else if !isRepoAdmin { + return err + } + } + } else { + return err + } + } + + if _, err := isSignedIfRequired(ctx, pr, doer); err != nil { + return err + } + + if noDeps, err := models.IssueNoDependenciesLeft(ctx, pr.Issue); err != nil { + return err + } else if !noDeps { + return ErrDependenciesLeft + } + + return nil + }, stdCtx) +} + +// isSignedIfRequired check if merge will be signed if required +func isSignedIfRequired(ctx context.Context, pr *models.PullRequest, doer *user_model.User) (bool, error) { + if err := pr.LoadProtectedBranchCtx(ctx); err != nil { + return false, err + } + + if pr.ProtectedBranch == nil || !pr.ProtectedBranch.RequireSignedCommits { + return true, nil + } + + sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.GetGitRefName()) + + return sign, err +} + // checkAndUpdateStatus checks if pull request is possible to leaving checking status, // and set to be either conflict or mergeable. 
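The package-level sentinel errors introduced above (ErrIsClosed, ErrHasMerged, ErrIsWorkInProgress, ...) let callers of CheckPullMergable branch on why a merge was refused instead of matching error strings. A minimal sketch of that pattern, with invented errClosed/errWorkInProgress sentinels standing in for the real ones:

package main

import (
    "errors"
    "fmt"
)

// Sentinel errors make refusal reasons comparable with errors.Is.
var (
    errClosed         = errors.New("pull is closed")
    errWorkInProgress = errors.New("work in progress PRs cannot be merged")
)

func checkMergeable(closed, wip bool) error {
    if closed {
        return errClosed
    }
    if wip {
        return errWorkInProgress
    }
    return nil
}

func main() {
    err := checkMergeable(false, true)
    switch {
    case err == nil:
        fmt.Println("mergeable")
    case errors.Is(err, errClosed):
        fmt.Println("refused: pull is closed")
    case errors.Is(err, errWorkInProgress):
        fmt.Println("refused: work in progress")
    }
}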
func checkAndUpdateStatus(pr *models.PullRequest) { @@ -56,7 +147,7 @@ func checkAndUpdateStatus(pr *models.PullRequest) { } // Make sure there is no waiting test to process before leaving the checking status. - has, err := prQueue.Has(strconv.FormatInt(pr.ID, 10)) + has, err := prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10)) if err != nil { log.Error("Unable to check if the queue is waiting to reprocess pr.ID %d. Error: %v", pr.ID, err) } @@ -92,8 +183,8 @@ func getMergeCommit(ctx context.Context, pr *models.PullRequest) (*git.Commit, e headFile := pr.GetGitRefName() // Check if a pull request is merged into BaseBranch - _, err = git.NewCommand(ctx, "merge-base", "--is-ancestor", headFile, pr.BaseBranch). - RunInDirWithEnv(pr.BaseRepo.RepoPath(), []string{"GIT_INDEX_FILE=" + indexTmpPath, "GIT_DIR=" + pr.BaseRepo.RepoPath()}) + _, _, err = git.NewCommand(ctx, "merge-base", "--is-ancestor", headFile, pr.BaseBranch). + RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath(), Env: []string{"GIT_INDEX_FILE=" + indexTmpPath, "GIT_DIR=" + pr.BaseRepo.RepoPath()}}) if err != nil { // Errors are signaled by a non-zero status that is not 1 if strings.Contains(err.Error(), "exit status 1") { @@ -113,8 +204,8 @@ func getMergeCommit(ctx context.Context, pr *models.PullRequest) (*git.Commit, e cmd := commitID[:40] + ".." + pr.BaseBranch // Get the commit from BaseBranch where the pull request got merged - mergeCommit, err := git.NewCommand(ctx, "rev-list", "--ancestry-path", "--merges", "--reverse", cmd). - RunInDirWithEnv("", []string{"GIT_INDEX_FILE=" + indexTmpPath, "GIT_DIR=" + pr.BaseRepo.RepoPath()}) + mergeCommit, _, err := git.NewCommand(ctx, "rev-list", "--ancestry-path", "--merges", "--reverse", cmd). + RunStdString(&git.RunOpts{Dir: "", Env: []string{"GIT_INDEX_FILE=" + indexTmpPath, "GIT_DIR=" + pr.BaseRepo.RepoPath()}}) if err != nil { return nil, fmt.Errorf("git rev-list --ancestry-path --merges --reverse: %v", err) } else if len(mergeCommit) < 40 { @@ -122,7 +213,7 @@ func getMergeCommit(ctx context.Context, pr *models.PullRequest) (*git.Commit, e mergeCommit = commitID[:40] } - gitRepo, err := git.OpenRepositoryCtx(ctx, pr.BaseRepo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) if err != nil { return nil, fmt.Errorf("OpenRepository: %v", err) } @@ -139,7 +230,7 @@ func getMergeCommit(ctx context.Context, pr *models.PullRequest) (*git.Commit, e // manuallyMerged checks if a pull request got manually merged // When a pull request got manually merged mark the pull request as merged func manuallyMerged(ctx context.Context, pr *models.PullRequest) bool { - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("PullRequest[%d].LoadBaseRepo: %v", pr.ID, err) return false } @@ -168,7 +259,7 @@ func manuallyMerged(ctx context.Context, pr *models.PullRequest) bool { // When the commit author is unknown set the BaseRepo owner as merger if merger == nil { if pr.BaseRepo.Owner == nil { - if err = pr.BaseRepo.GetOwner(db.DefaultContext); err != nil { + if err = pr.BaseRepo.GetOwner(ctx); err != nil { log.Error("BaseRepo.GetOwner[%d]: %v", pr.ID, err) return false } @@ -178,7 +269,7 @@ func manuallyMerged(ctx context.Context, pr *models.PullRequest) bool { pr.Merger = merger pr.MergerID = merger.ID - if merged, err := pr.SetMerged(); err != nil { + if merged, err := pr.SetMerged(ctx); err != nil { log.Error("PullRequest[%d].setMerged : %v", pr.ID, err) return false } else if !merged { @@ -205,7 +296,7 @@ func 
InitializePullRequests(ctx context.Context) { case <-ctx.Done(): return default: - if err := prQueue.PushFunc(strconv.FormatInt(prID, 10), func() error { + if err := prPatchCheckerQueue.PushFunc(strconv.FormatInt(prID, 10), func() error { log.Trace("Adding PR ID: %d to the pull requests patch checking queue", prID) return nil }); err != nil { @@ -226,10 +317,12 @@ func handle(data ...queue.Data) []queue.Data { } func testPR(id int64) { + pullWorkingPool.CheckIn(fmt.Sprint(id)) + defer pullWorkingPool.CheckOut(fmt.Sprint(id)) ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("Test PR[%d] from patch checking queue", id)) defer finished() - pr, err := models.GetPullRequestByID(id) + pr, err := models.GetPullRequestByID(ctx, id) if err != nil { log.Error("GetPullRequestByID[%d]: %v", id, err) return @@ -270,13 +363,13 @@ func CheckPrsForBaseBranch(baseRepo *repo_model.Repository, baseBranchName strin // Init runs the task queue to test all the checking status pull requests func Init() error { - prQueue = queue.CreateUniqueQueue("pr_patch_checker", handle, "") + prPatchCheckerQueue = queue.CreateUniqueQueue("pr_patch_checker", handle, "") - if prQueue == nil { + if prPatchCheckerQueue == nil { return fmt.Errorf("Unable to create pr_patch_checker Queue") } - go graceful.GetManager().RunWithShutdownFns(prQueue.Run) + go graceful.GetManager().RunWithShutdownFns(prPatchCheckerQueue.Run) go graceful.GetManager().RunWithShutdownContext(InitializePullRequests) return nil } diff --git a/services/pull/check_test.go b/services/pull/check_test.go index 4cdd17cc7b..bc4c45ffad 100644 --- a/services/pull/check_test.go +++ b/services/pull/check_test.go @@ -32,16 +32,16 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { WorkerPoolConfiguration: queue.WorkerPoolConfiguration{ QueueLength: 10, BatchLength: 1, + Name: "temporary-queue", }, Workers: 1, - Name: "temporary-queue", }, "") assert.NoError(t, err) queueShutdown := []func(){} queueTerminate := []func(){} - prQueue = q.(queue.UniqueQueue) + prPatchCheckerQueue = q.(queue.UniqueQueue) pr := unittest.AssertExistsAndLoadBean(t, &models.PullRequest{ID: 2}).(*models.PullRequest) AddToTaskQueue(pr) @@ -51,11 +51,11 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { return pr.Status == models.PullRequestStatusChecking }, 1*time.Second, 100*time.Millisecond) - has, err := prQueue.Has(strconv.FormatInt(pr.ID, 10)) + has, err := prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10)) assert.True(t, has) assert.NoError(t, err) - prQueue.Run(func(shutdown func()) { + prPatchCheckerQueue.Run(func(shutdown func()) { queueShutdown = append(queueShutdown, shutdown) }, func(terminate func()) { queueTerminate = append(queueTerminate, terminate) @@ -68,7 +68,7 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { assert.Fail(t, "Timeout: nothing was added to pullRequestQueue") } - has, err = prQueue.Has(strconv.FormatInt(pr.ID, 10)) + has, err = prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10)) assert.False(t, has) assert.NoError(t, err) @@ -82,5 +82,5 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) { callback() } - prQueue = nil + prPatchCheckerQueue = nil } diff --git a/services/pull/commit_status.go b/services/pull/commit_status.go index be8df0c9b1..ec4cc2aa07 100644 --- a/services/pull/commit_status.go +++ b/services/pull/commit_status.go @@ -83,7 +83,7 @@ func IsCommitStatusContextSuccess(commitStatuses []*models.CommitStatus, require // IsPullCommitStatusPass returns if all required status checks 
PASS func IsPullCommitStatusPass(ctx context.Context, pr *models.PullRequest) (bool, error) { - if err := pr.LoadProtectedBranch(); err != nil { + if err := pr.LoadProtectedBranchCtx(ctx); err != nil { return false, errors.Wrap(err, "GetLatestCommitStatus") } if pr.ProtectedBranch == nil || !pr.ProtectedBranch.EnableStatusCheck { @@ -100,7 +100,7 @@ func IsPullCommitStatusPass(ctx context.Context, pr *models.PullRequest) (bool, // GetPullRequestCommitStatusState returns pull request merged commit status state func GetPullRequestCommitStatusState(ctx context.Context, pr *models.PullRequest) (structs.CommitStatusState, error) { // Ensure HeadRepo is loaded - if err := pr.LoadHeadRepo(); err != nil { + if err := pr.LoadHeadRepoCtx(ctx); err != nil { return "", errors.Wrap(err, "LoadHeadRepo") } @@ -114,7 +114,7 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *models.PullRequest if pr.Flow == models.PullRequestFlowGithub && !headGitRepo.IsBranchExist(pr.HeadBranch) { return "", errors.New("Head branch does not exist, can not merge") } - if pr.Flow == models.PullRequestFlowAGit && !git.IsReferenceExist(headGitRepo.Ctx, headGitRepo.Path, pr.GetGitRefName()) { + if pr.Flow == models.PullRequestFlowAGit && !git.IsReferenceExist(ctx, headGitRepo.Path, pr.GetGitRefName()) { return "", errors.New("Head branch does not exist, can not merge") } @@ -128,14 +128,22 @@ func GetPullRequestCommitStatusState(ctx context.Context, pr *models.PullRequest return "", err } - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { return "", errors.Wrap(err, "LoadBaseRepo") } - commitStatuses, _, err := models.GetLatestCommitStatus(pr.BaseRepo.ID, sha, db.ListOptions{}) + commitStatuses, _, err := models.GetLatestCommitStatusCtx(ctx, pr.BaseRepo.ID, sha, db.ListOptions{}) if err != nil { return "", errors.Wrap(err, "GetLatestCommitStatus") } - return MergeRequiredContextsCommitStatus(commitStatuses, pr.ProtectedBranch.StatusCheckContexts), nil + if err := pr.LoadProtectedBranchCtx(ctx); err != nil { + return "", errors.Wrap(err, "LoadProtectedBranch") + } + var requiredContexts []string + if pr.ProtectedBranch != nil { + requiredContexts = pr.ProtectedBranch.StatusCheckContexts + } + + return MergeRequiredContextsCommitStatus(commitStatuses, requiredContexts), nil } diff --git a/services/pull/edits.go b/services/pull/edits.go new file mode 100644 index 0000000000..68515ec141 --- /dev/null +++ b/services/pull/edits.go @@ -0,0 +1,40 @@ +// Copyright 2022 The Gitea Authors. +// All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. 
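With the change above, GetPullRequestCommitStatusState loads the protected branch itself and passes an empty required-context list when the branch is unprotected, so the aggregation over commit statuses has to tolerate a nil slice. The sketch below shows one plausible aggregation over required contexts; it is a simplified stand-in, not the real MergeRequiredContextsCommitStatus, whose behaviour for an empty required list may differ.

package main

import "fmt"

type status struct {
    Context string
    State   string // "success", "failure" or "pending"
}

// requiredState combines only the required contexts: any failure wins,
// and a missing or pending required context keeps the result at pending.
func requiredState(statuses []status, required []string) string {
    if len(required) == 0 {
        return "success" // nothing is required in this simplified model
    }
    byContext := map[string]string{}
    for _, s := range statuses {
        byContext[s.Context] = s.State
    }
    result := "success"
    for _, name := range required {
        switch byContext[name] {
        case "failure":
            return "failure"
        case "success":
            // keep the current result
        default:
            result = "pending"
        }
    }
    return result
}

func main() {
    statuses := []status{{"ci/build", "success"}, {"ci/test", "pending"}}
    fmt.Println(requiredState(statuses, []string{"ci/build", "ci/test"})) // pending
    fmt.Println(requiredState(statuses, nil))                             // success
}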
+ +package pull + +import ( + "context" + "errors" + + "code.gitea.io/gitea/models" + unit_model "code.gitea.io/gitea/models/unit" + user_model "code.gitea.io/gitea/models/user" +) + +var ErrUserHasNoPermissionForAction = errors.New("user not allowed to do this action") + +// SetAllowEdits allow edits from maintainers to PRs +func SetAllowEdits(ctx context.Context, doer *user_model.User, pr *models.PullRequest, allow bool) error { + if doer == nil || !pr.Issue.IsPoster(doer.ID) { + return ErrUserHasNoPermissionForAction + } + + if err := pr.LoadHeadRepo(); err != nil { + return err + } + + permission, err := models.GetUserRepoPermission(ctx, pr.HeadRepo, doer) + if err != nil { + return err + } + + if !permission.CanWrite(unit_model.TypeCode) { + return ErrUserHasNoPermissionForAction + } + + pr.AllowMaintainerEdit = allow + return models.UpdateAllowEdits(ctx, pr) +} diff --git a/services/pull/main_test.go b/services/pull/main_test.go index 6059a291ad..5471686e72 100644 --- a/services/pull/main_test.go +++ b/services/pull/main_test.go @@ -13,5 +13,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } diff --git a/services/pull/merge.go b/services/pull/merge.go index cb857cc60d..0af3cc1613 100644 --- a/services/pull/merge.go +++ b/services/pull/merge.go @@ -13,11 +13,13 @@ import ( "os" "path/filepath" "regexp" + "strconv" "strings" "time" "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + pull_model "code.gitea.io/gitea/models/pull" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unit" user_model "code.gitea.io/gitea/models/user" @@ -26,24 +28,124 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/references" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" asymkey_service "code.gitea.io/gitea/services/asymkey" issue_service "code.gitea.io/gitea/services/issue" ) +// GetDefaultMergeMessage returns default message used when merging pull request +func GetDefaultMergeMessage(baseGitRepo *git.Repository, pr *models.PullRequest, mergeStyle repo_model.MergeStyle) (string, error) { + if err := pr.LoadHeadRepo(); err != nil { + return "", err + } + if err := pr.LoadBaseRepo(); err != nil { + return "", err + } + if pr.BaseRepo == nil { + return "", repo_model.ErrRepoNotExist{ID: pr.BaseRepoID} + } + + if err := pr.LoadIssue(); err != nil { + return "", err + } + + isExternalTracker := pr.BaseRepo.UnitEnabled(unit.TypeExternalTracker) + issueReference := "#" + if isExternalTracker { + issueReference = "!" 
+ } + + if mergeStyle != "" { + templateFilepath := fmt.Sprintf(".gitea/default_merge_message/%s_TEMPLATE.md", strings.ToUpper(string(mergeStyle))) + commit, err := baseGitRepo.GetBranchCommit(pr.BaseRepo.DefaultBranch) + if err != nil { + return "", err + } + templateContent, err := commit.GetFileContent(templateFilepath, setting.Repository.PullRequest.DefaultMergeMessageSize) + if err != nil { + if !git.IsErrNotExist(err) { + return "", err + } + } else { + vars := map[string]string{ + "BaseRepoOwnerName": pr.BaseRepo.OwnerName, + "BaseRepoName": pr.BaseRepo.Name, + "BaseBranch": pr.BaseBranch, + "HeadRepoOwnerName": "", + "HeadRepoName": "", + "HeadBranch": pr.HeadBranch, + "PullRequestTitle": pr.Issue.Title, + "PullRequestDescription": pr.Issue.Content, + "PullRequestPosterName": pr.Issue.Poster.Name, + "PullRequestIndex": strconv.FormatInt(pr.Index, 10), + "PullRequestReference": fmt.Sprintf("%s%d", issueReference, pr.Index), + } + if pr.HeadRepo != nil { + vars["HeadRepoOwnerName"] = pr.HeadRepo.OwnerName + vars["HeadRepoName"] = pr.HeadRepo.Name + } + refs, err := pr.ResolveCrossReferences(baseGitRepo.Ctx) + if err == nil { + closeIssueIndexes := make([]string, 0, len(refs)) + closeWord := "close" + if len(setting.Repository.PullRequest.CloseKeywords) > 0 { + closeWord = setting.Repository.PullRequest.CloseKeywords[0] + } + for _, ref := range refs { + if ref.RefAction == references.XRefActionCloses { + closeIssueIndexes = append(closeIssueIndexes, fmt.Sprintf("%s %s%d", closeWord, issueReference, ref.Issue.Index)) + } + } + if len(closeIssueIndexes) > 0 { + vars["ClosingIssues"] = strings.Join(closeIssueIndexes, ", ") + } else { + vars["ClosingIssues"] = "" + } + } + + return os.Expand(templateContent, func(s string) string { + return vars[s] + }), nil + } + } + + // Squash merge has a different from other styles. + if mergeStyle == repo_model.MergeStyleSquash { + return fmt.Sprintf("%s (%s%d)", pr.Issue.Title, issueReference, pr.Issue.Index), nil + } + + if pr.BaseRepoID == pr.HeadRepoID { + return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch), nil + } + + if pr.HeadRepo == nil { + return fmt.Sprintf("Merge pull request '%s' (%s%d) from :%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch), nil + } + + return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s:%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch), nil +} + // Merge merges pull request to base repository. // Caller should check PR is ready to be merged (review and status checks) -// FIXME: add repoWorkingPull make sure two merges does not happen at same time. 
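GetDefaultMergeMessage above expands ${...} placeholders from the per-style template (.gitea/default_merge_message/<STYLE>_TEMPLATE.md on the base branch) with os.Expand, mapping unknown names to the empty string. A small standalone example of that expansion, with a hypothetical template string and hand-built values:

package main

import (
    "fmt"
    "os"
)

func main() {
    // Hypothetical template text; the real templates are read from the repository.
    template := "Merge ${PullRequestReference}: ${PullRequestTitle} into ${BaseBranch}"

    vars := map[string]string{
        "PullRequestReference": "#42",
        "PullRequestTitle":     "Add packages service",
        "BaseBranch":           "main",
    }

    // Names missing from vars expand to "", matching the lookup used above.
    msg := os.Expand(template, func(name string) string { return vars[name] })
    fmt.Println(msg)
}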
-func Merge(ctx context.Context, pr *models.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string) (err error) { - if err = pr.LoadHeadRepo(); err != nil { +func Merge(ctx context.Context, pr *models.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string) error { + if err := pr.LoadHeadRepo(); err != nil { log.Error("LoadHeadRepo: %v", err) return fmt.Errorf("LoadHeadRepo: %v", err) - } else if err = pr.LoadBaseRepo(); err != nil { + } else if err := pr.LoadBaseRepo(); err != nil { log.Error("LoadBaseRepo: %v", err) return fmt.Errorf("LoadBaseRepo: %v", err) } + pullWorkingPool.CheckIn(fmt.Sprint(pr.ID)) + defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID)) + + // Removing an auto merge pull and ignore if not exist + if err := pull_model.DeleteScheduledAutoMerge(db.DefaultContext, pr.ID); err != nil && !db.IsErrNotExist(err) { + return err + } + prUnit, err := pr.BaseRepo.GetUnit(unit.TypePullRequests) if err != nil { log.Error("pr.BaseRepo.GetUnit(unit.TypePullRequests): %v", err) @@ -60,7 +162,9 @@ func Merge(ctx context.Context, pr *models.PullRequest, doer *user_model.User, b go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "") }() - pr.MergedCommitID, err = rawMerge(ctx, pr, doer, mergeStyle, expectedHeadCommitID, message) + // TODO: make it able to do this in a database session + mergeCtx := context.Background() + pr.MergedCommitID, err = rawMerge(mergeCtx, pr, doer, mergeStyle, expectedHeadCommitID, message) if err != nil { return err } @@ -69,18 +173,18 @@ func Merge(ctx context.Context, pr *models.PullRequest, doer *user_model.User, b pr.Merger = doer pr.MergerID = doer.ID - if _, err := pr.SetMerged(); err != nil { + if _, err := pr.SetMerged(ctx); err != nil { log.Error("setMerged [%d]: %v", pr.ID, err) } - if err := pr.LoadIssue(); err != nil { + if err := pr.LoadIssueCtx(ctx); err != nil { log.Error("loadIssue [%d]: %v", pr.ID, err) } - if err := pr.Issue.LoadRepo(); err != nil { + if err := pr.Issue.LoadRepo(ctx); err != nil { log.Error("loadRepo for issue [%d]: %v", pr.ID, err) } - if err := pr.Issue.Repo.GetOwner(db.DefaultContext); err != nil { + if err := pr.Issue.Repo.GetOwner(ctx); err != nil { log.Error("GetOwner for issue repo [%d]: %v", pr.ID, err) } @@ -90,17 +194,17 @@ func Merge(ctx context.Context, pr *models.PullRequest, doer *user_model.User, b cache.Remove(pr.Issue.Repo.GetCommitsCountCacheKey(pr.BaseBranch, true)) // Resolve cross references - refs, err := pr.ResolveCrossReferences() + refs, err := pr.ResolveCrossReferences(ctx) if err != nil { log.Error("ResolveCrossReferences: %v", err) return nil } for _, ref := range refs { - if err = ref.LoadIssue(); err != nil { + if err = ref.LoadIssueCtx(ctx); err != nil { return err } - if err = ref.Issue.LoadRepo(); err != nil { + if err = ref.Issue.LoadRepo(ctx); err != nil { return err } close := ref.RefAction == references.XRefActionCloses @@ -113,7 +217,6 @@ func Merge(ctx context.Context, pr *models.PullRequest, doer *user_model.User, b } } } - return nil } @@ -132,7 +235,7 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User return "", err } defer func() { - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("Merge: RemoveTemporaryPath: %s", err) } }() @@ -142,7 +245,7 @@ func rawMerge(ctx 
context.Context, pr *models.PullRequest, doer *user_model.User stagingBranch := "staging" if expectedHeadCommitID != "" { - trackingCommitID, err := git.NewCommand(ctx, "show-ref", "--hash", git.BranchPrefix+trackingBranch).RunInDir(tmpBasePath) + trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash", git.BranchPrefix+trackingBranch).RunStdString(&git.RunOpts{Dir: tmpBasePath}) if err != nil { log.Error("show-ref[%s] --hash refs/heads/trackingn: %v", tmpBasePath, git.BranchPrefix+trackingBranch, err) return "", fmt.Errorf("getDiffTree: %v", err) @@ -189,11 +292,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User // Switch off LFS process (set required, clean and smudge here also) if err := gitConfigCommand().AddArguments("filter.lfs.process", ""). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git config [filter.lfs.process -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git config [filter.lfs.process -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) @@ -202,11 +304,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User errbuf.Reset() if err := gitConfigCommand().AddArguments("filter.lfs.required", "false"). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git config [filter.lfs.required -> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git config [filter.lfs.required -> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) @@ -215,11 +316,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User errbuf.Reset() if err := gitConfigCommand().AddArguments("filter.lfs.clean", ""). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git config [filter.lfs.clean -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git config [filter.lfs.clean -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) @@ -228,11 +328,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User errbuf.Reset() if err := gitConfigCommand().AddArguments("filter.lfs.smudge", ""). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git config [filter.lfs.smudge -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git config [filter.lfs.smudge -> <> ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) @@ -241,11 +340,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User errbuf.Reset() if err := gitConfigCommand().AddArguments("core.sparseCheckout", "true"). 
- RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git config [core.sparseCheckout -> true ]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git config [core.sparsecheckout -> true]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) @@ -255,11 +353,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User // Read base branch index if err := git.NewCommand(ctx, "read-tree", "HEAD"). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git read-tree HEAD: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("Unable to read base branch in to the index: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) @@ -316,11 +413,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User case repo_model.MergeStyleRebaseMerge: // Checkout head branch if err := git.NewCommand(ctx, "checkout", "-b", stagingBranch, trackingBranch). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) @@ -330,11 +426,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User // Rebase before merging if err := git.NewCommand(ctx, "rebase", baseBranch). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { // Rebase will leave a REBASE_HEAD file in .git if there is a conflict if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil { @@ -384,11 +479,10 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User // Checkout base branch again if err := git.NewCommand(ctx, "checkout", baseBranch). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git checkout base prior to merge post staging rebase [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) @@ -430,12 +524,11 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User sig := pr.Issue.Poster.NewGitSig() if signArg == "" { if err := git.NewCommand(ctx, "commit", fmt.Sprintf("--author='%s <%s>'", sig.Name, sig.Email), "-m", message). 
- RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) @@ -446,12 +539,11 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String()) } if err := git.NewCommand(ctx, "commit", signArg, fmt.Sprintf("--author='%s <%s>'", sig.Name, sig.Email), "-m", message). - RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) return "", fmt.Errorf("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) @@ -487,7 +579,7 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User } var headUser *user_model.User - err = pr.HeadRepo.GetOwner(db.DefaultContext) + err = pr.HeadRepo.GetOwner(ctx) if err != nil { if !user_model.IsErrUserNotExist(err) { log.Error("Can't find user: %d for head repository - %v", pr.HeadRepo.OwnerID, err) @@ -499,7 +591,7 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User headUser = pr.HeadRepo.Owner } - env = models.FullPushingEnvironment( + env = repo_module.FullPushingEnvironment( headUser, doer, pr.BaseRepo, @@ -516,12 +608,13 @@ func rawMerge(ctx context.Context, pr *models.PullRequest, doer *user_model.User } // Push back to upstream. - if err := pushCmd.RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + // TODO: this cause an api call to "/api/internal/hook/post-receive/...", + // that prevents us from doint the whole merge in one db transaction + if err := pushCmd.Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { if strings.Contains(errbuf.String(), "non-fast-forward") { return "", &git.ErrPushOutOfDate{ @@ -550,24 +643,22 @@ func commitAndSignNoAuthor(ctx context.Context, pr *models.PullRequest, message, var outbuf, errbuf strings.Builder if signArg == "" { if err := git.NewCommand(ctx, "commit", "-m", message). 
- RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) return fmt.Errorf("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) } } else { if err := git.NewCommand(ctx, "commit", signArg, "-m", message). - RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) return fmt.Errorf("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) @@ -578,11 +669,10 @@ func commitAndSignNoAuthor(ctx context.Context, pr *models.PullRequest, message, func runMergeCommand(pr *models.PullRequest, mergeStyle repo_model.MergeStyle, cmd *git.Command, tmpBasePath string) error { var outbuf, errbuf strings.Builder - if err := cmd.RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + if err := cmd.Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { // Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict if _, statErr := os.Stat(filepath.Join(tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil { @@ -617,11 +707,10 @@ func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string) ( var outbuf, errbuf strings.Builder // Compute the diff-tree for sparse-checkout if err := git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-z", "--root", baseBranch, headBranch, "--"). 
- RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: repoPath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: repoPath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { return "", fmt.Errorf("git diff-tree [%s base:%s head:%s]: %s", repoPath, baseBranch, headBranch, errbuf.String()) } @@ -661,46 +750,31 @@ func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string) ( return out.String(), nil } -// IsSignedIfRequired check if merge will be signed if required -func IsSignedIfRequired(ctx context.Context, pr *models.PullRequest, doer *user_model.User) (bool, error) { - if err := pr.LoadProtectedBranch(); err != nil { - return false, err - } - - if pr.ProtectedBranch == nil || !pr.ProtectedBranch.RequireSignedCommits { - return true, nil - } - - sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.GetGitRefName()) - - return sign, err -} - // IsUserAllowedToMerge check if user is allowed to merge PR with given permissions and branch protections -func IsUserAllowedToMerge(pr *models.PullRequest, p models.Permission, user *user_model.User) (bool, error) { +func IsUserAllowedToMerge(ctx context.Context, pr *models.PullRequest, p models.Permission, user *user_model.User) (bool, error) { if user == nil { return false, nil } - err := pr.LoadProtectedBranch() + err := pr.LoadProtectedBranchCtx(ctx) if err != nil { return false, err } - if (p.CanWrite(unit.TypeCode) && pr.ProtectedBranch == nil) || (pr.ProtectedBranch != nil && models.IsUserMergeWhitelisted(pr.ProtectedBranch, user.ID, p)) { + if (p.CanWrite(unit.TypeCode) && pr.ProtectedBranch == nil) || (pr.ProtectedBranch != nil && models.IsUserMergeWhitelisted(ctx, pr.ProtectedBranch, user.ID, p)) { return true, nil } return false, nil } -// CheckPRReadyToMerge checks whether the PR is ready to be merged (reviews and status checks) -func CheckPRReadyToMerge(ctx context.Context, pr *models.PullRequest, skipProtectedFilesCheck bool) (err error) { - if err = pr.LoadBaseRepo(); err != nil { +// CheckPullBranchProtections checks whether the PR is ready to be merged (reviews and status checks) +func CheckPullBranchProtections(ctx context.Context, pr *models.PullRequest, skipProtectedFilesCheck bool) (err error) { + if err = pr.LoadBaseRepoCtx(ctx); err != nil { return fmt.Errorf("LoadBaseRepo: %v", err) } - if err = pr.LoadProtectedBranch(); err != nil { + if err = pr.LoadProtectedBranchCtx(ctx); err != nil { return fmt.Errorf("LoadProtectedBranch: %v", err) } if pr.ProtectedBranch == nil { @@ -712,29 +786,29 @@ func CheckPRReadyToMerge(ctx context.Context, pr *models.PullRequest, skipProtec return err } if !isPass { - return models.ErrNotAllowedToMerge{ + return models.ErrDisallowedToMerge{ Reason: "Not all required status checks successful", } } - if !pr.ProtectedBranch.HasEnoughApprovals(pr) { - return models.ErrNotAllowedToMerge{ + if !pr.ProtectedBranch.HasEnoughApprovals(ctx, pr) { + return models.ErrDisallowedToMerge{ Reason: "Does not have enough approvals", } } - if pr.ProtectedBranch.MergeBlockedByRejectedReview(pr) { - return models.ErrNotAllowedToMerge{ + if pr.ProtectedBranch.MergeBlockedByRejectedReview(ctx, pr) { + return models.ErrDisallowedToMerge{ Reason: "There are requested changes", } } - if pr.ProtectedBranch.MergeBlockedByOfficialReviewRequests(pr) { - return models.ErrNotAllowedToMerge{ + if pr.ProtectedBranch.MergeBlockedByOfficialReviewRequests(ctx, pr) { + return models.ErrDisallowedToMerge{ Reason: "There are official review 
requests", } } if pr.ProtectedBranch.MergeBlockedByOutdatedBranch(pr) { - return models.ErrNotAllowedToMerge{ + return models.ErrDisallowedToMerge{ Reason: "The head branch is behind the base branch", } } @@ -744,7 +818,7 @@ func CheckPRReadyToMerge(ctx context.Context, pr *models.PullRequest, skipProtec } if pr.ProtectedBranch.MergeBlockedByProtectedFiles(pr) { - return models.ErrNotAllowedToMerge{ + return models.ErrDisallowedToMerge{ Reason: "Changed protected files", } } @@ -753,52 +827,61 @@ func CheckPRReadyToMerge(ctx context.Context, pr *models.PullRequest, skipProtec } // MergedManually mark pr as merged manually -func MergedManually(pr *models.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, commitID string) (err error) { - prUnit, err := pr.BaseRepo.GetUnit(unit.TypePullRequests) - if err != nil { - return - } - prConfig := prUnit.PullRequestsConfig() +func MergedManually(pr *models.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, commitID string) error { + pullWorkingPool.CheckIn(fmt.Sprint(pr.ID)) + defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID)) - // Check if merge style is correct and allowed - if !prConfig.IsMergeStyleAllowed(repo_model.MergeStyleManuallyMerged) { - return models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: repo_model.MergeStyleManuallyMerged} - } + if err := db.WithTx(func(ctx context.Context) error { + prUnit, err := pr.BaseRepo.GetUnitCtx(ctx, unit.TypePullRequests) + if err != nil { + return err + } + prConfig := prUnit.PullRequestsConfig() - if len(commitID) < 40 { - return fmt.Errorf("Wrong commit ID") - } + // Check if merge style is correct and allowed + if !prConfig.IsMergeStyleAllowed(repo_model.MergeStyleManuallyMerged) { + return models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: repo_model.MergeStyleManuallyMerged} + } - commit, err := baseGitRepo.GetCommit(commitID) - if err != nil { - if git.IsErrNotExist(err) { + if len(commitID) < 40 { return fmt.Errorf("Wrong commit ID") } - return - } - ok, err := baseGitRepo.IsCommitInBranch(commitID, pr.BaseBranch) - if err != nil { - return - } - if !ok { - return fmt.Errorf("Wrong commit ID") - } + commit, err := baseGitRepo.GetCommit(commitID) + if err != nil { + if git.IsErrNotExist(err) { + return fmt.Errorf("Wrong commit ID") + } + return err + } + commitID = commit.ID.String() - pr.MergedCommitID = commitID - pr.MergedUnix = timeutil.TimeStamp(commit.Author.When.Unix()) - pr.Status = models.PullRequestStatusManuallyMerged - pr.Merger = doer - pr.MergerID = doer.ID + ok, err := baseGitRepo.IsCommitInBranch(commitID, pr.BaseBranch) + if err != nil { + return err + } + if !ok { + return fmt.Errorf("Wrong commit ID") + } - merged := false - if merged, err = pr.SetMerged(); err != nil { - return - } else if !merged { - return fmt.Errorf("SetMerged failed") + pr.MergedCommitID = commitID + pr.MergedUnix = timeutil.TimeStamp(commit.Author.When.Unix()) + pr.Status = models.PullRequestStatusManuallyMerged + pr.Merger = doer + pr.MergerID = doer.ID + + merged := false + if merged, err = pr.SetMerged(ctx); err != nil { + return err + } else if !merged { + return fmt.Errorf("SetMerged failed") + } + return nil + }); err != nil { + return err } notification.NotifyMergePullRequest(pr, doer) - log.Info("manuallyMerged[%d]: Marked as manually merged into %s/%s by commit id: %s", pr.ID, pr.BaseRepo.Name, pr.BaseBranch, commit.ID.String()) + log.Info("manuallyMerged[%d]: Marked as manually merged into %s/%s by commit id: %s", pr.ID, pr.BaseRepo.Name, pr.BaseBranch, 
commitID) return nil } diff --git a/services/pull/patch.go b/services/pull/patch.go index f401b85345..6e2889b060 100644 --- a/services/pull/patch.go +++ b/services/pull/patch.go @@ -20,6 +20,7 @@ import ( "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/util" "github.com/gobwas/glob" @@ -27,7 +28,7 @@ import ( // DownloadDiffOrPatch will write the patch for the pr to the writer func DownloadDiffOrPatch(ctx context.Context, pr *models.PullRequest, w io.Writer, patch, binary bool) error { - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("Unable to load base repository ID %d for pr #%d [%d]", pr.BaseRepoID, pr.Index, pr.ID) return err } @@ -64,19 +65,19 @@ func TestPatch(pr *models.PullRequest) error { return err } defer func() { - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("Merge: RemoveTemporaryPath: %s", err) } }() - gitRepo, err := git.OpenRepositoryCtx(ctx, tmpBasePath) + gitRepo, err := git.OpenRepository(ctx, tmpBasePath) if err != nil { return fmt.Errorf("OpenRepository: %v", err) } defer gitRepo.Close() // 1. update merge base - pr.MergeBase, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunInDir(tmpBasePath) + pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: tmpBasePath}) if err != nil { var err2 error pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base") @@ -166,7 +167,7 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, g } // Need to get the objects from the object db to attempt to merge - root, err := git.NewCommand(ctx, "unpack-file", file.stage1.sha).RunInDir(tmpBasePath) + root, _, err := git.NewCommand(ctx, "unpack-file", file.stage1.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath}) if err != nil { return fmt.Errorf("unable to get root object: %s at path: %s for merging. Error: %w", file.stage1.sha, file.stage1.path, err) } @@ -175,7 +176,7 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, g _ = util.Remove(filepath.Join(tmpBasePath, root)) }() - base, err := git.NewCommand(ctx, "unpack-file", file.stage2.sha).RunInDir(tmpBasePath) + base, _, err := git.NewCommand(ctx, "unpack-file", file.stage2.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath}) if err != nil { return fmt.Errorf("unable to get base object: %s at path: %s for merging. Error: %w", file.stage2.sha, file.stage2.path, err) } @@ -183,7 +184,7 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, g defer func() { _ = util.Remove(base) }() - head, err := git.NewCommand(ctx, "unpack-file", file.stage3.sha).RunInDir(tmpBasePath) + head, _, err := git.NewCommand(ctx, "unpack-file", file.stage3.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath}) if err != nil { return fmt.Errorf("unable to get head object:%s at path: %s for merging. Error: %w", file.stage3.sha, file.stage3.path, err) } @@ -193,13 +194,13 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, g }() // now git merge-file annoyingly takes a different order to the merge-tree ... 
- _, conflictErr := git.NewCommand(ctx, "merge-file", base, root, head).RunInDir(tmpBasePath) + _, _, conflictErr := git.NewCommand(ctx, "merge-file", base, root, head).RunStdString(&git.RunOpts{Dir: tmpBasePath}) if conflictErr != nil { return &errMergeConflict{file.stage2.path} } // base now contains the merged data - hash, err := git.NewCommand(ctx, "hash-object", "-w", "--path", file.stage2.path, base).RunInDir(tmpBasePath) + hash, _, err := git.NewCommand(ctx, "hash-object", "-w", "--path", file.stage2.path, base).RunStdString(&git.RunOpts{Dir: tmpBasePath}) if err != nil { return err } @@ -223,7 +224,7 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo defer cancel() // First we use read-tree to do a simple three-way merge - if _, err := git.NewCommand(ctx, "read-tree", "-m", base, ours, theirs).RunInDir(gitPath); err != nil { + if _, _, err := git.NewCommand(ctx, "read-tree", "-m", base, ours, theirs).RunStdString(&git.RunOpts{Dir: gitPath}); err != nil { log.Error("Unable to run read-tree -m! Error: %v", err) return false, nil, fmt.Errorf("unable to run read-tree -m! Error: %v", err) } @@ -270,6 +271,10 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo } func checkConflicts(ctx context.Context, pr *models.PullRequest, gitRepo *git.Repository, tmpBasePath string) (bool, error) { + // 1. checkConflicts resets the conflict status, so clear any previously recorded conflicted files first + pr.ConflictedFiles = nil + + // 2. AttemptThreeWayMerge first - this is much quicker than plain patch to base description := fmt.Sprintf("PR[%d] %s/%s#%d", pr.ID, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, pr.Index) conflict, _, err := AttemptThreeWayMerge(ctx, tmpBasePath, gitRepo, pr.MergeBase, "base", "tracking", description) @@ -278,7 +283,8 @@ func checkConflicts(ctx context.Context, pr *models.PullRequest, gitRepo *git.Re } if !conflict { - treeHash, err := git.NewCommand(ctx, "write-tree").RunInDir(tmpBasePath) + var treeHash string + treeHash, _, err = git.NewCommand(ctx, "write-tree").RunStdString(&git.RunOpts{Dir: tmpBasePath}) if err != nil { return false, err } @@ -290,16 +296,14 @@ func checkConflicts(ctx context.Context, pr *models.PullRequest, gitRepo *git.Re if treeHash == baseTree.ID.String() { log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID) pr.Status = models.PullRequestStatusEmpty - pr.ConflictedFiles = []string{} - pr.ChangedProtectedFiles = []string{} } return false, nil } - // OK read-tree has failed so we need to try a different thing - this might actually succeed where the above fails due to whitespace handling. + // 3. OK read-tree has failed so we need to try a different thing - this might actually succeed where the above fails due to whitespace handling. - // 1. Create a plain patch from head to base + // 3a. Create a plain patch from head to base tmpPatchFile, err := os.CreateTemp("", "patch") if err != nil { log.Error("Unable to create temporary patch file! Error: %v", err) @@ -322,34 +326,29 @@ func checkConflicts(ctx context.Context, pr *models.PullRequest, gitRepo *git.Re patchPath := tmpPatchFile.Name() tmpPatchFile.Close() - // 1a. if the size of that patch is 0 - there can be no conflicts! + // 3b. if the size of that patch is 0 - there can be no conflicts!
if stat.Size() == 0 { log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID) pr.Status = models.PullRequestStatusEmpty - pr.ConflictedFiles = []string{} - pr.ChangedProtectedFiles = []string{} return false, nil } log.Trace("PullRequest[%d].testPatch (patchPath): %s", pr.ID, patchPath) - // 2. preset the pr.Status as checking (this is not save at present) - pr.Status = models.PullRequestStatusChecking - - // 3. Read the base branch in to the index of the temporary repository - _, err = git.NewCommand(gitRepo.Ctx, "read-tree", "base").RunInDir(tmpBasePath) + // 4. Read the base branch in to the index of the temporary repository + _, _, err = git.NewCommand(gitRepo.Ctx, "read-tree", "base").RunStdString(&git.RunOpts{Dir: tmpBasePath}) if err != nil { return false, fmt.Errorf("git read-tree %s: %v", pr.BaseBranch, err) } - // 4. Now get the pull request configuration to check if we need to ignore whitespace + // 5. Now get the pull request configuration to check if we need to ignore whitespace prUnit, err := pr.BaseRepo.GetUnit(unit.TypePullRequests) if err != nil { return false, err } prConfig := prUnit.PullRequestsConfig() - // 5. Prepare the arguments to apply the patch against the index + // 6. Prepare the arguments to apply the patch against the index args := []string{"apply", "--check", "--cached"} if prConfig.IgnoreWhitespaceConflicts { args = append(args, "--ignore-whitespace") @@ -360,9 +359,8 @@ func checkConflicts(ctx context.Context, pr *models.PullRequest, gitRepo *git.Re is3way = true } args = append(args, patchPath) - pr.ConflictedFiles = make([]string, 0, 5) - // 6. Prep the pipe: + // 7. Prep the pipe: // - Here we could do the equivalent of: // `git apply --check --cached patch_file > conflicts` // Then iterate through the conflicts. However, that means storing all the conflicts @@ -380,13 +378,12 @@ func checkConflicts(ctx context.Context, pr *models.PullRequest, gitRepo *git.Re _ = stderrWriter.Close() }() - // 7. Run the check command + // 8. Run the check command conflict = false err = git.NewCommand(gitRepo.Ctx, args...). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stderr: stderrWriter, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stderr: stderrWriter, PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { // Close the writer end of the pipe to begin processing _ = stderrWriter.Close() @@ -448,14 +445,16 @@ func checkConflicts(ctx context.Context, pr *models.PullRequest, gitRepo *git.Re }, }) - // 8. If there is a conflict the `git apply` command will return a non-zero error code - so there will be a positive error. - if err != nil { + // 9. Check whether the number of found conflicted files is non-zero; "err" could be non-nil, so we should ignore it if we found conflicts. + // Note: "err" could be non-nil because, if 3-way merge is enabled, the apply command does not return an error when conflicts are found.
+ if len(pr.ConflictedFiles) > 0 { if conflict { pr.Status = models.PullRequestStatusConflict log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles) return true, nil } + } else if err != nil { return false, fmt.Errorf("git apply --check: %v", err) } return false, nil @@ -518,6 +517,11 @@ func CheckUnprotectedFiles(repo *git.Repository, oldCommitID, newCommitID string // checkPullFilesProtection check if pr changed protected files and save results func checkPullFilesProtection(pr *models.PullRequest, gitRepo *git.Repository) error { + if pr.Status == models.PullRequestStatusEmpty { + pr.ChangedProtectedFiles = nil + return nil + } + if err := pr.LoadProtectedBranch(); err != nil { return err } diff --git a/services/pull/patch_unmerged.go b/services/pull/patch_unmerged.go index abd54b07cf..3839419142 100644 --- a/services/pull/patch_unmerged.go +++ b/services/pull/patch_unmerged.go @@ -63,11 +63,10 @@ func readUnmergedLsFileLines(ctx context.Context, tmpBasePath string, outputChan stderr := &strings.Builder{} err = git.NewCommand(ctx, "ls-files", "-u", "-z"). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: lsFilesWriter, - Stderr: stderr, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: lsFilesWriter, + Stderr: stderr, PipelineFunc: func(_ context.Context, _ context.CancelFunc) error { _ = lsFilesWriter.Close() defer func() { diff --git a/services/pull/pull.go b/services/pull/pull.go index 82deb74a4e..b94b6769a4 100644 --- a/services/pull/pull.go +++ b/services/pull/pull.go @@ -24,10 +24,15 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/process" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/sync" issue_service "code.gitea.io/gitea/services/issue" ) +// TODO: use clustered lock (unique queue? or *abuse* cache) +var pullWorkingPool = sync.NewExclusivePool() + // NewPullRequest creates new pull request with labels for repository. func NewPullRequest(ctx context.Context, repo *repo_model.Repository, pull *models.Issue, labelIDs []int64, uuids []string, pr *models.PullRequest, assigneeIDs []int64) error { if err := TestPatch(pr); err != nil { @@ -69,7 +74,7 @@ func NewPullRequest(ctx context.Context, repo *repo_model.Repository, pull *mode return err } - mentions, err := pull.FindAndUpdateIssueMentions(db.DefaultContext, pull.Poster, pull.Content) + mentions, err := models.FindAndUpdateIssueMentions(ctx, pull, pull.Poster, pull.Content) if err != nil { return err } @@ -83,7 +88,7 @@ func NewPullRequest(ctx context.Context, repo *repo_model.Repository, pull *mode } // add first push codes comment - baseGitRepo, err := git.OpenRepositoryCtx(prCtx, pr.BaseRepo.RepoPath()) + baseGitRepo, err := git.OpenRepository(prCtx, pr.BaseRepo.RepoPath()) if err != nil { return err } @@ -124,6 +129,9 @@ func NewPullRequest(ctx context.Context, repo *repo_model.Repository, pull *mode // ChangeTargetBranch changes the target branch of this pull request, as the given user. 
func ChangeTargetBranch(ctx context.Context, pr *models.PullRequest, doer *user_model.User, targetBranch string) (err error) { + pullWorkingPool.CheckIn(fmt.Sprint(pr.ID)) + defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID)) + // Current target branch is already the same if pr.BaseBranch == targetBranch { return nil @@ -224,13 +232,13 @@ func checkForInvalidation(ctx context.Context, requests models.PullRequestList, if err != nil { return fmt.Errorf("GetRepositoryByID: %v", err) } - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { return fmt.Errorf("git.OpenRepository: %v", err) } go func() { // FIXME: graceful: We need to tell the manager we're doing something... - err := requests.InvalidateCodeComments(doer, gitRepo, branch) + err := requests.InvalidateCodeComments(ctx, doer, gitRepo, branch) if err != nil { log.Error("PullRequestList.InvalidateCodeComments: %v", err) } @@ -246,7 +254,7 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string, graceful.GetManager().RunWithShutdownContext(func(ctx context.Context) { // There is no sensible way to shut this down ":-(" // If you don't let it run all the way then you will lose data - // FIXME: graceful: AddTestPullRequestTask needs to become a queue! + // TODO: graceful: AddTestPullRequestTask needs to become a queue! prs, err := models.GetUnmergedPullRequestsByHeadInfo(repoID, branch) if err != nil { @@ -341,18 +349,18 @@ func AddTestPullRequestTask(doer *user_model.User, repoID int64, branch string, // checkIfPRContentChanged checks if diff to target branch has changed by push // A commit can be considered to leave the PR untouched if the patch/diff with its merge base is unchanged func checkIfPRContentChanged(ctx context.Context, pr *models.PullRequest, oldCommitID, newCommitID string) (hasChanged bool, err error) { - if err = pr.LoadHeadRepo(); err != nil { + if err = pr.LoadHeadRepoCtx(ctx); err != nil { return false, fmt.Errorf("LoadHeadRepo: %v", err) } else if pr.HeadRepo == nil { // corrupt data assumed changed return true, nil } - if err = pr.LoadBaseRepo(); err != nil { + if err = pr.LoadBaseRepoCtx(ctx); err != nil { return false, fmt.Errorf("LoadBaseRepo: %v", err) } - headGitRepo, err := git.OpenRepositoryCtx(ctx, pr.HeadRepo.RepoPath()) + headGitRepo, err := git.OpenRepository(ctx, pr.HeadRepo.RepoPath()) if err != nil { return false, fmt.Errorf("OpenRepository: %v", err) } @@ -419,13 +427,13 @@ func PushToBaseRepo(ctx context.Context, pr *models.PullRequest) (err error) { func pushToBaseRepoHelper(ctx context.Context, pr *models.PullRequest, prefixHeadBranch string) (err error) { log.Trace("PushToBaseRepo[%d]: pushing commits to base repo '%s'", pr.BaseRepoID, pr.GetGitRefName()) - if err := pr.LoadHeadRepo(); err != nil { + if err := pr.LoadHeadRepoCtx(ctx); err != nil { log.Error("Unable to load head repository for PR[%d] Error: %v", pr.ID, err) return err } headRepoPath := pr.HeadRepo.RepoPath() - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err) return err } @@ -445,7 +453,7 @@ func pushToBaseRepoHelper(ctx context.Context, pr *models.PullRequest, prefixHea Branch: prefixHeadBranch + pr.HeadBranch + ":" + gitRefName, Force: true, // Use InternalPushingEnvironment here because we know that pre-receive and post-receive do not run on a refs/pulls/... 
- Env: models.InternalPushingEnvironment(pr.Issue.Poster, pr.BaseRepo), + Env: repo_module.InternalPushingEnvironment(pr.Issue.Poster, pr.BaseRepo), }); err != nil { if git.IsErrPushOutOfDate(err) { // This should not happen as we're using force! @@ -474,12 +482,12 @@ func pushToBaseRepoHelper(ctx context.Context, pr *models.PullRequest, prefixHea // UpdateRef update refs/pull/id/head directly for agit flow pull request func UpdateRef(ctx context.Context, pr *models.PullRequest) (err error) { log.Trace("UpdateRef[%d]: upgate pull request ref in base repo '%s'", pr.ID, pr.GetGitRefName()) - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err) return err } - _, err = git.NewCommand(ctx, "update-ref", pr.GetGitRefName(), pr.HeadCommitID).RunInDir(pr.BaseRepo.RepoPath()) + _, _, err = git.NewCommand(ctx, "update-ref", pr.GetGitRefName(), pr.HeadCommitID).RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()}) if err != nil { log.Error("Unable to update ref in base repository for PR[%d] Error: %v", pr.ID, err) } @@ -726,18 +734,25 @@ func GetSquashMergeCommitMessages(ctx context.Context, pr *models.PullRequest) s return stringBuilder.String() } -// GetIssuesLastCommitStatus returns a map +// GetIssuesLastCommitStatus returns a map of issue ID to the most recent commit's latest status func GetIssuesLastCommitStatus(ctx context.Context, issues models.IssueList) (map[int64]*models.CommitStatus, error) { + _, lastStatus, err := GetIssuesAllCommitStatus(ctx, issues) + return lastStatus, err +} + +// GetIssuesAllCommitStatus returns a map of issue ID to a list of all statuses for the most recent commit as well as a map of issue ID to only the commit's latest status +func GetIssuesAllCommitStatus(ctx context.Context, issues models.IssueList) (map[int64][]*models.CommitStatus, map[int64]*models.CommitStatus, error) { if err := issues.LoadPullRequests(); err != nil { - return nil, err + return nil, nil, err } if _, err := issues.LoadRepositories(); err != nil { - return nil, err + return nil, nil, err } var ( gitRepos = make(map[int64]*git.Repository) - res = make(map[int64]*models.CommitStatus) + res = make(map[int64][]*models.CommitStatus) + lastRes = make(map[int64]*models.CommitStatus) err error ) defer func() { @@ -752,7 +767,7 @@ func GetIssuesLastCommitStatus(ctx context.Context, issues models.IssueList) (ma } gitRepo, ok := gitRepos[issue.RepoID] if !ok { - gitRepo, err = git.OpenRepositoryCtx(ctx, issue.Repo.RepoPath()) + gitRepo, err = git.OpenRepository(ctx, issue.Repo.RepoPath()) if err != nil { log.Error("Cannot open git repository %-v for issue #%d[%d]. Error: %v", issue.Repo, issue.Index, issue.ID, err) continue @@ -760,34 +775,33 @@ func GetIssuesLastCommitStatus(ctx context.Context, issues models.IssueList) (ma gitRepos[issue.RepoID] = gitRepo } - status, err := getLastCommitStatus(gitRepo, issue.PullRequest) + statuses, lastStatus, err := getAllCommitStatus(gitRepo, issue.PullRequest) if err != nil { - log.Error("getLastCommitStatus: cant get last commit of pull [%d]: %v", issue.PullRequest.ID, err) + log.Error("getAllCommitStatus: cant get commit statuses of pull [%d]: %v", issue.PullRequest.ID, err) continue } - res[issue.PullRequest.ID] = status + res[issue.PullRequest.ID] = statuses + lastRes[issue.PullRequest.ID] = lastStatus } - return res, nil + return res, lastRes, nil } -// getLastCommitStatus get pr's last commit status. 
PR's last commit status is the head commit id's last commit status -func getLastCommitStatus(gitRepo *git.Repository, pr *models.PullRequest) (status *models.CommitStatus, err error) { - sha, err := gitRepo.GetRefCommitID(pr.GetGitRefName()) - if err != nil { - return nil, err +// getAllCommitStatus get pr's commit statuses. +func getAllCommitStatus(gitRepo *git.Repository, pr *models.PullRequest) (statuses []*models.CommitStatus, lastStatus *models.CommitStatus, err error) { + sha, shaErr := gitRepo.GetRefCommitID(pr.GetGitRefName()) + if shaErr != nil { + return nil, nil, shaErr } - statusList, _, err := models.GetLatestCommitStatus(pr.BaseRepo.ID, sha, db.ListOptions{}) - if err != nil { - return nil, err - } - return models.CalcCommitStatus(statusList), nil + statuses, _, err = models.GetLatestCommitStatus(pr.BaseRepo.ID, sha, db.ListOptions{}) + lastStatus = models.CalcCommitStatus(statuses) + return statuses, lastStatus, err } // IsHeadEqualWithBranch returns if the commits of branchName are available in pull request head func IsHeadEqualWithBranch(ctx context.Context, pr *models.PullRequest, branchName string) (bool, error) { var err error - if err = pr.LoadBaseRepo(); err != nil { + if err = pr.LoadBaseRepoCtx(ctx); err != nil { return false, err } baseGitRepo, closer, err := git.RepositoryFromContextOrOpen(ctx, pr.BaseRepo.RepoPath()) @@ -801,7 +815,7 @@ func IsHeadEqualWithBranch(ctx context.Context, pr *models.PullRequest, branchNa return false, err } - if err = pr.LoadHeadRepo(); err != nil { + if err = pr.LoadHeadRepoCtx(ctx); err != nil { return false, err } var headGitRepo *git.Repository diff --git a/services/pull/pull_test.go b/services/pull/pull_test.go index 81627ebb77..09bae97780 100644 --- a/services/pull/pull_test.go +++ b/services/pull/pull_test.go @@ -8,6 +8,12 @@ package pull import ( "testing" + "code.gitea.io/gitea/models" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unit" + "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/git" + "github.com/stretchr/testify/assert" ) @@ -29,3 +35,57 @@ func TestPullRequest_CommitMessageTrailersPattern(t *testing.T) { assert.True(t, commitMessageTrailersPattern.MatchString("Additional whitespace is accepted.\n\nSigned-off-by \t : \tBob ")) assert.True(t, commitMessageTrailersPattern.MatchString("Folded value.\n\nFolded-trailer: This is\n a folded\n trailer value\nOther-Trailer: Value")) } + +func TestPullRequest_GetDefaultMergeMessage_InternalTracker(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + pr := unittest.AssertExistsAndLoadBean(t, &models.PullRequest{ID: 2}).(*models.PullRequest) + + assert.NoError(t, pr.LoadBaseRepo()) + gitRepo, err := git.OpenRepository(git.DefaultContext, pr.BaseRepo.RepoPath()) + assert.NoError(t, err) + defer gitRepo.Close() + + mergeMessage, err := GetDefaultMergeMessage(gitRepo, pr, "") + assert.NoError(t, err) + assert.Equal(t, "Merge pull request 'issue3' (#3) from branch2 into master", mergeMessage) + + pr.BaseRepoID = 1 + pr.HeadRepoID = 2 + mergeMessage, err = GetDefaultMergeMessage(gitRepo, pr, "") + assert.NoError(t, err) + assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo1:branch2 into master", mergeMessage) +} + +func TestPullRequest_GetDefaultMergeMessage_ExternalTracker(t *testing.T) { + assert.NoError(t, unittest.PrepareTestDatabase()) + + externalTracker := repo_model.RepoUnit{ + Type: unit.TypeExternalTracker, + Config: &repo_model.ExternalTrackerConfig{ + ExternalTrackerFormat: 
"https://someurl.com/{user}/{repo}/{issue}", + }, + } + baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) + baseRepo.Units = []*repo_model.RepoUnit{&externalTracker} + + pr := unittest.AssertExistsAndLoadBean(t, &models.PullRequest{ID: 2, BaseRepo: baseRepo}).(*models.PullRequest) + + assert.NoError(t, pr.LoadBaseRepo()) + gitRepo, err := git.OpenRepository(git.DefaultContext, pr.BaseRepo.RepoPath()) + assert.NoError(t, err) + defer gitRepo.Close() + + mergeMessage, err := GetDefaultMergeMessage(gitRepo, pr, "") + assert.NoError(t, err) + + assert.Equal(t, "Merge pull request 'issue3' (!3) from branch2 into master", mergeMessage) + + pr.BaseRepoID = 1 + pr.HeadRepoID = 2 + pr.BaseRepo = nil + pr.HeadRepo = nil + mergeMessage, err = GetDefaultMergeMessage(gitRepo, pr, "") + assert.NoError(t, err) + + assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo2:branch2 into master", mergeMessage) +} diff --git a/services/pull/review.go b/services/pull/review.go index 25eef78d97..940fe4470d 100644 --- a/services/pull/review.go +++ b/services/pull/review.go @@ -44,7 +44,7 @@ func CreateCodeComment(ctx context.Context, doer *user_model.User, gitRepo *git. // Comments that are replies don't require a review header to show up in the issue view if !isReview && existsReview { - if err = issue.LoadRepo(); err != nil { + if err = issue.LoadRepo(ctx); err != nil { return nil, err } @@ -61,7 +61,7 @@ func CreateCodeComment(ctx context.Context, doer *user_model.User, gitRepo *git. return nil, err } - mentions, err := issue.FindAndUpdateIssueMentions(db.DefaultContext, doer, comment.Content) + mentions, err := models.FindAndUpdateIssueMentions(ctx, issue, doer, comment.Content) if err != nil { return nil, err } @@ -122,12 +122,12 @@ func createCodeComment(ctx context.Context, doer *user_model.User, repo *repo_mo return nil, fmt.Errorf("GetPullRequestByIssueID: %v", err) } pr := issue.PullRequest - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { return nil, fmt.Errorf("LoadHeadRepo: %v", err) } gitRepo, closer, err := git.RepositoryFromContextOrOpen(ctx, pr.BaseRepo.RepoPath()) if err != nil { - return nil, fmt.Errorf("OpenRepository: %v", err) + return nil, fmt.Errorf("RepositoryFromContextOrOpen: %v", err) } defer closer.Close() @@ -248,7 +248,7 @@ func SubmitReview(ctx context.Context, doer *user_model.User, gitRepo *git.Repos return nil, nil, err } - mentions, err := issue.FindAndUpdateIssueMentions(ctx, doer, comm.Content) + mentions, err := models.FindAndUpdateIssueMentions(ctx, issue, doer, comm.Content) if err != nil { return nil, nil, err } @@ -258,7 +258,7 @@ func SubmitReview(ctx context.Context, doer *user_model.User, gitRepo *git.Repos for _, lines := range review.CodeComments { for _, comments := range lines { for _, codeComment := range comments { - mentions, err := issue.FindAndUpdateIssueMentions(ctx, doer, codeComment.Content) + mentions, err := models.FindAndUpdateIssueMentions(ctx, issue, doer, codeComment.Content) if err != nil { return nil, nil, err } diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go index 831d98745e..6b01809d49 100644 --- a/services/pull/temp_repo.go +++ b/services/pull/temp_repo.go @@ -13,16 +13,16 @@ import ( "strings" "code.gitea.io/gitea/models" - "code.gitea.io/gitea/models/db" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" + repo_module 
"code.gitea.io/gitea/modules/repository" ) // createTemporaryRepo creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch // it also create a second base branch called "original_base" func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, error) { - if err := pr.LoadHeadRepo(); err != nil { + if err := pr.LoadHeadRepoCtx(ctx); err != nil { log.Error("LoadHeadRepo: %v", err) return "", fmt.Errorf("LoadHeadRepo: %v", err) } else if pr.HeadRepo == nil { @@ -30,7 +30,7 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e return "", &repo_model.ErrRepoNotExist{ ID: pr.HeadRepoID, } - } else if err := pr.LoadBaseRepo(); err != nil { + } else if err := pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("LoadBaseRepo: %v", err) return "", fmt.Errorf("LoadBaseRepo: %v", err) } else if pr.BaseRepo == nil { @@ -38,16 +38,16 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e return "", &repo_model.ErrRepoNotExist{ ID: pr.BaseRepoID, } - } else if err := pr.HeadRepo.GetOwner(db.DefaultContext); err != nil { + } else if err := pr.HeadRepo.GetOwner(ctx); err != nil { log.Error("HeadRepo.GetOwner: %v", err) return "", fmt.Errorf("HeadRepo.GetOwner: %v", err) - } else if err := pr.BaseRepo.GetOwner(db.DefaultContext); err != nil { + } else if err := pr.BaseRepo.GetOwner(ctx); err != nil { log.Error("BaseRepo.GetOwner: %v", err) return "", fmt.Errorf("BaseRepo.GetOwner: %v", err) } // Clone base repo. - tmpBasePath, err := models.CreateTemporaryPath("pull") + tmpBasePath, err := repo_module.CreateTemporaryPath("pull") if err != nil { log.Error("CreateTemporaryPath: %v", err) return "", err @@ -58,7 +58,7 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e if err := git.InitRepository(ctx, tmpBasePath, false); err != nil { log.Error("git init tmpBasePath: %v", err) - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } return "", err @@ -86,7 +86,7 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e if err := addCacheRepo(tmpBasePath, baseRepoPath); err != nil { log.Error("Unable to add base repository to temporary repo [%s -> %s]: %v", pr.BaseRepo.FullName(), tmpBasePath, err) - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } return "", fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %v", pr.BaseRepo.FullName(), err) @@ -94,14 +94,13 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e var outbuf, errbuf strings.Builder if err := git.NewCommand(ctx, "remote", "add", "-t", pr.BaseBranch, "-m", pr.BaseBranch, "origin", baseRepoPath). 
- RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr.BaseRepo.FullName(), tmpBasePath, err, outbuf.String(), errbuf.String()) - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } return "", fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %v\n%s\n%s", pr.BaseRepo.FullName(), err, outbuf.String(), errbuf.String()) @@ -110,14 +109,13 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e errbuf.Reset() if err := git.NewCommand(ctx, "fetch", "origin", "--no-tags", "--", pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, outbuf.String(), errbuf.String()) - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } return "", fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %v\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) @@ -126,14 +124,13 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e errbuf.Reset() if err := git.NewCommand(ctx, "symbolic-ref", "HEAD", git.BranchPrefix+baseBranch). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("Unable to set HEAD as base branch [%s]: %v\n%s\n%s", tmpBasePath, err, outbuf.String(), errbuf.String()) - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } return "", fmt.Errorf("Unable to set HEAD as base branch [tmpBasePath]: %v\n%s\n%s", err, outbuf.String(), errbuf.String()) @@ -143,21 +140,20 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e if err := addCacheRepo(tmpBasePath, headRepoPath); err != nil { log.Error("Unable to add head repository to temporary repo [%s -> %s]: %v", pr.HeadRepo.FullName(), tmpBasePath, err) - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } return "", fmt.Errorf("Unable to head base repository to temporary repo [%s -> tmpBasePath]: %v", pr.HeadRepo.FullName(), err) } if err := git.NewCommand(ctx, "remote", "add", remoteRepoName, headRepoPath). 
- RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { log.Error("Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr.HeadRepo.FullName(), tmpBasePath, err, outbuf.String(), errbuf.String()) - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } return "", fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %v\n%s\n%s", pr.HeadRepo.FullName(), err, outbuf.String(), errbuf.String()) @@ -176,13 +172,12 @@ func createTemporaryRepo(ctx context.Context, pr *models.PullRequest) (string, e headBranch = pr.GetGitRefName() } if err := git.NewCommand(ctx, "fetch", "--no-tags", remoteRepoName, headBranch+":"+trackingBranch). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, + Run(&git.RunOpts{ + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, }); err != nil { - if err := models.RemoveTemporaryPath(tmpBasePath); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil { log.Error("CreateTempRepo: RemoveTemporaryPath: %s", err) } if !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) { diff --git a/services/pull/update.go b/services/pull/update.go index 2ad58ecd29..3c5c1c048c 100644 --- a/services/pull/update.go +++ b/services/pull/update.go @@ -14,6 +14,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" ) // Update updates pull request with base branch. 
@@ -23,6 +24,9 @@ func Update(ctx context.Context, pull *models.PullRequest, doer *user_model.User style repo_model.MergeStyle ) + pullWorkingPool.CheckIn(fmt.Sprint(pull.ID)) + defer pullWorkingPool.CheckOut(fmt.Sprint(pull.ID)) + if rebase { pr = pull style = repo_model.MergeStyleRebaseUpdate @@ -42,10 +46,10 @@ func Update(ctx context.Context, pull *models.PullRequest, doer *user_model.User return fmt.Errorf("Not support update agit flow pull request's head branch") } - if err := pr.LoadHeadRepo(); err != nil { + if err := pr.LoadHeadRepoCtx(ctx); err != nil { log.Error("LoadHeadRepo: %v", err) return fmt.Errorf("LoadHeadRepo: %v", err) - } else if err = pr.LoadBaseRepo(); err != nil { + } else if err = pr.LoadBaseRepoCtx(ctx); err != nil { log.Error("LoadBaseRepo: %v", err) return fmt.Errorf("LoadBaseRepo: %v", err) } @@ -71,7 +75,7 @@ func Update(ctx context.Context, pull *models.PullRequest, doer *user_model.User } // IsUserAllowedToUpdate check if user is allowed to update PR with given permissions and branch protections -func IsUserAllowedToUpdate(pull *models.PullRequest, user *user_model.User) (mergeAllowed, rebaseAllowed bool, err error) { +func IsUserAllowedToUpdate(ctx context.Context, pull *models.PullRequest, user *user_model.User) (mergeAllowed, rebaseAllowed bool, err error) { if pull.Flow == models.PullRequestFlowAGit { return false, false, nil } @@ -79,7 +83,7 @@ func IsUserAllowedToUpdate(pull *models.PullRequest, user *user_model.User) (mer if user == nil { return false, false, nil } - headRepoPerm, err := models.GetUserRepoPermission(pull.HeadRepo, user) + headRepoPerm, err := models.GetUserRepoPermission(ctx, pull.HeadRepo, user) if err != nil { return false, false, err } @@ -111,21 +115,35 @@ func IsUserAllowedToUpdate(pull *models.PullRequest, user *user_model.User) (mer return false, false, nil } - mergeAllowed, err = IsUserAllowedToMerge(pr, headRepoPerm, user) + baseRepoPerm, err := models.GetUserRepoPermission(ctx, pull.BaseRepo, user) if err != nil { return false, false, err } + mergeAllowed, err = IsUserAllowedToMerge(ctx, pr, headRepoPerm, user) + if err != nil { + return false, false, err + } + + if pull.AllowMaintainerEdit { + mergeAllowedMaintainer, err := IsUserAllowedToMerge(ctx, pr, baseRepoPerm, user) + if err != nil { + return false, false, err + } + + mergeAllowed = mergeAllowed || mergeAllowedMaintainer + } + return mergeAllowed, rebaseAllowed, nil } // GetDiverging determines how many commits a PR is ahead or behind the PR base branch func GetDiverging(ctx context.Context, pr *models.PullRequest) (*git.DivergeObject, error) { log.Trace("GetDiverging[%d]: compare commits", pr.ID) - if err := pr.LoadBaseRepo(); err != nil { + if err := pr.LoadBaseRepoCtx(ctx); err != nil { return nil, err } - if err := pr.LoadHeadRepo(); err != nil { + if err := pr.LoadHeadRepoCtx(ctx); err != nil { return nil, err } @@ -137,7 +155,7 @@ func GetDiverging(ctx context.Context, pr *models.PullRequest) (*git.DivergeObje return nil, err } defer func() { - if err := models.RemoveTemporaryPath(tmpRepo); err != nil { + if err := repo_module.RemoveTemporaryPath(tmpRepo); err != nil { log.Error("Merge: RemoveTemporaryPath: %s", err) } }() diff --git a/services/release/release.go b/services/release/release.go index 0df8635230..0372e3a690 100644 --- a/services/release/release.go +++ b/services/release/release.go @@ -130,7 +130,7 @@ func CreateRelease(gitRepo *git.Repository, rel *models.Release, attachmentUUIDs return err } - if err = 
models.AddReleaseAttachments(db.DefaultContext, rel.ID, attachmentUUIDs); err != nil { + if err = models.AddReleaseAttachments(gitRepo.Ctx, rel.ID, attachmentUUIDs); err != nil { return err } @@ -297,9 +297,9 @@ func DeleteReleaseByID(ctx context.Context, id int64, doer *user_model.User, del } if delTag { - if stdout, err := git.NewCommand(ctx, "tag", "-d", rel.TagName). + if stdout, _, err := git.NewCommand(ctx, "tag", "-d", rel.TagName). SetDescription(fmt.Sprintf("DeleteReleaseByID (git tag -d): %d", rel.ID)). - RunInDir(repo.RepoPath()); err != nil && !strings.Contains(err.Error(), "not found") { + RunStdString(&git.RunOpts{Dir: repo.RepoPath()}); err != nil && !strings.Contains(err.Error(), "not found") { log.Error("DeleteReleaseByID (git tag -d): %d in %v Failed:\nStdout: %s\nError: %v", rel.ID, repo, stdout, err) return fmt.Errorf("git tag -d: %v", err) } @@ -319,7 +319,7 @@ func DeleteReleaseByID(ctx context.Context, id int64, doer *user_model.User, del } else { rel.IsTag = true - if err = models.UpdateRelease(db.DefaultContext, rel); err != nil { + if err = models.UpdateRelease(ctx, rel); err != nil { return fmt.Errorf("Update: %v", err) } } diff --git a/services/release/release_test.go b/services/release/release_test.go index 413a56116b..19d985491f 100644 --- a/services/release/release_test.go +++ b/services/release/release_test.go @@ -21,7 +21,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } func TestRelease_Create(t *testing.T) { @@ -31,7 +33,7 @@ func TestRelease_Create(t *testing.T) { repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) repoPath := repo_model.RepoPath(user.Name, repo.Name) - gitRepo, err := git.OpenRepository(repoPath) + gitRepo, err := git.OpenRepository(git.DefaultContext, repoPath) assert.NoError(t, err) defer gitRepo.Close() @@ -135,7 +137,7 @@ func TestRelease_Update(t *testing.T) { repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) repoPath := repo_model.RepoPath(user.Name, repo.Name) - gitRepo, err := git.OpenRepository(repoPath) + gitRepo, err := git.OpenRepository(git.DefaultContext, repoPath) assert.NoError(t, err) defer gitRepo.Close() @@ -277,7 +279,7 @@ func TestRelease_createTag(t *testing.T) { repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) repoPath := repo_model.RepoPath(user.Name, repo.Name) - gitRepo, err := git.OpenRepository(repoPath) + gitRepo, err := git.OpenRepository(git.DefaultContext, repoPath) assert.NoError(t, err) defer gitRepo.Close() diff --git a/services/repository/adopt.go b/services/repository/adopt.go index 72fe284ad3..b287d94f9d 100644 --- a/services/repository/adopt.go +++ b/services/repository/adopt.go @@ -79,14 +79,14 @@ func AdoptRepository(doer, u *user_model.User, opts models.CreateRepoOptions) (* // Initialize Issue Labels if selected if len(opts.IssueLabels) > 0 { - if err := models.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil { + if err := repo_module.InitializeLabels(ctx, repo.ID, opts.IssueLabels, false); err != nil { return fmt.Errorf("InitializeLabels: %v", err) } } - if stdout, err := git.NewCommand(ctx, "update-server-info"). + if stdout, _, err := git.NewCommand(ctx, "update-server-info"). SetDescription(fmt.Sprintf("CreateRepository(git update-server-info): %s", repoPath)). 
- RunInDir(repoPath); err != nil { + RunStdString(&git.RunOpts{Dir: repoPath}); err != nil { log.Error("CreateRepository(git update-server-info) in %v: Stdout: %s\nError: %v", repo, stdout, err) return fmt.Errorf("CreateRepository(git update-server-info): %v", err) } @@ -123,7 +123,7 @@ func adoptRepository(ctx context.Context, repoPath string, u *user_model.User, r repo.IsEmpty = false // Don't bother looking this repo in the context it won't be there - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { return fmt.Errorf("openRepository: %v", err) } @@ -339,6 +339,13 @@ func ListUnadoptedRepositories(query string, opts *db.ListOptions) ([]string, in } repoNamesToCheck = append(repoNamesToCheck, name) + if len(repoNamesToCheck) > setting.Database.IterateBufferSize { + if err = checkUnadoptedRepositories(userName, repoNamesToCheck, unadopted); err != nil { + return err + } + repoNamesToCheck = repoNamesToCheck[:0] + + } return filepath.SkipDir }); err != nil { return nil, 0, err diff --git a/services/repository/archiver/archiver.go b/services/repository/archiver/archiver.go index ad2141ef33..ebd3eaf236 100644 --- a/services/repository/archiver/archiver.go +++ b/services/repository/archiver/archiver.go @@ -172,13 +172,13 @@ func doArchive(r *ArchiveRequest) (*repo_model.RepoArchiver, error) { w.Close() rd.Close() }() - done := make(chan error) + done := make(chan error, 1) // Ensure that there is some capacity which will ensure that the goroutine below can always finish repo, err := repo_model.GetRepositoryByID(archiver.RepoID) if err != nil { return nil, fmt.Errorf("archiver.LoadRepo failed: %v", err) } - gitRepo, err := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { return nil, err } diff --git a/services/repository/archiver/archiver_test.go b/services/repository/archiver/archiver_test.go index b7fb6cb0ca..24437ce76c 100644 --- a/services/repository/archiver/archiver_test.go +++ b/services/repository/archiver/archiver_test.go @@ -17,7 +17,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } func TestArchive_Basic(t *testing.T) { diff --git a/services/repository/branch.go b/services/repository/branch.go index b1a6dafb58..88fc67fa8c 100644 --- a/services/repository/branch.go +++ b/services/repository/branch.go @@ -35,8 +35,8 @@ func CreateNewBranch(ctx context.Context, doer *user_model.User, repo *repo_mode if err := git.Push(ctx, repo.RepoPath(), git.PushOptions{ Remote: repo.RepoPath(), - Branch: fmt.Sprintf("%s:%s%s", oldBranchName, git.BranchPrefix, branchName), - Env: models.PushingEnvironment(doer, repo), + Branch: fmt.Sprintf("%s%s:%s%s", git.BranchPrefix, oldBranchName, git.BranchPrefix, branchName), + Env: repo_module.PushingEnvironment(doer, repo), }); err != nil { if git.IsErrPushOutOfDate(err) || git.IsErrPushRejected(err) { return err @@ -55,7 +55,7 @@ func GetBranches(ctx context.Context, repo *repo_model.Repository, skip, limit i // checkBranchName validates branch name with existing repository branches func checkBranchName(ctx context.Context, repo *repo_model.Repository, name string) error { - _, err := git.WalkReferences(ctx, repo.RepoPath(), func(refName string) error { + _, err := git.WalkReferences(ctx, repo.RepoPath(), func(_, refName string) error { branchRefName := 
strings.TrimPrefix(refName, git.BranchPrefix) switch { case branchRefName == name: @@ -93,7 +93,7 @@ func CreateNewBranchFromCommit(ctx context.Context, doer *user_model.User, repo if err := git.Push(ctx, repo.RepoPath(), git.PushOptions{ Remote: repo.RepoPath(), Branch: fmt.Sprintf("%s:%s%s", commit, git.BranchPrefix, branchName), - Env: models.PushingEnvironment(doer, repo), + Env: repo_module.PushingEnvironment(doer, repo), }); err != nil { if git.IsErrPushOutOfDate(err) || git.IsErrPushRejected(err) { return err diff --git a/services/repository/check.go b/services/repository/check.go index 6adb8479c4..6fb86d0dc3 100644 --- a/services/repository/check.go +++ b/services/repository/check.go @@ -27,7 +27,7 @@ func GitFsck(ctx context.Context, timeout time.Duration, args []string) error { log.Trace("Doing: GitFsck") if err := db.Iterate( - db.DefaultContext, + ctx, new(repo_model.Repository), builder.Expr("id>0 AND is_fsck_enabled=?", true), func(idx int, bean interface{}) error { @@ -62,7 +62,7 @@ func GitGcRepos(ctx context.Context, timeout time.Duration, args ...string) erro args = append([]string{"gc"}, args...) if err := db.Iterate( - db.DefaultContext, + ctx, new(repo_model.Repository), builder.Gt{"id": 0}, func(idx int, bean interface{}) error { @@ -77,15 +77,7 @@ func GitGcRepos(ctx context.Context, timeout time.Duration, args ...string) erro SetDescription(fmt.Sprintf("Repository Garbage Collection: %s", repo.FullName())) var stdout string var err error - if timeout > 0 { - var stdoutBytes []byte - stdoutBytes, err = command.RunInDirTimeout( - timeout, - repo.RepoPath()) - stdout = string(stdoutBytes) - } else { - stdout, err = command.RunInDir(repo.RepoPath()) - } + stdout, _, err = command.RunStdString(&git.RunOpts{Timeout: timeout, Dir: repo.RepoPath()}) if err != nil { log.Error("Repository garbage collection failed for %v. Stdout: %s\nError: %v", repo, stdout, err) @@ -97,7 +89,7 @@ func GitGcRepos(ctx context.Context, timeout time.Duration, args ...string) erro } // Now update the size of the repository - if err := models.UpdateRepoSize(db.DefaultContext, repo); err != nil { + if err := models.UpdateRepoSize(ctx, repo); err != nil { log.Error("Updating size as part of garbage collection failed for %v. Stdout: %s\nError: %v", repo, stdout, err) desc := fmt.Sprintf("Updating size as part of garbage collection failed for %s. 
Stdout: %s\nError: %v", repo.RepoPath(), stdout, err) if err = admin_model.CreateRepositoryNotice(desc); err != nil { @@ -119,7 +111,7 @@ func GitGcRepos(ctx context.Context, timeout time.Duration, args ...string) erro func gatherMissingRepoRecords(ctx context.Context) ([]*repo_model.Repository, error) { repos := make([]*repo_model.Repository, 0, 10) if err := db.Iterate( - db.DefaultContext, + ctx, new(repo_model.Repository), builder.Gt{"id": 0}, func(idx int, bean interface{}) error { diff --git a/services/repository/files/cherry_pick.go b/services/repository/files/cherry_pick.go index dc932b39c2..0107d99e66 100644 --- a/services/repository/files/cherry_pick.go +++ b/services/repository/files/cherry_pick.go @@ -97,9 +97,9 @@ func CherryPick(ctx context.Context, repo *repo_model.Repository, doer *user_mod // Now commit the tree var commitHash string if opts.Dates != nil { - commitHash, err = t.CommitTreeWithDate(author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer) + commitHash, err = t.CommitTreeWithDate("HEAD", author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer) } else { - commitHash, err = t.CommitTree(author, committer, treeHash, message, opts.Signoff) + commitHash, err = t.CommitTree("HEAD", author, committer, treeHash, message, opts.Signoff) } if err != nil { return nil, err diff --git a/services/repository/files/commit.go b/services/repository/files/commit.go index e7604e3f92..6ecabb4020 100644 --- a/services/repository/files/commit.go +++ b/services/repository/files/commit.go @@ -14,6 +14,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/services/automerge" ) // CreateCommitStatus creates a new CommitStatus given a bunch of parameters @@ -44,6 +45,12 @@ func CreateCommitStatus(ctx context.Context, repo *repo_model.Repository, creato return fmt.Errorf("NewCommitStatus[repo_id: %d, user_id: %d, sha: %s]: %v", repo.ID, creator.ID, sha, err) } + if status.State.IsSuccess() { + if err := automerge.MergeScheduledPullRequest(ctx, sha, repo); err != nil { + return fmt.Errorf("MergeScheduledPullRequest[repo_id: %d, user_id: %d, sha: %s]: %w", repo.ID, creator.ID, sha, err) + } + } + return nil } diff --git a/services/repository/files/content.go b/services/repository/files/content.go index 9037a84349..2237671a60 100644 --- a/services/repository/files/content.go +++ b/services/repository/files/content.go @@ -164,7 +164,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref // Now populate the rest of the ContentsResponse based on entry type if entry.IsRegular() || entry.IsExecutable() { contentsResponse.Type = string(ContentTypeRegular) - if blobResponse, err := GetBlobBySHA(ctx, repo, entry.ID.String()); err != nil { + if blobResponse, err := GetBlobBySHA(ctx, repo, gitRepo, entry.ID.String()); err != nil { return nil, err } else if !forList { // We don't show the content if we are getting a list of FileContentResponses @@ -220,12 +220,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref } // GetBlobBySHA get the GitBlobResponse of a repository using a sha hash. 
-func GetBlobBySHA(ctx context.Context, repo *repo_model.Repository, sha string) (*api.GitBlobResponse, error) { - gitRepo, closer, err := git.RepositoryFromContextOrOpen(ctx, repo.RepoPath()) - if err != nil { - return nil, err - } - defer closer.Close() +func GetBlobBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, sha string) (*api.GitBlobResponse, error) { gitBlob, err := gitRepo.GetBlob(sha) if err != nil { return nil, err diff --git a/services/repository/files/content_test.go b/services/repository/files/content_test.go index 84bb1c2e02..342ebae329 100644 --- a/services/repository/files/content_test.go +++ b/services/repository/files/content_test.go @@ -8,7 +8,9 @@ import ( "path/filepath" "testing" + repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/git" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/test" @@ -16,7 +18,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", "..", ".."), + }) } func getExpectedReadmeContentsResponse() *api.ContentsResponse { @@ -232,7 +236,12 @@ func TestGetBlobBySHA(t *testing.T) { ctx.SetParams(":id", "1") ctx.SetParams(":sha", sha) - gbr, err := GetBlobBySHA(ctx, ctx.Repo.Repository, ctx.Params(":sha")) + gitRepo, err := git.OpenRepository(ctx, repo_model.RepoPath(ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)) + if err != nil { + t.Fail() + } + + gbr, err := GetBlobBySHA(ctx, ctx.Repo.Repository, gitRepo, ctx.Params(":sha")) expectedGBR := &api.GitBlobResponse{ Content: "dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK", Encoding: "base64", diff --git a/services/repository/files/delete.go b/services/repository/files/delete.go index 95d05b5202..781a762d0f 100644 --- a/services/repository/files/delete.go +++ b/services/repository/files/delete.go @@ -179,9 +179,9 @@ func DeleteRepoFile(ctx context.Context, repo *repo_model.Repository, doer *user // Now commit the tree var commitHash string if opts.Dates != nil { - commitHash, err = t.CommitTreeWithDate(author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer) + commitHash, err = t.CommitTreeWithDate("HEAD", author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer) } else { - commitHash, err = t.CommitTree(author, committer, treeHash, message, opts.Signoff) + commitHash, err = t.CommitTree("HEAD", author, committer, treeHash, message, opts.Signoff) } if err != nil { return nil, err diff --git a/services/repository/files/diff_test.go b/services/repository/files/diff_test.go index c0a378dc4b..310308ca88 100644 --- a/services/repository/files/diff_test.go +++ b/services/repository/files/diff_test.go @@ -37,6 +37,7 @@ func TestGetDiffPreview(t *testing.T) { { Name: "README.md", OldName: "README.md", + NameHash: "8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d", Index: 1, Addition: 2, Deletion: 1, @@ -123,7 +124,7 @@ func TestGetDiffPreview(t *testing.T) { assert.NoError(t, err) bs, err := json.Marshal(diff) assert.NoError(t, err) - assert.EqualValues(t, expectedBs, bs) + assert.EqualValues(t, string(expectedBs), string(bs)) }) t.Run("empty branch, same results", func(t *testing.T) { diff --git 
a/services/repository/files/file_test.go b/services/repository/files/file_test.go index 24e4949832..ee0582dfc2 100644 --- a/services/repository/files/file_test.go +++ b/services/repository/files/file_test.go @@ -109,7 +109,7 @@ func TestGetFileResponseFromCommit(t *testing.T) { repo := ctx.Repo.Repository branch := repo.DefaultBranch treePath := "README.md" - gitRepo, _ := git.OpenRepositoryCtx(ctx, repo.RepoPath()) + gitRepo, _ := git.OpenRepository(ctx, repo.RepoPath()) defer gitRepo.Close() commit, _ := gitRepo.GetBranchCommit(branch) expectedFileResponse := getExpectedFileResponse() diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go index 09a8b3ea0c..240cb4fe2c 100644 --- a/services/repository/files/patch.go +++ b/services/repository/files/patch.go @@ -145,12 +145,11 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user } cmd := git.NewCommand(ctx, args...) - if err := cmd.RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: t.basePath, - Stdout: stdout, - Stderr: stderr, - Stdin: strings.NewReader(opts.Content), + if err := cmd.Run(&git.RunOpts{ + Dir: t.basePath, + Stdout: stdout, + Stderr: stderr, + Stdin: strings.NewReader(opts.Content), }); err != nil { return nil, fmt.Errorf("Error: Stdout: %s\nStderr: %s\nErr: %v", stdout.String(), stderr.String(), err) } @@ -164,9 +163,9 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user // Now commit the tree var commitHash string if opts.Dates != nil { - commitHash, err = t.CommitTreeWithDate(author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer) + commitHash, err = t.CommitTreeWithDate("HEAD", author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer) } else { - commitHash, err = t.CommitTree(author, committer, treeHash, message, opts.Signoff) + commitHash, err = t.CommitTree("HEAD", author, committer, treeHash, message, opts.Signoff) } if err != nil { return nil, err diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go index 2223e1c8fd..9c7d9aafec 100644 --- a/services/repository/files/temp_repo.go +++ b/services/repository/files/temp_repo.go @@ -19,6 +19,7 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" + repo_module "code.gitea.io/gitea/modules/repository" "code.gitea.io/gitea/modules/setting" asymkey_service "code.gitea.io/gitea/services/asymkey" "code.gitea.io/gitea/services/gitdiff" @@ -34,7 +35,7 @@ type TemporaryUploadRepository struct { // NewTemporaryUploadRepository creates a new temporary upload repository func NewTemporaryUploadRepository(ctx context.Context, repo *repo_model.Repository) (*TemporaryUploadRepository, error) { - basePath, err := models.CreateTemporaryPath("upload") + basePath, err := repo_module.CreateTemporaryPath("upload") if err != nil { return nil, err } @@ -45,14 +46,14 @@ func NewTemporaryUploadRepository(ctx context.Context, repo *repo_model.Reposito // Close the repository cleaning up all files func (t *TemporaryUploadRepository) Close() { defer t.gitRepo.Close() - if err := models.RemoveTemporaryPath(t.basePath); err != nil { + if err := repo_module.RemoveTemporaryPath(t.basePath); err != nil { log.Error("Failed to remove temporary path %s: %v", t.basePath, err) } } // Clone the base repository to our path and set branch as the HEAD func (t *TemporaryUploadRepository) Clone(branch string) error { - if _, err := 
git.NewCommand(t.ctx, "clone", "-s", "--bare", "-b", branch, t.repo.RepoPath(), t.basePath).Run(); err != nil { + if _, _, err := git.NewCommand(t.ctx, "clone", "-s", "--bare", "-b", branch, t.repo.RepoPath(), t.basePath).RunStdString(nil); err != nil { stderr := err.Error() if matched, _ := regexp.MatchString(".*Remote branch .* not found in upstream origin.*", stderr); matched { return git.ErrBranchNotExist{ @@ -69,7 +70,20 @@ func (t *TemporaryUploadRepository) Clone(branch string) error { return fmt.Errorf("Clone: %v %s", err, stderr) } } - gitRepo, err := git.OpenRepositoryCtx(t.ctx, t.basePath) + gitRepo, err := git.OpenRepository(t.ctx, t.basePath) + if err != nil { + return err + } + t.gitRepo = gitRepo + return nil +} + +// Init the repository +func (t *TemporaryUploadRepository) Init() error { + if err := git.InitRepository(t.ctx, t.basePath, false); err != nil { + return err + } + gitRepo, err := git.OpenRepository(t.ctx, t.basePath) if err != nil { return err } @@ -79,7 +93,7 @@ func (t *TemporaryUploadRepository) Clone(branch string) error { // SetDefaultIndex sets the git index to our HEAD func (t *TemporaryUploadRepository) SetDefaultIndex() error { - if _, err := git.NewCommand(t.ctx, "read-tree", "HEAD").RunInDir(t.basePath); err != nil { + if _, _, err := git.NewCommand(t.ctx, "read-tree", "HEAD").RunStdString(&git.RunOpts{Dir: t.basePath}); err != nil { return fmt.Errorf("SetDefaultIndex: %v", err) } return nil @@ -98,11 +112,10 @@ func (t *TemporaryUploadRepository) LsFiles(filenames ...string) ([]string, erro } if err := git.NewCommand(t.ctx, cmdArgs...). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: t.basePath, - Stdout: stdOut, - Stderr: stdErr, + Run(&git.RunOpts{ + Dir: t.basePath, + Stdout: stdOut, + Stderr: stdErr, }); err != nil { log.Error("Unable to run git ls-files for temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String()) err = fmt.Errorf("Unable to run git ls-files for temporary repo of: %s Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String()) @@ -131,12 +144,11 @@ func (t *TemporaryUploadRepository) RemoveFilesFromIndex(filenames ...string) er } if err := git.NewCommand(t.ctx, "update-index", "--remove", "-z", "--index-info"). - RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: t.basePath, - Stdin: stdIn, - Stdout: stdOut, - Stderr: stdErr, + Run(&git.RunOpts{ + Dir: t.basePath, + Stdin: stdIn, + Stdout: stdOut, + Stderr: stdErr, }); err != nil { log.Error("Unable to update-index for temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String()) return fmt.Errorf("Unable to update-index for temporary repo: %s Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String()) @@ -150,12 +162,11 @@ func (t *TemporaryUploadRepository) HashObject(content io.Reader) (string, error stdErr := new(bytes.Buffer) if err := git.NewCommand(t.ctx, "hash-object", "-w", "--stdin"). 
- RunWithContext(&git.RunContext{ - Timeout: -1, - Dir: t.basePath, - Stdin: content, - Stdout: stdOut, - Stderr: stdErr, + Run(&git.RunOpts{ + Dir: t.basePath, + Stdin: content, + Stdout: stdOut, + Stderr: stdErr, }); err != nil { log.Error("Unable to hash-object to temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String()) return "", fmt.Errorf("Unable to hash-object to temporary repo: %s Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String()) @@ -166,7 +177,7 @@ func (t *TemporaryUploadRepository) HashObject(content io.Reader) (string, error // AddObjectToIndex adds the provided object hash to the index with the provided mode and path func (t *TemporaryUploadRepository) AddObjectToIndex(mode, objectHash, objectPath string) error { - if _, err := git.NewCommand(t.ctx, "update-index", "--add", "--replace", "--cacheinfo", mode, objectHash, objectPath).RunInDir(t.basePath); err != nil { + if _, _, err := git.NewCommand(t.ctx, "update-index", "--add", "--replace", "--cacheinfo", mode, objectHash, objectPath).RunStdString(&git.RunOpts{Dir: t.basePath}); err != nil { stderr := err.Error() if matched, _ := regexp.MatchString(".*Invalid path '.*", stderr); matched { return models.ErrFilePathInvalid{ @@ -182,7 +193,7 @@ func (t *TemporaryUploadRepository) AddObjectToIndex(mode, objectHash, objectPat // WriteTree writes the current index as a tree to the object db and returns its hash func (t *TemporaryUploadRepository) WriteTree() (string, error) { - stdout, err := git.NewCommand(t.ctx, "write-tree").RunInDir(t.basePath) + stdout, _, err := git.NewCommand(t.ctx, "write-tree").RunStdString(&git.RunOpts{Dir: t.basePath}) if err != nil { log.Error("Unable to write tree in temporary repo: %s(%s): Error: %v", t.repo.FullName(), t.basePath, err) return "", fmt.Errorf("Unable to write-tree in temporary repo for: %s Error: %v", t.repo.FullName(), err) @@ -200,7 +211,7 @@ func (t *TemporaryUploadRepository) GetLastCommitByRef(ref string) (string, erro if ref == "" { ref = "HEAD" } - stdout, err := git.NewCommand(t.ctx, "rev-parse", ref).RunInDir(t.basePath) + stdout, _, err := git.NewCommand(t.ctx, "rev-parse", ref).RunStdString(&git.RunOpts{Dir: t.basePath}) if err != nil { log.Error("Unable to get last ref for %s in temporary repo: %s(%s): Error: %v", ref, t.repo.FullName(), t.basePath, err) return "", fmt.Errorf("Unable to rev-parse %s in temporary repo for: %s Error: %v", ref, t.repo.FullName(), err) @@ -209,12 +220,12 @@ func (t *TemporaryUploadRepository) GetLastCommitByRef(ref string) (string, erro } // CommitTree creates a commit from a given tree for the user with provided message -func (t *TemporaryUploadRepository) CommitTree(author, committer *user_model.User, treeHash, message string, signoff bool) (string, error) { - return t.CommitTreeWithDate(author, committer, treeHash, message, signoff, time.Now(), time.Now()) +func (t *TemporaryUploadRepository) CommitTree(parent string, author, committer *user_model.User, treeHash, message string, signoff bool) (string, error) { + return t.CommitTreeWithDate(parent, author, committer, treeHash, message, signoff, time.Now(), time.Now()) } // CommitTreeWithDate creates a commit from a given tree for the user with provided message -func (t *TemporaryUploadRepository) CommitTreeWithDate(author, committer *user_model.User, treeHash, message string, signoff bool, authorDate, committerDate time.Time) (string, error) { +func (t 
*TemporaryUploadRepository) CommitTreeWithDate(parent string, author, committer *user_model.User, treeHash, message string, signoff bool, authorDate, committerDate time.Time) (string, error) { authorSig := author.NewGitSig() committerSig := committer.NewGitSig() @@ -235,11 +246,23 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(author, committer *user_m _, _ = messageBytes.WriteString(message) _, _ = messageBytes.WriteString("\n") - args := []string{"commit-tree", treeHash, "-p", "HEAD"} + var args []string + if parent != "" { + args = []string{"commit-tree", treeHash, "-p", parent} + } else { + args = []string{"commit-tree", treeHash} + } // Determine if we should sign if git.CheckGitVersionAtLeast("1.7.9") == nil { - sign, keyID, signer, _ := asymkey_service.SignCRUDAction(t.ctx, t.repo.RepoPath(), author, t.basePath, "HEAD") + var sign bool + var keyID string + var signer *git.Signature + if parent != "" { + sign, keyID, signer, _ = asymkey_service.SignCRUDAction(t.ctx, t.repo.RepoPath(), author, t.basePath, parent) + } else { + sign, keyID, signer, _ = asymkey_service.SignInitialCommit(t.ctx, t.repo.RepoPath(), author) + } if sign { args = append(args, "-S"+keyID) if t.repo.GetTrustModel() == repo_model.CommitterTrustModel || t.repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { @@ -275,13 +298,12 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(author, committer *user_m stdout := new(bytes.Buffer) stderr := new(bytes.Buffer) if err := git.NewCommand(t.ctx, args...). - RunWithContext(&git.RunContext{ - Env: env, - Timeout: -1, - Dir: t.basePath, - Stdin: messageBytes, - Stdout: stdout, - Stderr: stderr, + Run(&git.RunOpts{ + Env: env, + Dir: t.basePath, + Stdin: messageBytes, + Stdout: stdout, + Stderr: stderr, }); err != nil { log.Error("Unable to commit-tree in temporary repo: %s (%s) Error: %v\nStdout: %s\nStderr: %s", t.repo.FullName(), t.basePath, err, stdout, stderr) @@ -294,7 +316,7 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(author, committer *user_m // Push the provided commitHash to the repository branch by the provided user func (t *TemporaryUploadRepository) Push(doer *user_model.User, commitHash, branch string) error { // Because calls hooks we need to pass in the environment - env := models.PushingEnvironment(doer, t.repo) + env := repo_module.PushingEnvironment(doer, t.repo) if err := git.Push(t.ctx, t.basePath, git.PushOptions{ Remote: t.repo.RepoPath(), Branch: strings.TrimSpace(commitHash) + ":" + git.BranchPrefix + strings.TrimSpace(branch), @@ -332,7 +354,7 @@ func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) { var finalErr error if err := git.NewCommand(t.ctx, "diff-index", "--src-prefix=\\a/", "--dst-prefix=\\b/", "--cached", "-p", "HEAD"). 
- RunWithContext(&git.RunContext{ + Run(&git.RunOpts{ Timeout: 30 * time.Second, Dir: t.basePath, Stdout: stdoutWriter, diff --git a/services/repository/files/update.go b/services/repository/files/update.go index 4b8653a7f7..2cb40aac47 100644 --- a/services/repository/files/update.go +++ b/services/repository/files/update.go @@ -141,7 +141,7 @@ func CreateOrUpdateRepoFile(ctx context.Context, repo *repo_model.Repository, do defer closer.Close() // oldBranch must exist for this operation - if _, err := gitRepo.GetBranch(opts.OldBranch); err != nil { + if _, err := gitRepo.GetBranch(opts.OldBranch); err != nil && !repo.IsEmpty { return nil, err } @@ -191,118 +191,130 @@ func CreateOrUpdateRepoFile(ctx context.Context, repo *repo_model.Repository, do log.Error("%v", err) } defer t.Close() + hasOldBranch := true if err := t.Clone(opts.OldBranch); err != nil { - return nil, err - } - if err := t.SetDefaultIndex(); err != nil { - return nil, err - } - - // Get the commit of the original branch - commit, err := t.GetBranchCommit(opts.OldBranch) - if err != nil { - return nil, err // Couldn't get a commit for the branch - } - - // Assigned LastCommitID in opts if it hasn't been set - if opts.LastCommitID == "" { - opts.LastCommitID = commit.ID.String() - } else { - lastCommitID, err := t.gitRepo.ConvertToSHA1(opts.LastCommitID) - if err != nil { - return nil, fmt.Errorf("DeleteRepoFile: Invalid last commit ID: %v", err) + if !git.IsErrBranchNotExist(err) || !repo.IsEmpty { + return nil, err + } + if err := t.Init(); err != nil { + return nil, err + } + hasOldBranch = false + opts.LastCommitID = "" + } + if hasOldBranch { + if err := t.SetDefaultIndex(); err != nil { + return nil, err } - opts.LastCommitID = lastCommitID.String() - } encoding := "UTF-8" bom := false executable := false - if !opts.IsNewFile { - fromEntry, err := commit.GetTreeEntryByPath(fromTreePath) + if hasOldBranch { + // Get the commit of the original branch + commit, err := t.GetBranchCommit(opts.OldBranch) if err != nil { - return nil, err + return nil, err // Couldn't get a commit for the branch } - if opts.SHA != "" { - // If a SHA was given and the SHA given doesn't match the SHA of the fromTreePath, throw error - if opts.SHA != fromEntry.ID.String() { - return nil, models.ErrSHADoesNotMatch{ - Path: treePath, - GivenSHA: opts.SHA, - CurrentSHA: fromEntry.ID.String(), - } + + // Assigned LastCommitID in opts if it hasn't been set + if opts.LastCommitID == "" { + opts.LastCommitID = commit.ID.String() + } else { + lastCommitID, err := t.gitRepo.ConvertToSHA1(opts.LastCommitID) + if err != nil { + return nil, fmt.Errorf("ConvertToSHA1: Invalid last commit ID: %v", err) } - } else if opts.LastCommitID != "" { - // If a lastCommitID was given and it doesn't match the commitID of the head of the branch throw - // an error, but only if we aren't creating a new branch. 
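(Aside, not part of the patch: the hunks above and below replace every RunWithContext/RunInDir call with the consolidated Run/RunStdString entry points that take a single *git.RunOpts. A minimal sketch of the new call pattern, assuming Gitea's code.gitea.io/gitea/modules/git package at the revision this patch targets; the showHead helper and the "." repo path are illustrative only.)

```go
package main

import (
	"bytes"
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

// showHead runs two git commands the way the patched code does: once capturing
// output as a string, once streaming into explicit Stdout/Stderr buffers.
func showHead(ctx context.Context, repoPath string) error {
	// RunStdString returns stdout, stderr and error; the patch switches to this
	// three-value form everywhere RunInDir used to be called.
	head, _, err := git.NewCommand(ctx, "rev-parse", "HEAD").RunStdString(&git.RunOpts{Dir: repoPath})
	if err != nil {
		return err
	}

	stdout := new(bytes.Buffer)
	stderr := new(bytes.Buffer)
	// Run replaces RunWithContext; the Timeout field is now optional rather than
	// the old mandatory Timeout: -1.
	if err := git.NewCommand(ctx, "cat-file", "-t", "HEAD").Run(&git.RunOpts{
		Dir:    repoPath,
		Stdout: stdout,
		Stderr: stderr,
	}); err != nil {
		return fmt.Errorf("cat-file: %v (stderr: %s)", err, stderr.String())
	}

	fmt.Printf("HEAD %s is a %s", head, stdout.String())
	return nil
}

func main() {
	if err := showHead(context.Background(), "."); err != nil {
		fmt.Println(err)
	}
}
```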
- if commit.ID.String() != opts.LastCommitID && opts.OldBranch == opts.NewBranch { - if changed, err := commit.FileChangedSinceCommit(treePath, opts.LastCommitID); err != nil { - return nil, err - } else if changed { - return nil, models.ErrCommitIDDoesNotMatch{ - GivenCommitID: opts.LastCommitID, - CurrentCommitID: opts.LastCommitID, + opts.LastCommitID = lastCommitID.String() + + } + + if !opts.IsNewFile { + fromEntry, err := commit.GetTreeEntryByPath(fromTreePath) + if err != nil { + return nil, err + } + if opts.SHA != "" { + // If a SHA was given and the SHA given doesn't match the SHA of the fromTreePath, throw error + if opts.SHA != fromEntry.ID.String() { + return nil, models.ErrSHADoesNotMatch{ + Path: treePath, + GivenSHA: opts.SHA, + CurrentSHA: fromEntry.ID.String(), } } - // The file wasn't modified, so we are good to delete it + } else if opts.LastCommitID != "" { + // If a lastCommitID was given and it doesn't match the commitID of the head of the branch throw + // an error, but only if we aren't creating a new branch. + if commit.ID.String() != opts.LastCommitID && opts.OldBranch == opts.NewBranch { + if changed, err := commit.FileChangedSinceCommit(treePath, opts.LastCommitID); err != nil { + return nil, err + } else if changed { + return nil, models.ErrCommitIDDoesNotMatch{ + GivenCommitID: opts.LastCommitID, + CurrentCommitID: opts.LastCommitID, + } + } + // The file wasn't modified, so we are good to delete it + } + } else { + // When updating a file, a lastCommitID or SHA needs to be given to make sure other commits + // haven't been made. We throw an error if one wasn't provided. + return nil, models.ErrSHAOrCommitIDNotProvided{} } - } else { - // When updating a file, a lastCommitID or SHA needs to be given to make sure other commits - // haven't been made. We throw an error if one wasn't provided. - return nil, models.ErrSHAOrCommitIDNotProvided{} + encoding, bom = detectEncodingAndBOM(fromEntry, repo) + executable = fromEntry.IsExecutable() } - encoding, bom = detectEncodingAndBOM(fromEntry, repo) - executable = fromEntry.IsExecutable() - } - // For the path where this file will be created/updated, we need to make - // sure no parts of the path are existing files or links except for the last - // item in the path which is the file name, and that shouldn't exist IF it is - // a new file OR is being moved to a new path. - treePathParts := strings.Split(treePath, "/") - subTreePath := "" - for index, part := range treePathParts { - subTreePath = path.Join(subTreePath, part) - entry, err := commit.GetTreeEntryByPath(subTreePath) - if err != nil { - if git.IsErrNotExist(err) { - // Means there is no item with that name, so we're good - break + // For the path where this file will be created/updated, we need to make + // sure no parts of the path are existing files or links except for the last + // item in the path which is the file name, and that shouldn't exist IF it is + // a new file OR is being moved to a new path. 
+ treePathParts := strings.Split(treePath, "/") + subTreePath := "" + for index, part := range treePathParts { + subTreePath = path.Join(subTreePath, part) + entry, err := commit.GetTreeEntryByPath(subTreePath) + if err != nil { + if git.IsErrNotExist(err) { + // Means there is no item with that name, so we're good + break + } + return nil, err } - return nil, err - } - if index < len(treePathParts)-1 { - if !entry.IsDir() { + if index < len(treePathParts)-1 { + if !entry.IsDir() { + return nil, models.ErrFilePathInvalid{ + Message: fmt.Sprintf("a file exists where you’re trying to create a subdirectory [path: %s]", subTreePath), + Path: subTreePath, + Name: part, + Type: git.EntryModeBlob, + } + } + } else if entry.IsLink() { return nil, models.ErrFilePathInvalid{ - Message: fmt.Sprintf("a file exists where you’re trying to create a subdirectory [path: %s]", subTreePath), + Message: fmt.Sprintf("a symbolic link exists where you’re trying to create a subdirectory [path: %s]", subTreePath), Path: subTreePath, Name: part, - Type: git.EntryModeBlob, + Type: git.EntryModeSymlink, + } + } else if entry.IsDir() { + return nil, models.ErrFilePathInvalid{ + Message: fmt.Sprintf("a directory exists where you’re trying to create a file [path: %s]", subTreePath), + Path: subTreePath, + Name: part, + Type: git.EntryModeTree, + } + } else if fromTreePath != treePath || opts.IsNewFile { + // The entry shouldn't exist if we are creating new file or moving to a new path + return nil, models.ErrRepoFileAlreadyExists{ + Path: treePath, } } - } else if entry.IsLink() { - return nil, models.ErrFilePathInvalid{ - Message: fmt.Sprintf("a symbolic link exists where you’re trying to create a subdirectory [path: %s]", subTreePath), - Path: subTreePath, - Name: part, - Type: git.EntryModeSymlink, - } - } else if entry.IsDir() { - return nil, models.ErrFilePathInvalid{ - Message: fmt.Sprintf("a directory exists where you’re trying to create a file [path: %s]", subTreePath), - Path: subTreePath, - Name: part, - Type: git.EntryModeTree, - } - } else if fromTreePath != treePath || opts.IsNewFile { - // The entry shouldn't exist if we are creating new file or moving to a new path - return nil, models.ErrRepoFileAlreadyExists{ - Path: treePath, - } - } + } } // Get the two paths (might be the same if not moving) from the index if they exist @@ -354,7 +366,7 @@ func CreateOrUpdateRepoFile(ctx context.Context, repo *repo_model.Repository, do opts.Content = content var lfsMetaObject *models.LFSMetaObject - if setting.LFS.StartServer { + if setting.LFS.StartServer && hasOldBranch { // Check there is no way this can return multiple infos filename2attribute2info, err := t.gitRepo.CheckAttribute(git.CheckAttributeOpts{ Attributes: []string{"filter"}, @@ -401,9 +413,9 @@ func CreateOrUpdateRepoFile(ctx context.Context, repo *repo_model.Repository, do // Now commit the tree var commitHash string if opts.Dates != nil { - commitHash, err = t.CommitTreeWithDate(author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer) + commitHash, err = t.CommitTreeWithDate(opts.LastCommitID, author, committer, treeHash, message, opts.Signoff, opts.Dates.Author, opts.Dates.Committer) } else { - commitHash, err = t.CommitTree(author, committer, treeHash, message, opts.Signoff) + commitHash, err = t.CommitTree(opts.LastCommitID, author, committer, treeHash, message, opts.Signoff) } if err != nil { return nil, err @@ -436,7 +448,7 @@ func CreateOrUpdateRepoFile(ctx context.Context, repo *repo_model.Repository, do 
return nil, err } - commit, err = t.GetCommit(commitHash) + commit, err := t.GetCommit(commitHash) if err != nil { return nil, err } diff --git a/services/repository/files/upload.go b/services/repository/files/upload.go index 79fca3ead7..ff0448fc87 100644 --- a/services/repository/files/upload.go +++ b/services/repository/files/upload.go @@ -123,7 +123,7 @@ func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use committer := doer // Now commit the tree - commitHash, err := t.CommitTree(author, committer, treeHash, opts.Message, opts.Signoff) + commitHash, err := t.CommitTree(opts.LastCommitID, author, committer, treeHash, opts.Message, opts.Signoff) if err != nil { return err } diff --git a/services/repository/fork.go b/services/repository/fork.go index ec8fb1a09e..a2ef75bbd0 100644 --- a/services/repository/fork.go +++ b/services/repository/fork.go @@ -30,8 +30,8 @@ type ForkRepoOptions struct { } // ForkRepository forks a repository -func ForkRepository(doer, owner *user_model.User, opts ForkRepoOptions) (_ *repo_model.Repository, err error) { - forkedRepo, err := repo_model.GetUserFork(opts.BaseRepo.ID, owner.ID) +func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts ForkRepoOptions) (*repo_model.Repository, error) { + forkedRepo, err := repo_model.GetUserFork(ctx, opts.BaseRepo.ID, owner.ID) if err != nil { return nil, err } @@ -91,38 +91,38 @@ func ForkRepository(doer, owner *user_model.User, opts ForkRepoOptions) (_ *repo panic(panicErr) }() - err = db.WithTx(func(ctx context.Context) error { - if err = models.CreateRepository(ctx, doer, owner, repo, false); err != nil { + err = db.WithTx(func(txCtx context.Context) error { + if err = models.CreateRepository(txCtx, doer, owner, repo, false); err != nil { return err } - if err = models.IncrementRepoForkNum(ctx, opts.BaseRepo.ID); err != nil { + if err = models.IncrementRepoForkNum(txCtx, opts.BaseRepo.ID); err != nil { return err } // copy lfs files failure should not be ignored - if err = models.CopyLFS(ctx, repo, opts.BaseRepo); err != nil { + if err = models.CopyLFS(txCtx, repo, opts.BaseRepo); err != nil { return err } needsRollback = true repoPath := repo_model.RepoPath(owner.Name, repo.Name) - if stdout, err := git.NewCommand(ctx, + if stdout, _, err := git.NewCommand(txCtx, "clone", "--bare", oldRepoPath, repoPath). SetDescription(fmt.Sprintf("ForkRepository(git clone): %s to %s", opts.BaseRepo.FullName(), repo.FullName())). - RunInDirTimeout(10*time.Minute, ""); err != nil { + RunStdBytes(&git.RunOpts{Timeout: 10 * time.Minute}); err != nil { log.Error("Fork Repository (git clone) Failed for %v (from %v):\nStdout: %s\nError: %v", repo, opts.BaseRepo, stdout, err) return fmt.Errorf("git clone: %v", err) } - if err := models.CheckDaemonExportOK(ctx, repo); err != nil { + if err := models.CheckDaemonExportOK(txCtx, repo); err != nil { return fmt.Errorf("checkDaemonExportOK: %v", err) } - if stdout, err := git.NewCommand(ctx, "update-server-info"). + if stdout, _, err := git.NewCommand(txCtx, "update-server-info"). SetDescription(fmt.Sprintf("ForkRepository(git update-server-info): %s", repo.FullName())). 
- RunInDir(repoPath); err != nil { + RunStdString(&git.RunOpts{Dir: repoPath}); err != nil { log.Error("Fork Repository (git update-server-info) failed for %v:\nStdout: %s\nError: %v", repo, stdout, err) return fmt.Errorf("git update-server-info: %v", err) } @@ -139,14 +139,14 @@ func ForkRepository(doer, owner *user_model.User, opts ForkRepoOptions) (_ *repo } // even if below operations failed, it could be ignored. And they will be retried - if err := models.UpdateRepoSize(db.DefaultContext, repo); err != nil { + if err := models.UpdateRepoSize(ctx, repo); err != nil { log.Error("Failed to update size for repository: %v", err) } if err := repo_model.CopyLanguageStat(opts.BaseRepo, repo); err != nil { log.Error("Copy language stat from oldRepo failed: %v", err) } - gitRepo, err := git.OpenRepositoryCtx(git.DefaultContext, repo.RepoPath()) + gitRepo, err := git.OpenRepository(ctx, repo.RepoPath()) if err != nil { log.Error("Open created git repository failed: %v", err) } else { diff --git a/services/repository/fork_test.go b/services/repository/fork_test.go index 5d392e224f..965887b5d1 100644 --- a/services/repository/fork_test.go +++ b/services/repository/fork_test.go @@ -11,6 +11,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" "github.com/stretchr/testify/assert" ) @@ -22,7 +23,7 @@ func TestForkRepository(t *testing.T) { user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 13}).(*user_model.User) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 10}).(*repo_model.Repository) - fork, err := ForkRepository(user, user, ForkRepoOptions{ + fork, err := ForkRepository(git.DefaultContext, user, user, ForkRepoOptions{ BaseRepo: repo, Name: "test", Description: "test", diff --git a/services/repository/hooks.go b/services/repository/hooks.go index 3905249499..67931ffcb6 100644 --- a/services/repository/hooks.go +++ b/services/repository/hooks.go @@ -23,7 +23,7 @@ func SyncRepositoryHooks(ctx context.Context) error { log.Trace("Doing: SyncRepositoryHooks") if err := db.Iterate( - db.DefaultContext, + ctx, new(repo_model.Repository), builder.Gt{"id": 0}, func(idx int, bean interface{}) error { @@ -54,13 +54,13 @@ func SyncRepositoryHooks(ctx context.Context) error { // GenerateGitHooks generates git hooks from a template repository func GenerateGitHooks(ctx context.Context, templateRepo, generateRepo *repo_model.Repository) error { - generateGitRepo, err := git.OpenRepositoryCtx(ctx, generateRepo.RepoPath()) + generateGitRepo, err := git.OpenRepository(ctx, generateRepo.RepoPath()) if err != nil { return err } defer generateGitRepo.Close() - templateGitRepo, err := git.OpenRepositoryCtx(ctx, templateRepo.RepoPath()) + templateGitRepo, err := git.OpenRepository(ctx, templateRepo.RepoPath()) if err != nil { return err } diff --git a/services/repository/main_test.go b/services/repository/main_test.go index 262d339481..42134fa7ad 100644 --- a/services/repository/main_test.go +++ b/services/repository/main_test.go @@ -12,5 +12,7 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } diff --git a/services/repository/push.go b/services/repository/push.go index fafe4736ab..4eb52c18c2 100644 --- a/services/repository/push.go +++ b/services/repository/push.go @@ -89,13 +89,13 @@ func pushUpdates(optsList 
[]*repo_module.PushUpdateOptions) error { repoPath := repo.RepoPath() - gitRepo, err := git.OpenRepositoryCtx(ctx, repoPath) + gitRepo, err := git.OpenRepository(ctx, repoPath) if err != nil { return fmt.Errorf("OpenRepository[%s]: %v", repoPath, err) } defer gitRepo.Close() - if err = models.UpdateRepoSize(db.DefaultContext, repo); err != nil { + if err = models.UpdateRepoSize(ctx, repo); err != nil { log.Error("Failed to update size for repository: %v", err) } @@ -222,7 +222,34 @@ func pushUpdates(optsList []*repo_module.PushUpdateOptions) error { if len(commits.Commits) > setting.UI.FeedMaxCommitNum { commits.Commits = commits.Commits[:setting.UI.FeedMaxCommitNum] } - commits.CompareURL = repo.ComposeCompareURL(opts.OldCommitID, opts.NewCommitID) + + oldCommitID := opts.OldCommitID + if oldCommitID == git.EmptySHA && len(commits.Commits) > 0 { + oldCommit, err := gitRepo.GetCommit(commits.Commits[len(commits.Commits)-1].Sha1) + if err != nil && !git.IsErrNotExist(err) { + log.Error("unable to GetCommit %s from %-v: %v", oldCommitID, repo, err) + } + if oldCommit != nil { + for i := 0; i < oldCommit.ParentCount(); i++ { + commitID, _ := oldCommit.ParentID(i) + if !commitID.IsZero() { + oldCommitID = commitID.String() + break + } + } + } + } + + if oldCommitID == git.EmptySHA && repo.DefaultBranch != branch { + oldCommitID = repo.DefaultBranch + } + + if oldCommitID != git.EmptySHA { + commits.CompareURL = repo.ComposeCompareURL(oldCommitID, opts.NewCommitID) + } else { + commits.CompareURL = "" + } + notification.NotifyPushCommits(pusher, repo, opts, commits) if err = models.RemoveDeletedBranchByName(repo.ID, branch); err != nil { diff --git a/services/repository/repository.go b/services/repository/repository.go index c3ca867187..6799ca586e 100644 --- a/services/repository/repository.go +++ b/services/repository/repository.go @@ -9,12 +9,16 @@ import ( "fmt" "code.gitea.io/gitea/models" + admin_model "code.gitea.io/gitea/models/admin" + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" + packages_model "code.gitea.io/gitea/models/packages" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/notification" repo_module "code.gitea.io/gitea/modules/repository" - cfg "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/setting" pull_service "code.gitea.io/gitea/services/pull" ) @@ -42,15 +46,18 @@ func DeleteRepository(ctx context.Context, doer *user_model.User, repo *repo_mod notification.NotifyDeleteRepository(doer, repo) } - err := models.DeleteRepository(doer, repo.OwnerID, repo.ID) - return err + if err := models.DeleteRepository(doer, repo.OwnerID, repo.ID); err != nil { + return err + } + + return packages_model.UnlinkRepositoryFromAllPackages(ctx, repo.ID) } // PushCreateRepo creates a repository when a new repository is pushed to an appropriate namespace func PushCreateRepo(authUser, owner *user_model.User, repoName string) (*repo_model.Repository, error) { if !authUser.IsAdmin { if owner.IsOrganization() { - if ok, err := models.CanCreateOrgRepo(owner.ID, authUser.ID); err != nil { + if ok, err := organization.CanCreateOrgRepo(owner.ID, authUser.ID); err != nil { return nil, err } else if !ok { return nil, fmt.Errorf("cannot push-create repository for org") @@ -62,7 +69,7 @@ func PushCreateRepo(authUser, owner *user_model.User, repoName string) (*repo_mo repo, err := CreateRepository(authUser, owner, models.CreateRepoOptions{ Name: repoName, 
- IsPrivate: cfg.Repository.DefaultPushCreatePrivate, + IsPrivate: setting.Repository.DefaultPushCreatePrivate, }) if err != nil { return nil, err @@ -71,7 +78,10 @@ func PushCreateRepo(authUser, owner *user_model.User, repoName string) (*repo_mo return repo, nil } -// NewContext start repository service -func NewContext() error { +// Init start repository service +func Init() error { + repo_module.LoadRepoConfig() + admin_model.RemoveAllWithNotice(db.DefaultContext, "Clean up temporary repository uploads", setting.Repository.Upload.TempPath) + admin_model.RemoveAllWithNotice(db.DefaultContext, "Clean up temporary repositories", repo_module.LocalCopyPath()) return initPushQueue() } diff --git a/services/repository/transfer.go b/services/repository/transfer.go index 1e8b78dfc6..3feeb68f22 100644 --- a/services/repository/transfer.go +++ b/services/repository/transfer.go @@ -9,6 +9,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/perm" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" @@ -18,10 +19,11 @@ import ( ) // repoWorkingPool represents a working pool to order the parallel changes to the same repository +// TODO: use clustered lock (unique queue? or *abuse* cache) var repoWorkingPool = sync.NewExclusivePool() // TransferOwnership transfers all corresponding setting from old user to new one. -func TransferOwnership(doer, newOwner *user_model.User, repo *repo_model.Repository, teams []*models.Team) error { +func TransferOwnership(doer, newOwner *user_model.User, repo *repo_model.Repository, teams []*organization.Team) error { if err := repo.GetOwner(db.DefaultContext); err != nil { return err } @@ -46,7 +48,7 @@ func TransferOwnership(doer, newOwner *user_model.User, repo *repo_model.Reposit } for _, team := range teams { - if err := team.AddRepository(newRepo); err != nil { + if err := models.AddRepository(team, newRepo); err != nil { return err } } @@ -81,7 +83,7 @@ func ChangeRepositoryName(doer *user_model.User, repo *repo_model.Repository, ne // StartRepositoryTransfer transfer a repo from one owner to a new one. // it make repository into pending transfer state, if doer can not create repo for new owner. 
-func StartRepositoryTransfer(doer, newOwner *user_model.User, repo *repo_model.Repository, teams []*models.Team) error { +func StartRepositoryTransfer(doer, newOwner *user_model.User, repo *repo_model.Repository, teams []*organization.Team) error { if err := models.TestRepositoryReadyForTransfer(repo.Status); err != nil { return err } @@ -93,7 +95,7 @@ func StartRepositoryTransfer(doer, newOwner *user_model.User, repo *repo_model.R // If new owner is an org and user can create repos he can transfer directly too if newOwner.IsOrganization() { - allowed, err := models.CanCreateOrgRepo(newOwner.ID, doer.ID) + allowed, err := organization.CanCreateOrgRepo(newOwner.ID, doer.ID) if err != nil { return err } diff --git a/services/repository/transfer_test.go b/services/repository/transfer_test.go index d4fbe95834..1081c76c7e 100644 --- a/services/repository/transfer_test.go +++ b/services/repository/transfer_test.go @@ -9,6 +9,7 @@ import ( "testing" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -53,7 +54,7 @@ func TestTransferOwnership(t *testing.T) { Content: "user3/repo3", }) - unittest.CheckConsistencyFor(t, &repo_model.Repository{}, &user_model.User{}, &models.Team{}) + unittest.CheckConsistencyFor(t, &repo_model.Repository{}, &user_model.User{}, &organization.Team{}) } func TestStartRepositoryTransferSetPermission(t *testing.T) { @@ -74,5 +75,5 @@ func TestStartRepositoryTransferSetPermission(t *testing.T) { assert.NoError(t, err) assert.True(t, hasAccess) - unittest.CheckConsistencyFor(t, &repo_model.Repository{}, &user_model.User{}, &models.Team{}) + unittest.CheckConsistencyFor(t, &repo_model.Repository{}, &user_model.User{}, &organization.Team{}) } diff --git a/services/task/migrate.go b/services/task/migrate.go index d6ff514320..6f35134525 100644 --- a/services/task/migrate.go +++ b/services/task/migrate.go @@ -129,7 +129,7 @@ func runMigrateTask(t *models.Task) (err error) { } // remoteAddr may contain credentials, so we sanitize it - err = util.NewStringURLSanitizedError(err, opts.CloneAddr, true) + err = util.SanitizeErrorCredentialURLs(err) if strings.Contains(err.Error(), "Authentication failed") || strings.Contains(err.Error(), "could not read Username") { return fmt.Errorf("Authentication failed: %v", err.Error()) diff --git a/services/task/task.go b/services/task/task.go index 3f823fc224..9deb0286c5 100644 --- a/services/task/task.go +++ b/services/task/task.go @@ -77,7 +77,7 @@ func CreateMigrateTask(doer, u *user_model.User, opts base.MigrateOptions) (*mod if err != nil { return nil, err } - opts.CloneAddr = util.NewStringURLSanitizer(opts.CloneAddr, true).Replace(opts.CloneAddr) + opts.CloneAddr = util.SanitizeCredentialURLs(opts.CloneAddr) opts.AuthPasswordEncrypted, err = secret.EncryptSecret(setting.SecretKey, opts.AuthPassword) if err != nil { return nil, err diff --git a/services/user/user.go b/services/user/user.go index 21f1a74f62..d41fc42493 100644 --- a/services/user/user.go +++ b/services/user/user.go @@ -16,6 +16,8 @@ import ( admin_model "code.gitea.io/gitea/models/admin" asymkey_model "code.gitea.io/gitea/models/asymkey" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" + packages_model "code.gitea.io/gitea/models/packages" repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/avatar" @@ -50,13 +52,20 @@ func 
DeleteUser(u *user_model.User) error { } // Check membership of organization. - count, err = models.GetOrganizationCount(ctx, u) + count, err = organization.GetOrganizationCount(ctx, u) if err != nil { return fmt.Errorf("GetOrganizationCount: %v", err) } else if count > 0 { return models.ErrUserHasOrgs{UID: u.ID} } + // Check ownership of packages. + if ownsPackages, err := packages_model.HasOwnerPackages(ctx, u.ID); err != nil { + return fmt.Errorf("HasOwnerPackages: %v", err) + } else if ownsPackages { + return models.ErrUserOwnPackages{UID: u.ID} + } + if err := models.DeleteUser(ctx, u); err != nil { return fmt.Errorf("DeleteUser: %v", err) } @@ -78,7 +87,7 @@ func DeleteUser(u *user_model.User) error { path := user_model.UserPath(u.Name) if err := util.RemoveAll(path); err != nil { err = fmt.Errorf("Failed to RemoveAll %s: %v", path, err) - _ = admin_model.CreateNotice(db.DefaultContext, admin_model.NoticeTask, fmt.Sprintf("delete user '%s': %v", u.Name, err)) + _ = admin_model.CreateNotice(ctx, admin_model.NoticeTask, fmt.Sprintf("delete user '%s': %v", u.Name, err)) return err } @@ -86,7 +95,7 @@ func DeleteUser(u *user_model.User) error { avatarPath := u.CustomAvatarRelativePath() if err := storage.Avatars.Delete(avatarPath); err != nil { err = fmt.Errorf("Failed to remove %s: %v", avatarPath, err) - _ = admin_model.CreateNotice(db.DefaultContext, admin_model.NoticeTask, fmt.Sprintf("delete user '%s': %v", u.Name, err)) + _ = admin_model.CreateNotice(ctx, admin_model.NoticeTask, fmt.Sprintf("delete user '%s': %v", u.Name, err)) return err } } @@ -110,7 +119,7 @@ func DeleteInactiveUsers(ctx context.Context, olderThan time.Duration) error { } if err := DeleteUser(u); err != nil { // Ignore users that were set inactive by admin. - if models.IsErrUserOwnRepos(err) || models.IsErrUserHasOrgs(err) { + if models.IsErrUserOwnRepos(err) || models.IsErrUserHasOrgs(err) || models.IsErrUserOwnPackages(err) { continue } return err diff --git a/services/user/user_test.go b/services/user/user_test.go index 5b35db790c..cfa02b0033 100644 --- a/services/user/user_test.go +++ b/services/user/user_test.go @@ -10,6 +10,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/organization" repo_model "code.gitea.io/gitea/models/repo" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" @@ -19,7 +20,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } func TestDeleteUser(t *testing.T) { @@ -36,11 +39,11 @@ func TestDeleteUser(t *testing.T) { return } - orgUsers := make([]*models.OrgUser, 0, 10) - assert.NoError(t, db.GetEngine(db.DefaultContext).Find(&orgUsers, &models.OrgUser{UID: userID})) + orgUsers := make([]*organization.OrgUser, 0, 10) + assert.NoError(t, db.GetEngine(db.DefaultContext).Find(&orgUsers, &organization.OrgUser{UID: userID})) for _, orgUser := range orgUsers { if err := models.RemoveOrgUser(orgUser.OrgID, orgUser.UID); err != nil { - assert.True(t, models.IsErrLastOrgOwner(err)) + assert.True(t, organization.IsErrLastOrgOwner(err)) return } } diff --git a/services/webhook/deliver.go b/services/webhook/deliver.go index 88b709cb41..77744473f1 100644 --- a/services/webhook/deliver.go +++ b/services/webhook/deliver.go @@ -15,7 +15,6 @@ import ( "io" "net/http" "net/url" - "strconv" "strings" "sync" "time" @@ -24,14 +23,16 @@ import ( "code.gitea.io/gitea/modules/graceful" 
"code.gitea.io/gitea/modules/hostmatcher" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/proxy" + "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/setting" "github.com/gobwas/glob" ) // Deliver deliver hook task -func Deliver(t *webhook_model.HookTask) error { +func Deliver(ctx context.Context, t *webhook_model.HookTask) error { w, err := webhook_model.GetWebhookByID(t.HookID) if err != nil { return err @@ -148,6 +149,8 @@ func Deliver(t *webhook_model.HookTask) error { t.Delivered = time.Now().UnixNano() if t.IsSucceed { log.Trace("Hook delivered: %s", t.UUID) + } else if !w.IsActive { + log.Trace("Hook delivery skipped as webhook is inactive: %s", t.UUID) } else { log.Trace("Hook delivery failed: %s", t.UUID) } @@ -172,7 +175,11 @@ func Deliver(t *webhook_model.HookTask) error { return fmt.Errorf("webhook task skipped (webhooks disabled): [%d]", t.ID) } - resp, err := webhookHTTPClient.Do(req.WithContext(graceful.GetManager().ShutdownContext())) + if !w.IsActive { + return nil + } + + resp, err := webhookHTTPClient.Do(req.WithContext(ctx)) if err != nil { t.ResponseInfo.Body = fmt.Sprintf("Delivery: %v", err) return err @@ -195,15 +202,15 @@ func Deliver(t *webhook_model.HookTask) error { return nil } -// DeliverHooks checks and delivers undelivered hooks. -// FIXME: graceful: This would likely benefit from either a worker pool with dummy queue -// or a full queue. Then more hooks could be sent at same time. -func DeliverHooks(ctx context.Context) { +// populateDeliverHooks checks and delivers undelivered hooks. +func populateDeliverHooks(ctx context.Context) { select { case <-ctx.Done(): return default: } + ctx, _, finished := process.GetManager().AddTypedContext(ctx, "Service: DeliverHooks", process.SystemProcessType, true) + defer finished() tasks, err := webhook_model.FindUndeliveredHookTasks() if err != nil { log.Error("DeliverHooks: %v", err) @@ -217,42 +224,9 @@ func DeliverHooks(ctx context.Context) { return default: } - if err = Deliver(t); err != nil { - log.Error("deliver: %v", err) - } - } - // Start listening on new hook requests. 
- for { - select { - case <-ctx.Done(): - hookQueue.Close() - return - case repoIDStr := <-hookQueue.Queue(): - log.Trace("DeliverHooks [repo_id: %v]", repoIDStr) - hookQueue.Remove(repoIDStr) - - repoID, err := strconv.ParseInt(repoIDStr, 10, 64) - if err != nil { - log.Error("Invalid repo ID: %s", repoIDStr) - continue - } - - tasks, err := webhook_model.FindRepoUndeliveredHookTasks(repoID) - if err != nil { - log.Error("Get repository [%d] hook tasks: %v", repoID, err) - continue - } - for _, t := range tasks { - select { - case <-ctx.Done(): - return - default: - } - if err = Deliver(t); err != nil { - log.Error("deliver: %v", err) - } - } + if err := addToTask(t.RepoID); err != nil { + log.Error("DeliverHook failed [%d]: %v", t.RepoID, err) } } } @@ -288,8 +262,8 @@ func webhookProxy() func(req *http.Request) (*url.URL, error) { } } -// InitDeliverHooks starts the hooks delivery thread -func InitDeliverHooks() { +// Init starts the hooks delivery thread +func Init() error { timeout := time.Duration(setting.Webhook.DeliverTimeout) * time.Second allowedHostListValue := setting.Webhook.AllowedHostList @@ -307,5 +281,13 @@ func InitDeliverHooks() { }, } - go graceful.GetManager().RunWithShutdownContext(DeliverHooks) + hookQueue = queue.CreateUniqueQueue("webhook_sender", handle, "") + if hookQueue == nil { + return fmt.Errorf("Unable to create webhook_sender Queue") + } + go graceful.GetManager().RunWithShutdownFns(hookQueue.Run) + + populateDeliverHooks(graceful.GetManager().HammerContext()) + + return nil } diff --git a/services/webhook/main_test.go b/services/webhook/main_test.go index a87b74e89d..1dc2e1bd83 100644 --- a/services/webhook/main_test.go +++ b/services/webhook/main_test.go @@ -9,10 +9,16 @@ import ( "testing" "code.gitea.io/gitea/models/unittest" + "code.gitea.io/gitea/modules/setting" _ "code.gitea.io/gitea/models" ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + setting.LoadForTest() + setting.NewQueueService() + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + SetUp: Init, + }) } diff --git a/services/webhook/webhook.go b/services/webhook/webhook.go index 607fac9634..b15b8173f5 100644 --- a/services/webhook/webhook.go +++ b/services/webhook/webhook.go @@ -6,15 +6,17 @@ package webhook import ( "fmt" + "strconv" "strings" repo_model "code.gitea.io/gitea/models/repo" webhook_model "code.gitea.io/gitea/models/webhook" "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/graceful" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" - "code.gitea.io/gitea/modules/sync" "code.gitea.io/gitea/modules/util" "github.com/gobwas/glob" @@ -79,7 +81,7 @@ func IsValidHookTaskType(name string) bool { } // hookQueue is a global queue of web hooks -var hookQueue = sync.NewUniqueQueue(setting.Webhook.QueueLength) +var hookQueue queue.UniqueQueue // getPayloadBranch returns branch for hook event, if applicable. 
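(Aside, not part of the patch: the removed channel loop above is replaced, in the webhook.go hunks further down, by a persistent unique queue. A minimal sketch of that wiring under the same assumptions as the patch's updated main_test.go — queue settings loaded via setting.LoadForTest/NewQueueService; the exampleHandle name is illustrative.)

```go
package main

import (
	"fmt"
	"strconv"

	"code.gitea.io/gitea/modules/graceful"
	"code.gitea.io/gitea/modules/queue"
	"code.gitea.io/gitea/modules/setting"
)

// exampleHandle mirrors the handler shape the patch registers: it receives the
// queued repo IDs as strings and returns nothing to be re-queued.
func exampleHandle(data ...queue.Data) []queue.Data {
	for _, datum := range data {
		repoID, err := strconv.ParseInt(datum.(string), 10, 64)
		if err != nil {
			continue
		}
		fmt.Println("would deliver undelivered hook tasks for repo", repoID)
	}
	return nil
}

func main() {
	// Load queue configuration the way the patched webhook tests do.
	setting.LoadForTest()
	setting.NewQueueService()

	// CreateUniqueQueue can return nil when the queue service is not set up;
	// the patch checks for exactly this.
	q := queue.CreateUniqueQueue("webhook_sender", exampleHandle, "")
	if q == nil {
		panic("unable to create webhook_sender queue")
	}
	go graceful.GetManager().RunWithShutdownFns(q.Run)

	// De-duplicated push, as addToTask does in the patch.
	if err := q.PushFunc(strconv.FormatInt(1, 10), nil); err != nil && err != queue.ErrAlreadyInQueue {
		panic(err)
	}
}
```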
func getPayloadBranch(p api.Payloader) string { @@ -100,14 +102,47 @@ func getPayloadBranch(p api.Payloader) string { return "" } +// handle passed PR IDs and test the PRs +func handle(data ...queue.Data) []queue.Data { + for _, datum := range data { + repoIDStr := datum.(string) + log.Trace("DeliverHooks [repo_id: %v]", repoIDStr) + + repoID, err := strconv.ParseInt(repoIDStr, 10, 64) + if err != nil { + log.Error("Invalid repo ID: %s", repoIDStr) + continue + } + + tasks, err := webhook_model.FindRepoUndeliveredHookTasks(repoID) + if err != nil { + log.Error("Get repository [%d] hook tasks: %v", repoID, err) + continue + } + for _, t := range tasks { + if err = Deliver(graceful.GetManager().HammerContext(), t); err != nil { + log.Error("deliver: %v", err) + } + } + } + return nil +} + +func addToTask(repoID int64) error { + err := hookQueue.PushFunc(strconv.FormatInt(repoID, 10), nil) + if err != nil && err != queue.ErrAlreadyInQueue { + return err + } + return nil +} + // PrepareWebhook adds special webhook to task queue for given payload. func PrepareWebhook(w *webhook_model.Webhook, repo *repo_model.Repository, event webhook_model.HookEventType, p api.Payloader) error { if err := prepareWebhook(w, repo, event, p); err != nil { return err } - go hookQueue.Add(repo.ID) - return nil + return addToTask(repo.ID) } func checkBranch(w *webhook_model.Webhook, branch string) bool { @@ -187,8 +222,7 @@ func PrepareWebhooks(repo *repo_model.Repository, event webhook_model.HookEventT return err } - go hookQueue.Add(repo.ID) - return nil + return addToTask(repo.ID) } func prepareWebhooks(repo *repo_model.Repository, event webhook_model.HookEventType, p api.Payloader) error { @@ -214,7 +248,7 @@ func prepareWebhooks(repo *repo_model.Repository, event webhook_model.HookEventT } // Add any admin-defined system webhooks - systemHooks, err := webhook_model.GetSystemWebhooks() + systemHooks, err := webhook_model.GetSystemWebhooks(util.OptionalBoolTrue) if err != nil { return fmt.Errorf("GetSystemWebhooks: %v", err) } @@ -239,7 +273,5 @@ func ReplayHookTask(w *webhook_model.Webhook, uuid string) error { return err } - go hookQueue.Add(t.RepoID) - - return nil + return addToTask(t.RepoID) } diff --git a/services/wiki/wiki.go b/services/wiki/wiki.go index 919753726f..43e35eed69 100644 --- a/services/wiki/wiki.go +++ b/services/wiki/wiki.go @@ -27,7 +27,8 @@ import ( var ( reservedWikiNames = []string{"_pages", "_new", "_edit", "raw"} - wikiWorkingPool = sync.NewExclusivePool() + // TODO: use clustered lock (unique queue? 
or *abuse* cache) + wikiWorkingPool = sync.NewExclusivePool() ) func nameAllowed(name string) error { @@ -81,7 +82,7 @@ func InitWiki(ctx context.Context, repo *repo_model.Repository) error { return fmt.Errorf("InitRepository: %v", err) } else if err = repo_module.CreateDelegateHooks(repo.WikiPath()); err != nil { return fmt.Errorf("createDelegateHooks: %v", err) - } else if _, err = git.NewCommand(ctx, "symbolic-ref", "HEAD", git.BranchPrefix+"master").RunInDir(repo.WikiPath()); err != nil { + } else if _, _, err = git.NewCommand(ctx, "symbolic-ref", "HEAD", git.BranchPrefix+"master").RunStdString(&git.RunOpts{Dir: repo.WikiPath()}); err != nil { return fmt.Errorf("unable to set default wiki branch to master: %v", err) } return nil @@ -132,12 +133,12 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model hasMasterBranch := git.IsBranchExist(ctx, repo.WikiPath(), "master") - basePath, err := models.CreateTemporaryPath("update-wiki") + basePath, err := repo_module.CreateTemporaryPath("update-wiki") if err != nil { return err } defer func() { - if err := models.RemoveTemporaryPath(basePath); err != nil { + if err := repo_module.RemoveTemporaryPath(basePath); err != nil { log.Error("Merge: RemoveTemporaryPath: %s", err) } }() @@ -156,7 +157,7 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model return fmt.Errorf("Failed to clone repository: %s (%v)", repo.FullName(), err) } - gitRepo, err := git.OpenRepositoryCtx(ctx, basePath) + gitRepo, err := git.OpenRepository(ctx, basePath) if err != nil { log.Error("Unable to open temporary repository: %s (%v)", basePath, err) return fmt.Errorf("Failed to open new temporary repository in: %s %v", basePath, err) @@ -248,7 +249,7 @@ func updateWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model if err := git.Push(gitRepo.Ctx, basePath, git.PushOptions{ Remote: "origin", Branch: fmt.Sprintf("%s:%s%s", commitHash.String(), git.BranchPrefix, "master"), - Env: models.FullPushingEnvironment( + Env: repo_module.FullPushingEnvironment( doer, doer, repo, @@ -286,12 +287,12 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model return fmt.Errorf("InitWiki: %v", err) } - basePath, err := models.CreateTemporaryPath("update-wiki") + basePath, err := repo_module.CreateTemporaryPath("update-wiki") if err != nil { return err } defer func() { - if err := models.RemoveTemporaryPath(basePath); err != nil { + if err := repo_module.RemoveTemporaryPath(basePath); err != nil { log.Error("Merge: RemoveTemporaryPath: %s", err) } }() @@ -305,7 +306,7 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model return fmt.Errorf("Failed to clone repository: %s (%v)", repo.FullName(), err) } - gitRepo, err := git.OpenRepositoryCtx(ctx, basePath) + gitRepo, err := git.OpenRepository(ctx, basePath) if err != nil { log.Error("Unable to open temporary repository: %s (%v)", basePath, err) return fmt.Errorf("Failed to open new temporary repository in: %s %v", basePath, err) @@ -362,7 +363,7 @@ func DeleteWikiPage(ctx context.Context, doer *user_model.User, repo *repo_model if err := git.Push(gitRepo.Ctx, basePath, git.PushOptions{ Remote: "origin", Branch: fmt.Sprintf("%s:%s%s", commitHash.String(), git.BranchPrefix, "master"), - Env: models.PushingEnvironment(doer, repo), + Env: repo_module.PushingEnvironment(doer, repo), }); err != nil { if git.IsErrPushOutOfDate(err) || git.IsErrPushRejected(err) { return err diff --git a/services/wiki/wiki_test.go 
b/services/wiki/wiki_test.go index 928d08c828..0c73074dbe 100644 --- a/services/wiki/wiki_test.go +++ b/services/wiki/wiki_test.go @@ -20,7 +20,9 @@ import ( ) func TestMain(m *testing.M) { - unittest.MainTest(m, filepath.Join("..", "..")) + unittest.MainTest(m, &unittest.TestOptions{ + GiteaRootPath: filepath.Join("..", ".."), + }) } func TestWikiNameToSubURL(t *testing.T) { @@ -138,7 +140,7 @@ func TestRepository_AddWikiPage(t *testing.T) { t.Parallel() assert.NoError(t, AddWikiPage(git.DefaultContext, doer, repo, wikiName, wikiContent, commitMsg)) // Now need to show that the page has been added: - gitRepo, err := git.OpenRepositoryCtx(git.DefaultContext, repo.WikiPath()) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo.WikiPath()) assert.NoError(t, err) defer gitRepo.Close() masterTree, err := gitRepo.GetTree("master") @@ -183,7 +185,7 @@ func TestRepository_EditWikiPage(t *testing.T) { assert.NoError(t, EditWikiPage(git.DefaultContext, doer, repo, "Home", newWikiName, newWikiContent, commitMsg)) // Now need to show that the page has been added: - gitRepo, err := git.OpenRepositoryCtx(git.DefaultContext, repo.WikiPath()) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo.WikiPath()) assert.NoError(t, err) masterTree, err := gitRepo.GetTree("master") assert.NoError(t, err) @@ -207,7 +209,7 @@ func TestRepository_DeleteWikiPage(t *testing.T) { assert.NoError(t, DeleteWikiPage(git.DefaultContext, doer, repo, "Home")) // Now need to show that the page has been added: - gitRepo, err := git.OpenRepositoryCtx(git.DefaultContext, repo.WikiPath()) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo.WikiPath()) assert.NoError(t, err) defer gitRepo.Close() masterTree, err := gitRepo.GetTree("master") @@ -220,7 +222,7 @@ func TestRepository_DeleteWikiPage(t *testing.T) { func TestPrepareWikiFileName(t *testing.T) { unittest.PrepareTestEnv(t) repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1}).(*repo_model.Repository) - gitRepo, err := git.OpenRepositoryCtx(git.DefaultContext, repo.WikiPath()) + gitRepo, err := git.OpenRepository(git.DefaultContext, repo.WikiPath()) defer gitRepo.Close() assert.NoError(t, err) @@ -283,7 +285,7 @@ func TestPrepareWikiFileName_FirstPage(t *testing.T) { err = git.InitRepository(git.DefaultContext, tmpDir, true) assert.NoError(t, err) - gitRepo, err := git.OpenRepository(tmpDir) + gitRepo, err := git.OpenRepository(git.DefaultContext, tmpDir) defer gitRepo.Close() assert.NoError(t, err) diff --git a/templates/admin/auth/list.tmpl b/templates/admin/auth/list.tmpl index 71e5bfbda8..b4a703e413 100644 --- a/templates/admin/auth/list.tmpl +++ b/templates/admin/auth/list.tmpl @@ -10,7 +10,7 @@
diff --git a/templates/admin/auth/new.tmpl b/templates/admin/auth/new.tmpl index b8e80dbcaa..9882cde03b 100644 --- a/templates/admin/auth/new.tmpl +++ b/templates/admin/auth/new.tmpl @@ -14,7 +14,7 @@
ID
+ + + + + + + + + + + + + {{range .Entries}} + + + + + + + + + + {{end}} + +
{{.i18n.Tr "admin.monitor.name"}}{{.i18n.Tr "admin.monitor.schedule"}}{{.i18n.Tr "admin.monitor.next"}}{{.i18n.Tr "admin.monitor.previous"}}{{.i18n.Tr "admin.monitor.execute_times"}}{{.i18n.Tr "admin.monitor.last_execution_result"}}
{{$.i18n.Tr (printf "admin.dashboard.%s" .Name)}}{{.Spec}}{{DateFmtLong .Next}}{{if gt .Prev.Year 1 }}{{DateFmtLong .Prev}}{{else}}N/A{{end}}{{.ExecTimes}}{{if eq .Status "" }}—{{else if eq .Status "finished"}}{{svg "octicon-check" 16}}{{else}}{{svg "octicon-x" 16}}{{end}}
+ + {{.CsrfTokenHtml}} + +
diff --git a/templates/admin/emails/list.tmpl b/templates/admin/emails/list.tmpl index e73213c1df..277c777a89 100644 --- a/templates/admin/emails/list.tmpl +++ b/templates/admin/emails/list.tmpl @@ -30,7 +30,7 @@
{{else}} {{$inlineDiff := $.section.GetComputedInlineDiffFor $line}} - + - + {{else}} - - + + {{end}} {{$inlineDiff := $.section.GetComputedInlineDiffFor $line}} diff --git a/templates/repo/diff/box.tmpl b/templates/repo/diff/box.tmpl index f115a5f499..bfac8c52e8 100644 --- a/templates/repo/diff/box.tmpl +++ b/templates/repo/diff/box.tmpl @@ -18,6 +18,12 @@ {{svg "octicon-diff" 16 "mr-2"}}{{.i18n.Tr "repo.diff.stats_desc" .Diff.NumFiles .Diff.TotalAddition .Diff.TotalDeletion | Str2html}}
+ {{if and .PageIsPullFiles $.SignedUserID (not .IsArchived)}} + + + {{end}} {{template "repo/diff/whitespace_dropdown" .}} {{template "repo/diff/options_dropdown" .}} {{if and .PageIsPullFiles $.SignedUserID (not .IsArchived)}} @@ -39,7 +45,7 @@
  - {{.Name}} + {{.Name}} {{end}} {{if .Diff.IsIncomplete}} @@ -57,11 +63,11 @@ {{$isImage := or (call $.IsBlobAnImage $blobBase) (call $.IsBlobAnImage $blobHead)}} {{$isCsv := (call $.IsCsvFile $file)}} {{$showFileViewToggle := or $isImage (and (not $file.IsIncomplete) $isCsv)}} -
+

- {{if $file.IsRenamed}}{{$file.OldName}} → {{end}}{{$file.Name}}{{if .IsLFSFile}} ({{$.i18n.Tr "repo.stored_lfs"}}){{end}} + {{if $file.IsRenamed}}{{$file.OldName}} → {{end}}{{$file.Name}}{{if .IsLFSFile}} ({{$.i18n.Tr "repo.stored_lfs"}}){{end}} {{if $file.IsGenerated}} {{$.i18n.Tr "repo.diff.generated"}} {{end}} @@ -105,9 +111,18 @@ {{$.i18n.Tr "repo.diff.view_file"}} {{end}} {{end}} + {{if and $.IsSigned $.PageIsPullFiles (not $.IsArchived)}} + {{if $file.HasChangedSinceLastReview}} + {{$.i18n.Tr "repo.pulls.has_changed_since_last_review"}} + {{end}} +
+ + +
+ {{end}}

-
+
{{if or $file.IsIncomplete $file.IsBin}}
@@ -162,7 +177,7 @@
diff --git a/templates/repo/diff/comment_form.tmpl b/templates/repo/diff/comment_form.tmpl index cb7234b3b0..7f7fb7e329 100644 --- a/templates/repo/diff/comment_form.tmpl +++ b/templates/repo/diff/comment_form.tmpl @@ -11,7 +11,7 @@
diff --git a/templates/repo/diff/comments.tmpl b/templates/repo/diff/comments.tmpl index 3b8f1c2a9c..863e295862 100644 --- a/templates/repo/diff/comments.tmpl +++ b/templates/repo/diff/comments.tmpl @@ -37,7 +37,7 @@
{{if and .Review}} {{if eq .Review.Type 0}} -
+
{{$.root.i18n.Tr "repo.issues.review.pending"}}
{{else}} diff --git a/templates/repo/diff/compare.tmpl b/templates/repo/diff/compare.tmpl index 4517f0029c..743eaa2efa 100644 --- a/templates/repo/diff/compare.tmpl +++ b/templates/repo/diff/compare.tmpl @@ -191,7 +191,7 @@
{{.i18n.Tr "repo.pulls.has_pull_request" (Escape $.RepoLink) (Escape $.RepoRelPath) .PullRequest.Index | Safe}}

- {{RenderIssueTitle .PullRequest.Issue.Title $.RepoLink $.Repository.ComposeMetas}} + {{RenderIssueTitle $.Context .PullRequest.Issue.Title $.RepoLink $.Repository.ComposeMetas}} #{{.PullRequest.Issue.Index}}

diff --git a/templates/repo/diff/new_review.tmpl b/templates/repo/diff/new_review.tmpl index cbaabe255e..e4110b50ed 100644 --- a/templates/repo/diff/new_review.tmpl +++ b/templates/repo/diff/new_review.tmpl @@ -1,6 +1,7 @@
+ - + {{else}} {{$inlineDiff := $section.GetComputedInlineDiffFor $line}} - + - + {{else}} - - + + {{end}} {{$inlineDiff := $section.GetComputedInlineDiffFor $line -}} @@ -31,9 +31,7 @@ {{if eq .GetType 4}} + */}} {{else}}
diff --git a/templates/admin/monitor.tmpl b/templates/admin/monitor.tmpl index a35b587bd5..86686101ac 100644 --- a/templates/admin/monitor.tmpl +++ b/templates/admin/monitor.tmpl @@ -3,45 +3,12 @@ {{template "admin/navbar" .}}
{{template "base/alert" .}} -

- {{.i18n.Tr "admin.monitor.cron"}} -

-
-
- - - - - - - - - - - - - {{range .Entries}} - - - - - - - - - {{end}} - -
{{.i18n.Tr "admin.monitor.name"}}{{.i18n.Tr "admin.monitor.schedule"}}{{.i18n.Tr "admin.monitor.next"}}{{.i18n.Tr "admin.monitor.previous"}}{{.i18n.Tr "admin.monitor.execute_times"}}
{{$.i18n.Tr (printf "admin.dashboard.%s" .Name)}}{{.Spec}}{{DateFmtLong .Next}}{{if gt .Prev.Year 1 }}{{DateFmtLong .Prev}}{{else}}N/A{{end}}{{.ExecTimes}}
- - {{.CsrfTokenHtml}} -
-
- + {{template "admin/cron" .}}

{{.i18n.Tr "admin.monitor.queues"}}

- +
diff --git a/templates/admin/navbar.tmpl b/templates/admin/navbar.tmpl index c656d0619b..24a0a093a6 100644 --- a/templates/admin/navbar.tmpl +++ b/templates/admin/navbar.tmpl @@ -12,6 +12,9 @@ {{.i18n.Tr "admin.repositories"}} + + {{.i18n.Tr "packages.title"}} + {{if not DisableWebhooks}} {{.i18n.Tr "admin.hooks"}} diff --git a/templates/admin/notice.tmpl b/templates/admin/notice.tmpl index 9fec53b315..8d0e1c2206 100644 --- a/templates/admin/notice.tmpl +++ b/templates/admin/notice.tmpl @@ -7,7 +7,7 @@ {{.i18n.Tr "admin.notices.system_notice_list"}} ({{.i18n.Tr "admin.total" .Total}})
-
{{.i18n.Tr "admin.monitor.queue.name"}}
+
diff --git a/templates/admin/org/list.tmpl b/templates/admin/org/list.tmpl index 75c4d39196..0782ef64e9 100644 --- a/templates/admin/org/list.tmpl +++ b/templates/admin/org/list.tmpl @@ -13,7 +13,7 @@ {{template "admin/base/search" .}}
-
+
diff --git a/templates/admin/packages/list.tmpl b/templates/admin/packages/list.tmpl new file mode 100644 index 0000000000..df89d8bed2 --- /dev/null +++ b/templates/admin/packages/list.tmpl @@ -0,0 +1,98 @@ +{{template "base/head" .}} +
+ {{template "admin/navbar" .}} +
+ {{template "base/alert" .}} +

+ {{.i18n.Tr "admin.packages.package_manage_panel"}} ({{.i18n.Tr "admin.total" .Total}}, {{.i18n.Tr "admin.packages.total_size" (FileSize .TotalBlobSize)}}) +

+
+
+
+ + + +
+ +
+
+
ID{{SortArrow "oldest" "newest" $.SortType false}}
+ + + + + + + + + + + + + + + + {{range .PackageDescriptors}} + + + + + + + + + + + + + {{end}} + +
ID{{.i18n.Tr "admin.packages.owner"}}{{.i18n.Tr "admin.packages.type"}} + {{.i18n.Tr "admin.packages.name"}} + {{SortArrow "alphabetically" "reversealphabetically" .SortType false}} + + {{.i18n.Tr "admin.packages.version"}} + {{SortArrow "highestversion" "lowestversion" .SortType false}} + {{.i18n.Tr "admin.packages.creator"}}{{.i18n.Tr "admin.packages.repository"}}{{.i18n.Tr "admin.packages.size"}} + {{.i18n.Tr "admin.packages.published"}} + {{SortArrow "oldest" "newest" .SortType true}} + {{.i18n.Tr "admin.notices.op"}}
{{.Version.ID}} + {{.Owner.Name}} + {{if .Owner.Visibility.IsPrivate}} + {{svg "octicon-lock"}} + {{end}} + {{.Package.Type.Name}}{{.Creator.Name}} + {{if .Repository}} + {{.Repository.Name}} + {{end}} + {{FileSize .CalculateBlobSize}}{{.Version.CreatedUnix.FormatShort}}{{svg "octicon-trash"}}
+
+ + {{template "base/paginate" .}} +
+ + + +{{template "base/footer" .}} diff --git a/templates/admin/process-row.tmpl b/templates/admin/process-row.tmpl index 146ecc7b29..2191677a5c 100644 --- a/templates/admin/process-row.tmpl +++ b/templates/admin/process-row.tmpl @@ -1,11 +1,14 @@
diff --git a/templates/admin/process.tmpl b/templates/admin/process.tmpl index 719c10cead..c44300dbb7 100644 --- a/templates/admin/process.tmpl +++ b/templates/admin/process.tmpl @@ -1,5 +1,8 @@

{{.i18n.Tr "admin.monitor.process"}} +

diff --git a/templates/admin/repo/list.tmpl b/templates/admin/repo/list.tmpl index d7561dde7d..da05bfab96 100644 --- a/templates/admin/repo/list.tmpl +++ b/templates/admin/repo/list.tmpl @@ -13,7 +13,7 @@ {{template "admin/repo/search" .}}
- +
diff --git a/templates/admin/repo/unadopted.tmpl b/templates/admin/repo/unadopted.tmpl index b1f172720a..345f59401a 100644 --- a/templates/admin/repo/unadopted.tmpl +++ b/templates/admin/repo/unadopted.tmpl @@ -25,7 +25,7 @@ {{range $dirI, $dir := .Dirs}}
- {{svg "octicon-file-directory"}} + {{svg "octicon-file-directory-fill"}} {{$dir}}
diff --git a/templates/admin/stacktrace-row.tmpl b/templates/admin/stacktrace-row.tmpl new file mode 100644 index 0000000000..a21ef72d63 --- /dev/null +++ b/templates/admin/stacktrace-row.tmpl @@ -0,0 +1,66 @@ +
+
+
+ {{if eq .Process.Type "request"}} + {{svg "octicon-globe" 16 }} + {{else if eq .Process.Type "system"}} + {{svg "octicon-cpu" 16 }} + {{else if eq .Process.Type "normal"}} + {{svg "octicon-terminal" 16 }} + {{else}} + {{svg "octicon-code" 16 }} + {{end}} +
+
+
{{.Process.Description}}
+
{{if ne .Process.Type "none"}}{{TimeSince .Process.Start .root.i18n.Lang}}{{end}}
+
+
+ {{if or (eq .Process.Type "request") (eq .Process.Type "normal") }} + {{svg "octicon-trash" 16 "text-red"}} + {{end}} +
+
+ {{if .Process.Stacks}} +
+ {{range .Process.Stacks}} +
+
+ +
+
+ {{svg "octicon-code" 16 }}{{.Description}}{{if gt .Count 1}} * {{.Count}}{{end}} +
+
+ {{range .Labels}} +
{{.Name}}
{{.Value}}
+ {{end}} +
+
+
+
+ {{range .Entry}} +
+ {{svg "octicon-dot-fill" 16 }} +
+
{{.Function}}
+
{{.File}}:{{.Line}}
+
+
+ {{end}} +
+
+
+ {{end}} +
+ {{end}} + + {{if .Process.Children}} +
+ {{range .Process.Children}} + {{template "admin/stacktrace-row" dict "Process" . "root" $.root}} + {{end}} +
+ {{end}} + +
diff --git a/templates/admin/stacktrace.tmpl b/templates/admin/stacktrace.tmpl new file mode 100644 index 0000000000..68dfbe066d --- /dev/null +++ b/templates/admin/stacktrace.tmpl @@ -0,0 +1,33 @@ +{{template "base/head" .}} +
+ {{template "admin/navbar" .}} +
+ {{template "base/alert" .}} +

+ {{.i18n.Tr "admin.monitor.stacktrace"}}: {{.i18n.Tr "admin.monitor.goroutines" .GoroutineCount}} + +

+
+
+ {{range .ProcessStacks}} + {{template "admin/stacktrace-row" dict "Process" . "root" $}} + {{end}} +
+
+
+
+ + +{{template "base/footer" .}} diff --git a/templates/admin/user/edit.tmpl b/templates/admin/user/edit.tmpl index 17bd2b936c..1ee46f3077 100644 --- a/templates/admin/user/edit.tmpl +++ b/templates/admin/user/edit.tmpl @@ -116,7 +116,7 @@
-
+
diff --git a/templates/admin/user/list.tmpl b/templates/admin/user/list.tmpl index 93e6f38c27..755e4436f8 100644 --- a/templates/admin/user/list.tmpl +++ b/templates/admin/user/list.tmpl @@ -61,7 +61,7 @@
-
ID{{SortArrow "oldest" "newest" $.SortType false}}
+
diff --git a/templates/api/packages/pypi/simple.tmpl b/templates/api/packages/pypi/simple.tmpl new file mode 100644 index 0000000000..d8e480d9c6 --- /dev/null +++ b/templates/api/packages/pypi/simple.tmpl @@ -0,0 +1,15 @@ + + + + Links for {{.PackageDescriptor.Package.Name}} + + +

Links for {{.PackageDescriptor.Package.Name}}

+ {{range .PackageDescriptors}} + {{$p := .}} + {{range .Files}} + {{.File.Name}}
+ {{end}} + {{end}} + + diff --git a/templates/base/footer.tmpl b/templates/base/footer.tmpl index 1aabfa2f5c..9bf16f8aa5 100644 --- a/templates/base/footer.tmpl +++ b/templates/base/footer.tmpl @@ -22,7 +22,7 @@ {{end}} {{end}} - + {{template "custom/footer" .}} diff --git a/templates/base/footer_content.tmpl b/templates/base/footer_content.tmpl index ef183191a4..f5ec6b0bf3 100644 --- a/templates/base/footer_content.tmpl +++ b/templates/base/footer_content.tmpl @@ -1,7 +1,20 @@
- {{.i18n.Tr "powered_by" "Gitea"}} {{if (or .ShowFooterVersion .PageIsAdmin)}}{{.i18n.Tr "version"}}: {{AppVer}}{{end}} {{if ShowFooterTemplateLoadTime}}{{.i18n.Tr "page"}}: {{LoadTimes .PageStartTime}} {{.i18n.Tr "template"}}{{if .TemplateName}} {{.TemplateName}}{{end}}: {{call .TemplateLoadTimes}}{{end}} + {{.i18n.Tr "powered_by" "Gitea"}} + {{if (or .ShowFooterVersion .PageIsAdmin)}} + {{.i18n.Tr "version"}}: + {{if .IsAdmin}} + {{AppVer}} + {{else}} + {{AppVer}} + {{end}} + {{end}} + {{if and .TemplateLoadTimes ShowFooterTemplateLoadTime}} + {{.i18n.Tr "page"}}: {{LoadTimes .PageStartTime}} + {{.i18n.Tr "template"}} + {{if .TemplateName}} {{.TemplateName}}{{end}}: {{call .TemplateLoadTimes}} + {{end}}
diff --git a/templates/base/head.tmpl b/templates/base/head.tmpl index 32e206a95d..f3dcfe8429 100644 --- a/templates/base/head.tmpl +++ b/templates/base/head.tmpl @@ -12,54 +12,19 @@ {{if .GoGetImport}} - + {{end}} - +{{if .FeedURL}} + + +{{end}} + + {{template "base/head_script" .}} + {{if .PageIsUserProfile}} - + @@ -120,7 +85,7 @@ {{template "custom/body_inner_pre" .}} {{if not .PageIsInstall}} -
ID{{SortArrow "oldest" "newest" .SortType false}}
+ + + + + + + + {{range $dependency, $version := .dependencies}} + + + + + {{end}} + +
{{.root.i18n.Tr "packages.dependency.id"}}{{.root.i18n.Tr "packages.dependency.version"}}
{{$dependency}}{{$version}}
+{{end}} diff --git a/templates/package/content/conan.tmpl b/templates/package/content/conan.tmpl new file mode 100644 index 0000000000..3c1c35c00f --- /dev/null +++ b/templates/package/content/conan.tmpl @@ -0,0 +1,34 @@ +{{if eq .PackageDescriptor.Package.Type "conan"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ +
conan remote add gitea {{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/conan
+
+
+ +
conan install --remote=gitea {{.PackageDescriptor.Package.Name}}/{{.PackageDescriptor.Version.Version}}
+
+
+ +
+
+
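As a rough illustration, against a hypothetical instance whose AppUrl is https://gitea.example.com/, with owner testuser and a package test_lib at version 1.0.0 (all placeholder values), the two commands rendered above would come out as approximately:

    conan remote add gitea https://gitea.example.com/api/packages/testuser/conan
    conan install --remote=gitea test_lib/1.0.0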
+ + {{if .PackageDescriptor.Metadata.Description}} +

{{.i18n.Tr "packages.about"}}

+
+ {{if .PackageDescriptor.Metadata.Description}}{{.PackageDescriptor.Metadata.Description}}{{end}} +
+ {{end}} + + {{if or .PackageDescriptor.Metadata.Keywords}} +

{{.i18n.Tr "packages.keywords"}}

+
+ {{range .PackageDescriptor.Metadata.Keywords}} + {{.}} + {{end}} +
+ {{end}} +{{end}} diff --git a/templates/package/content/container.tmpl b/templates/package/content/container.tmpl new file mode 100644 index 0000000000..14d4a56398 --- /dev/null +++ b/templates/package/content/container.tmpl @@ -0,0 +1,78 @@ +{{if eq .PackageDescriptor.Package.Type "container"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ + {{if eq .PackageDescriptor.Metadata.Type "helm"}} +
helm pull oci://{{.RegistryHost}}/{{.PackageDescriptor.Owner.LowerName}}/{{.PackageDescriptor.Package.LowerName}} --version {{.PackageDescriptor.Version.LowerVersion}}
+ {{else}} + {{$separator := ":"}} + {{if not .PackageDescriptor.Metadata.IsTagged}} + {{$separator = "@"}} + {{end}} +
docker pull {{.RegistryHost}}/{{.PackageDescriptor.Owner.LowerName}}/{{.PackageDescriptor.Package.LowerName}}{{$separator}}{{.PackageDescriptor.Version.LowerVersion}}
+ {{end}} +
+
+ +
+
+
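For the same hypothetical instance, assuming a registry host gitea.example.com, owner testuser and an image or chart named test (placeholder values), the pull commands rendered above would look roughly like:

    # Helm chart stored as an OCI artifact
    helm pull oci://gitea.example.com/testuser/test --version 1.0.0
    # OCI image; ":" is used for tagged versions, "@" plus the digest for untagged ones
    docker pull gitea.example.com/testuser/test:1.0.0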
+ {{if .PackageDescriptor.Metadata.MultiArch}} +

{{.i18n.Tr "packages.container.multi_arch"}}

+
+
+ {{range $arch, $digest := .PackageDescriptor.Metadata.MultiArch}} +
+ + {{if eq $.PackageDescriptor.Metadata.Type "oci"}} +
docker pull {{$.RegistryHost}}/{{$.PackageDescriptor.Owner.LowerName}}/{{$.PackageDescriptor.Package.LowerName}}@{{$digest}}
+ {{end}} +
+ {{end}} +
+
+ {{end}} + {{if .PackageDescriptor.Metadata.Description}} +

{{.i18n.Tr "packages.about"}}

+
+ {{.PackageDescriptor.Metadata.Description}} +
+ {{end}} + {{if .PackageDescriptor.Metadata.ImageLayers}} +

{{.i18n.Tr "packages.container.layers"}}

+
+ + + {{range .PackageDescriptor.Metadata.ImageLayers}} + + + + {{end}} + +
{{.}}
+
+ {{end}} + {{if .PackageDescriptor.Metadata.Labels}} +

{{.i18n.Tr "packages.container.labels"}}

+
+ + + + + + + + + {{range $key, $value := .PackageDescriptor.Metadata.Labels}} + + + + + {{end}} + +
{{.i18n.Tr "packages.container.labels.key"}}{{.i18n.Tr "packages.container.labels.value"}}
{{$key}}{{$value}}
+
+ {{end}} +{{end}} diff --git a/templates/package/content/generic.tmpl b/templates/package/content/generic.tmpl new file mode 100644 index 0000000000..05a47b3ef4 --- /dev/null +++ b/templates/package/content/generic.tmpl @@ -0,0 +1,14 @@ +{{if eq .PackageDescriptor.Package.Type "generic"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ +
curl {{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/generic/{{.PackageDescriptor.Package.Name}}/{{.PackageDescriptor.Version.Version}}/{{(index .PackageDescriptor.Files 0).File.Name}}
+
+
+ +
+
+
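With the same placeholder instance, owner testuser, package test_package 1.0.0 and a hypothetical first file named file.bin, the download command above would render roughly as:

    curl https://gitea.example.com/api/packages/testuser/generic/test_package/1.0.0/file.bin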
+{{end}} diff --git a/templates/package/content/helm.tmpl b/templates/package/content/helm.tmpl new file mode 100644 index 0000000000..a85f7c4850 --- /dev/null +++ b/templates/package/content/helm.tmpl @@ -0,0 +1,57 @@ +{{if eq .PackageDescriptor.Package.Type "helm"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ +
helm repo add gitea {{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/helm
+helm repo update
+
+
+ +
helm install {{.PackageDescriptor.Package.Name}} gitea/{{.PackageDescriptor.Package.Name}}
+
+
+ +
+
+
+ + {{if .PackageDescriptor.Metadata.Description}} +
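With the same placeholder instance and a chart named test_chart, the commands above would render roughly as:

    helm repo add gitea https://gitea.example.com/api/packages/testuser/helm
    helm repo update
    helm install test_chart gitea/test_chart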

{{.i18n.Tr "packages.about"}}

+
+ {{.PackageDescriptor.Metadata.Description}} +
+ {{end}} + + {{if .PackageDescriptor.Metadata.Dependencies}} +

{{.i18n.Tr "packages.dependencies"}}

+
+ + + + + + + + + {{range .PackageDescriptor.Metadata.Dependencies}} + + + + + {{end}} + +
{{.i18n.Tr "packages.dependency.id"}}{{.i18n.Tr "packages.dependency.version"}}
{{.Name}}{{.Version}}
+
+ {{end}} + + {{if .PackageDescriptor.Metadata.Keywords}} +

{{.i18n.Tr "packages.keywords"}}

+
+ {{range .PackageDescriptor.Metadata.Keywords}} + {{.}} + {{end}} +
+ {{end}} +{{end}} diff --git a/templates/package/content/maven.tmpl b/templates/package/content/maven.tmpl new file mode 100644 index 0000000000..32b89616cb --- /dev/null +++ b/templates/package/content/maven.tmpl @@ -0,0 +1,71 @@ +{{if eq .PackageDescriptor.Package.Type "maven"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ +
<repositories>
+	<repository>
+		<id>gitea</id>
+		<url>{{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/maven</url>
+	</repository>
+</repositories>
+
+<distributionManagement>
+	<repository>
+		<id>gitea</id>
+		<url>{{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/maven</url>
+	</repository>
+
+	<snapshotRepository>
+		<id>gitea</id>
+		<url>{{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/maven</url>
+	</snapshotRepository>
+</distributionManagement>
+
+
+ +
<dependency>
+	<groupId>{{.PackageDescriptor.Metadata.GroupID}}</groupId>
+	<artifactId>{{.PackageDescriptor.Metadata.ArtifactID}}</artifactId>
+	<version>{{.PackageDescriptor.Version.Version}}</version>
+</dependency>
+
+
+ +
mvn install
+
+
+ +
mvn dependency:get -DremoteRepositories={{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/maven -Dartifact={{.PackageDescriptor.Metadata.GroupID}}:{{.PackageDescriptor.Metadata.ArtifactID}}:{{.PackageDescriptor.Version.Version}}
+
+
+ +
+
+
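Using placeholder coordinates com.example:test-project:1.0.0, the repository URL in the pom.xml snippets above would be https://gitea.example.com/api/packages/testuser/maven, and the two commands would render roughly as:

    mvn install
    mvn dependency:get -DremoteRepositories=https://gitea.example.com/api/packages/testuser/maven -Dartifact=com.example:test-project:1.0.0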
+ + {{if .PackageDescriptor.Metadata.Description}} +

{{.i18n.Tr "packages.about"}}

+
+ {{.PackageDescriptor.Metadata.Description}} +
+ {{end}} + + {{if .PackageDescriptor.Metadata.Dependencies}} +

{{.i18n.Tr "packages.dependencies"}}

+
+
+ {{range .PackageDescriptor.Metadata.Dependencies}} +
+ {{svg "octicon-package-dependencies" 16 ""}} +
+
{{.GroupID}}:{{.ArtifactID}}
+
{{.Version}}
+
+
+ {{end}} +
+
+ {{end}} +{{end}} diff --git a/templates/package/content/npm.tmpl b/templates/package/content/npm.tmpl new file mode 100644 index 0000000000..bc714e5c97 --- /dev/null +++ b/templates/package/content/npm.tmpl @@ -0,0 +1,56 @@ +{{if eq .PackageDescriptor.Package.Type "npm"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ +
{{if .PackageDescriptor.Metadata.Scope}}{{.PackageDescriptor.Metadata.Scope}}:{{end}}registry={{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/npm/
+
+
+ +
npm install {{.PackageDescriptor.Package.Name}}@{{.PackageDescriptor.Version.Version}}
+
+
+ +
"{{.PackageDescriptor.Package.Name}}": "{{.PackageDescriptor.Version.Version}}"
+
+
+ +
+
+
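For an unscoped placeholder package test_package 1.0.0 on the same hypothetical instance, the three snippets above would render roughly as follows (for scoped packages the template prefixes the registry line with the package scope):

    # .npmrc
    registry=https://gitea.example.com/api/packages/testuser/npm/
    # install from the command line
    npm install test_package@1.0.0
    # or pin it in package.json
    "test_package": "1.0.0"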
+ + {{if or .PackageDescriptor.Metadata.Description .PackageDescriptor.Metadata.Readme}} +

{{.i18n.Tr "packages.about"}}

+
+ {{if .PackageDescriptor.Metadata.Readme}} +
+ {{RenderMarkdownToHtml .PackageDescriptor.Metadata.Readme}} +
+ {{else if .PackageDescriptor.Metadata.Description}} + {{.PackageDescriptor.Metadata.Description}} + {{end}} +
+ {{end}} + + {{if or .PackageDescriptor.Metadata.Dependencies .PackageDescriptor.Metadata.DevelopmentDependencies .PackageDescriptor.Metadata.PeerDependencies .PackageDescriptor.Metadata.OptionalDependencies}} +

{{.i18n.Tr "packages.dependencies"}}

+
+
+ {{template "package/content/npm_dependencies" dict "root" $ "dependencies" .PackageDescriptor.Metadata.Dependencies "title" (.i18n.Tr "packages.npm.dependencies")}} + {{template "package/content/npm_dependencies" dict "root" $ "dependencies" .PackageDescriptor.Metadata.DevelopmentDependencies "title" (.i18n.Tr "packages.npm.dependencies.development")}} + {{template "package/content/npm_dependencies" dict "root" $ "dependencies" .PackageDescriptor.Metadata.PeerDependencies "title" (.i18n.Tr "packages.npm.dependencies.peer")}} + {{template "package/content/npm_dependencies" dict "root" $ "dependencies" .PackageDescriptor.Metadata.OptionalDependencies "title" (.i18n.Tr "packages.npm.dependencies.optional")}} +
+
+ {{end}} + + {{if .PackageDescriptor.Metadata.Keywords}} +

{{.i18n.Tr "packages.keywords"}}

+
+ {{range .PackageDescriptor.Metadata.Keywords}} + {{.}} + {{end}} +
+ {{end}} +{{end}} diff --git a/templates/package/content/npm_dependencies.tmpl b/templates/package/content/npm_dependencies.tmpl new file mode 100644 index 0000000000..1ab644f417 --- /dev/null +++ b/templates/package/content/npm_dependencies.tmpl @@ -0,0 +1,19 @@ +{{if .dependencies}} +

{{.title}}

+ + + + + + + + + {{range $dependency, $version := .dependencies}} + + + + + {{end}} + +
{{.root.i18n.Tr "packages.dependency.id"}}{{.root.i18n.Tr "packages.dependency.version"}}
{{$dependency}}{{$version}}
+{{end}} diff --git a/templates/package/content/nuget.tmpl b/templates/package/content/nuget.tmpl new file mode 100644 index 0000000000..879d7d0176 --- /dev/null +++ b/templates/package/content/nuget.tmpl @@ -0,0 +1,52 @@ +{{if eq .PackageDescriptor.Package.Type "nuget"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ +
dotnet nuget add source --name Gitea --username your_username --password your_token {{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/nuget/index.json
+
+
+ +
dotnet add package --source Gitea --version {{.PackageDescriptor.Version.Version}} {{.PackageDescriptor.Package.Name}}
+
+
+ +
+
+
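With a placeholder package TestPackage 1.0.0, the commands above would render roughly as (your_username and your_token are literal placeholders in the template itself):

    dotnet nuget add source --name Gitea --username your_username --password your_token https://gitea.example.com/api/packages/testuser/nuget/index.json
    dotnet add package --source Gitea --version 1.0.0 TestPackage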
+ + {{if or .PackageDescriptor.Metadata.Description .PackageDescriptor.Metadata.ReleaseNotes}} +

{{.i18n.Tr "packages.about"}}

+
+ {{if .PackageDescriptor.Metadata.Description}}{{.PackageDescriptor.Metadata.Description}}{{end}} + {{if .PackageDescriptor.Metadata.ReleaseNotes}}{{Str2html .PackageDescriptor.Metadata.ReleaseNotes}}{{end}} +
+ {{end}} + + {{if .PackageDescriptor.Metadata.Dependencies}} +

{{.i18n.Tr "packages.dependencies"}}

+
+ + + + + + + + + + {{range $framework, $dependencies := .PackageDescriptor.Metadata.Dependencies}} + {{range $dependencies}} + + + + + + {{end}} + {{end}} + +
{{.i18n.Tr "packages.dependency.id"}}{{.i18n.Tr "packages.dependency.version"}}{{.i18n.Tr "packages.nuget.dependency.framework"}}
{{.ID}}{{.Version}}{{$framework}}
+
+ {{end}} +{{end}} diff --git a/templates/package/content/pypi.tmpl b/templates/package/content/pypi.tmpl new file mode 100644 index 0000000000..352f4f617f --- /dev/null +++ b/templates/package/content/pypi.tmpl @@ -0,0 +1,31 @@ +{{if eq .PackageDescriptor.Package.Type "pypi"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ +
pip install --extra-index-url {{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/pypi/simple {{.PackageDescriptor.Package.Name}}
+
+
+ +
+
+
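With a placeholder package test_package on the same hypothetical instance, the command above would render roughly as:

    pip install --extra-index-url https://gitea.example.com/api/packages/testuser/pypi/simple test_package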
+ {{if or .PackageDescriptor.Metadata.Description .PackageDescriptor.Metadata.LongDescription .PackageDescriptor.Metadata.Summary}} +

{{.i18n.Tr "packages.about"}}

+
+

{{if .PackageDescriptor.Metadata.Summary}}{{.PackageDescriptor.Metadata.Summary}}{{end}}

+ {{if .PackageDescriptor.Metadata.LongDescription}} + {{RenderMarkdownToHtml .PackageDescriptor.Metadata.LongDescription}} + {{else if .PackageDescriptor.Metadata.Description}} + {{RenderMarkdownToHtml .PackageDescriptor.Metadata.Description}} + {{end}} +
+ {{end}} + {{if .PackageDescriptor.Metadata.RequiresPython}} +

{{.i18n.Tr "packages.requirements"}}

+
+ {{.i18n.Tr "packages.pypi.requires"}}: {{.PackageDescriptor.Metadata.RequiresPython}} +
+ {{end}} +{{end}} diff --git a/templates/package/content/rubygems.tmpl b/templates/package/content/rubygems.tmpl new file mode 100644 index 0000000000..6e22d7fbea --- /dev/null +++ b/templates/package/content/rubygems.tmpl @@ -0,0 +1,40 @@ +{{if eq .PackageDescriptor.Package.Type "rubygems"}} +

{{.i18n.Tr "packages.installation"}}

+
+
+
+ +
gem install {{.PackageDescriptor.Package.Name}} --version "{{.PackageDescriptor.Version.Version}}" --source "{{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/rubygems"
+
+
+ +
source "{{AppUrl}}api/packages/{{.PackageDescriptor.Owner.Name}}/rubygems" do
+	gem "{{.PackageDescriptor.Package.Name}}", "{{.PackageDescriptor.Version.Version}}"
+end
+
+
+ +
+
+
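With a placeholder gem test_gem 1.0.0, the two snippets above would render roughly as:

    gem install test_gem --version "1.0.0" --source "https://gitea.example.com/api/packages/testuser/rubygems"
    # or in a Gemfile
    source "https://gitea.example.com/api/packages/testuser/rubygems" do
      gem "test_gem", "1.0.0"
    end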
+ {{if .PackageDescriptor.Metadata.Description}} +

{{.i18n.Tr "packages.about"}}

+
{{.PackageDescriptor.Metadata.Description}}
+ {{end}} + {{if or .PackageDescriptor.Metadata.RequiredRubyVersion .PackageDescriptor.Metadata.RequiredRubygemsVersion}} +

{{.i18n.Tr "packages.requirements"}}

+
+ {{if .PackageDescriptor.Metadata.RequiredRubyVersion}}

{{.i18n.Tr "packages.rubygems.required.ruby"}}: {{range $i, $v := .PackageDescriptor.Metadata.RequiredRubyVersion}}{{if gt $i 0}}, {{end}}{{$v.Restriction}}{{$v.Version}}{{end}}

{{end}} + {{if .PackageDescriptor.Metadata.RequiredRubygemsVersion}}

{{.i18n.Tr "packages.rubygems.required.rubygems"}}: {{range $i, $v := .PackageDescriptor.Metadata.RequiredRubygemsVersion}}{{if gt $i 0}}, {{end}}{{$v.Restriction}}{{$v.Version}}{{end}}

{{end}} +
+ {{end}} + {{if or .PackageDescriptor.Metadata.RuntimeDependencies .PackageDescriptor.Metadata.DevelopmentDependencies}} +

{{.i18n.Tr "packages.dependencies"}}

+
+
+ {{template "package/content/rubygems_dependencies" dict "root" $ "dependencies" .PackageDescriptor.Metadata.RuntimeDependencies "title" (.i18n.Tr "packages.rubygems.dependencies.runtime")}} + {{template "package/content/rubygems_dependencies" dict "root" $ "dependencies" .PackageDescriptor.Metadata.DevelopmentDependencies "title" (.i18n.Tr "packages.rubygems.dependencies.development")}} +
+
+ {{end}} +{{end}} diff --git a/templates/package/content/rubygems_dependencies.tmpl b/templates/package/content/rubygems_dependencies.tmpl new file mode 100644 index 0000000000..79f66ad3f9 --- /dev/null +++ b/templates/package/content/rubygems_dependencies.tmpl @@ -0,0 +1,19 @@ +{{if .dependencies}} +

{{.title}}

+ + + + + + + + + {{range .dependencies}} + + + + + {{end}} + +
{{.root.i18n.Tr "packages.dependency.id"}}{{.root.i18n.Tr "packages.dependency.version"}}
{{.Name}}{{range $i, $v := .Version}}{{if gt $i 0}}, {{end}}{{$v.Restriction}}{{$v.Version}}{{end}}
+{{end}} diff --git a/templates/package/metadata/composer.tmpl b/templates/package/metadata/composer.tmpl new file mode 100644 index 0000000000..1178d00e0d --- /dev/null +++ b/templates/package/metadata/composer.tmpl @@ -0,0 +1,5 @@ +{{if eq .PackageDescriptor.Package.Type "composer"}} + {{range .PackageDescriptor.Metadata.Authors}}
{{svg "octicon-person" 16 "mr-3"}} {{.Name}}
{{end}} + {{if .PackageDescriptor.Metadata.Homepage}}
{{end}} + {{range .PackageDescriptor.Metadata.License}}
{{svg "octicon-law" 16 "mr-3"}} {{.}}
{{end}} +{{end}} diff --git a/templates/package/metadata/conan.tmpl b/templates/package/metadata/conan.tmpl new file mode 100644 index 0000000000..1ef82aea4e --- /dev/null +++ b/templates/package/metadata/conan.tmpl @@ -0,0 +1,6 @@ +{{if eq .PackageDescriptor.Package.Type "conan"}} + {{if .PackageDescriptor.Metadata.Author}}
{{svg "octicon-person" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Author}}
{{end}} + {{if .PackageDescriptor.Metadata.ProjectURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.details.project_site"}}
{{end}} + {{if .PackageDescriptor.Metadata.License}}
{{svg "octicon-law" 16 "mr-3"}} {{.PackageDescriptor.Metadata.License}}
{{end}} + {{if .PackageDescriptor.Metadata.RepositoryURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.conan.details.repository"}}
{{end}} +{{end}} diff --git a/templates/package/metadata/container.tmpl b/templates/package/metadata/container.tmpl new file mode 100644 index 0000000000..117d7e46a2 --- /dev/null +++ b/templates/package/metadata/container.tmpl @@ -0,0 +1,9 @@ +{{if eq .PackageDescriptor.Package.Type "container"}} +
{{svg "octicon-package" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Type.Name}}
+ {{if .PackageDescriptor.Metadata.Platform}}
{{svg "octicon-cpu" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Platform}}
{{end}} + {{range .PackageDescriptor.Metadata.Authors}}
{{svg "octicon-person" 16 "mr-3"}} {{.}}
{{end}} + {{if .PackageDescriptor.Metadata.Licenses}}
{{svg "octicon-law" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Licenses}}
{{end}} + {{if .PackageDescriptor.Metadata.ProjectURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.details.project_site"}}
{{end}} + {{if .PackageDescriptor.Metadata.RepositoryURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.container.details.repository_site"}}
{{end}} + {{if .PackageDescriptor.Metadata.DocumentationURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.container.details.documentation_site"}}
{{end}} +{{end}} diff --git a/templates/package/metadata/generic.tmpl b/templates/package/metadata/generic.tmpl new file mode 100644 index 0000000000..e69de29bb2 diff --git a/templates/package/metadata/helm.tmpl b/templates/package/metadata/helm.tmpl new file mode 100644 index 0000000000..7c97c6358e --- /dev/null +++ b/templates/package/metadata/helm.tmpl @@ -0,0 +1,4 @@ +{{if eq .PackageDescriptor.Package.Type "helm"}} + {{range .PackageDescriptor.Metadata.Maintainers}}
{{svg "octicon-person" 16 "mr-3"}} {{.Name}}
{{end}} + {{if .PackageDescriptor.Metadata.Home}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.details.project_site"}}
{{end}} +{{end}} diff --git a/templates/package/metadata/maven.tmpl b/templates/package/metadata/maven.tmpl new file mode 100644 index 0000000000..14a613be47 --- /dev/null +++ b/templates/package/metadata/maven.tmpl @@ -0,0 +1,5 @@ +{{if eq .PackageDescriptor.Package.Type "maven"}} + {{if .PackageDescriptor.Metadata.Name}}
{{svg "octicon-note" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Name}}
{{end}} + {{if .PackageDescriptor.Metadata.ProjectURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.details.project_site"}}
{{end}} + {{range .PackageDescriptor.Metadata.Licenses}}
{{svg "octicon-law" 16 "mr-3"}} {{.}}
{{end}} +{{end}} diff --git a/templates/package/metadata/npm.tmpl b/templates/package/metadata/npm.tmpl new file mode 100644 index 0000000000..3279f9edbf --- /dev/null +++ b/templates/package/metadata/npm.tmpl @@ -0,0 +1,8 @@ +{{if eq .PackageDescriptor.Package.Type "npm"}} + {{if .PackageDescriptor.Metadata.Author}}
{{svg "octicon-person" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Author}}
{{end}} + {{if .PackageDescriptor.Metadata.ProjectURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.details.project_site"}}
{{end}} + {{if .PackageDescriptor.Metadata.License}}
{{svg "octicon-law" 16 "mr-3"}} {{.PackageDescriptor.Metadata.License}}
{{end}} + {{range .PackageDescriptor.Properties}} + {{if eq .Name "npm.tag"}}
{{svg "octicon-versions" 16 "mr-3"}} {{.Value}}
{{end}} + {{end}} +{{end}} diff --git a/templates/package/metadata/nuget.tmpl b/templates/package/metadata/nuget.tmpl new file mode 100644 index 0000000000..d5a3e909b9 --- /dev/null +++ b/templates/package/metadata/nuget.tmpl @@ -0,0 +1,4 @@ +{{if eq .PackageDescriptor.Package.Type "nuget"}} + {{if .PackageDescriptor.Metadata.Authors}}
{{svg "octicon-person" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Authors}}
{{end}} + {{if .PackageDescriptor.Metadata.ProjectURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.details.project_site"}}
{{end}} +{{end}} diff --git a/templates/package/metadata/pypi.tmpl b/templates/package/metadata/pypi.tmpl new file mode 100644 index 0000000000..5cdfbdfe66 --- /dev/null +++ b/templates/package/metadata/pypi.tmpl @@ -0,0 +1,5 @@ +{{if eq .PackageDescriptor.Package.Type "pypi"}} + {{if .PackageDescriptor.Metadata.Author}}
{{svg "octicon-person" 16 "mr-3"}} {{.PackageDescriptor.Metadata.Author}}
{{end}} + {{if .PackageDescriptor.Metadata.ProjectURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.details.project_site"}}
{{end}} + {{if .PackageDescriptor.Metadata.License}}
{{svg "octicon-law" 16 "mr-3"}} {{.PackageDescriptor.Metadata.License}}
{{end}} +{{end}} diff --git a/templates/package/metadata/rubygems.tmpl b/templates/package/metadata/rubygems.tmpl new file mode 100644 index 0000000000..dff6830df3 --- /dev/null +++ b/templates/package/metadata/rubygems.tmpl @@ -0,0 +1,5 @@ +{{if eq .PackageDescriptor.Package.Type "rubygems"}} + {{range .PackageDescriptor.Metadata.Authors}}
{{svg "octicon-person" 16 "mr-3"}} {{.}}
{{end}} + {{if .PackageDescriptor.Metadata.ProjectURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.i18n.Tr "packages.details.project_site"}}
{{end}} + {{range .PackageDescriptor.Metadata.Licenses}}
{{svg "octicon-law" 16 "mr-3"}} {{.}}
{{end}} +{{end}} diff --git a/templates/package/settings.tmpl b/templates/package/settings.tmpl new file mode 100644 index 0000000000..bf2d1d4912 --- /dev/null +++ b/templates/package/settings.tmpl @@ -0,0 +1,71 @@ +{{template "base/head" .}} +
+ {{template "user/overview/header" .}} +
+ {{template "base/alert" .}} +

{{.PackageDescriptor.Package.Name}} ({{.PackageDescriptor.Version.Version}}) / {{.i18n.Tr "repo.settings"}}

+

+ {{.i18n.Tr "packages.settings.link"}} +

+
+

{{.i18n.Tr "packages.settings.link.description"}}

+
+ {{template "base/disable_form_autofill"}} + {{.CsrfTokenHtml}} + +
+ +
+
+ +
+
+
+

+ {{.i18n.Tr "repo.settings.danger_zone"}} +

+
+
+
+ +
+
+
{{.i18n.Tr "packages.settings.delete"}}
+

{{.i18n.Tr "packages.settings.delete.description"}}

+
+ +
+
+
+
+{{template "base/footer" .}} diff --git a/templates/package/shared/list.tmpl b/templates/package/shared/list.tmpl new file mode 100644 index 0000000000..9e6bf5ce9e --- /dev/null +++ b/templates/package/shared/list.tmpl @@ -0,0 +1,58 @@ +
+ {{template "base/alert" .}} +
+
+ + + +
+
+
+ {{range .PackageDescriptors}} +
+
    +
    + {{.Package.Name}} + {{svg .Package.Type.SVGName 16}} {{.Package.Type.Name}} +
    +
    + {{$timeStr := TimeSinceUnix .Version.CreatedUnix $.i18n.Lang}} + {{$hasRepositoryAccess := false}} + {{if .Repository}} + {{$hasRepositoryAccess = index $.RepositoryAccessMap .Repository.ID}} + {{end}} + {{if $hasRepositoryAccess}} + {{$.i18n.Tr "packages.published_by_in" $timeStr .Creator.HomeLink (.Creator.GetDisplayName | Escape) .Repository.HTMLURL (.Repository.FullName | Escape) | Safe}} + {{else}} + {{$.i18n.Tr "packages.published_by" $timeStr .Creator.HomeLink (.Creator.GetDisplayName | Escape) | Safe}} + {{end}} +
    +
    +
+ {{else}} + {{if not .HasPackages}} +
    + {{svg "octicon-package" 32}} +

    {{.i18n.Tr "packages.empty"}}

    +

    {{.i18n.Tr "packages.empty.documentation" | Safe}}

    +
    + {{else}} +

    {{.i18n.Tr "packages.filter.no_result"}}

    + {{end}} + {{end}} + {{template "base/paginate" .}} +
    +
    diff --git a/templates/package/shared/versionlist.tmpl b/templates/package/shared/versionlist.tmpl new file mode 100644 index 0000000000..e2aa19cc8c --- /dev/null +++ b/templates/package/shared/versionlist.tmpl @@ -0,0 +1,33 @@ +
    +

    {{.PackageDescriptor.Package.Name}} / {{.i18n.Tr "packages.versions"}}

    +
    +
    + + {{if eq .PackageDescriptor.Package.Type "container"}} + + {{end}} + +
    +
    +
    + {{range .PackageDescriptors}} +
+
    + +
    + {{$.i18n.Tr "packages.published_by" (TimeSinceUnix .Version.CreatedUnix $.i18n.Lang) .Creator.HomeLink (.Creator.GetDisplayName | Escape) | Safe}} +
    +
    +
+ {{else}} +

    {{.i18n.Tr "packages.filter.no_result"}}

    + {{end}} + {{template "base/paginate" .}} +
    +
    diff --git a/templates/package/view.tmpl b/templates/package/view.tmpl new file mode 100644 index 0000000000..efad9f9b8f --- /dev/null +++ b/templates/package/view.tmpl @@ -0,0 +1,96 @@ +{{template "base/head" .}} +
    + {{template "user/overview/header" .}} +
    +
    +
    +
    +
    +

    {{.PackageDescriptor.Package.Name}} ({{.PackageDescriptor.Version.Version}})

    +
    +
    + {{$timeStr := TimeSinceUnix .PackageDescriptor.Version.CreatedUnix $.i18n.Lang}} + {{if .HasRepositoryAccess}} + {{.i18n.Tr "packages.published_by_in" $timeStr .PackageDescriptor.Creator.HomeLink (.PackageDescriptor.Creator.GetDisplayName | Escape) .PackageDescriptor.Repository.HTMLURL (.PackageDescriptor.Repository.FullName | Escape) | Safe}} + {{else}} + {{.i18n.Tr "packages.published_by" $timeStr .PackageDescriptor.Creator.HomeLink (.PackageDescriptor.Creator.GetDisplayName | Escape) | Safe}} + {{end}} +
    +
    +
    +
    + {{template "package/content/composer" .}} + {{template "package/content/conan" .}} + {{template "package/content/container" .}} + {{template "package/content/generic" .}} + {{template "package/content/helm" .}} + {{template "package/content/maven" .}} + {{template "package/content/npm" .}} + {{template "package/content/nuget" .}} + {{template "package/content/pypi" .}} + {{template "package/content/rubygems" .}} +
    +
    +
    + {{.i18n.Tr "packages.details"}} +
    +
    {{svg .PackageDescriptor.Package.Type.SVGName 16 "mr-3"}} {{.PackageDescriptor.Package.Type.Name}}
    + {{if .HasRepositoryAccess}} +
    {{svg "octicon-repo" 16 "mr-3"}} {{.PackageDescriptor.Repository.FullName}}
    + {{end}} +
    {{svg "octicon-calendar" 16 "mr-3"}} {{.PackageDescriptor.Version.CreatedUnix.FormatDate}}
    +
    {{svg "octicon-download" 16 "mr-3"}} {{.PackageDescriptor.Version.DownloadCount}}
    + {{template "package/metadata/composer" .}} + {{template "package/metadata/conan" .}} + {{template "package/metadata/container" .}} + {{template "package/metadata/generic" .}} + {{template "package/metadata/helm" .}} + {{template "package/metadata/maven" .}} + {{template "package/metadata/npm" .}} + {{template "package/metadata/nuget" .}} + {{template "package/metadata/pypi" .}} + {{template "package/metadata/rubygems" .}} +
    + {{if not (eq .PackageDescriptor.Package.Type "container")}} +
    + {{.i18n.Tr "packages.assets"}} ({{len .PackageDescriptor.Files}}) +
    + {{range .PackageDescriptor.Files}} +
    + {{.File.Name}} + {{FileSize .Blob.Size}} +
    + {{end}} +
    + {{end}} + {{if .LatestVersions}} +
    + {{.i18n.Tr "packages.versions"}} ({{.TotalVersionCount}}) + {{.i18n.Tr "packages.versions.view_all"}} +
    + {{range .LatestVersions}} +
    + {{.Version}} + {{$.i18n.Tr "packages.versions.on"}} {{.CreatedUnix.FormatDate}} +
    + {{end}} +
    + {{end}} + {{if or .CanWritePackages .HasRepositoryAccess}} +
    +
    + {{if .HasRepositoryAccess}} +
    {{svg "octicon-issue-opened" 16 "mr-3"}} {{.i18n.Tr "repo.issues"}}
    + {{end}} + {{if .CanWritePackages}} +
    {{svg "octicon-tools" 16 "mr-3"}} {{.i18n.Tr "repo.settings"}}
    + {{end}} +
    + {{end}} +
    +
    +
    +
    +
    +
    +{{template "base/footer" .}} diff --git a/templates/repo/branch_dropdown.tmpl b/templates/repo/branch_dropdown.tmpl index 0e263e8075..487dce1531 100644 --- a/templates/repo/branch_dropdown.tmpl +++ b/templates/repo/branch_dropdown.tmpl @@ -45,7 +45,7 @@
{{$inlineDiff := $.section.GetComputedInlineDiffFor $line}}{{$inlineDiff.Content}} {{if and $line.LeftIdx $inlineDiff.EscapeStatus.Escaped}}{{end}} {{if $line.LeftIdx}}{{end}} {{/* */}}{{if $line.LeftIdx}}{{$inlineDiff.Content}}{{end}}{{/* */}} {{if and $line.RightIdx $inlineDiff.EscapeStatus.Escaped}}{{end}} {{if $line.RightIdx}}{{end}} {{/*
@@ -59,8 +59,8 @@ {{end}} {{if $inlineDiff.EscapeStatus.Escaped}}{{end}} {{if or (eq $line.GetExpandDirection 3) (eq $line.GetExpandDirection 5) }} - + {{svg "octicon-fold-down"}} {{end}} {{if or (eq $line.GetExpandDirection 3) (eq $line.GetExpandDirection 4) }} - + {{svg "octicon-fold-up"}} {{end}} {{if eq $line.GetExpandDirection 2}} - + {{svg "octicon-fold"}} {{end}}
@@ -28,7 +28,7 @@ {{$match := index $section.Lines $line.Match}} {{- $leftDiff := ""}}{{if $line.LeftIdx}}{{$leftDiff = $section.GetComputedInlineDiffFor $line}}{{end}} {{- $rightDiff := ""}}{{if $match.RightIdx}}{{$rightDiff = $section.GetComputedInlineDiffFor $match}}{{end}} - {{if $line.LeftIdx}}{{if $leftDiff.EscapeStatus.Escaped}}{{end}}{{end}} {{/*
@@ -43,7 +43,7 @@ */}}{{/* */}}{{end}}{{/* */}} {{if $match.RightIdx}}{{if $rightDiff.EscapeStatus.Escaped}}{{end}}{{end}} {{if $match.RightIdx}}{{end}} {{/*
@@ -60,7 +60,7 @@ */}} {{if $line.LeftIdx}}{{if $inlineDiff.EscapeStatus.Escaped}}{{end}}{{end}} {{if $line.LeftIdx}}{{end}} {{/*
@@ -75,7 +75,7 @@ */}}{{/* */}}{{end}}{{/* */}} {{if $line.RightIdx}}{{if $inlineDiff.EscapeStatus.Escaped}}{{end}}{{end}} {{if $line.RightIdx}}{{end}} {{/*
diff --git a/templates/repo/diff/section_unified.tmpl index dbd0ca269f..173b637e86 100644
--- a/templates/repo/diff/section_unified.tmpl
+++ b/templates/repo/diff/section_unified.tmpl
@@ -6,24 +6,24 @@ {{if eq .GetType 4}} {{if or (eq $line.GetExpandDirection 3) (eq $line.GetExpandDirection 5) }} - + {{svg "octicon-fold-down"}} {{end}} {{if or (eq $line.GetExpandDirection 3) (eq $line.GetExpandDirection 4) }} - + {{svg "octicon-fold-up"}} {{end}} {{if eq $line.GetExpandDirection 2}} - + {{svg "octicon-fold"}} {{end}} {{if $inlineDiff.EscapeStatus.Escaped}}{{end}}{{/* */}}{{$inlineDiff.Content}}{{/* - */}} - {{$line.Content}} - {{/* */}}{{if and $.root.SignedUserID $.root.PageIsPullFiles}}{{/*
diff --git a/templates/repo/editor/edit.tmpl index 5e3351c865..8135348b9b 100644
--- a/templates/repo/editor/edit.tmpl
+++ b/templates/repo/editor/edit.tmpl
@@ -31,13 +31,13 @@
    diff --git a/templates/repo/issue/list.tmpl b/templates/repo/issue/list.tmpl index 63d746f570..ba7f1c1480 100644 --- a/templates/repo/issue/list.tmpl +++ b/templates/repo/issue/list.tmpl @@ -2,7 +2,7 @@
    {{template "repo/header" .}}
    -
    +
    {{template "repo/issue/navbar" .}}
    diff --git a/templates/repo/issue/new_form.tmpl b/templates/repo/issue/new_form.tmpl index 1089c82415..9e6a69f71a 100644 --- a/templates/repo/issue/new_form.tmpl +++ b/templates/repo/issue/new_form.tmpl @@ -20,7 +20,7 @@
    {{template "repo/issue/comment_tab" .}}
    -
    + {{if and .PageIsComparePull (not (eq .HeadRepo.FullName .BaseCompareRepo.FullName)) .CanWriteToHeadRepo}} +
    +
    +
    + + +
    +
    + {{end}}
    diff --git a/templates/repo/issue/view.tmpl b/templates/repo/issue/view.tmpl index acfd25e7e0..c1fedac04c 100644 --- a/templates/repo/issue/view.tmpl +++ b/templates/repo/issue/view.tmpl @@ -2,7 +2,7 @@
    {{template "repo/header" .}}
    -
    +
    {{template "repo/issue/navbar" .}}
    diff --git a/templates/repo/issue/view_content.tmpl b/templates/repo/issue/view_content.tmpl index 820055c136..91af5160b6 100644 --- a/templates/repo/issue/view_content.tmpl +++ b/templates/repo/issue/view_content.tmpl @@ -41,6 +41,9 @@ {{if .Repository.OriginalURL}} ({{$.i18n.Tr "repo.migrated_from" (.Repository.OriginalURL|Escape) (.Repository.GetOriginalURLHostname|Escape) | Safe }}){{end}} {{else}} + + {{avatar .Issue.Poster}} + {{.Issue.Poster.GetDisplayName}} {{.i18n.Tr "repo.issues.commented_at" (.Issue.HashTag|Escape) $createdStr | Safe}} @@ -50,12 +53,12 @@
    {{if gt .Issue.ShowRole 0}} {{if (.Issue.ShowRole.HasRole "Writer")}} -
    +
    {{$.i18n.Tr "repo.issues.collaborator"}}
    {{end}} {{if (.Issue.ShowRole.HasRole "Owner")}} -
    +
    {{$.i18n.Tr "repo.issues.owner"}}
    {{end}} @@ -118,7 +121,7 @@
    {{end}} {{end}} -
    @@ -169,7 +172,7 @@
    {{end}} {{end}} -
    @@ -195,7 +198,7 @@
    diff --git a/templates/repo/issue/view_content/comments.tmpl b/templates/repo/issue/view_content/comments.tmpl index 7b0941148f..235f4c8fc2 100644 --- a/templates/repo/issue/view_content/comments.tmpl +++ b/templates/repo/issue/view_content/comments.tmpl @@ -10,7 +10,8 @@ 22 = REVIEW, 23 = ISSUE_LOCKED, 24 = ISSUE_UNLOCKED, 25 = TARGET_BRANCH_CHANGED, 26 = DELETE_TIME_MANUAL, 27 = REVIEW_REQUEST, 28 = MERGE_PULL_REQUEST, 29 = PULL_PUSH_EVENT, 30 = PROJECT_CHANGED, 31 = PROJECT_BOARD_CHANGED - 32 = DISMISSED_REVIEW --> + 32 = DISMISSED_REVIEW, 33 = COMMENT_TYPE_CHANGE_ISSUE_REF, 34 = PR_SCHEDULE_TO_AUTO_MERGE, + 35 = CANCEL_SCHEDULED_AUTO_MERGE_PR --> {{if eq .Type 0}}
    {{if .OriginalAuthor }} @@ -35,6 +36,11 @@ ({{$.i18n.Tr "repo.migrated_from" ($.Repository.OriginalURL|Escape) ($.Repository.GetOriginalURLHostname|Escape) | Safe }}){{end}} {{else}} + {{if gt .Poster.ID 0}} + + {{avatar .Poster}} + + {{end}} {{.Poster.GetDisplayName}} @@ -505,13 +511,13 @@ {{ range $filename, $lines := .Review.CodeComments}} {{range $line, $comms := $lines}}
    -
    +
    {{$invalid := (index $comms 0).Invalidated}} {{$resolved := (index $comms 0).IsResolved}} {{$resolveDoer := (index $comms 0).ResolveDoer}} {{$isNotPending := (not (eq (index $comms 0).Review.Type 0))}}
    - {{$filename}} + {{$filename}} {{if $invalid }} {{$.i18n.Tr "repo.issues.review.outdated"}} @@ -832,6 +838,15 @@ {{end}}
    + {{else if or (eq .Type 34) (eq .Type 35)}} +
    + {{svg "octicon-git-merge" 16}} + + {{.Poster.GetDisplayName}} + {{if eq .Type 34}}{{$.i18n.Tr "repo.pulls.pull_request_scheduled_auto_merge" $createdStr | Safe}} + {{else}}{{$.i18n.Tr "repo.pulls.pull_request_canceled_scheduled_auto_merge" $createdStr | Safe}}{{end}} + +
    {{end}} {{end}} {{end}} diff --git a/templates/repo/issue/view_content/pull.tmpl b/templates/repo/issue/view_content/pull.tmpl index fef868af1f..195da6bf87 100644 --- a/templates/repo/issue/view_content/pull.tmpl +++ b/templates/repo/issue/view_content/pull.tmpl @@ -329,7 +329,7 @@ {{.CsrfTokenHtml}}
    - +
    @@ -375,7 +375,7 @@ {{.CsrfTokenHtml}}
    - +
    @@ -401,7 +401,7 @@ {{.CsrfTokenHtml}}
    - +
    diff --git a/templates/repo/issue/view_content/sidebar.tmpl b/templates/repo/issue/view_content/sidebar.tmpl index 65c1cf75e8..ffc46bfede 100644 --- a/templates/repo/issue/view_content/sidebar.tmpl +++ b/templates/repo/issue/view_content/sidebar.tmpl @@ -424,12 +424,12 @@

    {{else}} -

    {{.i18n.Tr "repo.issues.due_date_not_set"}}

    +

    {{.i18n.Tr "repo.issues.due_date_not_set"}}

    {{end}} {{if and .HasIssuesOrPullsWritePermission (not .Repository.IsArchived)}} +
    + {{if and .IsRepoAdmin (not .Repository.IsArchived)}}
    @@ -667,5 +676,21 @@
    {{end}} + + {{if and .Issue.IsPull .IsIssuePoster (not .Issue.IsClosed)}} + {{if and (not (eq .Issue.PullRequest.HeadRepo.FullName .Issue.PullRequest.BaseRepo.FullName)) .CanWriteToHeadRepo}} +
    +
    +
    + + +
    +
    + {{end}} + {{end}}
    diff --git a/templates/repo/migrate/migrating.tmpl b/templates/repo/migrate/migrating.tmpl index 6df7f0a65d..8858e88dad 100644 --- a/templates/repo/migrate/migrating.tmpl +++ b/templates/repo/migrate/migrating.tmpl @@ -1,5 +1,5 @@ {{template "base/head" .}} -
    +
    {{template "repo/header" .}}
    diff --git a/templates/repo/packages.tmpl b/templates/repo/packages.tmpl new file mode 100644 index 0000000000..69bea014d7 --- /dev/null +++ b/templates/repo/packages.tmpl @@ -0,0 +1,6 @@ +{{template "base/head" .}} +
    + {{template "repo/header" .}} + {{template "package/shared/list" .}} +
    +{{template "base/footer" .}} diff --git a/templates/repo/projects/view.tmpl b/templates/repo/projects/view.tmpl index e96b92fb77..7d4d21f8fe 100644 --- a/templates/repo/projects/view.tmpl +++ b/templates/repo/projects/view.tmpl @@ -8,7 +8,7 @@
    {{if and .CanWriteProjects (not .Repository.IsArchived)}} - {{.i18n.Tr "repo.issues.new"}} + {{.i18n.Tr "repo.issues.new"}} {{.i18n.Tr "new_project_board"}} {{end}} + +
    +
    +
    + + + {{.i18n.Tr "repo.settings.event_package_desc"}} +
    +
    +
    diff --git a/templates/repo/view_file.tmpl b/templates/repo/view_file.tmpl index 9e1d83b836..c5efd3d2d4 100644 --- a/templates/repo/view_file.tmpl +++ b/templates/repo/view_file.tmpl @@ -128,6 +128,9 @@ {{.i18n.Tr "repo.issues.context.reference_issue"}}
    {{end}} + diff --git a/templates/repo/view_list.tmpl b/templates/repo/view_list.tmpl index bc56041c7d..ef0ab866f5 100644 --- a/templates/repo/view_list.tmpl +++ b/templates/repo/view_list.tmpl @@ -62,7 +62,7 @@ {{if $entry.IsDir}} {{$subJumpablePathName := $entry.GetSubJumpablePathName}} {{$subJumpablePath := SubJumpablePath $subJumpablePathName}} - {{svg "octicon-file-directory"}} + {{svg "octicon-file-directory-fill"}} {{if eq (len $subJumpablePath) 2}} {{index $subJumpablePath 0}}{{index $subJumpablePath 1}} diff --git a/templates/repo/wiki/new.tmpl b/templates/repo/wiki/new.tmpl index 5b8cdc8164..1f9c3788a2 100644 --- a/templates/repo/wiki/new.tmpl +++ b/templates/repo/wiki/new.tmpl @@ -21,11 +21,11 @@
    - +
    diff --git a/templates/repo/wiki/view.tmpl b/templates/repo/wiki/view.tmpl index ce1b00b4f2..04faa90b9e 100644 --- a/templates/repo/wiki/view.tmpl +++ b/templates/repo/wiki/view.tmpl @@ -3,7 +3,7 @@ {{template "repo/header" .}} {{ $title := .title}}
    -